Use the database URL given by the environment variable DATABASE_URL

This commit is contained in:
Me 2020-12-19 02:06:08 +00:00
parent cee711785c
commit f4d704efa0
3 changed files with 21 additions and 8 deletions

View file

@@ -3,6 +3,7 @@ from fastapi import FastAPI, Header, HTTPException
 from pydantic import BaseModel
 import datetime
 import databases
+import os
 import sqlalchemy
 from sqlite3 import OperationalError
@@ -11,9 +12,6 @@ API_VERSION_MINOR = 0
 REST_API_ROOT = f"/energy/v{API_VERSION_MAJOR}/"
-DATABASE_URL = "sqlite:///./energyDB.sqlite"
-# DATABASE_URL = "sqlite://"
-db = databases.Database(DATABASE_URL)
 metadata = sqlalchemy.MetaData()
 energy = sqlalchemy.Table(
     "energy",
@@ -23,9 +21,14 @@ energy = sqlalchemy.Table(
     sqlalchemy.Column("value", sqlalchemy.Float),
     sqlalchemy.UniqueConstraint("timestamp"),
 )
+# DATABASE_URL = "sqlite:///./energyDB.sqlite"
+DATABASE_URL = os.getenv("DATABASE_URL", default="sqlite://")
+print(f"DB URL: {DATABASE_URL}")
+db = databases.Database(DATABASE_URL)
 engine = sqlalchemy.create_engine(
-    DATABASE_URL, connect_args={"check_same_thread": False}
-)
+    DATABASE_URL,
+    connect_args={"check_same_thread": False})
 metadata.create_all(engine)

 class EnergyValue(BaseModel):
@@ -85,8 +88,11 @@ async def getBulkEnergyData(bulkDataRequest: BulkDataRequest):
             .where(energy.c.channel_id == ch) \
             .where(energy.c.timestamp >= bulkDataRequest.fromTime) \
             .where(energy.c.timestamp <= bulkDataRequest.tillTime)
-        data = await db.fetch_all(query)
-        bulkData.append({"channel_id": ch, "data": data})
+        try:
+            data = await db.fetch_all(query)
+            bulkData.append({"channel_id": ch, "data": data})
+        except OperationalError as e:
+            raise HTTPException(status_code=500, detail="Database error")
     return {
         "bulk": bulkData,

View file

@@ -15,6 +15,6 @@ fi
 ARG_HTTP_PORT=${HTTP_PORT:-8000}
 ARG_IP_ADDRESS=${IP_ADDRESS:-127.0.0.1}
-export DB_URL=${DATABASE_URL:-sqlite://}
+export DATABASE_URL
 /usr/bin/env uvicorn --port $ARG_HTTP_PORT --host $ARG_IP_ADDRESS ${UVICORN_ARGS} srv:energyDB

View file

@@ -3,8 +3,14 @@ import pytest
 from datetime import datetime
 import json
+import os
 import urllib.parse
+
+#TODO Use in-memory DB to test the case that there is no table
+#TODO Add helper function to fill the in-memory DB before test
+os.environ["DATABASE_URL"] = "sqlite:///./energyDB.sqlite"
 from srv import energyDB
class Test_energyDb: class Test_energyDb:
@@ -66,6 +72,7 @@ class Test_energyDb:
         assert response.status_code == 200

     def test_bulkData_get(self):
+        print(f"DB_URL: {os.getenv('DATABASE_URL')}")
         # response = self.client.put("/energy/bulkData", json=self.bulkTestData);
         fromTimestamp = datetime.fromisoformat("2020-12-11T12:30:00")
         tillTimestamp = datetime.fromisoformat("2020-12-11T12:30:59")