3 Commits

13 changed files with 184 additions and 27 deletions

7
backend/.dockerignore Normal file

@@ -0,0 +1,7 @@
docker
**/__pycache__
poetry.lock
Dockerfile
docker-compose.yml
config
.venv

26
backend/Dockerfile Normal file

@@ -0,0 +1,26 @@
FROM python:3.11-slim as builder
WORKDIR /app
COPY . /app
RUN apt update && apt install -y proj-bin
RUN pip install --upgrade poetry && \
    poetry config virtualenvs.create false && \
    poetry install --only=main && \
    poetry export -f requirements.txt >> requirements.txt
FROM python:3.11-slim as runtime
COPY . /app
COPY --from=builder /app/requirements.txt /app
RUN apt update && apt install -y postgresql libpq5
RUN pip install --no-cache-dir -r /app/requirements.txt
WORKDIR /app
ENV CONFIG_PATH=./config.sample.yaml
ENV API_KEY=MwP7lbljnXIYAnmmmPRzasHsIknaiKqD
CMD ["python", "./main.py"]

View File

@@ -1,10 +1,11 @@
+from asyncio import sleep
 from logging import getLogger
 from typing import Annotated, AsyncIterator
 
 from fastapi import Depends
 from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
 from sqlalchemy import text
-from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.exc import OperationalError, SQLAlchemyError
 from sqlalchemy.ext.asyncio import (
     async_sessionmaker,
     AsyncEngine,
@@ -56,11 +57,20 @@ class Database:
             class_=AsyncSession,
         )
 
-        async with self._async_engine.begin() as session:
-            await session.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm;"))
-            if clear_static_data:
-                await session.run_sync(Base.metadata.drop_all)
-            await session.run_sync(Base.metadata.create_all)
+        ret = False
+        while not ret:
+            try:
+                async with self._async_engine.begin() as session:
+                    await session.execute(
+                        text("CREATE EXTENSION IF NOT EXISTS pg_trgm;")
+                    )
+                    if clear_static_data:
+                        await session.run_sync(Base.metadata.drop_all)
+                    await session.run_sync(Base.metadata.create_all)
+                ret = True
+            except OperationalError as err:
+                logger.error(err)
+                await sleep(1)
 
         return True
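
The retry loop above lets the backend tolerate Postgres becoming reachable only after the API container has started (the docker-compose setup added below gives no ordering guarantee). A minimal standalone sketch of the same wait-until-ready pattern; wait_for_database is an illustrative helper, not a function from this commit:

from asyncio import sleep
from logging import getLogger

from sqlalchemy.exc import OperationalError
from sqlalchemy.ext.asyncio import AsyncEngine

logger = getLogger(__name__)


async def wait_for_database(engine: AsyncEngine) -> None:
    # Illustrative helper (not part of the diff): block until the first
    # transaction succeeds, i.e. until Postgres accepts connections.
    while True:
        try:
            async with engine.begin():
                return
        except OperationalError as err:
            logger.error(err)
            await sleep(1)  # back off for a second, then retry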

View File

@@ -1,4 +1,6 @@
-from pydantic import BaseModel, BaseSettings, Field, SecretStr
+from typing import Any
+
+from pydantic import BaseModel, BaseSettings, Field, root_validator, SecretStr
 
 
 class HttpSettings(BaseModel):
@@ -7,14 +9,43 @@ class HttpSettings(BaseModel):
     cert: str | None = None
 
 
+def check_user_password(cls, values: dict[str, Any]) -> dict[str, Any]:
+    user = values.get("user")
+    password = values.get("password")
+
+    if user is not None and password is None:
+        raise ValueError("user is set, password shall be set too.")
+    if password is not None and user is None:
+        raise ValueError("password is set, user shall be set too.")
+
+    return values
+
+
 class DatabaseSettings(BaseModel):
     name: str = "carrramba-encore-rate"
     host: str = "127.0.0.1"
     port: int = 5432
     driver: str = "postgresql+psycopg"
-    user: str = "cer"
+    user: str | None = None
     password: SecretStr | None = None
 
+    _user_password_validation = root_validator(allow_reuse=True)(check_user_password)
+
+
+class CacheSettings(BaseModel):
+    enable: bool = False
+    host: str = "127.0.0.1"
+    port: int = 6379
+    user: str | None = None
+    password: SecretStr | None = None
+
+    _user_password_validation = root_validator(allow_reuse=True)(check_user_password)
+
+
+class TracingSettings(BaseModel):
+    enable: bool = False
+
+
 class Settings(BaseSettings):
     app_name: str
@@ -24,3 +55,5 @@ class Settings(BaseSettings):
     http: HttpSettings = HttpSettings()
     db: DatabaseSettings = DatabaseSettings()
+    cache: CacheSettings = CacheSettings()
+    tracing: TracingSettings = TracingSettings()
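
The reused check_user_password root validator should make half-configured credentials fail fast. A quick illustrative check of the expected behaviour, assuming pydantic v1 validator-reuse semantics (this snippet is not part of the commit):

from pydantic import ValidationError

# Both or neither credential set: accepted.
DatabaseSettings(user="cer", password="cer_password")
DatabaseSettings()

# Only one of the pair set: rejected by the shared root validator.
try:
    DatabaseSettings(user="cer")
except ValidationError as err:
    print(err)  # "user is set, password shall be set too."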

View File

@@ -1,8 +1,9 @@
 app_name: carrramba-encore-rate
+clear_static_data: false
 
 http:
   host: 0.0.0.0
-  port: 4443
+  port: 8080
   cert: ./config/cert.pem
 
 db:
@@ -12,3 +13,9 @@ db:
   driver: postgresql+psycopg
   user: cer
   password: cer_password
+
+cache:
+  enable: true
+
+tracing:
+  enable: false

View File

@@ -0,0 +1,23 @@
app_name: carrramba-encore-rate
clear_static_data: false

http:
  host: 0.0.0.0
  port: 8080
  # cert: ./config/cert.pem

db:
  name: carrramba-encore-rate
  host: postgres
  port: 5432
  driver: postgresql+psycopg
  user: cer
  password: cer_password

cache:
  enable: true
  host: redis
  # TODO: Add user credentials

tracing:
  enable: false

View File

@@ -1,10 +1,12 @@
 from os import environ
 
+from fastapi_cache.backends.redis import RedisBackend
+from redis import asyncio as aioredis
 from yaml import safe_load
 
 from backend.db import db
 from backend.idfm_interface import IdfmInterface
-from backend.settings import Settings
+from backend.settings import CacheSettings, Settings
 
 
 CONFIG_PATH = environ.get("CONFIG_PATH", "./config.sample.yaml")
@@ -20,3 +22,17 @@ def load_settings(path: str) -> Settings:
 
 settings = load_settings(CONFIG_PATH)
 idfm_interface = IdfmInterface(settings.idfm_api_key.get_secret_value(), db)
+
+
+def init_redis_backend(settings: CacheSettings) -> RedisBackend:
+    login = f"{settings.user}:{settings.password}@" if settings.user is not None else ""
+    url = f"redis://{login}{settings.host}:{settings.port}"
+
+    redis_connections_pool = aioredis.from_url(
+        url, encoding="utf8", decode_responses=True
+    )
+
+    return RedisBackend(redis_connections_pool)
+
+
+redis_backend = init_redis_backend(settings.cache)
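
For the credential-less Docker sample config above (host redis, default port), the factory should hand aioredis.from_url a bare redis://redis:6379 URL. A small hedged usage sketch; the concrete values mirror that sample config and are assumptions, not output from a running instance:

# CacheSettings and init_redis_backend come from this commit; values are illustrative.
cache_settings = CacheSettings(enable=True, host="redis", port=6379)
backend = init_redis_backend(cache_settings)  # lazily pools connections to redis://redis:6379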

View File

@@ -5,6 +5,7 @@ from contextlib import asynccontextmanager
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles
+from fastapi_cache import FastAPICache
 from opentelemetry import trace
 from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
 from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
@@ -13,12 +14,14 @@ from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import BatchSpanProcessor
 
 from backend.db import db
-from dependencies import idfm_interface, settings
+from dependencies import idfm_interface, redis_backend, settings
 from routers import line, stop
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
+    FastAPICache.init(redis_backend, prefix="api", enable=settings.cache.enable)
+
     await db.connect(settings.db, settings.clear_static_data)
     if settings.clear_static_data:
         await idfm_interface.startup()
@@ -44,20 +47,29 @@ app.include_router(line.router)
 app.include_router(stop.router)
 
-FastAPIInstrumentor.instrument_app(app)
+if settings.tracing.enable:
+    FastAPIInstrumentor.instrument_app(app)
 
-trace.set_tracer_provider(
-    TracerProvider(resource=Resource.create({SERVICE_NAME: settings.app_name}))
-)
-trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
-tracer = trace.get_tracer(settings.app_name)
+    trace.set_tracer_provider(
+        TracerProvider(resource=Resource.create({SERVICE_NAME: settings.app_name}))
+    )
+    trace.get_tracer_provider().add_span_processor(
+        BatchSpanProcessor(OTLPSpanExporter())
+    )
+    tracer = trace.get_tracer(settings.app_name)
 
 
 if __name__ == "__main__":
     http_settings = settings.http
 
-    uvicorn.run(
-        app,
+    config = uvicorn.Config(
+        app=app,
         host=http_settings.host,
         port=http_settings.port,
         ssl_certfile=http_settings.cert,
+        proxy_headers=True,
     )
+    server = uvicorn.Server(config)
+    server.run()

View File

@@ -25,6 +25,7 @@ opentelemetry-instrumentation-sqlalchemy = "^0.38b0"
 sqlalchemy = "^2.0.12"
 psycopg = "^3.1.9"
 pyyaml = "^6.0"
+fastapi-cache2 = {extras = ["redis"], version = "^0.2.1"}
 
 [build-system]
 requires = ["poetry-core"]

View File

@@ -1,4 +1,5 @@
 from fastapi import APIRouter, HTTPException
+from fastapi_cache.decorator import cache
 
 from backend.models import Line
 from backend.schemas import Line as LineSchema, TransportMode
@@ -8,6 +9,7 @@ router = APIRouter(prefix="/line", tags=["line"])
 @router.get("/{line_id}", response_model=LineSchema)
+@cache(namespace="line")
 async def get_line(line_id: int) -> LineSchema:
     line: Line | None = await Line.get_by_id(line_id)
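
The @cache decorator only takes effect once FastAPICache.init has run (done in lifespan above); repeat calls within the expire window, where one is set, are answered from the backend instead of re-running the handler. A self-contained sketch of the same wiring, using fastapi-cache2's in-memory backend so it runs without Redis; the endpoint is illustrative, not from the diff:

from fastapi import FastAPI
from fastapi_cache import FastAPICache
from fastapi_cache.backends.inmemory import InMemoryBackend
from fastapi_cache.decorator import cache

app = FastAPI()


@app.on_event("startup")
async def startup() -> None:
    # The diff wires a RedisBackend; the in-memory backend keeps this sketch standalone.
    FastAPICache.init(InMemoryBackend(), prefix="api")


@app.get("/ping")
@cache(expire=30)  # cached for 30 seconds, like /nextPassages below
async def ping() -> dict[str, int]:
    return {"value": 42}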

View File

@@ -3,6 +3,7 @@ from datetime import datetime
 from typing import Sequence
 
 from fastapi import APIRouter, HTTPException
+from fastapi_cache.decorator import cache
 
 from backend.idfm_interface import (
     Destinations as IdfmDestinations,
@@ -40,6 +41,7 @@ def optional_datetime_to_ts(dt: datetime | None) -> int | None:
 # TODO: Add limit support
 @router.get("/")
+@cache(namespace="stop")
 async def get_stop(
     name: str = "", limit: int = 10
 ) -> Sequence[StopAreaSchema | StopSchema] | None:
@@ -83,8 +85,8 @@ async def get_stop(
     return formatted
 
 
-# TODO: Cache response for 30 secs ?
 @router.get("/{stop_id}/nextPassages")
+@cache(namespace="stop-nextPassages", expire=30)
 async def get_next_passages(stop_id: int) -> NextPassagesSchema | None:
     res = await idfm_interface.get_next_passages(stop_id)
     if res is None:
@@ -149,6 +151,7 @@ async def get_next_passages(stop_id: int) -> NextPassagesSchema | None:
 @router.get("/{stop_id}/destinations")
+@cache(namespace="stop-destinations", expire=30)
 async def get_stop_destinations(
     stop_id: int,
 ) -> IdfmDestinations | None:
@@ -158,6 +161,7 @@ async def get_stop_destinations(
 @router.get("/{stop_id}/shape")
+@cache(namespace="stop-shape")
 async def get_stop_shape(stop_id: int) -> StopShapeSchema | None:
     connection_area = None

View File

@@ -15,8 +15,15 @@ services:
     ports:
       - "127.0.0.1:5432:5432"
     volumes:
-      - ./docker/database/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
-      - ./docker/database/data:/var/lib/postgresql/data
+      - ./backend/docker/database/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
+      - ./backend/docker/database/data:/var/lib/postgresql/data
+
+  redis:
+    image: redis:latest
+    restart: always
+    command: redis-server --loglevel warning
+    ports:
+      - "127.0.0.1:6379:6379"
 
   jaeger-agent:
     image: jaegertracing/jaeger-agent:latest
@@ -45,10 +52,6 @@ services:
     ports:
       - "127.0.0.1:4317:4317"
       - "127.0.0.1:4318:4318"
-      # - "127.0.0.1:9411:9411"
-      # - "127.0.0.1:14250:14250"
-      # - "127.0.0.1:14268:14268"
-      # - "127.0.0.1:14269:14269"
     restart: on-failure
     depends_on:
       - cassandra-schema
@@ -68,7 +71,16 @@ services:
- "--cassandra.servers=cassandra" - "--cassandra.servers=cassandra"
ports: ports:
- "127.0.0.1:16686:16686" - "127.0.0.1:16686:16686"
# - "127.0.0.1:16687:16687"
restart: on-failure restart: on-failure
depends_on: depends_on:
- cassandra-schema - cassandra-schema
carrramba-encore-rate-api:
build: ./backend/
ports:
- "127.0.0.1:8080:8080"
carrramba-encore-rate-frontend:
build: ./frontend/
ports:
- "127.0.0.1:80:8081"

4
frontend/Dockerfile Normal file

@@ -0,0 +1,4 @@
# pull the latest official nginx image
FROM nginx:mainline-alpine-slim
COPY dist /usr/share/nginx/html