27 Commits

Author SHA1 Message Date
824536ddbe 💥 Rename API_KEY to IDFM_API_KEY 2023-05-28 12:45:03 +02:00
7fbdd0606c Reduce the size of the backend docker image 2023-05-28 12:40:10 +02:00
581f6b7b8f 🐛 Add workaround for fastapi-cache issue #144 2023-05-28 10:45:14 +02:00
404b228cbf 🔥 Remove env variables from backend dockerfile 2023-05-26 23:55:58 +02:00
e2ff90cd5f Use Redis to cache REST responses 2023-05-26 18:10:47 +02:00
cd700ebd42 🐛 The backend shall serve requests once the database is reachable 2023-05-26 18:09:24 +02:00
c44a52b7ae ♻️ Add backend and frontend to docker-compose 2023-05-26 18:01:04 +02:00
b3b36bc3de Replace rich with icecream for temporary tracing 2023-05-11 21:44:58 +02:00
5e0d7b174c 🏷️ Fix some type issues (mypy) 2023-05-11 21:40:38 +02:00
b437bbbf70 🎨 Split main into several APIRouters 2023-05-11 21:17:02 +02:00
85fdb28cc6 🐛 Set default value to Settings.clear_static_data 2023-05-11 20:31:24 +02:00
b894d68a7a ♻️ Use of pydantic to manage config+env variables
The FastAPI release has been updated, allowing the lifespan parameter to be used
to prepare and shut down sub-components (a minimal sketch of this pattern follows the commit list).
2023-05-10 22:30:30 +02:00
ef26509b87 🐛 Fix invalid line id returned by /stop/{stop_id}/nextPassages endpoint 2023-05-09 23:25:30 +02:00
de82eb6c55 🔊 Reformat error logs generated by frontend BusinessData class 2023-05-08 17:25:14 +02:00
0ba4c1e6fa Stop.postal_region and Line.id/operator_id can be integer values 2023-05-08 17:15:21 +02:00
0f1c16ab53 💄 Force App to use 100% of the shortest side 2023-05-08 16:17:56 +02:00
5692bc96d5 🐛 Check backend return codes before handling data 2023-05-08 15:05:43 +02:00
d15fee75ca ♻️ Fix /stop/ endpoints inconsistency 2023-05-08 13:44:44 +02:00
93047c8706 ♻️ Use declarative table configuration to define backend db tables 2023-05-08 13:17:09 +02:00
c84b78d3e2 🚑️ /stop responses didn't return StopArea.stops fields 2023-05-07 12:36:38 +02:00
c6e3881966 Add sqlalchemy-utils types dependency 2023-05-07 12:20:28 +02:00
b713042359 🗃️ Use of dedicated db sessions 2023-05-07 12:18:12 +02:00
5505209760 Replace asyncpg with psycopg 2023-05-07 11:24:02 +02:00
6aa28f7bfb 🔨 Add OTel/Jaeger to start HTTP/SQL requests monitoring 2023-05-02 23:02:09 +02:00
07d43bfcb4 ⬆️ Add sqlalchemy-utils dep 2023-05-01 23:22:51 +02:00
6eb78d7307 /stop API endpoint uses stop and towns names to resolve queries 2023-05-01 22:42:02 +02:00
bcedf32bec Merge branch 'reduce-frontend-bundle-size' into develop 2023-05-01 22:36:37 +02:00
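Commit b894d68a7a above replaces ad-hoc environment handling with pydantic settings and FastAPI's lifespan hook. A minimal sketch of that pattern, assuming the pydantic v1 style BaseSettings this backend relies on (names and defaults below are illustrative, not the project's):

from contextlib import asynccontextmanager

from fastapi import FastAPI
from pydantic import BaseSettings, Field, SecretStr


class ExampleSettings(BaseSettings):
    # Illustrative fields; the real Settings class lives in backend/settings.py.
    app_name: str = "example-app"
    idfm_api_key: SecretStr = Field(SecretStr(""), env="IDFM_API_KEY")


settings = ExampleSettings()


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Prepare sub-components (database, cache, ...) before serving requests.
    print(f"starting {settings.app_name}")
    yield
    # Shut them down once the application stops.
    print("stopping")


app = FastAPI(lifespan=lifespan)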
25 changed files with 988 additions and 502 deletions

12
backend/.dockerignore Normal file
View File

@@ -0,0 +1,12 @@
.dir-locals.el
.dockerignore
.gitignore
**/.mypy_cache
**/.ruff_cache
.venv
**/__pycache__
config
docker
poetry.lock
tests
Dockerfile

31
backend/Dockerfile Normal file
View File

@@ -0,0 +1,31 @@
FROM python:3.11-slim as builder
WORKDIR /app
COPY ./pyproject.toml /app
RUN apt update && \
apt install -y --no-install-recommends proj-bin && \
apt clean && \
rm -rf /var/lib/apt/lists/*
RUN pip install --upgrade poetry && \
poetry config virtualenvs.create false && \
poetry install --only=main && \
poetry export -f requirements.txt >> requirements.txt
FROM python:3.11-slim as runtime
COPY . /app
COPY --from=builder /app/requirements.txt /app
RUN apt update && \
apt install -y --no-install-recommends postgresql libpq5 && \
apt clean && \
rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir -r /app/requirements.txt
WORKDIR /app
CMD ["python", "./main.py"]

View File

@@ -1,5 +1,6 @@
 from __future__ import annotations

+from logging import getLogger
 from typing import Iterable, Self, TYPE_CHECKING

 from sqlalchemy import select
@@ -9,31 +10,36 @@ from sqlalchemy.orm import DeclarativeBase
 if TYPE_CHECKING:
     from .db import Database

+logger = getLogger(__name__)
+

 class Base(DeclarativeBase):
     db: Database | None = None

     @classmethod
-    async def add(cls, stops: Self | Iterable[Self]) -> bool:
-        try:
-            if isinstance(stops, Iterable):
-                cls.db.session.add_all(stops)  # type: ignore
-            else:
-                cls.db.session.add(stops)  # type: ignore
-            await cls.db.session.commit()  # type: ignore
-        except (AttributeError, IntegrityError) as err:
-            print(err)
-            return False
-        return True
+    async def add(cls, objs: Self | Iterable[Self]) -> bool:
+        if cls.db is not None and (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                try:
+                    if isinstance(objs, Iterable):
+                        session.add_all(objs)
+                    else:
+                        session.add(objs)
+                except (AttributeError, IntegrityError) as err:
+                    logger.error(err)
+                    return False
+            return True

     @classmethod
-    async def get_by_id(cls, id_: int | str) -> Self | None:
-        try:
-            stmt = select(cls).where(cls.id == id_)  # type: ignore
-            res = await cls.db.session.execute(stmt)  # type: ignore
-            element = res.scalar_one_or_none()
-        except AttributeError as err:
-            print(err)
-            element = None
-        return element
+    async def get_by_id(cls, id_: int | str) -> Self | None:
+        if cls.db is not None and (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                stmt = select(cls).where(cls.id == id_)
+                res = await session.execute(stmt)
+                return res.scalar_one_or_none()
+        return None

View File

@@ -1,4 +1,11 @@
+from asyncio import sleep
+from logging import getLogger
+from typing import Annotated, AsyncIterator
+
+from fastapi import Depends
+from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
 from sqlalchemy import text
+from sqlalchemy.exc import OperationalError, SQLAlchemyError
 from sqlalchemy.ext.asyncio import (
     async_sessionmaker,
     AsyncEngine,
@@ -7,42 +14,66 @@ from sqlalchemy.ext.asyncio import (
 )

 from .base_class import Base
+from ..settings import DatabaseSettings
+
+logger = getLogger(__name__)


 class Database:
     def __init__(self) -> None:
-        self._engine: AsyncEngine | None = None
-        self._session_maker: async_sessionmaker[AsyncSession] | None = None
-        self._session: AsyncSession | None = None
+        self._async_engine: AsyncEngine | None = None
+        self._async_session_local: async_sessionmaker[AsyncSession] | None = None

-    @property
-    def session(self) -> AsyncSession | None:
-        if self._session is None and (session_maker := self._session_maker) is not None:
-            self._session = session_maker()
-        return self._session
+    async def get_session(self) -> AsyncSession | None:
+        try:
+            return self._async_session_local()  # type: ignore
+        except (SQLAlchemyError, AttributeError) as e:
+            logger.exception(e)
+            return None

-    async def connect(self, db_path: str, clear_static_data: bool = False) -> bool:
-        # TODO: Preserve UserLastStopSearchResults table from drop.
-        self._engine = create_async_engine(db_path)
-        if self._engine is not None:
-            self._session_maker = async_sessionmaker(
-                self._engine, expire_on_commit=False, class_=AsyncSession
-            )
-        if (session := self.session) is not None:
-            await session.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm;"))
-        async with self._engine.begin() as conn:
-            if clear_static_data:
-                await conn.run_sync(Base.metadata.drop_all)
-            await conn.run_sync(Base.metadata.create_all)
+    # TODO: Preserve UserLastStopSearchResults table from drop.
+    async def connect(
+        self, settings: DatabaseSettings, clear_static_data: bool = False
+    ) -> bool:
+        password = settings.password
+        path = (
+            f"{settings.driver}://{settings.user}:"
+            f"{password.get_secret_value() if password is not None else ''}"
+            f"@{settings.host}:{settings.port}/{settings.name}"
+        )
+        self._async_engine = create_async_engine(
+            path, pool_pre_ping=True, pool_size=10, max_overflow=20
+        )
+        if self._async_engine is not None:
+            SQLAlchemyInstrumentor().instrument(engine=self._async_engine.sync_engine)
+            self._async_session_local = async_sessionmaker(
+                bind=self._async_engine,
+                # autoflush=False,
+                expire_on_commit=False,
+                class_=AsyncSession,
+            )
+
+        ret = False
+        while not ret:
+            try:
+                async with self._async_engine.begin() as session:
+                    await session.execute(
+                        text("CREATE EXTENSION IF NOT EXISTS pg_trgm;")
+                    )
+                    if clear_static_data:
+                        await session.run_sync(Base.metadata.drop_all)
+                    await session.run_sync(Base.metadata.create_all)
+                ret = True
+            except OperationalError as err:
+                logger.error(err)
+                await sleep(1)
         return True

     async def disconnect(self) -> None:
-        if self._session is not None:
-            await self._session.close()
-            self._session = None
-        if self._engine is not None:
-            await self._engine.dispose()
+        if self._async_engine is not None:
+            await self._async_engine.dispose()

View File

@@ -1,4 +1,5 @@
 from collections import defaultdict
+from logging import getLogger
 from re import compile as re_compile
 from time import time
 from typing import (
@@ -15,7 +16,7 @@ from aiohttp import ClientSession
 from msgspec import ValidationError
 from msgspec.json import Decoder
 from pyproj import Transformer
-from shapefile import Reader as ShapeFileReader, ShapeRecord
+from shapefile import Reader as ShapeFileReader, ShapeRecord  # type: ignore

 from ..db import Database
 from ..models import ConnectionArea, Line, LinePicto, Stop, StopArea, StopShape
@@ -37,6 +38,9 @@ from .idfm_types import (
 from .ratp_types import Picto as RatpPicto

+logger = getLogger(__name__)
+

 class IdfmInterface:
     IDFM_ROOT_URL = "https://prim.iledefrance-mobilites.fr/marketplace"
@@ -55,7 +59,7 @@ class IdfmInterface:
     )

     OPERATOR_RE = re_compile(r"[^:]+:Operator::([^:]+):")
-    LINE_RE = re_compile(r"[^:]+:Line::([^:]+):")
+    LINE_RE = re_compile(r"[^:]+:Line::C([^:]+):")

     def __init__(self, api_key: str, database: Database) -> None:
         self._api_key = api_key
@@ -357,11 +361,23 @@ class IdfmInterface:
             fields = line.fields

             picto_id = fields.picto.id_ if fields.picto is not None else None
-            picto = await LinePicto.get_by_id(picto_id) if picto_id else None
+
+            line_id = fields.id_line
+            try:
+                formatted_line_id = int(line_id[1:] if line_id[0] == "C" else line_id)
+            except ValueError:
+                logger.warning("Unable to format %s line id.", line_id)
+                continue
+
+            try:
+                operator_id = int(fields.operatorref)  # type: ignore
+            except (ValueError, TypeError):
+                logger.warning("Unable to format %s operator id.", fields.operatorref)
+                operator_id = 0

             ret.append(
                 Line(
-                    id=fields.id_line,
+                    id=formatted_line_id,
                     short_name=fields.shortname_line,
                     name=fields.name_line,
                     status=IdfmLineState(fields.status.value),
@@ -374,7 +390,7 @@ class IdfmInterface:
                     ),
                     colour_web_hexa=fields.colourweb_hexa,
                     text_colour_hexa=fields.textcolourprint_hexa,
-                    operator_id=optional_value(fields.operatorref),
+                    operator_id=operator_id,
                     operator_name=optional_value(fields.operatorname),
                     accessibility=IdfmState(fields.accessibility.value),
                     visual_signs_available=IdfmState(
@@ -384,7 +400,6 @@ class IdfmInterface:
                         fields.audiblesigns_available.value
                     ),
                     picto_id=fields.picto.id_ if fields.picto is not None else None,
-                    picto=picto,
                     record_id=line.recordid,
                     record_ts=int(line.record_timestamp.timestamp()),
                 )
@@ -405,13 +420,21 @@ class IdfmInterface:
                 fields.arrxepsg2154, fields.arryepsg2154
             )

+            try:
+                postal_region = int(fields.arrpostalregion)
+            except ValueError:
+                logger.warning(
+                    "Unable to format %s postal region.", fields.arrpostalregion
+                )
+                continue
+
             yield Stop(
                 id=int(fields.arrid),
                 name=fields.arrname,
                 epsg3857_x=epsg3857_point[0],
                 epsg3857_y=epsg3857_point[1],
                 town_name=fields.arrtown,
-                postal_region=fields.arrpostalregion,
+                postal_region=postal_region,
                 transport_mode=TransportMode(fields.arrtype.value),
                 version=fields.arrversion,
                 created_ts=created_ts,
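The hunk above tightens LINE_RE and converts IDFM identifiers to plain integers before they reach the database. A standalone sketch of that parsing step, using a made-up line reference for illustration:

from re import compile as re_compile

# Same pattern as in the diff above; the sample value below is invented for the example.
LINE_RE = re_compile(r"[^:]+:Line::C([^:]+):")


def parse_line_id(line_ref: str) -> int | None:
    # Return the numeric line id, or None when the reference cannot be parsed.
    match = LINE_RE.match(line_ref)
    if match is None:
        return None
    try:
        return int(match.group(1))
    except ValueError:
        return None


print(parse_line_id("STIF:Line::C01742:"))  # -> 1742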

View File

@@ -5,13 +5,11 @@ from typing import Iterable, Self, Sequence
 from sqlalchemy import (
     BigInteger,
     Boolean,
-    Column,
     Enum,
     ForeignKey,
     Integer,
     select,
     String,
-    Table,
 )
 from sqlalchemy.orm import Mapped, mapped_column, relationship, selectinload
 from sqlalchemy.sql.expression import tuple_
@@ -25,12 +23,14 @@ from ..idfm_interface.idfm_types import (
 )
 from .stop import _Stop

-line_stop_association_table = Table(
-    "line_stop_association_table",
-    Base.metadata,
-    Column("line_id", ForeignKey("lines.id")),
-    Column("stop_id", ForeignKey("_stops.id")),
-)
+
+class LineStopAssociations(Base):
+    id = mapped_column(BigInteger, primary_key=True)
+    line_id = mapped_column(BigInteger, ForeignKey("lines.id"))
+    stop_id = mapped_column(BigInteger, ForeignKey("_stops.id"))
+
+    __tablename__ = "line_stop_associations"


 class LinePicto(Base):
@@ -53,7 +53,7 @@ class Line(Base):
     db = db

-    id = mapped_column(String, primary_key=True)
+    id = mapped_column(BigInteger, primary_key=True)
     short_name = mapped_column(String)
     name = mapped_column(String, nullable=False)
@@ -68,7 +68,7 @@ class Line(Base):
     colour_web_hexa = mapped_column(String, nullable=False)
     text_colour_hexa = mapped_column(String, nullable=False)

-    operator_id = mapped_column(String)
+    operator_id = mapped_column(Integer)
     operator_name = mapped_column(String)

     accessibility = mapped_column(Enum(IdfmState), nullable=False)
@@ -83,7 +83,7 @@ class Line(Base):
     stops: Mapped[list[_Stop]] = relationship(
         "_Stop",
-        secondary=line_stop_association_table,
+        secondary="line_stop_associations",
         back_populates="lines",
         lazy="selectin",
     )
@@ -94,23 +94,24 @@ class Line(Base):
     async def get_by_name(
         cls, name: str, operator_name: None | str = None
     ) -> Sequence[Self] | None:
-        session = cls.db.session
-        if session is None:
-            return None
-
-        filters = {"name": name}
-        if operator_name is not None:
-            filters["operator_name"] = operator_name
-
-        stmt = (
-            select(cls)
-            .filter_by(**filters)
-            .options(selectinload(cls.stops), selectinload(cls.picto))
-        )
-        res = await session.execute(stmt)
-        lines = res.scalars().all()
-
-        return lines
+        if (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                filters = {"name": name}
+                if operator_name is not None:
+                    filters["operator_name"] = operator_name
+
+                stmt = (
+                    select(cls)
+                    .filter_by(**filters)
+                    .options(selectinload(cls.stops), selectinload(cls.picto))
+                )
+                res = await session.execute(stmt)
+                lines = res.scalars().all()
+
+                return lines
+        return None

     @classmethod
     async def _add_picto_to_line(cls, line: str | Self, picto: LinePicto) -> None:
@@ -133,57 +134,63 @@ class Line(Base):
     @classmethod
     async def add_pictos(cls, line_to_pictos: Iterable[tuple[str, LinePicto]]) -> bool:
-        session = cls.db.session
-        if session is None:
-            return False
-
-        await asyncio_gather(
-            *[cls._add_picto_to_line(line, picto) for line, picto in line_to_pictos]
-        )
-        await session.commit()
-        return True
+        if (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                await asyncio_gather(
+                    *[
+                        cls._add_picto_to_line(line, picto)
+                        for line, picto in line_to_pictos
+                    ]
+                )
+            return True
+        return False

     @classmethod
     async def add_stops(cls, line_to_stop_ids: Iterable[tuple[str, str, int]]) -> int:
-        session = cls.db.session
-        if session is None:
-            return 0
-
-        line_names_ops, stop_ids = set(), set()
-        for line_name, operator_name, stop_id in line_to_stop_ids:
-            line_names_ops.add((line_name, operator_name))
-            stop_ids.add(stop_id)
-
-        lines_res = await session.execute(
-            select(Line).where(
-                tuple_(Line.name, Line.operator_name).in_(line_names_ops)
-            )
-        )
-        lines = defaultdict(list)
-        for line in lines_res.scalars():
-            lines[(line.name, line.operator_name)].append(line)
-
-        stops_res = await session.execute(select(_Stop).where(_Stop.id.in_(stop_ids)))
-        stops = {stop.id: stop for stop in stops_res.scalars()}
-
-        found = 0
-        for line_name, operator_name, stop_id in line_to_stop_ids:
-            if (stop := stops.get(stop_id)) is not None:
-                if (stop_lines := lines.get((line_name, operator_name))) is not None:
-                    for stop_line in stop_lines:
-                        stop_line.stops.append(stop)
-                    found += 1
-                else:
-                    print(f"No line found for {line_name}/{operator_name}")
-            else:
-                print(
-                    f"No stop found for {stop_id} id"
-                    f"(used by {line_name}/{operator_name})"
-                )
-        await session.commit()
-
-        return found
+        if (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                line_names_ops, stop_ids = set(), set()
+                for line_name, operator_name, stop_id in line_to_stop_ids:
+                    line_names_ops.add((line_name, operator_name))
+                    stop_ids.add(stop_id)
+
+                lines_res = await session.execute(
+                    select(Line).where(
+                        tuple_(Line.name, Line.operator_name).in_(line_names_ops)
+                    )
+                )
+                lines = defaultdict(list)
+                for line in lines_res.scalars():
+                    lines[(line.name, line.operator_name)].append(line)
+
+                stops_res = await session.execute(
+                    select(_Stop).where(_Stop.id.in_(stop_ids))
+                )
+                stops = {stop.id: stop for stop in stops_res.scalars()}
+
+                found = 0
+                for line_name, operator_name, stop_id in line_to_stop_ids:
+                    if (stop := stops.get(stop_id)) is not None:
+                        if (
+                            stop_lines := lines.get((line_name, operator_name))
+                        ) is not None:
+                            for stop_line in stop_lines:
+                                stop_line.stops.append(stop)
+                            found += 1
+                        else:
+                            print(f"No line found for {line_name}/{operator_name}")
+                    else:
+                        print(
+                            f"No stop found for {stop_id} id"
+                            f"(used by {line_name}/{operator_name})"
+                        )
+                return found
+        return 0

View File

@@ -1,18 +1,20 @@
 from __future__ import annotations

+from logging import getLogger
 from typing import Iterable, Sequence, TYPE_CHECKING

 from sqlalchemy import (
     BigInteger,
-    Column,
+    Computed,
+    desc,
     Enum,
     Float,
     ForeignKey,
+    func,
     Integer,
     JSON,
     select,
     String,
-    Table,
 )
 from sqlalchemy.orm import (
     mapped_column,
@@ -22,6 +24,7 @@ from sqlalchemy.orm import (
     with_polymorphic,
 )
 from sqlalchemy.schema import Index
+from sqlalchemy_utils.types.ts_vector import TSVectorType

 from ..db import Base, db
 from ..idfm_interface.idfm_types import TransportMode, IdfmState, StopAreaType
@@ -30,12 +33,15 @@ if TYPE_CHECKING:
     from .line import Line

-stop_area_stop_association_table = Table(
-    "stop_area_stop_association_table",
-    Base.metadata,
-    Column("stop_id", ForeignKey("_stops.id")),
-    Column("stop_area_id", ForeignKey("stop_areas.id")),
-)
+logger = getLogger(__name__)
+
+
+class StopAreaStopAssociations(Base):
+    id = mapped_column(BigInteger, primary_key=True)
+    stop_id = mapped_column(BigInteger, ForeignKey("_stops.id"))
+    stop_area_id = mapped_column(BigInteger, ForeignKey("stop_areas.id"))
+
+    __tablename__ = "stop_area_stop_associations"


 class _Stop(Base):
@@ -47,7 +53,7 @@ class _Stop(Base):
     name = mapped_column(String, nullable=False, index=True)
     town_name = mapped_column(String, nullable=False)
-    postal_region = mapped_column(String, nullable=False)
+    postal_region = mapped_column(Integer, nullable=False)

     epsg3857_x = mapped_column(Float, nullable=False)
     epsg3857_y = mapped_column(Float, nullable=False)
@@ -57,12 +63,14 @@ class _Stop(Base):
     lines: Mapped[list[Line]] = relationship(
         "Line",
-        secondary="line_stop_association_table",
+        secondary="line_stop_associations",
         back_populates="stops",
         lazy="selectin",
     )
     areas: Mapped[list["StopArea"]] = relationship(
-        "StopArea", secondary=stop_area_stop_association_table, back_populates="stops"
+        "StopArea",
+        secondary="stop_area_stop_associations",
+        back_populates="stops",
     )
     connection_area_id: Mapped[int] = mapped_column(
         ForeignKey("connection_areas.id"), nullable=True
@@ -71,40 +79,45 @@ class _Stop(Base):
         back_populates="stops", lazy="selectin"
     )

+    names_tsv = mapped_column(
+        TSVectorType("name", "town_name", regconfig="french"),
+        Computed("to_tsvector('french', name || ' ' || town_name)", persisted=True),
+    )
+
     __tablename__ = "_stops"
     __mapper_args__ = {"polymorphic_identity": "_stops", "polymorphic_on": kind}
     __table_args__ = (
-        # To optimize the ilike requests
         Index(
-            "name_idx_gin",
-            name,
+            "names_tsv_idx",
+            names_tsv,
             postgresql_ops={"name": "gin_trgm_ops"},
             postgresql_using="gin",
        ),
     )

-    # TODO: Test https://www.cybertec-postgresql.com/en/postgresql-more-performance-for-like-and-ilike-statements/
+    # TODO: Should be able to remove with_polymorphic ?
     @classmethod
-    async def get_by_name(cls, name: str) -> Sequence[type[_Stop]] | None:
-        session = cls.db.session
-        if session is None:
-            return None
-
-        stop_stop_area = with_polymorphic(_Stop, [Stop, StopArea])
-        stmt = (
-            select(stop_stop_area)
-            .where(stop_stop_area.name.ilike(f"%{name}%"))
-            .options(
-                selectinload(stop_stop_area.areas),
-                selectinload(stop_stop_area.lines),
-            )
-        )
-        res = await session.execute(stmt)
-        stops = res.scalars().all()
-
-        return stops
+    async def get_by_name(cls, name: str) -> Sequence[_Stop] | None:
+        if (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                descendants = with_polymorphic(_Stop, "*")
+
+                match_stmt = descendants.names_tsv.match(
+                    name, postgresql_regconfig="french"
+                )
+                ranking_stmt = func.ts_rank_cd(
+                    descendants.names_tsv, func.plainto_tsquery("french", name)
+                )
+                stmt = (
+                    select(descendants).filter(match_stmt).order_by(desc(ranking_stmt))
+                )
+
+                res = await session.execute(stmt)
+                stops = res.scalars().all()
+
+                return stops
+        return None


 class Stop(_Stop):
@@ -131,7 +144,7 @@ class StopArea(_Stop):
     stops: Mapped[list["Stop"]] = relationship(
         "Stop",
-        secondary=stop_area_stop_association_table,
+        secondary="stop_area_stop_associations",
         back_populates="areas",
         lazy="selectin",
     )
@@ -146,41 +159,43 @@ class StopArea(_Stop):
     async def add_stops(
         cls, stop_area_to_stop_ids: Iterable[tuple[int, int]]
     ) -> int | None:
-        session = cls.db.session
-        if session is None:
-            return None
-
-        stop_area_ids, stop_ids = set(), set()
-        for stop_area_id, stop_id in stop_area_to_stop_ids:
-            stop_area_ids.add(stop_area_id)
-            stop_ids.add(stop_id)
-
-        stop_areas_res = await session.scalars(
-            select(StopArea)
-            .where(StopArea.id.in_(stop_area_ids))
-            .options(selectinload(StopArea.stops))
-        )
-        stop_areas: dict[int, StopArea] = {
-            stop_area.id: stop_area for stop_area in stop_areas_res.all()
-        }
-
-        stop_res = await session.execute(select(Stop).where(Stop.id.in_(stop_ids)))
-        stops: dict[int, Stop] = {stop.id: stop for stop in stop_res.scalars()}
-
-        found = 0
-        for stop_area_id, stop_id in stop_area_to_stop_ids:
-            if (stop_area := stop_areas.get(stop_area_id)) is not None:
-                if (stop := stops.get(stop_id)) is not None:
-                    stop_area.stops.append(stop)
-                    found += 1
-                else:
-                    print(f"No stop found for {stop_id} id")
-            else:
-                print(f"No stop area found for {stop_area_id}")
-        await session.commit()
-
-        return found
+        if (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                stop_area_ids, stop_ids = set(), set()
+                for stop_area_id, stop_id in stop_area_to_stop_ids:
+                    stop_area_ids.add(stop_area_id)
+                    stop_ids.add(stop_id)
+
+                stop_areas_res = await session.scalars(
+                    select(StopArea)
+                    .where(StopArea.id.in_(stop_area_ids))
+                    .options(selectinload(StopArea.stops))
+                )
+                stop_areas: dict[int, StopArea] = {
+                    stop_area.id: stop_area for stop_area in stop_areas_res.all()
+                }
+
+                stop_res = await session.execute(
+                    select(Stop).where(Stop.id.in_(stop_ids))
+                )
+                stops: dict[int, Stop] = {stop.id: stop for stop in stop_res.scalars()}
+
+                found = 0
+                for stop_area_id, stop_id in stop_area_to_stop_ids:
+                    if (stop_area := stop_areas.get(stop_area_id)) is not None:
+                        if (stop := stops.get(stop_id)) is not None:
+                            stop_area.stops.append(stop)
+                            found += 1
+                        else:
+                            print(f"No stop found for {stop_id} id")
+                    else:
+                        print(f"No stop area found for {stop_area_id}")
+
+                return found
+        return None


 class StopShape(Base):
@@ -221,38 +236,40 @@ class ConnectionArea(Base):
     async def add_stops(
         cls, conn_area_to_stop_ids: Iterable[tuple[int, int]]
     ) -> int | None:
-        session = cls.db.session
-        if session is None:
-            return None
-
-        conn_area_ids, stop_ids = set(), set()
-        for conn_area_id, stop_id in conn_area_to_stop_ids:
-            conn_area_ids.add(conn_area_id)
-            stop_ids.add(stop_id)
-
-        conn_area_res = await session.execute(
-            select(ConnectionArea)
-            .where(ConnectionArea.id.in_(conn_area_ids))
-            .options(selectinload(ConnectionArea.stops))
-        )
-        conn_areas: dict[int, ConnectionArea] = {
-            conn.id: conn for conn in conn_area_res.scalars()
-        }
-
-        stop_res = await session.execute(select(_Stop).where(_Stop.id.in_(stop_ids)))
-        stops: dict[int, _Stop] = {stop.id: stop for stop in stop_res.scalars()}
-
-        found = 0
-        for conn_area_id, stop_id in conn_area_to_stop_ids:
-            if (conn_area := conn_areas.get(conn_area_id)) is not None:
-                if (stop := stops.get(stop_id)) is not None:
-                    conn_area.stops.append(stop)
-                    found += 1
-                else:
-                    print(f"No stop found for {stop_id} id")
-            else:
-                print(f"No connection area found for {conn_area_id}")
-        await session.commit()
-
-        return found
+        if (session := await cls.db.get_session()) is not None:
+            async with session.begin():
+                conn_area_ids, stop_ids = set(), set()
+                for conn_area_id, stop_id in conn_area_to_stop_ids:
+                    conn_area_ids.add(conn_area_id)
+                    stop_ids.add(stop_id)
+
+                conn_area_res = await session.execute(
+                    select(ConnectionArea)
+                    .where(ConnectionArea.id.in_(conn_area_ids))
+                    .options(selectinload(ConnectionArea.stops))
+                )
+                conn_areas: dict[int, ConnectionArea] = {
+                    conn.id: conn for conn in conn_area_res.scalars()
+                }
+
+                stop_res = await session.execute(
+                    select(Stop).where(Stop.id.in_(stop_ids))
+                )
+                stops: dict[int, Stop] = {stop.id: stop for stop in stop_res.scalars()}
+
+                found = 0
+                for conn_area_id, stop_id in conn_area_to_stop_ids:
+                    if (conn_area := conn_areas.get(conn_area_id)) is not None:
+                        if (stop := stops.get(stop_id)) is not None:
+                            conn_area.stops.append(stop)
+                            found += 1
+                        else:
+                            print(f"No stop found for {stop_id} id")
+                    else:
+                        print(f"No connection area found for {conn_area_id}")
+
+                return found
+        return None

View File

@@ -1,25 +1,27 @@
-from sqlalchemy import Column, ForeignKey, String, Table
+from sqlalchemy import BigInteger, ForeignKey, String
 from sqlalchemy.orm import Mapped, mapped_column, relationship

 from ..db import Base, db
 from .stop import _Stop

-user_last_stop_search_stops_associations_table = Table(
-    "user_last_stop_search_stops_associations_table",
-    Base.metadata,
-    Column("user_mxid", ForeignKey("user_last_stop_search_results.user_mxid")),
-    Column("stop_id", ForeignKey("_stops.id")),
-)
+
+class UserLastStopSearchStopAssociations(Base):
+    id = mapped_column(BigInteger, primary_key=True)
+    user_mxid = mapped_column(
+        String, ForeignKey("user_last_stop_search_results.user_mxid")
+    )
+    stop_id = mapped_column(BigInteger, ForeignKey("_stops.id"))
+
+    __tablename__ = "user_last_stop_search_stop_associations"


 class UserLastStopSearchResults(Base):
     db = db

+    __tablename__ = "user_last_stop_search_results"
+
     user_mxid = mapped_column(String, primary_key=True)
     request_content = mapped_column(String, nullable=False)
     stops: Mapped[_Stop] = relationship(
-        _Stop, secondary=user_last_stop_search_stops_associations_table
+        _Stop, secondary="user_last_stop_search_stop_associations"
     )
-
-    __tablename__ = "user_last_stop_search_results"

View File

@@ -46,7 +46,7 @@ class TransportMode(StrEnum):

 class Line(BaseModel):
-    id: str
+    id: int
     shortName: str
     name: str
     status: IdfmLineState

View File

@@ -4,7 +4,7 @@ from ..idfm_interface.idfm_types import TrainStatus

 class NextPassage(BaseModel):
-    line: str
+    line: int
     operator: str
     destinations: list[str]
     atStop: bool
@@ -19,4 +19,4 @@ class NextPassage(BaseModel):

 class NextPassages(BaseModel):
     ts: int
-    passages: dict[str, dict[str, list[NextPassage]]]
+    passages: dict[int, dict[str, list[NextPassage]]]

View File

@@ -0,0 +1,59 @@
from typing import Any
from pydantic import BaseModel, BaseSettings, Field, root_validator, SecretStr
class HttpSettings(BaseModel):
host: str = "127.0.0.1"
port: int = 8080
cert: str | None = None
def check_user_password(cls, values: dict[str, Any]) -> dict[str, Any]:
user = values.get("user")
password = values.get("password")
if user is not None and password is None:
raise ValueError("user is set, password shall be set too.")
if password is not None and user is None:
raise ValueError("password is set, user shall be set too.")
return values
class DatabaseSettings(BaseModel):
name: str = "carrramba-encore-rate"
host: str = "127.0.0.1"
port: int = 5432
driver: str = "postgresql+psycopg"
user: str | None = None
password: SecretStr | None = None
_user_password_validation = root_validator(allow_reuse=True)(check_user_password)
class CacheSettings(BaseModel):
enable: bool = False
host: str = "127.0.0.1"
port: int = 6379
user: str | None = None
password: SecretStr | None = None
_user_password_validation = root_validator(allow_reuse=True)(check_user_password)
class TracingSettings(BaseModel):
enable: bool = False
class Settings(BaseSettings):
app_name: str
idfm_api_key: SecretStr = Field(..., env="IDFM_API_KEY")
clear_static_data: bool = Field(False, env="CLEAR_STATIC_DATA")
http: HttpSettings = HttpSettings()
db: DatabaseSettings = DatabaseSettings()
cache: CacheSettings = CacheSettings()
tracing: TracingSettings = TracingSettings()

21
backend/config.local.yaml Normal file
View File

@@ -0,0 +1,21 @@
app_name: carrramba-encore-rate
clear_static_data: false
http:
host: 0.0.0.0
port: 8080
cert: ./config/cert.pem
db:
name: carrramba-encore-rate
host: 127.0.0.1
port: 5432
driver: postgresql+psycopg
user: cer
password: cer_password
cache:
enable: true
tracing:
enable: false

View File

@@ -0,0 +1,23 @@
app_name: carrramba-encore-rate
clear_static_data: false
http:
host: 0.0.0.0
port: 8080
# cert: ./config/cert.pem
db:
name: carrramba-encore-rate
host: postgres
port: 5432
driver: postgresql+psycopg
user: cer
password: cer_password
cache:
enable: true
host: redis
# TODO: Add user credentials
tracing:
enable: false

38
backend/dependencies.py Normal file
View File

@@ -0,0 +1,38 @@
from os import environ
from fastapi_cache.backends.redis import RedisBackend
from redis import asyncio as aioredis
from yaml import safe_load
from backend.db import db
from backend.idfm_interface import IdfmInterface
from backend.settings import CacheSettings, Settings
CONFIG_PATH = environ.get("CONFIG_PATH", "./config.sample.yaml")
def load_settings(path: str) -> Settings:
with open(path, "r") as config_file:
config = safe_load(config_file)
return Settings(**config)
settings = load_settings(CONFIG_PATH)
idfm_interface = IdfmInterface(settings.idfm_api_key.get_secret_value(), db)
def init_redis_backend(settings: CacheSettings) -> RedisBackend:
login = f"{settings.user}:{settings.password}@" if settings.user is not None else ""
url = f"redis://{login}{settings.host}:{settings.port}"
redis_connections_pool = aioredis.from_url(
url, encoding="utf8", decode_responses=True
)
return RedisBackend(redis_connections_pool)
redis_backend = init_redis_backend(settings.cache)

View File

@@ -1,19 +0,0 @@
version: '3.7'
services:
database:
image: postgres:15.1-alpine
restart: always
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
logging:
options:
max-size: 10m
max-file: "3"
ports:
- '127.0.0.1:5438:5432'
volumes:
- ./docker/database/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
- ./docker/database/data:/var/lib/postgresql/data

294
backend/main.py Normal file → Executable file
View File

@@ -1,249 +1,93 @@
-from collections import defaultdict
-from datetime import datetime
-from os import environ, EX_USAGE
-from typing import Sequence
-
-from fastapi import FastAPI, HTTPException
+#!/usr/bin/env python3
+
+import uvicorn
+from contextlib import asynccontextmanager
+from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles
-from rich import print
+from fastapi_cache import FastAPICache
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
+from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
+from opentelemetry.sdk.resources import Resource, SERVICE_NAME
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor

 from backend.db import db
-from backend.idfm_interface import Destinations as IdfmDestinations, IdfmInterface
-from backend.models import Line, Stop, StopArea, StopShape
-from backend.schemas import (
-    Line as LineSchema,
-    TransportMode,
-    NextPassage as NextPassageSchema,
-    NextPassages as NextPassagesSchema,
-    Stop as StopSchema,
-    StopArea as StopAreaSchema,
-    StopShape as StopShapeSchema,
-)
-
-API_KEY = environ.get("API_KEY")
-if API_KEY is None:
-    print('No "API_KEY" environment variable set... abort.')
-    exit(EX_USAGE)
-
-# TODO: Remove postgresql+asyncpg from environ variable
-DB_PATH = "postgresql+asyncpg://cer_user:cer_password@127.0.0.1:5438/cer_db"
-
-app = FastAPI()
+from dependencies import idfm_interface, redis_backend, settings
+from routers import line, stop
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    FastAPICache.init(redis_backend, prefix="api", enable=settings.cache.enable)
+    await db.connect(settings.db, settings.clear_static_data)
+    if settings.clear_static_data:
+        await idfm_interface.startup()
+    yield
+    await db.disconnect()
+
+
+app = FastAPI(lifespan=lifespan)

 app.add_middleware(
     CORSMiddleware,
-    allow_origins=[
-        "https://localhost:4443",
-        "https://localhost:3000",
-    ],
+    allow_origins=["https://localhost:4443", "https://localhost:3000"],
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
 )

-idfm_interface = IdfmInterface(API_KEY, db)
-
-
-@app.on_event("startup")
-async def startup():
-    await db.connect(DB_PATH, clear_static_data=True)
-    await idfm_interface.startup()
-    # await db.connect(DB_PATH, clear_static_data=False)
-    print("Connected")
-
-
-@app.on_event("shutdown")
-async def shutdown():
-    await db.disconnect()
-
-
-STATIC_ROOT = "../frontend/"
-app.mount("/widget", StaticFiles(directory=STATIC_ROOT, html=True), name="widget")
-
-
-def optional_datetime_to_ts(dt: datetime | None) -> int | None:
-    return int(dt.timestamp()) if dt else None
-
-
-@app.get("/line/{line_id}", response_model=LineSchema)
-async def get_line(line_id: str) -> LineSchema:
-    line: Line | None = await Line.get_by_id(line_id)
-    if line is None:
-        raise HTTPException(status_code=404, detail=f'Line "{line_id}" not found')
-    return LineSchema(
-        id=line.id,
-        shortName=line.short_name,
-        name=line.name,
-        status=line.status,
-        transportMode=TransportMode.from_idfm_transport_mode(
-            line.transport_mode, line.transport_submode
-        ),
-        backColorHexa=line.colour_web_hexa,
-        foreColorHexa=line.text_colour_hexa,
-        operatorId=line.operator_id,
-        accessibility=line.accessibility,
-        visualSignsAvailable=line.visual_signs_available,
-        audibleSignsAvailable=line.audible_signs_available,
-        stopIds=[stop.id for stop in line.stops],
-    )
-
-
-def _format_stop(stop: Stop) -> StopSchema:
-    return StopSchema(
-        id=stop.id,
-        name=stop.name,
-        town=stop.town_name,
-        epsg3857_x=stop.epsg3857_x,
-        epsg3857_y=stop.epsg3857_y,
-        lines=[line.id for line in stop.lines],
-    )
-
-
-@app.get("/stop/")
-async def get_stop(
-    name: str = "", limit: int = 10
-) -> Sequence[StopAreaSchema | StopSchema]:
-    # TODO: Add limit support
-    formatted: list[StopAreaSchema | StopSchema] = []
-
-    matching_stops = await Stop.get_by_name(name)
-    # print(matching_stops, flush=True)
-
-    stop_areas: dict[int, StopArea] = {}
-    stops: dict[int, Stop] = {}
-    for stop in matching_stops:
-        # print(f"{stop.__dict__ = }", flush=True)
-        dst = stop_areas if isinstance(stop, StopArea) else stops
-        dst[stop.id] = stop
-
-    for stop_area in stop_areas.values():
-        formatted_stops = []
-        for stop in stop_area.stops:
-            formatted_stops.append(_format_stop(stop))
-            try:
-                del stops[stop.id]
-            except KeyError as err:
-                print(err)
-        formatted.append(
-            StopAreaSchema(
-                id=stop_area.id,
-                name=stop_area.name,
-                town=stop_area.town_name,
-                type=stop_area.type,
-                lines=[line.id for line in stop_area.lines],
-                stops=formatted_stops,
-            )
-        )
-
-    # print(f"{stops = }", flush=True)
-    formatted.extend(_format_stop(stop) for stop in stops.values())
-
-    return formatted
-
-
-# TODO: Cache response for 30 secs ?
-@app.get("/stop/nextPassages/{stop_id}")
-async def get_next_passages(stop_id: str) -> NextPassagesSchema | None:
-    res = await idfm_interface.get_next_passages(stop_id)
-    if res is None:
-        return None
-
-    service_delivery = res.Siri.ServiceDelivery
-    stop_monitoring_deliveries = service_delivery.StopMonitoringDelivery
-
-    by_line_by_dst_passages: dict[
-        str, dict[str, list[NextPassageSchema]]
-    ] = defaultdict(lambda: defaultdict(list))
-
-    for delivery in stop_monitoring_deliveries:
-        for stop_visit in delivery.MonitoredStopVisit:
-            journey = stop_visit.MonitoredVehicleJourney
-
-            # re.match will return None if the given journey.LineRef.value is not valid.
-            try:
-                line_id = IdfmInterface.LINE_RE.match(journey.LineRef.value).group(1)
-            except AttributeError as exc:
-                raise HTTPException(
-                    status_code=404, detail=f'Line "{journey.LineRef.value}" not found'
-                ) from exc
-
-            call = journey.MonitoredCall
-
-            dst_names = call.DestinationDisplay
-            dsts = [dst.value for dst in dst_names] if dst_names else []
-
-            arrivalPlatformName = (
-                call.ArrivalPlatformName.value if call.ArrivalPlatformName else None
-            )
-
-            next_passage = NextPassageSchema(
-                line=line_id,
-                operator=journey.OperatorRef.value,
-                destinations=dsts,
-                atStop=call.VehicleAtStop,
-                aimedArrivalTs=optional_datetime_to_ts(call.AimedArrivalTime),
-                expectedArrivalTs=optional_datetime_to_ts(call.ExpectedArrivalTime),
-                arrivalPlatformName=arrivalPlatformName,
-                aimedDepartTs=optional_datetime_to_ts(call.AimedDepartureTime),
-                expectedDepartTs=optional_datetime_to_ts(call.ExpectedDepartureTime),
-                arrivalStatus=call.ArrivalStatus.value,
-                departStatus=call.DepartureStatus.value,
-            )
-
-            by_line_passages = by_line_by_dst_passages[line_id]
-            # TODO: by_line_passages[dst].extend(dsts) instead ?
-            for dst in dsts:
-                by_line_passages[dst].append(next_passage)
-
-    return NextPassagesSchema(
-        ts=service_delivery.ResponseTimestamp.timestamp(),
-        passages=by_line_by_dst_passages,
-    )
-
-
-@app.get("/stop/{stop_id}/destinations")
-async def get_stop_destinations(
-    stop_id: int,
-) -> IdfmDestinations | None:
-    destinations = await idfm_interface.get_destinations(stop_id)
-    return destinations
-
-
-# TODO: Rename endpoint -> /stop/{stop_id}/shape
-@app.get("/stop_shape/{stop_id}")
-async def get_stop_shape(stop_id: int) -> StopShapeSchema | None:
-    connection_area = None
-    if (stop := await Stop.get_by_id(stop_id)) is not None:
-        connection_area = stop.connection_area
-    elif (stop_area := await StopArea.get_by_id(stop_id)) is not None:
-        connection_areas = {stop.connection_area for stop in stop_area.stops}
-        connection_areas_len = len(connection_areas)
-        if connection_areas_len == 1:
-            connection_area = connection_areas.pop()
-        else:
-            prefix = "More than one" if connection_areas_len else "No"
-            msg = f"{prefix} connection area has been found for stop area #{stop_id}"
-            raise HTTPException(status_code=500, detail=msg)
-
-    if (
-        connection_area is not None
-        and (shape := await StopShape.get_by_id(connection_area.id)) is not None
-    ):
-        return StopShapeSchema(
-            id=shape.id,
-            type=shape.type,
-            epsg3857_bbox=shape.epsg3857_bbox,
-            epsg3857_points=shape.epsg3857_points,
-        )
-
-    msg = f"No shape found for stop {stop_id}"
-    raise HTTPException(status_code=404, detail=msg)
+app.mount("/widget", StaticFiles(directory="../frontend/", html=True), name="widget")
+
+
+# The cache-control header entry is not managed properly by fastapi-cache:
+# For now, a request with a cache-control set to no-cache
+# is interpreted as disabling the use of the server cache.
+# Cf. Improve Cache-Control header parsing and handling
+# https://github.com/long2ice/fastapi-cache/issues/144 workaround
+@app.middleware("http")
+async def fastapi_cache_issue_144_workaround(request: Request, call_next):
+    entries = request.headers.__dict__["_list"]
+    new_entries = [
+        entry for entry in entries if entry[0].decode().lower() != "cache-control"
+    ]
+    request.headers.__dict__["_list"] = new_entries
+    return await call_next(request)
+
+
+app.include_router(line.router)
+app.include_router(stop.router)
+
+
+if settings.tracing.enable:
+    FastAPIInstrumentor.instrument_app(app)
+
+    trace.set_tracer_provider(
+        TracerProvider(resource=Resource.create({SERVICE_NAME: settings.app_name}))
+    )
+    trace.get_tracer_provider().add_span_processor(
+        BatchSpanProcessor(OTLPSpanExporter())
+    )
+    tracer = trace.get_tracer(settings.app_name)
+
+
+if __name__ == "__main__":
+    http_settings = settings.http
+    config = uvicorn.Config(
+        app=app,
+        host=http_settings.host,
+        port=http_settings.port,
+        ssl_certfile=http_settings.cert,
+        proxy_headers=True,
+    )
+    server = uvicorn.Server(config)
+    server.run()

View File

@@ -4,20 +4,27 @@ version = "0.1.0"
 description = ""
 authors = ["Adrien SUEUR <me@adrien.run>"]
 readme = "README.md"
+packages = [{include = "backend"}]

 [tool.poetry.dependencies]
 python = "^3.11"
 aiohttp = "^3.8.3"
-rich = "^12.6.0"
 aiofiles = "^22.1.0"
-sqlalchemy = {extras = ["asyncio"], version = "^2.0.1"}
-fastapi = "^0.88.0"
+fastapi = "^0.95.0"
 uvicorn = "^0.20.0"
-asyncpg = "^0.27.0"
 msgspec = "^0.12.0"
 pyshp = "^2.3.1"
 pyproj = "^3.5.0"
+opentelemetry-instrumentation-fastapi = "^0.38b0"
+sqlalchemy-utils = "^0.41.1"
+opentelemetry-instrumentation-logging = "^0.38b0"
+opentelemetry-sdk = "^1.17.0"
+opentelemetry-api = "^1.17.0"
+opentelemetry-exporter-otlp-proto-http = "^1.17.0"
+opentelemetry-instrumentation-sqlalchemy = "^0.38b0"
+sqlalchemy = "^2.0.12"
+psycopg = "^3.1.9"
+pyyaml = "^6.0"
+fastapi-cache2 = {extras = ["redis"], version = "^0.2.1"}

 [build-system]
 requires = ["poetry-core"]
@@ -39,8 +46,10 @@ autopep8 = "^2.0.1"
 pyflakes = "^3.0.1"
 yapf = "^0.32.0"
 whatthepatch = "^1.0.4"
-sqlalchemy = {extras = ["mypy"], version = "^2.0.1"}
 mypy = "^1.0.0"
+icecream = "^2.1.3"
+types-sqlalchemy-utils = "^1.0.1"
+types-pyyaml = "^6.0.12.9"

 [tool.mypy]
 plugins = "sqlalchemy.ext.mypy.plugin"

View File

34
backend/routers/line.py Normal file
View File

@@ -0,0 +1,34 @@
from fastapi import APIRouter, HTTPException
from fastapi_cache.decorator import cache
from backend.models import Line
from backend.schemas import Line as LineSchema, TransportMode
router = APIRouter(prefix="/line", tags=["line"])
@router.get("/{line_id}", response_model=LineSchema)
@cache(namespace="line")
async def get_line(line_id: int) -> LineSchema:
line: Line | None = await Line.get_by_id(line_id)
if line is None:
raise HTTPException(status_code=404, detail=f'Line "{line_id}" not found')
return LineSchema(
id=line.id,
shortName=line.short_name,
name=line.name,
status=line.status,
transportMode=TransportMode.from_idfm_transport_mode(
line.transport_mode, line.transport_submode
),
backColorHexa=line.colour_web_hexa,
foreColorHexa=line.text_colour_hexa,
operatorId=line.operator_id,
accessibility=line.accessibility,
visualSignsAvailable=line.visual_signs_available,
audibleSignsAvailable=line.audible_signs_available,
stopIds=[stop.id for stop in line.stops],
)

194
backend/routers/stop.py Normal file
View File

@@ -0,0 +1,194 @@
from collections import defaultdict
from datetime import datetime
from typing import Sequence
from fastapi import APIRouter, HTTPException
from fastapi_cache.decorator import cache
from backend.idfm_interface import (
Destinations as IdfmDestinations,
IdfmInterface,
TrainStatus,
)
from backend.models import Stop, StopArea, StopShape
from backend.schemas import (
NextPassage as NextPassageSchema,
NextPassages as NextPassagesSchema,
Stop as StopSchema,
StopArea as StopAreaSchema,
StopShape as StopShapeSchema,
)
from dependencies import idfm_interface
router = APIRouter(prefix="/stop", tags=["stop"])
def _format_stop(stop: Stop) -> StopSchema:
return StopSchema(
id=stop.id,
name=stop.name,
town=stop.town_name,
epsg3857_x=stop.epsg3857_x,
epsg3857_y=stop.epsg3857_y,
lines=[line.id for line in stop.lines],
)
def optional_datetime_to_ts(dt: datetime | None) -> int | None:
return int(dt.timestamp()) if dt else None
# TODO: Add limit support
@router.get("/")
@cache(namespace="stop")
async def get_stop(
name: str = "", limit: int = 10
) -> Sequence[StopAreaSchema | StopSchema] | None:
matching_stops = await Stop.get_by_name(name)
if matching_stops is None:
return None
formatted: list[StopAreaSchema | StopSchema] = []
stop_areas: dict[int, StopArea] = {}
stops: dict[int, Stop] = {}
for stop in matching_stops:
if isinstance(stop, StopArea):
stop_areas[stop.id] = stop
elif isinstance(stop, Stop):
stops[stop.id] = stop
for stop_area in stop_areas.values():
formatted_stops = []
for stop in stop_area.stops:
formatted_stops.append(_format_stop(stop))
try:
del stops[stop.id]
except KeyError as err:
print(err)
formatted.append(
StopAreaSchema(
id=stop_area.id,
name=stop_area.name,
town=stop_area.town_name,
type=stop_area.type,
lines=[line.id for line in stop_area.lines],
stops=formatted_stops,
)
)
formatted.extend(_format_stop(stop) for stop in stops.values())
return formatted
@router.get("/{stop_id}/nextPassages")
@cache(namespace="stop-nextPassages", expire=30)
async def get_next_passages(stop_id: int) -> NextPassagesSchema | None:
res = await idfm_interface.get_next_passages(stop_id)
if res is None:
return None
service_delivery = res.Siri.ServiceDelivery
stop_monitoring_deliveries = service_delivery.StopMonitoringDelivery
by_line_by_dst_passages: dict[
int, dict[str, list[NextPassageSchema]]
] = defaultdict(lambda: defaultdict(list))
for delivery in stop_monitoring_deliveries:
for stop_visit in delivery.MonitoredStopVisit:
journey = stop_visit.MonitoredVehicleJourney
# re.match will return None if the given journey.LineRef.value is not valid.
try:
line_id_match = IdfmInterface.LINE_RE.match(journey.LineRef.value)
line_id = int(line_id_match.group(1)) # type: ignore
except (AttributeError, TypeError, ValueError) as err:
raise HTTPException(
status_code=404, detail=f'Line "{journey.LineRef.value}" not found'
) from err
call = journey.MonitoredCall
dst_names = call.DestinationDisplay
dsts = [dst.value for dst in dst_names] if dst_names else []
arrivalPlatformName = (
call.ArrivalPlatformName.value if call.ArrivalPlatformName else None
)
next_passage = NextPassageSchema(
line=line_id,
operator=journey.OperatorRef.value,
destinations=dsts,
atStop=call.VehicleAtStop,
aimedArrivalTs=optional_datetime_to_ts(call.AimedArrivalTime),
expectedArrivalTs=optional_datetime_to_ts(call.ExpectedArrivalTime),
arrivalPlatformName=arrivalPlatformName,
aimedDepartTs=optional_datetime_to_ts(call.AimedDepartureTime),
expectedDepartTs=optional_datetime_to_ts(call.ExpectedDepartureTime),
arrivalStatus=call.ArrivalStatus
if call.ArrivalStatus is not None
else TrainStatus.unknown,
departStatus=call.DepartureStatus
if call.DepartureStatus is not None
else TrainStatus.unknown,
)
by_line_passages = by_line_by_dst_passages[line_id]
# TODO: by_line_passages[dst].extend(dsts) instead ?
for dst in dsts:
by_line_passages[dst].append(next_passage)
return NextPassagesSchema(
ts=int(service_delivery.ResponseTimestamp.timestamp()),
passages=by_line_by_dst_passages,
)
@router.get("/{stop_id}/destinations")
@cache(namespace="stop-destinations", expire=30)
async def get_stop_destinations(
stop_id: int,
) -> IdfmDestinations | None:
destinations = await idfm_interface.get_destinations(stop_id)
return destinations
@router.get("/{stop_id}/shape")
@cache(namespace="stop-shape")
async def get_stop_shape(stop_id: int) -> StopShapeSchema | None:
connection_area = None
if (stop := await Stop.get_by_id(stop_id)) is not None:
connection_area = stop.connection_area
elif (stop_area := await StopArea.get_by_id(stop_id)) is not None:
connection_areas = {stop.connection_area for stop in stop_area.stops}
connection_areas_len = len(connection_areas)
if connection_areas_len == 1:
connection_area = connection_areas.pop()
else:
prefix = "More than one" if connection_areas_len else "No"
msg = f"{prefix} connection area has been found for stop area #{stop_id}"
raise HTTPException(status_code=500, detail=msg)
if (
connection_area is not None
and (shape := await StopShape.get_by_id(connection_area.id)) is not None
):
return StopShapeSchema(
id=shape.id,
type=shape.type,
epsg3857_bbox=shape.epsg3857_bbox,
epsg3857_points=shape.epsg3857_points,
)
msg = f"No shape found for stop {stop_id}"
raise HTTPException(status_code=404, detail=msg)

89
docker-compose.yml Normal file
View File

@@ -0,0 +1,89 @@
version: '3.7'
services:
database:
image: postgres:15.1-alpine
restart: always
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
logging:
options:
max-size: 10m
max-file: "3"
ports:
- "127.0.0.1:5432:5432"
volumes:
- ./backend/docker/database/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
- ./backend/docker/database/data:/var/lib/postgresql/data
redis:
image: redis:latest
restart: always
command: redis-server --loglevel warning
ports:
- "127.0.0.1:6379:6379"
jaeger-agent:
image: jaegertracing/jaeger-agent:latest
command:
- "--reporter.grpc.host-port=jaeger-collector:14250"
ports:
- "127.0.0.1:5775:5775/udp"
- "127.0.0.1:6831:6831/udp"
- "127.0.0.1:6832:6832/udp"
- "127.0.0.1:5778:5778"
restart: on-failure
depends_on:
- jaeger-collector
jaeger-collector:
image: jaegertracing/jaeger-collector:latest
command:
- "--cassandra.keyspace=jaeger_v1_dc1"
- "--cassandra.servers=cassandra"
- "--collector.zipkin.host-port=9411"
- "--sampling.initial-sampling-probability=.5"
- "--sampling.target-samples-per-second=.01"
environment:
- SAMPLING_CONFIG_TYPE=adaptive
- COLLECTOR_OTLP_ENABLED=true
ports:
- "127.0.0.1:4317:4317"
- "127.0.0.1:4318:4318"
restart: on-failure
depends_on:
- cassandra-schema
cassandra:
image: cassandra:latest
cassandra-schema:
image: jaegertracing/jaeger-cassandra-schema:latest
depends_on:
- cassandra
jaeger-query:
image: jaegertracing/jaeger-query:latest
command:
- "--cassandra.keyspace=jaeger_v1_dc1"
- "--cassandra.servers=cassandra"
ports:
- "127.0.0.1:16686:16686"
restart: on-failure
depends_on:
- cassandra-schema
carrramba-encore-rate-api:
build: ./backend/
environment:
- CONFIG_PATH=./config.local.yaml
- IDFM_API_KEY=set_your_idfm_key_here
ports:
- "127.0.0.1:8080:8080"
carrramba-encore-rate-frontend:
build: ./frontend/
ports:
- "127.0.0.1:80:8081"

4
frontend/Dockerfile Normal file
View File

@@ -0,0 +1,4 @@
# pull the latest official nginx image
FROM nginx:mainline-alpine-slim
COPY dist /usr/share/nginx/html

View File

@@ -1,4 +1,4 @@
-import { Component } from 'solid-js';
+import { Component, createSignal } from 'solid-js';
 import { IVisibilityActionRequest, MatrixCapabilities, WidgetApi, WidgetApiToWidgetAction } from 'matrix-widget-api';
 import { HopeProvider } from "@hope-ui/solid";
@@ -6,9 +6,10 @@ import { BusinessDataProvider } from './businessData';
 import { AppContextProvider } from './appContext';
 import { PassagesDisplay } from './passagesDisplay';
-import { StopsSearchMenu } from './stopsSearchMenu';
+import { StopsSearchMenu } from './stopsSearchMenu/stopsSearchMenu';

 import "./App.scss";
+import { onCleanup, onMount } from 'solid-js';

 function parseFragment() {
@@ -43,6 +44,33 @@ const App: Component = () => {
         api.transport.reply(ev.detail, {});
     });

+    createSignal({
+        height: window.innerHeight,
+        width: window.innerWidth
+    });
+
+    const onResize = () => {
+        const body = document.body;
+        if (window.innerWidth * 9 / 16 < window.innerHeight) {
+            body.style['height'] = 'auto';
+            body.style['width'] = '100vw';
+        }
+        else {
+            body.style['height'] = '100vh';
+            body.style['width'] = 'auto';
+        }
+    };
+
+    onMount(() => {
+        window.addEventListener('resize', onResize);
+        onResize();
+    });
+
+    onCleanup(() => {
+        window.removeEventListener('resize', onResize);
+    })
+
     return (
         <BusinessDataProvider>
             <AppContextProvider>

View File

@@ -21,7 +21,7 @@ export interface BusinessDataStore {
     getStop: (stopId: number) => Stop | undefined;
     searchStopByName: (name: string) => Promise<Stops>;
-    getStopDestinations: (stopId: number) => Promise<StopDestinations>;
+    getStopDestinations: (stopId: number) => Promise<StopDestinations | undefined>;
     getStopShape: (stopId: number) => Promise<StopShape | undefined>;
 };

@@ -44,11 +44,20 @@ export function BusinessDataProvider(props: { children: JSX.Element }) {
         let line = store.lines[lineId];
         if (line === undefined) {
             console.log(`${lineId} not found... fetch it from backend.`);
-            const data = await fetch(`${serverUrl()}/line/${lineId}`, {
+
+            const response = await fetch(`${serverUrl()}/line/${lineId}`, {
                 headers: { 'Content-Type': 'application/json' }
             });
-            line = await data.json();
-            setStore('lines', lineId, line);
+
+            const json = await response.json();
+            if (response.ok) {
+                setStore('lines', lineId, json);
+                line = json;
+            }
+            else {
+                console.warn(`No line found for ${lineId} line id:`, json);
+            }
         }
         return line;
     }
@@ -91,12 +100,19 @@ export function BusinessDataProvider(props: { children: JSX.Element }) {
     }

     const refreshPassages = async (stopId: number): Promise<void> => {
-        const httpOptions = { headers: { "Content-Type": "application/json" } };
         console.log(`Fetching data for ${stopId}`);
-        const data = await fetch(`${serverUrl()}/stop/nextPassages/${stopId}`, httpOptions);
-        const response = await data.json();
-        _cleanupPassages(response.passages);
-        addPassages(response.passages);
+
+        const httpOptions = { headers: { "Content-Type": "application/json" } };
+        const response = await fetch(`${serverUrl()}/stop/${stopId}/nextPassages`, httpOptions);
+
+        const json = await response.json();
+        if (response.ok) {
+            _cleanupPassages(json.passages);
+            addPassages(json.passages);
+        }
+        else {
+            console.warn(`No passage found for ${stopId} stop:`, json);
+        }
     }

     const addPassages = (passages: Passages): void => {
@@ -155,39 +171,58 @@ ${linePassagesDestination.length} here... refresh all them.`);
     }

     const searchStopByName = async (name: string): Promise<Stops> => {
-        const data = await fetch(`${serverUrl()}/stop/?name=${name}`, {
+        const byIdStops: Stops = {};
+
+        const response = await fetch(`${serverUrl()}/stop/?name=${name}`, {
             headers: { 'Content-Type': 'application/json' }
         });
-        const stops = await data.json();

-        const byIdStops: Stops = {};
-        for (const stop of stops) {
-            byIdStops[stop.id] = stop;
-            setStore('stops', stop.id, stop);
-            for (const innerStop of stop.stops) {
-                setStore('stops', innerStop.id, innerStop);
+        const json = await response.json();
+
+        if (response.ok) {
+            for (const stop of json) {
+                byIdStops[stop.id] = stop;
+                setStore('stops', stop.id, stop);
+
+                if (stop.stops !== undefined) {
+                    for (const innerStop of stop.stops) {
+                        setStore('stops', innerStop.id, innerStop);
+                    }
+                }
             }
         }
+        else {
+            console.warn(`No stop found for '${name}' query:`, json);
+        }
+
         return byIdStops;
     }

-    const getStopDestinations = async (stopId: number): Promise<StopDestinations> => {
-        const data = await fetch(`${serverUrl()}/stop/${stopId}/destinations`, {
+    const getStopDestinations = async (stopId: number): Promise<StopDestinations | undefined> => {
+        const response = await fetch(`${serverUrl()}/stop/${stopId}/destinations`, {
             headers: { 'Content-Type': 'application/json' }
         });
-        const response = await data.json();
-        return response;
+
+        const destinations = response.ok ? await response.json() : undefined;
+        return destinations;
     }

     const getStopShape = async (stopId: number): Promise<StopShape | undefined> => {
         let shape = store.stopShapes[stopId];
         if (shape === undefined) {
             console.log(`No shape found for ${stopId} stop... fetch it from backend.`);
-            const data = await fetch(`${serverUrl()}/stop_shape/${stopId}`, {
+
+            const response = await fetch(`${serverUrl()}/stop/${stopId}/shape`, {
                 headers: { 'Content-Type': 'application/json' }
             });
-            shape = await data.json();
-            setStore('stopShapes', stopId, shape);
+
+            const json = await response.json();
+            if (response.ok) {
+                setStore('stopShapes', stopId, json);
+                shape = json;
+            }
+            else {
+                console.warn(`No shape found for ${stopId} stop:`, json);
+            }
         }
         return shape;
     }

View File

@@ -13,10 +13,8 @@
     src: url(/public/fonts/IDFVoyageur-Medium.otf);
 }

-body {
+html, body {
     aspect-ratio: 16/9;
-    width: 100vw;
-    height: none;
     margin: 0;