INIT
commit
e18548b1ec
|
@ -0,0 +1,2 @@
|
||||||
|
/venv
|
||||||
|
**/__pycache__
|
|
@ -0,0 +1,8 @@
|
||||||
|
# Default ignored files
|
||||||
|
/shelf/
|
||||||
|
/workspace.xml
|
||||||
|
# Editor-based HTTP Client requests
|
||||||
|
/httpRequests/
|
||||||
|
# Datasource local storage ignored files
|
||||||
|
/dataSources/
|
||||||
|
/dataSources.local.xml
|
|
@ -0,0 +1,3 @@
|
||||||
|
<component name="ProjectDictionaryState">
|
||||||
|
<dictionary name="ernestlitvinenko" />
|
||||||
|
</component>
|
|
@ -0,0 +1,17 @@
|
||||||
|
<component name="InspectionProjectProfileManager">
|
||||||
|
<profile version="1.0">
|
||||||
|
<option name="myName" value="Project Default" />
|
||||||
|
<inspection_tool class="PyInterpreterInspection" enabled="false" level="WARNING" enabled_by_default="false" />
|
||||||
|
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
|
||||||
|
<option name="ignoredPackages">
|
||||||
|
<value>
|
||||||
|
<list size="3">
|
||||||
|
<item index="0" class="java.lang.String" itemvalue="typing-extensions" />
|
||||||
|
<item index="1" class="java.lang.String" itemvalue="pydantic-core" />
|
||||||
|
<item index="2" class="java.lang.String" itemvalue="pydantic" />
|
||||||
|
</list>
|
||||||
|
</value>
|
||||||
|
</option>
|
||||||
|
</inspection_tool>
|
||||||
|
</profile>
|
||||||
|
</component>
|
|
@ -0,0 +1,6 @@
|
||||||
|
<component name="InspectionProjectProfileManager">
|
||||||
|
<settings>
|
||||||
|
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||||
|
<version value="1.0" />
|
||||||
|
</settings>
|
||||||
|
</component>
|
|
@ -0,0 +1,7 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="Black">
|
||||||
|
<option name="sdkName" value="Python 3.12 (mp_driver_server)" />
|
||||||
|
</component>
|
||||||
|
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (mp_driver_server)" project-jdk-type="Python SDK" />
|
||||||
|
</project>
|
|
@ -0,0 +1,8 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="ProjectModuleManager">
|
||||||
|
<modules>
|
||||||
|
<module fileurl="file://$PROJECT_DIR$/.idea/mp_driver_server.iml" filepath="$PROJECT_DIR$/.idea/mp_driver_server.iml" />
|
||||||
|
</modules>
|
||||||
|
</component>
|
||||||
|
</project>
|
|
@ -0,0 +1,10 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<module type="PYTHON_MODULE" version="4">
|
||||||
|
<component name="NewModuleRootManager">
|
||||||
|
<content url="file://$MODULE_DIR$">
|
||||||
|
<excludeFolder url="file://$MODULE_DIR$/venv" />
|
||||||
|
</content>
|
||||||
|
<orderEntry type="inheritedJdk" />
|
||||||
|
<orderEntry type="sourceFolder" forTests="false" />
|
||||||
|
</component>
|
||||||
|
</module>
|
|
@ -0,0 +1,17 @@
|
||||||
|
FROM python:3.12.3
|
||||||
|
|
||||||
|
# Update system packages and install libfbclient2
|
||||||
|
RUN apt update && apt install -y libfbclient2
|
||||||
|
|
||||||
|
# Change working directory
|
||||||
|
RUN mkdir "/app"
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy and install Requirements
|
||||||
|
COPY ./requirements.txt .
|
||||||
|
RUN pip install -r requirements.txt
|
||||||
|
|
||||||
|
|
||||||
|
# Copy core
|
||||||
|
COPY ./core ./core
|
||||||
|
COPY main.py .
|
|
@ -0,0 +1,42 @@
|
||||||
|
# MP Driver Backend
|
||||||
|
|
||||||
|
## Production Usage
|
||||||
|
|
||||||
|
### Requirements
|
||||||
|
1. Docker and docker-compose installed
|
||||||
|
2. WSL 2 (Windows Subsystem for Linux) installed (if you are on Windows)
|
||||||
|
3. Git installed
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
1. From source code.
|
||||||
|
2. Via docker-compose.
|
||||||
|
|
||||||
|
#### Git
|
||||||
|
1. Clone this repository
|
||||||
|
```bash
|
||||||
|
git clone git@git.elitvinenko.tech:ernestlitvinenko/mp_driver_server.git
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Change directory to the repository
|
||||||
|
```bash
|
||||||
|
cd mp_driver_server
|
||||||
|
```
|
||||||
|
3. Run the following command to start the server
|
||||||
|
```bash
|
||||||
|
docker-compose build && docker-compose up -d
|
||||||
|
```
|
||||||
|
4. The server should be started on `http://localhost:8000`
|
||||||
|
|
||||||
|
|
||||||
|
#### Docker-compose
|
||||||
|
1. Create a new directory for the project
|
||||||
|
```bash
|
||||||
|
mkdir mp_driver_server
|
||||||
|
```
|
||||||
|
2. Change directory to the new directory
|
||||||
|
```bash
|
||||||
|
cd mp_driver_server
|
||||||
|
```
|
||||||
|
3. Create a new file called `docker-compose.yml` and paste the following content
|
||||||
|
```yaml
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
from .config import Config
|
|
@ -0,0 +1,16 @@
|
||||||
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
|
||||||
|
model_config = SettingsConfigDict(env_file=".env")
|
||||||
|
firebird_host: str | None = None
|
||||||
|
firebird_user: str | None = None
|
||||||
|
firebird_password: str | None = None
|
||||||
|
firebird_database: str | None = None
|
||||||
|
|
||||||
|
secret: str | None = None
|
||||||
|
host: str | None = None
|
||||||
|
port: int | None = None
|
||||||
|
|
||||||
|
|
||||||
|
Config = Settings()
|
|
@ -0,0 +1,48 @@
|
||||||
|
import pickle
|
||||||
|
import socket
|
||||||
|
from firebird.driver import connect, driver_config
|
||||||
|
from redis import StrictRedis
|
||||||
|
from redis_cache import RedisCache
|
||||||
|
from sqlalchemy import QueuePool
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
import codecs
|
||||||
|
|
||||||
|
|
||||||
|
from core.config import Config
|
||||||
|
|
||||||
|
driver_config.server_defaults.host.value = Config.firebird_host
|
||||||
|
driver_config.server_defaults.user.value = Config.firebird_user
|
||||||
|
driver_config.server_defaults.password.value = Config.firebird_password
|
||||||
|
|
||||||
|
|
||||||
|
def get_connection():
|
||||||
|
con = connect(Config.firebird_database, no_db_triggers=False, charset='WIN1251')
|
||||||
|
|
||||||
|
stmt_register_connection = f"""
|
||||||
|
UPDATE SEANS
|
||||||
|
set SEANS_STATUS = 2,
|
||||||
|
SEANS_ID_SOTR = ?,--14,
|
||||||
|
SEANS_ID_MST = 0, -- без привязки
|
||||||
|
SEANS_COMP_NAME = ?,--//'PersonalArea'
|
||||||
|
SEANS_REMOTE_VER = ?
|
||||||
|
where ID_SEANS = RDB$GET_CONTEXT('USER_SESSION', 'ID_SEANS');
|
||||||
|
"""
|
||||||
|
cursor = con.cursor()
|
||||||
|
cursor.execute(stmt_register_connection, (31, socket.gethostname(), '2024052901'))
|
||||||
|
cursor.close()
|
||||||
|
con.commit()
|
||||||
|
return con
|
||||||
|
|
||||||
|
|
||||||
|
def __serializer(obj):
|
||||||
|
return codecs.encode(pickle.dumps(obj), "base64").decode()
|
||||||
|
|
||||||
|
|
||||||
|
def __deserializer(obj):
|
||||||
|
return pickle.loads(codecs.decode(obj.encode(), "base64"))
|
||||||
|
|
||||||
|
|
||||||
|
# pool = QueuePool(get_connection, pool_size=5, max_overflow=0, pre_ping=True, dialect=)
|
||||||
|
redis_client = StrictRedis(host="127.0.0.1", decode_responses=True)
|
||||||
|
redis_cache_obj = lambda : RedisCache(redis_client, serializer=__serializer, deserializer=__deserializer)
|
||||||
|
engine = create_engine(f'firebird+firebird://{Config.firebird_user}:{Config.firebird_password}@{Config.firebird_host}/{Config.firebird_database}?charset=WIN1251', echo=True)
|
|
@ -0,0 +1,68 @@
|
||||||
|
import queue
|
||||||
|
|
||||||
|
from firebird.driver import Connection as BaseConnection, connect, Cursor
|
||||||
|
|
||||||
|
|
||||||
|
class PoolConnection:
|
||||||
|
def __init__(self, connection: BaseConnection, pool: 'PoolManager'):
|
||||||
|
self._connection: BaseConnection = connection
|
||||||
|
self.__pool: PoolManager = pool
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_alive(self) -> bool:
|
||||||
|
return not self._connection.is_closed()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create(cls, pool: 'PoolManager', *args, **kwargs):
|
||||||
|
con = connect(*args, **kwargs)
|
||||||
|
return cls(con, pool)
|
||||||
|
|
||||||
|
def cursor(self) -> Cursor:
|
||||||
|
return self._connection.cursor()
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
self.__pool.release_connection(self)
|
||||||
|
|
||||||
|
def close_connection(self):
|
||||||
|
if not self._connection.is_closed():
|
||||||
|
self._connection.close()
|
||||||
|
assert self.is_alive == False
|
||||||
|
|
||||||
|
|
||||||
|
class PoolManager:
|
||||||
|
MAX_CONNECTION: int = 5
|
||||||
|
_all_connection: list[PoolConnection] = []
|
||||||
|
_available_connections: queue.Queue[PoolConnection] = None
|
||||||
|
|
||||||
|
def __init__(self, **connection_args):
|
||||||
|
self.__connection_args = connection_args
|
||||||
|
if self._available_connections is None:
|
||||||
|
self._available_connections = queue.Queue()
|
||||||
|
|
||||||
|
self.build_connections()
|
||||||
|
|
||||||
|
def build_connections(self):
|
||||||
|
for _ in range(self.MAX_CONNECTION):
|
||||||
|
self.make_connection()
|
||||||
|
|
||||||
|
def make_connection(self) -> PoolConnection:
|
||||||
|
con = PoolConnection.create(self, **self.__connection_args)
|
||||||
|
self._all_connection.append(con)
|
||||||
|
self.release_connection(con)
|
||||||
|
return con
|
||||||
|
|
||||||
|
def connect(self) -> PoolConnection:
|
||||||
|
for _ in range(self.MAX_CONNECTION):
|
||||||
|
con = self._available_connections.get()
|
||||||
|
if con.is_alive:
|
||||||
|
return con
|
||||||
|
self._all_connection.remove(con)
|
||||||
|
return self.make_connection()
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
print("GRACEFULLY CLOSE CONNECTIONS")
|
||||||
|
for con in self._all_connection:
|
||||||
|
con.close_connection()
|
||||||
|
|
||||||
|
def release_connection(self, con):
|
||||||
|
self._available_connections.put(con)
|
|
@ -0,0 +1,17 @@
|
||||||
|
from ..base import MPDriverException
|
||||||
|
|
||||||
|
|
||||||
|
def incorrect_phone_number():
|
||||||
|
raise MPDriverException(422,
|
||||||
|
"IncorrectPhone",
|
||||||
|
"Формат телефона указан некорректно.\nПожалуйста, проверьте правильность набранного номера")
|
||||||
|
|
||||||
|
|
||||||
|
def profile_not_founded():
|
||||||
|
raise MPDriverException(404,
|
||||||
|
"SotrNotFounded",
|
||||||
|
"Не найден сотрудник по введенному номеру телефона")
|
||||||
|
|
||||||
|
|
||||||
|
def no_task_for_current_user():
|
||||||
|
raise MPDriverException(404, "NoTaskForUser", "Нет задачи с указанным ID для данного пользователя")
|
|
@ -0,0 +1,21 @@
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
|
||||||
|
|
||||||
|
class MPDriverException(Exception):
|
||||||
|
def __init__(self, status: int, name: str = None, mnemonic: str = None, detail: str = None, personalized_status: int = None):
|
||||||
|
self.status = status
|
||||||
|
self.name = name
|
||||||
|
self.mnemonic = mnemonic
|
||||||
|
self.personalized_status = personalized_status if personalized_status else self.status
|
||||||
|
self.detail = detail
|
||||||
|
|
||||||
|
def response(self):
|
||||||
|
content = {
|
||||||
|
"status": self.personalized_status,
|
||||||
|
"error": self.name,
|
||||||
|
"detail": self.detail,
|
||||||
|
"langs": {
|
||||||
|
"ru": self.mnemonic
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return JSONResponse(content, status_code=self.status)
|
|
@ -0,0 +1,30 @@
|
||||||
|
from ..base import MPDriverException
|
||||||
|
|
||||||
|
|
||||||
|
def unavailable_status():
|
||||||
|
raise MPDriverException(403, "ProhibiitedStatus", "Вы не можете установить данный статус",
|
||||||
|
"You can't set this status")
|
||||||
|
|
||||||
|
|
||||||
|
def should_provide_error_text_with_cancelled_status():
|
||||||
|
raise MPDriverException(400, "ShouldProvideAttribute",
|
||||||
|
"Вы должны указать причину, по которой вы не смогли завершить задачу",
|
||||||
|
"Attribute error_text should be provided")
|
||||||
|
|
||||||
|
|
||||||
|
def update_task_by_chain_failed(exc: Exception):
|
||||||
|
raise MPDriverException(400, "ChainFailed", """Ошибка в отправке данных: Данные должны приходить в формате списка в следующем формате:
|
||||||
|
task (InProgress) ->
|
||||||
|
subtask (InProgress) ->
|
||||||
|
subtask(Completed/Cancelled) ->
|
||||||
|
subtask(InProgress) ->
|
||||||
|
subtask(Completed/Cancelled) ->
|
||||||
|
task(Completed)
|
||||||
|
|
||||||
|
Цепочка отправляется парами и валидна, только в том случае если пара имеет след вид:
|
||||||
|
task (InProgress) -> subtask (InProgress);
|
||||||
|
или
|
||||||
|
subtask(Completed/Cancelled) -> subtask(InProgress);
|
||||||
|
или
|
||||||
|
subtask(Completed/Cancelled) -> task(Completed)
|
||||||
|
""", f"<{type(exc)}> : {str(exc)}")
|
|
@ -0,0 +1,8 @@
|
||||||
|
|
||||||
|
|
||||||
|
def upload_lst_to_redis():
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def upload_vlst_to_redis():
|
||||||
|
pass
|
|
@ -0,0 +1,21 @@
|
||||||
|
import jwt
|
||||||
|
from fastapi import Depends, HTTPException
|
||||||
|
from fastapi.security import OAuth2PasswordBearer
|
||||||
|
|
||||||
|
from core.config import Config
|
||||||
|
from core.model.profile.db import ProfileDB
|
||||||
|
from core.storage import profile
|
||||||
|
|
||||||
|
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/v1/auth/phone")
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_from_token(token: str = Depends(oauth2_scheme)) -> ProfileDB:
|
||||||
|
try:
|
||||||
|
data = jwt.decode(token, algorithms="HS256", key=Config.secret)
|
||||||
|
except jwt.exceptions.ExpiredSignatureError as err:
|
||||||
|
raise HTTPException(status_code=401, detail=str(err))
|
||||||
|
|
||||||
|
except jwt.exceptions.InvalidSignatureError as err:
|
||||||
|
raise HTTPException(status_code=401, detail=str(err))
|
||||||
|
|
||||||
|
return profile.get_profile_by_id(data['profile_id'])
|
|
@ -0,0 +1,5 @@
|
||||||
|
from sqlalchemy.orm import DeclarativeBase
|
||||||
|
|
||||||
|
|
||||||
|
class Base(DeclarativeBase):
|
||||||
|
pass
|
|
@ -0,0 +1,9 @@
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class ProfileDB(BaseModel):
|
||||||
|
id: int
|
||||||
|
full_name: str
|
||||||
|
phone_number: str
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,81 @@
|
||||||
|
import datetime
|
||||||
|
import typing
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field, ConfigDict, AliasGenerator, Json
|
||||||
|
from pydantic.alias_generators import to_camel
|
||||||
|
|
||||||
|
from .enums import StatusEnum, TaskTypeEnum, SubtaskTypeEnum, MarshTemperatureProperty
|
||||||
|
|
||||||
|
general_model_config = ConfigDict(alias_generator=AliasGenerator(serialization_alias=to_camel))
|
||||||
|
|
||||||
|
|
||||||
|
class DBModel(BaseModel):
|
||||||
|
model_config = general_model_config
|
||||||
|
id: int
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return self.id
|
||||||
|
|
||||||
|
|
||||||
|
class DBSubTask(DBModel):
|
||||||
|
start_pln: datetime.datetime
|
||||||
|
end_pln: datetime.datetime
|
||||||
|
start_fact: datetime.datetime | None
|
||||||
|
end_fact: datetime.datetime | None
|
||||||
|
status: StatusEnum
|
||||||
|
task_type: SubtaskTypeEnum
|
||||||
|
text: str
|
||||||
|
|
||||||
|
station: typing.Optional['DBMST'] = None
|
||||||
|
|
||||||
|
|
||||||
|
class DBAppTask(DBModel):
|
||||||
|
profile_id: int = Field(exclude=True)
|
||||||
|
start_pln: datetime.datetime
|
||||||
|
end_pln: datetime.datetime
|
||||||
|
start_fact: datetime.datetime | None
|
||||||
|
end_fact: datetime.datetime | None
|
||||||
|
status: StatusEnum
|
||||||
|
task_type: TaskTypeEnum
|
||||||
|
text: str
|
||||||
|
|
||||||
|
events: list['DBEvent'] = []
|
||||||
|
subtasks: list[DBSubTask] | None = []
|
||||||
|
route: 'DBMarsh' = None
|
||||||
|
|
||||||
|
|
||||||
|
class DBMarsh(DBModel):
|
||||||
|
temperature_property: MarshTemperatureProperty
|
||||||
|
name: str
|
||||||
|
|
||||||
|
parent_id: int = Field(default=0, exclude=True)
|
||||||
|
|
||||||
|
trailer: Union['DBTRS', None]
|
||||||
|
truck: Union['DBTRS', None]
|
||||||
|
|
||||||
|
|
||||||
|
class DBTRS(DBModel):
|
||||||
|
gost: str | None
|
||||||
|
parent_id: int = Field(default=0, exclude=True)
|
||||||
|
|
||||||
|
|
||||||
|
class DBMST(DBModel):
|
||||||
|
name: str
|
||||||
|
location: 'Location'
|
||||||
|
parent_id: int = Field(default=0, exclude=True)
|
||||||
|
|
||||||
|
|
||||||
|
class Location(BaseModel):
|
||||||
|
lat: float
|
||||||
|
lon: float
|
||||||
|
parent_id: int = Field(default=0, exclude=True)
|
||||||
|
|
||||||
|
|
||||||
|
class DBEvent(DBModel):
|
||||||
|
id: int
|
||||||
|
type: str
|
||||||
|
text: str
|
||||||
|
parent_id: int = Field(default=0, exclude=True)
|
||||||
|
event_data: list | Json
|
||||||
|
event_datetime: datetime.datetime
|
|
@ -0,0 +1,414 @@
|
||||||
|
import functools
|
||||||
|
import json
|
||||||
|
from datetime import date, datetime
|
||||||
|
from typing import Self, Optional, Union, Any, Callable
|
||||||
|
|
||||||
|
from firebird.driver import Cursor
|
||||||
|
from pydantic import BaseModel, Field, Json
|
||||||
|
from pypika import Query, Table
|
||||||
|
from pypika.queries import QueryBuilder
|
||||||
|
|
||||||
|
from core.database.db import redis_cache_obj
|
||||||
|
from core.storage.base import BaseStorage
|
||||||
|
|
||||||
|
from redis_cache import RedisCache
|
||||||
|
|
||||||
|
|
||||||
|
class __BaseDB(BaseModel, BaseStorage):
|
||||||
|
__tablename__: str
|
||||||
|
__read_only: bool = False
|
||||||
|
__table: Table | None = None
|
||||||
|
__state_changed = {}
|
||||||
|
__query: QueryBuilder | None = None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def cache(cls) -> RedisCache:
|
||||||
|
return redis_cache_obj()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_keys(cls):
|
||||||
|
return [x for x in cls.__dict__['__annotations__'].keys() if not x.startswith("__")]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_table(cls) -> Self:
|
||||||
|
return Table(cls.__tablename__)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fetch_one(cls, id: int) -> type[Self]:
|
||||||
|
t = cls.get_table()
|
||||||
|
keys = cls.get_keys()
|
||||||
|
stmt = Query.from_(t).select(*keys).where(getattr(t, keys[0]) == id)
|
||||||
|
with cls.get_cursor() as cur:
|
||||||
|
cur: Cursor
|
||||||
|
data = cur.execute(stmt.get_sql()).fetchone()
|
||||||
|
|
||||||
|
return cls(**{key: val for key, val in zip(keys, data)})
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def parse_orm(cls, row):
|
||||||
|
return cls(**{key: val for key, val in zip(cls.get_keys(), row)})
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fetch_all(cls, where_query: QueryBuilder = None) -> list['Self']:
|
||||||
|
table = cls.get_table()
|
||||||
|
if not where_query:
|
||||||
|
stmt = Query.from_(table).select(*cls.get_keys())
|
||||||
|
else:
|
||||||
|
stmt = Query.from_(table).select(*cls.get_keys()).where(where_query)
|
||||||
|
|
||||||
|
with cls.get_cursor() as cursor:
|
||||||
|
cursor: Cursor
|
||||||
|
stmt: QueryBuilder
|
||||||
|
print(stmt)
|
||||||
|
data = cursor.execute(stmt.get_sql()).fetchall()
|
||||||
|
return [cls.parse_orm(d) for d in data]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fetch_related(cls) -> 'Self':
|
||||||
|
table = cls.get_table()
|
||||||
|
|
||||||
|
cls.__query = Query.from_(table)
|
||||||
|
return cls
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create(cls, model: Self) -> Self:
|
||||||
|
if model.__read_only:
|
||||||
|
raise Exception("This model is read only")
|
||||||
|
|
||||||
|
# table = self.get_table()
|
||||||
|
|
||||||
|
# data = self.model_dump(mode='json', exclude_none=True, exclude={'id'}, exclude_unset=True)
|
||||||
|
# stmt = Query.into(table).columns(*[self.get_keys()]).insert(
|
||||||
|
# *[kwargs[key] for key in self.get_keys() if key in kwargs.keys()])
|
||||||
|
# with self.get_cursor() as cursor:
|
||||||
|
# cursor: Cursor
|
||||||
|
# cursor.execute(stmt.get_sql())
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def update(cls, model: Self) -> Self:
|
||||||
|
if model.__read_only:
|
||||||
|
raise Exception("This model is read only")
|
||||||
|
|
||||||
|
|
||||||
|
class MPLSTDB(__BaseDB):
|
||||||
|
__tablename__: str = "LST"
|
||||||
|
ID_LST: int
|
||||||
|
LST_ID_VLST: int
|
||||||
|
LST_NAME: str
|
||||||
|
LST_NAME_SH: str
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fetch_one(cls, id: int) -> type[Self]:
|
||||||
|
cache = cls.cache()
|
||||||
|
|
||||||
|
@cache.cache()
|
||||||
|
def wrapper(id):
|
||||||
|
return super(MPLSTDB, cls).fetch_one(id)
|
||||||
|
|
||||||
|
return wrapper(id)
|
||||||
|
|
||||||
|
|
||||||
|
class MPAppTaskDB(__BaseDB):
|
||||||
|
__tablename__: str = "APP_TASK"
|
||||||
|
ID_APP_TASK: int | None = None
|
||||||
|
APP_TASK_ID_SOTR: int = None
|
||||||
|
APP_TASK_ID_APP_TASK: int = None
|
||||||
|
APP_TASK_DT_START_PLN: datetime = None
|
||||||
|
APP_TASK_DT_END_PLN: datetime = None
|
||||||
|
APP_TASK_DT_START_FACT: datetime | None = None
|
||||||
|
APP_TASK_DT_END_FACT: datetime | None = None
|
||||||
|
APP_TASK_STATUS: int = None
|
||||||
|
APP_TASK_TIP: int = None
|
||||||
|
APP_TASK_TEXT: str = None
|
||||||
|
APP_TASK_DEL: int = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def status(self) -> MPLSTDB:
|
||||||
|
return MPLSTDB.fetch_one(self.APP_TASK_STATUS)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_subtask(self) -> bool:
|
||||||
|
return self.ID_APP_TASK != self.APP_TASK_ID_APP_TASK
|
||||||
|
|
||||||
|
@property
|
||||||
|
def task_type(self) -> MPLSTDB:
|
||||||
|
return MPLSTDB.fetch_one(self.APP_TASK_TIP)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def subtasks(self) -> list['MPAppTaskDB']:
|
||||||
|
t: MPAppTaskDB = MPAppTaskDB.get_table()
|
||||||
|
return MPAppTaskDB.fetch_all(
|
||||||
|
(t.APP_TASK_ID_APP_TASK == self.ID_APP_TASK) & (t.APP_TASK_ID_APP_TASK != t.ID_APP_TASK) & (
|
||||||
|
t.APP_TASK_DEL == 0))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def events(self) -> list['MPAppEventDB']:
|
||||||
|
return MPAppEventDB.fetch_all(MPAppEventDB.get_table().APP_EVENT_ID_REC == self.ID_APP_TASK)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def params(self) -> list['MPAppParamDB']:
|
||||||
|
return MPAppParamDB.fetch_all(MPAppParamDB.get_table().APP_PARAM_ID_REC == self.ID_APP_TASK)
|
||||||
|
|
||||||
|
|
||||||
|
class EventData(__BaseDB):
|
||||||
|
key: MPLSTDB
|
||||||
|
value: Any
|
||||||
|
|
||||||
|
|
||||||
|
class MPAppEventDB(__BaseDB):
|
||||||
|
__tablename__: str = "APP_EVENT"
|
||||||
|
ID_APP_EVENT: int = None
|
||||||
|
APP_EVENT_ID_SOTR: int = None
|
||||||
|
APP_EVENT_ID_REC: int = None
|
||||||
|
APP_EVENT_VID: int = None
|
||||||
|
APP_EVENT_TIP: int = None
|
||||||
|
APP_EVENT_DT: datetime | None = None
|
||||||
|
APP_EVENT_TEXT: str | None = None
|
||||||
|
APP_EVENT_DATA: Json
|
||||||
|
APP_EVENT_DEL: int = 0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def event_type(self) -> MPLSTDB:
|
||||||
|
return MPLSTDB.fetch_one(self.APP_EVENT_TIP)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def event_data(self) -> list[EventData]:
|
||||||
|
return [EventData(key=MPLSTDB.fetch_one(key), value=value) for d in list(self.APP_EVENT_DATA) for key, value in
|
||||||
|
d.items()]
|
||||||
|
|
||||||
|
|
||||||
|
class MPMSTDB(__BaseDB):
|
||||||
|
__read_only = True
|
||||||
|
__tablename__ = "MST"
|
||||||
|
ID_MST: int = Field(default=0)
|
||||||
|
MST_PR_OTHER: int = Field(default=0)
|
||||||
|
MST_ID_KG: int = Field(default=0)
|
||||||
|
MST_ID_SRV: int = Field(default=0)
|
||||||
|
MST_ID_SETTLEMENT: Optional[int] = Field(default=0)
|
||||||
|
MST_SID: Optional[str] = Field(default=None)
|
||||||
|
MST_NAME: Optional[str] = Field(default=None)
|
||||||
|
MST_CLI_NAME: Optional[str] = Field(default=None)
|
||||||
|
MST_CODE: int = Field(default=0)
|
||||||
|
MST_CODE_PODR_NDS: int | None = Field(default=0)
|
||||||
|
MST_CODE_PODR_BN: int | None = Field(default=0)
|
||||||
|
MST_PR_TTNINPUT: int = Field(default=0)
|
||||||
|
MST_PR_TTNOUTPUT: int = Field(default=0)
|
||||||
|
MST_PR_AEX: int = Field(default=0)
|
||||||
|
MST_PR_AEX_ADR: Optional[int] = Field(default=0)
|
||||||
|
MST_ID_MST_TTNOUTPUT: int = Field(default=0)
|
||||||
|
MST_PR_SORT: int = Field(default=0)
|
||||||
|
MST_PR_PVZ: int = Field(default=0)
|
||||||
|
MST_PR_VIRT: int = Field(default=0)
|
||||||
|
MST_PR_INOTHER: int = Field(default=0)
|
||||||
|
MST_PR_ZAKG: int = Field(default=0)
|
||||||
|
MST_PR_FAR: int = Field(default=0)
|
||||||
|
MST_PR_KKT: int = Field(default=0)
|
||||||
|
MST_PR_CC: int = Field(default=0)
|
||||||
|
MST_PR_AS: int = Field(default=0)
|
||||||
|
MST_KM: int = Field(default=0)
|
||||||
|
MST_MP: int = Field(default=0)
|
||||||
|
MST_ID_AGENT_AS: int = Field(default=0)
|
||||||
|
MST_PR_NOLIM_AS: int = Field(default=0)
|
||||||
|
MST_PR_WC_AS: int = Field(default=0)
|
||||||
|
MST_PR_TRS: int = Field(default=0)
|
||||||
|
MST_ID_REGION: int = Field(default=0)
|
||||||
|
MST_ADDRESS_CODE: int = Field(default=0)
|
||||||
|
MST_ID_KLADR_DOM: Optional[int] = Field(default=0)
|
||||||
|
MST_SHIR: float = Field(default=0.0)
|
||||||
|
MST_DOLG: float = Field(default=0.0)
|
||||||
|
MST_ADR_STOR: Optional[str] = Field(default=None)
|
||||||
|
MST_FUNC_MASK: int = Field(default=0)
|
||||||
|
MST_ID_SRV_CALL: int = Field(default=0)
|
||||||
|
MST_ID_MST_CALL: int = Field(default=0)
|
||||||
|
MST_PR_DIRECT: int = Field(default=0)
|
||||||
|
MST_NAME_DIRECT: Optional[str] = Field(default=None)
|
||||||
|
MST_PR_NOTE: int = Field(default=0)
|
||||||
|
MST_PR_NOTSITE: int = Field(default=0)
|
||||||
|
MST_PR_GREEN: int = Field(default=0)
|
||||||
|
MST_PR_GREENORK: int = Field(default=0)
|
||||||
|
MST_PR_GREENPRINTER: int = Field(default=0)
|
||||||
|
MST_PR_VID_TR_VD: int = Field(default=0)
|
||||||
|
MST_PR_BAN_IN: int = Field(default=0)
|
||||||
|
MST_PR_NO_CLIENT_CODES: int = Field(default=0)
|
||||||
|
MST_PR_NO_STTN02: int = Field(default=0)
|
||||||
|
MST_PR_NO_EEU: int = Field(default=0)
|
||||||
|
MST_VID_CALC_FOBYOM: int = Field(default=0)
|
||||||
|
MST_TXT: Optional[str] = Field(default=None)
|
||||||
|
MST_DEL: int = Field(default=0)
|
||||||
|
MST_CH: Optional[datetime] = Field(default=None)
|
||||||
|
MST_WCH: int = Field(default=0)
|
||||||
|
MST_IMP: Optional[datetime] = Field(default=None)
|
||||||
|
MST_MPOST: int = Field(default=0)
|
||||||
|
MST_SEANS: int = Field(default=0)
|
||||||
|
MST_OWNERMST: int = Field(default=0)
|
||||||
|
MST_CR: Optional[datetime] = Field(default=None)
|
||||||
|
MST_WCR: int = Field(default=0)
|
||||||
|
MST_FIMP: Optional[datetime] = Field(default=None)
|
||||||
|
MST_ID_MST_SYNONYM: int = Field(default=0)
|
||||||
|
MST_NAME_OLD: Optional[str] = Field(default=None)
|
||||||
|
MST_SRC_OLD: int = Field(default=0)
|
||||||
|
MST_UPPERNAME_OLD: Optional[str] = Field(default=None)
|
||||||
|
MST_TXT_AEX: Optional[str] = Field(default=None)
|
||||||
|
MST_PR_NODOOR_AEX: int = Field(default=0)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fetch_one(cls, id: int) -> type[Self]:
|
||||||
|
cache = cls.cache()
|
||||||
|
|
||||||
|
@cache.cache()
|
||||||
|
def wrapper(id):
|
||||||
|
return super(MPMSTDB, cls).fetch_one(id)
|
||||||
|
|
||||||
|
return wrapper(id)
|
||||||
|
|
||||||
|
|
||||||
|
class MPTRSDB(__BaseDB):
|
||||||
|
__tablename__ = "TRS"
|
||||||
|
__read_only = True
|
||||||
|
ID_TRS: int = Field(default=0)
|
||||||
|
TRS_PR_TEST: int = Field(default=0)
|
||||||
|
TRS_PR_TEST_ID_SOTR: int = Field(default=0)
|
||||||
|
TRS_PR_TEST_DT: Optional[datetime] = Field(default=None)
|
||||||
|
TRS_PR: int = Field(default=0)
|
||||||
|
TRS_PR_UP: int = Field(default=0)
|
||||||
|
TRS_ID_LST_PR: int = Field(default=0)
|
||||||
|
TRS_ID_LST_VID: int = Field(default=0)
|
||||||
|
TRS_ID_LSTU_TIP: int = Field(default=0)
|
||||||
|
TRS_SID: Optional[str] = Field(default=None)
|
||||||
|
TRS_SID_GOST: Optional[str] = Field(default=None)
|
||||||
|
TRS_SID_OLD: Optional[str] = Field(default=None)
|
||||||
|
TRS_SRC_OLD: int = Field(default=0)
|
||||||
|
TRS_PR_VLAD: int = Field(default=0)
|
||||||
|
TRS_ID_AGENT_AS: int = Field(default=0)
|
||||||
|
TRS_VES: float = Field(default=0.0)
|
||||||
|
TRS_OBYOM: float = Field(default=0.0)
|
||||||
|
TRS_PR_LTOR: int = Field(default=0)
|
||||||
|
TRS_PR_LLEN: int = Field(default=0)
|
||||||
|
TRS_PR_LTOP: int = Field(default=0)
|
||||||
|
TRS_PR_TEPL: int = Field(default=0)
|
||||||
|
TRS_PR_TEPL_WHERE: int = Field(default=0)
|
||||||
|
TRS_OBYOM_TEPL: float = Field(default=0.0)
|
||||||
|
TRS_PR_NOZAGRGRUZ: int = Field(default=0)
|
||||||
|
TRS_CNT_AXIS: int = Field(default=0)
|
||||||
|
TRS_PRIM: Optional[str] = Field(default=None)
|
||||||
|
TRS_INFO: Optional[str] = Field(default=None)
|
||||||
|
TRS_TARA: Optional[float] = Field(default=0.0)
|
||||||
|
TRS_TYPEPROPERTY: int = Field(default=0)
|
||||||
|
TRS_DOGAREND: Optional[str] = Field(default=None)
|
||||||
|
TRS_1C_D_AKT: Optional[date] = Field(default=None)
|
||||||
|
TRS_1C_NOMMSG: int = Field(default=0)
|
||||||
|
TRS_1C_DEL: int = Field(default=0)
|
||||||
|
TRS_1C_DATEEND: Optional[date] = Field(default=None)
|
||||||
|
TRS_DEL: int = Field(default=0)
|
||||||
|
TRS_CR: Optional[datetime] = Field(default=None)
|
||||||
|
TRS_WCR: int = Field(default=0)
|
||||||
|
TRS_CH: Optional[datetime] = Field(default=None)
|
||||||
|
TRS_WCH: int = Field(default=0)
|
||||||
|
TRS_OWNERMST: int = Field(default=0)
|
||||||
|
TRS_SEANS: int = Field(default=0)
|
||||||
|
TRS_IMP: Optional[datetime] = Field(default=None)
|
||||||
|
TRS_FIMP: Optional[datetime] = Field(default=None)
|
||||||
|
TRS_MPOST: int = Field(default=0)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def fetch_one(cls, id: int) -> type[Self]:
|
||||||
|
cache = cls.cache()
|
||||||
|
|
||||||
|
@cache.cache()
|
||||||
|
def wrapper(id):
|
||||||
|
return super(MPTRSDB, cls).fetch_one(id)
|
||||||
|
|
||||||
|
return wrapper(id)
|
||||||
|
|
||||||
|
|
||||||
|
class MPMarshDB(__BaseDB):
|
||||||
|
__read_only = True
|
||||||
|
__tablename__ = "MARSH"
|
||||||
|
ID_MARSH: int = Field(default=0)
|
||||||
|
MARSH_PR: int = Field(default=0)
|
||||||
|
MARSH_PR_PLAN: int = Field(default=0)
|
||||||
|
MARSH_PR_VLAD: int = Field(default=0)
|
||||||
|
MARSH_PR_DOP: int = Field(default=0)
|
||||||
|
MARSH_PR_TEPL: int = Field(default=0)
|
||||||
|
MARSH_KEY_GPREF: Optional[str] = Field(default=None, min_length=1, max_length=16)
|
||||||
|
MARSH_KEY_PREF: Optional[str] = Field(default=None, min_length=1, max_length=32)
|
||||||
|
MARSH_NAME: Optional[str] = Field(default=None, min_length=1, max_length=128)
|
||||||
|
MARSH_D_N: Optional[date] = Field(default=None)
|
||||||
|
MARSH_D_K: Optional[date] = Field(default=None)
|
||||||
|
MARSH_ID_MST_OTPR: int = Field(default=0)
|
||||||
|
MARSH_ID_MST_NAZN: int = Field(default=0)
|
||||||
|
MARSH_DAYS_WEEK: int = Field(default=0)
|
||||||
|
MARSH_T_OTPR: float = Field(default=0.0)
|
||||||
|
MARSH_DATE_OUT: Optional[date] = Field(default=None)
|
||||||
|
MARSH_PRICE: Optional[float] = Field(default=0.0)
|
||||||
|
MARSH_KM: Optional[float] = Field(default=0.0)
|
||||||
|
MARSH_TXT: Optional[str] = Field(default=None, min_length=1, max_length=512)
|
||||||
|
MARSH_DEL: int = Field(default=0)
|
||||||
|
|
||||||
|
|
||||||
|
class MPMarshTRSDB(__BaseDB):
|
||||||
|
__tablename__ = "MARSH_TRS"
|
||||||
|
__read_only = True
|
||||||
|
ID_MARSH_TRS: int = Field(default=0)
|
||||||
|
MARSH_TRS_ID_MARSH: int = Field(default=0)
|
||||||
|
MARSH_TRS_DATE: Optional[date] = Field(default=None)
|
||||||
|
MARSH_TRS_ID_TRS: int = Field(default=0)
|
||||||
|
MARSH_TRS_TRS_PR_COLDONLY: Optional[int] = Field(default=0)
|
||||||
|
MARSH_TRS_ID_PRIC: int = Field(default=0)
|
||||||
|
MARSH_TRS_PRIC_PR_COLDONLY: Optional[int] = Field(default=0)
|
||||||
|
MARSH_TRS_ID_SOTR: int = Field(default=0)
|
||||||
|
MARSH_TRS_DT_DELIVERY: Optional[datetime] = Field(default=None)
|
||||||
|
MARSH_TRS_PR: int = Field(default=0)
|
||||||
|
MARSH_TRS_COMMENT: Optional[str] = Field(default=None, max_length=4096)
|
||||||
|
MARSH_TRS_TARIFF: float = Field(default=0.0)
|
||||||
|
MARSH_TRS_DEL: int = Field(default=0)
|
||||||
|
MARSH_TRS_OWNERMST: int = Field(default=0)
|
||||||
|
MARSH_TRS_MPOST: Optional[int] = Field(default=0)
|
||||||
|
MARSH_TRS_CR: Optional[datetime] = Field(default=None)
|
||||||
|
MARSH_TRS_WCR: int = Field(default=0)
|
||||||
|
MARSH_TRS_IMP: Optional[datetime] = Field(default=None)
|
||||||
|
MARSH_TRS_CH: Optional[datetime] = Field(default=None)
|
||||||
|
MARSH_TRS_WCH: int = Field(default=0)
|
||||||
|
MARSH_TRS_SEANS: int = Field(default=0)
|
||||||
|
MARSH_TRS_FIMP: Optional[datetime] = Field(default=None)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def marsh(self) -> MPMarshDB:
|
||||||
|
return MPMarshDB.fetch_one(self.MARSH_TRS_ID_MARSH)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def trs(self) -> MPTRSDB:
|
||||||
|
return MPTRSDB.fetch_one(self.MARSH_TRS_ID_TRS)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def trailer(self) -> MPTRSDB:
|
||||||
|
return MPTRSDB.fetch_one(self.MARSH_TRS_ID_PRIC)
|
||||||
|
|
||||||
|
|
||||||
|
class MPAppParamDB(__BaseDB):
|
||||||
|
__tablename__ = "APP_PARAM"
|
||||||
|
__read_only = True
|
||||||
|
APP_PARAM_ID_REC: int = Field(default=0)
|
||||||
|
APP_PARAM_STR: Optional[str] = Field(default=None, min_length=1, max_length=1024)
|
||||||
|
APP_PARAM_VID: int = Field(default=0)
|
||||||
|
APP_PARAM_TIP: int = Field(default=0)
|
||||||
|
APP_PARAM_DEL: int = Field(default=0)
|
||||||
|
APP_PARAM_CR: Optional[datetime] = Field(default=None)
|
||||||
|
APP_PARAM_WCR: int = Field(default=0)
|
||||||
|
APP_PARAM_CH: Optional[datetime] = Field(default=None)
|
||||||
|
APP_PARAM_WCH: int = Field(default=0)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def param_type(self) -> MPLSTDB:
|
||||||
|
return MPLSTDB.fetch_one(self.APP_PARAM_TIP)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def related_table(self) -> Union['MPMSTDB', 'MPMarshTRSDB', 'MPTRSDB', 'MPMarshDB']:
|
||||||
|
query = {
|
||||||
|
"ID_MARSH_TRS": MPMarshTRSDB,
|
||||||
|
"ID_MST": MPMSTDB,
|
||||||
|
"ID_TRS": MPTRSDB,
|
||||||
|
"ID_MARSH": MPMarshDB
|
||||||
|
}
|
||||||
|
return query[self.param_type.LST_NAME_SH].fetch_one(self.APP_PARAM_STR)
|
||||||
|
|
|
@ -0,0 +1,27 @@
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class StatusEnum(str, Enum):
|
||||||
|
CANCELLED = "Cancelled"
|
||||||
|
IN_PROGRESS = "InProgress"
|
||||||
|
COMPLETED = "Completed"
|
||||||
|
NOT_DEFINED = "NotDefined"
|
||||||
|
|
||||||
|
|
||||||
|
class TaskTypeEnum(str, Enum):
|
||||||
|
MOV_MARSH = "MovMarsh"
|
||||||
|
|
||||||
|
|
||||||
|
class SubtaskTypeEnum(str, Enum):
|
||||||
|
MST_IN = "Mst_In"
|
||||||
|
MST_OUT = "Mst_Out"
|
||||||
|
SET_UNLOADING = "SetUnLoading"
|
||||||
|
SET_LOADING = "SetLoading"
|
||||||
|
|
||||||
|
|
||||||
|
class MarshTemperatureProperty(int, Enum):
|
||||||
|
HOT = 1
|
||||||
|
COLD = 2
|
||||||
|
UNDEFINED = 0
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,26 @@
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from core.model.task.enums import StatusEnum
|
||||||
|
|
||||||
|
|
||||||
|
class UpdTaskData(BaseModel):
|
||||||
|
task_id: int
|
||||||
|
dt: datetime.datetime
|
||||||
|
status: StatusEnum
|
||||||
|
error_text: str | None = Field(max_length=1024, default=None)
|
||||||
|
|
||||||
|
|
||||||
|
class UpdTaskRequest(BaseModel):
|
||||||
|
data: list[UpdTaskData]
|
||||||
|
|
||||||
|
|
||||||
|
class SetTaskStatusActiveRequest(BaseModel):
|
||||||
|
task_id: int
|
||||||
|
dt_start: datetime.datetime = Field(default_factory=datetime.datetime.now)
|
||||||
|
|
||||||
|
|
||||||
|
class SetSubtaskStatusRequest(BaseModel):
|
||||||
|
subtask_id: int
|
||||||
|
finished_dt: datetime.datetime = Field(default_factory=datetime.datetime.now)
|
|
@ -0,0 +1,2 @@
|
||||||
|
class Service:
|
||||||
|
pass
|
|
@ -0,0 +1,75 @@
|
||||||
|
from core.model.profile.db import ProfileDB
|
||||||
|
from core.model.task.db import DBAppTask, DBSubTask, DBEvent, DBMarsh, DBTRS, DBMST, Location
|
||||||
|
from core.model.task.db2 import MPAppTaskDB, MPMarshTRSDB, MPMSTDB
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_all_tasks(user: ProfileDB) -> list[DBAppTask]:
|
||||||
|
table = MPAppTaskDB.get_table()
|
||||||
|
tasks = MPAppTaskDB.fetch_all(table.APP_TASK_ID_SOTR == user.id)
|
||||||
|
|
||||||
|
returned_list = []
|
||||||
|
|
||||||
|
for task in tasks:
|
||||||
|
if task.is_subtask:
|
||||||
|
continue
|
||||||
|
|
||||||
|
subtasks_db = [x for x in tasks if x.is_subtask and x.APP_TASK_ID_APP_TASK == task.ID_APP_TASK]
|
||||||
|
subtasks = []
|
||||||
|
|
||||||
|
for subtask in subtasks_db:
|
||||||
|
try:
|
||||||
|
station_db: MPMSTDB = next(p for p in subtask.params if p.APP_PARAM_TIP == 8668).related_table
|
||||||
|
station = DBMST(id=station_db.ID_MST, name=station_db.MST_NAME,
|
||||||
|
location=Location(lat=station_db.MST_SHIR, lon=station_db.MST_DOLG))
|
||||||
|
except StopIteration:
|
||||||
|
station = None
|
||||||
|
|
||||||
|
subtasks.append(DBSubTask(id=subtask.ID_APP_TASK,
|
||||||
|
start_pln=subtask.APP_TASK_DT_START_PLN, end_pln=subtask.APP_TASK_DT_END_PLN,
|
||||||
|
start_fact=subtask.APP_TASK_DT_START_FACT, end_fact=subtask.APP_TASK_DT_END_FACT,
|
||||||
|
status=subtask.status.LST_NAME_SH,
|
||||||
|
task_type=subtask.task_type.LST_NAME_SH,
|
||||||
|
text=subtask.APP_TASK_TEXT,
|
||||||
|
station=station)
|
||||||
|
)
|
||||||
|
|
||||||
|
events = [
|
||||||
|
DBEvent(id=x.ID_APP_EVENT,
|
||||||
|
type=x.event_type.LST_NAME_SH,
|
||||||
|
text=x.APP_EVENT_TEXT,
|
||||||
|
parent_id=task.ID_APP_TASK,
|
||||||
|
event_data=x.event_data,
|
||||||
|
event_datetime=x.APP_EVENT_DT) for x in task.events
|
||||||
|
]
|
||||||
|
|
||||||
|
marsh_trs: MPMarshTRSDB = task.params[0].related_table
|
||||||
|
marsh = marsh_trs.marsh
|
||||||
|
trs_db = marsh_trs.trs
|
||||||
|
trailer_db = marsh_trs.trailer
|
||||||
|
|
||||||
|
trailer = DBTRS(id=trailer_db.ID_TRS, gost=trailer_db.TRS_SID_GOST)
|
||||||
|
truck = DBTRS(id=trs_db.ID_TRS, gost=trs_db.TRS_SID_GOST)
|
||||||
|
|
||||||
|
route = DBMarsh(id=marsh.ID_MARSH,
|
||||||
|
temperature_property=marsh.MARSH_PR_TEPL,
|
||||||
|
name=marsh.MARSH_NAME,
|
||||||
|
trailer=trailer,
|
||||||
|
truck=truck
|
||||||
|
)
|
||||||
|
|
||||||
|
returned_list.append(DBAppTask(
|
||||||
|
id=task.ID_APP_TASK,
|
||||||
|
profile_id=task.APP_TASK_ID_SOTR,
|
||||||
|
start_pln=task.APP_TASK_DT_START_PLN,
|
||||||
|
end_pln=task.APP_TASK_DT_END_PLN,
|
||||||
|
start_fact=task.APP_TASK_DT_START_FACT,
|
||||||
|
end_fact=task.APP_TASK_DT_END_FACT,
|
||||||
|
status=task.status.LST_NAME_SH,
|
||||||
|
task_type=task.task_type.LST_NAME_SH,
|
||||||
|
text=task.APP_TASK_TEXT,
|
||||||
|
events=events,
|
||||||
|
subtasks=subtasks,
|
||||||
|
route=route
|
||||||
|
))
|
||||||
|
|
||||||
|
return returned_list
|
|
@ -0,0 +1,6 @@
|
||||||
|
from .profile_storage import Storage as ProfileStorage
|
||||||
|
from .task_storage import Storage as TaskStorage
|
||||||
|
|
||||||
|
|
||||||
|
profile = ProfileStorage()
|
||||||
|
task = TaskStorage()
|
|
@ -0,0 +1,58 @@
|
||||||
|
import socket
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from typing import ContextManager
|
||||||
|
|
||||||
|
import sqlalchemy
|
||||||
|
from firebird.driver import Cursor, Connection
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from core.database.db import engine
|
||||||
|
|
||||||
|
|
||||||
|
class BaseStorage:
|
||||||
|
_pool = engine
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def db_authorization(con: sqlalchemy.Connection):
|
||||||
|
stmt = text(f"""
|
||||||
|
UPDATE SEANS
|
||||||
|
set SEANS_STATUS = 2,
|
||||||
|
SEANS_ID_SOTR = 31,--14,
|
||||||
|
SEANS_ID_MST = 0, -- без привязки
|
||||||
|
SEANS_COMP_NAME = '{socket.gethostname()}',--//'PersonalArea'
|
||||||
|
SEANS_REMOTE_VER = '2024052901'
|
||||||
|
where ID_SEANS = RDB$GET_CONTEXT('USER_SESSION', 'ID_SEANS');
|
||||||
|
""")
|
||||||
|
|
||||||
|
con.execute(stmt)
|
||||||
|
con.commit()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_authorized(con: sqlalchemy.Connection):
|
||||||
|
stmt = text("""
|
||||||
|
select SEANS_ID_SOTR from SEANS where ID_SEANS = RDB$GET_CONTEXT('USER_SESSION', 'ID_SEANS')
|
||||||
|
""")
|
||||||
|
return con.execute(stmt).fetchone()[0] == 31
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@contextmanager
|
||||||
|
def get_session(cls) -> ContextManager[sqlalchemy.Connection]:
|
||||||
|
connection = cls._pool.connect()
|
||||||
|
if not cls.is_authorized(connection):
|
||||||
|
cls.db_authorization(connection)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield connection
|
||||||
|
finally:
|
||||||
|
connection.close()
|
||||||
|
|
||||||
|
# @classmethod
|
||||||
|
# @contextmanager
|
||||||
|
# def get_cursor(cls) -> ContextManager[]:
|
||||||
|
# with cls.get_session() as session:
|
||||||
|
# session: sqlalchemy.Connection
|
||||||
|
# cursor = session.cursor()
|
||||||
|
# try:
|
||||||
|
# yield cursor
|
||||||
|
# finally:
|
||||||
|
# cursor.close()
|
|
@ -0,0 +1,76 @@
|
||||||
|
from random import randint
|
||||||
|
|
||||||
|
import sqlalchemy
|
||||||
|
from firebird.driver import Cursor, Connection
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from core.model.profile.db import ProfileDB
|
||||||
|
from .base import BaseStorage
|
||||||
|
|
||||||
|
|
||||||
|
class Storage(BaseStorage):
|
||||||
|
def get_profile_by_phone(self, phone_number: str) -> ProfileDB | None:
|
||||||
|
stmt = text(f"""
|
||||||
|
select SOTR.ID_SOTR, SOTR.SOTR_FULLNAME, phone.PHONE_NUMBER_DGT
|
||||||
|
|
||||||
|
from phone
|
||||||
|
left join CONPHONE cros on cros.CONPHONE_ID_PHONE = phone.ID_PHONE
|
||||||
|
join SOTR on SOTR.ID_SOTR = cros.CONPHONE_ID_CLICONTACT
|
||||||
|
|
||||||
|
where phone.PHONE_NUMBER_DGT='{phone_number}' and
|
||||||
|
SOTR_DEL = 0 and
|
||||||
|
SOTR_D_UVOL is null and
|
||||||
|
SOTR_SOURCE in (3,4);
|
||||||
|
""")
|
||||||
|
with self.get_session() as cursor:
|
||||||
|
cursor: sqlalchemy.Connection
|
||||||
|
data = cursor.execute(stmt).fetchone()
|
||||||
|
|
||||||
|
if not data:
|
||||||
|
return None
|
||||||
|
return ProfileDB(id=data[0], full_name=data[1], phone_number=data[2])
|
||||||
|
|
||||||
|
def check_profile_code(self, user_id: int, code: int) -> bool:
|
||||||
|
stmt = text("""
|
||||||
|
select ID_CLIENT_CODES,
|
||||||
|
CLIENT_CODES_ID_CLIENT,
|
||||||
|
CLIENT_CODES_VALUE from CLIENT_CODES
|
||||||
|
where ID_CLIENT_CODES = (select MAX(ID_CLIENT_CODES)
|
||||||
|
from CLIENT_CODES where CLIENT_CODES_TIP = 5 and CLIENT_CODES_ID_CLIENT = :user_id);
|
||||||
|
""")
|
||||||
|
with self.get_session() as cursor:
|
||||||
|
data = cursor.execute(stmt, {"user_id": user_id}).fetchone()
|
||||||
|
if data is not None:
|
||||||
|
return data[2] == str(code)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_profile_by_id(self, profile_id: int):
|
||||||
|
stmt = text("""
|
||||||
|
|
||||||
|
select SOTR.ID_SOTR, SOTR.SOTR_FULLNAME, PHONE.PHONE_NUMBER_DGT from SOTR
|
||||||
|
left join CONPHONE on CONPHONE.CONPHONE_ID_CLICONTACT = SOTR.ID_SOTR
|
||||||
|
left join PHONE on CONPHONE.CONPHONE_ID_PHONE = PHONE.ID_PHONE
|
||||||
|
where SOTR.ID_SOTR = :profile_id;
|
||||||
|
""")
|
||||||
|
|
||||||
|
with self.get_session() as cursor:
|
||||||
|
data = cursor.execute(stmt, {"profile_id": profile_id}).fetchone()
|
||||||
|
if not data:
|
||||||
|
return None
|
||||||
|
return ProfileDB(id=data[0], full_name=data[1], phone_number=data[2])
|
||||||
|
|
||||||
|
def generate_profile_auth_code(self, user_id: int, phone_dgt: int) -> int:
|
||||||
|
|
||||||
|
code = randint(1000, 9999)
|
||||||
|
|
||||||
|
stmt = text("""
|
||||||
|
insert into CLIENT_CODES (client_codes_tip, CLIENT_CODES_VALUE, CLIENT_CODES_ID_CLIENT, CLIENT_CODES_PHONE, CLIENT_CODES_DT)
|
||||||
|
values (5, :code, :user_id, :phone, localtimestamp);
|
||||||
|
""")
|
||||||
|
|
||||||
|
with self.get_session() as session:
|
||||||
|
session: sqlalchemy.Connection
|
||||||
|
session.execute(stmt, {"code": code, "user_id": user_id, "phone": phone_dgt})
|
||||||
|
session.commit()
|
||||||
|
return code
|
|
@ -0,0 +1,227 @@
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import typing
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
import sqlalchemy
|
||||||
|
from sqlalchemy import text, TextClause
|
||||||
|
from typing_extensions import deprecated
|
||||||
|
|
||||||
|
from core.model.task.db import DBAppTask, DBSubTask, DBMarsh, DBTRS, DBMST, Location, DBEvent
|
||||||
|
from .base import BaseStorage
|
||||||
|
from ..model.task.enums import StatusEnum
|
||||||
|
from ..model.task.requests import UpdTaskData
|
||||||
|
|
||||||
|
|
||||||
|
class _STMTS(Enum):
|
||||||
|
FETCH_TASKS_WITH_SUBTASKS = text(f"""
|
||||||
|
select t1.ID_APP_TASK, -- 0
|
||||||
|
t1.APP_TASK_ID_SOTR, -- 1
|
||||||
|
t1.APP_TASK_ID_APP_TASK, -- 2
|
||||||
|
t1.APP_TASK_DT_START_PLN, -- 3
|
||||||
|
t1.APP_TASK_DT_END_PLN, -- 4
|
||||||
|
t1.APP_TASK_DT_START_FACT, -- 5
|
||||||
|
t1.APP_TASK_DT_END_FACT, -- 6
|
||||||
|
lst_status.LST_NAME_SH, -- 7
|
||||||
|
lst_tip.LST_NAME_SH, -- 8
|
||||||
|
t1.APP_TASK_TEXT, -- 9
|
||||||
|
|
||||||
|
lst_param_tip.LST_NAME_SH as param_tip, -- 10
|
||||||
|
|
||||||
|
-- marsh trs
|
||||||
|
trs_1.ID_TRS as truck_id, -- 11
|
||||||
|
trs_1.TRS_SID_GOST as truck_gost, -- 12
|
||||||
|
trs_2.ID_TRS as trailer_id, -- 13
|
||||||
|
trs_2.TRS_SID_GOST as trailer_gost, -- 14
|
||||||
|
m.ID_MARSH as marsh_id, -- 15
|
||||||
|
m.MARSH_PR_TEPL as marsh_temperature_property, --16
|
||||||
|
m.MARSH_NAME as marsh_name, -- 17
|
||||||
|
|
||||||
|
-- MST
|
||||||
|
mst.ID_MST, -- 18
|
||||||
|
mst.MST_NAME, -- 19
|
||||||
|
mst.MST_SHIR, -- 20
|
||||||
|
mst.MST_DOLG, -- 21
|
||||||
|
|
||||||
|
-- EVENT
|
||||||
|
event.ID_APP_EVENT, -- 22
|
||||||
|
event.LST_NAME_SH, -- 23
|
||||||
|
event.APP_EVENT_TEXT, -- 24,
|
||||||
|
event.APP_EVENT_ID_REC, -- 25
|
||||||
|
event.APP_EVENT_DT, -- 26
|
||||||
|
event.APP_EVENT_DATA -- 27
|
||||||
|
|
||||||
|
from APP_TASK t1
|
||||||
|
left join LST lst_status
|
||||||
|
on t1.APP_TASK_STATUS = lst_status.ID_LST
|
||||||
|
left join LST lst_tip on t1.APP_TASK_TIP = lst_tip.ID_LST
|
||||||
|
left join APP_PARAM param on param.APP_PARAM_ID_REC = t1.ID_APP_TASK and param.APP_PARAM_DEL = 0
|
||||||
|
LEFT JOIN LST lst_param_tip on param.APP_PARAM_TIP = lst_param_tip.ID_LST
|
||||||
|
left join MARSH_TRS mt on param.APP_PARAM_STR = mt.ID_MARSH_TRS and lst_param_tip.LST_NAME_SH = 'ID_MARSH_TRS'
|
||||||
|
left join TRS trs_1 on mt.MARSH_TRS_ID_TRS = trs_1.ID_TRS
|
||||||
|
left join TRS trs_2 on mt.MARSH_TRS_ID_PRIC = trs_2.ID_TRS
|
||||||
|
left join MARSH m on mt.MARSH_TRS_ID_MARSH = m.ID_MARSH
|
||||||
|
left join MST mst on param.APP_PARAM_STR = mst.ID_MST and lst_param_tip.LST_NAME_SH = 'ID_MST'
|
||||||
|
left join (select ID_APP_EVENT, LST_NAME_SH, APP_EVENT_TEXT, APP_EVENT_ID_REC, APP_EVENT_DT, APP_EVENT_DATA from APP_EVENT join LST on ID_LST = APP_EVENT_VID where APP_EVENT_DEL = 0) event
|
||||||
|
on event.APP_EVENT_ID_REC = t1.ID_APP_TASK
|
||||||
|
where t1.APP_TASK_DEL = 0
|
||||||
|
and t1.APP_TASK_ID_SOTR = :user_id;
|
||||||
|
""")
|
||||||
|
FETCH_NEXT_SUBTASK_FOR_TASK = text(
|
||||||
|
f"""select ID_APP_TASK from app_task where APP_TASK_ID_APP_TASK = :task_id and APP_TASK_ID_APP_TASK != ID_APP_TASK AND APP_TASK_DEL = 0 and APP_TASK_STATUS not in (8681, 8682) order by APP_TASK_DT_START_PLN""")
|
||||||
|
|
||||||
|
INSERT_EVENT = text("""
|
||||||
|
INSERT into APP_EVENT (
|
||||||
|
APP_EVENT_ID_SOTR,
|
||||||
|
APP_EVENT_ID_REC,
|
||||||
|
APP_EVENT_DT,
|
||||||
|
APP_EVENT_DATA,
|
||||||
|
APP_EVENT_TEXT,
|
||||||
|
APP_EVENT_VID, APP_EVENT_PARAM) values (:user_id,
|
||||||
|
:rec_id,
|
||||||
|
:event_dt,
|
||||||
|
:event_data,
|
||||||
|
:event_text,
|
||||||
|
8678, :event_params)
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
def generate_event_stmt(profile_id: int, task_id: int, event_dt: datetime.datetime, event_text: str,
|
||||||
|
event_data: dict[str, str], event_params: dict[str, str] | None = None) -> tuple[
|
||||||
|
TextClause, dict[str, typing.Any]]:
|
||||||
|
return _STMTS.INSERT_EVENT.value, {"user_id": profile_id,
|
||||||
|
"rec_id": task_id,
|
||||||
|
"event_dt": event_dt,
|
||||||
|
"event_text": event_text,
|
||||||
|
"event_data": json.dumps([event_data], separators=(',', ':')),
|
||||||
|
"event_params": json.dumps([event_params],
|
||||||
|
separators=(',', ":")) if event_params else None
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class Storage(BaseStorage):
|
||||||
|
|
||||||
|
def generate_station(self, row: tuple) -> typing.Optional[DBMST]:
|
||||||
|
return DBMST(id=row[18], name=row[19], location=Location(lat=row[20], lon=row[21], parent_id=row[18]),
|
||||||
|
parent_id=row[0])
|
||||||
|
|
||||||
|
def generate_truck(self, row: tuple) -> typing.Optional[DBTRS]:
|
||||||
|
if row[11] is not None and row[11]:
|
||||||
|
return DBTRS(id=row[11], gost=row[12], parent_id=row[15])
|
||||||
|
|
||||||
|
def generate_trailer(self, row: tuple) -> typing.Optional[DBTRS]:
|
||||||
|
if row[13] is not None and row[13] != 0:
|
||||||
|
return DBTRS(id=row[13], gost=row[14], parent_id=row[16])
|
||||||
|
|
||||||
|
def generate_route(self, row: tuple) -> typing.Optional[DBMarsh]:
|
||||||
|
return DBMarsh(id=row[15], temperature_property=row[16], name=row[17],
|
||||||
|
trailer=self.generate_trailer(row),
|
||||||
|
truck=self.generate_truck(row), parent_id=row[0])
|
||||||
|
|
||||||
|
def generate_event(self, row):
|
||||||
|
return DBEvent(id=row[22], type=row[23], text=row[24], parent_id=row[0], event_datetime=row[26],
|
||||||
|
event_data=row[27])
|
||||||
|
|
||||||
|
def generate_task(self, row):
|
||||||
|
return DBAppTask(id=row[0], profile_id=row[1], start_pln=row[3], end_pln=row[4], start_fact=row[5],
|
||||||
|
end_fact=row[6], status=row[7], task_type=row[8], text=row[9])
|
||||||
|
|
||||||
|
def generate_subtask(self, row):
|
||||||
|
return DBSubTask(id=row[0], parent_id=row[2], start_pln=row[3], end_pln=row[4], start_fact=row[5],
|
||||||
|
end_fact=row[6], status=row[7], task_type=row[8], text=row[9])
|
||||||
|
|
||||||
|
def fetch_tasks_with_subtasks(self, user_id: int) -> list[DBAppTask]:
|
||||||
|
stmt = _STMTS.FETCH_TASKS_WITH_SUBTASKS.value
|
||||||
|
with self.get_session() as cur:
|
||||||
|
cur: sqlalchemy.Connection
|
||||||
|
res = cur.execute(stmt, {"user_id": user_id}).fetchall()
|
||||||
|
|
||||||
|
tasks = list(set(self.generate_task(row) for row in res if row[0] == row[2]))
|
||||||
|
for task in tasks:
|
||||||
|
task.subtasks = list(
|
||||||
|
set(self.generate_subtask(row) for row in res if row[0] != row[2] and row[2] == task.id))
|
||||||
|
task.events = list(
|
||||||
|
set(self.generate_event(row) for row in res if row[22] is not None and row[25] == task.id))
|
||||||
|
task.subtasks.sort(key=lambda u: u.start_pln)
|
||||||
|
task.events.sort(key=lambda u: u.event_datetime)
|
||||||
|
|
||||||
|
for subtask in task.subtasks:
|
||||||
|
subtask.station = next(self.generate_station(row) for row in res if
|
||||||
|
row[18] is not None and row[0] == subtask.id)
|
||||||
|
|
||||||
|
task.route = next(self.generate_route(row) for row in res if row[0] == task.id and row[15] is not None)
|
||||||
|
|
||||||
|
tasks.sort(key=lambda u: u.start_pln)
|
||||||
|
return tasks
|
||||||
|
|
||||||
|
@deprecated("DEPRECATED: use fetch_tasks_with_subtasks instead")
|
||||||
|
def fetch_task_by_id(self, task_id: int) -> typing.Optional[DBAppTask]:
|
||||||
|
stmt = text("""
|
||||||
|
select first 1 t1.ID_APP_TASK,
|
||||||
|
t1.APP_TASK_ID_SOTR,
|
||||||
|
t1.APP_TASK_ID_APP_TASK,
|
||||||
|
t1.APP_TASK_DT_START_PLN,
|
||||||
|
t1.APP_TASK_DT_END_PLN,
|
||||||
|
t1.APP_TASK_DT_START_FACT,
|
||||||
|
t1.APP_TASK_DT_END_FACT,
|
||||||
|
lst_status.LST_NAME_SH,
|
||||||
|
lst_tip.LST_NAME_SH,
|
||||||
|
t1.APP_TASK_TEXT from APP_TASK t1
|
||||||
|
left join LST lst_status on t1.APP_TASK_STATUS = lst_status.ID_LST
|
||||||
|
left join LST lst_tip on t1.APP_TASK_TIP = lst_tip.ID_LST
|
||||||
|
where
|
||||||
|
t1.APP_TASK_DEL = 0 and
|
||||||
|
t1.ID_APP_TASK = :task_id and
|
||||||
|
t1.ID_APP_TASK = t1.APP_TASK_ID_APP_TASK;
|
||||||
|
""")
|
||||||
|
with self.get_session() as cur:
|
||||||
|
cur: sqlalchemy.Connection
|
||||||
|
res = cur.execute(stmt, {"task_id": task_id}).fetchone()
|
||||||
|
return res if res is None else DBAppTask(id=res[0], profile_id=res[1], start_pln=res[3], end_pln=res[4],
|
||||||
|
start_fact=res[5], end_fact=res[6], status=res[7], task_type=res[8],
|
||||||
|
text=res[9])
|
||||||
|
|
||||||
|
@deprecated("DEPRECATED: use fetch_tasks_with_subtasks instead")
|
||||||
|
def fetch_tasks_for_user(self, user_id: int, limit: int = 10, offset: int = 0) -> list[DBAppTask]:
|
||||||
|
stmt = text("""
|
||||||
|
select first :limit skip :offset t1.ID_APP_TASK,
|
||||||
|
t1.APP_TASK_ID_SOTR,
|
||||||
|
t1.APP_TASK_ID_APP_TASK,
|
||||||
|
t1.APP_TASK_DT_START_PLN,
|
||||||
|
t1.APP_TASK_DT_END_PLN,
|
||||||
|
t1.APP_TASK_DT_START_FACT,
|
||||||
|
t1.APP_TASK_DT_END_FACT,
|
||||||
|
lst_status.LST_NAME_SH,
|
||||||
|
lst_tip.LST_NAME_SH,
|
||||||
|
t1.APP_TASK_TEXT from APP_TASK t1
|
||||||
|
left join LST lst_status on t1.APP_TASK_STATUS = lst_status.ID_LST
|
||||||
|
left join LST lst_tip on t1.APP_TASK_TIP = lst_tip.ID_LST
|
||||||
|
where
|
||||||
|
t1.APP_TASK_DEL = 0 and
|
||||||
|
t1.ID_APP_TASK = t1.APP_TASK_ID_APP_TASK and
|
||||||
|
t1.APP_TASK_ID_SOTR = :user_id;
|
||||||
|
""")
|
||||||
|
with self.get_session() as session:
|
||||||
|
session: sqlalchemy.Connection
|
||||||
|
res = session.execute(stmt, {"limit": limit, "offset": offset, "user_id": user_id}).fetchall()
|
||||||
|
|
||||||
|
return [DBAppTask(id=row[0], profile_id=row[1], start_pln=row[3], end_pln=row[4],
|
||||||
|
start_fact=row[5], end_fact=row[6], status=row[7], task_type=row[8], text=row[9]) for row
|
||||||
|
in res]
|
||||||
|
|
||||||
|
def update_task(self, event: UpdTaskData, profile_id: int):
|
||||||
|
with self.get_session() as session:
|
||||||
|
session: sqlalchemy.Connection
|
||||||
|
|
||||||
|
status_id = session.execute(text("""
|
||||||
|
select first 1 ID_LST from LST where LST_NAME_SH = :status_val and LST_DEL = 0
|
||||||
|
"""), {"status_val": event.status.value}).fetchone()[0]
|
||||||
|
|
||||||
|
data = {"8794": f"{status_id}"}
|
||||||
|
|
||||||
|
if event.status == StatusEnum.CANCELLED:
|
||||||
|
data["error"] = event.error_text
|
||||||
|
|
||||||
|
session.execute(*generate_event_stmt(profile_id, event.task_id, event.dt,
|
||||||
|
f"Установлен новый статус {"задачи" if self.fetch_task_by_id(event.task_id) else "подзадачи"}: {event.status.value}", event_data=data))
|
||||||
|
session.commit()
|
|
@ -0,0 +1 @@
|
||||||
|
from .handlers import router as graphql_router
|
|
@ -0,0 +1,5 @@
|
||||||
|
from strawberry.fastapi import GraphQLRouter
|
||||||
|
|
||||||
|
from core.transport.graphql.schema import schema
|
||||||
|
|
||||||
|
router = GraphQLRouter(schema=schema)
|
|
@ -0,0 +1,116 @@
import typing
from dataclasses import field
from datetime import datetime
from enum import Enum
from typing import Union

import strawberry
from strawberry.scalars import JSON

from core.model.task.enums import MarshTemperatureProperty
from core.storage import task


@strawberry.enum
class StatusEnumQl(Enum):
    CANCELLED = "Cancelled"
    IN_PROGRESS = "InProgress"
    COMPLETED = "Completed"
    NOT_DEFINED = "NotDefined"


@strawberry.enum
class TaskTypeEnumQl(Enum):
    MOV_MARSH = "MovMarsh"


@strawberry.enum
class MarshTemperaturePropertyQl(Enum):
    HOT = 1
    COLD = 2
    UNDEFINED = 0


@strawberry.type
class Query:

    @strawberry.field
    def tasks(self, user_id: str) -> list['AppTaskQL']:
        tasks = task.fetch_tasks_with_subtasks(user_id=int(user_id))
        returned_list: list['AppTaskQL'] = []
        for t in tasks:
            updated_task = AppTaskQL(
                id=str(t.id),
                start_pln=t.start_pln,
                end_pln=t.end_pln,
                start_fact=t.start_fact,
                end_fact=t.end_fact,
                status=t.status.value,
                task_type=t.task_type.value,
                text=t.text,
                route=MarshQL(
                    id=str(t.route.id),
                    temperature_property=t.route.temperature_property.value,
                    name=t.route.name,
                    trailer=TRSQL(
                        id=str(t.route.trailer.id),
                        gost=t.route.trailer.gost
                    ) if t.route.trailer else None,
                    truck=TRSQL(
                        id=str(t.route.truck.id),
                        gost=t.route.truck.gost
                    ) if t.route.truck else None,
                ) if t.route else None,
                events=[EventQl(
                    id=e.id,
                    type=e.type,
                    text=e.text,
                    event_data=e.event_data,
                    event_datetime=e.event_datetime
                ) for e in t.events]
            )
            returned_list.append(updated_task)

        return returned_list


@strawberry.type
class AppTaskQL:
    id: str
    start_pln: datetime
    end_pln: datetime
    start_fact: datetime | None
    end_fact: datetime | None
    status: StatusEnumQl
    task_type: TaskTypeEnumQl
    text: str

    # These need type annotations to become strawberry fields; bare class
    # attributes are neither exposed in the schema nor accepted by the
    # generated constructor used in Query.tasks above.
    events: list['EventQl'] = strawberry.field(default_factory=list)
    subtasks: list['AppTaskQL'] = strawberry.field(default_factory=list)
    route: typing.Optional['MarshQL'] = None


@strawberry.type
class MarshQL:
    id: str
    temperature_property: MarshTemperaturePropertyQl
    name: str
    trailer: typing.Optional['TRSQL'] = None
    truck: typing.Optional['TRSQL'] = None


@strawberry.type
class TRSQL:
    id: str | None
    gost: str | None


@strawberry.type
class EventQl:
    id: int
    type: str
    text: str
    event_data: JSON  # a plain dict is not a valid strawberry type, so the JSON scalar is used
    event_datetime: datetime


schema = strawberry.Schema(query=Query)
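For reference, a query like the following exercises the schema above through the /graphql endpoint mounted in main.py. This is a sketch only: it assumes strawberry's default snake_case-to-camelCase field conversion, and the host, port, and user id are placeholders.

import httpx

QUERY = """
query ($userId: String!) {
  tasks(userId: $userId) {
    id
    startPln
    endPln
    status
    taskType
    text
  }
}
"""

# The GraphQL router is mounted at /graphql in main.py.
resp = httpx.post("http://localhost:8000/graphql",
                  json={"query": QUERY, "variables": {"userId": "1"}})
print(resp.json())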
@ -0,0 +1 @@
from .handlers import router
@ -0,0 +1 @@
from .handlers import router as auth_router
@ -0,0 +1,65 @@
import time

import jwt
from fastapi import APIRouter, Depends, HTTPException, Header
from fastapi.security import OAuth2PasswordRequestForm
from pydantic import BaseModel, Field
from pydantic_extra_types.phone_numbers import PhoneNumber

from core.config import Config
from core.errors.auth.errors import profile_not_founded, incorrect_phone_number
from core.storage import profile

router = APIRouter(prefix="/auth")


class Token(BaseModel):
    access_token: str
    token_type: str


class PhoneNumberRequest(BaseModel):
    phoneNumber: str


class TokenRequest(BaseModel):
    token: str


@router.post("/phone")
async def get_authorization_code(req: PhoneNumberRequest):
    for char in req.phoneNumber:
        if not char.isdigit():
            incorrect_phone_number()
    if not 10 <= len(req.phoneNumber) <= 12:
        incorrect_phone_number()

    p = profile.get_profile_by_phone("".join(char for char in req.phoneNumber if char.isdigit()))
    if not p:
        profile_not_founded()
    return {"code": profile.generate_profile_auth_code(p.id, p.phone_number)}


@router.post("/phone/code")
async def get_access_token(form_data: OAuth2PasswordRequestForm = Depends()) -> Token:
    p = profile.get_profile_by_phone(''.join(char for char in form_data.username if char.isdigit()))
    if not p:
        profile_not_founded()

    check = profile.check_profile_code(p.id, form_data.password)
    if not check:
        raise HTTPException(status_code=403, detail="Incorrect code")

    token = jwt.encode({"profile_id": p.id, "exp": time.time() + 60 * 60 * 24}, algorithm="HS256", key=Config.secret)
    return Token(access_token=token, token_type="bearer")


@router.get("/token")
async def test_access_token(token: str = Header(alias="Authorization")):
    try:
        jwt.decode(token.split(" ")[1], Config.secret, algorithms=["HS256"])
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=403, detail="Token expired")
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=403, detail="Invalid token")
    return {}
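The login flow implemented by these handlers can be exercised roughly as follows. This is a sketch, not part of the repository: the host, phone number, and received code are placeholders, and the routes are mounted under /api/v1 by core/transport/rest.

import httpx

BASE = "http://localhost:8000/api/v1"

# 1. Request a one-time code for a registered phone number.
code = httpx.post(f"{BASE}/auth/phone", json={"phoneNumber": "79990000000"}).json()["code"]

# 2. Exchange phone number and code for a JWT via the OAuth2 password form
#    (username = phone number, password = code).
token = httpx.post(f"{BASE}/auth/phone/code",
                   data={"username": "79990000000", "password": code}).json()["access_token"]

# 3. Verify the token.
httpx.get(f"{BASE}/auth/token", headers={"Authorization": f"Bearer {token}"}).raise_for_status()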
@ -0,0 +1,10 @@
from fastapi import APIRouter

from .auth import auth_router
from .tasks import tasks_router

router = APIRouter(prefix="/api/v1")

# Attach the sub-routers
router.include_router(auth_router)
router.include_router(tasks_router)
@ -0,0 +1 @@
from .handlers import router as tasks_router
@ -0,0 +1,204 @@
from typing import Union, Callable

from fastapi import APIRouter, Depends, Path

from core.errors.auth.errors import no_task_for_current_user
from core.errors.task.errors import unavailable_status, should_provide_error_text_with_cancelled_status, \
    update_task_by_chain_failed
from core.helpers.profile_helpers import get_user_from_token
from core.model.profile.db import ProfileDB
from core.model.task.db import DBAppTask, DBSubTask, DBEvent
from core.model.task.enums import StatusEnum
from core.model.task.requests import SetTaskStatusActiveRequest, SetSubtaskStatusRequest, UpdTaskRequest, UpdTaskData
from core.storage import task

from core.model.task.db2 import MPAppTaskDB

router = APIRouter(prefix="/tasks", tags=["Tasks and subtasks"])


@router.get("/test", description="Fetch tasks for the authenticated user")
async def get_task_test(user: ProfileDB = Depends(get_user_from_token)) -> list[MPAppTaskDB]:
    return MPAppTaskDB.fetch_all()


@router.get("", description="Fetch tasks for the authenticated user")
async def get_tasks(user: ProfileDB = Depends(get_user_from_token)) -> list[DBAppTask]:
    return task.fetch_tasks_with_subtasks(user.id)


@router.post("")
async def upd_task(req: UpdTaskRequest, user: ProfileDB = Depends(get_user_from_token)) -> list[DBAppTask]:
    def check_task_in_progress(e: UpdTaskData, t: DBAppTask):
        t.subtasks.sort(key=lambda u: u.start_pln)
        try:
            next(x for x in req.data if x.status == StatusEnum.IN_PROGRESS and x.task_id == t.subtasks[0].id)
        except StopIteration:
            raise Exception("Chain Failed")

    def check_task_completed(e: UpdTaskData, t: DBAppTask):
        t.subtasks.sort(key=lambda u: u.start_pln)
        try:
            next(x for x in req.data if x.status == StatusEnum.COMPLETED and x.task_id == t.subtasks[-1].id)
        except StopIteration:
            raise Exception("Chain Failed")

    def check_sbt_in_progress(e: UpdTaskData, sbt: DBSubTask):
        main_task = next(t for t in tasks for s in t.subtasks if s.id == sbt.id)
        main_task.subtasks.sort(key=lambda u: u.start_pln)
        try:
            if sbt.id == main_task.subtasks[0].id:
                next(x for x in req.data if x.status == StatusEnum.IN_PROGRESS and x.task_id == main_task.id)
            else:
                prev_idx = main_task.subtasks.index(sbt) - 1
                next(ev for ev in req.data if ev.task_id == main_task.subtasks[prev_idx].id and
                     ev.status in (StatusEnum.COMPLETED, StatusEnum.CANCELLED))
        except StopIteration as exc:
            update_task_by_chain_failed(exc)

    def check_sbt_completed(e: UpdTaskData, sbt: DBSubTask):
        main_task = next(t for t in tasks for s in t.subtasks if s.id == sbt.id)
        main_task.subtasks.sort(key=lambda u: u.start_pln)

        try:
            if sbt.id == main_task.subtasks[-1].id:
                next(ev for ev in req.data if ev.task_id == main_task.id and ev.status == StatusEnum.COMPLETED)
            else:
                next_idx = main_task.subtasks.index(sbt) + 1
                next(ev for ev in req.data if
                     ev.task_id == main_task.subtasks[next_idx].id and ev.status == StatusEnum.IN_PROGRESS)
        except StopIteration as exc:
            update_task_by_chain_failed(exc)

    steps: dict[Union[type[DBAppTask], type[DBSubTask]], dict[
        StatusEnum, Callable[[UpdTaskData, Union[DBAppTask, DBSubTask]], None]]] = {
        DBAppTask: {
            StatusEnum.IN_PROGRESS: check_task_in_progress,
            StatusEnum.COMPLETED: check_task_completed,
        },
        DBSubTask: {
            StatusEnum.COMPLETED: check_sbt_completed,
            StatusEnum.CANCELLED: check_sbt_completed,
            StatusEnum.IN_PROGRESS: check_sbt_in_progress,
        }
    }

    tasks = task.fetch_tasks_with_subtasks(user.id)
    available_tasks_ids = [x.id for x in tasks] + [sbt.id for t in tasks for sbt in t.subtasks]

    req.data.sort(key=lambda u: u.dt)
    if len(req.data) <= 1 and len(req.data) % 2 != 0:
        raise Exception("events must be provided in pairs (start and end)")

    for idx, event in enumerate(req.data):
        # Check that as soon as one event ends, a new one with the same timestamp starts,
        # i.e. it is not allowed to send only the start of an event or only its end.
        # If InProgress is sent and the id refers to a task, an InProgress event for a subtask must also be sent.
        # If InProgress is sent and the id refers to a subtask, a Completed or Cancelled event
        # for a subtask must also be sent.

        # A chain is valid if it has the following form:
        # task (InProgress) -> subtask (InProgress);
        # subtask (Completed/Cancelled) -> subtask (InProgress);
        # subtask (Completed/Cancelled) -> task (Completed)

        # THE REVERSE CHECK MUST BE DONE AS WELL:
        # if sbt(InProgress) is sent, a task(InProgress) or an sbt(Completed/Cancelled) must exist;
        # if task(Completed) is sent, the last subtask by APP_TASK_DT_START_PLN must be sent as well

        if event.task_id not in available_tasks_ids:
            no_task_for_current_user()
        if event.status == StatusEnum.NOT_DEFINED:
            unavailable_status()
        if event.status == StatusEnum.CANCELLED and (event.error_text is None or event.error_text.strip() == ''):
            should_provide_error_text_with_cancelled_status()

        # Check whether the event refers to a subtask.
        is_sbt = event.task_id not in (x.id for x in tasks)
        task_: DBAppTask | DBSubTask = next(x for x in tasks if x.id == event.task_id) if not is_sbt else next(
            sbt for x in tasks for sbt in x.subtasks if sbt.id == event.task_id)

        try:
            steps[type(task_)][event.status](event, task_)
        except KeyError as exc:
            update_task_by_chain_failed(exc)

    for event in req.data:
        task.update_task(event, user.id)
    return task.fetch_tasks_with_subtasks(user.id)


@router.get("/planned")
async def get_planned_tasks(user: ProfileDB = Depends(get_user_from_token)) -> list[DBAppTask]:
    # TODO Rebuild this method to fetch only planned tasks
    tasks = task.fetch_tasks_with_subtasks(user_id=user.id)
    return [x for x in tasks if x.status == StatusEnum.NOT_DEFINED]


@router.get("/active")
async def get_active_task(user: ProfileDB = Depends(get_user_from_token)) -> DBAppTask | dict:
    # TODO Rebuild this method to fetch only active tasks
    tasks = task.fetch_tasks_with_subtasks(user_id=user.id)
    try:
        return next(x for x in tasks if x.status == StatusEnum.IN_PROGRESS)
    except StopIteration:
        return {}


@router.get("/completed")
async def get_completed_tasks(user: ProfileDB = Depends(get_user_from_token)) -> list[DBAppTask]:
    # TODO Rebuild this method to fetch only completed tasks
    tasks = task.fetch_tasks_with_subtasks(user_id=user.id)
    return [x for x in tasks if x.status == StatusEnum.COMPLETED]


# @router.post('/active')
# async def change_task_to_active(req: SetTaskStatusActiveRequest, user: ProfileDB = Depends(get_user_from_token)):
#     tasks = task.fetch_tasks_for_user(user_id=user.id)
#     for t in tasks:
#         if t.status == StatusEnum.IN_PROGRESS:
#             return t.model_dump(mode='json', exclude={'events', 'subtasks', 'route'})
#
#     for t in tasks:
#         if t.id == req.task_id:
#             task.set_task_to_active_state(task_id=t.id, profile_id=user.id, dt=req.dt_start)
#             t.status = StatusEnum.IN_PROGRESS
#             return t
#
#     no_task_for_current_user()


@router.get("/{task_id}/subtasks")
async def get_subtasks(user: ProfileDB = Depends(get_user_from_token), task_id: int = Path()) -> list[DBSubTask]:
    # TODO Rebuild this method to fetch only subtasks
    tasks = task.fetch_tasks_with_subtasks(user_id=user.id)
    try:
        t = next(x for x in tasks if x.id == task_id)
        return t.subtasks
    except StopIteration:
        no_task_for_current_user()


@router.post("/subtask")
async def set_status_to_subtask(req_data: SetSubtaskStatusRequest,
                                user: ProfileDB = Depends(get_user_from_token)) -> DBSubTask:
    tasks = task.fetch_tasks_with_subtasks(user_id=user.id)
    try:
        subtask = next(subtask for t in tasks for subtask in t.subtasks if subtask.id == req_data.subtask_id)

        task.set_subtask_to_completed(subtask_id=subtask.id, profile_id=user.id, dt=req_data.finished_dt)
        subtask.status = StatusEnum.COMPLETED
        return subtask

    except StopIteration:
        no_task_for_current_user()


@router.get("/{task_id}/events")
async def get_events(user: ProfileDB = Depends(get_user_from_token), task_id: int = Path()) -> list[DBEvent]:
    # TODO Rebuild this method to fetch only events
    tasks = task.fetch_tasks_with_subtasks(user_id=user.id)
    try:
        t = next(x for x in tasks if x.id == task_id)
        return [x for x in t.events if x.type == "Change"]
    except StopIteration:
        no_task_for_current_user()
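To illustrate the chain rules checked in upd_task, a minimal valid request body for POST /api/v1/tasks might look like the sketch below. It assumes UpdTaskRequest wraps a list of UpdTaskData entries with the fields the handler reads (task_id, status, dt, error_text) and that StatusEnum serializes to the same string values as the GraphQL enum; the ids and timestamps are placeholders.

# Hypothetical payload: starting a task requires starting its first subtask
# (ordered by start_pln) in the same batch.
valid_chain = {
    "data": [
        {"task_id": 101, "status": "InProgress", "dt": "2024-06-01T08:00:00"},  # the task itself
        {"task_id": 201, "status": "InProgress", "dt": "2024-06-01T08:00:00"},  # its first subtask
    ]
}

# Completing subtask 201 in a later request would in turn require either starting the
# next subtask or completing task 101 in the same batch, per the checks above.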
@ -0,0 +1,20 @@
services:
  server:
    build: .
    image: "80.249.144.200:5000/mp_driver_server:latest"
    platform: linux/amd64

    environment:
      FIREBIRD_HOST: 10.2.100.126
      FIREBIRD_USER: SYSDBA
      FIREBIRD_PASSWORD: KfhbjyjdVbif
      FIREBIRD_DATABASE: NETDBS_2
      SECRET: d6bcb44a9b50c21b25c3470a6cbfa62738863d63da9e7388683a89802d0cff18
    ports:
      - "8000:8000"
    command: "bash -c 'cd /app && uvicorn --host 0.0.0.0 --port 8000 main:app'"

  redis:
    image: redis
    ports:
      - "6379:6379"
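The compose file injects the Firebird credentials and the JWT secret through environment variables. A minimal pydantic-settings sketch of how core.config.Config could consume them is shown below; the real Config class is not part of this diff, so the field names here are assumptions.

from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    # Field names are hypothetical; pydantic-settings matches them to
    # FIREBIRD_HOST, FIREBIRD_USER, ... case-insensitively.
    firebird_host: str
    firebird_user: str
    firebird_password: str
    firebird_database: str
    secret: str


settings = Settings()  # reads the values injected by docker-compose at container start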
@ -0,0 +1,23 @@
from importlib import reload

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from fastapi.exceptions import RequestValidationError

from core.transport.rest import router
from core.transport.graphql import graphql_router
from core.errors.base import MPDriverException

app = FastAPI()
app.include_router(router)
app.include_router(graphql_router, prefix="/graphql")


@app.exception_handler(MPDriverException)
async def mpdriver_exception(request: Request, exc: MPDriverException):
    return exc.response()


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    error = exc.errors()[0]
    # Return the formatted response directly: an exception raised inside an exception
    # handler is not routed back through the handlers and would surface as a 500.
    return MPDriverException(422, error['type'], error['msg']).response()
@ -0,0 +1,51 @@
annotated-types==0.7.0
anyio==4.3.0
certifi==2024.2.2
click==8.1.7
dnspython==2.6.1
email_validator==2.1.1
fastapi==0.111.0
fastapi-cli==0.0.4
fdb==2.0.2
firebird-base==1.8.0
firebird-driver==1.10.4
future==1.0.0
graphql-core==3.2.3
h11==0.14.0
httpcore==1.0.5
httptools==0.6.1
httpx==0.27.0
idna==3.7
Jinja2==3.1.4
markdown-it-py==3.0.0
MarkupSafe==2.1.5
mdurl==0.1.2
orjson==3.10.3
phonenumbers==8.13.37
protobuf==5.26.1
pydantic==2.7.1
pydantic-extra-types==2.7.0
pydantic-settings==2.2.1
pydantic_core==2.18.2
pydotenv==0.0.7
Pygments==2.18.0
PyJWT==2.8.0
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
python-multipart==0.0.9
PyYAML==6.0.1
redis==5.0.4
rich==13.7.1
shellingham==1.5.4
six==1.16.0
sniffio==1.3.1
SQLAlchemy==2.0.30
starlette==0.37.2
strawberry-graphql==0.234.0
typer==0.12.3
typing_extensions==4.11.0
ujson==5.10.0
uvicorn==0.29.0
uvloop==0.19.0
watchfiles==0.21.0
websockets==12.0