code stringlengths 110 18.9k | apis sequence | extract_api stringlengths 123 24.4k |
|---|---|---|
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session
router = APIRouter()
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(User)
results = session.exec(statement).all()
return results
@router.post("/tasks", response_model=List[TaskRead])
async def get_tasks(user: UserQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == user.id)
results = session.exec(statement).all()
return results
@router.post("/task", response_model=TaskRead)
async def get_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == task.owner_id and Task.id == task.id)
result = session.exec(statement).one_or_none()
return result
@router.post("/create/task", response_model=StandardResponse)
async def create_task(task: TaskCreate, session: Session=Depends(get_session)):
db_task = Task.from_orm(task)
session.add(db_task)
session.commit()
session.refresh(db_task)
return StandardResponse()
@router.post("/create/user", response_model=StandardResponse)
async def create_user(user: UserCreate, session: Session=Depends(get_session)):
db_user = User.from_orm(user)
session.add(db_user)
session.commit()
session.refresh(db_user)
return StandardResponse()
@router.post("/delete/task", response_model=StandardResponse)
async def delete_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.id == task.id and Task.owner_id == task.owner_id)
result = session.exec(statement)
task = result.one_or_none()
if task:
session.delete(task)
session.commit()
return StandardResponse()
return StandardResponse(success="Failure", message="Invalid Task id or Owner id", code=400)
@router.post("/delete/user", response_model=StandardResponse)
async def delete_user(user: UserQuery, session: Session=Depends(get_session)):
statement = select(User).where(User.id == user.id)
result = session.exec(statement)
user = result.one_or_none()
if user:
session.delete(user)
session.commit()
return StandardResponse()
return StandardResponse(success="Failure", message="Invalid User id", code=400)
@router.post("/update/task", response_model=StandardResponse)
async def update_task(task: TaskRead, session: Session=Depends(get_session)):
task = Task.from_orm(task)
session.add(task)
session.commit()
session.refresh(task)
return StandardResponse() | [
"sqlmodel.select"
] | [((165, 176), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (174, 176), False, 'from fastapi import APIRouter, Depends\n'), ((271, 291), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (278, 291), False, 'from fastapi import APIRouter, Depends\n'), ((310, 322), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (316, 322), False, 'from sqlmodel import select, Session\n'), ((504, 524), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (511, 524), False, 'from fastapi import APIRouter, Depends\n'), ((753, 773), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (760, 773), False, 'from fastapi import APIRouter, Depends\n'), ((1056, 1076), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1063, 1076), False, 'from fastapi import APIRouter, Depends\n'), ((1339, 1359), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1346, 1359), False, 'from fastapi import APIRouter, Depends\n'), ((1621, 1641), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1628, 1641), False, 'from fastapi import APIRouter, Depends\n'), ((2119, 2139), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2126, 2139), False, 'from fastapi import APIRouter, Depends\n'), ((2569, 2589), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2576, 2589), False, 'from fastapi import APIRouter, Depends\n'), ((543, 555), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (549, 555), False, 'from sqlmodel import select, Session\n'), ((792, 804), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (798, 804), False, 'from sqlmodel import select, Session\n'), ((1660, 1672), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (1666, 1672), False, 'from sqlmodel import select, Session\n'), ((2158, 2170), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (2164, 2170), False, 'from sqlmodel import select, Session\n')] |
import types
from dataclasses import dataclass
from typing import Callable, List, Union
from fastapi import Depends, FastAPI, HTTPException, Query
from sqlmodel import Field, Session, SQLModel, select
# Model generator + container -------------------------------------------------------------
@dataclass
class MultipleModels:
path: str
base: SQLModel
response: SQLModel
def __post_init__(self):
self.creation: SQLModel = self.make_creator_cls()
self.table: SQLModel = self.make_table_cls()
self.update: SQLModel = self.make_updater_cls()
@staticmethod
def make_cls_name(base: type, rename_base_to: str) -> str:
"""For a class name of format ``"ClassBase"``, return a modified name in which
the substring ``"Base"`` is replaced with the string passed to ``rename_base_to``.
:param base: The base model. It's name must end with the substring ``"Base"``.
:param rename_base_to: String to replace `"Base"` with.
"""
return base.__name__.replace("Base", rename_base_to)
def make_creator_cls(self) -> SQLModel:
"""From a base model, make and return a creation model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-herocreate-data-model,
the creation model is simply a copy of the base model, with the substring ``"Base"`` in the
class name replaced by the substring ``"Create"``.
:param base: The base model.
"""
cls_name = self.make_cls_name(self.base, "Create")
return type(cls_name, (self.base,), {})
def make_updater_cls(self) -> SQLModel:
"""From a base model, make and return an update model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/update/#heroupdate-model, the update model
is the same as the base model, but with all fields annotated as ``Optional`` and all field
defaults set to ``None``.
:param base: The base model. Note that unlike in ``make_creator``, this is not the base for
inheritance (all updaters inherit directly from ``SQLModel``) but rather is used to derive
the output class name, attributes, and type annotations.
"""
cls_name = self.make_cls_name(self.base, "Update")
sig = self.base.__signature__
params = list(sig.parameters)
# Pulling type via `__signature__` rather than `__annotation__` because
# this accessor drops the `typing.Union[...]` wrapper for optional fields
annotations = {p: Union[sig.parameters[p].annotation, None] for p in params}
defaults = {p: None for p in params}
attrs = {**defaults, "__annotations__": annotations}
return type(cls_name, (SQLModel,), attrs)
def make_table_cls(self) -> SQLModel:
"""From a base model, make and return a table model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-hero-table-model,
the table model is the same as the base model, with the addition of the ``table=True`` class
creation keyword and an ``id`` attribute of type ``Optional[int]`` set to a default value of
``Field(default=None, primary_key=True)``.
:param base: The base model.
"""
cls_name = self.make_cls_name(self.base, "")
attrs = dict(id=Field(default=None, primary_key=True))
annotations = dict(id=Union[int, None])
attrs.update(dict(__annotations__=annotations))
# We are using `typing.new_class` (vs. `type`) b/c it supports the `table=True` kwarg.
# https://twitter.com/simonw/status/1430255521127305216?s=20
# https://docs.python.org/3/reference/datamodel.html#customizing-class-creation
return types.new_class(
cls_name, (self.base,), dict(table=True), lambda ns: ns.update(attrs)
)
# SQLModel database interface functions ---------------------------------------------------
def create(*, session: Session, table_cls: SQLModel, model: SQLModel) -> SQLModel:
db_model = table_cls.from_orm(model)
session.add(db_model)
session.commit()
session.refresh(db_model)
return db_model
def read_range(*, session: Session, table_cls: SQLModel, offset: int, limit: int) -> List:
return session.exec(select(table_cls).offset(offset).limit(limit)).all()
def read_single(*, session: Session, table_cls: SQLModel, id: int):
db_model = session.get(table_cls, id)
if not db_model:
raise HTTPException(status_code=404, detail=f"{table_cls.__name__} not found")
return db_model
def update(*, session: Session, table_cls: SQLModel, id: int, model: SQLModel) -> SQLModel:
db_model = session.get(table_cls, id)
if not db_model:
raise HTTPException(status_code=404, detail=f"{table_cls.__name__} not found")
model_data = model.dict(exclude_unset=True)
for key, value in model_data.items():
setattr(db_model, key, value)
session.add(db_model)
session.commit()
session.refresh(db_model)
return db_model
def delete(*, session: Session, table_cls: SQLModel, id: int) -> dict:
db_model = session.get(table_cls, id)
if not db_model:
raise HTTPException(status_code=404, detail=f"{table_cls.__name__} not found")
session.delete(db_model)
session.commit()
return {"ok": True}
# Endpoint registration -------------------------------------------------------------------
@dataclass
class RegisterEndpoints:
"""From a ``MultipleModels`` object, register create, read, update, delete (CRUD) API endpoints.
:param api: The ``FastAPI`` instance.
:param get_session: A function which yields a context-managed ``sqlmodel.Session`` object.
:param models: The ``MultipleModels`` object.
:param limit: The bounds for an API read requests.
"""
api: FastAPI
get_session: Callable
models: MultipleModels
limit: Query = Query(default=100, lte=100)
def __post_init__(self):
self.register_all()
def register_all(self):
self.register_create_endpoint()
self.register_read_range_endpoint()
self.register_read_single_endpoint()
self.register_update_endpoint()
self.register_delete_endpoint()
def register_create_endpoint(self):
@self.api.post(self.models.path, response_model=self.models.response)
def endpoint(*, session: Session = Depends(self.get_session), model: self.models.creation):
return create(session=session, table_cls=self.models.table, model=model)
def register_read_range_endpoint(self):
@self.api.get(self.models.path, response_model=List[self.models.response])
def endpoint(
*, session: Session = Depends(self.get_session), offset: int = 0, limit: int = self.limit,
):
return read_range(
session=session, table_cls=self.models.table, offset=offset, limit=limit,
)
def register_read_single_endpoint(self):
@self.api.get(self.models.path + "{id}", response_model=self.models.response)
def endpoint(*, session: Session = Depends(self.get_session), id: int):
return read_single(session=session, table_cls=self.models.table, id=id)
def register_update_endpoint(self):
@self.api.patch(self.models.path + "{id}", response_model=self.models.response)
def endpoint(
*, session: Session = Depends(self.get_session), id: int, model: self.models.update,
):
return update(session=session, table_cls=self.models.table, id=id, model=model)
def register_delete_endpoint(self):
@self.api.delete(self.models.path + "{id}")
def endpoint(*, session: Session = Depends(self.get_session), id: int):
return delete(session=session, table_cls=self.models.table, id=id)
def register_endpoints(
api: FastAPI,
get_session: Callable,
models: MultipleModels,
limit: Query = Query(default=100, lte=100)
):
_ = RegisterEndpoints(api, get_session, models, limit) | [
"sqlmodel.select",
"sqlmodel.Field"
] | [((5955, 5982), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (5960, 5982), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((7999, 8026), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (8004, 8026), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((4522, 4594), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""{table_cls.__name__} not found"""'}), "(status_code=404, detail=f'{table_cls.__name__} not found')\n", (4535, 4594), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((4786, 4858), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""{table_cls.__name__} not found"""'}), "(status_code=404, detail=f'{table_cls.__name__} not found')\n", (4799, 4858), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((5234, 5306), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""{table_cls.__name__} not found"""'}), "(status_code=404, detail=f'{table_cls.__name__} not found')\n", (5247, 5306), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((6441, 6466), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (6448, 6466), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((6767, 6792), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (6774, 6792), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((7157, 7182), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (7164, 7182), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((7463, 7488), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (7470, 7488), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((7765, 7790), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (7772, 7790), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((3369, 3406), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3374, 3406), False, 'from sqlmodel import Field, Session, SQLModel, select\n'), ((4322, 4339), 'sqlmodel.select', 'select', (['table_cls'], {}), '(table_cls)\n', (4328, 4339), False, 'from sqlmodel import Field, Session, SQLModel, select\n')] |
import datetime
from sqlmodel import Field, Relationship, SQLModel
class User(SQLModel, table=True):
__tablename__ = "users"
id: int = Field(primary_key=True)
create_at: datetime.datetime = Field(default_factory=lambda: datetime.datetime.utcnow())
user_name: str
password: str
alias: str
| [
"sqlmodel.Field"
] | [((146, 169), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (151, 169), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((235, 261), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (259, 261), False, 'import datetime\n')] |
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
method_client: TestClient = None # type: ignore
method_session: Session = None # type: ignore
example_client: TestClient = None # type: ignore
example_session: Session = None # type: ignore
def setup_method(self, _method):
BaseTest.method_session = BaseTest.create_memory_sesssion()
# BaseTest.method_session = BaseTest.create_file_sesssion()
BaseTest.method_client = TestClient(app)
BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session
def teardown_method(self, _method):
if BaseTest.method_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasnt a memory database
if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
BaseTest.method_session.close()
BaseTest.method_session = None
app.dependency_overrides.clear()
BaseTest.method_client = None
@classmethod
def create_file_sesssion(cls):
engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)
with Session(engine, autoflush=False, autocommit=False) as session:
return session
@classmethod
def create_memory_sesssion(cls):
engine = create_engine(TEST_DB_MEMORY_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)
with Session(engine, autoflush=False, autocommit=False) as session:
# Can this be "yield" instead?
return session
@classmethod
@contextlib.contextmanager
def example_session_context(cls):
"""
Used together with hypothesis: create a class-variable to be used in hypothesis. Unset once the test is over.
Session strategy doesn't seem to work as expected, nor does setup example and teardown example with sql.
"""
assert not isinstance(cls.example_session, Session)
try:
# cls.example_session = cls.create_file_sesssion()
cls.example_session = cls.create_memory_sesssion()
yield cls.example_session
finally:
if cls.example_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasnt a memory database
if cls.example_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
cls.example_session.close()
cls.example_session = None
@classmethod
@contextlib.contextmanager
def method_client_context(cls):
""" Same reasoning as above. """
# See https://sqlmodel.tiangolo.com/tutorial/fastapi/tests/#pytest-fixtures
# https://strawberry.rocks/docs/integrations/fastapi#context_getter
# app.dependency_overrides.clear()
app.dependency_overrides[get_session] = cls.method_get_session
cls.method_client = TestClient(app)
try:
yield cls.method_client
finally:
cls.method_client = None
app.dependency_overrides.clear()
@classmethod
@contextlib.contextmanager
def example_client_context(cls):
""" Same reasoning as above. """
# See https://sqlmodel.tiangolo.com/tutorial/fastapi/tests/#pytest-fixtures
# https://strawberry.rocks/docs/integrations/fastapi#context_getter
with cls.example_session_context() as _session:
app.dependency_overrides[get_session] = cls.example_get_session
cls.example_client = TestClient(app)
try:
yield cls.example_client
finally:
cls.example_client = None
app.dependency_overrides.clear()
@classmethod
def method_get_session(cls) -> Session: # type: ignore
assert isinstance(cls.method_session, Session)
assert cls.method_session.bind.url.database in {TEST_DB_FILE_PATH, TEST_DB_MEMORY_PATH} # type: ignore
yield cls.method_session
@classmethod
def example_get_session(cls) -> Session: # type: ignore
assert isinstance(cls.example_session, Session)
assert cls.example_session.bind.url.database in {TEST_DB_FILE_PATH, TEST_DB_MEMORY_PATH} # type: ignore
yield cls.example_session
@classmethod
def example_get_client(cls) -> TestClient: # type: ignore
yield cls.example_client
@pytest.fixture(name='method_client_fixture')
def method_client_fixture(self) -> TestClient: # type: ignore
with BaseTest.method_client_context() as client:
assert isinstance(client, TestClient)
yield client
@pytest.fixture(name='example_client_fixture')
def example_client_fixture(self) -> TestClient: # type: ignore
assert isinstance(BaseTest.example_client, TestClient)
yield self.example_client
@pytest.fixture(name='method_session_fixture')
def method_session_fixture(self) -> Session: # type: ignore
assert isinstance(BaseTest.method_session, Session)
yield BaseTest.method_session
@pytest.fixture(name='example_session_fixture')
def example_session_fixture(self) -> Session: # type: ignore
assert isinstance(BaseTest.example_session, Session)
yield BaseTest.example_session
@classmethod
def get_schema(cls) -> strawberry.Schema:
return schema
@pytest.fixture(name='schema_fixture')
def schema_fixture(self):
return BaseTest.get_schema()
@classmethod
def schema_strategy(cls) -> SearchStrategy:
""" Deprecated? """
return st.builds(cls.get_schema)
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((5233, 5277), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""method_client_fixture"""'}), "(name='method_client_fixture')\n", (5247, 5277), False, 'import pytest\n'), ((5483, 5528), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""example_client_fixture"""'}), "(name='example_client_fixture')\n", (5497, 5528), False, 'import pytest\n'), ((5700, 5745), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""method_session_fixture"""'}), "(name='method_session_fixture')\n", (5714, 5745), False, 'import pytest\n'), ((5915, 5961), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""example_session_fixture"""'}), "(name='example_session_fixture')\n", (5929, 5961), False, 'import pytest\n'), ((6220, 6257), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""schema_fixture"""'}), "(name='schema_fixture')\n", (6234, 6257), False, 'import pytest\n'), ((1042, 1057), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (1052, 1057), False, 'from starlette.testclient import TestClient\n'), ((1589, 1621), 'fastapi_server.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (1619, 1621), False, 'from fastapi_server.main import app\n'), ((1730, 1825), 'sqlmodel.create_engine', 'create_engine', (['TEST_DB_URL'], {'connect_args': "{'check_same_thread': False}", 'poolclass': 'StaticPool'}), "(TEST_DB_URL, connect_args={'check_same_thread': False},\n poolclass=StaticPool)\n", (1743, 1825), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((1830, 1866), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1858, 1866), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2042, 2144), 'sqlmodel.create_engine', 'create_engine', (['TEST_DB_MEMORY_URL'], {'connect_args': "{'check_same_thread': False}", 'poolclass': 'StaticPool'}), "(TEST_DB_MEMORY_URL, connect_args={'check_same_thread': False},\n poolclass=StaticPool)\n", (2055, 2144), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2149, 2185), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (2177, 2185), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((3751, 3766), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (3761, 3766), False, 'from starlette.testclient import TestClient\n'), ((6434, 6459), 'hypothesis.strategies.builds', 'st.builds', (['cls.get_schema'], {}), '(cls.get_schema)\n', (6443, 6459), True, 'import hypothesis.strategies as st\n'), ((1268, 1299), 'pathlib.Path', 'pathlib.Path', (['TEST_DB_FILE_PATH'], {}), '(TEST_DB_FILE_PATH)\n', (1280, 1299), False, 'import pathlib\n'), ((1880, 1930), 'sqlmodel.Session', 'Session', (['engine'], {'autoflush': '(False)', 'autocommit': '(False)'}), '(engine, autoflush=False, autocommit=False)\n', (1887, 1930), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2199, 2249), 'sqlmodel.Session', 'Session', (['engine'], {'autoflush': '(False)', 'autocommit': '(False)'}), '(engine, autoflush=False, autocommit=False)\n', (2206, 2249), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((3882, 3914), 'fastapi_server.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (3912, 3914), False, 'from fastapi_server.main import app\n'), ((4367, 4382), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (4377, 4382), False, 'from starlette.testclient import TestClient\n'), ((1475, 1493), 'os.remove', 'os.remove', (['db_path'], {}), '(db_path)\n', (1484, 1493), False, 'import os\n'), ((3002, 3033), 'pathlib.Path', 'pathlib.Path', (['TEST_DB_FILE_PATH'], {}), '(TEST_DB_FILE_PATH)\n', (3014, 3033), False, 'import pathlib\n'), ((4520, 4552), 'fastapi_server.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (4550, 4552), False, 'from fastapi_server.main import app\n'), ((3217, 3235), 'os.remove', 'os.remove', (['db_path'], {}), '(db_path)\n', (3226, 3235), False, 'import os\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
return f"{x}-child"
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
Parent = entities.Parent
Child = entities.Child
select_stmt = select(Parent.label)
query = BaseQuery.from_select_statement(select_stmt)
assert isinstance(query.hash, str)
pyblock = PyBlock(function=transform_func, inputs=[query["label"]], outputs=["newnames"])
load = Child.load(insert=True, label=pyblock["newnames"], type=Const("child_type"))
assert isinstance(load.hash, str)
gen = Generator(name="test", extract=query, transforms=[pyblock], loads=[load])
return gen
def test_basic_graph_sort(basic_generator: Generator):
"""Ensure a simple Query->PyBlock->Load is sorted correctly."""
graph = basic_generator._computational_graph()
assert len(graph) == 3
sorted_nodes = basic_generator._sort_graph()
query, transform, load = sorted_nodes
assert isinstance(query, BaseQuery)
assert isinstance(transform, PyBlock)
assert isinstance(load, Load)
def test_basic_graph_in_place(basic_generator: Generator):
"""Ensure that changes to the output of ._sort_graph() are in place and affect the generator as well."""
query, transform, load = basic_generator._sort_graph()
assert isinstance(load, Load)
load.run({transform.hash: {"newnames": ("1", "2")}})
assert load._output == basic_generator._sorted_loads()[0]._output
assert isinstance(query, BaseQuery)
query.outputs.append("test")
assert basic_generator.extract == query
assert isinstance(transform, PyBlock)
import_to_add = Import(lib="numpy", lib_alias="np")
transform.env.imports.append(import_to_add)
assert basic_generator.transforms[0] == transform
assert basic_generator.transforms[0].env.imports == [import_to_add]
def test_sorted_loads():
"""Shuffle around the loads and make sure sorted_loads still works."""
val = Const("test")
gp_load = entities.GrandParent.load(label=val, type=val)
u_load = entities.Parent.load(label=val, type=Const("uncle"), grand_parent_id=gp_load)
p_load = entities.Parent.load(label=val, type=val, grand_parent_id=gp_load)
c_load = entities.Child.load(label=val, type=val, parent_id=p_load, uncle_id=u_load)
loads = [gp_load, c_load, p_load, u_load]
for _ in range(10):
shuffle(loads)
gen = Generator(name="test", loads=loads)
assert gen._sorted_loads() == [
gp_load,
*sorted((u_load, p_load), key=lambda x: x.hash),
c_load,
]
@pytest.mark.skip
def test_no_extractor(sql_engine: Engine, raw_connection):
"""Shuffle around the loads and make sure sorted_loads still works."""
entities.Parent.metadata.create_all(sql_engine)
pyblock = PyBlock(function=transform_func, inputs=[Const("test")], outputs=["newnames"])
p_load = entities.GrandParent.load(insert=True, label=pyblock["newnames"], type=Const("gp_type"))
gen = Generator(name="test", transforms=[pyblock], loads=[p_load])
gen.run(sql_engine)
with Session(sql_engine) as session:
session = cast(Session, session)
statement = select(entities.GrandParent).where(entities.GrandParent.label == "test-child")
result = session.exec(statement)
assert result.one()
@pytest.mark.database
def test_dumb_extractor(connection, sql_engine, recreate_meta):
class User(Entity, table=True):
__identifying__ = {"label"}
label: Optional[str]
new_label: Optional[str] = None
User.metadata.create_all(connection)
num_users = 100
sess = Session(connection)
users = [User(label=f"user_{i}") for i in range(num_users)]
user_le = User._get_load_entity()
for user in users:
user.id = user_le._get_hash(user.dict())
sess.add(user)
count = sess.exec(select(func.count(User.id))).one()
assert count == num_users
connection.commit()
statement = select(User.id, User.label)
query = BaseQuery.from_select_statement(statement)
assert query.length(connection=connection) == num_users
pyblock = PyBlock(function=transform_func, inputs=[query["label"]])
u_load = User.load(user=query["id"], new_label=pyblock["out"])
run = RunEntity()
sess.add(run)
sess.commit()
sess.refresh(run)
gen = Generator(
name="test",
extract=query,
transforms=[pyblock],
loads=[u_load],
batch_size=10000,
)
connection.commit()
gen.run(sql_engine, sql_engine, run_id=run.id, ordering=0)
| [
"sqlmodel.func.count",
"sqlmodel.select",
"sqlmodel.Session"
] | [((1168, 1200), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1182, 1200), False, 'import pytest\n'), ((1311, 1331), 'sqlmodel.select', 'select', (['Parent.label'], {}), '(Parent.label)\n', (1317, 1331), False, 'from sqlmodel import Session, func, select\n'), ((1344, 1388), 'dbgen.core.node.query.BaseQuery.from_select_statement', 'BaseQuery.from_select_statement', (['select_stmt'], {}), '(select_stmt)\n', (1375, 1388), False, 'from dbgen.core.node.query import BaseQuery\n'), ((1442, 1521), 'dbgen.core.node.transforms.PyBlock', 'PyBlock', ([], {'function': 'transform_func', 'inputs': "[query['label']]", 'outputs': "['newnames']"}), "(function=transform_func, inputs=[query['label']], outputs=['newnames'])\n", (1449, 1521), False, 'from dbgen.core.node.transforms import PyBlock\n'), ((1659, 1732), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'extract': 'query', 'transforms': '[pyblock]', 'loads': '[load]'}), "(name='test', extract=query, transforms=[pyblock], loads=[load])\n", (1668, 1732), False, 'from dbgen.core.generator import Generator\n'), ((2727, 2762), 'dbgen.core.func.Import', 'Import', ([], {'lib': '"""numpy"""', 'lib_alias': '"""np"""'}), "(lib='numpy', lib_alias='np')\n", (2733, 2762), False, 'from dbgen.core.func import Import\n'), ((3049, 3062), 'dbgen.core.args.Const', 'Const', (['"""test"""'], {}), "('test')\n", (3054, 3062), False, 'from dbgen.core.args import Const\n'), ((3077, 3123), 'tests.example.entities.GrandParent.load', 'entities.GrandParent.load', ([], {'label': 'val', 'type': 'val'}), '(label=val, type=val)\n', (3102, 3123), True, 'import tests.example.entities as entities\n'), ((3228, 3294), 'tests.example.entities.Parent.load', 'entities.Parent.load', ([], {'label': 'val', 'type': 'val', 'grand_parent_id': 'gp_load'}), '(label=val, type=val, grand_parent_id=gp_load)\n', (3248, 3294), True, 'import tests.example.entities as entities\n'), ((3308, 3383), 'tests.example.entities.Child.load', 'entities.Child.load', ([], {'label': 'val', 'type': 'val', 'parent_id': 'p_load', 'uncle_id': 'u_load'}), '(label=val, type=val, parent_id=p_load, uncle_id=u_load)\n', (3327, 3383), True, 'import tests.example.entities as entities\n'), ((3837, 3884), 'tests.example.entities.Parent.metadata.create_all', 'entities.Parent.metadata.create_all', (['sql_engine'], {}), '(sql_engine)\n', (3872, 3884), True, 'import tests.example.entities as entities\n'), ((4090, 4150), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'transforms': '[pyblock]', 'loads': '[p_load]'}), "(name='test', transforms=[pyblock], loads=[p_load])\n", (4099, 4150), False, 'from dbgen.core.generator import Generator\n'), ((4728, 4747), 'sqlmodel.Session', 'Session', (['connection'], {}), '(connection)\n', (4735, 4747), False, 'from sqlmodel import Session, func, select\n'), ((5072, 5099), 'sqlmodel.select', 'select', (['User.id', 'User.label'], {}), '(User.id, User.label)\n', (5078, 5099), False, 'from sqlmodel import Session, func, select\n'), ((5112, 5154), 'dbgen.core.node.query.BaseQuery.from_select_statement', 'BaseQuery.from_select_statement', (['statement'], {}), '(statement)\n', (5143, 5154), False, 'from dbgen.core.node.query import BaseQuery\n'), ((5229, 5286), 'dbgen.core.node.transforms.PyBlock', 'PyBlock', ([], {'function': 'transform_func', 'inputs': "[query['label']]"}), "(function=transform_func, inputs=[query['label']])\n", (5236, 5286), False, 'from dbgen.core.node.transforms import PyBlock\n'), ((5364, 5375), 'dbgen.core.metadata.RunEntity', 'RunEntity', ([], {}), '()\n', (5373, 5375), False, 'from dbgen.core.metadata import RunEntity\n'), ((5444, 5541), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'extract': 'query', 'transforms': '[pyblock]', 'loads': '[u_load]', 'batch_size': '(10000)'}), "(name='test', extract=query, transforms=[pyblock], loads=[u_load],\n batch_size=10000)\n", (5453, 5541), False, 'from dbgen.core.generator import Generator\n'), ((3462, 3476), 'random.shuffle', 'shuffle', (['loads'], {}), '(loads)\n', (3469, 3476), False, 'from random import shuffle\n'), ((3491, 3526), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'loads': 'loads'}), "(name='test', loads=loads)\n", (3500, 3526), False, 'from dbgen.core.generator import Generator\n'), ((4185, 4204), 'sqlmodel.Session', 'Session', (['sql_engine'], {}), '(sql_engine)\n', (4192, 4204), False, 'from sqlmodel import Session, func, select\n'), ((4235, 4257), 'typing.cast', 'cast', (['Session', 'session'], {}), '(Session, session)\n', (4239, 4257), False, 'from typing import Optional, cast\n'), ((1590, 1609), 'dbgen.core.args.Const', 'Const', (['"""child_type"""'], {}), "('child_type')\n", (1595, 1609), False, 'from dbgen.core.args import Const\n'), ((3174, 3188), 'dbgen.core.args.Const', 'Const', (['"""uncle"""'], {}), "('uncle')\n", (3179, 3188), False, 'from dbgen.core.args import Const\n'), ((4062, 4078), 'dbgen.core.args.Const', 'Const', (['"""gp_type"""'], {}), "('gp_type')\n", (4067, 4078), False, 'from dbgen.core.args import Const\n'), ((3940, 3953), 'dbgen.core.args.Const', 'Const', (['"""test"""'], {}), "('test')\n", (3945, 3953), False, 'from dbgen.core.args import Const\n'), ((4278, 4306), 'sqlmodel.select', 'select', (['entities.GrandParent'], {}), '(entities.GrandParent)\n', (4284, 4306), False, 'from sqlmodel import Session, func, select\n'), ((4974, 4993), 'sqlmodel.func.count', 'func.count', (['User.id'], {}), '(User.id)\n', (4984, 4993), False, 'from sqlmodel import Session, func, select\n')] |
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from sqlmodel import Session, select
from sqlalchemy.exc import IntegrityError
from typing import List
import datetime as dt
from app.src.common.security import get_current_user
from app.src.common.utils import profiling_api
from app.src.models.app_user import AppUser
from app.src.models.product_type import (
ProductType,
ProductTypeRead,
ProductTypeCreate,
ProductTypeUpdate,
)
from app.src.db.engine import get_session
router = APIRouter()
# A scopo didattico inserita la validazione di producttype_id con Path:
# - non potrà essere < 1
async def get_producttype_or_404(
*,
session: Session = Depends(get_session),
producttype_id: int = Path(..., ge=1),
current_user: AppUser = Depends(get_current_user),
):
start_time = dt.datetime.now()
try:
db_pt = session.get(ProductType, producttype_id)
if db_pt:
return {
"db_pt": db_pt,
"username": current_user.username,
"start_time": start_time,
}
else:
raise HTTPException(status_code=404, detail="Product type not found")
except KeyError:
raise HTTPException(status_code=400, detail="Product type not found")
@router.get("/", response_model=List[ProductTypeRead])
# lte -> less than or equal
async def read_product_types(
*,
session: Session = Depends(get_session),
offset: int = 0,
limit: int = Query(default=100, lte=100),
current_user: AppUser = Depends(get_current_user),
):
"""
Get all the existing product types
"""
start_time = dt.datetime.now()
product_types = session.exec(select(ProductType).offset(offset).limit(limit)).all()
profiling_api("ProductType:get:all", start_time, current_user.username)
return product_types
@router.get("/{producttype_id}", response_model=ProductTypeRead)
async def read_product_type(
*, producttype_id: int, db_pt: ProductType = Depends(get_producttype_or_404)
):
"""
Get the product type by id
"""
profiling_api(
f"ProductType:read:by_id:{producttype_id}",
db_pt["start_time"],
db_pt["username"],
)
return db_pt["db_pt"]
@router.post("/", response_model=ProductTypeRead)
async def create_product_type(
*,
session: Session = Depends(get_session),
product_type: ProductTypeCreate,
current_user: AppUser = Depends(get_current_user),
):
"""
Create a product type
"""
start_time = dt.datetime.now()
try:
db_pt = ProductType.from_orm(product_type)
session.add(db_pt)
session.commit()
session.refresh(db_pt)
except IntegrityError:
raise HTTPException(
status_code=404, detail="Impossible to create product type with same name"
)
profiling_api("ProductType:insert:single", start_time, current_user.username)
return db_pt
@router.patch("/{producttype_id}", response_model=ProductTypeRead)
async def update_product_type(
*,
producttype_id: int,
session: Session = Depends(get_session),
pt: ProductTypeUpdate,
db_pt: ProductType = Depends(get_producttype_or_404),
):
"""
Modify a product type
"""
# exclude_unset=True: it would only include the values
# that were sent by the client
existing_pt = db_pt["db_pt"]
pt_data = pt.dict(exclude_unset=True)
for key, value in pt_data.items():
setattr(existing_pt, key, value)
session.add(existing_pt)
session.commit()
session.refresh(existing_pt)
profiling_api(
f"ProductType:update:by_id:{producttype_id}",
db_pt["start_time"],
db_pt["username"],
)
return existing_pt
@router.delete("/{producttype_id}")
async def delete_product_type(
*,
producttype_id: int,
session: Session = Depends(get_session),
db_pt: ProductType = Depends(get_producttype_or_404),
):
"""
Delete and remove an existing product type by id; it must be >= 1
"""
existing_db_pt = db_pt["db_pt"]
session.delete(existing_db_pt)
session.commit()
profiling_api(
f"ProductType:delete:by_id:{producttype_id}",
db_pt["start_time"],
db_pt["username"],
)
return {"ok": True}
| [
"sqlmodel.select"
] | [((518, 529), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (527, 529), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((693, 713), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (700, 713), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((741, 756), 'fastapi.Path', 'Path', (['...'], {'ge': '(1)'}), '(..., ge=1)\n', (745, 756), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((786, 811), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (793, 811), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((833, 850), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (848, 850), True, 'import datetime as dt\n'), ((1435, 1455), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1442, 1455), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1495, 1522), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (1500, 1522), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1552, 1577), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (1559, 1577), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1654, 1671), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (1669, 1671), True, 'import datetime as dt\n'), ((1764, 1835), 'app.src.common.utils.profiling_api', 'profiling_api', (['"""ProductType:get:all"""', 'start_time', 'current_user.username'], {}), "('ProductType:get:all', start_time, current_user.username)\n", (1777, 1835), False, 'from app.src.common.utils import profiling_api\n'), ((2006, 2037), 'fastapi.Depends', 'Depends', (['get_producttype_or_404'], {}), '(get_producttype_or_404)\n', (2013, 2037), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((2092, 2194), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""ProductType:read:by_id:{producttype_id}"""', "db_pt['start_time']", "db_pt['username']"], {}), "(f'ProductType:read:by_id:{producttype_id}', db_pt[\n 'start_time'], db_pt['username'])\n", (2105, 2194), False, 'from app.src.common.utils import profiling_api\n'), ((2360, 2380), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2367, 2380), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((2447, 2472), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (2454, 2472), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((2536, 2553), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (2551, 2553), True, 'import datetime as dt\n'), ((2854, 2931), 'app.src.common.utils.profiling_api', 'profiling_api', (['"""ProductType:insert:single"""', 'start_time', 'current_user.username'], {}), "('ProductType:insert:single', start_time, current_user.username)\n", (2867, 2931), False, 'from app.src.common.utils import profiling_api\n'), ((3104, 3124), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3111, 3124), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3178, 3209), 'fastapi.Depends', 'Depends', (['get_producttype_or_404'], {}), '(get_producttype_or_404)\n', (3185, 3209), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3592, 3696), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""ProductType:update:by_id:{producttype_id}"""', "db_pt['start_time']", "db_pt['username']"], {}), "(f'ProductType:update:by_id:{producttype_id}', db_pt[\n 'start_time'], db_pt['username'])\n", (3605, 3696), False, 'from app.src.common.utils import profiling_api\n'), ((3870, 3890), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3877, 3890), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3917, 3948), 'fastapi.Depends', 'Depends', (['get_producttype_or_404'], {}), '(get_producttype_or_404)\n', (3924, 3948), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((4135, 4239), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""ProductType:delete:by_id:{producttype_id}"""', "db_pt['start_time']", "db_pt['username']"], {}), "(f'ProductType:delete:by_id:{producttype_id}', db_pt[\n 'start_time'], db_pt['username'])\n", (4148, 4239), False, 'from app.src.common.utils import profiling_api\n'), ((2579, 2613), 'app.src.models.product_type.ProductType.from_orm', 'ProductType.from_orm', (['product_type'], {}), '(product_type)\n', (2599, 2613), False, 'from app.src.models.product_type import ProductType, ProductTypeRead, ProductTypeCreate, ProductTypeUpdate\n'), ((1127, 1190), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Product type not found"""'}), "(status_code=404, detail='Product type not found')\n", (1140, 1190), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1226, 1289), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""Product type not found"""'}), "(status_code=400, detail='Product type not found')\n", (1239, 1289), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((2738, 2832), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Impossible to create product type with same name"""'}), "(status_code=404, detail=\n 'Impossible to create product type with same name')\n", (2751, 2832), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1705, 1724), 'sqlmodel.select', 'select', (['ProductType'], {}), '(ProductType)\n', (1711, 1724), False, 'from sqlmodel import Session, select\n')] |
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistorySummaryTreatmsummaryConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id_order: int
history_id_conference: int
summary_treatmsummary_conference_id: int
state: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class SummaryTreatmsummaryConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
problem: str
question: str
summary_plan: str
surgeon_summary: str
pre_operation_abg: bool
post_operation_abg: bool
pre_operation_redo_abg: bool
pre_operation_jaw_surgery: bool
pre_operation_computing_design: bool
pre_operation_3d_print: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class SummaryTreatmsummaryConferenceDoctorMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
summary_treatmsummary_conference_id: int
doctor_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
@router.post("/history_summary_conference", response_model=HistorySummaryTreatmsummaryConference)
async def create_history_summary_conference(history_summary_conference: HistorySummaryTreatmsummaryConference, session: AsyncSession = Depends(get_session)):
session.add(history_summary_conference)
await session.commit()
await session.refresh(history_summary_conference)
return history_summary_conference
@router.post("/summary_conference", response_model=SummaryTreatmsummaryConference)
async def create_summary_conference(summary_conference: SummaryTreatmsummaryConference, session: AsyncSession = Depends(get_session)):
session.add(summary_conference)
await session.commit()
await session.refresh(summary_conference)
return summary_conference
@router.get("/history_summary_conference/{id}", response_model=HistorySummaryTreatmsummaryConference)
async def get_history_summary_conference(id: int, session: AsyncSession = Depends(get_session)):
history_summary_conferences = await session.execute(select(HistorySummaryTreatmsummaryConference).where(HistorySummaryTreatmsummaryConference.id == id))
history_summary_conference = history_summary_conferences.scalars().first()
return history_summary_conference
@router.put("/history_summary_conference/{id}", response_model=HistorySummaryTreatmsummaryConference)
async def update_history_summary_conference(id: int, session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_summary_conference/{id}")
async def delete_history_summary_conference(session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_summary_conference/{id}")
async def delete_summary_conference(session: AsyncSession = Depends(get_session)):
return None | [
"sqlmodel.Field"
] | [((256, 267), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (265, 267), False, 'from fastapi import APIRouter, Depends\n'), ((361, 398), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (366, 398), False, 'from sqlmodel import Field, SQLModel\n'), ((709, 746), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (714, 746), False, 'from sqlmodel import Field, SQLModel\n'), ((1231, 1268), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1236, 1268), False, 'from sqlmodel import Field, SQLModel\n'), ((1675, 1695), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1682, 1695), False, 'from fastapi import APIRouter, Depends\n'), ((2058, 2078), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2065, 2078), False, 'from fastapi import APIRouter, Depends\n'), ((2398, 2418), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2405, 2418), False, 'from fastapi import APIRouter, Depends\n'), ((2877, 2897), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2884, 2897), False, 'from fastapi import APIRouter, Depends\n'), ((3037, 3057), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3044, 3057), False, 'from fastapi import APIRouter, Depends\n'), ((3189, 3209), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3196, 3209), False, 'from fastapi import APIRouter, Depends\n'), ((2477, 2522), 'sqlalchemy.select', 'select', (['HistorySummaryTreatmsummaryConference'], {}), '(HistorySummaryTreatmsummaryConference)\n', (2483, 2522), False, 'from sqlalchemy import select\n')] |
from sqlalchemy import inspect
from sqlalchemy.engine.reflection import Inspector
from sqlmodel import create_engine
def test_create_db_and_table(clear_sqlmodel):
from docs_src.tutorial.create_db_and_table import tutorial003 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)
mod.create_db_and_tables()
insp: Inspector = inspect(mod.engine)
assert insp.has_table(str(mod.Hero.__tablename__))
| [
"sqlmodel.create_engine"
] | [((289, 318), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (302, 318), False, 'from sqlmodel import create_engine\n'), ((323, 349), 'docs_src.tutorial.create_db_and_table.tutorial003.create_db_and_tables', 'mod.create_db_and_tables', ([], {}), '()\n', (347, 349), True, 'from docs_src.tutorial.create_db_and_table import tutorial003 as mod\n'), ((372, 391), 'sqlalchemy.inspect', 'inspect', (['mod.engine'], {}), '(mod.engine)\n', (379, 391), False, 'from sqlalchemy import inspect\n')] |
from sqlmodel import SQLModel, Field
import uuid as uuid_pkg
from typing import Optional
class FilesBase(SQLModel):
name: str
class Files(FilesBase, table=True):
id: int = Field(default=None, primary_key=True)
uuid: uuid_pkg.UUID = Field(
default_factory=uuid_pkg.uuid4,
index=True,
nullable=False,
)
count_download: int = Field(default=0)
class FilesCreate(FilesBase):
pass
| [
"sqlmodel.Field"
] | [((184, 221), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (189, 221), False, 'from sqlmodel import SQLModel, Field\n'), ((248, 313), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'uuid_pkg.uuid4', 'index': '(True)', 'nullable': '(False)'}), '(default_factory=uuid_pkg.uuid4, index=True, nullable=False)\n', (253, 313), False, 'from sqlmodel import SQLModel, Field\n'), ((371, 387), 'sqlmodel.Field', 'Field', ([], {'default': '(0)'}), '(default=0)\n', (376, 387), False, 'from sqlmodel import SQLModel, Field\n')] |
from sqlmodel import Session, select
from database import UserRead, PostCreate, UserCreate, User, Post
from typing import Union
from datetime import datetime
def create_object(
session: Session,
model: Union[User, Post],
request_data: Union[UserCreate, PostCreate],
user: UserRead = None,
isPost: bool = False,
) -> dict:
if isPost:
setattr(request_data, "author_name", user.name)
db_object = model.from_orm(request_data)
if isPost:
setattr(db_object, "updated_at", datetime.utcnow())
setattr(db_object, "created_at", datetime.utcnow())
session.add(db_object)
session.commit()
session.refresh(db_object)
return db_object
def get_objects(
session: Session, model: Union[User, Post], offset: int, limit: int
) -> list:
objects = session.exec(select(model).offset(offset).limit(limit)).all()
return objects
def get_object(
session: Session,
model: Union[User, Post],
criteria: Union[int, str],
isUser: bool = False,
) -> Union[User, Post]:
if isUser:
statement = select(model).where(model.email == criteria)
results = session.exec(statement)
user = results.first()
if not user:
raise Exception("User not found")
return user
post = session.get(Post, criteria)
return post
def patch_object(
session: Session,
old_object: Union[User, Post],
request_data: dict,
isPost: bool = False,
) -> Union[User, Post]:
for key, value in request_data.items():
setattr(old_object, key, value)
if isPost:
setattr(old_object, "updated_at", datetime.utcnow())
session.add(old_object)
session.commit()
session.refresh(old_object)
return old_object
def delete_object(session: Session, object_: Union[User, Post]) -> dict:
session.delete(object_)
session.commit()
return {"ok": True}
| [
"sqlmodel.select"
] | [((574, 591), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (589, 591), False, 'from datetime import datetime\n'), ((518, 535), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (533, 535), False, 'from datetime import datetime\n'), ((1628, 1645), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1643, 1645), False, 'from datetime import datetime\n'), ((1076, 1089), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (1082, 1089), False, 'from sqlmodel import Session, select\n'), ((822, 835), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (828, 835), False, 'from sqlmodel import Session, select\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel, Relationship, Column
from sqlalchemy_utils.types import TSVectorType
from .db import stand_by_models, stand_by_db
stand_by_models()
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
content: str
age: Optional[int] = None
search_vector: Optional[str] = Field(
sa_column=Column(
TSVectorType(
"name",
"content",
# weights={"name": "A", "secret_name": "B", "age": "D"},
)
)
)
class Parents(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
# children = orm.relationship("Children")
class Children(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
parent_id: Optional[int] = Field(default=None, foreign_key="parents.id")
stand_by_db()
| [
"sqlmodel.Field"
] | [((260, 297), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (265, 297), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((670, 707), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (675, 707), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((832, 869), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (837, 869), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((915, 960), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""parents.id"""'}), "(default=None, foreign_key='parents.id')\n", (920, 960), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((439, 470), 'sqlalchemy_utils.types.TSVectorType', 'TSVectorType', (['"""name"""', '"""content"""'], {}), "('name', 'content')\n", (451, 470), False, 'from sqlalchemy_utils.types import TSVectorType\n')] |
from sqlmodel import SQLModel, Relationship
from typing import List
from app.models.base_uuid_model import BaseUUIDModel
class RoleBase(SQLModel):
name: str
description: str
class Role(BaseUUIDModel, RoleBase, table=True):
users: List["User"] = Relationship(back_populates="role", sa_relationship_kwargs={"lazy": "selectin"})
| [
"sqlmodel.Relationship"
] | [((263, 348), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""role"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='role', sa_relationship_kwargs={'lazy': 'selectin'}\n )\n", (275, 348), False, 'from sqlmodel import SQLModel, Relationship\n')] |
from typing import TYPE_CHECKING, Optional
from uuid import UUID
from sqlalchemy.orm import joinedload
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import ORMUtils
from joj.horse.utils.base import is_uuid
if TYPE_CHECKING:
from joj.horse.models import Problem, ProblemSet
class ProblemProblemSetLink(ORMUtils, table=True): # type: ignore[call-arg]
__tablename__ = "problem_problem_set_links"
problem_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("problems.id", ondelete="CASCADE"), primary_key=True
),
)
problem_set_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("problem_sets.id", ondelete="CASCADE"), primary_key=True
),
)
position: int = Field(
index=True, nullable=False, sa_column_kwargs={"server_default": "0"}
)
problem: "Problem" = Relationship(back_populates="problem_problem_set_links")
problem_set: "ProblemSet" = Relationship(back_populates="problem_problem_set_links")
@classmethod
async def find_by_problem_set_and_problem(
cls, problem_set: str, problem: str
) -> Optional["ProblemProblemSetLink"]:
# this is buggy, do not use!
# not sure how much it's better than three queries (maybe even worse)
from joj.horse import models
statement = cls.sql_select().options(
joinedload(cls.problem_set, innerjoin=True),
joinedload(cls.problem, innerjoin=True),
)
if is_uuid(problem_set):
statement = statement.where(cls.problem_set_id == problem_set)
else:
statement = statement.where(models.ProblemSet.url == problem_set)
if is_uuid(problem):
statement = statement.where(cls.problem_id == problem)
else:
statement = statement.where(models.Problem.url == problem)
from loguru import logger
logger.info(statement)
result = await cls.session_exec(statement)
logger.info(result.all())
return result.one_or_none()
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((856, 931), 'sqlmodel.Field', 'Field', ([], {'index': '(True)', 'nullable': '(False)', 'sa_column_kwargs': "{'server_default': '0'}"}), "(index=True, nullable=False, sa_column_kwargs={'server_default': '0'})\n", (861, 931), False, 'from sqlmodel import Field, Relationship\n'), ((972, 1028), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_problem_set_links"""'}), "(back_populates='problem_problem_set_links')\n", (984, 1028), False, 'from sqlmodel import Field, Relationship\n'), ((1061, 1117), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_problem_set_links"""'}), "(back_populates='problem_problem_set_links')\n", (1073, 1117), False, 'from sqlmodel import Field, Relationship\n'), ((1602, 1622), 'joj.horse.utils.base.is_uuid', 'is_uuid', (['problem_set'], {}), '(problem_set)\n', (1609, 1622), False, 'from joj.horse.utils.base import is_uuid\n'), ((1802, 1818), 'joj.horse.utils.base.is_uuid', 'is_uuid', (['problem'], {}), '(problem)\n', (1809, 1818), False, 'from joj.horse.utils.base import is_uuid\n'), ((2015, 2037), 'loguru.logger.info', 'logger.info', (['statement'], {}), '(statement)\n', (2026, 2037), False, 'from loguru import logger\n'), ((1483, 1526), 'sqlalchemy.orm.joinedload', 'joinedload', (['cls.problem_set'], {'innerjoin': '(True)'}), '(cls.problem_set, innerjoin=True)\n', (1493, 1526), False, 'from sqlalchemy.orm import joinedload\n'), ((1540, 1579), 'sqlalchemy.orm.joinedload', 'joinedload', (['cls.problem'], {'innerjoin': '(True)'}), '(cls.problem, innerjoin=True)\n', (1550, 1579), False, 'from sqlalchemy.orm import joinedload\n'), ((592, 637), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""problems.id"""'], {'ondelete': '"""CASCADE"""'}), "('problems.id', ondelete='CASCADE')\n", (602, 637), False, 'from sqlalchemy.schema import Column, ForeignKey\n'), ((751, 800), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""problem_sets.id"""'], {'ondelete': '"""CASCADE"""'}), "('problem_sets.id', ondelete='CASCADE')\n", (761, 800), False, 'from sqlalchemy.schema import Column, ForeignKey\n')] |
from sqlmodel import SQLModel, Field
from typing import Optional, List
from pydantic import validator
# --- model ---
class User(SQLModel, table=True):
id: Optional[int] = Field(None, primary_key=True)
username: str
password: str
# --- serializers ---
class UserOut(SQLModel):
username: str
class UserIn(SQLModel):
username: str
password: str
confirm_password: str
# @validator(confirm_password)
# def validate_password(cls, v, values):
# if v and v != values['password']:
# raise ValueError("aaaa")
# return v
UserList = List[UserOut] | [
"sqlmodel.Field"
] | [((178, 207), 'sqlmodel.Field', 'Field', (['None'], {'primary_key': '(True)'}), '(None, primary_key=True)\n', (183, 207), False, 'from sqlmodel import SQLModel, Field\n')] |
from datetime import datetime
try:
from humps.main import depascalize
from sqlalchemy import Column, DateTime
from sqlalchemy.orm.decl_api import declared_attr
from sqlmodel import Field, SQLModel
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Model(SQLModel):
"""
Abstract model providing `id`, `date_created` and `date_updated` fields.
And also automatic table naming to `snake_case`.
"""
id: int = Field(primary_key=True)
date_created: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow))
date_updated: datetime = Field(sa_column=Column(DateTime, onupdate=datetime.utcnow))
@declared_attr
def __tablename__(cls):
"""
Convert Pascal class name style to `snake_case`
"""
return depascalize(cls.__name__)
| [
"sqlmodel.Field"
] | [((542, 565), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (547, 565), False, 'from sqlmodel import Field, SQLModel\n'), ((886, 911), 'humps.main.depascalize', 'depascalize', (['cls.__name__'], {}), '(cls.__name__)\n', (897, 911), False, 'from humps.main import depascalize\n'), ((611, 652), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (617, 652), False, 'from sqlalchemy import Column, DateTime\n'), ((699, 741), 'sqlalchemy.Column', 'Column', (['DateTime'], {'onupdate': 'datetime.utcnow'}), '(DateTime, onupdate=datetime.utcnow)\n', (705, 741), False, 'from sqlalchemy import Column, DateTime\n')] |
import logging
import os
import secrets
import aioredis
import boto3
import pytest
import redis as pyredis
from fastapi.testclient import TestClient
from sqlalchemy import text
from sqlalchemy_utils import create_database, database_exists, drop_database
from sqlmodel import Session, create_engine
from iris.agent.settings import AgentSettings
from iris.api.authentication import (
current_active_user,
current_superuser,
current_verified_user,
)
from iris.api.main import app
from iris.api.settings import APISettings
from iris.commons.clickhouse import ClickHouse
from iris.commons.dependencies import get_settings
from iris.commons.models.base import Base
from iris.commons.redis import Redis
from iris.commons.settings import CommonSettings
from iris.commons.storage import Storage
from iris.commons.utils import json_serializer
from iris.worker import WorkerSettings
pytest.register_assert_rewrite("tests.assertions")
pytest_plugins = ["tests.fixtures.models", "tests.fixtures.storage"]
def should_cleanup():
return os.environ.get("IRIS_TEST_CLEANUP", "") != "0"
@pytest.fixture
def logger():
return logging.getLogger(__name__)
@pytest.fixture
def settings():
namespace = secrets.token_hex(nbytes=4)
print(f"@{namespace}", end=" ")
# Redis has 16 databases by default, we use the last one for testing.
return CommonSettings(
CLICKHOUSE_PUBLIC_USER="public",
CLICKHOUSE_DATABASE="iris_test",
DATABASE_URL=f"postgresql://iris:iris@postgres.docker.localhost/iris-test-{namespace}",
S3_PREFIX=f"iris-test-{namespace}",
S3_PUBLIC_RESOURCES=["arn:aws:s3:::test-public-exports/*"],
REDIS_NAMESPACE=f"iris-test-{namespace}",
REDIS_URL="redis://default:iris@redis.docker.localhost?db=15",
RETRY_TIMEOUT=-1,
)
@pytest.fixture
def api_settings(settings):
return APISettings(
API_CORS_ALLOW_ORIGIN="https://example.org,http://localhost:8000",
**settings.dict(),
)
@pytest.fixture
def agent_settings(settings, tmp_path):
return AgentSettings(
**settings.dict(),
AGENT_CARACAL_SNIFFER_WAIT_TIME=1,
AGENT_MIN_TTL=0,
AGENT_RESULTS_DIR_PATH=tmp_path / "agent_results",
AGENT_TARGETS_DIR_PATH=tmp_path / "agent_targets",
)
@pytest.fixture
def worker_settings(settings, tmp_path):
return WorkerSettings(
**settings.dict(),
WORKER_RESULTS_DIR_PATH=tmp_path / "worker_results",
WORKER_MAX_OPEN_FILES=128,
)
@pytest.fixture
def clickhouse(settings, logger):
return ClickHouse(settings, logger)
@pytest.fixture
def engine(settings):
engine = create_engine(settings.DATABASE_URL, json_serializer=json_serializer)
if not database_exists(engine.url):
create_database(engine.url)
Base.metadata.create_all(engine)
return engine
@pytest.fixture
async def redis(settings, logger):
client = aioredis.from_url(settings.REDIS_URL, decode_responses=True)
yield Redis(client, settings, logger)
await client.close()
@pytest.fixture
def session(engine):
with Session(engine) as session:
yield session
@pytest.fixture
def storage(settings, logger):
return Storage(settings, logger)
@pytest.fixture
def make_client(engine, api_settings):
def _make_client(user=None):
if user and user.is_active:
app.dependency_overrides[current_active_user] = lambda: user
if user and user.is_active and user.is_verified:
app.dependency_overrides[current_verified_user] = lambda: user
if user and user.is_active and user.is_verified and user.is_superuser:
app.dependency_overrides[current_superuser] = lambda: user
app.dependency_overrides[get_settings] = lambda: api_settings
return TestClient(app)
yield _make_client
app.dependency_overrides.clear()
@pytest.fixture(autouse=True, scope="session")
def cleanup_redis():
yield
if should_cleanup():
redis_ = pyredis.from_url("redis://default:iris@redis.docker.localhost?db=15")
redis_.flushdb()
redis_.close()
@pytest.fixture(autouse=True, scope="session")
def cleanup_database():
yield
if should_cleanup():
# TODO: Cleanup/simplify this code.
engine = create_engine("postgresql://iris:iris@postgres.docker.localhost")
with engine.connect() as conn:
databases = conn.execute(
text(
"""
SELECT datname
FROM pg_database
WHERE datistemplate = false AND datname LIKE 'iris-test-%'
"""
)
).all()
for (database,) in databases:
drop_database(
f"postgresql://iris:iris@postgres.docker.localhost/{database}"
)
@pytest.fixture(autouse=True, scope="session")
def cleanup_s3():
yield
if should_cleanup():
s3 = boto3.client(
"s3",
aws_access_key_id="minioadmin",
aws_secret_access_key="minioadmin",
endpoint_url="http://minio.docker.localhost",
)
buckets = s3.list_buckets()
buckets = [x["Name"] for x in buckets["Buckets"]]
for bucket in buckets:
if "test-" in bucket:
objects = s3.list_objects_v2(Bucket=bucket)
if objects["KeyCount"]:
objects = [{"Key": x["Key"]} for x in objects.get("Contents", [])]
s3.delete_objects(Bucket=bucket, Delete=dict(Objects=objects))
s3.delete_bucket(Bucket=bucket)
# https://github.com/boto/botocore/pull/1810
s3._endpoint.http_session._manager.clear()
| [
"sqlmodel.create_engine",
"sqlmodel.Session"
] | [((887, 937), 'pytest.register_assert_rewrite', 'pytest.register_assert_rewrite', (['"""tests.assertions"""'], {}), "('tests.assertions')\n", (917, 937), False, 'import pytest\n'), ((3884, 3929), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (3898, 3929), False, 'import pytest\n'), ((4124, 4169), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (4138, 4169), False, 'import pytest\n'), ((4865, 4910), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (4879, 4910), False, 'import pytest\n'), ((1132, 1159), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1149, 1159), False, 'import logging\n'), ((1210, 1237), 'secrets.token_hex', 'secrets.token_hex', ([], {'nbytes': '(4)'}), '(nbytes=4)\n', (1227, 1237), False, 'import secrets\n'), ((1359, 1775), 'iris.commons.settings.CommonSettings', 'CommonSettings', ([], {'CLICKHOUSE_PUBLIC_USER': '"""public"""', 'CLICKHOUSE_DATABASE': '"""iris_test"""', 'DATABASE_URL': 'f"""postgresql://iris:iris@postgres.docker.localhost/iris-test-{namespace}"""', 'S3_PREFIX': 'f"""iris-test-{namespace}"""', 'S3_PUBLIC_RESOURCES': "['arn:aws:s3:::test-public-exports/*']", 'REDIS_NAMESPACE': 'f"""iris-test-{namespace}"""', 'REDIS_URL': '"""redis://default:iris@redis.docker.localhost?db=15"""', 'RETRY_TIMEOUT': '(-1)'}), "(CLICKHOUSE_PUBLIC_USER='public', CLICKHOUSE_DATABASE=\n 'iris_test', DATABASE_URL=\n f'postgresql://iris:iris@postgres.docker.localhost/iris-test-{namespace}',\n S3_PREFIX=f'iris-test-{namespace}', S3_PUBLIC_RESOURCES=[\n 'arn:aws:s3:::test-public-exports/*'], REDIS_NAMESPACE=\n f'iris-test-{namespace}', REDIS_URL=\n 'redis://default:iris@redis.docker.localhost?db=15', RETRY_TIMEOUT=-1)\n", (1373, 1775), False, 'from iris.commons.settings import CommonSettings\n'), ((2577, 2605), 'iris.commons.clickhouse.ClickHouse', 'ClickHouse', (['settings', 'logger'], {}), '(settings, logger)\n', (2587, 2605), False, 'from iris.commons.clickhouse import ClickHouse\n'), ((2659, 2728), 'sqlmodel.create_engine', 'create_engine', (['settings.DATABASE_URL'], {'json_serializer': 'json_serializer'}), '(settings.DATABASE_URL, json_serializer=json_serializer)\n', (2672, 2728), False, 'from sqlmodel import Session, create_engine\n'), ((2809, 2841), 'iris.commons.models.base.Base.metadata.create_all', 'Base.metadata.create_all', (['engine'], {}), '(engine)\n', (2833, 2841), False, 'from iris.commons.models.base import Base\n'), ((2926, 2986), 'aioredis.from_url', 'aioredis.from_url', (['settings.REDIS_URL'], {'decode_responses': '(True)'}), '(settings.REDIS_URL, decode_responses=True)\n', (2943, 2986), False, 'import aioredis\n'), ((3212, 3237), 'iris.commons.storage.Storage', 'Storage', (['settings', 'logger'], {}), '(settings, logger)\n', (3219, 3237), False, 'from iris.commons.storage import Storage\n'), ((3848, 3880), 'iris.api.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (3878, 3880), False, 'from iris.api.main import app\n'), ((1042, 1081), 'os.environ.get', 'os.environ.get', (['"""IRIS_TEST_CLEANUP"""', '""""""'], {}), "('IRIS_TEST_CLEANUP', '')\n", (1056, 1081), False, 'import os\n'), ((2740, 2767), 'sqlalchemy_utils.database_exists', 'database_exists', (['engine.url'], {}), '(engine.url)\n', (2755, 2767), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((2777, 2804), 'sqlalchemy_utils.create_database', 'create_database', (['engine.url'], {}), '(engine.url)\n', (2792, 2804), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((2997, 3028), 'iris.commons.redis.Redis', 'Redis', (['client', 'settings', 'logger'], {}), '(client, settings, logger)\n', (3002, 3028), False, 'from iris.commons.redis import Redis\n'), ((3102, 3117), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3109, 3117), False, 'from sqlmodel import Session, create_engine\n'), ((3804, 3819), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (3814, 3819), False, 'from fastapi.testclient import TestClient\n'), ((4003, 4072), 'redis.from_url', 'pyredis.from_url', (['"""redis://default:iris@redis.docker.localhost?db=15"""'], {}), "('redis://default:iris@redis.docker.localhost?db=15')\n", (4019, 4072), True, 'import redis as pyredis\n'), ((4290, 4355), 'sqlmodel.create_engine', 'create_engine', (['"""postgresql://iris:iris@postgres.docker.localhost"""'], {}), "('postgresql://iris:iris@postgres.docker.localhost')\n", (4303, 4355), False, 'from sqlmodel import Session, create_engine\n'), ((4977, 5114), 'boto3.client', 'boto3.client', (['"""s3"""'], {'aws_access_key_id': '"""minioadmin"""', 'aws_secret_access_key': '"""minioadmin"""', 'endpoint_url': '"""http://minio.docker.localhost"""'}), "('s3', aws_access_key_id='minioadmin', aws_secret_access_key=\n 'minioadmin', endpoint_url='http://minio.docker.localhost')\n", (4989, 5114), False, 'import boto3\n'), ((4754, 4831), 'sqlalchemy_utils.drop_database', 'drop_database', (['f"""postgresql://iris:iris@postgres.docker.localhost/{database}"""'], {}), "(f'postgresql://iris:iris@postgres.docker.localhost/{database}')\n", (4767, 4831), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((4449, 4655), 'sqlalchemy.text', 'text', (['"""\n SELECT datname\n FROM pg_database\n WHERE datistemplate = false AND datname LIKE \'iris-test-%\'\n """'], {}), '(\n """\n SELECT datname\n FROM pg_database\n WHERE datistemplate = false AND datname LIKE \'iris-test-%\'\n """\n )\n', (4453, 4655), False, 'from sqlalchemy import text\n')] |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
typer.secho(f'\ndue date must be grater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
typer.secho(f'\nreminder must be grater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
f'\nDuration must be grater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
select(Timer).where(Timer.end == None)).one()
query_timer.end = datetime.utcnow()
query_timer.duration = query_timer.end - query_timer.start
session.add(query_timer)
query = session.get(ToDo, query_timer.id_todo)
check = typer.confirm('Is the task done?')
if not check and not remarks:
pass
else:
if check:
query.status = 'done'
query.date_end = query_timer.end.date()
if remarks:
query.remarks = remarks
session.add(query)
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStop task ({query.id}). Timer id: {new_id}\n',
fg=typer.colors.GREEN)
except NoResultFound:
typer.secho(f'\nNo task running\n', fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def view(due_date: datetime = typer.Option(datetime.today() +
timedelta(weeks=1),
formats=['%Y-%m-%d'])):
"""Print to-do list view."""
overdue = select(ToDo).where(ToDo.due_date < date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
reminders = select(ToDo).where(ToDo.reminder <= date.today(),
ToDo.status != 'done').order_by(
ToDo.due_date)
due_in = select(ToDo).where(
ToDo.due_date < due_date, ToDo.due_date >= date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
no_due = select(ToDo).where(
ToDo.due_date == None, ToDo.status != 'done',
ToDo.reminder == None).order_by(ToDo.date_init)
if len(make_table_view(engine, overdue)) > 1:
typer.secho(f'\nOVERDUE\n', fg=typer.colors.BRIGHT_RED,
bold=True)
typer.secho(tabulate(make_table_view(engine, overdue),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
if len(make_table_view(engine, reminders)) > 1:
typer.secho(f'\nREMINDERS\n', fg=typer.colors.BRIGHT_YELLOW, bold=True)
typer.secho(tabulate(make_table_view(engine, reminders),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
if len(make_table_view(engine, due_in)) > 1:
typer.secho(f'\nDUE IN {due_date.date()}\n',
fg=typer.colors.BRIGHT_GREEN, bold=True)
typer.secho(tabulate(make_table_view(engine, due_in),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
if len(make_table_view(engine, no_due)) > 1:
typer.secho(f'\nNO DUE\n', fg=typer.colors.BRIGHT_BLUE, bold=True)
typer.secho(tabulate(make_table_view(engine, no_due),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
print('\n')
| [
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.func.max"
] | [((400, 413), 'typer.Typer', 'typer.Typer', ([], {}), '()\n', (411, 413), False, 'import typer\n'), ((600, 637), 'typer.Option', 'typer.Option', (['None', '"""--project"""', '"""-p"""'], {}), "(None, '--project', '-p')\n", (612, 637), False, 'import typer\n'), ((668, 728), 'typer.Option', 'typer.Option', (['None', '"""--due-date"""', '"""-d"""'], {'formats': "['%Y-%m-%d']"}), "(None, '--due-date', '-d', formats=['%Y-%m-%d'])\n", (680, 728), False, 'import typer\n'), ((801, 861), 'typer.Option', 'typer.Option', (['None', '"""--reminder"""', '"""-r"""'], {'formats': "['%Y-%m-%d']"}), "(None, '--reminder', '-r', formats=['%Y-%m-%d'])\n", (813, 861), False, 'import typer\n'), ((930, 974), 'typer.Option', 'typer.Option', (['Status.to_do', '"""--status"""', '"""-s"""'], {}), "(Status.to_do, '--status', '-s')\n", (942, 974), False, 'import typer\n'), ((995, 1028), 'typer.Option', 'typer.Option', (['None', '"""--tag"""', '"""-t"""'], {}), "(None, '--tag', '-t')\n", (1007, 1028), False, 'import typer\n'), ((3098, 3164), 'typer.Option', 'typer.Option', (['None', '"""--duration"""', '"""-d"""'], {'help': '"""Duration in minutes"""'}), "(None, '--duration', '-d', help='Duration in minutes')\n", (3110, 3164), False, 'import typer\n'), ((6176, 6213), 'typer.Option', 'typer.Option', (['None', '"""--remarks"""', '"""-r"""'], {}), "(None, '--remarks', '-r')\n", (6188, 6213), False, 'import typer\n'), ((1094, 1110), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1108, 1110), False, 'from datetime import datetime, timedelta, date\n'), ((3274, 3289), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3281, 3289), False, 'from sqlmodel import Session, select, func\n'), ((6247, 6262), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6254, 6262), False, 'from sqlmodel import Session, select, func\n'), ((8242, 8308), 'typer.secho', 'typer.secho', (['f"""\nOVERDUE\n"""'], {'fg': 'typer.colors.BRIGHT_RED', 'bold': '(True)'}), "(f'\\nOVERDUE\\n', fg=typer.colors.BRIGHT_RED, bold=True)\n", (8253, 8308), False, 'import typer\n'), ((8533, 8604), 'typer.secho', 'typer.secho', (['f"""\nREMINDERS\n"""'], {'fg': 'typer.colors.BRIGHT_YELLOW', 'bold': '(True)'}), "(f'\\nREMINDERS\\n', fg=typer.colors.BRIGHT_YELLOW, bold=True)\n", (8544, 8604), False, 'import typer\n'), ((9114, 9180), 'typer.secho', 'typer.secho', (['f"""\nNO DUE\n"""'], {'fg': 'typer.colors.BRIGHT_BLUE', 'bold': '(True)'}), "(f'\\nNO DUE\\n', fg=typer.colors.BRIGHT_BLUE, bold=True)\n", (9125, 9180), False, 'import typer\n'), ((1306, 1324), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1316, 1324), False, 'import typer\n'), ((1520, 1538), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1530, 1538), False, 'import typer\n'), ((1784, 1802), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1794, 1802), False, 'import typer\n'), ((1817, 1832), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1824, 1832), False, 'from sqlmodel import Session, select, func\n'), ((2772, 2842), 'typer.secho', 'typer.secho', (['f"""Add {task}. Task id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), "(f'Add {task}. Task id: {new_id}\\n', fg=typer.colors.GREEN)\n", (2783, 2842), False, 'import typer\n'), ((3398, 3471), 'typer.secho', 'typer.secho', (['"""\nThe Timer must be stopped first\n"""'], {'fg': 'typer.colors.RED'}), '("""\nThe Timer must be stopped first\n""", fg=typer.colors.RED)\n', (3409, 3471), False, 'import typer\n'), ((3512, 3530), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (3522, 3530), False, 'import typer\n'), ((6420, 6437), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (6435, 6437), False, 'from datetime import datetime, timedelta, date\n'), ((6627, 6661), 'typer.confirm', 'typer.confirm', (['"""Is the task done?"""'], {}), "('Is the task done?')\n", (6640, 6661), False, 'import typer\n'), ((7086, 7178), 'typer.secho', 'typer.secho', (['f"""\nStop task ({query.id}). Timer id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), '(f"""\nStop task ({query.id}). Timer id: {new_id}\n""", fg=typer.\n colors.GREEN)\n', (7097, 7178), False, 'import typer\n'), ((7402, 7418), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7416, 7418), False, 'from datetime import datetime, timedelta, date\n'), ((7464, 7482), 'datetime.timedelta', 'timedelta', ([], {'weeks': '(1)'}), '(weeks=1)\n', (7473, 7482), False, 'from datetime import datetime, timedelta, date\n'), ((5845, 5905), 'typer.secho', 'typer.secho', (['f"""\nTask already done\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nTask already done\n""", fg=typer.colors.RED)\n', (5856, 5905), False, 'import typer\n'), ((5954, 5972), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (5964, 5972), False, 'import typer\n'), ((6017, 6075), 'typer.secho', 'typer.secho', (['f"""\nInvalid task id\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid task id\n""", fg=typer.colors.RED)\n', (6028, 6075), False, 'import typer\n'), ((6116, 6134), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (6126, 6134), False, 'import typer\n'), ((7248, 7306), 'typer.secho', 'typer.secho', (['f"""\nNo task running\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nNo task running\n""", fg=typer.colors.RED)\n', (7259, 7306), False, 'import typer\n'), ((7323, 7341), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (7333, 7341), False, 'import typer\n'), ((3880, 3907), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'duration'}), '(minutes=duration)\n', (3889, 3907), False, 'from datetime import datetime, timedelta, date\n'), ((4422, 4517), 'typer.secho', 'typer.secho', (['f"""\nTask Start task {task_id}. Timer id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), '(f"""\nTask Start task {task_id}. Timer id: {new_id}\n""", fg=\n typer.colors.GREEN)\n', (4433, 4517), False, 'import typer\n'), ((5677, 5767), 'typer.secho', 'typer.secho', (['f"""\nStart task {task_id}. Timer id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), '(f"""\nStart task {task_id}. Timer id: {new_id}\n""", fg=typer.\n colors.GREEN)\n', (5688, 5767), False, 'import typer\n'), ((7598, 7610), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (7604, 7610), False, 'from sqlmodel import Session, select, func\n'), ((7633, 7645), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7643, 7645), False, 'from datetime import datetime, timedelta, date\n'), ((7744, 7756), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (7750, 7756), False, 'from sqlmodel import Session, select, func\n'), ((7780, 7792), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7790, 7792), False, 'from datetime import datetime, timedelta, date\n'), ((7899, 7911), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (7905, 7911), False, 'from sqlmodel import Session, select, func\n'), ((7970, 7982), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7980, 7982), False, 'from datetime import datetime, timedelta, date\n'), ((8053, 8065), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (8059, 8065), False, 'from sqlmodel import Session, select, func\n'), ((2290, 2366), 'typer.secho', 'typer.secho', (['f"""\nTasks already done in the project\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nTasks already done in the project\n""", fg=typer.colors.RED)\n', (2301, 2366), False, 'import typer\n'), ((2431, 2449), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (2441, 2449), False, 'import typer\n'), ((3943, 3963), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(0)'}), '(minutes=0)\n', (3952, 3963), False, 'from datetime import datetime, timedelta, date\n'), ((3989, 4062), 'typer.secho', 'typer.secho', (['f"""\nDuration must be grater than 0\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nDuration must be grater than 0\n""", fg=typer.colors.RED)\n', (4000, 4062), False, 'import typer\n'), ((4148, 4166), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (4158, 4166), False, 'import typer\n'), ((4585, 4624), 'typer.progressbar', 'typer.progressbar', ([], {'length': 'total_seconds'}), '(length=total_seconds)\n', (4602, 4624), False, 'import typer\n'), ((2734, 2751), 'sqlmodel.func.max', 'func.max', (['ToDo.id'], {}), '(ToDo.id)\n', (2742, 2751), False, 'from sqlmodel import Session, select, func\n'), ((4668, 4685), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4683, 4685), False, 'from datetime import datetime, timedelta, date\n'), ((4727, 4744), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4742, 4744), False, 'from datetime import datetime, timedelta, date\n'), ((4780, 4793), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4790, 4793), False, 'import time\n'), ((4899, 4997), 'typer.secho', 'typer.secho', (['"""\n\nYour Time is over! Well done!\n"""'], {'blink': '(True)', 'fg': 'typer.colors.BRIGHT_GREEN'}), '("""\n\nYour Time is over! Well done!\n""", blink=True, fg=typer.\n colors.BRIGHT_GREEN)\n', (4910, 4997), False, 'import typer\n'), ((5150, 5178), 'typer.confirm', 'typer.confirm', (['"""Any remark?"""'], {}), "('Any remark?')\n", (5163, 5178), False, 'import typer\n'), ((5452, 5464), 'typer.Exit', 'typer.Exit', ([], {}), '()\n', (5462, 5464), False, 'import typer\n'), ((7047, 7065), 'sqlmodel.func.max', 'func.max', (['Timer.id'], {}), '(Timer.id)\n', (7055, 7065), False, 'from sqlmodel import Session, select, func\n'), ((3340, 3353), 'sqlmodel.select', 'select', (['Timer'], {}), '(Timer)\n', (3346, 3353), False, 'from sqlmodel import Session, select, func\n'), ((5259, 5294), 'typer.prompt', 'typer.prompt', (['"""Enter your remarks."""'], {}), "('Enter your remarks.')\n", (5271, 5294), False, 'import typer\n'), ((6344, 6357), 'sqlmodel.select', 'select', (['Timer'], {}), '(Timer)\n', (6350, 6357), False, 'from sqlmodel import Session, select, func\n'), ((1924, 1936), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (1930, 1936), False, 'from sqlmodel import Session, select, func\n'), ((4375, 4393), 'sqlmodel.func.max', 'func.max', (['Timer.id'], {}), '(Timer.id)\n', (4383, 4393), False, 'from sqlmodel import Session, select, func\n'), ((5630, 5648), 'sqlmodel.func.max', 'func.max', (['Timer.id'], {}), '(Timer.id)\n', (5638, 5648), False, 'from sqlmodel import Session, select, func\n'), ((2093, 2105), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (2099, 2105), False, 'from sqlmodel import Session, select, func\n')] |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("webhook_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("traction_issue_enabled", sa.Boolean(), nullable=False),
sa.Column("public_did", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("cred_def_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("wallet_key", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"student",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("age", sa.Integer(), nullable=True),
sa.Column("student_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(op.f("ix_student_name"), "student", ["name"], unique=False)
op.create_table(
"out_of_band",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("msg", postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column("msg_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sender_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("recipient_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("action", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["recipient_id"],
["line_of_business.id"],
),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.ForeignKeyConstraint(
["sender_id"],
["line_of_business.id"],
),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("out_of_band")
op.drop_index(op.f("ix_student_name"), table_name="student")
op.drop_table("student")
op.drop_table("line_of_business")
op.drop_index(op.f("ix_job_applicant_name"), table_name="job_applicant")
op.drop_table("job_applicant")
op.drop_table("sandbox")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.sql.sqltypes.AutoString"
] | [((7044, 7072), 'alembic.op.drop_table', 'op.drop_table', (['"""out_of_band"""'], {}), "('out_of_band')\n", (7057, 7072), False, 'from alembic import op\n'), ((7142, 7166), 'alembic.op.drop_table', 'op.drop_table', (['"""student"""'], {}), "('student')\n", (7155, 7166), False, 'from alembic import op\n'), ((7171, 7204), 'alembic.op.drop_table', 'op.drop_table', (['"""line_of_business"""'], {}), "('line_of_business')\n", (7184, 7204), False, 'from alembic import op\n'), ((7286, 7316), 'alembic.op.drop_table', 'op.drop_table', (['"""job_applicant"""'], {}), "('job_applicant')\n", (7299, 7316), False, 'from alembic import op\n'), ((7321, 7345), 'alembic.op.drop_table', 'op.drop_table', (['"""sandbox"""'], {}), "('sandbox')\n", (7334, 7345), False, 'from alembic import op\n'), ((1040, 1069), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1063, 1069), True, 'import sqlalchemy as sa\n'), ((2281, 2336), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (2304, 2336), True, 'import sqlalchemy as sa\n'), ((2381, 2410), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2404, 2410), True, 'import sqlalchemy as sa\n'), ((2420, 2461), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (2439, 2461), True, 'import sqlalchemy as sa\n'), ((2498, 2527), 'alembic.op.f', 'op.f', (['"""ix_job_applicant_name"""'], {}), "('ix_job_applicant_name')\n", (2502, 2527), False, 'from alembic import op\n'), ((3782, 3837), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (3805, 3837), True, 'import sqlalchemy as sa\n'), ((3882, 3911), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3905, 3911), True, 'import sqlalchemy as sa\n'), ((5256, 5311), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (5279, 5311), True, 'import sqlalchemy as sa\n'), ((5356, 5385), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (5379, 5385), True, 'import sqlalchemy as sa\n'), ((5395, 5436), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (5414, 5436), True, 'import sqlalchemy as sa\n'), ((5464, 5487), 'alembic.op.f', 'op.f', (['"""ix_student_name"""'], {}), "('ix_student_name')\n", (5468, 5487), False, 'from alembic import op\n'), ((6564, 6630), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['recipient_id']", "['line_of_business.id']"], {}), "(['recipient_id'], ['line_of_business.id'])\n", (6587, 6630), True, 'import sqlalchemy as sa\n'), ((6675, 6730), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (6698, 6730), True, 'import sqlalchemy as sa\n'), ((6775, 6838), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sender_id']", "['line_of_business.id']"], {}), "(['sender_id'], ['line_of_business.id'])\n", (6798, 6838), True, 'import sqlalchemy as sa\n'), ((6883, 6912), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (6906, 6912), True, 'import sqlalchemy as sa\n'), ((7091, 7114), 'alembic.op.f', 'op.f', (['"""ix_student_name"""'], {}), "('ix_student_name')\n", (7095, 7114), False, 'from alembic import op\n'), ((7223, 7252), 'alembic.op.f', 'op.f', (['"""ix_job_applicant_name"""'], {}), "('ix_job_applicant_name')\n", (7227, 7252), False, 'from alembic import op\n'), ((498, 527), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (513, 527), False, 'from sqlalchemy.dialects import postgresql\n'), ((682, 704), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (702, 704), False, 'from sqlalchemy.dialects import postgresql\n'), ((847, 869), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (867, 869), False, 'from sqlalchemy.dialects import postgresql\n'), ((980, 1014), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1012, 1014), False, 'import sqlmodel\n'), ((1172, 1201), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (1187, 1201), False, 'from sqlalchemy.dialects import postgresql\n'), ((1356, 1378), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (1376, 1378), False, 'from sqlalchemy.dialects import postgresql\n'), ((1521, 1543), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (1541, 1543), False, 'from sqlalchemy.dialects import postgresql\n'), ((1655, 1689), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1687, 1689), False, 'import sqlmodel\n'), ((1740, 1768), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1766, 1768), False, 'import sqlmodel\n'), ((1815, 1849), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1847, 1849), False, 'import sqlmodel\n'), ((1893, 1906), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1904, 1906), True, 'import sqlalchemy as sa\n'), ((1975, 2009), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2007, 2009), False, 'import sqlmodel\n'), ((2071, 2099), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2097, 2099), False, 'import sqlmodel\n'), ((2148, 2176), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2174, 2176), False, 'import sqlmodel\n'), ((2221, 2255), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2253, 2255), False, 'import sqlmodel\n'), ((2673, 2702), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (2688, 2702), False, 'from sqlalchemy.dialects import postgresql\n'), ((2857, 2879), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (2877, 2879), False, 'from sqlalchemy.dialects import postgresql\n'), ((3022, 3044), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (3042, 3044), False, 'from sqlalchemy.dialects import postgresql\n'), ((3156, 3190), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3188, 3190), False, 'import sqlmodel\n'), ((3242, 3276), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3274, 3276), False, 'import sqlmodel\n'), ((3326, 3354), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3352, 3354), False, 'import sqlmodel\n'), ((3417, 3429), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3427, 3429), True, 'import sqlalchemy as sa\n'), ((3480, 3514), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3512, 3514), False, 'import sqlmodel\n'), ((3565, 3599), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3597, 3599), False, 'import sqlmodel\n'), ((3648, 3676), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3674, 3676), False, 'import sqlmodel\n'), ((3727, 3755), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3753, 3755), False, 'import sqlmodel\n'), ((4008, 4037), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (4023, 4037), False, 'from sqlalchemy.dialects import postgresql\n'), ((4192, 4214), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (4212, 4214), False, 'from sqlalchemy.dialects import postgresql\n'), ((4357, 4379), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (4377, 4379), False, 'from sqlalchemy.dialects import postgresql\n'), ((4491, 4525), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4523, 4525), False, 'import sqlmodel\n'), ((4576, 4604), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (4602, 4604), False, 'import sqlmodel\n'), ((4651, 4685), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4683, 4685), False, 'import sqlmodel\n'), ((4728, 4740), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (4738, 4740), True, 'import sqlalchemy as sa\n'), ((4790, 4824), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4822, 4824), False, 'import sqlmodel\n'), ((4868, 4881), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (4879, 4881), True, 'import sqlalchemy as sa\n'), ((4950, 4984), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4982, 4984), False, 'import sqlmodel\n'), ((5046, 5074), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (5072, 5074), False, 'import sqlmodel\n'), ((5123, 5151), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (5149, 5151), False, 'import sqlmodel\n'), ((5196, 5230), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5228, 5230), False, 'import sqlmodel\n'), ((5617, 5646), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (5632, 5646), False, 'from sqlalchemy.dialects import postgresql\n'), ((5801, 5823), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (5821, 5823), False, 'from sqlalchemy.dialects import postgresql\n'), ((5966, 5988), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (5986, 5988), False, 'from sqlalchemy.dialects import postgresql\n'), ((6185, 6219), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6217, 6219), False, 'import sqlmodel\n'), ((6269, 6297), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6295, 6297), False, 'import sqlmodel\n'), ((6350, 6378), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6376, 6378), False, 'import sqlmodel\n'), ((6429, 6457), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6455, 6457), False, 'import sqlmodel\n'), ((6504, 6538), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6536, 6538), False, 'import sqlmodel\n'), ((556, 584), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (563, 584), True, 'import sqlalchemy as sa\n'), ((733, 749), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (740, 749), True, 'import sqlalchemy as sa\n'), ((898, 914), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (905, 914), True, 'import sqlalchemy as sa\n'), ((1230, 1258), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (1237, 1258), True, 'import sqlalchemy as sa\n'), ((1407, 1423), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (1414, 1423), True, 'import sqlalchemy as sa\n'), ((1572, 1588), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (1579, 1588), True, 'import sqlalchemy as sa\n'), ((2731, 2759), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (2738, 2759), True, 'import sqlalchemy as sa\n'), ((2908, 2924), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (2915, 2924), True, 'import sqlalchemy as sa\n'), ((3073, 3089), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (3080, 3089), True, 'import sqlalchemy as sa\n'), ((4066, 4094), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (4073, 4094), True, 'import sqlalchemy as sa\n'), ((4243, 4259), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (4250, 4259), True, 'import sqlalchemy as sa\n'), ((4408, 4424), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (4415, 4424), True, 'import sqlalchemy as sa\n'), ((5675, 5703), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (5682, 5703), True, 'import sqlalchemy as sa\n'), ((5852, 5868), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (5859, 5868), True, 'import sqlalchemy as sa\n'), ((6017, 6033), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (6024, 6033), True, 'import sqlalchemy as sa\n'), ((6127, 6136), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6134, 6136), True, 'import sqlalchemy as sa\n')] |
"""initial-db-tables
Revision ID: d925cb39480e
Revises:
Create Date: 2022-05-05 11:45:18.781171
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "d925cb39480e"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"contact",
sa.Column(
"contact_id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column("tags", postgresql.ARRAY(sa.String()), nullable=True),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("author_status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("endorse_status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("connection_protocol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column(
"connection_alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("public_did", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("state", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint("contact_id"),
)
op.create_table(
"endorserequest",
sa.Column(
"endorse_request_id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column("tags", postgresql.ARRAY(sa.String()), nullable=True),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("transaction_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("endorser_did", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("author_did", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column(
"transaction_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False
),
sa.Column("state", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("ledger_txn", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("endorse_request_id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("endorserequest")
op.drop_table("contact")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.sql.sqltypes.AutoString"
] | [((3169, 3200), 'alembic.op.drop_table', 'op.drop_table', (['"""endorserequest"""'], {}), "('endorserequest')\n", (3182, 3200), False, 'from alembic import op\n'), ((3205, 3229), 'alembic.op.drop_table', 'op.drop_table', (['"""contact"""'], {}), "('contact')\n", (3218, 3229), False, 'from alembic import op\n'), ((1683, 1720), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""contact_id"""'], {}), "('contact_id')\n", (1706, 1720), True, 'import sqlalchemy as sa\n'), ((2992, 3037), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""endorse_request_id"""'], {}), "('endorse_request_id')\n", (3015, 3037), True, 'import sqlalchemy as sa\n'), ((516, 545), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (531, 545), False, 'from sqlalchemy.dialects import postgresql\n'), ((773, 795), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (793, 795), False, 'from sqlalchemy.dialects import postgresql\n'), ((938, 960), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (958, 960), False, 'from sqlalchemy.dialects import postgresql\n'), ((1081, 1115), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1113, 1115), False, 'import sqlmodel\n'), ((1170, 1204), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1202, 1204), False, 'import sqlmodel\n'), ((1258, 1286), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1284, 1286), False, 'import sqlmodel\n'), ((1346, 1380), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1378, 1380), False, 'import sqlmodel\n'), ((1450, 1484), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1482, 1484), False, 'import sqlmodel\n'), ((1543, 1577), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1575, 1577), False, 'import sqlmodel\n'), ((1622, 1656), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1654, 1656), False, 'import sqlmodel\n'), ((1840, 1869), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (1855, 1869), False, 'from sqlalchemy.dialects import postgresql\n'), ((2097, 2119), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (2117, 2119), False, 'from sqlalchemy.dialects import postgresql\n'), ((2262, 2284), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (2282, 2284), False, 'from sqlalchemy.dialects import postgresql\n'), ((2406, 2434), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2432, 2434), False, 'import sqlmodel\n'), ((2488, 2516), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2514, 2516), False, 'import sqlmodel\n'), ((2569, 2603), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2601, 2603), False, 'import sqlmodel\n'), ((2654, 2688), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2686, 2688), False, 'import sqlmodel\n'), ((2758, 2792), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2790, 2792), False, 'import sqlmodel\n'), ((2847, 2881), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2879, 2881), False, 'import sqlmodel\n'), ((2932, 2966), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2964, 2966), False, 'import sqlmodel\n'), ((574, 602), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (581, 602), True, 'import sqlalchemy as sa\n'), ((686, 697), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (695, 697), True, 'import sqlalchemy as sa\n'), ((824, 840), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (831, 840), True, 'import sqlalchemy as sa\n'), ((989, 1005), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (996, 1005), True, 'import sqlalchemy as sa\n'), ((1898, 1926), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (1905, 1926), True, 'import sqlalchemy as sa\n'), ((2010, 2021), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (2019, 2021), True, 'import sqlalchemy as sa\n'), ((2148, 2164), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (2155, 2164), True, 'import sqlalchemy as sa\n'), ((2313, 2329), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (2320, 2329), True, 'import sqlalchemy as sa\n')] |
from datetime import date
from fastapi import FastAPI, Query
from sqlmodel import Session, create_engine, select
from .datatypes import ArtmuseumAddress, ArtmuseumTimeLabel
from .db.crud import init_db
from .db.models import ArtmuseumExhibition, PhilharmoniaConcert
sql_engine = create_engine(
"sqlite:///database.db", connect_args={"check_same_thread": False}
)
app = FastAPI(
title="Murmansk Culture API",
# description="",
version="0.0.1",
contact={
"name": "<NAME>",
"url": "https://github.com/anorlovsky",
"email": "<EMAIL>",
},
redoc_url="/",
docs_url=None,
)
@app.on_event("startup")
def on_startup():
init_db(sql_engine)
@app.get(
"/artmuseum",
response_model=list[ArtmuseumExhibition],
description="Возвращает список текущих и ближайших выставок [Мурманского областного художественного музея](https://artmmuseum.ru/)",
)
async def get_artmuseum_exhibitions(
time: ArtmuseumTimeLabel = Query(
None,
description='Вернуть только текущие (`"now"`) или только ближайшие (`"soon"`) выставки',
)
):
with Session(sql_engine) as session:
if time is None:
stmt = select(ArtmuseumExhibition)
elif time == ArtmuseumTimeLabel.NOW:
stmt = select(ArtmuseumExhibition).where(
ArtmuseumExhibition.start_date <= date.today()
)
elif time == ArtmuseumTimeLabel.SOON:
stmt = select(ArtmuseumExhibition).where(
ArtmuseumExhibition.start_date > date.today()
)
return session.exec(stmt).all()
@app.get(
"/philharmonia",
response_model=list[PhilharmoniaConcert],
description="Возвращает список ближайших концертов [Мурманской областной филармонии](https://www.murmansound.ru)",
)
async def get_philharmonia_concerts():
with Session(sql_engine) as session:
return session.exec(select(PhilharmoniaConcert)).all()
| [
"sqlmodel.create_engine",
"sqlmodel.select",
"sqlmodel.Session"
] | [((283, 369), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {'connect_args': "{'check_same_thread': False}"}), "('sqlite:///database.db', connect_args={'check_same_thread': \n False})\n", (296, 369), False, 'from sqlmodel import Session, create_engine, select\n'), ((377, 557), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Murmansk Culture API"""', 'version': '"""0.0.1"""', 'contact': "{'name': '<NAME>', 'url': 'https://github.com/anorlovsky', 'email': '<EMAIL>'}", 'redoc_url': '"""/"""', 'docs_url': 'None'}), "(title='Murmansk Culture API', version='0.0.1', contact={'name':\n '<NAME>', 'url': 'https://github.com/anorlovsky', 'email': '<EMAIL>'},\n redoc_url='/', docs_url=None)\n", (384, 557), False, 'from fastapi import FastAPI, Query\n'), ((978, 1088), 'fastapi.Query', 'Query', (['None'], {'description': '"""Вернуть только текущие (`"now"`) или только ближайшие (`"soon"`) выставки"""'}), '(None, description=\n \'Вернуть только текущие (`"now"`) или только ближайшие (`"soon"`) выставки\'\n )\n', (983, 1088), False, 'from fastapi import FastAPI, Query\n'), ((1114, 1133), 'sqlmodel.Session', 'Session', (['sql_engine'], {}), '(sql_engine)\n', (1121, 1133), False, 'from sqlmodel import Session, create_engine, select\n'), ((1859, 1878), 'sqlmodel.Session', 'Session', (['sql_engine'], {}), '(sql_engine)\n', (1866, 1878), False, 'from sqlmodel import Session, create_engine, select\n'), ((1190, 1217), 'sqlmodel.select', 'select', (['ArtmuseumExhibition'], {}), '(ArtmuseumExhibition)\n', (1196, 1217), False, 'from sqlmodel import Session, create_engine, select\n'), ((1919, 1946), 'sqlmodel.select', 'select', (['PhilharmoniaConcert'], {}), '(PhilharmoniaConcert)\n', (1925, 1946), False, 'from sqlmodel import Session, create_engine, select\n'), ((1282, 1309), 'sqlmodel.select', 'select', (['ArtmuseumExhibition'], {}), '(ArtmuseumExhibition)\n', (1288, 1309), False, 'from sqlmodel import Session, create_engine, select\n'), ((1367, 1379), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1377, 1379), False, 'from datetime import date\n'), ((1459, 1486), 'sqlmodel.select', 'select', (['ArtmuseumExhibition'], {}), '(ArtmuseumExhibition)\n', (1465, 1486), False, 'from sqlmodel import Session, create_engine, select\n'), ((1543, 1555), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1553, 1555), False, 'from datetime import date\n')] |
from uuid import UUID
from sqlalchemy import event
from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import DomainURLORMModel, url_pre_save
from joj.horse.models.domain import Domain
from joj.horse.schemas.domain_invitation import DomainInvitationDetail
class DomainInvitation(DomainURLORMModel, DomainInvitationDetail, table=True): # type: ignore[call-arg]
__tablename__ = "domain_invitations"
__table_args__ = (
UniqueConstraint("domain_id", "url"),
UniqueConstraint("domain_id", "code"),
)
domain_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False
)
)
domain: "Domain" = Relationship(back_populates="invitations")
event.listen(DomainInvitation, "before_insert", url_pre_save)
event.listen(DomainInvitation, "before_update", url_pre_save)
| [
"sqlmodel.Relationship"
] | [((869, 930), 'sqlalchemy.event.listen', 'event.listen', (['DomainInvitation', '"""before_insert"""', 'url_pre_save'], {}), "(DomainInvitation, 'before_insert', url_pre_save)\n", (881, 930), False, 'from sqlalchemy import event\n'), ((931, 992), 'sqlalchemy.event.listen', 'event.listen', (['DomainInvitation', '"""before_update"""', 'url_pre_save'], {}), "(DomainInvitation, 'before_update', url_pre_save)\n", (943, 992), False, 'from sqlalchemy import event\n'), ((824, 866), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""invitations"""'}), "(back_populates='invitations')\n", (836, 866), False, 'from sqlmodel import Field, Relationship\n'), ((559, 595), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['"""domain_id"""', '"""url"""'], {}), "('domain_id', 'url')\n", (575, 595), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((605, 642), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['"""domain_id"""', '"""code"""'], {}), "('domain_id', 'code')\n", (621, 642), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((724, 768), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""domains.id"""'], {'ondelete': '"""CASCADE"""'}), "('domains.id', ondelete='CASCADE')\n", (734, 768), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n')] |
from typing import Optional, Dict, List, Any, Union
import datetime as dt
from sqlmodel import Field, Session, SQLModel, create_engine, select
import threading as th
import queue
# ~~~ Database ~~~~~~~~~~~~~~~
class Database:
def __init__(self, uri: str):
self.engine = create_engine(uri)
SQLModel.metadata.create_all(self.engine)
def create_all(self, items: List[SQLModel]):
with Session(self.engine) as session:
for item in items:
session.add(item)
session.commit()
def get_by_id(self, id: Union[str, int], model: SQLModel):
with Session(self.engine) as session:
stmt = select(model).where(model.id == id)
return session.exec(stmt).first()
def get_by_field(self, key: str, value: Any, model: SQLModel):
stmt = select(model).where(getattr(model, key) == value)
print(stmt)
return self.exec(stmt)
def exec(self, stmt: str, params = {}):
with Session(self.engine) as session:
return session.exec(stmt, params=params).all()
class DatabaseWorker(th.Thread):
def __init__(self,
uri: str,
queue: queue.Queue,
batch: int = None,
timeout: int = 10
):
super().__init__()
self.q = queue
self.db = None
self.uri = uri
self.timeout = timeout
self.batch = batch
def run(self):
self.db = Database(self.uri)
while True:
cache = []
try:
cache.append(self.q.get(timeout=self.timeout))
if self.batch:
if len(cache) % self.batch == 0:
self.db.create_all(cache)
cache = []
else:
cache = []
except queue.Empty:
self.db.create_all(cache)
break
# ~~~ Models ~~~~~~~~~~~~~~~~~
class Document(SQLModel, table=True):
id: str = Field(primary_key=True)
name: str
href: str
date: dt.datetime
text: Optional[str] = None
date_collected: dt.datetime
collected_by: str
class Paragraph(SQLModel, table=True):
id: str = Field(primary_key=True)
text: str
document_id: str = Field(foreign_key="document.id")
sentiment: str
sent_score: float
class Entity(SQLModel, table=True):
id: str = Field(primary_key=True)
name: str
description: Optional[str]
class EntityMention(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
text: str
score: Optional[float]
label: str
start: int
end: int
paragraph_id: str = Field(foreign_key="paragraph.id")
kb_id: Optional[str] = Field(foreign_key="entity.id")
class EntityFeature(SQLModel, table=True):
id: int = Field(primary_key=True)
kb_id: str = Field(foreign_key="entity.id")
key: str
value: str
| [
"sqlmodel.create_engine",
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field"
] | [((2043, 2066), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2048, 2066), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2256, 2279), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2261, 2279), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2317, 2349), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""document.id"""'}), "(foreign_key='document.id')\n", (2322, 2349), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2442, 2465), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2447, 2465), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2579, 2616), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2584, 2616), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2725, 2758), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""paragraph.id"""'}), "(foreign_key='paragraph.id')\n", (2730, 2758), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2786, 2816), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""entity.id"""'}), "(foreign_key='entity.id')\n", (2791, 2816), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2875, 2898), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2880, 2898), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2916, 2946), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""entity.id"""'}), "(foreign_key='entity.id')\n", (2921, 2946), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((284, 302), 'sqlmodel.create_engine', 'create_engine', (['uri'], {}), '(uri)\n', (297, 302), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((311, 352), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['self.engine'], {}), '(self.engine)\n', (339, 352), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((416, 436), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (423, 436), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((620, 640), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (627, 640), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1004, 1024), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (1011, 1024), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((841, 854), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (847, 854), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((672, 685), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (678, 685), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')] |
# A file containing fixtures for testing
# Fixtures defined here are available for the whole scope
from fastapi.testclient import TestClient
import pytest
import os
from ..main import app, session
from sqlmodel import SQLModel, Session, create_engine
from sqlmodel.pool import StaticPool
from ..utils import get_session
db_name = "test_db.sqlite"
test_con = f"sqlite:///{db_name}"
test_engine = create_engine(
test_con, connect_args={"check_same_thread": False}, echo=True
)
@pytest.fixture(name="create_db", scope="session")
def create_db():
# setup
SQLModel.metadata.create_all(test_engine)
yield
# teardown
os.remove(db_name)
@pytest.fixture(name="session")
def session_fixture(create_db):
create_db
with Session(test_engine) as session:
yield session
@pytest.fixture(name="client")
def client_fixture(session: Session):
def get_session_override():
return session
app.dependency_overrides[get_session] = get_session_override
client = TestClient(app)
yield client
app.dependency_overrides.clear()
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((397, 474), 'sqlmodel.create_engine', 'create_engine', (['test_con'], {'connect_args': "{'check_same_thread': False}", 'echo': '(True)'}), "(test_con, connect_args={'check_same_thread': False}, echo=True)\n", (410, 474), False, 'from sqlmodel import SQLModel, Session, create_engine\n'), ((484, 533), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""create_db"""', 'scope': '"""session"""'}), "(name='create_db', scope='session')\n", (498, 533), False, 'import pytest\n'), ((660, 690), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""session"""'}), "(name='session')\n", (674, 690), False, 'import pytest\n'), ((805, 834), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""client"""'}), "(name='client')\n", (819, 834), False, 'import pytest\n'), ((567, 608), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['test_engine'], {}), '(test_engine)\n', (595, 608), False, 'from sqlmodel import SQLModel, Session, create_engine\n'), ((638, 656), 'os.remove', 'os.remove', (['db_name'], {}), '(db_name)\n', (647, 656), False, 'import os\n'), ((1007, 1022), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (1017, 1022), False, 'from fastapi.testclient import TestClient\n'), ((747, 767), 'sqlmodel.Session', 'Session', (['test_engine'], {}), '(test_engine)\n', (754, 767), False, 'from sqlmodel import SQLModel, Session, create_engine\n')] |
from sqlmodel import Session
from .database import create_db_and_tables, engine
from .hero_model import Hero
from .team_model import Team
def create_heroes():
with Session(engine) as session:
team_z_force = Team(name="Z-Force", headquarters="<NAME>")
hero_deadpond = Hero(
name="Deadpond", secret_name="<NAME>", team=team_z_force, experience_points=1
)
session.add(hero_deadpond)
session.commit()
session.refresh(hero_deadpond)
print("Created hero:", hero_deadpond)
print("Hero's team:", hero_deadpond.team)
def main():
create_db_and_tables()
create_heroes()
if __name__ == "__main__":
main()
| [
"sqlmodel.Session"
] | [((171, 186), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (178, 186), False, 'from sqlmodel import Session\n')] |
from sqlalchemy.engine import Engine
from sqlmodel import create_engine, Session, SQLModel
from sqlmodel.engine.create import _FutureEngine
from typing import Union, Optional
from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, \
raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains
from typing import List
class DBConnectionBase:
"""
References
----------
for users who want to know the differences between Engine, Connection, Session
https://stackoverflow.com/questions/34322471/sqlalchemy-engine-connection-and-session-difference
"""
def __init__(self, db_engine: Union[Engine, _FutureEngine]):
self._db_engine = db_engine
SQLModel.metadata.create_all(self._db_engine)
@classmethod
def from_db_user(cls, db_type, db_driver, host, user, password, port, db, charset='utf8', echo=True):
engine = create_engine(f"{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}", echo=echo)
return cls(engine)
@classmethod
def from_full_db_path(cls, full_db_path, echo=True):
engine = create_engine(f"{full_db_path}", echo=echo)
return cls(engine)
def get_db_engine(self):
return self._db_engine
def execute(self, sql, always_commit=False, fetch: Optional[Union[int, str]] = None):
with Session(self._db_engine) as session:
q = session.execute(sql)
if always_commit:
session.commit()
if fetch is not None:
raise_if_incorrect_type(fetch, (int, str))
if isinstance(fetch, int):
if fetch == 1:
return q.fetchone()
elif fetch > 1:
return q.fetchmany(fetch)
elif isinstance(fetch, str):
if fetch == 'all':
return q.fetchall()
raise ValueError
return q
def get_db_inspector(self):
from sqlalchemy import inspect
inspector = inspect(self._db_engine)
return inspector
def get_schemas(self, schemas='all', tables='all'):
inspector = self.get_db_inspector()
from collections import defaultdict
schema_containers = defaultdict(dict)
schemas = _validate_schema_names(inspector, schemas)
return _get_schemas(inspector, schema_containers, schemas, tables)
def get_tables_names(self):
inspector = self.get_db_inspector()
return inspector.get_table_names()
def _get_schemas(inspector, schema_containers, schemas: Union[str, List[str]], tables: Union[str, List[List[str]], List[str]]):
schema_list = _validate_schema_names(inspector, schemas)
if check_all_element_type_uniform(tables, list):
for schema, table in zip(schema_list, tables):
table_names = _validate_table_names(inspector, schema, table)
for sub_table_names in table_names:
schema_containers[schema][sub_table_names] = inspector.get_columns(sub_table_names, schema=schema)
return schema_containers
elif check_all_element_type_uniform(tables, str) or tables == 'all':
for schema in schema_list:
table_names = _validate_table_names(inspector, schema, tables)
for table_name in table_names:
schema_containers[schema][table_name] = inspector.get_columns(table_name, schema=schema)
return schema_containers
raise ValueError
def _validate_schema_names(inspector, schemas: List[str]):
if schemas == 'all':
return inspector.get_schema_names()
if isinstance(schemas, list):
raise_if_not_all_value_contains(schemas, inspector.get_schema_names())
return schemas
raise ValueError('schemas must be "all" or a list of string')
def _validate_table_names(inspector, schema: str, tables: List[str]):
if tables == 'all':
return inspector.get_table_names(schema=schema)
if isinstance(tables, list):
if check_all_element_type_uniform(tables, str):
raise_if_value_not_contains(tables, inspector.get_table_names(schema=schema))
return tables
elif check_all_element_type_uniform(tables, list):
for sub_tab in tables:
print(sub_tab, inspector.get_table_names(schema=schema))
raise_if_value_not_contains(sub_tab, inspector.get_table_names(schema=schema))
return tables
raise ValueError('tables name are not existed in database, pls verify')
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((2815, 2859), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'list'], {}), '(tables, list)\n', (2845, 2859), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((767, 812), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['self._db_engine'], {}), '(self._db_engine)\n', (795, 812), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((954, 1047), 'sqlmodel.create_engine', 'create_engine', (['f"""{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}"""'], {'echo': 'echo'}), "(f'{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}',\n echo=echo)\n", (967, 1047), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((1163, 1206), 'sqlmodel.create_engine', 'create_engine', (['f"""{full_db_path}"""'], {'echo': 'echo'}), "(f'{full_db_path}', echo=echo)\n", (1176, 1206), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((2117, 2141), 'sqlalchemy.inspect', 'inspect', (['self._db_engine'], {}), '(self._db_engine)\n', (2124, 2141), False, 'from sqlalchemy import inspect\n'), ((2341, 2358), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (2352, 2358), False, 'from collections import defaultdict\n'), ((4104, 4147), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'str'], {}), '(tables, str)\n', (4134, 4147), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((1400, 1424), 'sqlmodel.Session', 'Session', (['self._db_engine'], {}), '(self._db_engine)\n', (1407, 1424), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((3196, 3239), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'str'], {}), '(tables, str)\n', (3226, 3239), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((4278, 4322), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'list'], {}), '(tables, list)\n', (4308, 4322), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((1588, 1630), 'pyemits.common.validation.raise_if_incorrect_type', 'raise_if_incorrect_type', (['fetch', '(int, str)'], {}), '(fetch, (int, str))\n', (1611, 1630), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n')] |
"""add verified result to application
Revision ID: d8a156ffaeae
Revises: <KEY>
Create Date: 2022-03-30 16:00:13.195216
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = "d8a156ffaeae"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"job_applicant",
sa.Column("verified", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("job_applicant", "verified")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((659, 702), 'alembic.op.drop_column', 'op.drop_column', (['"""job_applicant"""', '"""verified"""'], {}), "('job_applicant', 'verified')\n", (673, 702), False, 'from alembic import op\n'), ((477, 511), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (509, 511), False, 'import sqlmodel\n')] |
from datetime import date
from typing import List, Optional
from api.ecoindex.models.responses import ApiEcoindex
from api.models.enums import Version
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlmodel import select
from db.helper import date_filter
async def get_host_list_db(
session: AsyncSession,
version: Optional[Version] = Version.v1,
q: Optional[str] = None,
date_from: Optional[date] = None,
date_to: Optional[date] = None,
page: Optional[int] = 1,
size: Optional[int] = 50,
) -> List[str]:
statement = (
select(ApiEcoindex.host)
.where(ApiEcoindex.version == version.get_version_number())
.offset(size * (page - 1))
.limit(size)
)
if q:
statement = statement.filter(ApiEcoindex.host.like(f"%{q}%"))
statement = date_filter(statement=statement, date_from=date_from, date_to=date_to)
statement = statement.group_by(ApiEcoindex.host).order_by(ApiEcoindex.host)
hosts = await session.execute(statement)
return hosts.scalars().all()
async def get_count_hosts_db(
session: AsyncSession,
version: Optional[Version] = Version.v1,
q: Optional[str] = None,
date_from: Optional[date] = None,
date_to: Optional[date] = None,
) -> int:
sub_statement = (
f"SELECT host FROM apiecoindex WHERE version = {version.get_version_number()}"
)
if q:
sub_statement += f" AND host LIKE '%{q}%'"
if date_from:
sub_statement += f" AND date >= '{date_from}'"
if date_to:
sub_statement += f" AND date <= '{date_to}'"
sub_statement += " GROUP BY host"
statement = f"SELECT count(*) FROM ({sub_statement}) t"
result = await session.execute(statement=statement)
return result.scalar()
| [
"sqlmodel.select"
] | [((830, 900), 'db.helper.date_filter', 'date_filter', ([], {'statement': 'statement', 'date_from': 'date_from', 'date_to': 'date_to'}), '(statement=statement, date_from=date_from, date_to=date_to)\n', (841, 900), False, 'from db.helper import date_filter\n'), ((780, 811), 'api.ecoindex.models.responses.ApiEcoindex.host.like', 'ApiEcoindex.host.like', (['f"""%{q}%"""'], {}), "(f'%{q}%')\n", (801, 811), False, 'from api.ecoindex.models.responses import ApiEcoindex\n'), ((577, 601), 'sqlmodel.select', 'select', (['ApiEcoindex.host'], {}), '(ApiEcoindex.host)\n', (583, 601), False, 'from sqlmodel import select\n')] |
from datetime import datetime, date
from decimal import Decimal
from typing import Optional, List
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryTravelReimburse(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id: int
history_procedure_id: int
group: str
guardian_id: Optional[int] = None
procedure_id: int
amount: float
detail: str
pdf_path: str
signature_path: str
document_path: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
@router.post("/history_travel_reimburse", response_model=HistoryTravelReimburse)
async def create_history_travel_reimburse(history_travel_reimburse: HistoryTravelReimburse, session: AsyncSession = Depends(get_session)):
session.add(history_travel_reimburse)
await session.commit()
await session.refresh(history_travel_reimburse)
return history_travel_reimburse
@router.get("/history_travel_reimburse/{id}", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse(id: int, session: AsyncSession = Depends(get_session)):
history_travel_reimburses = await session.execute(select(HistoryTravelReimburse).where(HistoryTravelReimburse.id == id))
history_travel_reimburse = history_travel_reimburses.scalars().first()
return history_travel_reimburse
@router.put("/history_travel_reimburse/{id}", response_model=HistoryTravelReimburse)
async def update_history_travel_reimburse(id: int, session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_travel_reimburse/{id}")
async def delete_history_travel_reimburse(session: AsyncSession = Depends(get_session)):
return None
@router.get("/history_travel_reimburse/patient/{patient_id}", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse_patient(patient_id: int, session: AsyncSession = Depends(get_session)):
history_id = await session.execute(select(HistoryTravelReimburse.id).where(HistoryTravelReimburse.patient_id == patient_id))
history_travel_reimburses = await session.execute(select(HistoryTravelReimburse).where(HistoryTravelReimburse.history_id == history_id))
history_travel_reimburse = history_travel_reimburses.scalars().first()
return history_travel_reimburse
@router.get("/history_travel_reimburse", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse_daily(session: AsyncSession = Depends(get_session)):
return None
@router.get("/history_travel_reimburse/{id}", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse_pdf(id: int, session: AsyncSession = Depends(get_session)):
history_travel_reimburses = await session.execute(select(HistoryTravelReimburse.pdf_path).where(HistoryTravelReimburse.id == id))
history_travel_reimburse = history_travel_reimburses.scalars().first()
return history_travel_reimburse
@router.post("/history_travel_reimburse/{id}/document", response_model=HistoryTravelReimburse)
async def upload_document(session: AsyncSession = Depends(get_session)):
return None
@router.post("/history_travel_reimburse/{id}/signature")
async def upload_signature(session: AsyncSession = Depends(get_session)):
return None | [
"sqlmodel.Field"
] | [((295, 306), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (304, 306), False, 'from fastapi import APIRouter, Depends\n'), ((385, 422), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (390, 422), False, 'from sqlmodel import Field, SQLModel\n'), ((953, 973), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (960, 973), False, 'from fastapi import APIRouter, Depends\n'), ((1292, 1312), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1299, 1312), False, 'from fastapi import APIRouter, Depends\n'), ((1713, 1733), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1720, 1733), False, 'from fastapi import APIRouter, Depends\n'), ((1869, 1889), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1876, 1889), False, 'from fastapi import APIRouter, Depends\n'), ((2099, 2119), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2106, 2119), False, 'from fastapi import APIRouter, Depends\n'), ((2654, 2674), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2661, 2674), False, 'from fastapi import APIRouter, Depends\n'), ((2856, 2876), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2863, 2876), False, 'from fastapi import APIRouter, Depends\n'), ((3271, 3291), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3278, 3291), False, 'from fastapi import APIRouter, Depends\n'), ((3420, 3440), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3427, 3440), False, 'from fastapi import APIRouter, Depends\n'), ((1369, 1399), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse'], {}), '(HistoryTravelReimburse)\n', (1375, 1399), False, 'from sqlalchemy import select\n'), ((2161, 2194), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse.id'], {}), '(HistoryTravelReimburse.id)\n', (2167, 2194), False, 'from sqlalchemy import select\n'), ((2305, 2335), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse'], {}), '(HistoryTravelReimburse)\n', (2311, 2335), False, 'from sqlalchemy import select\n'), ((2933, 2972), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse.pdf_path'], {}), '(HistoryTravelReimburse.pdf_path)\n', (2939, 2972), False, 'from sqlalchemy import select\n')] |
import asyncio
import os
from decimal import Decimal
from typing import Optional
from pydantic import condecimal
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlmodel import Field, SQLModel, select
class Restaurant(SQLModel, table=True):
id: int = Field(default=None, primary_key=True)
name: str = Field(index=True)
address: str
currency: str
class MenuItem(SQLModel, table=True):
id: int = Field(default=None, primary_key=True)
name: str
price: condecimal(decimal_places=2)
restaurant_id: Optional[int] = Field(default=None, foreign_key="restaurant.id")
async def main() -> None:
db_url = os.environ.get("RESTAURANT_DB_URL", "sqlite+aiosqlite:///my_db")
db_engine = create_async_engine(db_url)
async with db_engine.begin() as conn:
await conn.run_sync(SQLModel.metadata.create_all)
async with AsyncSession(db_engine, expire_on_commit=False) as session:
# Writing
restaurant = Restaurant(
name="Second best Pizza in town", address="Foo street 1", currency="EUR"
)
session.add(restaurant)
await session.commit()
pizza1 = MenuItem(name="Margherita", price=10.50, restaurant_id=restaurant.id)
pizza2 = MenuItem(name="2xPineapple", price=16.80, restaurant_id=restaurant.id)
session.add_all((pizza1, pizza2))
await session.commit()
# Reading
query = (
select(MenuItem)
.join(Restaurant)
.where(Restaurant.name == "Second best Pizza in town")
)
result = await session.execute(query)
menu_items = result.scalars().all()
assert len(menu_items) == 2
assert menu_items[0] == MenuItem(
id=1, name="Margherita", price=Decimal("10.50"), restaurant_id=restaurant.id
)
if __name__ == "__main__":
asyncio.run(main())
| [
"sqlmodel.select",
"sqlmodel.Field"
] | [((284, 321), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (289, 321), False, 'from sqlmodel import Field, SQLModel, select\n'), ((338, 355), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (343, 355), False, 'from sqlmodel import Field, SQLModel, select\n'), ((445, 482), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (450, 482), False, 'from sqlmodel import Field, SQLModel, select\n'), ((508, 536), 'pydantic.condecimal', 'condecimal', ([], {'decimal_places': '(2)'}), '(decimal_places=2)\n', (518, 536), False, 'from pydantic import condecimal\n'), ((573, 621), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""restaurant.id"""'}), "(default=None, foreign_key='restaurant.id')\n", (578, 621), False, 'from sqlmodel import Field, SQLModel, select\n'), ((663, 727), 'os.environ.get', 'os.environ.get', (['"""RESTAURANT_DB_URL"""', '"""sqlite+aiosqlite:///my_db"""'], {}), "('RESTAURANT_DB_URL', 'sqlite+aiosqlite:///my_db')\n", (677, 727), False, 'import os\n'), ((744, 771), 'sqlalchemy.ext.asyncio.create_async_engine', 'create_async_engine', (['db_url'], {}), '(db_url)\n', (763, 771), False, 'from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine\n'), ((888, 935), 'sqlalchemy.ext.asyncio.AsyncSession', 'AsyncSession', (['db_engine'], {'expire_on_commit': '(False)'}), '(db_engine, expire_on_commit=False)\n', (900, 935), False, 'from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine\n'), ((1791, 1807), 'decimal.Decimal', 'Decimal', (['"""10.50"""'], {}), "('10.50')\n", (1798, 1807), False, 'from decimal import Decimal\n'), ((1455, 1471), 'sqlmodel.select', 'select', (['MenuItem'], {}), '(MenuItem)\n', (1461, 1471), False, 'from sqlmodel import Field, SQLModel, select\n')] |
"""init_db
Revision ID: 23799b5136c5
Revises:
Create Date: 2021-12-11 00:49:58.116933
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '23799b5136c5'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=True),
sa.Column('full_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('hashed_password', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=True),
sa.Column('is_superuser', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=False)
op.create_index(op.f('ix_user_full_name'), 'user', ['full_name'], unique=False)
op.create_index(op.f('ix_user_hashed_password'), 'user', ['hashed_password'], unique=False)
op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
op.create_index(op.f('ix_user_is_active'), 'user', ['is_active'], unique=False)
op.create_index(op.f('ix_user_is_superuser'), 'user', ['is_superuser'], unique=False)
op.create_table('task',
sa.Column('status', sa.Enum('draft', 'in_process', 'delete', 'done', name='taskstatus'), nullable=True),
sa.Column('id', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_task_created_at'), 'task', ['created_at'], unique=False)
op.create_index(op.f('ix_task_id'), 'task', ['id'], unique=False)
op.create_index(op.f('ix_task_title'), 'task', ['title'], unique=False)
op.create_index(op.f('ix_task_user_id'), 'task', ['user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_task_user_id'), table_name='task')
op.drop_index(op.f('ix_task_title'), table_name='task')
op.drop_index(op.f('ix_task_id'), table_name='task')
op.drop_index(op.f('ix_task_created_at'), table_name='task')
op.drop_table('task')
op.drop_index(op.f('ix_user_is_superuser'), table_name='user')
op.drop_index(op.f('ix_user_is_active'), table_name='user')
op.drop_index(op.f('ix_user_id'), table_name='user')
op.drop_index(op.f('ix_user_hashed_password'), table_name='user')
op.drop_index(op.f('ix_user_full_name'), table_name='user')
op.drop_index(op.f('ix_user_email'), table_name='user')
op.drop_table('user')
# ### end Alembic commands ### | [
"sqlmodel.sql.sqltypes.AutoString"
] | [((2498, 2519), 'alembic.op.drop_table', 'op.drop_table', (['"""task"""'], {}), "('task')\n", (2511, 2519), False, 'from alembic import op\n'), ((2906, 2927), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (2919, 2927), False, 'from alembic import op\n'), ((808, 837), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (831, 837), True, 'import sqlalchemy as sa\n'), ((864, 885), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (868, 885), False, 'from alembic import op\n'), ((940, 965), 'alembic.op.f', 'op.f', (['"""ix_user_full_name"""'], {}), "('ix_user_full_name')\n", (944, 965), False, 'from alembic import op\n'), ((1024, 1055), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (1028, 1055), False, 'from alembic import op\n'), ((1120, 1138), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (1124, 1138), False, 'from alembic import op\n'), ((1190, 1215), 'alembic.op.f', 'op.f', (['"""ix_user_is_active"""'], {}), "('ix_user_is_active')\n", (1194, 1215), False, 'from alembic import op\n'), ((1274, 1302), 'alembic.op.f', 'op.f', (['"""ix_user_is_superuser"""'], {}), "('ix_user_is_superuser')\n", (1278, 1302), False, 'from alembic import op\n'), ((1725, 1774), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.id']"], {}), "(['user_id'], ['user.id'])\n", (1748, 1774), True, 'import sqlalchemy as sa\n'), ((1782, 1811), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1805, 1811), True, 'import sqlalchemy as sa\n'), ((1838, 1864), 'alembic.op.f', 'op.f', (['"""ix_task_created_at"""'], {}), "('ix_task_created_at')\n", (1842, 1864), False, 'from alembic import op\n'), ((1924, 1942), 'alembic.op.f', 'op.f', (['"""ix_task_id"""'], {}), "('ix_task_id')\n", (1928, 1942), False, 'from alembic import op\n'), ((1994, 2015), 'alembic.op.f', 'op.f', (['"""ix_task_title"""'], {}), "('ix_task_title')\n", (1998, 2015), False, 'from alembic import op\n'), ((2070, 2093), 'alembic.op.f', 'op.f', (['"""ix_task_user_id"""'], {}), "('ix_task_user_id')\n", (2074, 2093), False, 'from alembic import op\n'), ((2268, 2291), 'alembic.op.f', 'op.f', (['"""ix_task_user_id"""'], {}), "('ix_task_user_id')\n", (2272, 2291), False, 'from alembic import op\n'), ((2330, 2351), 'alembic.op.f', 'op.f', (['"""ix_task_title"""'], {}), "('ix_task_title')\n", (2334, 2351), False, 'from alembic import op\n'), ((2390, 2408), 'alembic.op.f', 'op.f', (['"""ix_task_id"""'], {}), "('ix_task_id')\n", (2394, 2408), False, 'from alembic import op\n'), ((2447, 2473), 'alembic.op.f', 'op.f', (['"""ix_task_created_at"""'], {}), "('ix_task_created_at')\n", (2451, 2473), False, 'from alembic import op\n'), ((2538, 2566), 'alembic.op.f', 'op.f', (['"""ix_user_is_superuser"""'], {}), "('ix_user_is_superuser')\n", (2542, 2566), False, 'from alembic import op\n'), ((2605, 2630), 'alembic.op.f', 'op.f', (['"""ix_user_is_active"""'], {}), "('ix_user_is_active')\n", (2609, 2630), False, 'from alembic import op\n'), ((2669, 2687), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (2673, 2687), False, 'from alembic import op\n'), ((2726, 2757), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (2730, 2757), False, 'from alembic import op\n'), ((2796, 2821), 'alembic.op.f', 'op.f', (['"""ix_user_full_name"""'], {}), "('ix_user_full_name')\n", (2800, 2821), False, 'from alembic import op\n'), ((2860, 2881), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (2864, 2881), False, 'from alembic import op\n'), ((416, 428), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (426, 428), True, 'import sqlalchemy as sa\n'), ((473, 507), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (505, 507), False, 'import sqlmodel\n'), ((549, 583), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (581, 583), False, 'import sqlmodel\n'), ((634, 668), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (666, 668), False, 'import sqlmodel\n'), ((714, 726), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (724, 726), True, 'import sqlalchemy as sa\n'), ((774, 786), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (784, 786), True, 'import sqlalchemy as sa\n'), ((1396, 1463), 'sqlalchemy.Enum', 'sa.Enum', (['"""draft"""', '"""in_process"""', '"""delete"""', '"""done"""'], {'name': '"""taskstatus"""'}), "('draft', 'in_process', 'delete', 'done', name='taskstatus')\n", (1403, 1463), True, 'import sqlalchemy as sa\n'), ((1501, 1513), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1511, 1513), True, 'import sqlalchemy as sa\n'), ((1559, 1572), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1570, 1572), True, 'import sqlalchemy as sa\n'), ((1613, 1647), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1645, 1647), False, 'import sqlmodel\n'), ((1691, 1703), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1701, 1703), True, 'import sqlalchemy as sa\n')] |
from sqlmodel import SQLModel, create_engine, Session
from victor_api.config import settings
engine = create_engine(
url=settings.db.url,
echo=settings.db.echo,
connect_args=settings.db.connect_args
)
def get_session():
with Session(engine) as session:
yield session
def init_db():
SQLModel.metadata.create_all(engine)
| [
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine"
] | [((104, 205), 'sqlmodel.create_engine', 'create_engine', ([], {'url': 'settings.db.url', 'echo': 'settings.db.echo', 'connect_args': 'settings.db.connect_args'}), '(url=settings.db.url, echo=settings.db.echo, connect_args=\n settings.db.connect_args)\n', (117, 205), False, 'from sqlmodel import SQLModel, create_engine, Session\n'), ((316, 352), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (344, 352), False, 'from sqlmodel import SQLModel, create_engine, Session\n'), ((245, 260), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (252, 260), False, 'from sqlmodel import SQLModel, create_engine, Session\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel, create_engine
class Team(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
headquarters: str
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
secret_name: str
age: Optional[int] = None
team_id: Optional[int] = Field(default=None, foreign_key="team.id")
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def main():
create_db_and_tables()
if __name__ == "__main__":
main()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((541, 577), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (554, 577), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((141, 178), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (146, 178), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((275, 312), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (280, 312), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((408, 450), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (413, 450), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((612, 648), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (640, 648), False, 'from sqlmodel import Field, SQLModel, create_engine\n')] |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 9