code stringlengths 110 18.9k | apis list | extract_api stringlengths 123 24.4k |
|---|---|---|
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session

# Router collecting the user/task CRUD endpoints defined in this module.
router = APIRouter()
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(User)
results = session.exec(statement).all()
return results
@router.post("/tasks", response_model=List[TaskRead])
async def get_tasks(user: UserQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == user.id)
results = session.exec(statement).all()
return results
@router.post("/task", response_model=TaskRead)
async def get_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == task.owner_id and Task.id == task.id)
result = session.exec(statement).one_or_none()
return result
@router.post("/create/task", response_model=StandardResponse)
async def create_task(task: TaskCreate, session: Session=Depends(get_session)):
db_task = Task.from_orm(task)
session.add(db_task)
session.commit()
session.refresh(db_task)
return StandardResponse()
@router.post("/create/user", response_model=StandardResponse)
async def create_user(user: UserCreate, session: Session=Depends(get_session)):
db_user = User.from_orm(user)
session.add(db_user)
session.commit()
session.refresh(db_user)
return StandardResponse()
@router.post("/delete/task", response_model=StandardResponse)
async def delete_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.id == task.id and Task.owner_id == task.owner_id)
result = session.exec(statement)
task = result.one_or_none()
if task:
session.delete(task)
session.commit()
return StandardResponse()
return StandardResponse(success="Failure", message="Invalid Task id or Owner id", code=400)
@router.post("/delete/user", response_model=StandardResponse)
async def delete_user(user: UserQuery, session: Session=Depends(get_session)):
statement = select(User).where(User.id == user.id)
result = session.exec(statement)
user = result.one_or_none()
if user:
session.delete(user)
session.commit()
return StandardResponse()
return StandardResponse(success="Failure", message="Invalid User id", code=400)
@router.post("/update/task", response_model=StandardResponse)
async def update_task(task: TaskRead, session: Session=Depends(get_session)):
task = Task.from_orm(task)
session.add(task)
session.commit()
session.refresh(task)
return StandardResponse() | [
"sqlmodel.select"
] | [((165, 176), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (174, 176), False, 'from fastapi import APIRouter, Depends\n'), ((271, 291), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (278, 291), False, 'from fastapi import APIRouter, Depends\n'), ((310, 322), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (316, 322), False, 'from sqlmodel import select, Session\n'), ((504, 524), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (511, 524), False, 'from fastapi import APIRouter, Depends\n'), ((753, 773), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (760, 773), False, 'from fastapi import APIRouter, Depends\n'), ((1056, 1076), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1063, 1076), False, 'from fastapi import APIRouter, Depends\n'), ((1339, 1359), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1346, 1359), False, 'from fastapi import APIRouter, Depends\n'), ((1621, 1641), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1628, 1641), False, 'from fastapi import APIRouter, Depends\n'), ((2119, 2139), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2126, 2139), False, 'from fastapi import APIRouter, Depends\n'), ((2569, 2589), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2576, 2589), False, 'from fastapi import APIRouter, Depends\n'), ((543, 555), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (549, 555), False, 'from sqlmodel import select, Session\n'), ((792, 804), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (798, 804), False, 'from sqlmodel import select, Session\n'), ((1660, 1672), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (1666, 1672), False, 'from sqlmodel import select, Session\n'), ((2158, 2170), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (2164, 2170), False, 'from sqlmodel import 
select, Session\n')] |
import types
from dataclasses import dataclass
from typing import Callable, List, Union
from fastapi import Depends, FastAPI, HTTPException, Query
from sqlmodel import Field, Session, SQLModel, select
# Model generator + container -------------------------------------------------------------
# Bundles the family of SQLModel classes (base, response, creation, table,
# update) that the CRUD endpoints for one resource need.
@dataclass
class MultipleModels:
    path: str  # URL path the endpoints are registered under
    base: SQLModel  # the "...Base" model the variants derive from
    response: SQLModel  # model used to serialize responses

    def __post_init__(self):
        # Derive the remaining variants from the base model at construction.
        self.creation: SQLModel = self.make_creator_cls()
        self.table: SQLModel = self.make_table_cls()
        self.update: SQLModel = self.make_updater_cls()

    @staticmethod
    def make_cls_name(base: type, rename_base_to: str) -> str:
        """For a class name of format ``"ClassBase"``, return a modified name in which
        the substring ``"Base"`` is replaced with the string passed to ``rename_base_to``.

        :param base: The base model. It's name must end with the substring ``"Base"``.
        :param rename_base_to: String to replace `"Base"` with.
        """
        return base.__name__.replace("Base", rename_base_to)

    def make_creator_cls(self) -> SQLModel:
        """From a base model, make and return a creation model. As described in
        https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-herocreate-data-model,
        the creation model is simply a copy of the base model, with the substring ``"Base"`` in the
        class name replaced by the substring ``"Create"``.

        :param base: The base model.
        """
        cls_name = self.make_cls_name(self.base, "Create")
        # The creation model is an empty subclass of the base model.
        return type(cls_name, (self.base,), {})

    def make_updater_cls(self) -> SQLModel:
        """From a base model, make and return an update model. As described in
        https://sqlmodel.tiangolo.com/tutorial/fastapi/update/#heroupdate-model, the update model
        is the same as the base model, but with all fields annotated as ``Optional`` and all field
        defaults set to ``None``.

        :param base: The base model. Note that unlike in ``make_creator``, this is not the base for
        inheritance (all updaters inherit directly from ``SQLModel``) but rather is used to derive
        the output class name, attributes, and type annotations.
        """
        cls_name = self.make_cls_name(self.base, "Update")
        sig = self.base.__signature__
        params = list(sig.parameters)
        # Pulling type via `__signature__` rather than `__annotation__` because
        # this accessor drops the `typing.Union[...]` wrapper for optional fields
        annotations = {p: Union[sig.parameters[p].annotation, None] for p in params}
        defaults = {p: None for p in params}
        attrs = {**defaults, "__annotations__": annotations}
        return type(cls_name, (SQLModel,), attrs)

    def make_table_cls(self) -> SQLModel:
        """From a base model, make and return a table model. As described in
        https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-hero-table-model,
        the table model is the same as the base model, with the addition of the ``table=True`` class
        creation keyword and an ``id`` attribute of type ``Optional[int]`` set to a default value of
        ``Field(default=None, primary_key=True)``.

        :param base: The base model.
        """
        cls_name = self.make_cls_name(self.base, "")
        attrs = dict(id=Field(default=None, primary_key=True))
        annotations = dict(id=Union[int, None])
        attrs.update(dict(__annotations__=annotations))
        # We are using `typing.new_class` (vs. `type`) b/c it supports the `table=True` kwarg.
        # https://twitter.com/simonw/status/1430255521127305216?s=20
        # https://docs.python.org/3/reference/datamodel.html#customizing-class-creation
        return types.new_class(
            cls_name, (self.base,), dict(table=True), lambda ns: ns.update(attrs)
        )
# SQLModel database interface functions ---------------------------------------------------
def create(*, session: Session, table_cls: SQLModel, model: SQLModel) -> SQLModel:
    """Insert *model* as a new row of *table_cls* and return the refreshed row."""
    row = table_cls.from_orm(model)
    session.add(row)
    session.commit()
    session.refresh(row)
    return row
def read_range(*, session: Session, table_cls: SQLModel, offset: int, limit: int) -> List:
    """Return at most *limit* rows of *table_cls*, skipping the first *offset* rows."""
    statement = select(table_cls).offset(offset).limit(limit)
    return session.exec(statement).all()
def read_single(*, session: Session, table_cls: SQLModel, id: int):
    """Fetch one row of *table_cls* by primary key; raise 404 when it is missing."""
    row = session.get(table_cls, id)
    if not row:
        raise HTTPException(status_code=404, detail=f"{table_cls.__name__} not found")
    return row
def update(*, session: Session, table_cls: SQLModel, id: int, model: SQLModel) -> SQLModel:
    """Apply the explicitly-set fields of *model* to the row with the given id.

    Raises a 404 ``HTTPException`` when no such row exists.
    """
    row = session.get(table_cls, id)
    if not row:
        raise HTTPException(status_code=404, detail=f"{table_cls.__name__} not found")
    # Only fields the caller actually set are copied onto the row.
    for field_name, new_value in model.dict(exclude_unset=True).items():
        setattr(row, field_name, new_value)
    session.add(row)
    session.commit()
    session.refresh(row)
    return row
def delete(*, session: Session, table_cls: SQLModel, id: int) -> dict:
    """Delete the row of *table_cls* with the given id; raise 404 when missing."""
    row = session.get(table_cls, id)
    if not row:
        raise HTTPException(status_code=404, detail=f"{table_cls.__name__} not found")
    session.delete(row)
    session.commit()
    return {"ok": True}
# Endpoint registration -------------------------------------------------------------------
@dataclass
class RegisterEndpoints:
    """From a ``MultipleModels`` object, register create, read, update, delete (CRUD) API endpoints.

    :param api: The ``FastAPI`` instance.
    :param get_session: A function which yields a context-managed ``sqlmodel.Session`` object.
    :param models: The ``MultipleModels`` object.
    :param limit: The bounds for an API read requests.
    """

    api: FastAPI
    get_session: Callable
    models: MultipleModels
    # NOTE(review): ``Query`` has no ``lte`` keyword; the comparison constraint
    # is spelled ``le``, so this upper bound is likely not enforced — confirm.
    limit: Query = Query(default=100, lte=100)

    def __post_init__(self):
        # Endpoints are registered as soon as the dataclass is constructed.
        self.register_all()

    def register_all(self):
        self.register_create_endpoint()
        self.register_read_range_endpoint()
        self.register_read_single_endpoint()
        self.register_update_endpoint()
        self.register_delete_endpoint()

    def register_create_endpoint(self):
        # POST <path> -> create a row from the creation model.
        @self.api.post(self.models.path, response_model=self.models.response)
        def endpoint(*, session: Session = Depends(self.get_session), model: self.models.creation):
            return create(session=session, table_cls=self.models.table, model=model)

    def register_read_range_endpoint(self):
        # GET <path> -> read a paginated range of rows.
        @self.api.get(self.models.path, response_model=List[self.models.response])
        def endpoint(
            *, session: Session = Depends(self.get_session), offset: int = 0, limit: int = self.limit,
        ):
            return read_range(
                session=session, table_cls=self.models.table, offset=offset, limit=limit,
            )

    def register_read_single_endpoint(self):
        # GET <path>{id} -> read one row by primary key.
        @self.api.get(self.models.path + "{id}", response_model=self.models.response)
        def endpoint(*, session: Session = Depends(self.get_session), id: int):
            return read_single(session=session, table_cls=self.models.table, id=id)

    def register_update_endpoint(self):
        # PATCH <path>{id} -> partially update one row.
        @self.api.patch(self.models.path + "{id}", response_model=self.models.response)
        def endpoint(
            *, session: Session = Depends(self.get_session), id: int, model: self.models.update,
        ):
            return update(session=session, table_cls=self.models.table, id=id, model=model)

    def register_delete_endpoint(self):
        # DELETE <path>{id} -> delete one row.
        @self.api.delete(self.models.path + "{id}")
        def endpoint(*, session: Session = Depends(self.get_session), id: int):
            return delete(session=session, table_cls=self.models.table, id=id)
def register_endpoints(
    api: FastAPI,
    get_session: Callable,
    models: MultipleModels,
    limit: Query = Query(default=100, le=100),
):
    """Register CRUD endpoints for *models* on *api*.

    :param api: The ``FastAPI`` instance.
    :param get_session: A function which yields a context-managed ``sqlmodel.Session``.
    :param models: The ``MultipleModels`` object describing the resource.
    :param limit: Upper bound for read-range requests.

    Bug fix: the original passed ``lte=100`` — FastAPI's ``Query`` spells the
    less-than-or-equal constraint ``le``, so the bound was silently ignored.
    ``le=100`` makes the limit actually enforced.
    """
    _ = RegisterEndpoints(api, get_session, models, limit)
"sqlmodel.select",
"sqlmodel.Field"
] | [((5955, 5982), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (5960, 5982), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((7999, 8026), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (8004, 8026), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((4522, 4594), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""{table_cls.__name__} not found"""'}), "(status_code=404, detail=f'{table_cls.__name__} not found')\n", (4535, 4594), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((4786, 4858), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""{table_cls.__name__} not found"""'}), "(status_code=404, detail=f'{table_cls.__name__} not found')\n", (4799, 4858), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((5234, 5306), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""{table_cls.__name__} not found"""'}), "(status_code=404, detail=f'{table_cls.__name__} not found')\n", (5247, 5306), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((6441, 6466), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (6448, 6466), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((6767, 6792), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (6774, 6792), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((7157, 7182), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (7164, 7182), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((7463, 7488), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (7470, 7488), False, 'from fastapi import Depends, FastAPI, HTTPException, 
Query\n'), ((7765, 7790), 'fastapi.Depends', 'Depends', (['self.get_session'], {}), '(self.get_session)\n', (7772, 7790), False, 'from fastapi import Depends, FastAPI, HTTPException, Query\n'), ((3369, 3406), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3374, 3406), False, 'from sqlmodel import Field, Session, SQLModel, select\n'), ((4322, 4339), 'sqlmodel.select', 'select', (['table_cls'], {}), '(table_cls)\n', (4328, 4339), False, 'from sqlmodel import Field, Session, SQLModel, select\n')] |
import datetime
from sqlmodel import Field, Relationship, SQLModel
class User(SQLModel, table=True):
    """ORM model mapped to the ``users`` table."""

    __tablename__ = "users"
    id: int = Field(primary_key=True)
    # Idiom fix: pass the callable itself as the factory instead of wrapping
    # it in a redundant ``lambda`` — behavior is identical.
    create_at: datetime.datetime = Field(default_factory=datetime.datetime.utcnow)
    user_name: str
    password: str
    alias: str
| [
"sqlmodel.Field"
] | [((146, 169), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (151, 169), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((235, 261), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (259, 261), False, 'import datetime\n')] |
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
# Two interchangeable test database targets: an on-disk SQLite file and an
# in-memory database (the in-memory one is what the fixtures use by default).
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
    """Test base class wiring SQLModel sessions and FastAPI test clients.

    ``method_*`` members are managed per test method via setup/teardown;
    ``example_*`` members are managed explicitly through the ``*_context``
    context managers (used with hypothesis examples).
    """

    method_client: TestClient = None # type: ignore
    method_session: Session = None # type: ignore
    example_client: TestClient = None # type: ignore
    example_session: Session = None # type: ignore

    def setup_method(self, _method):
        BaseTest.method_session = BaseTest.create_memory_sesssion()
        # BaseTest.method_session = BaseTest.create_file_sesssion()
        BaseTest.method_client = TestClient(app)
        BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session

    def teardown_method(self, _method):
        if BaseTest.method_session is not None:
            db_path = pathlib.Path(TEST_DB_FILE_PATH)
            # Remove file if it wasnt a memory database
            if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
                os.remove(db_path)
            BaseTest.method_session.close()
            BaseTest.method_session = None
        app.dependency_overrides.clear()
        BaseTest.method_client = None

    @classmethod
    def create_file_sesssion(cls):
        # NOTE(review): returning from inside the ``with`` block exits the
        # context manager, which closes the session before the caller uses
        # it — confirm this is intended (see the "yield" question below).
        engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
        SQLModel.metadata.create_all(engine)
        with Session(engine, autoflush=False, autocommit=False) as session:
            return session

    @classmethod
    def create_memory_sesssion(cls):
        engine = create_engine(TEST_DB_MEMORY_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
        SQLModel.metadata.create_all(engine)
        with Session(engine, autoflush=False, autocommit=False) as session:
            # Can this be "yield" instead?
            return session

    @classmethod
    @contextlib.contextmanager
    def example_session_context(cls):
        """
        Used together with hypothesis: create a class-variable to be used in hypothesis. Unset once the test is over.
        Session strategy doesn't seem to work as expected, nor does setup example and teardown example with sql.
        """
        assert not isinstance(cls.example_session, Session)
        try:
            # cls.example_session = cls.create_file_sesssion()
            cls.example_session = cls.create_memory_sesssion()
            yield cls.example_session
        finally:
            if cls.example_session is not None:
                db_path = pathlib.Path(TEST_DB_FILE_PATH)
                # Remove file if it wasnt a memory database
                if cls.example_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
                    os.remove(db_path)
                cls.example_session.close()
                cls.example_session = None

    @classmethod
    @contextlib.contextmanager
    def method_client_context(cls):
        """ Same reasoning as above. """
        # See https://sqlmodel.tiangolo.com/tutorial/fastapi/tests/#pytest-fixtures
        # https://strawberry.rocks/docs/integrations/fastapi#context_getter
        # app.dependency_overrides.clear()
        app.dependency_overrides[get_session] = cls.method_get_session
        cls.method_client = TestClient(app)
        try:
            yield cls.method_client
        finally:
            cls.method_client = None
            app.dependency_overrides.clear()

    @classmethod
    @contextlib.contextmanager
    def example_client_context(cls):
        """ Same reasoning as above. """
        # See https://sqlmodel.tiangolo.com/tutorial/fastapi/tests/#pytest-fixtures
        # https://strawberry.rocks/docs/integrations/fastapi#context_getter
        with cls.example_session_context() as _session:
            app.dependency_overrides[get_session] = cls.example_get_session
            cls.example_client = TestClient(app)
            try:
                yield cls.example_client
            finally:
                cls.example_client = None
                app.dependency_overrides.clear()

    @classmethod
    def method_get_session(cls) -> Session: # type: ignore
        assert isinstance(cls.method_session, Session)
        assert cls.method_session.bind.url.database in {TEST_DB_FILE_PATH, TEST_DB_MEMORY_PATH} # type: ignore
        yield cls.method_session

    @classmethod
    def example_get_session(cls) -> Session: # type: ignore
        assert isinstance(cls.example_session, Session)
        assert cls.example_session.bind.url.database in {TEST_DB_FILE_PATH, TEST_DB_MEMORY_PATH} # type: ignore
        yield cls.example_session

    @classmethod
    def example_get_client(cls) -> TestClient: # type: ignore
        yield cls.example_client

    @pytest.fixture(name='method_client_fixture')
    def method_client_fixture(self) -> TestClient: # type: ignore
        with BaseTest.method_client_context() as client:
            assert isinstance(client, TestClient)
            yield client

    @pytest.fixture(name='example_client_fixture')
    def example_client_fixture(self) -> TestClient: # type: ignore
        assert isinstance(BaseTest.example_client, TestClient)
        yield self.example_client

    @pytest.fixture(name='method_session_fixture')
    def method_session_fixture(self) -> Session: # type: ignore
        assert isinstance(BaseTest.method_session, Session)
        yield BaseTest.method_session

    @pytest.fixture(name='example_session_fixture')
    def example_session_fixture(self) -> Session: # type: ignore
        assert isinstance(BaseTest.example_session, Session)
        yield BaseTest.example_session

    @classmethod
    def get_schema(cls) -> strawberry.Schema:
        return schema

    @pytest.fixture(name='schema_fixture')
    def schema_fixture(self):
        return BaseTest.get_schema()

    @classmethod
    def schema_strategy(cls) -> SearchStrategy:
        """ Deprecated? """
        return st.builds(cls.get_schema)
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((5233, 5277), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""method_client_fixture"""'}), "(name='method_client_fixture')\n", (5247, 5277), False, 'import pytest\n'), ((5483, 5528), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""example_client_fixture"""'}), "(name='example_client_fixture')\n", (5497, 5528), False, 'import pytest\n'), ((5700, 5745), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""method_session_fixture"""'}), "(name='method_session_fixture')\n", (5714, 5745), False, 'import pytest\n'), ((5915, 5961), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""example_session_fixture"""'}), "(name='example_session_fixture')\n", (5929, 5961), False, 'import pytest\n'), ((6220, 6257), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""schema_fixture"""'}), "(name='schema_fixture')\n", (6234, 6257), False, 'import pytest\n'), ((1042, 1057), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (1052, 1057), False, 'from starlette.testclient import TestClient\n'), ((1589, 1621), 'fastapi_server.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (1619, 1621), False, 'from fastapi_server.main import app\n'), ((1730, 1825), 'sqlmodel.create_engine', 'create_engine', (['TEST_DB_URL'], {'connect_args': "{'check_same_thread': False}", 'poolclass': 'StaticPool'}), "(TEST_DB_URL, connect_args={'check_same_thread': False},\n poolclass=StaticPool)\n", (1743, 1825), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((1830, 1866), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1858, 1866), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2042, 2144), 'sqlmodel.create_engine', 'create_engine', (['TEST_DB_MEMORY_URL'], {'connect_args': "{'check_same_thread': False}", 'poolclass': 'StaticPool'}), "(TEST_DB_MEMORY_URL, connect_args={'check_same_thread': False},\n poolclass=StaticPool)\n", 
(2055, 2144), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2149, 2185), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (2177, 2185), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((3751, 3766), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (3761, 3766), False, 'from starlette.testclient import TestClient\n'), ((6434, 6459), 'hypothesis.strategies.builds', 'st.builds', (['cls.get_schema'], {}), '(cls.get_schema)\n', (6443, 6459), True, 'import hypothesis.strategies as st\n'), ((1268, 1299), 'pathlib.Path', 'pathlib.Path', (['TEST_DB_FILE_PATH'], {}), '(TEST_DB_FILE_PATH)\n', (1280, 1299), False, 'import pathlib\n'), ((1880, 1930), 'sqlmodel.Session', 'Session', (['engine'], {'autoflush': '(False)', 'autocommit': '(False)'}), '(engine, autoflush=False, autocommit=False)\n', (1887, 1930), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2199, 2249), 'sqlmodel.Session', 'Session', (['engine'], {'autoflush': '(False)', 'autocommit': '(False)'}), '(engine, autoflush=False, autocommit=False)\n', (2206, 2249), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((3882, 3914), 'fastapi_server.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (3912, 3914), False, 'from fastapi_server.main import app\n'), ((4367, 4382), 'starlette.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (4377, 4382), False, 'from starlette.testclient import TestClient\n'), ((1475, 1493), 'os.remove', 'os.remove', (['db_path'], {}), '(db_path)\n', (1484, 1493), False, 'import os\n'), ((3002, 3033), 'pathlib.Path', 'pathlib.Path', (['TEST_DB_FILE_PATH'], {}), '(TEST_DB_FILE_PATH)\n', (3014, 3033), False, 'import pathlib\n'), ((4520, 4552), 'fastapi_server.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (4550, 4552), False, 'from 
fastapi_server.main import app\n'), ((3217, 3235), 'os.remove', 'os.remove', (['db_path'], {}), '(db_path)\n', (3226, 3235), False, 'import os\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
    """Return *x* rendered as a string with the ``-child`` suffix appended."""
    return "{}-child".format(x)
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
    """Build a minimal Query -> PyBlock -> Load generator for the tests below."""
    parent_cls = entities.Parent
    child_cls = entities.Child
    query = BaseQuery.from_select_statement(select(parent_cls.label))
    assert isinstance(query.hash, str)
    transform = PyBlock(function=transform_func, inputs=[query["label"]], outputs=["newnames"])
    child_load = child_cls.load(insert=True, label=transform["newnames"], type=Const("child_type"))
    assert isinstance(child_load.hash, str)
    return Generator(name="test", extract=query, transforms=[transform], loads=[child_load])
def test_basic_graph_sort(basic_generator: Generator):
    """Ensure a simple Query->PyBlock->Load is sorted correctly."""
    graph = basic_generator._computational_graph()
    assert len(graph) == 3
    query, transform, load = basic_generator._sort_graph()
    assert isinstance(query, BaseQuery)
    assert isinstance(transform, PyBlock)
    assert isinstance(load, Load)
def test_basic_graph_in_place(basic_generator: Generator):
    """Ensure that changes to the output of ._sort_graph() are in place and affect the generator as well."""
    query, transform, load = basic_generator._sort_graph()
    assert isinstance(load, Load)
    # Run the load with fake transform output; the generator's own sorted
    # load must observe the same output (same underlying object, not a copy).
    load.run({transform.hash: {"newnames": ("1", "2")}})
    assert load._output == basic_generator._sorted_loads()[0]._output
    assert isinstance(query, BaseQuery)
    # Mutating the returned query must mutate the generator's extract too.
    query.outputs.append("test")
    assert basic_generator.extract == query
    assert isinstance(transform, PyBlock)
    # Likewise, env changes on the returned transform show up on the generator.
    import_to_add = Import(lib="numpy", lib_alias="np")
    transform.env.imports.append(import_to_add)
    assert basic_generator.transforms[0] == transform
    assert basic_generator.transforms[0].env.imports == [import_to_add]
def test_sorted_loads():
    """Shuffle around the loads and make sure sorted_loads still works."""
    val = Const("test")
    gp_load = entities.GrandParent.load(label=val, type=val)
    u_load = entities.Parent.load(label=val, type=Const("uncle"), grand_parent_id=gp_load)
    p_load = entities.Parent.load(label=val, type=val, grand_parent_id=gp_load)
    c_load = entities.Child.load(label=val, type=val, parent_id=p_load, uncle_id=u_load)
    loads = [gp_load, c_load, p_load, u_load]
    for _ in range(10):
        shuffle(loads)
        gen = Generator(name="test", loads=loads)
        # The grandparent must come first, the child last; the two middle
        # loads are ordered deterministically by hash.
        middle = sorted((u_load, p_load), key=lambda load: load.hash)
        assert gen._sorted_loads() == [gp_load, *middle, c_load]
@pytest.mark.skip
def test_no_extractor(sql_engine: Engine, raw_connection):
    """Shuffle around the loads and make sure sorted_loads still works."""
    # NOTE(review): the docstring appears copy-pasted from test_sorted_loads;
    # this test actually runs a generator without an extractor — confirm.
    entities.Parent.metadata.create_all(sql_engine)
    pyblock = PyBlock(function=transform_func, inputs=[Const("test")], outputs=["newnames"])
    p_load = entities.GrandParent.load(insert=True, label=pyblock["newnames"], type=Const("gp_type"))
    gen = Generator(name="test", transforms=[pyblock], loads=[p_load])
    gen.run(sql_engine)
    # The constant "test" run through transform_func should have been loaded
    # as a GrandParent labeled "test-child".
    with Session(sql_engine) as session:
        session = cast(Session, session)
        statement = select(entities.GrandParent).where(entities.GrandParent.label == "test-child")
        result = session.exec(statement)
        assert result.one()
@pytest.mark.database
def test_dumb_extractor(connection, sql_engine, recreate_meta):
    """End-to-end: insert 100 users, query them, transform labels, load back."""

    class User(Entity, table=True):
        __identifying__ = {"label"}
        label: Optional[str]
        new_label: Optional[str] = None

    User.metadata.create_all(connection)
    num_users = 100
    sess = Session(connection)
    users = [User(label=f"user_{i}") for i in range(num_users)]
    user_le = User._get_load_entity()
    for user in users:
        # Ids are deterministic hashes derived from the identifying fields.
        user.id = user_le._get_hash(user.dict())
        sess.add(user)
    count = sess.exec(select(func.count(User.id))).one()
    assert count == num_users
    connection.commit()
    statement = select(User.id, User.label)
    query = BaseQuery.from_select_statement(statement)
    assert query.length(connection=connection) == num_users
    pyblock = PyBlock(function=transform_func, inputs=[query["label"]])
    u_load = User.load(user=query["id"], new_label=pyblock["out"])
    # A RunEntity row is required so the generator run can reference its id.
    run = RunEntity()
    sess.add(run)
    sess.commit()
    sess.refresh(run)
    gen = Generator(
        name="test",
        extract=query,
        transforms=[pyblock],
        loads=[u_load],
        batch_size=10000,
    )
    connection.commit()
    gen.run(sql_engine, sql_engine, run_id=run.id, ordering=0)
| [
"sqlmodel.func.count",
"sqlmodel.select",
"sqlmodel.Session"
] | [((1168, 1200), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1182, 1200), False, 'import pytest\n'), ((1311, 1331), 'sqlmodel.select', 'select', (['Parent.label'], {}), '(Parent.label)\n', (1317, 1331), False, 'from sqlmodel import Session, func, select\n'), ((1344, 1388), 'dbgen.core.node.query.BaseQuery.from_select_statement', 'BaseQuery.from_select_statement', (['select_stmt'], {}), '(select_stmt)\n', (1375, 1388), False, 'from dbgen.core.node.query import BaseQuery\n'), ((1442, 1521), 'dbgen.core.node.transforms.PyBlock', 'PyBlock', ([], {'function': 'transform_func', 'inputs': "[query['label']]", 'outputs': "['newnames']"}), "(function=transform_func, inputs=[query['label']], outputs=['newnames'])\n", (1449, 1521), False, 'from dbgen.core.node.transforms import PyBlock\n'), ((1659, 1732), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'extract': 'query', 'transforms': '[pyblock]', 'loads': '[load]'}), "(name='test', extract=query, transforms=[pyblock], loads=[load])\n", (1668, 1732), False, 'from dbgen.core.generator import Generator\n'), ((2727, 2762), 'dbgen.core.func.Import', 'Import', ([], {'lib': '"""numpy"""', 'lib_alias': '"""np"""'}), "(lib='numpy', lib_alias='np')\n", (2733, 2762), False, 'from dbgen.core.func import Import\n'), ((3049, 3062), 'dbgen.core.args.Const', 'Const', (['"""test"""'], {}), "('test')\n", (3054, 3062), False, 'from dbgen.core.args import Const\n'), ((3077, 3123), 'tests.example.entities.GrandParent.load', 'entities.GrandParent.load', ([], {'label': 'val', 'type': 'val'}), '(label=val, type=val)\n', (3102, 3123), True, 'import tests.example.entities as entities\n'), ((3228, 3294), 'tests.example.entities.Parent.load', 'entities.Parent.load', ([], {'label': 'val', 'type': 'val', 'grand_parent_id': 'gp_load'}), '(label=val, type=val, grand_parent_id=gp_load)\n', (3248, 3294), True, 'import tests.example.entities as entities\n'), ((3308, 3383), 
'tests.example.entities.Child.load', 'entities.Child.load', ([], {'label': 'val', 'type': 'val', 'parent_id': 'p_load', 'uncle_id': 'u_load'}), '(label=val, type=val, parent_id=p_load, uncle_id=u_load)\n', (3327, 3383), True, 'import tests.example.entities as entities\n'), ((3837, 3884), 'tests.example.entities.Parent.metadata.create_all', 'entities.Parent.metadata.create_all', (['sql_engine'], {}), '(sql_engine)\n', (3872, 3884), True, 'import tests.example.entities as entities\n'), ((4090, 4150), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'transforms': '[pyblock]', 'loads': '[p_load]'}), "(name='test', transforms=[pyblock], loads=[p_load])\n", (4099, 4150), False, 'from dbgen.core.generator import Generator\n'), ((4728, 4747), 'sqlmodel.Session', 'Session', (['connection'], {}), '(connection)\n', (4735, 4747), False, 'from sqlmodel import Session, func, select\n'), ((5072, 5099), 'sqlmodel.select', 'select', (['User.id', 'User.label'], {}), '(User.id, User.label)\n', (5078, 5099), False, 'from sqlmodel import Session, func, select\n'), ((5112, 5154), 'dbgen.core.node.query.BaseQuery.from_select_statement', 'BaseQuery.from_select_statement', (['statement'], {}), '(statement)\n', (5143, 5154), False, 'from dbgen.core.node.query import BaseQuery\n'), ((5229, 5286), 'dbgen.core.node.transforms.PyBlock', 'PyBlock', ([], {'function': 'transform_func', 'inputs': "[query['label']]"}), "(function=transform_func, inputs=[query['label']])\n", (5236, 5286), False, 'from dbgen.core.node.transforms import PyBlock\n'), ((5364, 5375), 'dbgen.core.metadata.RunEntity', 'RunEntity', ([], {}), '()\n', (5373, 5375), False, 'from dbgen.core.metadata import RunEntity\n'), ((5444, 5541), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'extract': 'query', 'transforms': '[pyblock]', 'loads': '[u_load]', 'batch_size': '(10000)'}), "(name='test', extract=query, transforms=[pyblock], loads=[u_load],\n batch_size=10000)\n", (5453, 
5541), False, 'from dbgen.core.generator import Generator\n'), ((3462, 3476), 'random.shuffle', 'shuffle', (['loads'], {}), '(loads)\n', (3469, 3476), False, 'from random import shuffle\n'), ((3491, 3526), 'dbgen.core.generator.Generator', 'Generator', ([], {'name': '"""test"""', 'loads': 'loads'}), "(name='test', loads=loads)\n", (3500, 3526), False, 'from dbgen.core.generator import Generator\n'), ((4185, 4204), 'sqlmodel.Session', 'Session', (['sql_engine'], {}), '(sql_engine)\n', (4192, 4204), False, 'from sqlmodel import Session, func, select\n'), ((4235, 4257), 'typing.cast', 'cast', (['Session', 'session'], {}), '(Session, session)\n', (4239, 4257), False, 'from typing import Optional, cast\n'), ((1590, 1609), 'dbgen.core.args.Const', 'Const', (['"""child_type"""'], {}), "('child_type')\n", (1595, 1609), False, 'from dbgen.core.args import Const\n'), ((3174, 3188), 'dbgen.core.args.Const', 'Const', (['"""uncle"""'], {}), "('uncle')\n", (3179, 3188), False, 'from dbgen.core.args import Const\n'), ((4062, 4078), 'dbgen.core.args.Const', 'Const', (['"""gp_type"""'], {}), "('gp_type')\n", (4067, 4078), False, 'from dbgen.core.args import Const\n'), ((3940, 3953), 'dbgen.core.args.Const', 'Const', (['"""test"""'], {}), "('test')\n", (3945, 3953), False, 'from dbgen.core.args import Const\n'), ((4278, 4306), 'sqlmodel.select', 'select', (['entities.GrandParent'], {}), '(entities.GrandParent)\n', (4284, 4306), False, 'from sqlmodel import Session, func, select\n'), ((4974, 4993), 'sqlmodel.func.count', 'func.count', (['User.id'], {}), '(User.id)\n', (4984, 4993), False, 'from sqlmodel import Session, func, select\n')] |
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from sqlmodel import Session, select
from sqlalchemy.exc import IntegrityError
from typing import List
import datetime as dt
from app.src.common.security import get_current_user
from app.src.common.utils import profiling_api
from app.src.models.app_user import AppUser
from app.src.models.product_type import (
ProductType,
ProductTypeRead,
ProductTypeCreate,
ProductTypeUpdate,
)
from app.src.db.engine import get_session
router = APIRouter()
# A scopo didattico inserita la validazione di producttype_id con Path:
# - non potrà essere < 1
async def get_producttype_or_404(
    *,
    session: Session = Depends(get_session),
    producttype_id: int = Path(..., ge=1),
    current_user: AppUser = Depends(get_current_user),
):
    """Dependency: load a ProductType by id or raise 404.

    Returns a dict with keys:
      - "db_pt": the ProductType row
      - "username": the authenticated user's name
      - "start_time": request start timestamp (for profiling)
    """
    start_time = dt.datetime.now()
    db_pt = session.get(ProductType, producttype_id)
    # Guard clause instead of nested if/else; the previous `except KeyError`
    # branch was unreachable (Session.get does not raise KeyError).
    if not db_pt:
        raise HTTPException(status_code=404, detail="Product type not found")
    return {
        "db_pt": db_pt,
        "username": current_user.username,
        "start_time": start_time,
    }
@router.get("/", response_model=List[ProductTypeRead])
# lte -> less than or equal
async def read_product_types(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    limit: int = Query(default=100, lte=100),
    current_user: AppUser = Depends(get_current_user),
):
    """Return all product types, paginated through offset/limit."""
    start_time = dt.datetime.now()
    statement = select(ProductType).offset(offset).limit(limit)
    product_types = session.exec(statement).all()
    profiling_api("ProductType:get:all", start_time, current_user.username)
    return product_types
@router.get("/{producttype_id}", response_model=ProductTypeRead)
async def read_product_type(
    *, producttype_id: int, db_pt: ProductType = Depends(get_producttype_or_404)
):
    """Return a single product type, resolved by the 404-checking dependency."""
    result = db_pt["db_pt"]
    profiling_api(
        f"ProductType:read:by_id:{producttype_id}",
        db_pt["start_time"],
        db_pt["username"],
    )
    return result
@router.post("/", response_model=ProductTypeRead)
async def create_product_type(
    *,
    session: Session = Depends(get_session),
    product_type: ProductTypeCreate,
    current_user: AppUser = Depends(get_current_user),
):
    """
    Create a product type.

    Raises HTTP 404 (kept as-is for API compatibility) when a product type
    with the same name already exists.
    """
    start_time = dt.datetime.now()
    try:
        db_pt = ProductType.from_orm(product_type)
        session.add(db_pt)
        session.commit()
        session.refresh(db_pt)
    except IntegrityError:
        # BUG FIX: roll back the failed transaction, otherwise the session is
        # left in a broken state for any subsequent use.
        session.rollback()
        raise HTTPException(
            status_code=404, detail="Impossible to create product type with same name"
        )
    profiling_api("ProductType:insert:single", start_time, current_user.username)
    return db_pt
@router.patch("/{producttype_id}", response_model=ProductTypeRead)
async def update_product_type(
    *,
    producttype_id: int,
    session: Session = Depends(get_session),
    pt: ProductTypeUpdate,
    db_pt: ProductType = Depends(get_producttype_or_404),
):
    """
    Modify a product type
    """
    # exclude_unset=True applies only the fields the client actually sent.
    target = db_pt["db_pt"]
    for field_name, new_value in pt.dict(exclude_unset=True).items():
        setattr(target, field_name, new_value)
    session.add(target)
    session.commit()
    session.refresh(target)
    profiling_api(
        f"ProductType:update:by_id:{producttype_id}",
        db_pt["start_time"],
        db_pt["username"],
    )
    return target
@router.delete("/{producttype_id}")
async def delete_product_type(
    *,
    producttype_id: int,
    session: Session = Depends(get_session),
    db_pt: ProductType = Depends(get_producttype_or_404),
):
    """
    Delete and remove an existing product type by id; it must be >= 1
    """
    target = db_pt["db_pt"]
    session.delete(target)
    session.commit()
    profiling_api(
        f"ProductType:delete:by_id:{producttype_id}",
        db_pt["start_time"],
        db_pt["username"],
    )
    return {"ok": True}
| [
"sqlmodel.select"
] | [((518, 529), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (527, 529), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((693, 713), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (700, 713), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((741, 756), 'fastapi.Path', 'Path', (['...'], {'ge': '(1)'}), '(..., ge=1)\n', (745, 756), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((786, 811), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (793, 811), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((833, 850), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (848, 850), True, 'import datetime as dt\n'), ((1435, 1455), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1442, 1455), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1495, 1522), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (1500, 1522), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1552, 1577), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (1559, 1577), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1654, 1671), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (1669, 1671), True, 'import datetime as dt\n'), ((1764, 1835), 'app.src.common.utils.profiling_api', 'profiling_api', (['"""ProductType:get:all"""', 'start_time', 'current_user.username'], {}), "('ProductType:get:all', start_time, current_user.username)\n", (1777, 1835), False, 'from app.src.common.utils import profiling_api\n'), ((2006, 2037), 'fastapi.Depends', 'Depends', (['get_producttype_or_404'], {}), '(get_producttype_or_404)\n', (2013, 2037), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, 
Path\n'), ((2092, 2194), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""ProductType:read:by_id:{producttype_id}"""', "db_pt['start_time']", "db_pt['username']"], {}), "(f'ProductType:read:by_id:{producttype_id}', db_pt[\n 'start_time'], db_pt['username'])\n", (2105, 2194), False, 'from app.src.common.utils import profiling_api\n'), ((2360, 2380), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2367, 2380), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((2447, 2472), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (2454, 2472), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((2536, 2553), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (2551, 2553), True, 'import datetime as dt\n'), ((2854, 2931), 'app.src.common.utils.profiling_api', 'profiling_api', (['"""ProductType:insert:single"""', 'start_time', 'current_user.username'], {}), "('ProductType:insert:single', start_time, current_user.username)\n", (2867, 2931), False, 'from app.src.common.utils import profiling_api\n'), ((3104, 3124), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3111, 3124), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3178, 3209), 'fastapi.Depends', 'Depends', (['get_producttype_or_404'], {}), '(get_producttype_or_404)\n', (3185, 3209), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3592, 3696), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""ProductType:update:by_id:{producttype_id}"""', "db_pt['start_time']", "db_pt['username']"], {}), "(f'ProductType:update:by_id:{producttype_id}', db_pt[\n 'start_time'], db_pt['username'])\n", (3605, 3696), False, 'from app.src.common.utils import profiling_api\n'), ((3870, 3890), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3877, 3890), False, 'from fastapi import 
APIRouter, Depends, HTTPException, Query, Path\n'), ((3917, 3948), 'fastapi.Depends', 'Depends', (['get_producttype_or_404'], {}), '(get_producttype_or_404)\n', (3924, 3948), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((4135, 4239), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""ProductType:delete:by_id:{producttype_id}"""', "db_pt['start_time']", "db_pt['username']"], {}), "(f'ProductType:delete:by_id:{producttype_id}', db_pt[\n 'start_time'], db_pt['username'])\n", (4148, 4239), False, 'from app.src.common.utils import profiling_api\n'), ((2579, 2613), 'app.src.models.product_type.ProductType.from_orm', 'ProductType.from_orm', (['product_type'], {}), '(product_type)\n', (2599, 2613), False, 'from app.src.models.product_type import ProductType, ProductTypeRead, ProductTypeCreate, ProductTypeUpdate\n'), ((1127, 1190), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Product type not found"""'}), "(status_code=404, detail='Product type not found')\n", (1140, 1190), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1226, 1289), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""Product type not found"""'}), "(status_code=400, detail='Product type not found')\n", (1239, 1289), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((2738, 2832), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Impossible to create product type with same name"""'}), "(status_code=404, detail=\n 'Impossible to create product type with same name')\n", (2751, 2832), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1705, 1724), 'sqlmodel.select', 'select', (['ProductType'], {}), '(ProductType)\n', (1711, 1724), False, 'from sqlmodel import Session, select\n')] |
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistorySummaryTreatmsummaryConference(SQLModel, table=True):
    """History/link record tying an order and a conference to a treatment-summary
    conference (semantics inferred from field names -- TODO confirm)."""

    id: Optional[int] = Field(default=None, primary_key=True)
    history_id_order: int
    history_id_conference: int
    summary_treatmsummary_conference_id: int
    # Workflow state label; allowed values not visible in this file.
    state: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class SummaryTreatmsummaryConference(SQLModel, table=True):
    """Treatment-summary conference record with pre/post-operation flags."""

    id: Optional[int] = Field(default=None, primary_key=True)
    problem: str
    question: str
    summary_plan: str
    surgeon_summary: str
    # Boolean flags for pre/post-operation steps (arterial blood gas,
    # jaw surgery, computing design, 3D print) -- presumed from names.
    pre_operation_abg: bool
    post_operation_abg: bool
    pre_operation_redo_abg: bool
    pre_operation_jaw_surgery: bool
    pre_operation_computing_design: bool
    pre_operation_3d_print: bool
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class SummaryTreatmsummaryConferenceDoctorMap(SQLModel, table=True):
    """Many-to-many map between summary conferences and doctors."""

    id: Optional[int] = Field(default=None, primary_key=True)
    summary_treatmsummary_conference_id: int
    doctor_id: int
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
@router.post("/history_summary_conference", response_model=HistorySummaryTreatmsummaryConference)
async def create_history_summary_conference(history_summary_conference: HistorySummaryTreatmsummaryConference, session: AsyncSession = Depends(get_session)):
    """Persist a new history record and return it with DB-generated fields."""
    session.add(history_summary_conference)
    await session.commit()
    await session.refresh(history_summary_conference)
    return history_summary_conference
@router.post("/summary_conference", response_model=SummaryTreatmsummaryConference)
async def create_summary_conference(summary_conference: SummaryTreatmsummaryConference, session: AsyncSession = Depends(get_session)):
    """Persist a new summary conference and return it with DB-generated fields."""
    session.add(summary_conference)
    await session.commit()
    await session.refresh(summary_conference)
    return summary_conference
@router.get("/history_summary_conference/{id}", response_model=HistorySummaryTreatmsummaryConference)
async def get_history_summary_conference(id: int, session: AsyncSession = Depends(get_session)):
    """Fetch one history record by primary key; returns None (-> null body) when absent."""
    history_summary_conferences = await session.execute(select(HistorySummaryTreatmsummaryConference).where(HistorySummaryTreatmsummaryConference.id == id))
    history_summary_conference = history_summary_conferences.scalars().first()
    return history_summary_conference
@router.put("/history_summary_conference/{id}", response_model=HistorySummaryTreatmsummaryConference)
async def update_history_summary_conference(id: int, session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub -- always returns None despite the response_model.
    return None
@router.delete("/history_summary_conference/{id}")
async def delete_history_summary_conference(session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub -- does not delete anything yet.
    return None
# BUG FIX: this route duplicated "/history_summary_conference/{id}" (a
# copy-paste of the handler above), making this endpoint unreachable because
# FastAPI dispatches to the first matching route.
@router.delete("/summary_conference/{id}")
async def delete_summary_conference(session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub -- does not delete anything yet.
    return None
"sqlmodel.Field"
] | [((256, 267), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (265, 267), False, 'from fastapi import APIRouter, Depends\n'), ((361, 398), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (366, 398), False, 'from sqlmodel import Field, SQLModel\n'), ((709, 746), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (714, 746), False, 'from sqlmodel import Field, SQLModel\n'), ((1231, 1268), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1236, 1268), False, 'from sqlmodel import Field, SQLModel\n'), ((1675, 1695), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1682, 1695), False, 'from fastapi import APIRouter, Depends\n'), ((2058, 2078), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2065, 2078), False, 'from fastapi import APIRouter, Depends\n'), ((2398, 2418), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2405, 2418), False, 'from fastapi import APIRouter, Depends\n'), ((2877, 2897), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2884, 2897), False, 'from fastapi import APIRouter, Depends\n'), ((3037, 3057), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3044, 3057), False, 'from fastapi import APIRouter, Depends\n'), ((3189, 3209), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3196, 3209), False, 'from fastapi import APIRouter, Depends\n'), ((2477, 2522), 'sqlalchemy.select', 'select', (['HistorySummaryTreatmsummaryConference'], {}), '(HistorySummaryTreatmsummaryConference)\n', (2483, 2522), False, 'from sqlalchemy import select\n')] |
from sqlalchemy import inspect
from sqlalchemy.engine.reflection import Inspector
from sqlmodel import create_engine
def test_create_db_and_table(clear_sqlmodel):
    """Tutorial 003 should create the Hero table in an in-memory SQLite DB."""
    from docs_src.tutorial.create_db_and_table import tutorial003 as mod

    mod.sqlite_url = "sqlite://"
    mod.engine = create_engine(mod.sqlite_url)
    mod.create_db_and_tables()
    inspector: Inspector = inspect(mod.engine)
    table_name = str(mod.Hero.__tablename__)
    assert inspector.has_table(table_name)
| [
"sqlmodel.create_engine"
] | [((289, 318), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (302, 318), False, 'from sqlmodel import create_engine\n'), ((323, 349), 'docs_src.tutorial.create_db_and_table.tutorial003.create_db_and_tables', 'mod.create_db_and_tables', ([], {}), '()\n', (347, 349), True, 'from docs_src.tutorial.create_db_and_table import tutorial003 as mod\n'), ((372, 391), 'sqlalchemy.inspect', 'inspect', (['mod.engine'], {}), '(mod.engine)\n', (379, 391), False, 'from sqlalchemy import inspect\n')] |
from sqlmodel import SQLModel, Field
import uuid as uuid_pkg
from typing import Optional
class FilesBase(SQLModel):
    """Fields shared by all file schemas."""

    name: str
class Files(FilesBase, table=True):
    """Database table for stored files."""

    id: int = Field(default=None, primary_key=True)
    # Stable public identifier, generated server-side and indexed for lookups.
    uuid: uuid_pkg.UUID = Field(
        default_factory=uuid_pkg.uuid4,
        index=True,
        nullable=False,
    )
    # Download counter, starts at zero.
    count_download: int = Field(default=0)
count_download: int = Field(default=0)
class FilesCreate(FilesBase):
    """Creation payload: only the shared `name` field."""

    pass
| [
"sqlmodel.Field"
] | [((184, 221), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (189, 221), False, 'from sqlmodel import SQLModel, Field\n'), ((248, 313), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'uuid_pkg.uuid4', 'index': '(True)', 'nullable': '(False)'}), '(default_factory=uuid_pkg.uuid4, index=True, nullable=False)\n', (253, 313), False, 'from sqlmodel import SQLModel, Field\n'), ((371, 387), 'sqlmodel.Field', 'Field', ([], {'default': '(0)'}), '(default=0)\n', (376, 387), False, 'from sqlmodel import SQLModel, Field\n')] |
from sqlmodel import Session, select
from database import UserRead, PostCreate, UserCreate, User, Post
from typing import Union
from datetime import datetime
def create_object(
    session: Session,
    model: Union[User, Post],
    request_data: Union[UserCreate, PostCreate],
    user: UserRead = None,
    isPost: bool = False,
) -> dict:
    """Build a `model` row from `request_data`, persist it, and return it.

    For posts, the author's name is stamped onto the payload before
    conversion and the created/updated timestamps are set afterwards.
    """
    if isPost:
        request_data.author_name = user.name
    instance = model.from_orm(request_data)
    if isPost:
        instance.updated_at = datetime.utcnow()
        instance.created_at = datetime.utcnow()
    session.add(instance)
    session.commit()
    session.refresh(instance)
    return instance
def get_objects(
    session: Session, model: Union[User, Post], offset: int, limit: int
) -> list:
    """Return one page of `model` rows, controlled by offset/limit."""
    statement = select(model).offset(offset).limit(limit)
    return session.exec(statement).all()
def get_object(
    session: Session,
    model: Union[User, Post],
    criteria: Union[int, str],
    isUser: bool = False,
) -> Union[User, Post]:
    """Fetch a single row.

    When isUser is True, `criteria` is an email looked up on `model.email`;
    otherwise `criteria` is a primary key looked up on `model`.

    Raises:
        Exception: when isUser is True and no user matches the email.
    """
    if isUser:
        statement = select(model).where(model.email == criteria)
        user = session.exec(statement).first()
        if not user:
            raise Exception("User not found")
        return user
    # BUG FIX: previously hard-coded `Post` here, silently ignoring the
    # `model` argument for non-user lookups.
    return session.get(model, criteria)
def patch_object(
    session: Session,
    old_object: Union[User, Post],
    request_data: dict,
    isPost: bool = False,
) -> Union[User, Post]:
    """Apply the key/value pairs in request_data to old_object and persist."""
    for attr_name, attr_value in request_data.items():
        setattr(old_object, attr_name, attr_value)
    if isPost:
        # Posts track their last modification time.
        old_object.updated_at = datetime.utcnow()
    session.add(old_object)
    session.commit()
    session.refresh(old_object)
    return old_object
def delete_object(session: Session, object_: Union[User, Post]) -> dict:
    """Delete `object_` from the database and return {"ok": True}."""
    session.delete(object_)
    session.commit()
    return {"ok": True}
| [
"sqlmodel.select"
] | [((574, 591), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (589, 591), False, 'from datetime import datetime\n'), ((518, 535), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (533, 535), False, 'from datetime import datetime\n'), ((1628, 1645), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1643, 1645), False, 'from datetime import datetime\n'), ((1076, 1089), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (1082, 1089), False, 'from sqlmodel import Session, select\n'), ((822, 835), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (828, 835), False, 'from sqlmodel import Session, select\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel, Relationship, Column
from sqlalchemy_utils.types import TSVectorType
from .db import stand_by_models, stand_by_db
# Presumably prepares/registers models before table definitions -- see .db module.
stand_by_models()
class Hero(SQLModel, table=True):
    """Hero table with a tsvector column for full-text search."""

    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    content: str
    age: Optional[int] = None
    # Search vector derived from the "name" and "content" columns.
    search_vector: Optional[str] = Field(
        sa_column=Column(
            TSVectorType(
                "name",
                "content",
                # weights={"name": "A", "secret_name": "B", "age": "D"},
            )
        )
    )
class Parents(SQLModel, table=True):
    """Parent rows, referenced by Children.parent_id."""

    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    # children = orm.relationship("Children")
class Children(SQLModel, table=True):
    """Child rows with an optional foreign key to Parents."""

    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    parent_id: Optional[int] = Field(default=None, foreign_key="parents.id")
# Presumably finalizes the database setup -- see .db module.
stand_by_db()
"sqlmodel.Field"
] | [((260, 297), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (265, 297), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((670, 707), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (675, 707), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((832, 869), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (837, 869), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((915, 960), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""parents.id"""'}), "(default=None, foreign_key='parents.id')\n", (920, 960), False, 'from sqlmodel import Field, SQLModel, Relationship, Column\n'), ((439, 470), 'sqlalchemy_utils.types.TSVectorType', 'TSVectorType', (['"""name"""', '"""content"""'], {}), "('name', 'content')\n", (451, 470), False, 'from sqlalchemy_utils.types import TSVectorType\n')] |
from sqlmodel import SQLModel, Relationship
from typing import List
from app.models.base_uuid_model import BaseUUIDModel
class RoleBase(SQLModel):
    """Fields shared by all role schemas."""

    name: str
    description: str
class Role(BaseUUIDModel, RoleBase, table=True):
    """Role table; one role is shared by many users ('selectin' eager loading)."""

    users: List["User"] = Relationship(back_populates="role", sa_relationship_kwargs={"lazy": "selectin"})
| [
"sqlmodel.Relationship"
] | [((263, 348), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""role"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='role', sa_relationship_kwargs={'lazy': 'selectin'}\n )\n", (275, 348), False, 'from sqlmodel import SQLModel, Relationship\n')] |
from typing import TYPE_CHECKING, Optional
from uuid import UUID
from sqlalchemy.orm import joinedload
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import ORMUtils
from joj.horse.utils.base import is_uuid
if TYPE_CHECKING:
from joj.horse.models import Problem, ProblemSet
class ProblemProblemSetLink(ORMUtils, table=True):  # type: ignore[call-arg]
    """Association table linking problems to problem sets, with an ordering position."""

    __tablename__ = "problem_problem_set_links"

    problem_id: UUID = Field(
        sa_column=Column(
            GUID, ForeignKey("problems.id", ondelete="CASCADE"), primary_key=True
        ),
    )
    problem_set_id: UUID = Field(
        sa_column=Column(
            GUID, ForeignKey("problem_sets.id", ondelete="CASCADE"), primary_key=True
        ),
    )
    # Position of the problem within the set (server default "0").
    position: int = Field(
        index=True, nullable=False, sa_column_kwargs={"server_default": "0"}
    )

    problem: "Problem" = Relationship(back_populates="problem_problem_set_links")
    problem_set: "ProblemSet" = Relationship(back_populates="problem_problem_set_links")

    @classmethod
    async def find_by_problem_set_and_problem(
        cls, problem_set: str, problem: str
    ) -> Optional["ProblemProblemSetLink"]:
        """Resolve a link row; each argument may be a UUID or a URL slug.

        NOTE(review): the original author flagged this as buggy/untested and
        questioned whether the joined query beats three simple ones.
        """
        from joj.horse import models

        statement = cls.sql_select().options(
            joinedload(cls.problem_set, innerjoin=True),
            joinedload(cls.problem, innerjoin=True),
        )
        if is_uuid(problem_set):
            statement = statement.where(cls.problem_set_id == problem_set)
        else:
            statement = statement.where(models.ProblemSet.url == problem_set)
        if is_uuid(problem):
            statement = statement.where(cls.problem_id == problem)
        else:
            statement = statement.where(models.Problem.url == problem)
        result = await cls.session_exec(statement)
        # BUG FIX: debug leftovers (loguru logging plus `result.all()`)
        # exhausted the result before one_or_none() was called, so the lookup
        # could never return the matched row.
        return result.one_or_none()
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((856, 931), 'sqlmodel.Field', 'Field', ([], {'index': '(True)', 'nullable': '(False)', 'sa_column_kwargs': "{'server_default': '0'}"}), "(index=True, nullable=False, sa_column_kwargs={'server_default': '0'})\n", (861, 931), False, 'from sqlmodel import Field, Relationship\n'), ((972, 1028), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_problem_set_links"""'}), "(back_populates='problem_problem_set_links')\n", (984, 1028), False, 'from sqlmodel import Field, Relationship\n'), ((1061, 1117), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_problem_set_links"""'}), "(back_populates='problem_problem_set_links')\n", (1073, 1117), False, 'from sqlmodel import Field, Relationship\n'), ((1602, 1622), 'joj.horse.utils.base.is_uuid', 'is_uuid', (['problem_set'], {}), '(problem_set)\n', (1609, 1622), False, 'from joj.horse.utils.base import is_uuid\n'), ((1802, 1818), 'joj.horse.utils.base.is_uuid', 'is_uuid', (['problem'], {}), '(problem)\n', (1809, 1818), False, 'from joj.horse.utils.base import is_uuid\n'), ((2015, 2037), 'loguru.logger.info', 'logger.info', (['statement'], {}), '(statement)\n', (2026, 2037), False, 'from loguru import logger\n'), ((1483, 1526), 'sqlalchemy.orm.joinedload', 'joinedload', (['cls.problem_set'], {'innerjoin': '(True)'}), '(cls.problem_set, innerjoin=True)\n', (1493, 1526), False, 'from sqlalchemy.orm import joinedload\n'), ((1540, 1579), 'sqlalchemy.orm.joinedload', 'joinedload', (['cls.problem'], {'innerjoin': '(True)'}), '(cls.problem, innerjoin=True)\n', (1550, 1579), False, 'from sqlalchemy.orm import joinedload\n'), ((592, 637), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""problems.id"""'], {'ondelete': '"""CASCADE"""'}), "('problems.id', ondelete='CASCADE')\n", (602, 637), False, 'from sqlalchemy.schema import Column, ForeignKey\n'), ((751, 800), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""problem_sets.id"""'], {'ondelete': '"""CASCADE"""'}), 
"('problem_sets.id', ondelete='CASCADE')\n", (761, 800), False, 'from sqlalchemy.schema import Column, ForeignKey\n')] |
from sqlmodel import SQLModel, Field
from typing import Optional, List
from pydantic import validator
# --- model ---
class User(SQLModel, table=True):
    """User table. NOTE(review): password is stored as a plain str here --
    no hashing is visible in this file; confirm it happens elsewhere."""

    id: Optional[int] = Field(None, primary_key=True)
    username: str
    password: str
# --- serializers ---
class UserOut(SQLModel):
    """Public representation of a user (password omitted)."""

    username: str
class UserIn(SQLModel):
    """Signup payload. NOTE(review): confirm_password is currently NOT
    validated -- the validator below is commented out."""

    username: str
    password: str
    confirm_password: str

    # @validator(confirm_password)
    # def validate_password(cls, v, values):
    #     if v and v != values['password']:
    #         raise ValueError("aaaa")
    #     return v
# Convenience alias for list responses.
UserList = List[UserOut]
"sqlmodel.Field"
] | [((178, 207), 'sqlmodel.Field', 'Field', (['None'], {'primary_key': '(True)'}), '(None, primary_key=True)\n', (183, 207), False, 'from sqlmodel import SQLModel, Field\n')] |
from datetime import datetime
try:
from humps.main import depascalize
from sqlalchemy import Column, DateTime
from sqlalchemy.orm.decl_api import declared_attr
from sqlmodel import Field, SQLModel
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Model(SQLModel):
    """
    Abstract model providing `id`, `date_created` and `date_updated` fields.
    And also automatic table naming to `snake_case`.
    """

    id: int = Field(primary_key=True)
    # Set once at INSERT time (client-side default; naive UTC timestamp).
    date_created: datetime = Field(sa_column=Column(DateTime, default=datetime.utcnow))
    # Refreshed on every UPDATE (client-side onupdate; naive UTC timestamp).
    date_updated: datetime = Field(sa_column=Column(DateTime, onupdate=datetime.utcnow))

    @declared_attr
    def __tablename__(cls):
        """
        Convert Pascal class name style to `snake_case`
        """
        return depascalize(cls.__name__)
| [
"sqlmodel.Field"
] | [((542, 565), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (547, 565), False, 'from sqlmodel import Field, SQLModel\n'), ((886, 911), 'humps.main.depascalize', 'depascalize', (['cls.__name__'], {}), '(cls.__name__)\n', (897, 911), False, 'from humps.main import depascalize\n'), ((611, 652), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (617, 652), False, 'from sqlalchemy import Column, DateTime\n'), ((699, 741), 'sqlalchemy.Column', 'Column', (['DateTime'], {'onupdate': 'datetime.utcnow'}), '(DateTime, onupdate=datetime.utcnow)\n', (705, 741), False, 'from sqlalchemy import Column, DateTime\n')] |
import logging
import os
import secrets
import aioredis
import boto3
import pytest
import redis as pyredis
from fastapi.testclient import TestClient
from sqlalchemy import text
from sqlalchemy_utils import create_database, database_exists, drop_database
from sqlmodel import Session, create_engine
from iris.agent.settings import AgentSettings
from iris.api.authentication import (
current_active_user,
current_superuser,
current_verified_user,
)
from iris.api.main import app
from iris.api.settings import APISettings
from iris.commons.clickhouse import ClickHouse
from iris.commons.dependencies import get_settings
from iris.commons.models.base import Base
from iris.commons.redis import Redis
from iris.commons.settings import CommonSettings
from iris.commons.storage import Storage
from iris.commons.utils import json_serializer
from iris.worker import WorkerSettings
pytest.register_assert_rewrite("tests.assertions")
pytest_plugins = ["tests.fixtures.models", "tests.fixtures.storage"]
def should_cleanup():
    """Whether test fixtures should tear down external state.

    Cleanup runs unless the IRIS_TEST_CLEANUP environment variable is set
    to exactly "0".
    """
    flag = os.environ.get("IRIS_TEST_CLEANUP", "")
    return flag != "0"
@pytest.fixture
def logger():
    # Plain module-named logger for test diagnostics.
    return logging.getLogger(__name__)
@pytest.fixture
def settings():
    """Per-test CommonSettings with a random hex namespace so each test gets
    isolated database, S3 prefix, and Redis namespace."""
    namespace = secrets.token_hex(nbytes=4)
    print(f"@{namespace}", end=" ")
    # Redis has 16 databases by default, we use the last one for testing.
    return CommonSettings(
        CLICKHOUSE_PUBLIC_USER="public",
        CLICKHOUSE_DATABASE="iris_test",
        DATABASE_URL=f"postgresql://iris:iris@postgres.docker.localhost/iris-test-{namespace}",
        S3_PREFIX=f"iris-test-{namespace}",
        S3_PUBLIC_RESOURCES=["arn:aws:s3:::test-public-exports/*"],
        REDIS_NAMESPACE=f"iris-test-{namespace}",
        REDIS_URL="redis://default:iris@redis.docker.localhost?db=15",
        RETRY_TIMEOUT=-1,
    )
@pytest.fixture
def api_settings(settings):
    """API settings layered on the common test settings, plus CORS origins."""
    return APISettings(
        API_CORS_ALLOW_ORIGIN="https://example.org,http://localhost:8000",
        **settings.dict(),
    )
@pytest.fixture
def agent_settings(settings, tmp_path):
    """Agent settings on top of the common ones; result/target dirs under tmp_path."""
    return AgentSettings(
        **settings.dict(),
        AGENT_CARACAL_SNIFFER_WAIT_TIME=1,
        AGENT_MIN_TTL=0,
        AGENT_RESULTS_DIR_PATH=tmp_path / "agent_results",
        AGENT_TARGETS_DIR_PATH=tmp_path / "agent_targets",
    )
@pytest.fixture
def worker_settings(settings, tmp_path):
    """Worker settings on top of the common ones; results dir under tmp_path."""
    return WorkerSettings(
        **settings.dict(),
        WORKER_RESULTS_DIR_PATH=tmp_path / "worker_results",
        WORKER_MAX_OPEN_FILES=128,
    )
@pytest.fixture
def clickhouse(settings, logger):
    # ClickHouse client wrapper bound to the namespaced test settings.
    return ClickHouse(settings, logger)
@pytest.fixture
def engine(settings):
    """Engine bound to the namespaced test database; creates the database and
    all tables declared on Base if they do not exist yet."""
    engine = create_engine(settings.DATABASE_URL, json_serializer=json_serializer)
    if not database_exists(engine.url):
        create_database(engine.url)
    Base.metadata.create_all(engine)
    return engine
@pytest.fixture
async def redis(settings, logger):
    """Async Redis wrapper; the underlying client is closed on teardown."""
    client = aioredis.from_url(settings.REDIS_URL, decode_responses=True)
    yield Redis(client, settings, logger)
    await client.close()
@pytest.fixture
def session(engine):
    # SQLModel session on the test engine; closed automatically on teardown.
    with Session(engine) as session:
        yield session
@pytest.fixture
def storage(settings, logger):
    # S3 storage wrapper bound to the namespaced test settings.
    return Storage(settings, logger)
@pytest.fixture
def make_client(engine, api_settings):
    """Factory producing a TestClient with auth dependencies overridden for `user`.

    Overrides are cumulative with the user's flags (active -> verified ->
    superuser); all overrides are cleared on fixture teardown.
    """
    def _make_client(user=None):
        if user and user.is_active:
            app.dependency_overrides[current_active_user] = lambda: user
        if user and user.is_active and user.is_verified:
            app.dependency_overrides[current_verified_user] = lambda: user
        if user and user.is_active and user.is_verified and user.is_superuser:
            app.dependency_overrides[current_superuser] = lambda: user
        app.dependency_overrides[get_settings] = lambda: api_settings
        return TestClient(app)

    yield _make_client
    app.dependency_overrides.clear()
@pytest.fixture(autouse=True, scope="session")
def cleanup_redis():
    """Flush the dedicated Redis test database (db=15) once after the whole session."""
    yield
    if should_cleanup():
        redis_ = pyredis.from_url("redis://default:iris@redis.docker.localhost?db=15")
        redis_.flushdb()
        redis_.close()
@pytest.fixture(autouse=True, scope="session")
def cleanup_database():
    """Drop every leftover `iris-test-%` database once after the whole session."""
    yield
    if should_cleanup():
        # TODO: Cleanup/simplify this code.
        engine = create_engine("postgresql://iris:iris@postgres.docker.localhost")
        with engine.connect() as conn:
            databases = conn.execute(
                text(
                    """
                SELECT datname
                FROM pg_database
                WHERE datistemplate = false AND datname LIKE 'iris-test-%'
                """
                )
            ).all()
        for (database,) in databases:
            drop_database(
                f"postgresql://iris:iris@postgres.docker.localhost/{database}"
            )
@pytest.fixture(autouse=True, scope="session")
def cleanup_s3():
    """Empty and delete every `test-` S3 bucket (MinIO) once after the session."""
    yield
    if should_cleanup():
        s3 = boto3.client(
            "s3",
            aws_access_key_id="minioadmin",
            aws_secret_access_key="minioadmin",
            endpoint_url="http://minio.docker.localhost",
        )
        buckets = s3.list_buckets()
        buckets = [x["Name"] for x in buckets["Buckets"]]
        for bucket in buckets:
            if "test-" in bucket:
                # Buckets must be emptied before they can be deleted.
                objects = s3.list_objects_v2(Bucket=bucket)
                if objects["KeyCount"]:
                    objects = [{"Key": x["Key"]} for x in objects.get("Contents", [])]
                    s3.delete_objects(Bucket=bucket, Delete=dict(Objects=objects))
                s3.delete_bucket(Bucket=bucket)
        # https://github.com/boto/botocore/pull/1810
        s3._endpoint.http_session._manager.clear()
| [
"sqlmodel.create_engine",
"sqlmodel.Session"
] | [((887, 937), 'pytest.register_assert_rewrite', 'pytest.register_assert_rewrite', (['"""tests.assertions"""'], {}), "('tests.assertions')\n", (917, 937), False, 'import pytest\n'), ((3884, 3929), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (3898, 3929), False, 'import pytest\n'), ((4124, 4169), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (4138, 4169), False, 'import pytest\n'), ((4865, 4910), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (4879, 4910), False, 'import pytest\n'), ((1132, 1159), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1149, 1159), False, 'import logging\n'), ((1210, 1237), 'secrets.token_hex', 'secrets.token_hex', ([], {'nbytes': '(4)'}), '(nbytes=4)\n', (1227, 1237), False, 'import secrets\n'), ((1359, 1775), 'iris.commons.settings.CommonSettings', 'CommonSettings', ([], {'CLICKHOUSE_PUBLIC_USER': '"""public"""', 'CLICKHOUSE_DATABASE': '"""iris_test"""', 'DATABASE_URL': 'f"""postgresql://iris:iris@postgres.docker.localhost/iris-test-{namespace}"""', 'S3_PREFIX': 'f"""iris-test-{namespace}"""', 'S3_PUBLIC_RESOURCES': "['arn:aws:s3:::test-public-exports/*']", 'REDIS_NAMESPACE': 'f"""iris-test-{namespace}"""', 'REDIS_URL': '"""redis://default:iris@redis.docker.localhost?db=15"""', 'RETRY_TIMEOUT': '(-1)'}), "(CLICKHOUSE_PUBLIC_USER='public', CLICKHOUSE_DATABASE=\n 'iris_test', DATABASE_URL=\n f'postgresql://iris:iris@postgres.docker.localhost/iris-test-{namespace}',\n S3_PREFIX=f'iris-test-{namespace}', S3_PUBLIC_RESOURCES=[\n 'arn:aws:s3:::test-public-exports/*'], REDIS_NAMESPACE=\n f'iris-test-{namespace}', REDIS_URL=\n 'redis://default:iris@redis.docker.localhost?db=15', RETRY_TIMEOUT=-1)\n", (1373, 1775), False, 'from iris.commons.settings import 
CommonSettings\n'), ((2577, 2605), 'iris.commons.clickhouse.ClickHouse', 'ClickHouse', (['settings', 'logger'], {}), '(settings, logger)\n', (2587, 2605), False, 'from iris.commons.clickhouse import ClickHouse\n'), ((2659, 2728), 'sqlmodel.create_engine', 'create_engine', (['settings.DATABASE_URL'], {'json_serializer': 'json_serializer'}), '(settings.DATABASE_URL, json_serializer=json_serializer)\n', (2672, 2728), False, 'from sqlmodel import Session, create_engine\n'), ((2809, 2841), 'iris.commons.models.base.Base.metadata.create_all', 'Base.metadata.create_all', (['engine'], {}), '(engine)\n', (2833, 2841), False, 'from iris.commons.models.base import Base\n'), ((2926, 2986), 'aioredis.from_url', 'aioredis.from_url', (['settings.REDIS_URL'], {'decode_responses': '(True)'}), '(settings.REDIS_URL, decode_responses=True)\n', (2943, 2986), False, 'import aioredis\n'), ((3212, 3237), 'iris.commons.storage.Storage', 'Storage', (['settings', 'logger'], {}), '(settings, logger)\n', (3219, 3237), False, 'from iris.commons.storage import Storage\n'), ((3848, 3880), 'iris.api.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (3878, 3880), False, 'from iris.api.main import app\n'), ((1042, 1081), 'os.environ.get', 'os.environ.get', (['"""IRIS_TEST_CLEANUP"""', '""""""'], {}), "('IRIS_TEST_CLEANUP', '')\n", (1056, 1081), False, 'import os\n'), ((2740, 2767), 'sqlalchemy_utils.database_exists', 'database_exists', (['engine.url'], {}), '(engine.url)\n', (2755, 2767), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((2777, 2804), 'sqlalchemy_utils.create_database', 'create_database', (['engine.url'], {}), '(engine.url)\n', (2792, 2804), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((2997, 3028), 'iris.commons.redis.Redis', 'Redis', (['client', 'settings', 'logger'], {}), '(client, settings, logger)\n', (3002, 3028), False, 'from iris.commons.redis import 
Redis\n'), ((3102, 3117), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3109, 3117), False, 'from sqlmodel import Session, create_engine\n'), ((3804, 3819), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (3814, 3819), False, 'from fastapi.testclient import TestClient\n'), ((4003, 4072), 'redis.from_url', 'pyredis.from_url', (['"""redis://default:iris@redis.docker.localhost?db=15"""'], {}), "('redis://default:iris@redis.docker.localhost?db=15')\n", (4019, 4072), True, 'import redis as pyredis\n'), ((4290, 4355), 'sqlmodel.create_engine', 'create_engine', (['"""postgresql://iris:iris@postgres.docker.localhost"""'], {}), "('postgresql://iris:iris@postgres.docker.localhost')\n", (4303, 4355), False, 'from sqlmodel import Session, create_engine\n'), ((4977, 5114), 'boto3.client', 'boto3.client', (['"""s3"""'], {'aws_access_key_id': '"""minioadmin"""', 'aws_secret_access_key': '"""minioadmin"""', 'endpoint_url': '"""http://minio.docker.localhost"""'}), "('s3', aws_access_key_id='minioadmin', aws_secret_access_key=\n 'minioadmin', endpoint_url='http://minio.docker.localhost')\n", (4989, 5114), False, 'import boto3\n'), ((4754, 4831), 'sqlalchemy_utils.drop_database', 'drop_database', (['f"""postgresql://iris:iris@postgres.docker.localhost/{database}"""'], {}), "(f'postgresql://iris:iris@postgres.docker.localhost/{database}')\n", (4767, 4831), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((4449, 4655), 'sqlalchemy.text', 'text', (['"""\n SELECT datname\n FROM pg_database\n WHERE datistemplate = false AND datname LIKE \'iris-test-%\'\n """'], {}), '(\n """\n SELECT datname\n FROM pg_database\n WHERE datistemplate = false AND datname LIKE \'iris-test-%\'\n """\n )\n', (4453, 4655), False, 'from sqlalchemy import text\n')] |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
typer.secho(f'\ndue date must be grater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
typer.secho(f'\nreminder must be grater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
f'\nDuration must be grater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
select(Timer).where(Timer.end == None)).one()
query_timer.end = datetime.utcnow()
query_timer.duration = query_timer.end - query_timer.start
session.add(query_timer)
query = session.get(ToDo, query_timer.id_todo)
check = typer.confirm('Is the task done?')
if not check and not remarks:
pass
else:
if check:
query.status = 'done'
query.date_end = query_timer.end.date()
if remarks:
query.remarks = remarks
session.add(query)
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStop task ({query.id}). Timer id: {new_id}\n',
fg=typer.colors.GREEN)
except NoResultFound:
typer.secho(f'\nNo task running\n', fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def view(due_date: datetime = typer.Option(datetime.today() +
timedelta(weeks=1),
formats=['%Y-%m-%d'])):
"""Print to-do list view."""
overdue = select(ToDo).where(ToDo.due_date < date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
reminders = select(ToDo).where(ToDo.reminder <= date.today(),
ToDo.status != 'done').order_by(
ToDo.due_date)
due_in = select(ToDo).where(
ToDo.due_date < due_date, ToDo.due_date >= date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
no_due = select(ToDo).where(
ToDo.due_date == None, ToDo.status != 'done',
ToDo.reminder == None).order_by(ToDo.date_init)
if len(make_table_view(engine, overdue)) > 1:
typer.secho(f'\nOVERDUE\n', fg=typer.colors.BRIGHT_RED,
bold=True)
typer.secho(tabulate(make_table_view(engine, overdue),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
if len(make_table_view(engine, reminders)) > 1:
typer.secho(f'\nREMINDERS\n', fg=typer.colors.BRIGHT_YELLOW, bold=True)
typer.secho(tabulate(make_table_view(engine, reminders),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
if len(make_table_view(engine, due_in)) > 1:
typer.secho(f'\nDUE IN {due_date.date()}\n',
fg=typer.colors.BRIGHT_GREEN, bold=True)
typer.secho(tabulate(make_table_view(engine, due_in),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
if len(make_table_view(engine, no_due)) > 1:
typer.secho(f'\nNO DUE\n', fg=typer.colors.BRIGHT_BLUE, bold=True)
typer.secho(tabulate(make_table_view(engine, no_due),
headers="firstrow"), fg=typer.colors.BRIGHT_WHITE)
print('\n')
| [
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.func.max"
] | [((400, 413), 'typer.Typer', 'typer.Typer', ([], {}), '()\n', (411, 413), False, 'import typer\n'), ((600, 637), 'typer.Option', 'typer.Option', (['None', '"""--project"""', '"""-p"""'], {}), "(None, '--project', '-p')\n", (612, 637), False, 'import typer\n'), ((668, 728), 'typer.Option', 'typer.Option', (['None', '"""--due-date"""', '"""-d"""'], {'formats': "['%Y-%m-%d']"}), "(None, '--due-date', '-d', formats=['%Y-%m-%d'])\n", (680, 728), False, 'import typer\n'), ((801, 861), 'typer.Option', 'typer.Option', (['None', '"""--reminder"""', '"""-r"""'], {'formats': "['%Y-%m-%d']"}), "(None, '--reminder', '-r', formats=['%Y-%m-%d'])\n", (813, 861), False, 'import typer\n'), ((930, 974), 'typer.Option', 'typer.Option', (['Status.to_do', '"""--status"""', '"""-s"""'], {}), "(Status.to_do, '--status', '-s')\n", (942, 974), False, 'import typer\n'), ((995, 1028), 'typer.Option', 'typer.Option', (['None', '"""--tag"""', '"""-t"""'], {}), "(None, '--tag', '-t')\n", (1007, 1028), False, 'import typer\n'), ((3098, 3164), 'typer.Option', 'typer.Option', (['None', '"""--duration"""', '"""-d"""'], {'help': '"""Duration in minutes"""'}), "(None, '--duration', '-d', help='Duration in minutes')\n", (3110, 3164), False, 'import typer\n'), ((6176, 6213), 'typer.Option', 'typer.Option', (['None', '"""--remarks"""', '"""-r"""'], {}), "(None, '--remarks', '-r')\n", (6188, 6213), False, 'import typer\n'), ((1094, 1110), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1108, 1110), False, 'from datetime import datetime, timedelta, date\n'), ((3274, 3289), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3281, 3289), False, 'from sqlmodel import Session, select, func\n'), ((6247, 6262), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6254, 6262), False, 'from sqlmodel import Session, select, func\n'), ((8242, 8308), 'typer.secho', 'typer.secho', (['f"""\nOVERDUE\n"""'], {'fg': 'typer.colors.BRIGHT_RED', 'bold': '(True)'}), 
"(f'\\nOVERDUE\\n', fg=typer.colors.BRIGHT_RED, bold=True)\n", (8253, 8308), False, 'import typer\n'), ((8533, 8604), 'typer.secho', 'typer.secho', (['f"""\nREMINDERS\n"""'], {'fg': 'typer.colors.BRIGHT_YELLOW', 'bold': '(True)'}), "(f'\\nREMINDERS\\n', fg=typer.colors.BRIGHT_YELLOW, bold=True)\n", (8544, 8604), False, 'import typer\n'), ((9114, 9180), 'typer.secho', 'typer.secho', (['f"""\nNO DUE\n"""'], {'fg': 'typer.colors.BRIGHT_BLUE', 'bold': '(True)'}), "(f'\\nNO DUE\\n', fg=typer.colors.BRIGHT_BLUE, bold=True)\n", (9125, 9180), False, 'import typer\n'), ((1306, 1324), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1316, 1324), False, 'import typer\n'), ((1520, 1538), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1530, 1538), False, 'import typer\n'), ((1784, 1802), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1794, 1802), False, 'import typer\n'), ((1817, 1832), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1824, 1832), False, 'from sqlmodel import Session, select, func\n'), ((2772, 2842), 'typer.secho', 'typer.secho', (['f"""Add {task}. Task id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), "(f'Add {task}. 
Task id: {new_id}\\n', fg=typer.colors.GREEN)\n", (2783, 2842), False, 'import typer\n'), ((3398, 3471), 'typer.secho', 'typer.secho', (['"""\nThe Timer must be stopped first\n"""'], {'fg': 'typer.colors.RED'}), '("""\nThe Timer must be stopped first\n""", fg=typer.colors.RED)\n', (3409, 3471), False, 'import typer\n'), ((3512, 3530), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (3522, 3530), False, 'import typer\n'), ((6420, 6437), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (6435, 6437), False, 'from datetime import datetime, timedelta, date\n'), ((6627, 6661), 'typer.confirm', 'typer.confirm', (['"""Is the task done?"""'], {}), "('Is the task done?')\n", (6640, 6661), False, 'import typer\n'), ((7086, 7178), 'typer.secho', 'typer.secho', (['f"""\nStop task ({query.id}). Timer id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), '(f"""\nStop task ({query.id}). Timer id: {new_id}\n""", fg=typer.\n colors.GREEN)\n', (7097, 7178), False, 'import typer\n'), ((7402, 7418), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7416, 7418), False, 'from datetime import datetime, timedelta, date\n'), ((7464, 7482), 'datetime.timedelta', 'timedelta', ([], {'weeks': '(1)'}), '(weeks=1)\n', (7473, 7482), False, 'from datetime import datetime, timedelta, date\n'), ((5845, 5905), 'typer.secho', 'typer.secho', (['f"""\nTask already done\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nTask already done\n""", fg=typer.colors.RED)\n', (5856, 5905), False, 'import typer\n'), ((5954, 5972), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (5964, 5972), False, 'import typer\n'), ((6017, 6075), 'typer.secho', 'typer.secho', (['f"""\nInvalid task id\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid task id\n""", fg=typer.colors.RED)\n', (6028, 6075), False, 'import typer\n'), ((6116, 6134), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (6126, 6134), False, 'import typer\n'), ((7248, 7306), 
'typer.secho', 'typer.secho', (['f"""\nNo task running\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nNo task running\n""", fg=typer.colors.RED)\n', (7259, 7306), False, 'import typer\n'), ((7323, 7341), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (7333, 7341), False, 'import typer\n'), ((3880, 3907), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'duration'}), '(minutes=duration)\n', (3889, 3907), False, 'from datetime import datetime, timedelta, date\n'), ((4422, 4517), 'typer.secho', 'typer.secho', (['f"""\nTask Start task {task_id}. Timer id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), '(f"""\nTask Start task {task_id}. Timer id: {new_id}\n""", fg=\n typer.colors.GREEN)\n', (4433, 4517), False, 'import typer\n'), ((5677, 5767), 'typer.secho', 'typer.secho', (['f"""\nStart task {task_id}. Timer id: {new_id}\n"""'], {'fg': 'typer.colors.GREEN'}), '(f"""\nStart task {task_id}. Timer id: {new_id}\n""", fg=typer.\n colors.GREEN)\n', (5688, 5767), False, 'import typer\n'), ((7598, 7610), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (7604, 7610), False, 'from sqlmodel import Session, select, func\n'), ((7633, 7645), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7643, 7645), False, 'from datetime import datetime, timedelta, date\n'), ((7744, 7756), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (7750, 7756), False, 'from sqlmodel import Session, select, func\n'), ((7780, 7792), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7790, 7792), False, 'from datetime import datetime, timedelta, date\n'), ((7899, 7911), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (7905, 7911), False, 'from sqlmodel import Session, select, func\n'), ((7970, 7982), 'datetime.date.today', 'date.today', ([], {}), '()\n', (7980, 7982), False, 'from datetime import datetime, timedelta, date\n'), ((8053, 8065), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (8059, 8065), False, 'from sqlmodel import Session, 
select, func\n'), ((2290, 2366), 'typer.secho', 'typer.secho', (['f"""\nTasks already done in the project\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nTasks already done in the project\n""", fg=typer.colors.RED)\n', (2301, 2366), False, 'import typer\n'), ((2431, 2449), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (2441, 2449), False, 'import typer\n'), ((3943, 3963), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(0)'}), '(minutes=0)\n', (3952, 3963), False, 'from datetime import datetime, timedelta, date\n'), ((3989, 4062), 'typer.secho', 'typer.secho', (['f"""\nDuration must be grater than 0\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nDuration must be grater than 0\n""", fg=typer.colors.RED)\n', (4000, 4062), False, 'import typer\n'), ((4148, 4166), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (4158, 4166), False, 'import typer\n'), ((4585, 4624), 'typer.progressbar', 'typer.progressbar', ([], {'length': 'total_seconds'}), '(length=total_seconds)\n', (4602, 4624), False, 'import typer\n'), ((2734, 2751), 'sqlmodel.func.max', 'func.max', (['ToDo.id'], {}), '(ToDo.id)\n', (2742, 2751), False, 'from sqlmodel import Session, select, func\n'), ((4668, 4685), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4683, 4685), False, 'from datetime import datetime, timedelta, date\n'), ((4727, 4744), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4742, 4744), False, 'from datetime import datetime, timedelta, date\n'), ((4780, 4793), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4790, 4793), False, 'import time\n'), ((4899, 4997), 'typer.secho', 'typer.secho', (['"""\n\nYour Time is over! Well done!\n"""'], {'blink': '(True)', 'fg': 'typer.colors.BRIGHT_GREEN'}), '("""\n\nYour Time is over! 
Well done!\n""", blink=True, fg=typer.\n colors.BRIGHT_GREEN)\n', (4910, 4997), False, 'import typer\n'), ((5150, 5178), 'typer.confirm', 'typer.confirm', (['"""Any remark?"""'], {}), "('Any remark?')\n", (5163, 5178), False, 'import typer\n'), ((5452, 5464), 'typer.Exit', 'typer.Exit', ([], {}), '()\n', (5462, 5464), False, 'import typer\n'), ((7047, 7065), 'sqlmodel.func.max', 'func.max', (['Timer.id'], {}), '(Timer.id)\n', (7055, 7065), False, 'from sqlmodel import Session, select, func\n'), ((3340, 3353), 'sqlmodel.select', 'select', (['Timer'], {}), '(Timer)\n', (3346, 3353), False, 'from sqlmodel import Session, select, func\n'), ((5259, 5294), 'typer.prompt', 'typer.prompt', (['"""Enter your remarks."""'], {}), "('Enter your remarks.')\n", (5271, 5294), False, 'import typer\n'), ((6344, 6357), 'sqlmodel.select', 'select', (['Timer'], {}), '(Timer)\n', (6350, 6357), False, 'from sqlmodel import Session, select, func\n'), ((1924, 1936), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (1930, 1936), False, 'from sqlmodel import Session, select, func\n'), ((4375, 4393), 'sqlmodel.func.max', 'func.max', (['Timer.id'], {}), '(Timer.id)\n', (4383, 4393), False, 'from sqlmodel import Session, select, func\n'), ((5630, 5648), 'sqlmodel.func.max', 'func.max', (['Timer.id'], {}), '(Timer.id)\n', (5638, 5648), False, 'from sqlmodel import Session, select, func\n'), ((2093, 2105), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (2099, 2105), False, 'from sqlmodel import Session, select, func\n')] |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("webhook_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("traction_issue_enabled", sa.Boolean(), nullable=False),
sa.Column("public_did", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("cred_def_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("wallet_key", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"student",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("age", sa.Integer(), nullable=True),
sa.Column("student_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(op.f("ix_student_name"), "student", ["name"], unique=False)
op.create_table(
"out_of_band",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("msg", postgresql.JSON(astext_type=sa.Text()), nullable=True),
sa.Column("msg_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sender_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("recipient_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("action", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["recipient_id"],
["line_of_business.id"],
),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.ForeignKeyConstraint(
["sender_id"],
["line_of_business.id"],
),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("out_of_band")
op.drop_index(op.f("ix_student_name"), table_name="student")
op.drop_table("student")
op.drop_table("line_of_business")
op.drop_index(op.f("ix_job_applicant_name"), table_name="job_applicant")
op.drop_table("job_applicant")
op.drop_table("sandbox")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.sql.sqltypes.AutoString"
] | [((7044, 7072), 'alembic.op.drop_table', 'op.drop_table', (['"""out_of_band"""'], {}), "('out_of_band')\n", (7057, 7072), False, 'from alembic import op\n'), ((7142, 7166), 'alembic.op.drop_table', 'op.drop_table', (['"""student"""'], {}), "('student')\n", (7155, 7166), False, 'from alembic import op\n'), ((7171, 7204), 'alembic.op.drop_table', 'op.drop_table', (['"""line_of_business"""'], {}), "('line_of_business')\n", (7184, 7204), False, 'from alembic import op\n'), ((7286, 7316), 'alembic.op.drop_table', 'op.drop_table', (['"""job_applicant"""'], {}), "('job_applicant')\n", (7299, 7316), False, 'from alembic import op\n'), ((7321, 7345), 'alembic.op.drop_table', 'op.drop_table', (['"""sandbox"""'], {}), "('sandbox')\n", (7334, 7345), False, 'from alembic import op\n'), ((1040, 1069), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1063, 1069), True, 'import sqlalchemy as sa\n'), ((2281, 2336), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (2304, 2336), True, 'import sqlalchemy as sa\n'), ((2381, 2410), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2404, 2410), True, 'import sqlalchemy as sa\n'), ((2420, 2461), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (2439, 2461), True, 'import sqlalchemy as sa\n'), ((2498, 2527), 'alembic.op.f', 'op.f', (['"""ix_job_applicant_name"""'], {}), "('ix_job_applicant_name')\n", (2502, 2527), False, 'from alembic import op\n'), ((3782, 3837), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (3805, 3837), True, 'import sqlalchemy as sa\n'), ((3882, 3911), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3905, 3911), 
True, 'import sqlalchemy as sa\n'), ((5256, 5311), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (5279, 5311), True, 'import sqlalchemy as sa\n'), ((5356, 5385), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (5379, 5385), True, 'import sqlalchemy as sa\n'), ((5395, 5436), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (5414, 5436), True, 'import sqlalchemy as sa\n'), ((5464, 5487), 'alembic.op.f', 'op.f', (['"""ix_student_name"""'], {}), "('ix_student_name')\n", (5468, 5487), False, 'from alembic import op\n'), ((6564, 6630), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['recipient_id']", "['line_of_business.id']"], {}), "(['recipient_id'], ['line_of_business.id'])\n", (6587, 6630), True, 'import sqlalchemy as sa\n'), ((6675, 6730), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sandbox_id']", "['sandbox.id']"], {}), "(['sandbox_id'], ['sandbox.id'])\n", (6698, 6730), True, 'import sqlalchemy as sa\n'), ((6775, 6838), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['sender_id']", "['line_of_business.id']"], {}), "(['sender_id'], ['line_of_business.id'])\n", (6798, 6838), True, 'import sqlalchemy as sa\n'), ((6883, 6912), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (6906, 6912), True, 'import sqlalchemy as sa\n'), ((7091, 7114), 'alembic.op.f', 'op.f', (['"""ix_student_name"""'], {}), "('ix_student_name')\n", (7095, 7114), False, 'from alembic import op\n'), ((7223, 7252), 'alembic.op.f', 'op.f', (['"""ix_job_applicant_name"""'], {}), "('ix_job_applicant_name')\n", (7227, 7252), False, 'from alembic import op\n'), ((498, 527), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (513, 
527), False, 'from sqlalchemy.dialects import postgresql\n'), ((682, 704), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (702, 704), False, 'from sqlalchemy.dialects import postgresql\n'), ((847, 869), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (867, 869), False, 'from sqlalchemy.dialects import postgresql\n'), ((980, 1014), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1012, 1014), False, 'import sqlmodel\n'), ((1172, 1201), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (1187, 1201), False, 'from sqlalchemy.dialects import postgresql\n'), ((1356, 1378), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (1376, 1378), False, 'from sqlalchemy.dialects import postgresql\n'), ((1521, 1543), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (1541, 1543), False, 'from sqlalchemy.dialects import postgresql\n'), ((1655, 1689), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1687, 1689), False, 'import sqlmodel\n'), ((1740, 1768), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1766, 1768), False, 'import sqlmodel\n'), ((1815, 1849), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1847, 1849), False, 'import sqlmodel\n'), ((1893, 1906), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1904, 1906), True, 'import sqlalchemy as sa\n'), ((1975, 2009), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2007, 2009), False, 'import sqlmodel\n'), ((2071, 2099), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2097, 2099), False, 'import sqlmodel\n'), ((2148, 2176), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), 
'()\n', (2174, 2176), False, 'import sqlmodel\n'), ((2221, 2255), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2253, 2255), False, 'import sqlmodel\n'), ((2673, 2702), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (2688, 2702), False, 'from sqlalchemy.dialects import postgresql\n'), ((2857, 2879), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (2877, 2879), False, 'from sqlalchemy.dialects import postgresql\n'), ((3022, 3044), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (3042, 3044), False, 'from sqlalchemy.dialects import postgresql\n'), ((3156, 3190), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3188, 3190), False, 'import sqlmodel\n'), ((3242, 3276), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3274, 3276), False, 'import sqlmodel\n'), ((3326, 3354), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3352, 3354), False, 'import sqlmodel\n'), ((3417, 3429), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3427, 3429), True, 'import sqlalchemy as sa\n'), ((3480, 3514), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3512, 3514), False, 'import sqlmodel\n'), ((3565, 3599), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3597, 3599), False, 'import sqlmodel\n'), ((3648, 3676), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3674, 3676), False, 'import sqlmodel\n'), ((3727, 3755), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (3753, 3755), False, 'import sqlmodel\n'), ((4008, 4037), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (4023, 4037), False, 
'from sqlalchemy.dialects import postgresql\n'), ((4192, 4214), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (4212, 4214), False, 'from sqlalchemy.dialects import postgresql\n'), ((4357, 4379), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (4377, 4379), False, 'from sqlalchemy.dialects import postgresql\n'), ((4491, 4525), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4523, 4525), False, 'import sqlmodel\n'), ((4576, 4604), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (4602, 4604), False, 'import sqlmodel\n'), ((4651, 4685), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4683, 4685), False, 'import sqlmodel\n'), ((4728, 4740), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (4738, 4740), True, 'import sqlalchemy as sa\n'), ((4790, 4824), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4822, 4824), False, 'import sqlmodel\n'), ((4868, 4881), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (4879, 4881), True, 'import sqlalchemy as sa\n'), ((4950, 4984), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4982, 4984), False, 'import sqlmodel\n'), ((5046, 5074), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (5072, 5074), False, 'import sqlmodel\n'), ((5123, 5151), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (5149, 5151), False, 'import sqlmodel\n'), ((5196, 5230), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5228, 5230), False, 'import sqlmodel\n'), ((5617, 5646), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (5632, 5646), False, 'from sqlalchemy.dialects import postgresql\n'), ((5801, 5823), 
'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (5821, 5823), False, 'from sqlalchemy.dialects import postgresql\n'), ((5966, 5988), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (5986, 5988), False, 'from sqlalchemy.dialects import postgresql\n'), ((6185, 6219), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6217, 6219), False, 'import sqlmodel\n'), ((6269, 6297), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6295, 6297), False, 'import sqlmodel\n'), ((6350, 6378), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6376, 6378), False, 'import sqlmodel\n'), ((6429, 6457), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (6455, 6457), False, 'import sqlmodel\n'), ((6504, 6538), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6536, 6538), False, 'import sqlmodel\n'), ((556, 584), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (563, 584), True, 'import sqlalchemy as sa\n'), ((733, 749), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (740, 749), True, 'import sqlalchemy as sa\n'), ((898, 914), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (905, 914), True, 'import sqlalchemy as sa\n'), ((1230, 1258), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (1237, 1258), True, 'import sqlalchemy as sa\n'), ((1407, 1423), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (1414, 1423), True, 'import sqlalchemy as sa\n'), ((1572, 1588), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (1579, 1588), True, 'import sqlalchemy as sa\n'), ((2731, 2759), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (2738, 2759), True, 'import 
sqlalchemy as sa\n'), ((2908, 2924), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (2915, 2924), True, 'import sqlalchemy as sa\n'), ((3073, 3089), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (3080, 3089), True, 'import sqlalchemy as sa\n'), ((4066, 4094), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (4073, 4094), True, 'import sqlalchemy as sa\n'), ((4243, 4259), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (4250, 4259), True, 'import sqlalchemy as sa\n'), ((4408, 4424), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (4415, 4424), True, 'import sqlalchemy as sa\n'), ((5675, 5703), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (5682, 5703), True, 'import sqlalchemy as sa\n'), ((5852, 5868), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (5859, 5868), True, 'import sqlalchemy as sa\n'), ((6017, 6033), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (6024, 6033), True, 'import sqlalchemy as sa\n'), ((6127, 6136), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (6134, 6136), True, 'import sqlalchemy as sa\n')] |
"""initial-db-tables
Revision ID: d925cb39480e
Revises:
Create Date: 2022-05-05 11:45:18.781171
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "d925cb39480e"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``contact`` and ``endorserequest`` tables.

    Both tables share the same audit columns: a server-generated UUID primary
    key (``gen_random_uuid()``) and ``created_at`` / ``updated_at`` timestamps
    defaulting to ``now()``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "contact",
        sa.Column(
            "contact_id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("tags", postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column(
            "created_at",
            postgresql.TIMESTAMP(),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            postgresql.TIMESTAMP(),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("author_status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("endorse_status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("connection_protocol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "connection_alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("public_did", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("state", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.PrimaryKeyConstraint("contact_id"),
    )
    # Endorse requests reference connections/transactions by GUID only;
    # no foreign-key constraints are declared in this revision.
    op.create_table(
        "endorserequest",
        sa.Column(
            "endorse_request_id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("tags", postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column(
            "created_at",
            postgresql.TIMESTAMP(),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            postgresql.TIMESTAMP(),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("transaction_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("endorser_did", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("author_did", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "transaction_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("state", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("ledger_txn", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.PrimaryKeyConstraint("endorse_request_id"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the tables created by :func:`upgrade`."""
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name in ("endorserequest", "contact"):
        op.drop_table(table_name)
    # ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.sql.sqltypes.AutoString"
] | [((3169, 3200), 'alembic.op.drop_table', 'op.drop_table', (['"""endorserequest"""'], {}), "('endorserequest')\n", (3182, 3200), False, 'from alembic import op\n'), ((3205, 3229), 'alembic.op.drop_table', 'op.drop_table', (['"""contact"""'], {}), "('contact')\n", (3218, 3229), False, 'from alembic import op\n'), ((1683, 1720), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""contact_id"""'], {}), "('contact_id')\n", (1706, 1720), True, 'import sqlalchemy as sa\n'), ((2992, 3037), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""endorse_request_id"""'], {}), "('endorse_request_id')\n", (3015, 3037), True, 'import sqlalchemy as sa\n'), ((516, 545), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (531, 545), False, 'from sqlalchemy.dialects import postgresql\n'), ((773, 795), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (793, 795), False, 'from sqlalchemy.dialects import postgresql\n'), ((938, 960), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (958, 960), False, 'from sqlalchemy.dialects import postgresql\n'), ((1081, 1115), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1113, 1115), False, 'import sqlmodel\n'), ((1170, 1204), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1202, 1204), False, 'import sqlmodel\n'), ((1258, 1286), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1284, 1286), False, 'import sqlmodel\n'), ((1346, 1380), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1378, 1380), False, 'import sqlmodel\n'), ((1450, 1484), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1482, 1484), False, 'import sqlmodel\n'), ((1543, 1577), 'sqlmodel.sql.sqltypes.AutoString', 
'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1575, 1577), False, 'import sqlmodel\n'), ((1622, 1656), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1654, 1656), False, 'import sqlmodel\n'), ((1840, 1869), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (1855, 1869), False, 'from sqlalchemy.dialects import postgresql\n'), ((2097, 2119), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (2117, 2119), False, 'from sqlalchemy.dialects import postgresql\n'), ((2262, 2284), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (2282, 2284), False, 'from sqlalchemy.dialects import postgresql\n'), ((2406, 2434), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2432, 2434), False, 'import sqlmodel\n'), ((2488, 2516), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2514, 2516), False, 'import sqlmodel\n'), ((2569, 2603), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2601, 2603), False, 'import sqlmodel\n'), ((2654, 2688), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2686, 2688), False, 'import sqlmodel\n'), ((2758, 2792), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2790, 2792), False, 'import sqlmodel\n'), ((2847, 2881), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2879, 2881), False, 'import sqlmodel\n'), ((2932, 2966), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2964, 2966), False, 'import sqlmodel\n'), ((574, 602), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (581, 602), True, 'import sqlalchemy as sa\n'), ((686, 697), 'sqlalchemy.String', 'sa.String', ([], 
{}), '()\n', (695, 697), True, 'import sqlalchemy as sa\n'), ((824, 840), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (831, 840), True, 'import sqlalchemy as sa\n'), ((989, 1005), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (996, 1005), True, 'import sqlalchemy as sa\n'), ((1898, 1926), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (1905, 1926), True, 'import sqlalchemy as sa\n'), ((2010, 2021), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (2019, 2021), True, 'import sqlalchemy as sa\n'), ((2148, 2164), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (2155, 2164), True, 'import sqlalchemy as sa\n'), ((2313, 2329), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (2320, 2329), True, 'import sqlalchemy as sa\n')] |
from datetime import date
from fastapi import FastAPI, Query
from sqlmodel import Session, create_engine, select
from .datatypes import ArtmuseumAddress, ArtmuseumTimeLabel
from .db.crud import init_db
from .db.models import ArtmuseumExhibition, PhilharmoniaConcert
# Single process-wide SQLite engine; check_same_thread=False lets FastAPI
# reuse the connection across its worker threads.
sql_engine = create_engine(
    "sqlite:///database.db", connect_args={"check_same_thread": False}
)

# ReDoc is served at the root; the Swagger UI (docs_url) is disabled.
app = FastAPI(
    title="Murmansk Culture API",
    # description="",
    version="0.0.1",
    contact={
        "name": "<NAME>",
        "url": "https://github.com/anorlovsky",
        "email": "<EMAIL>",
    },
    redoc_url="/",
    docs_url=None,
)
@app.on_event("startup")
def on_startup():
    """Initialize the database schema when the application starts."""
    init_db(sql_engine)
@app.get(
    "/artmuseum",
    response_model=list[ArtmuseumExhibition],
    description="Возвращает список текущих и ближайших выставок [Мурманского областного художественного музея](https://artmmuseum.ru/)",
)
async def get_artmuseum_exhibitions(
    time: ArtmuseumTimeLabel = Query(
        None,
        description='Вернуть только текущие (`"now"`) или только ближайшие (`"soon"`) выставки',
    )
):
    """List art-museum exhibitions, optionally filtered by time label."""
    with Session(sql_engine) as session:
        today = date.today()
        if time is None:
            # No filter requested: return everything.
            stmt = select(ArtmuseumExhibition)
        elif time == ArtmuseumTimeLabel.NOW:
            # Exhibitions that have already started.
            stmt = select(ArtmuseumExhibition).where(ArtmuseumExhibition.start_date <= today)
        elif time == ArtmuseumTimeLabel.SOON:
            # Exhibitions starting in the future.
            stmt = select(ArtmuseumExhibition).where(ArtmuseumExhibition.start_date > today)
        return session.exec(stmt).all()
@app.get(
    "/philharmonia",
    response_model=list[PhilharmoniaConcert],
    description="Возвращает список ближайших концертов [Мурманской областной филармонии](https://www.murmansound.ru)",
)
async def get_philharmonia_concerts():
    """List upcoming philharmonia concerts."""
    stmt = select(PhilharmoniaConcert)
    with Session(sql_engine) as session:
        return session.exec(stmt).all()
| [
"sqlmodel.create_engine",
"sqlmodel.select",
"sqlmodel.Session"
] | [((283, 369), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {'connect_args': "{'check_same_thread': False}"}), "('sqlite:///database.db', connect_args={'check_same_thread': \n False})\n", (296, 369), False, 'from sqlmodel import Session, create_engine, select\n'), ((377, 557), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Murmansk Culture API"""', 'version': '"""0.0.1"""', 'contact': "{'name': '<NAME>', 'url': 'https://github.com/anorlovsky', 'email': '<EMAIL>'}", 'redoc_url': '"""/"""', 'docs_url': 'None'}), "(title='Murmansk Culture API', version='0.0.1', contact={'name':\n '<NAME>', 'url': 'https://github.com/anorlovsky', 'email': '<EMAIL>'},\n redoc_url='/', docs_url=None)\n", (384, 557), False, 'from fastapi import FastAPI, Query\n'), ((978, 1088), 'fastapi.Query', 'Query', (['None'], {'description': '"""Вернуть только текущие (`"now"`) или только ближайшие (`"soon"`) выставки"""'}), '(None, description=\n \'Вернуть только текущие (`"now"`) или только ближайшие (`"soon"`) выставки\'\n )\n', (983, 1088), False, 'from fastapi import FastAPI, Query\n'), ((1114, 1133), 'sqlmodel.Session', 'Session', (['sql_engine'], {}), '(sql_engine)\n', (1121, 1133), False, 'from sqlmodel import Session, create_engine, select\n'), ((1859, 1878), 'sqlmodel.Session', 'Session', (['sql_engine'], {}), '(sql_engine)\n', (1866, 1878), False, 'from sqlmodel import Session, create_engine, select\n'), ((1190, 1217), 'sqlmodel.select', 'select', (['ArtmuseumExhibition'], {}), '(ArtmuseumExhibition)\n', (1196, 1217), False, 'from sqlmodel import Session, create_engine, select\n'), ((1919, 1946), 'sqlmodel.select', 'select', (['PhilharmoniaConcert'], {}), '(PhilharmoniaConcert)\n', (1925, 1946), False, 'from sqlmodel import Session, create_engine, select\n'), ((1282, 1309), 'sqlmodel.select', 'select', (['ArtmuseumExhibition'], {}), '(ArtmuseumExhibition)\n', (1288, 1309), False, 'from sqlmodel import Session, create_engine, select\n'), ((1367, 1379), 
'datetime.date.today', 'date.today', ([], {}), '()\n', (1377, 1379), False, 'from datetime import date\n'), ((1459, 1486), 'sqlmodel.select', 'select', (['ArtmuseumExhibition'], {}), '(ArtmuseumExhibition)\n', (1465, 1486), False, 'from sqlmodel import Session, create_engine, select\n'), ((1543, 1555), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1553, 1555), False, 'from datetime import date\n')] |
from uuid import UUID
from sqlalchemy import event
from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import DomainURLORMModel, url_pre_save
from joj.horse.models.domain import Domain
from joj.horse.schemas.domain_invitation import DomainInvitationDetail
class DomainInvitation(DomainURLORMModel, DomainInvitationDetail, table=True):  # type: ignore[call-arg]
    # ORM table for invitations that belong to a domain.
    __tablename__ = "domain_invitations"
    # Both the invitation URL and the invitation code must be unique
    # within a single domain (not globally).
    __table_args__ = (
        UniqueConstraint("domain_id", "url"),
        UniqueConstraint("domain_id", "code"),
    )
    # Owning domain; rows are removed when the domain is deleted (CASCADE).
    domain_id: UUID = Field(
        sa_column=Column(
            GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False
        )
    )
    domain: "Domain" = Relationship(back_populates="invitations")
# Normalize/validate the url field before every insert and update.
event.listen(DomainInvitation, "before_insert", url_pre_save)
event.listen(DomainInvitation, "before_update", url_pre_save)
| [
"sqlmodel.Relationship"
] | [((869, 930), 'sqlalchemy.event.listen', 'event.listen', (['DomainInvitation', '"""before_insert"""', 'url_pre_save'], {}), "(DomainInvitation, 'before_insert', url_pre_save)\n", (881, 930), False, 'from sqlalchemy import event\n'), ((931, 992), 'sqlalchemy.event.listen', 'event.listen', (['DomainInvitation', '"""before_update"""', 'url_pre_save'], {}), "(DomainInvitation, 'before_update', url_pre_save)\n", (943, 992), False, 'from sqlalchemy import event\n'), ((824, 866), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""invitations"""'}), "(back_populates='invitations')\n", (836, 866), False, 'from sqlmodel import Field, Relationship\n'), ((559, 595), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['"""domain_id"""', '"""url"""'], {}), "('domain_id', 'url')\n", (575, 595), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((605, 642), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['"""domain_id"""', '"""code"""'], {}), "('domain_id', 'code')\n", (621, 642), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((724, 768), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""domains.id"""'], {'ondelete': '"""CASCADE"""'}), "('domains.id', ondelete='CASCADE')\n", (734, 768), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n')] |
from typing import Optional, Dict, List, Any, Union
import datetime as dt
from sqlmodel import Field, Session, SQLModel, create_engine, select
import threading as th
import queue
# ~~~ Database ~~~~~~~~~~~~~~~
class Database:
    """Thin convenience wrapper around a SQLModel engine.

    Each operation opens its own short-lived :class:`Session`, so instances
    can be shared freely between calls.
    """

    def __init__(self, uri: str):
        """Create an engine for *uri* and ensure all known tables exist."""
        self.engine = create_engine(uri)
        SQLModel.metadata.create_all(self.engine)

    def create_all(self, items: List[SQLModel]):
        """Persist *items* in a single transaction."""
        with Session(self.engine) as session:
            for item in items:
                session.add(item)
            session.commit()

    def get_by_id(self, id: Union[str, int], model: SQLModel):
        """Return the first *model* row whose ``id`` equals *id*, or None."""
        with Session(self.engine) as session:
            stmt = select(model).where(model.id == id)
            return session.exec(stmt).first()

    def get_by_field(self, key: str, value: Any, model: SQLModel):
        """Return all *model* rows where attribute *key* equals *value*."""
        stmt = select(model).where(getattr(model, key) == value)
        # FIX: removed a leftover debug print(stmt) that spammed stdout
        # on every lookup.
        return self.exec(stmt)

    def exec(self, stmt, params=None):
        """Execute *stmt* (with optional bind *params*) and return all rows."""
        # FIX: the default used to be a shared mutable dict ({}); None is the
        # safe equivalent and what Session.exec expects by default.
        with Session(self.engine) as session:
            return session.exec(stmt, params=params).all()
class DatabaseWorker(th.Thread):
    """Background thread that drains a queue of models into the database.

    Items are taken from *queue* and written either one at a time
    (``batch`` is None) or in groups of ``batch``.  The thread stops after
    *timeout* seconds without a new item, flushing anything still buffered.
    """

    def __init__(self,
        uri: str,
        queue: queue.Queue,
        batch: int = None,
        timeout: int = 10
    ):
        super().__init__()
        self.q = queue
        # The Database (and its engine) is created lazily in run() so it
        # lives in the worker thread, not the spawning thread.
        self.db = None
        self.uri = uri
        self.timeout = timeout
        self.batch = batch

    def run(self):
        self.db = Database(self.uri)
        # BUG FIX: the buffer used to be re-created on every loop iteration,
        # so with batch > 1 the batch threshold was never reached and items
        # were silently dropped.  It must persist across iterations.
        cache = []
        while True:
            try:
                cache.append(self.q.get(timeout=self.timeout))
                if self.batch:
                    if len(cache) >= self.batch:
                        self.db.create_all(cache)
                        cache = []
                else:
                    # No batching: write each item as it arrives.
                    self.db.create_all(cache)
                    cache = []
            except queue.Empty:
                # Timed out waiting for work: flush any partial batch and exit.
                if cache:
                    self.db.create_all(cache)
                break
# ~~~ Models ~~~~~~~~~~~~~~~~~
class Document(SQLModel, table=True):
    # A collected source document and its provenance metadata.
    id: str = Field(primary_key=True)
    name: str
    href: str                      # source URL of the document
    date: dt.datetime              # publication date of the document
    text: Optional[str] = None     # full text, if it was retrieved
    date_collected: dt.datetime    # when the document was scraped
    collected_by: str              # identifier of the collector/scraper
class Paragraph(SQLModel, table=True):
    # One paragraph of a Document, with its sentiment annotation.
    id: str = Field(primary_key=True)
    text: str
    document_id: str = Field(foreign_key="document.id")
    sentiment: str        # sentiment label — presumably e.g. pos/neg/neutral; confirm with annotator
    sent_score: float     # confidence/score of the sentiment label
class Entity(SQLModel, table=True):
    # A knowledge-base entity that mentions can link to.
    id: str = Field(primary_key=True)
    name: str
    description: Optional[str]
class EntityMention(SQLModel, table=True):
    # A single mention of an entity inside a paragraph.
    id: Optional[int] = Field(default=None, primary_key=True)  # autoincrement
    text: str                 # surface form of the mention
    score: Optional[float]    # model confidence, if available
    label: str                # mention label — presumably the NER type; confirm upstream
    start: int                # start offset — presumably character index in paragraph text; confirm
    end: int                  # end offset (same convention as start)
    paragraph_id: str = Field(foreign_key="paragraph.id")
    kb_id: Optional[str] = Field(foreign_key="entity.id")  # linked KB entity, if resolved
class EntityFeature(SQLModel, table=True):
    # Arbitrary key/value attribute attached to a KB entity.
    id: int = Field(primary_key=True)
    kb_id: str = Field(foreign_key="entity.id")
    key: str
    value: str
| [
"sqlmodel.create_engine",
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field"
] | [((2043, 2066), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2048, 2066), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2256, 2279), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2261, 2279), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2317, 2349), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""document.id"""'}), "(foreign_key='document.id')\n", (2322, 2349), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2442, 2465), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2447, 2465), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2579, 2616), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2584, 2616), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2725, 2758), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""paragraph.id"""'}), "(foreign_key='paragraph.id')\n", (2730, 2758), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2786, 2816), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""entity.id"""'}), "(foreign_key='entity.id')\n", (2791, 2816), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2875, 2898), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2880, 2898), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2916, 2946), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""entity.id"""'}), "(foreign_key='entity.id')\n", (2921, 2946), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((284, 302), 'sqlmodel.create_engine', 'create_engine', (['uri'], {}), '(uri)\n', (297, 302), False, 'from sqlmodel import Field, 
Session, SQLModel, create_engine, select\n'), ((311, 352), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['self.engine'], {}), '(self.engine)\n', (339, 352), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((416, 436), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (423, 436), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((620, 640), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (627, 640), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1004, 1024), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (1011, 1024), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((841, 854), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (847, 854), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((672, 685), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (678, 685), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')] |
# A file containing fixtures for testing
# Fixtures defined here are available for the whole scope
from fastapi.testclient import TestClient
import pytest
import os
from ..main import app, session
from sqlmodel import SQLModel, Session, create_engine
from sqlmodel.pool import StaticPool
from ..utils import get_session
db_name = "test_db.sqlite"
test_con = f"sqlite:///{db_name}"
test_engine = create_engine(
test_con, connect_args={"check_same_thread": False}, echo=True
)
@pytest.fixture(name="create_db", scope="session")
def create_db():
    """Create the test schema once per session; delete the DB file after."""
    # setup: build all SQLModel tables in the test database
    SQLModel.metadata.create_all(test_engine)
    yield
    # teardown: remove the SQLite file so each test session starts clean
    os.remove(db_name)
@pytest.fixture(name="session")
def session_fixture(create_db):
    """Yield a Session bound to the test engine.

    Declaring ``create_db`` as a parameter is what makes pytest run the
    schema-setup fixture first; the old bare ``create_db`` expression
    statement inside the body was a no-op and has been removed.
    """
    with Session(test_engine) as session:
        yield session
@pytest.fixture(name="client")
def client_fixture(session: Session):
    """Yield a TestClient whose get_session dependency uses the test session."""
    def get_session_override():
        return session
    # Swap the app's real DB session dependency for the test session.
    app.dependency_overrides[get_session] = get_session_override
    client = TestClient(app)
    yield client
    # Restore the app's original dependencies for subsequent tests.
    app.dependency_overrides.clear()
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((397, 474), 'sqlmodel.create_engine', 'create_engine', (['test_con'], {'connect_args': "{'check_same_thread': False}", 'echo': '(True)'}), "(test_con, connect_args={'check_same_thread': False}, echo=True)\n", (410, 474), False, 'from sqlmodel import SQLModel, Session, create_engine\n'), ((484, 533), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""create_db"""', 'scope': '"""session"""'}), "(name='create_db', scope='session')\n", (498, 533), False, 'import pytest\n'), ((660, 690), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""session"""'}), "(name='session')\n", (674, 690), False, 'import pytest\n'), ((805, 834), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""client"""'}), "(name='client')\n", (819, 834), False, 'import pytest\n'), ((567, 608), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['test_engine'], {}), '(test_engine)\n', (595, 608), False, 'from sqlmodel import SQLModel, Session, create_engine\n'), ((638, 656), 'os.remove', 'os.remove', (['db_name'], {}), '(db_name)\n', (647, 656), False, 'import os\n'), ((1007, 1022), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (1017, 1022), False, 'from fastapi.testclient import TestClient\n'), ((747, 767), 'sqlmodel.Session', 'Session', (['test_engine'], {}), '(test_engine)\n', (754, 767), False, 'from sqlmodel import SQLModel, Session, create_engine\n')] |
from sqlmodel import Session
from .database import create_db_and_tables, engine
from .hero_model import Hero
from .team_model import Team
def create_heroes():
    """Insert a demo team and hero, then print what was created."""
    with Session(engine) as session:
        team_z_force = Team(name="Z-Force", headquarters="<NAME>")
        hero_deadpond = Hero(
            name="Deadpond", secret_name="<NAME>", team=team_z_force, experience_points=1
        )
        # Adding the hero cascades to its (new) team via the relationship.
        session.add(hero_deadpond)
        session.commit()
        # Refresh to load server-assigned values (e.g. the primary key).
        session.refresh(hero_deadpond)
        print("Created hero:", hero_deadpond)
        print("Hero's team:", hero_deadpond.team)
def main():
    """Create the schema and seed it with the demo data."""
    create_db_and_tables()
    create_heroes()
if __name__ == "__main__":
    main()
| [
"sqlmodel.Session"
] | [((171, 186), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (178, 186), False, 'from sqlmodel import Session\n')] |
from sqlalchemy.engine import Engine
from sqlmodel import create_engine, Session, SQLModel
from sqlmodel.engine.create import _FutureEngine
from typing import Union, Optional
from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, \
raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains
from typing import List
class DBConnectionBase:
    """
    Thin wrapper around a SQLAlchemy/SQLModel engine with convenience
    constructors, raw-SQL execution and schema inspection helpers.

    References
    ----------
    for users who want to know the differences between Engine, Connection, Session
    https://stackoverflow.com/questions/34322471/sqlalchemy-engine-connection-and-session-difference
    """
    def __init__(self, db_engine: Union[Engine, _FutureEngine]):
        # Keep the engine and eagerly create all tables registered on
        # SQLModel.metadata (a no-op for tables that already exist).
        self._db_engine = db_engine
        SQLModel.metadata.create_all(self._db_engine)
    @classmethod
    def from_db_user(cls, db_type, db_driver, host, user, password, port, db, charset='utf8', echo=True):
        # Build a DSN of the form "<type>+<driver>://user:password@host:port/db".
        # NOTE(review): the `charset` parameter is accepted but never used in
        # the connection URL — confirm whether it should be appended.
        engine = create_engine(f"{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}", echo=echo)
        return cls(engine)
    @classmethod
    def from_full_db_path(cls, full_db_path, echo=True):
        # Alternate constructor taking a complete, pre-built DSN string.
        engine = create_engine(f"{full_db_path}", echo=echo)
        return cls(engine)
    def get_db_engine(self):
        # Expose the underlying engine for callers that need direct access.
        return self._db_engine
    def execute(self, sql, always_commit=False, fetch: Optional[Union[int, str]] = None):
        """Execute raw SQL in a short-lived session.

        fetch=1 returns one row, fetch>1 returns that many rows,
        fetch='all' returns every row; any other non-None fetch value
        (e.g. 0, a negative int, or another string) raises ValueError.
        With fetch=None the raw result object is returned — note the
        session is closed on exit, so consume it with care.
        """
        with Session(self._db_engine) as session:
            q = session.execute(sql)
            if always_commit:
                session.commit()
            if fetch is not None:
                raise_if_incorrect_type(fetch, (int, str))
                if isinstance(fetch, int):
                    if fetch == 1:
                        return q.fetchone()
                    elif fetch > 1:
                        return q.fetchmany(fetch)
                elif isinstance(fetch, str):
                    if fetch == 'all':
                        return q.fetchall()
                # Reached for fetch <= 0 or a string other than 'all'.
                raise ValueError
            return q
    def get_db_inspector(self):
        # Local import keeps the inspector dependency out of module import time.
        from sqlalchemy import inspect
        inspector = inspect(self._db_engine)
        return inspector
    def get_schemas(self, schemas='all', tables='all'):
        """Return {schema: {table: column-info}} for the selected schemas/tables."""
        inspector = self.get_db_inspector()
        from collections import defaultdict
        schema_containers = defaultdict(dict)
        # NOTE: _get_schemas validates the schema list again internally.
        schemas = _validate_schema_names(inspector, schemas)
        return _get_schemas(inspector, schema_containers, schemas, tables)
    def get_tables_names(self):
        # Table names of the default schema only.
        inspector = self.get_db_inspector()
        return inspector.get_table_names()
def _get_schemas(inspector, schema_containers, schemas: Union[str, List[str]], tables: Union[str, List[List[str]], List[str]]):
    """Populate *schema_containers* with column metadata per schema/table.

    *tables* may be a list of lists (one table group per schema, matched
    positionally), a flat list / 'all' (same selection for every schema).
    Anything else raises ValueError.
    """
    validated_schemas = _validate_schema_names(inspector, schemas)

    def _fill(schema_name, wanted_tables):
        # Resolve the table selection for one schema and record its columns.
        for tbl in _validate_table_names(inspector, schema_name, wanted_tables):
            schema_containers[schema_name][tbl] = inspector.get_columns(tbl, schema=schema_name)

    if check_all_element_type_uniform(tables, list):
        for schema_name, table_group in zip(validated_schemas, tables):
            _fill(schema_name, table_group)
        return schema_containers
    if check_all_element_type_uniform(tables, str) or tables == 'all':
        for schema_name in validated_schemas:
            _fill(schema_name, tables)
        return schema_containers
    raise ValueError
def _validate_schema_names(inspector, schemas: List[str]):
if schemas == 'all':
return inspector.get_schema_names()
if isinstance(schemas, list):
raise_if_not_all_value_contains(schemas, inspector.get_schema_names())
return schemas
raise ValueError('schemas must be "all" or a list of string')
def _validate_table_names(inspector, schema: str, tables: List[str]):
if tables == 'all':
return inspector.get_table_names(schema=schema)
if isinstance(tables, list):
if check_all_element_type_uniform(tables, str):
raise_if_value_not_contains(tables, inspector.get_table_names(schema=schema))
return tables
elif check_all_element_type_uniform(tables, list):
for sub_tab in tables:
print(sub_tab, inspector.get_table_names(schema=schema))
raise_if_value_not_contains(sub_tab, inspector.get_table_names(schema=schema))
return tables
raise ValueError('tables name are not existed in database, pls verify')
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((2815, 2859), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'list'], {}), '(tables, list)\n', (2845, 2859), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((767, 812), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['self._db_engine'], {}), '(self._db_engine)\n', (795, 812), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((954, 1047), 'sqlmodel.create_engine', 'create_engine', (['f"""{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}"""'], {'echo': 'echo'}), "(f'{db_type}+{db_driver}://{user}:{password}@{host}:{port}/{db}',\n echo=echo)\n", (967, 1047), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((1163, 1206), 'sqlmodel.create_engine', 'create_engine', (['f"""{full_db_path}"""'], {'echo': 'echo'}), "(f'{full_db_path}', echo=echo)\n", (1176, 1206), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((2117, 2141), 'sqlalchemy.inspect', 'inspect', (['self._db_engine'], {}), '(self._db_engine)\n', (2124, 2141), False, 'from sqlalchemy import inspect\n'), ((2341, 2358), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (2352, 2358), False, 'from collections import defaultdict\n'), ((4104, 4147), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'str'], {}), '(tables, str)\n', (4134, 4147), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((1400, 1424), 'sqlmodel.Session', 'Session', (['self._db_engine'], {}), '(self._db_engine)\n', (1407, 1424), False, 'from sqlmodel import create_engine, Session, SQLModel\n'), ((3196, 
3239), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'str'], {}), '(tables, str)\n', (3226, 3239), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((4278, 4322), 'pyemits.common.validation.check_all_element_type_uniform', 'check_all_element_type_uniform', (['tables', 'list'], {}), '(tables, list)\n', (4308, 4322), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n'), ((1588, 1630), 'pyemits.common.validation.raise_if_incorrect_type', 'raise_if_incorrect_type', (['fetch', '(int, str)'], {}), '(fetch, (int, str))\n', (1611, 1630), False, 'from pyemits.common.validation import raise_if_incorrect_type, raise_if_not_all_value_contains, raise_if_not_all_element_type_uniform, check_all_element_type_uniform, raise_if_value_not_contains\n')] |
"""add verified result to application
Revision ID: d8a156ffaeae
Revises: <KEY>
Create Date: 2022-03-30 16:00:13.195216
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = "d8a156ffaeae"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
    """Apply: add a nullable string column ``verified`` to ``job_applicant``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "job_applicant",
        sa.Column("verified", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    )
    # ### end Alembic commands ###
def downgrade():
    """Revert: drop the ``verified`` column from ``job_applicant``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("job_applicant", "verified")
    # ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((659, 702), 'alembic.op.drop_column', 'op.drop_column', (['"""job_applicant"""', '"""verified"""'], {}), "('job_applicant', 'verified')\n", (673, 702), False, 'from alembic import op\n'), ((477, 511), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (509, 511), False, 'import sqlmodel\n')] |
from datetime import date
from typing import List, Optional
from api.ecoindex.models.responses import ApiEcoindex
from api.models.enums import Version
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlmodel import select
from db.helper import date_filter
async def get_host_list_db(
    session: AsyncSession,
    version: Optional[Version] = Version.v1,
    q: Optional[str] = None,
    date_from: Optional[date] = None,
    date_to: Optional[date] = None,
    page: Optional[int] = 1,
    size: Optional[int] = 50,
) -> List[str]:
    """Return the paginated list of hosts for an API version.

    Optional filters: substring match on host (``q``) and an inclusive
    date window (``date_from``/``date_to``). Results are grouped and
    ordered by host.
    """
    # NOTE(review): offset/limit are chained before the LIKE filter and the
    # GROUP BY; confirm the composed statement paginates as intended.
    statement = (
        select(ApiEcoindex.host)
        .where(ApiEcoindex.version == version.get_version_number())
        .offset(size * (page - 1))
        .limit(size)
    )
    if q:
        statement = statement.filter(ApiEcoindex.host.like(f"%{q}%"))
    statement = date_filter(statement=statement, date_from=date_from, date_to=date_to)
    statement = statement.group_by(ApiEcoindex.host).order_by(ApiEcoindex.host)
    hosts = await session.execute(statement)
    return hosts.scalars().all()
async def get_count_hosts_db(
    session: AsyncSession,
    version: Optional[Version] = Version.v1,
    q: Optional[str] = None,
    date_from: Optional[date] = None,
    date_to: Optional[date] = None,
) -> int:
    """Count the distinct hosts matching the same filters as the host list.

    The count is taken over the grouped-host sub-query so it stays
    consistent with :func:`get_host_list_db` pagination.
    """
    from sqlalchemy import text  # local import: only needed by this raw query

    # Build the statement with bound parameters instead of f-string
    # interpolation: ``q`` and the date bounds come from the API caller, and
    # splicing them directly into the SQL string is an injection risk.
    sub_statement = "SELECT host FROM apiecoindex WHERE version = :version"
    params = {"version": version.get_version_number()}
    if q:
        sub_statement += " AND host LIKE :host_filter"
        params["host_filter"] = f"%{q}%"
    if date_from:
        sub_statement += " AND date >= :date_from"
        params["date_from"] = date_from
    if date_to:
        sub_statement += " AND date <= :date_to"
        params["date_to"] = date_to
    sub_statement += " GROUP BY host"
    statement = text(f"SELECT count(*) FROM ({sub_statement}) t")
    result = await session.execute(statement, params)
    return result.scalar()
| [
"sqlmodel.select"
] | [((830, 900), 'db.helper.date_filter', 'date_filter', ([], {'statement': 'statement', 'date_from': 'date_from', 'date_to': 'date_to'}), '(statement=statement, date_from=date_from, date_to=date_to)\n', (841, 900), False, 'from db.helper import date_filter\n'), ((780, 811), 'api.ecoindex.models.responses.ApiEcoindex.host.like', 'ApiEcoindex.host.like', (['f"""%{q}%"""'], {}), "(f'%{q}%')\n", (801, 811), False, 'from api.ecoindex.models.responses import ApiEcoindex\n'), ((577, 601), 'sqlmodel.select', 'select', (['ApiEcoindex.host'], {}), '(ApiEcoindex.host)\n', (583, 601), False, 'from sqlmodel import select\n')] |
from datetime import datetime, date
from decimal import Decimal
from typing import Optional, List
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryTravelReimburse(SQLModel, table=True):
    """ORM table for a travel-reimbursement entry linked to a history record."""
    id: Optional[int] = Field(default=None, primary_key=True)  # autoincrement PK
    history_id: int  # reference to the history row (no FK constraint declared)
    history_procedure_id: int
    group: str
    guardian_id: Optional[int] = None
    procedure_id: int
    amount: float  # reimbursed amount
    detail: str
    pdf_path: str  # generated PDF location on disk
    signature_path: str
    document_path: str
    created_at: datetime
    updated_at: datetime
    created_by: int  # presumably a user id — TODO confirm against callers
    updated_by: Optional[int] = None
@router.post("/history_travel_reimburse", response_model=HistoryTravelReimburse)
async def create_history_travel_reimburse(history_travel_reimburse: HistoryTravelReimburse, session: AsyncSession = Depends(get_session)):
    """Insert a new reimbursement row and return it with DB-generated fields."""
    session.add(history_travel_reimburse)
    await session.commit()
    await session.refresh(history_travel_reimburse)  # reload e.g. the generated id
    return history_travel_reimburse
@router.get("/history_travel_reimburse/{id}", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse(id: int, session: AsyncSession = Depends(get_session)):
    """Fetch a single reimbursement row by primary key (null when absent)."""
    query = select(HistoryTravelReimburse).where(HistoryTravelReimburse.id == id)
    result = await session.execute(query)
    return result.scalars().first()
@router.put("/history_travel_reimburse/{id}", response_model=HistoryTravelReimburse)
async def update_history_travel_reimburse(id: int, session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub — always responds with null.
    return None
@router.delete("/history_travel_reimburse/{id}")
async def delete_history_travel_reimburse(session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub — the path declares {id} but no id parameter
    # is accepted here; confirm the intended signature.
    return None
@router.get("/history_travel_reimburse/patient/{patient_id}", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse_patient(patient_id: int, session: AsyncSession = Depends(get_session)):
    """Return the reimbursement tied to a patient's history record.

    NOTE(review): ``HistoryTravelReimburse.patient_id`` is not declared on
    the model above — confirm the intended lookup column.
    """
    # Bug fix: the original compared the raw ``Result`` object returned by
    # session.execute (not a scalar value) against ``history_id``; extract
    # the first scalar before using it in the second query.
    history_id_result = await session.execute(
        select(HistoryTravelReimburse.id).where(HistoryTravelReimburse.patient_id == patient_id)
    )
    history_id = history_id_result.scalars().first()
    rows = await session.execute(
        select(HistoryTravelReimburse).where(HistoryTravelReimburse.history_id == history_id)
    )
    return rows.scalars().first()
@router.get("/history_travel_reimburse", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse_daily(session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub — always responds with null.
    return None
@router.get("/history_travel_reimburse/{id}", response_model=HistoryTravelReimburse)
async def get_history_travel_reimburse_pdf(id: int, session: AsyncSession = Depends(get_session)):
    """Return only the stored PDF path for the reimbursement with this id."""
    query = select(HistoryTravelReimburse.pdf_path).where(HistoryTravelReimburse.id == id)
    result = await session.execute(query)
    return result.scalars().first()
@router.post("/history_travel_reimburse/{id}/document", response_model=HistoryTravelReimburse)
async def upload_document(session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub — no file parameter is accepted yet.
    return None
@router.post("/history_travel_reimburse/{id}/signature")
async def upload_signature(session: AsyncSession = Depends(get_session)):
return None | [
"sqlmodel.Field"
] | [((295, 306), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (304, 306), False, 'from fastapi import APIRouter, Depends\n'), ((385, 422), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (390, 422), False, 'from sqlmodel import Field, SQLModel\n'), ((953, 973), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (960, 973), False, 'from fastapi import APIRouter, Depends\n'), ((1292, 1312), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1299, 1312), False, 'from fastapi import APIRouter, Depends\n'), ((1713, 1733), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1720, 1733), False, 'from fastapi import APIRouter, Depends\n'), ((1869, 1889), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1876, 1889), False, 'from fastapi import APIRouter, Depends\n'), ((2099, 2119), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2106, 2119), False, 'from fastapi import APIRouter, Depends\n'), ((2654, 2674), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2661, 2674), False, 'from fastapi import APIRouter, Depends\n'), ((2856, 2876), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2863, 2876), False, 'from fastapi import APIRouter, Depends\n'), ((3271, 3291), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3278, 3291), False, 'from fastapi import APIRouter, Depends\n'), ((3420, 3440), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3427, 3440), False, 'from fastapi import APIRouter, Depends\n'), ((1369, 1399), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse'], {}), '(HistoryTravelReimburse)\n', (1375, 1399), False, 'from sqlalchemy import select\n'), ((2161, 2194), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse.id'], {}), '(HistoryTravelReimburse.id)\n', (2167, 2194), 
False, 'from sqlalchemy import select\n'), ((2305, 2335), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse'], {}), '(HistoryTravelReimburse)\n', (2311, 2335), False, 'from sqlalchemy import select\n'), ((2933, 2972), 'sqlalchemy.select', 'select', (['HistoryTravelReimburse.pdf_path'], {}), '(HistoryTravelReimburse.pdf_path)\n', (2939, 2972), False, 'from sqlalchemy import select\n')] |
import asyncio
import os
from decimal import Decimal
from typing import Optional
from pydantic import condecimal
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlmodel import Field, SQLModel, select
class Restaurant(SQLModel, table=True):
    """ORM table: a restaurant and the currency used to price its menu."""
    id: int = Field(default=None, primary_key=True)  # autoincrement PK (None until flushed)
    name: str = Field(index=True)  # indexed for name lookups
    address: str
    currency: str  # plain string (e.g. "EUR"); no enum constraint
class MenuItem(SQLModel, table=True):
    """ORM table: one priced menu entry belonging to a restaurant."""
    id: int = Field(default=None, primary_key=True)  # autoincrement PK
    name: str
    price: condecimal(decimal_places=2)  # stored as Decimal with 2 decimal places
    restaurant_id: Optional[int] = Field(default=None, foreign_key="restaurant.id")  # nullable FK
async def main() -> None:
    """End-to-end demo: create the schema, write one restaurant with two
    menu items, then read the items back and sanity-check them."""
    engine = create_async_engine(
        os.environ.get("RESTAURANT_DB_URL", "sqlite+aiosqlite:///my_db")
    )
    async with engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
    async with AsyncSession(engine, expire_on_commit=False) as session:
        # --- write phase ---
        pizzeria = Restaurant(
            name="Second best Pizza in town", address="Foo street 1", currency="EUR"
        )
        session.add(pizzeria)
        await session.commit()
        dishes = (
            MenuItem(name="Margherita", price=10.50, restaurant_id=pizzeria.id),
            MenuItem(name="2xPineapple", price=16.80, restaurant_id=pizzeria.id),
        )
        session.add_all(dishes)
        await session.commit()
        # --- read phase ---
        lookup = (
            select(MenuItem)
            .join(Restaurant)
            .where(Restaurant.name == "Second best Pizza in town")
        )
        fetched = (await session.execute(lookup)).scalars().all()
        assert len(fetched) == 2
        assert fetched[0] == MenuItem(
            id=1, name="Margherita", price=Decimal("10.50"), restaurant_id=pizzeria.id
        )
# Run the async demo only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
| [
"sqlmodel.select",
"sqlmodel.Field"
] | [((284, 321), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (289, 321), False, 'from sqlmodel import Field, SQLModel, select\n'), ((338, 355), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (343, 355), False, 'from sqlmodel import Field, SQLModel, select\n'), ((445, 482), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (450, 482), False, 'from sqlmodel import Field, SQLModel, select\n'), ((508, 536), 'pydantic.condecimal', 'condecimal', ([], {'decimal_places': '(2)'}), '(decimal_places=2)\n', (518, 536), False, 'from pydantic import condecimal\n'), ((573, 621), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""restaurant.id"""'}), "(default=None, foreign_key='restaurant.id')\n", (578, 621), False, 'from sqlmodel import Field, SQLModel, select\n'), ((663, 727), 'os.environ.get', 'os.environ.get', (['"""RESTAURANT_DB_URL"""', '"""sqlite+aiosqlite:///my_db"""'], {}), "('RESTAURANT_DB_URL', 'sqlite+aiosqlite:///my_db')\n", (677, 727), False, 'import os\n'), ((744, 771), 'sqlalchemy.ext.asyncio.create_async_engine', 'create_async_engine', (['db_url'], {}), '(db_url)\n', (763, 771), False, 'from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine\n'), ((888, 935), 'sqlalchemy.ext.asyncio.AsyncSession', 'AsyncSession', (['db_engine'], {'expire_on_commit': '(False)'}), '(db_engine, expire_on_commit=False)\n', (900, 935), False, 'from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine\n'), ((1791, 1807), 'decimal.Decimal', 'Decimal', (['"""10.50"""'], {}), "('10.50')\n", (1798, 1807), False, 'from decimal import Decimal\n'), ((1455, 1471), 'sqlmodel.select', 'select', (['MenuItem'], {}), '(MenuItem)\n', (1461, 1471), False, 'from sqlmodel import Field, SQLModel, select\n')] |
"""init_db
Revision ID: 23799b5136c5
Revises:
Create Date: 2021-12-11 00:49:58.116933
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '23799b5136c5'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Apply: create the ``user`` and ``task`` tables with their indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=True),
    sa.Column('full_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('hashed_password', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_superuser', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=False)
    op.create_index(op.f('ix_user_full_name'), 'user', ['full_name'], unique=False)
    op.create_index(op.f('ix_user_hashed_password'), 'user', ['hashed_password'], unique=False)
    op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
    op.create_index(op.f('ix_user_is_active'), 'user', ['is_active'], unique=False)
    op.create_index(op.f('ix_user_is_superuser'), 'user', ['is_superuser'], unique=False)
    # task references user via a foreign key, so it is created second.
    op.create_table('task',
    sa.Column('status', sa.Enum('draft', 'in_process', 'delete', 'done', name='taskstatus'), nullable=True),
    sa.Column('id', sa.Integer(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_task_created_at'), 'task', ['created_at'], unique=False)
    op.create_index(op.f('ix_task_id'), 'task', ['id'], unique=False)
    op.create_index(op.f('ix_task_title'), 'task', ['title'], unique=False)
    op.create_index(op.f('ix_task_user_id'), 'task', ['user_id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert: drop ``task`` then ``user`` (indexes first, FK holder first)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_task_user_id'), table_name='task')
    op.drop_index(op.f('ix_task_title'), table_name='task')
    op.drop_index(op.f('ix_task_id'), table_name='task')
    op.drop_index(op.f('ix_task_created_at'), table_name='task')
    op.drop_table('task')
    op.drop_index(op.f('ix_user_is_superuser'), table_name='user')
    op.drop_index(op.f('ix_user_is_active'), table_name='user')
    op.drop_index(op.f('ix_user_id'), table_name='user')
    op.drop_index(op.f('ix_user_hashed_password'), table_name='user')
    op.drop_index(op.f('ix_user_full_name'), table_name='user')
    op.drop_index(op.f('ix_user_email'), table_name='user')
    op.drop_table('user')
# ### end Alembic commands ### | [
"sqlmodel.sql.sqltypes.AutoString"
] | [((2498, 2519), 'alembic.op.drop_table', 'op.drop_table', (['"""task"""'], {}), "('task')\n", (2511, 2519), False, 'from alembic import op\n'), ((2906, 2927), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (2919, 2927), False, 'from alembic import op\n'), ((808, 837), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (831, 837), True, 'import sqlalchemy as sa\n'), ((864, 885), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (868, 885), False, 'from alembic import op\n'), ((940, 965), 'alembic.op.f', 'op.f', (['"""ix_user_full_name"""'], {}), "('ix_user_full_name')\n", (944, 965), False, 'from alembic import op\n'), ((1024, 1055), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (1028, 1055), False, 'from alembic import op\n'), ((1120, 1138), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (1124, 1138), False, 'from alembic import op\n'), ((1190, 1215), 'alembic.op.f', 'op.f', (['"""ix_user_is_active"""'], {}), "('ix_user_is_active')\n", (1194, 1215), False, 'from alembic import op\n'), ((1274, 1302), 'alembic.op.f', 'op.f', (['"""ix_user_is_superuser"""'], {}), "('ix_user_is_superuser')\n", (1278, 1302), False, 'from alembic import op\n'), ((1725, 1774), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.id']"], {}), "(['user_id'], ['user.id'])\n", (1748, 1774), True, 'import sqlalchemy as sa\n'), ((1782, 1811), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1805, 1811), True, 'import sqlalchemy as sa\n'), ((1838, 1864), 'alembic.op.f', 'op.f', (['"""ix_task_created_at"""'], {}), "('ix_task_created_at')\n", (1842, 1864), False, 'from alembic import op\n'), ((1924, 1942), 'alembic.op.f', 'op.f', (['"""ix_task_id"""'], {}), "('ix_task_id')\n", (1928, 1942), False, 'from alembic import op\n'), ((1994, 
2015), 'alembic.op.f', 'op.f', (['"""ix_task_title"""'], {}), "('ix_task_title')\n", (1998, 2015), False, 'from alembic import op\n'), ((2070, 2093), 'alembic.op.f', 'op.f', (['"""ix_task_user_id"""'], {}), "('ix_task_user_id')\n", (2074, 2093), False, 'from alembic import op\n'), ((2268, 2291), 'alembic.op.f', 'op.f', (['"""ix_task_user_id"""'], {}), "('ix_task_user_id')\n", (2272, 2291), False, 'from alembic import op\n'), ((2330, 2351), 'alembic.op.f', 'op.f', (['"""ix_task_title"""'], {}), "('ix_task_title')\n", (2334, 2351), False, 'from alembic import op\n'), ((2390, 2408), 'alembic.op.f', 'op.f', (['"""ix_task_id"""'], {}), "('ix_task_id')\n", (2394, 2408), False, 'from alembic import op\n'), ((2447, 2473), 'alembic.op.f', 'op.f', (['"""ix_task_created_at"""'], {}), "('ix_task_created_at')\n", (2451, 2473), False, 'from alembic import op\n'), ((2538, 2566), 'alembic.op.f', 'op.f', (['"""ix_user_is_superuser"""'], {}), "('ix_user_is_superuser')\n", (2542, 2566), False, 'from alembic import op\n'), ((2605, 2630), 'alembic.op.f', 'op.f', (['"""ix_user_is_active"""'], {}), "('ix_user_is_active')\n", (2609, 2630), False, 'from alembic import op\n'), ((2669, 2687), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (2673, 2687), False, 'from alembic import op\n'), ((2726, 2757), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (2730, 2757), False, 'from alembic import op\n'), ((2796, 2821), 'alembic.op.f', 'op.f', (['"""ix_user_full_name"""'], {}), "('ix_user_full_name')\n", (2800, 2821), False, 'from alembic import op\n'), ((2860, 2881), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (2864, 2881), False, 'from alembic import op\n'), ((416, 428), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (426, 428), True, 'import sqlalchemy as sa\n'), ((473, 507), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (505, 507), 
False, 'import sqlmodel\n'), ((549, 583), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (581, 583), False, 'import sqlmodel\n'), ((634, 668), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (666, 668), False, 'import sqlmodel\n'), ((714, 726), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (724, 726), True, 'import sqlalchemy as sa\n'), ((774, 786), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (784, 786), True, 'import sqlalchemy as sa\n'), ((1396, 1463), 'sqlalchemy.Enum', 'sa.Enum', (['"""draft"""', '"""in_process"""', '"""delete"""', '"""done"""'], {'name': '"""taskstatus"""'}), "('draft', 'in_process', 'delete', 'done', name='taskstatus')\n", (1403, 1463), True, 'import sqlalchemy as sa\n'), ((1501, 1513), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1511, 1513), True, 'import sqlalchemy as sa\n'), ((1559, 1572), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1570, 1572), True, 'import sqlalchemy as sa\n'), ((1613, 1647), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1645, 1647), False, 'import sqlmodel\n'), ((1691, 1703), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1701, 1703), True, 'import sqlalchemy as sa\n')] |
from sqlmodel import SQLModel, create_engine, Session
from victor_api.config import settings
# Single shared engine, configured entirely from application settings.
engine = create_engine(
    url=settings.db.url,
    echo=settings.db.echo,
    connect_args=settings.db.connect_args
)
def get_session():
    """Yield a short-lived ORM session bound to the module-level engine."""
    with Session(engine) as db_session:
        yield db_session
def init_db():
    # Create every table registered on SQLModel.metadata (idempotent).
    SQLModel.metadata.create_all(engine)
| [
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine"
] | [((104, 205), 'sqlmodel.create_engine', 'create_engine', ([], {'url': 'settings.db.url', 'echo': 'settings.db.echo', 'connect_args': 'settings.db.connect_args'}), '(url=settings.db.url, echo=settings.db.echo, connect_args=\n settings.db.connect_args)\n', (117, 205), False, 'from sqlmodel import SQLModel, create_engine, Session\n'), ((316, 352), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (344, 352), False, 'from sqlmodel import SQLModel, create_engine, Session\n'), ((245, 260), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (252, 260), False, 'from sqlmodel import SQLModel, create_engine, Session\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel, create_engine
class Team(SQLModel, table=True):
    """ORM table: a team that heroes can belong to."""
    id: Optional[int] = Field(default=None, primary_key=True)  # autoincrement PK
    name: str
    headquarters: str
class Hero(SQLModel, table=True):
    """ORM table: a hero, optionally linked to a team."""
    id: Optional[int] = Field(default=None, primary_key=True)  # autoincrement PK
    name: str
    secret_name: str
    age: Optional[int] = None
    team_id: Optional[int] = Field(default=None, foreign_key="team.id")  # nullable FK
# SQLite database file next to the script; echo=True logs every statement.
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
    # Emit CREATE TABLE for every model registered on SQLModel.metadata.
    SQLModel.metadata.create_all(engine)
def main():
    # Entry point: currently only provisions the schema.
    create_db_and_tables()
if __name__ == "__main__":
    main()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((541, 577), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (554, 577), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((141, 178), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (146, 178), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((275, 312), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (280, 312), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((408, 450), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (413, 450), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((612, 648), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (640, 648), False, 'from sqlmodel import Field, SQLModel, create_engine\n')] |
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from sqlmodel import Session, select
from sqlalchemy.exc import IntegrityError
from typing import List, Any
import datetime as dt
from app.src.common.utils import profiling_api
from app.src.models.product import (
Product,
ProductRead,
ProductCreate,
ProductUpdate,
ProductReadwithTypeAndTags,
)
from app.src.db.engine import get_session
from app.src.api.endpoints.product_type import get_producttype_or_404
from app.src.api.endpoints.tags import get_tag_or_404, get_tag_by_name_or_404
from app.src.common.security import get_current_user
from app.src.models.app_user import AppUser
from app.src.models.tag import Tag
from app.src.logger import logger
router = APIRouter()
async def get_product_or_404(
    *,
    session: Session = Depends(get_session),
    product_id: int = Path(..., ge=1),
    current_user: AppUser = Depends(get_current_user),
):
    """Dependency: load a product by id or raise 404.

    On success returns a dict with the product, the requesting username and
    the request start time so downstream handlers can call ``profiling_api``.
    """
    start_time = dt.datetime.now()
    try:
        db_product = session.get(Product, product_id)
        if db_product:
            return {
                "db_product": db_product,
                "username": current_user.username,
                "start_time": start_time,
            }
        else:
            logger.error("Product not found")
            logger.exception("Product not found")
            raise HTTPException(status_code=404, detail="Product not found")
    except KeyError:
        # NOTE(review): session.get is not expected to raise KeyError here —
        # confirm this branch is reachable. It also answers 400 where the
        # branch above answers 404 for the same condition.
        logger.error("Product not found")
        logger.exception("KeyError: Product not found")
        raise HTTPException(status_code=400, detail="Product not found")
@router.get("/", response_model=List[ProductReadwithTypeAndTags])
async def read_all_products(
    session: Session = Depends(get_session),
    offset: int = 0,
    limit: int = Query(default=100, lte=100),
    current_user: AppUser = Depends(get_current_user),
) -> Any:
    """Return a paginated slice of all products (default page size 100)."""
    started = dt.datetime.now()
    query = select(Product).offset(offset).limit(limit)
    rows = session.exec(query).all()
    profiling_api("Product:get:all", started, current_user.username)
    return rows
@router.get("/{product_id}", response_model=ProductReadwithTypeAndTags)
async def read_product(
    *, product_id: int, db_product: Product = Depends(get_product_or_404)
):
    """Return a single product by id.

    The lookup (and the 404 on an unknown id) is performed by the
    ``get_product_or_404`` dependency; this handler only records profiling
    data and unwraps the dependency's payload.
    """
    payload = db_product
    profiling_api(
        f"Product:read:by_id:{product_id}",
        payload["start_time"],
        payload["username"],
    )
    return payload["db_product"]
@router.post("/", response_model=ProductRead)
async def create_product(
    *,
    session: Session = Depends(get_session),
    product: ProductCreate,
    current_user: AppUser = Depends(get_current_user),
) -> Any:
    """
    Create a new single product.

    Raises 404 from the dependency check when the referenced product type
    does not exist, and 404 when a product with the same name already exists.
    """
    start_time = dt.datetime.now()
    # Ensure the referenced product type exists (raises 404 otherwise)
    _ = await get_producttype_or_404(producttype_id=product.type_id, session=session)
    try:
        db_product = Product.from_orm(product)
        session.add(db_product)
        session.commit()
        profiling_api("Product:insert:single", start_time, current_user.username)
    except IntegrityError:
        # FIX: roll back the failed transaction; without this the session is
        # left in a broken state and any later use of it fails.
        session.rollback()
        logger.error("Impossible to create product with same name")
        logger.exception("Integrity Error: Impossible to create product with same name")
        # NOTE(review): 409 Conflict would be more accurate than 404, but the
        # status code is kept unchanged so existing clients keep working.
        raise HTTPException(
            status_code=404, detail="Impossible to create product with same name"
        )
    session.refresh(db_product)
    return db_product
@router.patch("/update/{product_id}", response_model=ProductRead)
async def update_product_by_id(
    *,
    product_id: int,
    session: Session = Depends(get_session),
    product: ProductUpdate,
    db_product: Product = Depends(get_product_or_404),
):
    """
    Modify and existing product by id

    Only fields explicitly present in the request body are applied
    (``exclude_unset=True``); the row is resolved by the
    ``get_product_or_404`` dependency.
    """
    target = db_product["db_product"]
    for field_name, new_value in product.dict(exclude_unset=True).items():
        setattr(target, field_name, new_value)
    session.add(target)
    session.commit()
    session.refresh(target)
    profiling_api(
        f"Product:update:by_id:{product_id}",
        db_product["start_time"],
        db_product["username"],
    )
    return target
@router.patch("/update/by_name/{product_name}", response_model=ProductRead)
async def update_product_by_name(
    *,
    session: Session = Depends(get_session),
    product_name: str,
    product: ProductUpdate,
    current_user: AppUser = Depends(get_current_user),
):
    """
    Modify an existing product by name.

    Only fields explicitly present in the request body are applied.
    Raises 404 when no product carries the given name.
    """
    start_time = dt.datetime.now()
    # FIX: one_or_none() yields None for a missing product; the previous
    # .one() raised NoResultFound (an HTTP 500) and made the 404 branch
    # below unreachable.
    db_product = session.exec(
        select(Product).where(Product.name == product_name)
    ).one_or_none()
    if not db_product:
        raise HTTPException(
            status_code=404, detail="Product not found, impossible to update"
        )
    pr_data = product.dict(exclude_unset=True)  # to use the nullable data
    for key, value in pr_data.items():
        setattr(db_product, key, value)
    session.add(db_product)
    session.commit()
    session.refresh(db_product)
    profiling_api(
        f"Product:update:by_name:{product_name}",
        start_time,
        current_user.username,
    )
    return db_product
@router.patch(
    "/update/{product_id}/add_tag_by_id/{tag_id}",
    response_model=ProductReadwithTypeAndTags,
)
async def update_product_add_tag_by_id(
    *,
    product_id: int,
    session: Session = Depends(get_session),
    db_product: Product = Depends(get_product_or_404),
    db_tag: Tag = Depends(get_tag_or_404),
):
    """Attach a tag (resolved by id) to a product (resolved by id)."""
    target = db_product["db_product"]
    tag_row = db_tag["db_tag"]
    target.tags.append(tag_row)
    session.add(target)
    session.commit()
    session.refresh(target)
    profiling_api(
        f"Product:update:add_tag:by_id:{product_id}",
        db_product["start_time"],
        db_product["username"],
    )
    return target
@router.patch(
    "/update/{product_id}/add_tag_by_name/{tag_name}",
    response_model=ProductReadwithTypeAndTags,
)
async def update_product_add_tag_by_name(
    *,
    product_id: int,
    session: Session = Depends(get_session),
    db_product: Product = Depends(get_product_or_404),
    db_tag: Tag = Depends(get_tag_by_name_or_404),
):
    """Attach a tag (resolved by name) to a product (resolved by id)."""
    target = db_product["db_product"]
    tag_row = db_tag["db_tag"]
    target.tags.append(tag_row)
    session.add(target)
    session.commit()
    session.refresh(target)
    profiling_api(
        f"Product:update:add_tag:by_name:{product_id}",
        db_product["start_time"],
        db_product["username"],
    )
    return target
@router.patch(
    "/update/{product_id}/remove_tag_by_id/{tag_id}",
    response_model=ProductReadwithTypeAndTags,
)
async def update_product_remove_tag_by_id(
    *,
    product_id: int,
    session: Session = Depends(get_session),
    db_product: Product = Depends(get_product_or_404),
    db_tag: Tag = Depends(get_tag_or_404),
):
    """
    Remove tag from product

    Product and tag are resolved by the dependencies (each raising 404 on an
    unknown id). Returns the updated product on success; raises 404 if the
    removal fails.
    """
    # The dependencies return dicts wrapping the loaded row plus profiling data.
    existing_product = db_product["db_product"]
    existing_tag = db_tag["db_tag"]
    try:
        # list.remove raises ValueError when the tag is not attached to the
        # product; that is the expected failure path here.
        existing_product.tags.remove(existing_tag)
        session.add(existing_product)
        session.commit()
        session.refresh(existing_product)
        profiling_api(
            f"Product:update:remove_tag:by_id:{product_id}",
            db_product["start_time"],
            db_product["username"],
        )
    except Exception as message:
        # NOTE(review): this broad except also maps commit/refresh failures to
        # a 404 "tag not existing" response — consider narrowing to ValueError.
        logger.error(message)
        logger.exception(message)
        raise HTTPException(
            status_code=404,
            detail="Impossible to remove the tag: product or tag not existing",
        )
    return existing_product
@router.patch(
    "/update/{product_id}/remove_tag_by_name/{tag_name}",
    response_model=ProductReadwithTypeAndTags,
)
async def update_product_remove_tag_by_name(
    *,
    product_id: int,
    session: Session = Depends(get_session),
    db_product: Product = Depends(get_product_or_404),
    db_tag: Tag = Depends(get_tag_by_name_or_404),
):
    """
    Remove tag from product

    Product and tag are resolved by the dependencies (each raising 404 when
    not found). Returns the updated product on success; raises 404 if the
    removal fails — mirroring the remove_tag_by_id endpoint.
    """
    existing_db_product = db_product["db_product"]
    existing_db_tag = db_tag["db_tag"]
    try:
        # FIX: previously called db_product.tags.remove(...) — but db_product
        # is the dependency's dict, not the model, so this always raised
        # AttributeError. Operate on the unwrapped row instead.
        existing_db_product.tags.remove(existing_db_tag)
        session.add(existing_db_product)
        session.commit()
        session.refresh(existing_db_product)
        profiling_api(
            f"Product:update:remove_tag:by_name:{product_id}",
            db_product["start_time"],
            db_product["username"],
        )
    except Exception as message:
        # Same error handling as the by_id variant for consistency.
        logger.error(message)
        logger.exception(message)
        raise HTTPException(
            status_code=404,
            detail="Impossible to remove the tag: product or tag not existing",
        )
    # FIX: return the product (matches response_model), not the wrapper dict.
    return existing_db_product
@router.delete("/{product_id}")
async def delete_product(
    *,
    product_id: int,
    session: Session = Depends(get_session),
    db_product: Product = Depends(get_product_or_404),
):
    """
    Delete and remove an existing product by id; it must be >= 1

    The row is resolved (and a 404 raised when missing) by the
    get_product_or_404 dependency.
    """
    existing_db_product = db_product["db_product"]
    session.delete(existing_db_product)
    session.commit()
    # FIX: profiling label previously said "Product:update:add_tag:by_id"
    # (copy-paste from the add-tag endpoint), mislabelling delete metrics.
    profiling_api(
        f"Product:delete:by_id:{product_id}",
        db_product["start_time"],
        db_product["username"],
    )
    return {"ok": True}
| [
"sqlmodel.select"
] | [((751, 762), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (760, 762), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((825, 845), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (832, 845), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((869, 884), 'fastapi.Path', 'Path', (['...'], {'ge': '(1)'}), '(..., ge=1)\n', (873, 884), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((914, 939), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (921, 939), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((961, 978), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (976, 978), True, 'import datetime as dt\n'), ((1734, 1754), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1741, 1754), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1794, 1821), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (1799, 1821), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1851, 1876), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (1858, 1876), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1947, 1964), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (1962, 1964), True, 'import datetime as dt\n'), ((2048, 2115), 'app.src.common.utils.profiling_api', 'profiling_api', (['"""Product:get:all"""', 'start_time', 'current_user.username'], {}), "('Product:get:all', start_time, current_user.username)\n", (2061, 2115), False, 'from app.src.common.utils import profiling_api\n'), ((2280, 2307), 'fastapi.Depends', 'Depends', (['get_product_or_404'], {}), '(get_product_or_404)\n', (2287, 2307), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), 
((2683, 2786), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""Product:read:by_id:{product_id}"""', "db_product['start_time']", "db_product['username']"], {}), "(f'Product:read:by_id:{product_id}', db_product['start_time'],\n db_product['username'])\n", (2696, 2786), False, 'from app.src.common.utils import profiling_api\n'), ((2954, 2974), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2961, 2974), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3032, 3057), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (3039, 3057), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3134, 3151), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (3149, 3151), True, 'import datetime as dt\n'), ((4023, 4043), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (4030, 4043), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((4099, 4126), 'fastapi.Depends', 'Depends', (['get_product_or_404'], {}), '(get_product_or_404)\n', (4106, 4126), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((4755, 4861), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""Product:update:by_id:{product_id}"""', "db_product['start_time']", "db_product['username']"], {}), "(f'Product:update:by_id:{product_id}', db_product['start_time'\n ], db_product['username'])\n", (4768, 4861), False, 'from app.src.common.utils import profiling_api\n'), ((5058, 5078), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5065, 5078), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((5159, 5184), 'fastapi.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (5166, 5184), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((5261, 5278), 'datetime.datetime.now', 'dt.datetime.now', ([], 
{}), '()\n', (5276, 5278), True, 'import datetime as dt\n'), ((5747, 5841), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""Product:update:by_name:{product_name}"""', 'start_time', 'current_user.username'], {}), "(f'Product:update:by_name:{product_name}', start_time,\n current_user.username)\n", (5760, 5841), False, 'from app.src.common.utils import profiling_api\n'), ((6099, 6119), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (6106, 6119), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((6147, 6174), 'fastapi.Depends', 'Depends', (['get_product_or_404'], {}), '(get_product_or_404)\n', (6154, 6174), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((6194, 6217), 'fastapi.Depends', 'Depends', (['get_tag_or_404'], {}), '(get_tag_or_404)\n', (6201, 6217), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((6491, 6605), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""Product:update:add_tag:by_id:{product_id}"""', "db_product['start_time']", "db_product['username']"], {}), "(f'Product:update:add_tag:by_id:{product_id}', db_product[\n 'start_time'], db_product['username'])\n", (6504, 6605), False, 'from app.src.common.utils import profiling_api\n'), ((6874, 6894), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (6881, 6894), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((6922, 6949), 'fastapi.Depends', 'Depends', (['get_product_or_404'], {}), '(get_product_or_404)\n', (6929, 6949), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((6969, 7000), 'fastapi.Depends', 'Depends', (['get_tag_by_name_or_404'], {}), '(get_tag_by_name_or_404)\n', (6976, 7000), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((7274, 7390), 'app.src.common.utils.profiling_api', 'profiling_api', 
(['f"""Product:update:add_tag:by_name:{product_id}"""', "db_product['start_time']", "db_product['username']"], {}), "(f'Product:update:add_tag:by_name:{product_id}', db_product[\n 'start_time'], db_product['username'])\n", (7287, 7390), False, 'from app.src.common.utils import profiling_api\n'), ((7659, 7679), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (7666, 7679), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((7707, 7734), 'fastapi.Depends', 'Depends', (['get_product_or_404'], {}), '(get_product_or_404)\n', (7714, 7734), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((7754, 7777), 'fastapi.Depends', 'Depends', (['get_tag_or_404'], {}), '(get_tag_or_404)\n', (7761, 7777), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((8737, 8757), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8744, 8757), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((8785, 8812), 'fastapi.Depends', 'Depends', (['get_product_or_404'], {}), '(get_product_or_404)\n', (8792, 8812), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((8832, 8863), 'fastapi.Depends', 'Depends', (['get_tag_by_name_or_404'], {}), '(get_tag_by_name_or_404)\n', (8839, 8863), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((9151, 9270), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""Product:update:remove_tag:by_name:{product_id}"""', "db_product['start_time']", "db_product['username']"], {}), "(f'Product:update:remove_tag:by_name:{product_id}', db_product\n ['start_time'], db_product['username'])\n", (9164, 9270), False, 'from app.src.common.utils import profiling_api\n'), ((9430, 9450), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9437, 9450), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), 
((9478, 9505), 'fastapi.Depends', 'Depends', (['get_product_or_404'], {}), '(get_product_or_404)\n', (9485, 9505), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((10039, 10153), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""Product:update:add_tag:by_id:{product_id}"""', "db_product['start_time']", "db_product['username']"], {}), "(f'Product:update:add_tag:by_id:{product_id}', db_product[\n 'start_time'], db_product['username'])\n", (10052, 10153), False, 'from app.src.common.utils import profiling_api\n'), ((3205, 3276), 'app.src.api.endpoints.product_type.get_producttype_or_404', 'get_producttype_or_404', ([], {'producttype_id': 'product.type_id', 'session': 'session'}), '(producttype_id=product.type_id, session=session)\n', (3227, 3276), False, 'from app.src.api.endpoints.product_type import get_producttype_or_404\n'), ((3348, 3373), 'app.src.models.product.Product.from_orm', 'Product.from_orm', (['product'], {}), '(product)\n', (3364, 3373), False, 'from app.src.models.product import Product, ProductRead, ProductCreate, ProductUpdate, ProductReadwithTypeAndTags\n'), ((3439, 3512), 'app.src.common.utils.profiling_api', 'profiling_api', (['"""Product:insert:single"""', 'start_time', 'current_user.username'], {}), "('Product:insert:single', start_time, current_user.username)\n", (3452, 3512), False, 'from app.src.common.utils import profiling_api\n'), ((5405, 5490), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Product not found, impossible to update"""'}), "(status_code=404, detail='Product not found, impossible to update'\n )\n", (5418, 5490), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((8084, 8201), 'app.src.common.utils.profiling_api', 'profiling_api', (['f"""Product:update:remove_tag:by_id:{product_id}"""', "db_product['start_time']", "db_product['username']"], {}), "(f'Product:update:remove_tag:by_id:{product_id}', db_product[\n 
'start_time'], db_product['username'])\n", (8097, 8201), False, 'from app.src.common.utils import profiling_api\n'), ((1261, 1294), 'app.src.logger.logger.error', 'logger.error', (['"""Product not found"""'], {}), "('Product not found')\n", (1273, 1294), False, 'from app.src.logger import logger\n'), ((1307, 1344), 'app.src.logger.logger.exception', 'logger.exception', (['"""Product not found"""'], {}), "('Product not found')\n", (1323, 1344), False, 'from app.src.logger import logger\n'), ((1363, 1421), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Product not found"""'}), "(status_code=404, detail='Product not found')\n", (1376, 1421), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((1451, 1484), 'app.src.logger.logger.error', 'logger.error', (['"""Product not found"""'], {}), "('Product not found')\n", (1463, 1484), False, 'from app.src.logger import logger\n'), ((1493, 1540), 'app.src.logger.logger.exception', 'logger.exception', (['"""KeyError: Product not found"""'], {}), "('KeyError: Product not found')\n", (1509, 1540), False, 'from app.src.logger import logger\n'), ((1555, 1613), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""Product not found"""'}), "(status_code=400, detail='Product not found')\n", (1568, 1613), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((3548, 3607), 'app.src.logger.logger.error', 'logger.error', (['"""Impossible to create product with same name"""'], {}), "('Impossible to create product with same name')\n", (3560, 3607), False, 'from app.src.logger import logger\n'), ((3616, 3701), 'app.src.logger.logger.exception', 'logger.exception', (['"""Integrity Error: Impossible to create product with same name"""'], {}), "('Integrity Error: Impossible to create product with same name'\n )\n", (3632, 3701), False, 'from app.src.logger import logger\n'), ((3711, 3800), 'fastapi.HTTPException', 
'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Impossible to create product with same name"""'}), "(status_code=404, detail=\n 'Impossible to create product with same name')\n", (3724, 3800), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((8285, 8306), 'app.src.logger.logger.error', 'logger.error', (['message'], {}), '(message)\n', (8297, 8306), False, 'from app.src.logger import logger\n'), ((8315, 8340), 'app.src.logger.logger.exception', 'logger.exception', (['message'], {}), '(message)\n', (8331, 8340), False, 'from app.src.logger import logger\n'), ((8355, 8458), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Impossible to remove the tag: product or tag not existing"""'}), "(status_code=404, detail=\n 'Impossible to remove the tag: product or tag not existing')\n", (8368, 8458), False, 'from fastapi import APIRouter, Depends, HTTPException, Query, Path\n'), ((5309, 5324), 'sqlmodel.select', 'select', (['Product'], {}), '(Product)\n', (5315, 5324), False, 'from sqlmodel import Session, select\n'), ((1993, 2008), 'sqlmodel.select', 'select', (['Product'], {}), '(Product)\n', (1999, 2008), False, 'from sqlmodel import Session, select\n')] |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select
from sqlalchemy.exc import NoResultFound
from ..models.client import Client
from ..models.epic import Epic
from datetime import datetime
router = APIRouter(prefix="/api/clients", tags=["client"])
@router.post("/")
async def post_client(*, client: Client, session: Session = Depends(get_session)):
    """
    Post a new client.

    Returns the stored client on success, or ``False`` when a client with
    the same name already exists.

    Parameters
    ----------
    client : Client
        Client that is to be added to the database.
    session : Session
        SQL session used for the insert; defaults to the running SQL model
        session dependency.
    """
    duplicate = session.exec(
        select(Client).where(Client.name == client.name)
    ).one_or_none()
    if duplicate is not None:
        return False
    session.add(client)
    session.commit()
    session.refresh(client)
    return client
@router.get("/")
async def read_clients(session: Session = Depends(get_session)):
    """
    Get a list of all clients.

    Parameters
    ----------
    session : Session
        SQL session used for the query; defaults to the running SQL model
        session dependency.
    """
    return session.exec(select(Client)).all()
@router.get("/active")
async def read_clients(session: Session = Depends(get_session)):
    """
    Get a list of all active clients, ordered by ascending id.

    Parameters
    ----------
    session : Session
        SQL session used for the query; defaults to the running SQL model
        session dependency.
    """
    query = (
        select(Client)
        .where(Client.is_active == True)  # noqa: E712 — SQLAlchemy expression
        .order_by(Client.id.asc())
    )
    return session.exec(query).all()
@router.get("/{client_id}")
async def read_clients(
    *, client_id: int = None, session: Session = Depends(get_session)
):
    """
    Get a client by client_id.

    Returns the client row, or a human-readable message when no client
    carries the given id.

    Parameters
    ----------
    client_id : int
        ID of client that is to be read.
    session : Session
        SQL session used for the query; defaults to the running SQL model
        session dependency.
    """
    statement = select(Client).where(Client.id == client_id)
    try:
        return session.exec(statement).one()
    except NoResultFound:
        return f"""There is no client with id = {client_id}"""
@router.get("/names/{name}")
async def read_clients_by_name(
    *, name: str = None, session: Session = Depends(get_session)
):
    """
    Get a client by client_name.

    Returns the client row, or a human-readable message when no client
    carries the given name.

    Parameters
    ----------
    name : str
        Name of client to be read.
    session : Session
        SQL session that is to be used to read a client.
        Defaults to creating a dependency on the running SQL model session.
    """
    statement = select(Client).where(Client.name == name)
    try:
        result = session.exec(statement).one()
        return result
    except NoResultFound:
        # FIX: mirror the /{client_id} endpoint — previously an unknown name
        # let NoResultFound propagate and surfaced as an HTTP 500.
        msg = f"""There is no client named {name}"""
        return msg
@router.get("/{client_id}/epics/")
async def read_clients_epics(
    client_id: int = None, session: Session = Depends(get_session)
):
    """
    Get epics from a client_id.

    Returns (client id, client name, epic name) rows for every epic joined
    to the given client.

    Parameters
    ----------
    client_id : int
        ID of client that is to be used to pull epics from.
    session : Session
        SQL session used for the query; defaults to the running SQL model
        session dependency.
    """
    query = select(Client.id, Client.name, Epic.name).select_from(Client)
    query = query.join(Epic).where(Client.id == client_id)
    rows = session.exec(query).all()
    return rows
@router.put("/{client_id}/deactivate-client")
async def update_clients(
    *,
    client_id: int,
    session: Session = Depends(get_session),
):
    """Deactivate a client by id (sets ``is_active`` to False)."""
    statement = select(Client).where(Client.id == client_id)
    client_to_update = session.exec(statement).one()
    # FIX: the model field is `is_active` (used by every other endpoint);
    # assigning to `.active` only set a stray attribute, so the deactivation
    # was never persisted. Also removed a dead duplicate query/exec pair.
    client_to_update.is_active = False
    client_to_update.updated_at = datetime.now()
    session.add(client_to_update)
    session.commit()
    session.refresh(client_to_update)
    return client_to_update
@router.put("/{client_id}/activate")
async def activate_clients(
    *,
    client_id: int,
    session: Session = Depends(get_session),
):
    """
    Activate a client using its id as a key.

    Parameters
    ----------
    client_id : int
        ID of the client to be activated.
    session : Session
        SQL session used for the update; defaults to the running SQL model
        session dependency.
    """
    client = session.exec(select(Client).where(Client.id == client_id)).one()
    client.is_active = True
    client.updated_at = datetime.now()
    session.add(client)
    session.commit()
    session.refresh(client)
    return client
@router.put("/{client_id}/deactivate")
async def deactivate_clients(
    *,
    client_id: int,
    session: Session = Depends(get_session),
):
    """
    Deactivate a client using its id as a key.

    Parameters
    ----------
    client_id : int
        ID of the client to be deactivated.
    session : Session
        SQL session used for the update; defaults to the running SQL model
        session dependency.
    """
    client = session.exec(select(Client).where(Client.id == client_id)).one()
    client.is_active = False
    client.updated_at = datetime.now()
    session.add(client)
    session.commit()
    session.refresh(client)
    return client
@router.put("/{client_id}/deactivate-epics")
async def update_clients_and_epics(
    *,
    client_id: int,
    session: Session = Depends(get_session),
):
    """
    Deactivate a client and its epics using the client's ID as a key.

    Parameters
    ----------
    client_id : int
        ID of the client to deactivate.
    session : Session
        SQL session that is to be used to deactivate the client and its respective epics.
        Defaults to creating a dependency on the running SQL model session.
    """
    # FIX: the function previously carried two consecutive triple-quoted
    # strings — only the first was the docstring; the second was a dead
    # expression statement. Merged into one docstring.
    statement1 = select(Client).where(Client.id == client_id)
    client_to_update = session.exec(statement1).one()
    client_to_update.is_active = False
    client_to_update.updated_at = datetime.now()
    session.add(client_to_update)
    statement2 = select(Epic).where(Epic.client_id == client_id)
    epics_to_update = session.exec(statement2).all()
    for epic in epics_to_update:
        epic.is_active = False
        session.add(epic)
    session.commit()
    return True
@router.put("/{client_id}")
async def update_clients(
    *,
    client_id: int = None,
    new_client_name: str = None,
    is_active: bool = None,
    session: Session = Depends(get_session),
):
    """
    Update a client from a client_id.

    Parameters
    ----------
    client_id : int
        ID of the client to update.
    new_client_name : str
        New name of the client; left unchanged when omitted.
    is_active : bool
        New active flag of the client; left unchanged when omitted.
    session : Session
        SQL session that is to be used to update a client.
        Defaults to creating a dependency on the running SQL model session.
    """
    statement = select(Client).where(Client.id == client_id)
    client_to_update = session.exec(statement).one()
    # FIX: compare against None with `is not None` (PEP 8); `!= None` invokes
    # __ne__ and is non-idiomatic. Behavior is unchanged for these types.
    if new_client_name is not None:
        client_to_update.name = new_client_name
    if is_active is not None:
        client_to_update.is_active = is_active
    client_to_update.updated_at = datetime.now()
    session.add(client_to_update)
    session.commit()
    session.refresh(client_to_update)
    return client_to_update
| [
"sqlmodel.select"
] | [((263, 312), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/api/clients"""', 'tags': "['client']"}), "(prefix='/api/clients', tags=['client'])\n", (272, 312), False, 'from fastapi import APIRouter, Depends\n'), ((393, 413), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (400, 413), False, 'from fastapi import APIRouter, Depends\n'), ((1050, 1070), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1057, 1070), False, 'from fastapi import APIRouter, Depends\n'), ((1334, 1348), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (1340, 1348), False, 'from sqlmodel import Session, select\n'), ((1479, 1499), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1486, 1499), False, 'from fastapi import APIRouter, Depends\n'), ((2023, 2043), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2030, 2043), False, 'from fastapi import APIRouter, Depends\n'), ((2694, 2714), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2701, 2714), False, 'from fastapi import APIRouter, Depends\n'), ((3235, 3255), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3242, 3255), False, 'from fastapi import APIRouter, Depends\n'), ((3923, 3943), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3930, 3943), False, 'from fastapi import APIRouter, Depends\n'), ((4499, 4519), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (4506, 4519), False, 'from fastapi import APIRouter, Depends\n'), ((5023, 5037), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5035, 5037), False, 'from datetime import datetime\n'), ((5280, 5300), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5287, 5300), False, 'from fastapi import APIRouter, Depends\n'), ((5811, 5825), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5823, 5825), False, 'from datetime import datetime\n'), 
((6081, 6101), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (6088, 6101), False, 'from fastapi import APIRouter, Depends\n'), ((6704, 6718), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6716, 6718), False, 'from datetime import datetime\n'), ((7172, 7192), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (7179, 7192), False, 'from fastapi import APIRouter, Depends\n'), ((7855, 7869), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7867, 7869), False, 'from datetime import datetime\n'), ((730, 744), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (736, 744), False, 'from sqlmodel import Session, select\n'), ((2357, 2371), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (2363, 2371), False, 'from sqlmodel import Session, select\n'), ((3019, 3033), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (3025, 3033), False, 'from sqlmodel import Session, select\n'), ((3994, 4008), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (4000, 4008), False, 'from sqlmodel import Session, select\n'), ((4145, 4157), 'sqlmodel.select', 'select', (['Epic'], {}), '(Epic)\n', (4151, 4157), False, 'from sqlmodel import Session, select\n'), ((4853, 4867), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (4859, 4867), False, 'from sqlmodel import Session, select\n'), ((5640, 5654), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (5646, 5654), False, 'from sqlmodel import Session, select\n'), ((6532, 6546), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (6538, 6546), False, 'from sqlmodel import Session, select\n'), ((6770, 6782), 'sqlmodel.select', 'select', (['Epic'], {}), '(Epic)\n', (6776, 6782), False, 'from sqlmodel import Session, select\n'), ((7570, 7584), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (7576, 7584), False, 'from sqlmodel import Session, select\n'), ((1784, 1798), 'sqlmodel.select', 'select', 
(['Client'], {}), '(Client)\n', (1790, 1798), False, 'from sqlmodel import Session, select\n'), ((3600, 3641), 'sqlmodel.select', 'select', (['Client.id', 'Client.name', 'Epic.name'], {}), '(Client.id, Client.name, Epic.name)\n', (3606, 3641), False, 'from sqlmodel import Session, select\n')] |
import asyncio
import uuid
from typing import List, Optional
import pytest
from fastapi_users import models
from pydantic import UUID4
from sqlmodel import Field, Relationship
from fastapi_users_db_sqlmodel import SQLModelBaseOAuthAccount, SQLModelBaseUserDB
class User(models.BaseUser):
first_name: Optional[str]
class UserCreate(models.BaseUserCreate):
first_name: Optional[str]
class UserUpdate(models.BaseUserUpdate):
pass
class UserDB(SQLModelBaseUserDB, User, table=True):
class Config:
orm_mode = True
class UserOAuth(User):
pass
class UserDBOAuth(SQLModelBaseUserDB, table=True):
__tablename__ = "user_oauth"
oauth_accounts: List["OAuthAccount"] = Relationship(
back_populates="user",
sa_relationship_kwargs={"lazy": "joined", "cascade": "all, delete"},
)
class OAuthAccount(SQLModelBaseOAuthAccount, table=True):
user_id: UUID4 = Field(foreign_key="user_oauth.id")
user: Optional[UserDBOAuth] = Relationship(back_populates="oauth_accounts")
@pytest.fixture(scope="session")
def event_loop():
"""Force the pytest-asyncio loop to be the main one."""
loop = asyncio.get_event_loop()
yield loop
@pytest.fixture
def oauth_account1() -> OAuthAccount:
return OAuthAccount(
id=uuid.UUID("b9089e5d-2642-406d-a7c0-cbc641aca0ec"),
oauth_name="service1",
access_token="TOKEN",
expires_at=1579000751,
account_id="user_oauth1",
account_email="<EMAIL>",
)
@pytest.fixture
def oauth_account2() -> OAuthAccount:
return OAuthAccount(
id=uuid.UUID("c9089e5d-2642-406d-a7c0-cbc641aca0ec"),
oauth_name="service2",
access_token="TOKEN",
expires_at=1579000751,
account_id="user_oauth2",
account_email="<EMAIL>",
)
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((1033, 1064), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (1047, 1064), False, 'import pytest\n'), ((706, 814), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""user"""', 'sa_relationship_kwargs': "{'lazy': 'joined', 'cascade': 'all, delete'}"}), "(back_populates='user', sa_relationship_kwargs={'lazy':\n 'joined', 'cascade': 'all, delete'})\n", (718, 814), False, 'from sqlmodel import Field, Relationship\n'), ((915, 949), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""user_oauth.id"""'}), "(foreign_key='user_oauth.id')\n", (920, 949), False, 'from sqlmodel import Field, Relationship\n'), ((984, 1029), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""oauth_accounts"""'}), "(back_populates='oauth_accounts')\n", (996, 1029), False, 'from sqlmodel import Field, Relationship\n'), ((1154, 1178), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1176, 1178), False, 'import asyncio\n'), ((1286, 1335), 'uuid.UUID', 'uuid.UUID', (['"""b9089e5d-2642-406d-a7c0-cbc641aca0ec"""'], {}), "('b9089e5d-2642-406d-a7c0-cbc641aca0ec')\n", (1295, 1335), False, 'import uuid\n'), ((1594, 1643), 'uuid.UUID', 'uuid.UUID', (['"""c9089e5d-2642-406d-a7c0-cbc641aca0ec"""'], {}), "('c9089e5d-2642-406d-a7c0-cbc641aca0ec')\n", (1603, 1643), False, 'import uuid\n')] |
from fastapi import Depends
from sqlmodel import select
from joj.horse import models, schemas
from joj.horse.schemas import StandardListResponse
from joj.horse.schemas.auth import Authentication
from joj.horse.utils.parser import parse_ordering_query, parse_pagination_query
from joj.horse.utils.router import MyRouter
router = MyRouter()
router_name = "problem_groups"
router_tag = "problem group"
router_prefix = "/api/v1"
@router.get("")
async def list_problem_groups(
ordering: schemas.OrderingQuery = Depends(parse_ordering_query()),
pagination: schemas.PaginationQuery = Depends(parse_pagination_query),
auth: Authentication = Depends(),
) -> StandardListResponse[schemas.ProblemGroup]:
statement = select(models.ProblemGroup)
problem_groups, count = await models.ProblemGroup.execute_list_statement(
statement, ordering, pagination
)
return StandardListResponse(problem_groups, count)
| [
"sqlmodel.select"
] | [((330, 340), 'joj.horse.utils.router.MyRouter', 'MyRouter', ([], {}), '()\n', (338, 340), False, 'from joj.horse.utils.router import MyRouter\n'), ((589, 620), 'fastapi.Depends', 'Depends', (['parse_pagination_query'], {}), '(parse_pagination_query)\n', (596, 620), False, 'from fastapi import Depends\n'), ((649, 658), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (656, 658), False, 'from fastapi import Depends\n'), ((725, 752), 'sqlmodel.select', 'select', (['models.ProblemGroup'], {}), '(models.ProblemGroup)\n', (731, 752), False, 'from sqlmodel import select\n'), ((888, 931), 'joj.horse.schemas.StandardListResponse', 'StandardListResponse', (['problem_groups', 'count'], {}), '(problem_groups, count)\n', (908, 931), False, 'from joj.horse.schemas import StandardListResponse\n'), ((522, 544), 'joj.horse.utils.parser.parse_ordering_query', 'parse_ordering_query', ([], {}), '()\n', (542, 544), False, 'from joj.horse.utils.parser import parse_ordering_query, parse_pagination_query\n'), ((787, 862), 'joj.horse.models.ProblemGroup.execute_list_statement', 'models.ProblemGroup.execute_list_statement', (['statement', 'ordering', 'pagination'], {}), '(statement, ordering, pagination)\n', (829, 862), False, 'from joj.horse import models, schemas\n')] |
from pathlib import Path
import pytest
from sqlmodel import select
from kfs import db
@pytest.fixture()
def base_dir(tmp_path: Path) -> Path:
return tmp_path
@pytest.fixture()
def sql_file_path(base_dir: Path) -> Path:
return base_dir / "kfs.db"
@pytest.fixture()
def sqlite_url(sql_file_path: Path) -> str:
return f"sqlite:///{sql_file_path}"
@pytest.fixture(autouse=True)
def database(sqlite_url: str) -> None:
db.init(sqlite_url)
def test_init(sql_file_path: Path) -> None:
"""After init, the database has been created and the file exists"""
assert sql_file_path.exists()
def test_database() -> None:
with db.get_session() as session:
# Create a new file with a tag
tag = db.Tag(category="vendor", value="chevron")
file = db.File(name="test_file.csv", path="/some/directory", tags=[tag])
session.add(file)
session.commit()
with db.get_session() as session:
# Retrieve the file from database
read_file = session.exec(select(db.File)).one()
assert file is not read_file
assert read_file.name == "test_file.csv"
assert read_file.path == "/some/directory"
assert len(read_file.tags) == 1
read_tag = read_file.tags[0]
assert read_tag.category == "vendor"
assert read_tag.value == "chevron"
assert len(read_tag.files) == 1
| [
"sqlmodel.select"
] | [((91, 107), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (105, 107), False, 'import pytest\n'), ((169, 185), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (183, 185), False, 'import pytest\n'), ((263, 279), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (277, 279), False, 'import pytest\n'), ((367, 395), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (381, 395), False, 'import pytest\n'), ((439, 458), 'kfs.db.init', 'db.init', (['sqlite_url'], {}), '(sqlite_url)\n', (446, 458), False, 'from kfs import db\n'), ((651, 667), 'kfs.db.get_session', 'db.get_session', ([], {}), '()\n', (665, 667), False, 'from kfs import db\n'), ((733, 775), 'kfs.db.Tag', 'db.Tag', ([], {'category': '"""vendor"""', 'value': '"""chevron"""'}), "(category='vendor', value='chevron')\n", (739, 775), False, 'from kfs import db\n'), ((791, 856), 'kfs.db.File', 'db.File', ([], {'name': '"""test_file.csv"""', 'path': '"""/some/directory"""', 'tags': '[tag]'}), "(name='test_file.csv', path='/some/directory', tags=[tag])\n", (798, 856), False, 'from kfs import db\n'), ((918, 934), 'kfs.db.get_session', 'db.get_session', ([], {}), '()\n', (932, 934), False, 'from kfs import db\n'), ((1022, 1037), 'sqlmodel.select', 'select', (['db.File'], {}), '(db.File)\n', (1028, 1037), False, 'from sqlmodel import select\n')] |
"""Anime CRUD controller."""
import sqlmodel
from sqlmodel.ext.asyncio import session as aio_session
from app.crud import base
from app.models import anime
class AnimeCRUD(base.BaseCRUD[anime.Anime, anime.AnimeCreate,
anime.AnimeUpdate]):
"""CRUD controller for anime.
It contains Create, Read, Update, and Delete methods.
"""
@classmethod
async def get_by_title(cls, session: aio_session.AsyncSession,
title: str) -> anime.Anime | None:
"""Gets an anime by their title.
Args:
session: The database session.
title: The anime's title.
"""
anime_list = await session.exec(
sqlmodel.select(anime.Anime).where(anime.Anime.title_en == title))
return anime_list.first()
| [
"sqlmodel.select"
] | [((723, 751), 'sqlmodel.select', 'sqlmodel.select', (['anime.Anime'], {}), '(anime.Anime)\n', (738, 751), False, 'import sqlmodel\n')] |
"""Instancia da tabela User e seus metodos"""
from typing import Optional, List, TYPE_CHECKING
from datetime import datetime
from sqlalchemy import UniqueConstraint
from sqlmodel import SQLModel, Field, Relationship
if TYPE_CHECKING:
from .tokens import Token
class User(SQLModel, table=True):
"""Tabela de usuarios"""
__table_args__ = (UniqueConstraint("email", "username"),)
id: Optional[int] = Field(primary_key=True, default=None, nullable=False)
name: str
email: str
username: str
password_hash: str
secundary_id: int = 0
is_staff: bool
is_active_user: bool
last_login: datetime
date_joined: datetime
token: List["Token"] = Relationship(back_populates="user")
def __repr__(self):
return f"<User {self.name}>"
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((418, 471), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None', 'nullable': '(False)'}), '(primary_key=True, default=None, nullable=False)\n', (423, 471), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((692, 727), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""user"""'}), "(back_populates='user')\n", (704, 727), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((353, 390), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""email"""', '"""username"""'], {}), "('email', 'username')\n", (369, 390), False, 'from sqlalchemy import UniqueConstraint\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel
from pydantic import validator
from datetime import datetime
import numpy as np
class Forecast(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = Field(foreign_key="app_db.appuser.id")
epic_id: int = Field(foreign_key="app_db.epic.id")
days: float
month: int
year: int
created_at: datetime
updated_at: datetime
is_locked: bool
__table_args__ = {"schema": "app_db"}
@validator("days")
def valid_days(cls, days_input):
assert days_input in np.arange(
0, 24, 0.1
), "Work days cannot be greater than 24"
return days_input
@validator("year")
def valid_year(cls, year_input):
assert year_input >= datetime.now().year
return year_input
| [
"sqlmodel.Field"
] | [((210, 247), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (215, 247), False, 'from sqlmodel import Field, SQLModel\n'), ((267, 305), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.appuser.id"""'}), "(foreign_key='app_db.appuser.id')\n", (272, 305), False, 'from sqlmodel import Field, SQLModel\n'), ((325, 360), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.epic.id"""'}), "(foreign_key='app_db.epic.id')\n", (330, 360), False, 'from sqlmodel import Field, SQLModel\n'), ((525, 542), 'pydantic.validator', 'validator', (['"""days"""'], {}), "('days')\n", (534, 542), False, 'from pydantic import validator\n'), ((724, 741), 'pydantic.validator', 'validator', (['"""year"""'], {}), "('year')\n", (733, 741), False, 'from pydantic import validator\n'), ((609, 630), 'numpy.arange', 'np.arange', (['(0)', '(24)', '(0.1)'], {}), '(0, 24, 0.1)\n', (618, 630), True, 'import numpy as np\n'), ((808, 822), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (820, 822), False, 'from datetime import datetime\n')] |
from typing import List, Union
from fastapi import APIRouter, Request
from fastapi.exceptions import HTTPException
from sqlmodel import Session, or_, select
from ..db import ActiveSession
from ..models.content import Content, ContentIncoming, ContentResponse
from ..security import AuthenticatedUser, User, get_current_user
router = APIRouter()
@router.get("/", response_model=List[ContentResponse])
async def list_contents(*, session: Session = ActiveSession):
contents = session.exec(select(Content)).all()
return contents
@router.get("/{id_or_slug}/", response_model=ContentResponse)
async def query_content(*, id_or_slug: Union[str, int], session: Session = ActiveSession):
content = session.query(Content).where(
or_(
Content.id == id_or_slug,
Content.slug == id_or_slug,
)
)
if not content:
raise HTTPException(status_code=404, detail="Content not found")
return content.first()
@router.post("/", response_model=ContentResponse, dependencies=[AuthenticatedUser])
async def create_content(
*,
session: Session = ActiveSession,
request: Request,
content: ContentIncoming,
):
# set the ownsership of the content to the current user
db_content = Content.from_orm(content)
user: User = get_current_user(request=request)
db_content.user_id = user.id
session.add(db_content)
session.commit()
session.refresh(db_content)
return db_content
@router.patch(
"/{content_id}/",
response_model=ContentResponse,
dependencies=[AuthenticatedUser],
)
async def update_content(
*,
content_id: int,
session: Session = ActiveSession,
request: Request,
patch: ContentIncoming,
):
# Query the content
content = session.get(Content, content_id)
if not content:
raise HTTPException(status_code=404, detail="Content not found")
# Check the user owns the content
current_user: User = get_current_user(request=request)
if content.user_id != current_user.id and not current_user.superuser:
raise HTTPException(status_code=403, detail="You don't own this content")
# Update the content
patch_data = patch.dict(exclude_unset=True)
for key, value in patch_data.items():
setattr(content, key, value)
# Commit the session
session.commit()
session.refresh(content)
return content
@router.delete("/{content_id}/", dependencies=[AuthenticatedUser])
def delete_content(*, session: Session = ActiveSession, request: Request, content_id: int):
content = session.get(Content, content_id)
if not content:
raise HTTPException(status_code=404, detail="Content not found")
# Check the user owns the content
current_user = get_current_user(request=request)
if content.user_id != current_user.id and not current_user.superuser:
raise HTTPException(status_code=403, detail="You don't own this content")
session.delete(content)
session.commit()
return {"ok": True}
| [
"sqlmodel.or_",
"sqlmodel.select"
] | [((336, 347), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (345, 347), False, 'from fastapi import APIRouter, Request\n'), ((745, 802), 'sqlmodel.or_', 'or_', (['(Content.id == id_or_slug)', '(Content.slug == id_or_slug)'], {}), '(Content.id == id_or_slug, Content.slug == id_or_slug)\n', (748, 802), False, 'from sqlmodel import Session, or_, select\n'), ((878, 936), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Content not found"""'}), "(status_code=404, detail='Content not found')\n", (891, 936), False, 'from fastapi.exceptions import HTTPException\n'), ((1831, 1889), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Content not found"""'}), "(status_code=404, detail='Content not found')\n", (1844, 1889), False, 'from fastapi.exceptions import HTTPException\n'), ((2076, 2143), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""You don\'t own this content"""'}), '(status_code=403, detail="You don\'t own this content")\n', (2089, 2143), False, 'from fastapi.exceptions import HTTPException\n'), ((2635, 2693), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Content not found"""'}), "(status_code=404, detail='Content not found')\n", (2648, 2693), False, 'from fastapi.exceptions import HTTPException\n'), ((2873, 2940), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""You don\'t own this content"""'}), '(status_code=403, detail="You don\'t own this content")\n', (2886, 2940), False, 'from fastapi.exceptions import HTTPException\n'), ((495, 510), 'sqlmodel.select', 'select', (['Content'], {}), '(Content)\n', (501, 510), False, 'from sqlmodel import Session, or_, select\n')] |
import os
import jwt
import time
import requests
from flask import abort
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import redirect
from flask import current_app
from flask import session
from urllib.parse import unquote
from base64 import b64encode
from datetime import datetime
from sqlmodel import select
from sqlmodel import Session as SQLSession
from app.models.user import User
bp = Blueprint("auth", __name__)
@bp.route("/login")
def login():
CLIENT_ID = current_app.config["CLIENT_ID"]
REDIRECT_URI = current_app.config["REDIRECT_URI"]
USER_STATE = b64encode(os.urandom(64)).decode("utf-8")
UAA_AUTHORIZE_URI = current_app.config["UAA_AUTHORIZE_URI"]
session["USER_STATE"] = USER_STATE
UAA_LOGIN = f"{UAA_AUTHORIZE_URI}?client_id={CLIENT_ID}&response_type=code&redirect_uri={REDIRECT_URI}&state={USER_STATE}"
return redirect(UAA_LOGIN)
@bp.route("/logout")
def logout():
CLIENT_ID = current_app.config["CLIENT_ID"]
REDIRECT_URI = current_app.config["REDIRECT_URI"]
UAA_LOGOUT_URI = current_app.config["UAA_LOGOUT_URI"]
UAA_LGOUT = f"{UAA_LOGOUT_URI}?client_id={CLIENT_ID}&redirect={REDIRECT_URI}"
session.clear()
requests.post(UAA_LGOUT)
return redirect("/")
@bp.route("/callback")
def callback():
# @url_param {string} code
# @url_param {string} status
code = request.args.get("code")
state = request.args.get("state")
if not code or not state:
abort(400)
UAA_TOKEN_URI = current_app.config["UAA_TOKEN_URI"]
data = {
"code": code,
"grant_type": "authorization_code",
"response_type": "token",
"client_id": current_app.config["CLIENT_ID"],
"client_secret": current_app.config["CLIENT_SECRET"],
"redirect_uri": current_app.config["REDIRECT_URI"],
}
response = requests.post(UAA_TOKEN_URI, data=data)
if response.status_code != 200:
abort(response.status_code)
response = response.json()
token = response["access_token"]
header = jwt.get_unverified_header(token)
session["claims"] = jwt.decode(
token, header["alg"], options={"verify_signature": False}
)
session["expiry"] = time.time() + (response["expires_in"] * 1000)
session["refresh_token"] = response["refresh_token"]
session["authenticated"] = True
with SQLSession(current_app.engine) as s:
query = select(User).where(User.email == session["claims"]["email"])
user = s.exec(query).first()
if user:
# Account exists
user.last_logon = datetime.now()
s.add(user)
s.commit()
else:
# Account does not exist
new_user = User(
user_name=session["claims"]["user_name"],
email=session["claims"]["email"],
last_logon=datetime.now(),
)
s.add(new_user)
s.commit()
user = s.exec(query).first()
session["user"] = user.dict()
return redirect("/")
| [
"sqlmodel.select",
"sqlmodel.Session"
] | [((438, 465), 'flask.Blueprint', 'Blueprint', (['"""auth"""', '__name__'], {}), "('auth', __name__)\n", (447, 465), False, 'from flask import Blueprint\n'), ((906, 925), 'flask.redirect', 'redirect', (['UAA_LOGIN'], {}), '(UAA_LOGIN)\n', (914, 925), False, 'from flask import redirect\n'), ((1210, 1225), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (1223, 1225), False, 'from flask import session\n'), ((1230, 1254), 'requests.post', 'requests.post', (['UAA_LGOUT'], {}), '(UAA_LGOUT)\n', (1243, 1254), False, 'import requests\n'), ((1267, 1280), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (1275, 1280), False, 'from flask import redirect\n'), ((1397, 1421), 'flask.request.args.get', 'request.args.get', (['"""code"""'], {}), "('code')\n", (1413, 1421), False, 'from flask import request\n'), ((1434, 1459), 'flask.request.args.get', 'request.args.get', (['"""state"""'], {}), "('state')\n", (1450, 1459), False, 'from flask import request\n'), ((1879, 1918), 'requests.post', 'requests.post', (['UAA_TOKEN_URI'], {'data': 'data'}), '(UAA_TOKEN_URI, data=data)\n', (1892, 1918), False, 'import requests\n'), ((2075, 2107), 'jwt.get_unverified_header', 'jwt.get_unverified_header', (['token'], {}), '(token)\n', (2100, 2107), False, 'import jwt\n'), ((2133, 2202), 'jwt.decode', 'jwt.decode', (['token', "header['alg']"], {'options': "{'verify_signature': False}"}), "(token, header['alg'], options={'verify_signature': False})\n", (2143, 2202), False, 'import jwt\n'), ((3065, 3078), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3073, 3078), False, 'from flask import redirect\n'), ((1499, 1509), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (1504, 1509), False, 'from flask import abort\n'), ((1964, 1991), 'flask.abort', 'abort', (['response.status_code'], {}), '(response.status_code)\n', (1969, 1991), False, 'from flask import abort\n'), ((2241, 2252), 'time.time', 'time.time', ([], {}), '()\n', (2250, 2252), False, 'import time\n'), 
((2390, 2420), 'sqlmodel.Session', 'SQLSession', (['current_app.engine'], {}), '(current_app.engine)\n', (2400, 2420), True, 'from sqlmodel import Session as SQLSession\n'), ((2618, 2632), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2630, 2632), False, 'from datetime import datetime\n'), ((630, 644), 'os.urandom', 'os.urandom', (['(64)'], {}), '(64)\n', (640, 644), False, 'import os\n'), ((2443, 2455), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (2449, 2455), False, 'from sqlmodel import select\n'), ((2896, 2910), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2908, 2910), False, 'from datetime import datetime\n')] |
import time
import os
from typing import Optional
from sqlalchemy.exc import OperationalError
from sqlalchemy.engine import URL
from sqlmodel import Field, Session, SQLModel, create_engine, select
from loguru import logger
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
secret_name: str
age: Optional[int] = None
def main():
hero_1 = Hero(name="Deadpond", secret_name="<NAME>")
hero_2 = Hero(name="Spider-Boy", secret_name="<NAME>")
hero_3 = Hero(name="Rusty-Man", secret_name="<NAME>", age=48)
host_name = "postgres" if os.environ.get("IS_INSIDE_DOCKER") else "localhost"
url = URL.create(drivername="postgresql", username="postgres", password="<PASSWORD>", host=host_name, port=5432)
engine = create_engine(url)
for _ in range(5):
try:
SQLModel.metadata.create_all(engine)
break
except OperationalError:
logger.error("Is postgres database running?")
time.sleep(2)
with Session(engine) as session:
session.add_all([hero_1, hero_2])
session.add(hero_3)
session.commit()
with Session(engine) as session:
statement = select(Hero).where(Hero.name == "Spider-Boy")
hero = session.exec(statement).first()
logger.info(hero)
if __name__ == '__main__':
main()
| [
"sqlmodel.create_engine",
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field"
] | [((285, 322), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (290, 322), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((677, 788), 'sqlalchemy.engine.URL.create', 'URL.create', ([], {'drivername': '"""postgresql"""', 'username': '"""postgres"""', 'password': '"""<PASSWORD>"""', 'host': 'host_name', 'port': '(5432)'}), "(drivername='postgresql', username='postgres', password=\n '<PASSWORD>', host=host_name, port=5432)\n", (687, 788), False, 'from sqlalchemy.engine import URL\n'), ((797, 815), 'sqlmodel.create_engine', 'create_engine', (['url'], {}), '(url)\n', (810, 815), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((615, 649), 'os.environ.get', 'os.environ.get', (['"""IS_INSIDE_DOCKER"""'], {}), "('IS_INSIDE_DOCKER')\n", (629, 649), False, 'import os\n'), ((1046, 1061), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1053, 1061), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1179, 1194), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1186, 1194), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1328, 1345), 'loguru.logger.info', 'logger.info', (['hero'], {}), '(hero)\n', (1339, 1345), False, 'from loguru import logger\n'), ((864, 900), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (892, 900), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((964, 1009), 'loguru.logger.error', 'logger.error', (['"""Is postgres database running?"""'], {}), "('Is postgres database running?')\n", (976, 1009), False, 'from loguru import logger\n'), ((1022, 1035), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1032, 1035), False, 'import time\n'), ((1227, 1239), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1233, 1239), False, 
'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')] |
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ..db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class Procedure(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
procedure_group_id: int
parent_procedure_id: int
name: str
detail: str
icd_9: str
icd_10: str
class ProcedureGroup(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
class ProcedureDiseaseMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
procedure_id: int
disease_id: bool
require: bool
age_min: float
age_max: float
class HistoryProcedure(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id: int
procedure_id: int
detail: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class HistoryProcedureDoctorMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_procedure_id: int
doctor_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
#
#
# @router.post("/history_procedure", response_model=HistoryProcedure)
# async def create_history_procedure(history_procedure: HistoryProcedure, session: AsyncSession = Depends(get_session)):
# session.add(history_procedure)
# await session.commit()
# await session.refresh(history_procedure)
# return history_procedure
#
#
# @router.get("/history_procedure/{procedure_id}", response_model=HistoryProcedure)
# async def get_history_procedure(procedure_id: int, session: AsyncSession = Depends(get_session)):
# history_procedures = await session.execute(select(HistoryProcedure).where(HistoryProcedure.id == procedure_id))
# history_procedure = history_procedures.scalars().first()
# return history_procedure
#
#
# @router.put("/history_procedure/{procedure_id}", response_model=HistoryProcedure)
# async def update_history_procedure(id: int, session: AsyncSession = Depends(get_session)):
# return None
#
#
# @router.delete("/history_procedure/{procedure_id}")
# async def delete_history_procedure(session: AsyncSession = Depends(get_session)):
# return None | [
"sqlmodel.Field"
] | [((254, 265), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (263, 265), False, 'from fastapi import APIRouter, Depends\n'), ((331, 368), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (336, 368), False, 'from sqlmodel import Field, SQLModel\n'), ((557, 594), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (562, 594), False, 'from sqlmodel import Field, SQLModel\n'), ((684, 721), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (689, 721), False, 'from sqlmodel import Field, SQLModel\n'), ((893, 930), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (898, 930), False, 'from sqlmodel import Field, SQLModel\n'), ((1177, 1214), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1182, 1214), False, 'from sqlmodel import Field, SQLModel\n')] |
import asyncio
import pytest
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.ext.asyncio.engine import AsyncConnection
from sqlmodel.ext.asyncio.session import AsyncSession
from basesqlmodel import Base
engine = create_async_engine("sqlite+aiosqlite:///:memory:")
@pytest.fixture()
async def connection() -> AsyncConnection:
async with engine.begin() as conn:
yield conn
await conn.rollback()
@pytest.fixture()
async def session(connection: AsyncConnection):
async with AsyncSession(connection, expire_on_commit=False) as _session:
yield _session
@pytest.fixture(scope="session", autouse=True)
def event_loop():
"""Reference: https://github.com/pytest-dev/pytest-asyncio/issues/38#issuecomment-264418154"""
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
loop.close()
@pytest.fixture(scope="session", autouse=True)
async def init_database():
import tests.utils # noqa
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
| [
"sqlmodel.ext.asyncio.session.AsyncSession"
] | [((238, 289), 'sqlalchemy.ext.asyncio.create_async_engine', 'create_async_engine', (['"""sqlite+aiosqlite:///:memory:"""'], {}), "('sqlite+aiosqlite:///:memory:')\n", (257, 289), False, 'from sqlalchemy.ext.asyncio import create_async_engine\n'), ((293, 309), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (307, 309), False, 'import pytest\n'), ((444, 460), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (458, 460), False, 'import pytest\n'), ((612, 657), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (626, 657), False, 'import pytest\n'), ((870, 915), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (884, 915), False, 'import pytest\n'), ((524, 572), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['connection'], {'expire_on_commit': '(False)'}), '(connection, expire_on_commit=False)\n', (536, 572), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((786, 817), 'asyncio.get_event_loop_policy', 'asyncio.get_event_loop_policy', ([], {}), '()\n', (815, 817), False, 'import asyncio\n')] |
"""Add participant and application
Revision ID: 58d2280520b8
Revises:
Create Date: 2022-02-12 07:30:30.427270+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "58d2280520b8"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"participants",
sa.Column("first_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("last_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("email", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"applications",
sa.Column(
"gender",
sa.Enum("MALE", "FEMALE", "NON_BINARY", name="gender"),
nullable=True,
),
sa.Column(
"race_ethnicity",
sa.Enum(
"AMERICAN_INDIAN",
"ASIAN",
"PACIFIC_ISLANDER",
"BLACK",
"HISPANIC",
"CAUCASIAN",
"MULTIPLE_OTHER",
name="raceethnicity",
),
nullable=True,
),
sa.Column("participant_id", sa.Integer(), nullable=False),
sa.Column("level_of_study", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("graduation_year", sa.Integer(), nullable=False),
sa.Column("major", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date_of_birth", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("hackathons_attended", sa.Integer(), nullable=False),
sa.Column("portfolio_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("vcs_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column(
"shipping_address", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("share_information", sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(
["participant_id"],
["participants.id"],
),
sa.PrimaryKeyConstraint("participant_id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("applications")
op.drop_table("participants")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((2458, 2487), 'alembic.op.drop_table', 'op.drop_table', (['"""applications"""'], {}), "('applications')\n", (2471, 2487), False, 'from alembic import op\n'), ((2492, 2521), 'alembic.op.drop_table', 'op.drop_table', (['"""participants"""'], {}), "('participants')\n", (2505, 2521), False, 'from alembic import op\n'), ((753, 782), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (776, 782), True, 'import sqlalchemy as sa\n'), ((2176, 2240), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['participant_id']", "['participants.id']"], {}), "(['participant_id'], ['participants.id'])\n", (2199, 2240), True, 'import sqlalchemy as sa\n'), ((2285, 2326), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""participant_id"""'], {}), "('participant_id')\n", (2308, 2326), True, 'import sqlalchemy as sa\n'), ((473, 507), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (505, 507), False, 'import sqlmodel\n'), ((557, 591), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (589, 591), False, 'import sqlmodel\n'), ((637, 671), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (669, 671), False, 'import sqlmodel\n'), ((714, 726), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (724, 726), True, 'import sqlalchemy as sa\n'), ((888, 942), 'sqlalchemy.Enum', 'sa.Enum', (['"""MALE"""', '"""FEMALE"""', '"""NON_BINARY"""'], {'name': '"""gender"""'}), "('MALE', 'FEMALE', 'NON_BINARY', name='gender')\n", (895, 942), True, 'import sqlalchemy as sa\n'), ((1043, 1176), 'sqlalchemy.Enum', 'sa.Enum', (['"""AMERICAN_INDIAN"""', '"""ASIAN"""', '"""PACIFIC_ISLANDER"""', '"""BLACK"""', '"""HISPANIC"""', '"""CAUCASIAN"""', '"""MULTIPLE_OTHER"""'], {'name': '"""raceethnicity"""'}), "('AMERICAN_INDIAN', 'ASIAN', 'PACIFIC_ISLANDER', 'BLACK', 'HISPANIC',\n 'CAUCASIAN', 'MULTIPLE_OTHER', 
name='raceethnicity')\n", (1050, 1176), True, 'import sqlalchemy as sa\n'), ((1391, 1403), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1401, 1403), True, 'import sqlalchemy as sa\n'), ((1458, 1492), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1490, 1492), False, 'import sqlmodel\n'), ((1548, 1560), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1558, 1560), True, 'import sqlalchemy as sa\n'), ((1606, 1640), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1638, 1640), False, 'import sqlmodel\n'), ((1693, 1727), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1725, 1727), False, 'import sqlmodel\n'), ((1787, 1799), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1797, 1799), True, 'import sqlalchemy as sa\n'), ((1853, 1887), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1885, 1887), False, 'import sqlmodel\n'), ((1934, 1968), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1966, 1968), False, 'import sqlmodel\n'), ((2037, 2071), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2069, 2071), False, 'import sqlmodel\n'), ((2137, 2149), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (2147, 2149), True, 'import sqlalchemy as sa\n')] |
import traceback
from contextlib import contextmanager
from sqlmodel import create_engine, Session
from fastapi_dream_core.application_dependencies.application_dependencies_abc import ApplicationDependenciesABC
from fastapi_dream_core.utils import logger
class DatabaseSQLModel(ApplicationDependenciesABC):
def __init__(self, db_url: str, echo_queries: bool = False) -> None:
self._engine = create_engine(db_url, echo=echo_queries)
def readiness(self) -> bool:
with Session(self._engine) as session:
try:
database_status = session.connection().connection.is_valid
logger.debug(f"DatabaseSQLModel.readiness = {database_status}")
return True
except Exception:
traceback.print_exc()
return False
@contextmanager
def session(self) -> Session:
with Session(self._engine) as session:
try:
yield session
except Exception:
logger.exception("Session rollback because of exception")
session.rollback()
raise
finally:
session.close()
def __str__(self):
return "DatabaseSQLModel"
| [
"sqlmodel.create_engine",
"sqlmodel.Session"
] | [((408, 448), 'sqlmodel.create_engine', 'create_engine', (['db_url'], {'echo': 'echo_queries'}), '(db_url, echo=echo_queries)\n', (421, 448), False, 'from sqlmodel import create_engine, Session\n'), ((496, 517), 'sqlmodel.Session', 'Session', (['self._engine'], {}), '(self._engine)\n', (503, 517), False, 'from sqlmodel import create_engine, Session\n'), ((896, 917), 'sqlmodel.Session', 'Session', (['self._engine'], {}), '(self._engine)\n', (903, 917), False, 'from sqlmodel import create_engine, Session\n'), ((638, 701), 'fastapi_dream_core.utils.logger.debug', 'logger.debug', (['f"""DatabaseSQLModel.readiness = {database_status}"""'], {}), "(f'DatabaseSQLModel.readiness = {database_status}')\n", (650, 701), False, 'from fastapi_dream_core.utils import logger\n'), ((777, 798), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (796, 798), False, 'import traceback\n'), ((1023, 1080), 'fastapi_dream_core.utils.logger.exception', 'logger.exception', (['"""Session rollback because of exception"""'], {}), "('Session rollback because of exception')\n", (1039, 1080), False, 'from fastapi_dream_core.utils import logger\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel
from datetime import datetime
class Capacity(SQLModel, table=True):
"""Create an SQLModel for capcities"""
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = Field(foreign_key="app_db.appuser.id")
team_id: int = Field(foreign_key="app_db.team.id")
year: int
month: int
days: int
created_at: datetime
updated_at: datetime
is_locked: bool = False
__table_args__ = {"schema": "app_db"}
| [
"sqlmodel.Field"
] | [((203, 240), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (208, 240), False, 'from sqlmodel import Field, SQLModel\n'), ((260, 298), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.appuser.id"""'}), "(foreign_key='app_db.appuser.id')\n", (265, 298), False, 'from sqlmodel import Field, SQLModel\n'), ((318, 353), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.team.id"""'}), "(foreign_key='app_db.team.id')\n", (323, 353), False, 'from sqlmodel import Field, SQLModel\n')] |
import pickle
from typing import Optional, Dict, Any, List, cast
from enum import Enum
import orjson
from fastapi import APIRouter, Depends, Query
from pydantic import validator
from sqlmodel import Session, Field, select # type: ignore[import]
from sqlalchemy import distinct # type: ignore[import]
from app.db import get_db, FeedModel, FeedBase
router = APIRouter()
class FeedRead(FeedBase):
# parse from the backend to data structures
tags: List[str] = Field(default_factory=list)
data: Dict[str, Any] = Field(default_factory={})
@validator("tags", pre=True)
def parse_tags(cls, v: Any) -> List[str]:
return cast(List[str], orjson.loads(v))
@validator("data", pre=True)
def parse_data(cls, v: Any) -> Dict[str, Any]:
if v is None:
return {}
else:
return cast(Dict[str, Any], pickle.loads(v))
class OrderBy(Enum):
score = "score"
when = "when"
class Sort(Enum):
asc = "asc"
ascending = "ascending"
desc = "desc"
descending = "descending"
# items which shouldn't be shown when sorted by 'score'
# since it'd make the feed too busy
INDIVIDUAL_FEED_TYPES = [
"anime_episode",
"manga_chapter",
"listen",
"trakt_history_episode",
"trakt_history_movie",
]
@router.get("/types", response_model=List[str])
async def data_types(
session: Session = Depends(get_db),
) -> List[str]:
stmt = select(distinct(FeedModel.ftype))
with session:
items: List[str] = list(session.exec(stmt))
return items
@router.get("/", response_model=List[FeedRead])
async def data(
offset: int = 0,
limit: int = Query(default=100, lte=100),
order_by: OrderBy = Query(default=OrderBy.when),
sort: Sort = Query(default=Sort.desc),
ftype: Optional[str] = Query(default=None, min_length=2),
query: Optional[str] = Query(default=None, min_length=2),
title: Optional[str] = Query(default=None, min_length=2),
creator: Optional[str] = Query(default=None, min_length=2),
subtitle: Optional[str] = Query(default=None, min_length=2),
session: Session = Depends(get_db),
) -> List[FeedRead]:
stmt = select(FeedModel)
if ftype is not None and ftype.strip():
if parts := ftype.strip().split(","):
stmt = stmt.filter(FeedModel.ftype.in_(parts)) # type: ignore
if query is None:
if title:
stmt = stmt.filter(FeedModel.title.ilike(f"%{title}%")) # type: ignore
if creator:
stmt = stmt.filter(FeedModel.creator.ilike(f"%{creator}%")) # type: ignore
if subtitle:
stmt = stmt.filter(FeedModel.subtitle.ilike(f"%{subtitle}%")) # type: ignore
else:
stmt = stmt.filter(
(FeedModel.title.ilike(f"%{query}%")) # type: ignore
| (FeedModel.creator.ilike(f"%{query}%")) # type: ignore
| (FeedModel.subtitle.ilike(f"%{query}%")) # type: ignore
| (FeedModel.model_id.ilike(f"%{query}%")) # type: ignore
)
if order_by == OrderBy.score:
stmt = stmt.filter(FeedModel.score != None)
stmt = stmt.filter(FeedModel.ftype.notin_(INDIVIDUAL_FEED_TYPES)) # type: ignore
# ORDER BY Score [CHOSEN], When DESC to show things I completed recently higher when sorting by score
stmt = stmt.order_by(FeedModel.score.asc() if sort == Sort.asc else FeedModel.score.desc(), FeedModel.when.desc()) # type: ignore
else:
stmt = stmt.order_by(FeedModel.when.asc() if sort == Sort.asc else FeedModel.when.desc()) # type: ignore
stmt = stmt.limit(limit).offset(offset)
with session:
items: List[FeedModel] = list(session.exec(stmt))
return items
| [
"sqlmodel.select",
"sqlmodel.Field"
] | [((363, 374), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (372, 374), False, 'from fastapi import APIRouter, Depends, Query\n'), ((473, 500), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (478, 500), False, 'from sqlmodel import Session, Field, select\n'), ((528, 553), 'sqlmodel.Field', 'Field', ([], {'default_factory': '{}'}), '(default_factory={})\n', (533, 553), False, 'from sqlmodel import Session, Field, select\n'), ((560, 587), 'pydantic.validator', 'validator', (['"""tags"""'], {'pre': '(True)'}), "('tags', pre=True)\n", (569, 587), False, 'from pydantic import validator\n'), ((688, 715), 'pydantic.validator', 'validator', (['"""data"""'], {'pre': '(True)'}), "('data', pre=True)\n", (697, 715), False, 'from pydantic import validator\n'), ((1384, 1399), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (1391, 1399), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1653, 1680), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (1658, 1680), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1706, 1733), 'fastapi.Query', 'Query', ([], {'default': 'OrderBy.when'}), '(default=OrderBy.when)\n', (1711, 1733), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1752, 1776), 'fastapi.Query', 'Query', ([], {'default': 'Sort.desc'}), '(default=Sort.desc)\n', (1757, 1776), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1805, 1838), 'fastapi.Query', 'Query', ([], {'default': 'None', 'min_length': '(2)'}), '(default=None, min_length=2)\n', (1810, 1838), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1867, 1900), 'fastapi.Query', 'Query', ([], {'default': 'None', 'min_length': '(2)'}), '(default=None, min_length=2)\n', (1872, 1900), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1929, 1962), 'fastapi.Query', 'Query', ([], {'default': 'None', 'min_length': '(2)'}), '(default=None, 
min_length=2)\n', (1934, 1962), False, 'from fastapi import APIRouter, Depends, Query\n'), ((1993, 2026), 'fastapi.Query', 'Query', ([], {'default': 'None', 'min_length': '(2)'}), '(default=None, min_length=2)\n', (1998, 2026), False, 'from fastapi import APIRouter, Depends, Query\n'), ((2058, 2091), 'fastapi.Query', 'Query', ([], {'default': 'None', 'min_length': '(2)'}), '(default=None, min_length=2)\n', (2063, 2091), False, 'from fastapi import APIRouter, Depends, Query\n'), ((2116, 2131), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (2123, 2131), False, 'from fastapi import APIRouter, Depends, Query\n'), ((2165, 2182), 'sqlmodel.select', 'select', (['FeedModel'], {}), '(FeedModel)\n', (2171, 2182), False, 'from sqlmodel import Session, Field, select\n'), ((1435, 1460), 'sqlalchemy.distinct', 'distinct', (['FeedModel.ftype'], {}), '(FeedModel.ftype)\n', (1443, 1460), False, 'from sqlalchemy import distinct\n'), ((665, 680), 'orjson.loads', 'orjson.loads', (['v'], {}), '(v)\n', (677, 680), False, 'import orjson\n'), ((3131, 3176), 'app.db.FeedModel.ftype.notin_', 'FeedModel.ftype.notin_', (['INDIVIDUAL_FEED_TYPES'], {}), '(INDIVIDUAL_FEED_TYPES)\n', (3153, 3176), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((3404, 3425), 'app.db.FeedModel.when.desc', 'FeedModel.when.desc', ([], {}), '()\n', (3423, 3425), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((865, 880), 'pickle.loads', 'pickle.loads', (['v'], {}), '(v)\n', (877, 880), False, 'import pickle\n'), ((2304, 2330), 'app.db.FeedModel.ftype.in_', 'FeedModel.ftype.in_', (['parts'], {}), '(parts)\n', (2323, 2330), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((2420, 2455), 'app.db.FeedModel.title.ilike', 'FeedModel.title.ilike', (['f"""%{title}%"""'], {}), "(f'%{title}%')\n", (2441, 2455), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((2524, 2563), 'app.db.FeedModel.creator.ilike', 'FeedModel.creator.ilike', (['f"""%{creator}%"""'], {}), 
"(f'%{creator}%')\n", (2547, 2563), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((2633, 2674), 'app.db.FeedModel.subtitle.ilike', 'FeedModel.subtitle.ilike', (['f"""%{subtitle}%"""'], {}), "(f'%{subtitle}%')\n", (2657, 2674), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((2952, 2990), 'app.db.FeedModel.model_id.ilike', 'FeedModel.model_id.ilike', (['f"""%{query}%"""'], {}), "(f'%{query}%')\n", (2976, 2990), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((3333, 3354), 'app.db.FeedModel.score.asc', 'FeedModel.score.asc', ([], {}), '()\n', (3352, 3354), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((3380, 3402), 'app.db.FeedModel.score.desc', 'FeedModel.score.desc', ([], {}), '()\n', (3400, 3402), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((3482, 3502), 'app.db.FeedModel.when.asc', 'FeedModel.when.asc', ([], {}), '()\n', (3500, 3502), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((3528, 3549), 'app.db.FeedModel.when.desc', 'FeedModel.when.desc', ([], {}), '()\n', (3547, 3549), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((2881, 2919), 'app.db.FeedModel.subtitle.ilike', 'FeedModel.subtitle.ilike', (['f"""%{query}%"""'], {}), "(f'%{query}%')\n", (2905, 2919), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((2743, 2778), 'app.db.FeedModel.title.ilike', 'FeedModel.title.ilike', (['f"""%{query}%"""'], {}), "(f'%{query}%')\n", (2764, 2778), False, 'from app.db import get_db, FeedModel, FeedBase\n'), ((2811, 2848), 'app.db.FeedModel.creator.ilike', 'FeedModel.creator.ilike', (['f"""%{query}%"""'], {}), "(f'%{query}%')\n", (2834, 2848), False, 'from app.db import get_db, FeedModel, FeedBase\n')] |
from typing import Optional, List
from sqlalchemy import String
from sqlalchemy.sql.schema import Column
from sqlmodel import SQLModel, Field, Relationship
class CustomerProductLink(SQLModel, table=True):
customer_id: Optional[int] = Field(
default=None, foreign_key='customer.id', primary_key=True
)
product_id: Optional[int] = Field(
default=None, foreign_key='product.id', primary_key=True
)
class AddressBase(SQLModel):
street_name: str
house_number: str
city: str
zip_code: str
class Address(AddressBase, table=True):
id: int = Field(default=None, primary_key=True)
customers: List['Customer'] = Relationship(back_populates='address')
class AddressOut(AddressBase):
pass
class AddressIn(AddressBase):
pass
class CustomerBase(SQLModel):
first_name: str
last_name: str
birth_date: str
gender: str
mobile_number: str
email: str
class Customer(CustomerBase, table=True):
id: int = Field(default=None, primary_key=True)
address_id: Optional[int] = Field(default=None, foreign_key='address.id')
address: Optional[Address] = Relationship(back_populates='customers',
sa_relationship_kwargs={'lazy': 'selectin'})
mobile_number: str = Field(sa_column=Column('mobile_number', String, unique=True))
email: str = Field(sa_column=Column('email', String, unique=True))
products: List['Product'] = Relationship(back_populates='customers', link_model=CustomerProductLink,
sa_relationship_kwargs={'lazy': 'selectin'})
class CustomerOut(CustomerBase):
id: int
address: Optional[AddressOut]
class CustomerIn(CustomerBase):
address: Optional[AddressIn]
class ProductBase(SQLModel):
name: Optional[str] = None
class Product(ProductBase, table=True):
id: int = Field(default=None, primary_key=True)
name: str = Field(sa_column=Column('name', String, unique=True))
customers: List[Customer] = Relationship(back_populates='products', link_model=CustomerProductLink)
class ProductOut(ProductBase):
id: int
name: str
class ProductIn(ProductBase):
name: str
class ProductUpdate(ProductBase):
product_id: int
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((241, 305), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""customer.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='customer.id', primary_key=True)\n", (246, 305), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((352, 415), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""product.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='product.id', primary_key=True)\n", (357, 415), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((592, 629), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (597, 629), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((664, 702), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""address"""'}), "(back_populates='address')\n", (676, 702), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((989, 1026), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (994, 1026), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1059, 1104), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""address.id"""'}), "(default=None, foreign_key='address.id')\n", (1064, 1104), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1138, 1227), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""customers"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='customers', sa_relationship_kwargs={'lazy':\n 'selectin'})\n", (1150, 1227), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1461, 1582), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""customers"""', 'link_model': 'CustomerProductLink', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='customers', link_model=CustomerProductLink,\n sa_relationship_kwargs={'lazy': 'selectin'})\n", 
(1473, 1582), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1890, 1927), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1895, 1927), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2030, 2101), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""products"""', 'link_model': 'CustomerProductLink'}), "(back_populates='products', link_model=CustomerProductLink)\n", (2042, 2101), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1311, 1355), 'sqlalchemy.sql.schema.Column', 'Column', (['"""mobile_number"""', 'String'], {'unique': '(True)'}), "('mobile_number', String, unique=True)\n", (1317, 1355), False, 'from sqlalchemy.sql.schema import Column\n'), ((1390, 1426), 'sqlalchemy.sql.schema.Column', 'Column', (['"""email"""', 'String'], {'unique': '(True)'}), "('email', String, unique=True)\n", (1396, 1426), False, 'from sqlalchemy.sql.schema import Column\n'), ((1960, 1995), 'sqlalchemy.sql.schema.Column', 'Column', (['"""name"""', 'String'], {'unique': '(True)'}), "('name', String, unique=True)\n", (1966, 1995), False, 'from sqlalchemy.sql.schema import Column\n')] |
from typing import Optional, List
import sqlalchemy
from sqlmodel import SQLModel, Field, Relationship
from datetime import date, datetime
# #############################################################################
# Links
class ListingFacilityLink(SQLModel, table=True):
listing_id: int = Field(
foreign_key="listings.id", primary_key=True
)
facility_id: int = Field(
foreign_key="facilities.id", primary_key=True
)
# #############################################################################
class SongBase(SQLModel):
id: Optional[int]
name: str
artist: str
year: Optional[int] = None
class Song(SongBase, table=True):
id: int = Field(primary_key=True)
created_at: datetime = Field(default=datetime.now())
updated_at: datetime = Field(default=datetime.now(),
sa_column_kwargs={'onupdate': datetime.now()})
class SongRead(SongBase):
id: int
created_at: datetime
updated_at: datetime
class SongUpdate(SQLModel):
name: Optional[str] = None
artist: Optional[str] = None
year: Optional[int] = None
class SongCreate(SongBase):
pass
class Increment(SQLModel, table=True):
id: int = Field(primary_key=True)
# #############################################################################
class ListingBase(SQLModel):
id: int = Field(primary_key=True)
is_active: bool
title: Optional[str] = None
description: Optional[str] = None
url: str
source: str
source_id: str
source_code: Optional[str] = None
address: str
short_postal_code: Optional[str] = None
property_type: Optional[str] = None
postal_code: Optional[str] = None
ber_code: Optional[str] = None
views: Optional[int] = None
bedrooms: Optional[int] = None
bathrooms: Optional[int] = None
price: Optional[int] = None
rating_auto: Optional[int] = None
rating_user: Optional[int] = None
telegram_sent_at: Optional[datetime] = None
images_count: Optional[int] = 0
latitude: Optional[float] = None
longitude: Optional[float] = None
notes: Optional[str] = None
publish_date: Optional[datetime] = None
last_updated: Optional[datetime] = None
class Listing(ListingBase, table=True):
__tablename__ = 'listings'
id: int = Field(primary_key=True)
created_at: datetime = Field(default=datetime.now())
updated_at: datetime = Field(default=datetime.now(),
sa_column_kwargs={'onupdate': datetime.now()})
images: List["Image"] = Relationship(back_populates="listing",
# sa_relationship_kwargs={'lazy': 'joined'}
)
facilities: List["Facility"] = Relationship(link_model=ListingFacilityLink)
places_nearby: List["PlaceNearby"] = Relationship(
back_populates="listing",)
routes: List["Route"] = Relationship(back_populates="listing",)
class ListingRead(ListingBase):
id: str
created_at: datetime
updated_at: datetime
class ListingCreate(ListingBase):
pass
class ListingUpdate(ListingBase):
id: Optional[str]
is_active: Optional[bool]
url: Optional[str]
source: Optional[str]
source_id: Optional[str]
address: Optional[str]
# #############################################################################
class FacilityBase(SQLModel):
id: Optional[int]
name: str
category: Optional[str] = None
notes: Optional[str] = None
class Facility(FacilityBase, table=True):
__tablename__ = 'facilities'
id: int = Field(primary_key=True)
created_at: datetime = Field(default=datetime.now())
updated_at: datetime = Field(default=datetime.now(),
sa_column_kwargs={'onupdate': datetime.now()})
class FacilityRead(FacilityBase):
id: int
created_at: datetime
updated_at: datetime
class FacilityCreate(FacilityBase):
pass
# #############################################################################
class ImageBase(SQLModel):
id: Optional[int]
url: str
url_600: Optional[str]
size_x: Optional[float]
size_y: Optional[float]
listing_id: Optional[int] = Field(default=None, foreign_key="listings.id")
class Image(ImageBase, table=True):
__tablename__ = 'images'
id: int = Field(primary_key=True)
created_at: datetime = Field(default=datetime.now())
updated_at: datetime = Field(default=datetime.now(),
sa_column_kwargs={'onupdate': datetime.now()})
listing: Optional[Listing] = Relationship(back_populates="images",
# sa_relationship_kwargs={'lazy': 'selectin'}
)
class ImageRead(ImageBase):
id: int
created_at: datetime
updated_at: datetime
class ImageCreate(ImageBase):
pass
# #############################################################################
class PlaceNearbyBase(SQLModel):
id: Optional[int]
latitude: Optional[float] = None
longitude: Optional[float] = None
query: Optional[str] = None
name: str
address: str
distance: int
website: Optional[str] = None
website_domain: Optional[str] = None
chain_name: Optional[str] = None
listing_id: Optional[int] = Field(default=None, foreign_key="listings.id")
class PlaceNearby(PlaceNearbyBase, table=True):
__tablename__ = 'places_nearby'
id: Optional[int] = Field(primary_key=True)
created_at: datetime = Field(default=datetime.now())
updated_at: datetime = Field(default=datetime.now(),
sa_column_kwargs={'onupdate': datetime.now()})
listing: Optional[Listing] = Relationship(back_populates="places_nearby",)
class PlaceNearbyRead(PlaceNearbyBase):
id: int
created_at: datetime
updated_at: datetime
class PlaceNearbyCreate(PlaceNearbyBase):
pass
# #############################################################################
class InterestPointBase(SQLModel):
id: Optional[int]
name: str
is_active: bool
latitude: Optional[float] = None
longitude: Optional[float] = None
class InterestPoint(InterestPointBase, table=True):
__tablename__ = 'interest_points'
id: Optional[int] = Field(primary_key=True)
created_at: datetime = Field(default=datetime.now())
updated_at: datetime = Field(default=datetime.now(),
sa_column_kwargs={'onupdate': datetime.now()})
class InterestPointRead(InterestPointBase):
id: int
created_at: datetime
updated_at: datetime
class InterestPointCreate(InterestPointBase):
pass
# #############################################################################
class RouteBase(SQLModel):
id: Optional[int]
waking_distance: Optional[int] = 0
total_distance: Optional[int] = 0
total_time: Optional[int] = 0
public_transport_count: Optional[int] = 0
listing_id: Optional[int] = Field(default=None, foreign_key="listings.id")
interest_point_id: Optional[int] = Field(
default=None, foreign_key="interest_points.id")
class Route(RouteBase, table=True):
__tablename__ = 'routes'
id: int = Field(primary_key=True)
created_at: datetime = Field(default=datetime.now())
updated_at: datetime = Field(
default=datetime.now(),
sa_column_kwargs={'onupdate': datetime.now()})
listing: Optional[Listing] = Relationship(back_populates="routes",)
interest_point: Optional[InterestPoint] = Relationship()
class RouteRead(RouteBase):
id: int
created_at: datetime
updated_at: datetime
class RouteCreate(RouteBase):
id: Optional[int] = None
# #############################################################################
# #############################################################################
class ImageReadWithListings(ImageRead):
listing: Optional[Listing] = None
class ListingReadWithRelations(ListingRead):
images: List["ImageRead"] = []
facilities: List["Facility"] = []
places_nearby: List["PlaceNearby"] = []
routes: List["Route"] = []
class ListingCreateWithRelations(ListingCreate):
images: List["ImageCreate"] = []
facilities: List["Facility"] = [] | [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((300, 350), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""listings.id"""', 'primary_key': '(True)'}), "(foreign_key='listings.id', primary_key=True)\n", (305, 350), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((388, 440), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""facilities.id"""', 'primary_key': '(True)'}), "(foreign_key='facilities.id', primary_key=True)\n", (393, 440), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((698, 721), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (703, 721), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1225, 1248), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1230, 1248), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1374, 1397), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1379, 1397), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2327, 2350), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2332, 2350), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2573, 2611), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""listing"""'}), "(back_populates='listing')\n", (2585, 2611), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2705, 2749), 'sqlmodel.Relationship', 'Relationship', ([], {'link_model': 'ListingFacilityLink'}), '(link_model=ListingFacilityLink)\n', (2717, 2749), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2791, 2829), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""listing"""'}), "(back_populates='listing')\n", (2803, 2829), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2868, 2906), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""listing"""'}), "(back_populates='listing')\n", (2880, 2906), False, 'from sqlmodel import 
SQLModel, Field, Relationship\n'), ((3547, 3570), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (3552, 3570), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((4171, 4217), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""listings.id"""'}), "(default=None, foreign_key='listings.id')\n", (4176, 4217), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((4299, 4322), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (4304, 4322), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((4550, 4587), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""images"""'}), "(back_populates='images')\n", (4562, 4587), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((5301, 5347), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""listings.id"""'}), "(default=None, foreign_key='listings.id')\n", (5306, 5347), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((5458, 5481), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (5463, 5481), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((5709, 5753), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""places_nearby"""'}), "(back_populates='places_nearby')\n", (5721, 5753), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((6277, 6300), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (6282, 6300), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((6986, 7032), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""listings.id"""'}), "(default=None, foreign_key='listings.id')\n", (6991, 7032), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((7072, 7125), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""interest_points.id"""'}), "(default=None, 
foreign_key='interest_points.id')\n", (7077, 7125), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((7216, 7239), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (7221, 7239), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((7452, 7489), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""routes"""'}), "(back_populates='routes')\n", (7464, 7489), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((7537, 7551), 'sqlmodel.Relationship', 'Relationship', ([], {}), '()\n', (7549, 7551), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((763, 777), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (775, 777), False, 'from datetime import date, datetime\n'), ((820, 834), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (832, 834), False, 'from datetime import date, datetime\n'), ((2392, 2406), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2404, 2406), False, 'from datetime import date, datetime\n'), ((2449, 2463), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2461, 2463), False, 'from datetime import date, datetime\n'), ((3612, 3626), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3624, 3626), False, 'from datetime import date, datetime\n'), ((3669, 3683), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3681, 3683), False, 'from datetime import date, datetime\n'), ((4364, 4378), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4376, 4378), False, 'from datetime import date, datetime\n'), ((4421, 4435), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4433, 4435), False, 'from datetime import date, datetime\n'), ((5523, 5537), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5535, 5537), False, 'from datetime import date, datetime\n'), ((5580, 5594), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5592, 5594), False, 'from datetime import date, 
datetime\n'), ((6342, 6356), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6354, 6356), False, 'from datetime import date, datetime\n'), ((6399, 6413), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6411, 6413), False, 'from datetime import date, datetime\n'), ((7281, 7295), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7293, 7295), False, 'from datetime import date, datetime\n'), ((7347, 7361), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7359, 7361), False, 'from datetime import date, datetime\n'), ((899, 913), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (911, 913), False, 'from datetime import date, datetime\n'), ((2528, 2542), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2540, 2542), False, 'from datetime import date, datetime\n'), ((3748, 3762), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3760, 3762), False, 'from datetime import date, datetime\n'), ((4500, 4514), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4512, 4514), False, 'from datetime import date, datetime\n'), ((5659, 5673), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5671, 5673), False, 'from datetime import date, datetime\n'), ((6478, 6492), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6490, 6492), False, 'from datetime import date, datetime\n'), ((7401, 7415), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7413, 7415), False, 'from datetime import date, datetime\n')] |
"""add application resume
Revision ID: 07061a7c250f
Revises: 378a9b9a491b
Create Date: 2022-04-26 08:00:47.428979+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "07061a7c250f"
down_revision = "378a9b9a491b"
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: add a nullable ``resume`` string column to ``applications``."""
    resume_column = sa.Column(
        "resume", sqlmodel.sql.sqltypes.AutoString(), nullable=True
    )
    op.add_column("applications", resume_column)
def downgrade():
    """Revert the migration: drop the ``resume`` column from ``applications``."""
    op.drop_column("applications", "resume")
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((496, 536), 'alembic.op.drop_column', 'op.drop_column', (['"""applications"""', '"""resume"""'], {}), "('applications', 'resume')\n", (510, 536), False, 'from alembic import op\n'), ((415, 449), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (447, 449), False, 'import sqlmodel\n')] |
import uuid
from datetime import datetime
from typing import Optional
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSON
from sqlmodel import Field, Relationship
from api.db.models.base import BaseModel, BaseTable
class OutOfBandBase(BaseModel):
    """Shared fields of an out-of-band message exchanged between two LOBs."""
    msg_type: str = Field(nullable=False)
    # Arbitrary message payload, persisted in a JSON column.
    msg: dict = Field(default={}, sa_column=Column(JSON))
    # NOTE(review): annotated as uuid.UUID but defaulted to None — the table
    # subclass below makes these required foreign keys; presumably these
    # should be Optional[uuid.UUID] here. TODO confirm.
    sender_id: uuid.UUID = None
    recipient_id: uuid.UUID = None
    sandbox_id: uuid.UUID = None
    action: Optional[str] = Field(nullable=True)
class OutOfBand(OutOfBandBase, BaseTable, table=True):
    """Table model: an out-of-band message between a sender and recipient LOB
    inside a sandbox. Both ends resolve to rows of ``line_of_business``."""
    __tablename__ = "out_of_band"
    # optional else, required on save
    sender_id: uuid.UUID = Field(foreign_key="line_of_business.id")
    recipient_id: uuid.UUID = Field(foreign_key="line_of_business.id")
    sandbox_id: uuid.UUID = Field(foreign_key="sandbox.id")
    # relationships
    # Explicit primaryjoin is required because two foreign keys point at the
    # same Lob table; "joined" loads the related Lob eagerly.
    sender: Optional["Lob"] = Relationship(  # noqa: F821
        sa_relationship_kwargs={
            "primaryjoin": "OutOfBand.sender_id==Lob.id",
            "lazy": "joined",
        }
    )
    recipient: Optional["Lob"] = Relationship(  # noqa: F821
        sa_relationship_kwargs={
            "primaryjoin": "OutOfBand.recipient_id==Lob.id",
            "lazy": "joined",
        }
    )

    class Config:
        arbitrary_types_allowed = True
class OutOfBandCreate(OutOfBandBase):
    """Creation payload; identical to the base fields."""
    pass
class OutOfBandRead(OutOfBandBase):
    """Read/response model: id and audit timestamps are always present."""
    id: uuid.UUID
    created_at: datetime
    updated_at: datetime
class OutOfBandUpdate(BaseModel):
    """Update payload: id is required, other fields are optional patches."""
    id: uuid.UUID
    name: Optional[str] = None
    action: Optional[str] = None
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((297, 318), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (302, 318), False, 'from sqlmodel import Field, Relationship\n'), ((505, 525), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)'}), '(nullable=True)\n', (510, 525), False, 'from sqlmodel import Field, Relationship\n'), ((683, 723), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""line_of_business.id"""'}), "(foreign_key='line_of_business.id')\n", (688, 723), False, 'from sqlmodel import Field, Relationship\n'), ((754, 794), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""line_of_business.id"""'}), "(foreign_key='line_of_business.id')\n", (759, 794), False, 'from sqlmodel import Field, Relationship\n'), ((823, 854), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""sandbox.id"""'}), "(foreign_key='sandbox.id')\n", (828, 854), False, 'from sqlmodel import Field, Relationship\n'), ((906, 1011), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'primaryjoin': 'OutOfBand.sender_id==Lob.id', 'lazy': 'joined'}"}), "(sa_relationship_kwargs={'primaryjoin':\n 'OutOfBand.sender_id==Lob.id', 'lazy': 'joined'})\n", (918, 1011), False, 'from sqlmodel import Field, Relationship\n'), ((1104, 1212), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'primaryjoin': 'OutOfBand.recipient_id==Lob.id', 'lazy': 'joined'}"}), "(sa_relationship_kwargs={'primaryjoin':\n 'OutOfBand.recipient_id==Lob.id', 'lazy': 'joined'})\n", (1116, 1212), False, 'from sqlmodel import Field, Relationship\n'), ((363, 375), 'sqlalchemy.Column', 'Column', (['JSON'], {}), '(JSON)\n', (369, 375), False, 'from sqlalchemy import Column\n')] |
"""Example code"""
from sqlmodel import Field, SQLModel
class CarBase(SQLModel):
    """
    CarBase is the base model. This is a data-only model(Pydantic), since it lacks `table=True`.
    """
    # Fields shared by the table model and the request/response models below.
    name: str
    manufacturer: str
class Car(CarBase, table=True):  # type: ignore
    """
    Add `id` property to the base model. Since `table=True` it is a pydantic AND SQLAlchemy model
    and represents a database table.
    """
    # NOTE(review): annotated `int` but defaulted to None (unset until the DB
    # assigns it) — presumably intended as Optional[int]; confirm before tightening.
    id: int = Field(default=None, primary_key=True)
class CarRead(CarBase):
    """Response model for reading a car."""
    id: int  # Make `id` a required field in our response model
class CarCreate(CarBase):
    """
    This is a data-only pydantic model. Used to create new cars.
    """
    pass
| [
"sqlmodel.Field"
] | [((449, 486), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (454, 486), False, 'from sqlmodel import Field, SQLModel\n')] |
"""empty message
Revision ID: 2d614148ea4b
Revises:
Create Date: 2022-03-17 22:29:00.613962
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
import sqlmodel # added
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '2d614148ea4b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Initial schema: role, user, textinference and zeroshotinference tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Roles referenced by user.role_id below.
    op.create_table('role',
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Application users; email is uniquely indexed below.
    op.create_table('user',
    sa.Column('first_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('last_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.Column('is_superuser', sa.Boolean(), nullable=True),
    sa.Column('birthdate', sa.DateTime(), nullable=True),
    sa.Column('phone', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('state', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('country', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('address', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('hashed_password', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('role_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_hashed_password'), 'user', ['hashed_password'], unique=False)
    # Stored text-inference results, owned by a user.
    op.create_table('textinference',
    sa.Column('result', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('text', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('created_by_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_textinference_text'), 'textinference', ['text'], unique=False)
    # Zero-shot inference results: like textinference plus candidate labels.
    op.create_table('zeroshotinference',
    sa.Column('candidate_labels', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('result', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.Column('text', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('created_by_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_zeroshotinference_text'), 'zeroshotinference', ['text'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop all tables and indexes created by upgrade(), dependents first."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_zeroshotinference_text'), table_name='zeroshotinference')
    op.drop_table('zeroshotinference')
    op.drop_index(op.f('ix_textinference_text'), table_name='textinference')
    op.drop_table('textinference')
    op.drop_index(op.f('ix_user_hashed_password'), table_name='user')
    op.drop_index(op.f('ix_user_email'), table_name='user')
    op.drop_table('user')
    # role is dropped last because user.role_id references it.
    op.drop_table('role')
    # ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((3708, 3742), 'alembic.op.drop_table', 'op.drop_table', (['"""zeroshotinference"""'], {}), "('zeroshotinference')\n", (3721, 3742), False, 'from alembic import op\n'), ((3824, 3854), 'alembic.op.drop_table', 'op.drop_table', (['"""textinference"""'], {}), "('textinference')\n", (3837, 3854), False, 'from alembic import op\n'), ((3989, 4010), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (4002, 4010), False, 'from alembic import op\n'), ((4015, 4036), 'alembic.op.drop_table', 'op.drop_table', (['"""role"""'], {}), "('role')\n", (4028, 4036), False, 'from alembic import op\n'), ((806, 835), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (829, 835), True, 'import sqlalchemy as sa\n'), ((1899, 1948), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['role_id']", "['role.id']"], {}), "(['role_id'], ['role.id'])\n", (1922, 1948), True, 'import sqlalchemy as sa\n'), ((1956, 1985), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1979, 1985), True, 'import sqlalchemy as sa\n'), ((2012, 2033), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (2016, 2033), False, 'from alembic import op\n'), ((2087, 2118), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (2091, 2118), False, 'from alembic import op\n'), ((2589, 2644), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['created_by_id']", "['user.id']"], {}), "(['created_by_id'], ['user.id'])\n", (2612, 2644), True, 'import sqlalchemy as sa\n'), ((2652, 2681), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2675, 2681), True, 'import sqlalchemy as sa\n'), ((2708, 2737), 'alembic.op.f', 'op.f', (['"""ix_textinference_text"""'], {}), "('ix_textinference_text')\n", (2712, 2737), False, 'from alembic import op\n'), ((3300, 3355), 
'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['created_by_id']", "['user.id']"], {}), "(['created_by_id'], ['user.id'])\n", (3323, 3355), True, 'import sqlalchemy as sa\n'), ((3363, 3392), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3386, 3392), True, 'import sqlalchemy as sa\n'), ((3419, 3452), 'alembic.op.f', 'op.f', (['"""ix_zeroshotinference_text"""'], {}), "('ix_zeroshotinference_text')\n", (3423, 3452), False, 'from alembic import op\n'), ((3637, 3670), 'alembic.op.f', 'op.f', (['"""ix_zeroshotinference_text"""'], {}), "('ix_zeroshotinference_text')\n", (3641, 3670), False, 'from alembic import op\n'), ((3761, 3790), 'alembic.op.f', 'op.f', (['"""ix_textinference_text"""'], {}), "('ix_textinference_text')\n", (3765, 3790), False, 'from alembic import op\n'), ((3873, 3904), 'alembic.op.f', 'op.f', (['"""ix_user_hashed_password"""'], {}), "('ix_user_hashed_password')\n", (3877, 3904), False, 'from alembic import op\n'), ((3943, 3964), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (3947, 3964), False, 'from alembic import op\n'), ((498, 532), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (530, 532), False, 'import sqlmodel\n'), ((580, 614), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (612, 614), False, 'import sqlmodel\n'), ((653, 665), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (663, 665), True, 'import sqlalchemy as sa\n'), ((712, 725), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (723, 725), True, 'import sqlalchemy as sa\n'), ((771, 784), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (782, 784), True, 'import sqlalchemy as sa\n'), ((898, 932), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (930, 932), False, 'import sqlmodel\n'), ((978, 1012), 'sqlmodel.sql.sqltypes.AutoString', 
'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1010, 1012), False, 'import sqlmodel\n'), ((1054, 1088), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1086, 1088), False, 'import sqlmodel\n'), ((1133, 1145), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1143, 1145), True, 'import sqlalchemy as sa\n'), ((1193, 1205), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1203, 1205), True, 'import sqlalchemy as sa\n'), ((1250, 1263), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1261, 1263), True, 'import sqlalchemy as sa\n'), ((1304, 1338), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1336, 1338), False, 'import sqlmodel\n'), ((1379, 1413), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1411, 1413), False, 'import sqlmodel\n'), ((1456, 1490), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1488, 1490), False, 'import sqlmodel\n'), ((1533, 1567), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1565, 1567), False, 'import sqlmodel\n'), ((1613, 1626), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1624, 1626), True, 'import sqlalchemy as sa\n'), ((1672, 1685), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1683, 1685), True, 'import sqlalchemy as sa\n'), ((1723, 1735), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1733, 1735), True, 'import sqlalchemy as sa\n'), ((1787, 1821), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1819, 1821), False, 'import sqlmodel\n'), ((1865, 1877), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1875, 1877), True, 'import sqlalchemy as sa\n'), ((2302, 2336), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2334, 2336), False, 'import sqlmodel\n'), ((2375, 2387), 
'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2385, 2387), True, 'import sqlalchemy as sa\n'), ((2434, 2447), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (2445, 2447), True, 'import sqlalchemy as sa\n'), ((2493, 2506), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (2504, 2506), True, 'import sqlalchemy as sa\n'), ((2555, 2567), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2565, 2567), True, 'import sqlalchemy as sa\n'), ((3013, 3047), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3045, 3047), False, 'import sqlmodel\n'), ((3086, 3098), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3096, 3098), True, 'import sqlalchemy as sa\n'), ((3145, 3158), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3156, 3158), True, 'import sqlalchemy as sa\n'), ((3204, 3217), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3215, 3217), True, 'import sqlalchemy as sa\n'), ((3266, 3278), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3276, 3278), True, 'import sqlalchemy as sa\n'), ((2252, 2261), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2259, 2261), True, 'import sqlalchemy as sa\n'), ((2883, 2892), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2890, 2892), True, 'import sqlalchemy as sa\n'), ((2963, 2972), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (2970, 2972), True, 'import sqlalchemy as sa\n')] |
"""Arquivo para fixtures"""
from unittest.mock import patch
from pytest import fixture
from sqlmodel import create_engine, SQLModel
from tests.mocks import mock_user
from mitmirror.infra.entities import * # pylint: disable=W0614, W0401
user = mock_user()
@fixture(scope="module")
def fake_user():
    """Return the module-wide mocked user."""
    return user
@fixture(autouse=True, scope="function")
def separate_database(request):
    """
    Give every test its own throwaway SQLite database file, patched in as
    the application engine for the duration of the test.
    """
    db_file = request.getfixturevalue("tmpdir").join("mitmirror.test.db")
    test_engine = create_engine(f"sqlite:///{db_file}")
    SQLModel.metadata.create_all(test_engine)
    with patch("mitmirror.infra.config.database_config.engine", test_engine):
        yield
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine"
] | [((246, 257), 'tests.mocks.mock_user', 'mock_user', ([], {}), '()\n', (255, 257), False, 'from tests.mocks import mock_user\n'), ((261, 284), 'pytest.fixture', 'fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (268, 284), False, 'from pytest import fixture\n'), ((348, 387), 'pytest.fixture', 'fixture', ([], {'autouse': '(True)', 'scope': '"""function"""'}), "(autouse=True, scope='function')\n", (355, 387), False, 'from pytest import fixture\n'), ((622, 659), 'sqlmodel.create_engine', 'create_engine', (['f"""sqlite:///{test_db}"""'], {}), "(f'sqlite:///{test_db}')\n", (635, 659), False, 'from sqlmodel import create_engine, SQLModel\n'), ((664, 700), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (692, 700), False, 'from sqlmodel import create_engine, SQLModel\n'), ((710, 772), 'unittest.mock.patch', 'patch', (['"""mitmirror.infra.config.database_config.engine"""', 'engine'], {}), "('mitmirror.infra.config.database_config.engine', engine)\n", (715, 772), False, 'from unittest.mock import patch\n')] |
from typing import Optional
from fastapi import FastAPI
from sqlalchemy.sql.expression import table
from sqlmodel import (
SQLModel,
Field,
create_engine,
select,
Session
)
engine = create_engine('sqlite:///database.db')
class Pessoa(SQLModel, table=True):
    """Table model: a person with a name and an age (stored as a string)."""
    id : Optional[int] = Field(default=None, primary_key=True)
    nome: str
    idade: str
SQLModel.metadata.create_all(engine)
app = FastAPI()
@app.get('/')
def home():
    """Simple health-check route."""
    return {'message' : 'Deu bom!!!'}
@app.get('/pessoa')
def pessoa():
    """List every Pessoa record in the database."""
    with Session(engine) as session:
        return session.execute(select(Pessoa)).scalars().all()
@app.get('/pessoas-nome')
def pessoas_nome():
    """Return only the names of all Pessoa records.

    Renamed from ``pessoa`` so it no longer shadows the handler for the
    ``/pessoa`` route defined above; the URL path is unchanged.
    """
    query = select(Pessoa.nome)
    with Session(engine) as session:
        result = session.execute(query).scalars().all()
        return result
| [
"sqlmodel.create_engine",
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field"
] | [((204, 242), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {}), "('sqlite:///database.db')\n", (217, 242), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((373, 409), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (401, 409), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((417, 426), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (424, 426), False, 'from fastapi import FastAPI\n'), ((305, 342), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (310, 342), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((539, 553), 'sqlmodel.select', 'select', (['Pessoa'], {}), '(Pessoa)\n', (545, 553), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((718, 737), 'sqlmodel.select', 'select', (['Pessoa.nome'], {}), '(Pessoa.nome)\n', (724, 737), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((563, 578), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (570, 578), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((747, 762), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (754, 762), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n')] |
from glob import glob
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine, select
class Image(SQLModel, table=True):
    """Table model: an image with its name, label and URL."""
    key: Optional[int] = Field(default=None, primary_key=True)
    image_name: str
    label: str
    image_url: str
engine = create_engine("sqlite:///image.db")
def read_images():
    """Return every Image row stored in the database."""
    with Session(engine) as session:
        return session.exec(select(Image)).all()
read_images() | [
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((283, 318), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///image.db"""'], {}), "('sqlite:///image.db')\n", (296, 318), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((181, 218), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (186, 218), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((348, 363), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (355, 363), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((396, 409), 'sqlmodel.select', 'select', (['Image'], {}), '(Image)\n', (402, 409), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')] |
from datetime import datetime
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
# Simple lookup tables: each row is an id plus a display name.
class Right(SQLModel, table=True):
    """Lookup: user rights/permissions."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Province(SQLModel, table=True):
    """Lookup: provinces."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Amphoe(SQLModel, table=True):
    """Lookup: districts, grouped under a province."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # NOTE(review): looks like a reference to Province.id but has no
    # foreign_key= constraint — confirm whether that is intentional.
    province_id: int
    name: str
class Tambon(SQLModel, table=True):
    """Lookup: sub-districts, grouped under a district (amphoe)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # NOTE(review): same as above — plain int, no foreign_key= constraint.
    amphoe_id: int
    name: str
class Religion(SQLModel, table=True):
    """Lookup: religions."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class National(SQLModel, table=True):
    """Lookup: nationalities."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Occupation(SQLModel, table=True):
    """Lookup: occupations."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class MaritalStatus(SQLModel, table=True):
    """Lookup: marital statuses."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class AcademicDegree(SQLModel, table=True):
    """Lookup: academic degrees."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Allergy(SQLModel, table=True):
    """Lookup: allergies."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Vehicle(SQLModel, table=True):
    """Lookup: vehicle types."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Language(SQLModel, table=True):
    """Lookup: languages."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Relationship(SQLModel, table=True):
    """Lookup: relationship types."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class IdType(SQLModel, table=True):
    """Lookup: identification document types."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class FeedbackType(SQLModel, table=True):
    """Lookup: feedback types."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class VisibilityLevel(SQLModel, table=True):
    """Lookup: visibility levels."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Module(SQLModel, table=True):
    """A system module with an audit trail (who created/updated it and when)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    detail: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class ModuleFunction(SQLModel, table=True):
    """A function within a module, with the same audit fields as Module."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    detail: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
| [
"sqlmodel.Field"
] | [((136, 147), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (145, 147), False, 'from fastapi import APIRouter\n'), ((209, 246), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (214, 246), False, 'from sqlmodel import Field, SQLModel\n'), ((325, 362), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (330, 362), False, 'from sqlmodel import Field, SQLModel\n'), ((439, 476), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (444, 476), False, 'from sqlmodel import Field, SQLModel\n'), ((574, 611), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (579, 611), False, 'from sqlmodel import Field, SQLModel\n'), ((709, 746), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (714, 746), False, 'from sqlmodel import Field, SQLModel\n'), ((825, 862), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (830, 862), False, 'from sqlmodel import Field, SQLModel\n'), ((943, 980), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (948, 980), False, 'from sqlmodel import Field, SQLModel\n'), ((1064, 1101), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1069, 1101), False, 'from sqlmodel import Field, SQLModel\n'), ((1186, 1223), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1191, 1223), False, 'from sqlmodel import Field, SQLModel\n'), ((1301, 1338), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1306, 
1338), False, 'from sqlmodel import Field, SQLModel\n'), ((1416, 1453), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1421, 1453), False, 'from sqlmodel import Field, SQLModel\n'), ((1532, 1569), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1537, 1569), False, 'from sqlmodel import Field, SQLModel\n'), ((1652, 1689), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1657, 1689), False, 'from sqlmodel import Field, SQLModel\n'), ((1766, 1803), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1771, 1803), False, 'from sqlmodel import Field, SQLModel\n'), ((1886, 1923), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1891, 1923), False, 'from sqlmodel import Field, SQLModel\n'), ((2009, 2046), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2014, 2046), False, 'from sqlmodel import Field, SQLModel\n'), ((2123, 2160), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2128, 2160), False, 'from sqlmodel import Field, SQLModel\n'), ((2368, 2405), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2373, 2405), False, 'from sqlmodel import Field, SQLModel\n')] |
from sqlmodel import Session, select
from db import BaseDBModel
from service.base_crud import BaseCRUD
class TestTable(BaseDBModel, table=True):
    """Minimal concrete table used to exercise BaseCRUD in the tests below."""
    test_str: str
    test_int: int
# Fixture payloads for the two rows used throughout these tests.
TEST_ROW_DATA1 = {'test_str': 'str1', 'test_int': 1}
TEST_ROW_DATA2 = {'test_str': 'str2', 'test_int': 2}
class TestCRUD(BaseCRUD):
    """CRUD implementation bound to TestTable for testing."""
    model = TestTable
# Module-level instance shared by every test in this file.
test_crud = TestCRUD()
def test_get_returns_none_for_not_existing_rows(session: Session):
    """get() with the id of an unsaved row should yield nothing."""
    unsaved = TestTable(**TEST_ROW_DATA1)
    assert test_crud.get(session, unsaved.id) is None
def test_get_returns_existing_row(session: Session):
    """A committed row is retrievable by its primary key."""
    persisted = TestTable(**TEST_ROW_DATA1)
    session.add(persisted)
    session.commit()
    fetched: TestTable = test_crud.get(session, persisted.id)
    assert fetched == persisted
def test_get_multiple_values_returns_empty_array_for_not_existing_rows(session: Session):
    """An empty table yields an empty collection."""
    assert not test_crud.get_multiple_values(session)
def create_and_return_multiple_rows(db_session: Session):
    """Persist two fixture rows and return them in insertion order."""
    rows = [TestTable(**TEST_ROW_DATA1), TestTable(**TEST_ROW_DATA2)]
    for row in rows:
        db_session.add(row)
    db_session.commit()
    return rows
def test_get_multiple_values_returns_existing_rows(session: Session):
    """All committed rows come back from an unfiltered query."""
    expected = create_and_return_multiple_rows(session)
    assert test_crud.get_multiple_values(session) == expected
def test_get_multiple_values_returns_limited_rows(session: Session):
    """limit=1 keeps only the first row."""
    inserted = create_and_return_multiple_rows(session)
    assert test_crud.get_multiple_values(session, limit=1) == inserted[:1]
def test_get_multiple_values_returns_offset_rows(session: Session):
    """offset=1 skips the first row."""
    inserted = create_and_return_multiple_rows(session)
    assert test_crud.get_multiple_values(session, offset=1) == inserted[1:]
def test_create_row_returns_inserted_row(session: Session):
    """create() hands back the row it persisted."""
    new_row = TestTable(**TEST_ROW_DATA1)
    returned: TestTable = test_crud.create(session, new_row)
    assert returned == new_row
def test_create_all_does_not_throw_errors(session: Session):
    """create_all persists every row handed to it."""
    fixtures = [TestTable(**TEST_ROW_DATA1), TestTable(**TEST_ROW_DATA2)]
    test_crud.create_all(session, fixtures)
    persisted = session.exec(select(TestTable)).all()
    assert persisted == fixtures
def test_update_returns_updated_row(session: Session):
    """update() applies the patch object's fields and returns the row."""
    original = TestTable(**TEST_ROW_DATA1)
    session.add(original)
    session.commit()
    patch = TestTable.from_orm(original)
    patch.test_int = 10
    result: TestTable = test_crud.update(session, original, patch)
    assert result.test_int == patch.test_int
def test_delete_row_does_not_throw_error(session: Session):
    """delete() removes the row; a follow-up select finds nothing."""
    target = TestTable(**TEST_ROW_DATA1)
    session.add(target)
    session.commit()
    test_crud.delete(session, target.id)
    remaining = session.exec(
        select(TestTable).where(TestTable.id == target.id)).first()
    assert remaining is None
def test_model_attribute_returns_test_model():
    """TestCRUD exposes TestTable through its ``model`` attribute."""
    assert test_crud.model is TestTable
def test_model_in_base_crud_returns_none_when_not_implemented():
    """BaseCRUD's default ``model`` should be None when not overridden."""
    # Clear the abstract-method set so the otherwise-abstract BaseCRUD
    # subclass below can be instantiated without providing `model`.
    BaseCRUD.__abstractmethods__ = set()
    class MockCRUD(BaseCRUD):
        string: str
    mock_crud = MockCRUD()
    assert mock_crud.model is None
| [
"sqlmodel.select"
] | [((2249, 2266), 'sqlmodel.select', 'select', (['TestTable'], {}), '(TestTable)\n', (2255, 2266), False, 'from sqlmodel import Session, select\n'), ((2857, 2874), 'sqlmodel.select', 'select', (['TestTable'], {}), '(TestTable)\n', (2863, 2874), False, 'from sqlmodel import Session, select\n')] |
from typing import List, Optional
from sqlmodel import Field, Session, SQLModel, create_engine
class Zi(SQLModel, table=True):
    """One Chinese character row from strokes.db with per-stroke-type counts."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(...)  # the character itself
    stroke_count: int = Field(...)  # total number of strokes
    strokes: str = Field(...)  # stroke sequence -- encoding not visible here; confirm in data
    heng_count: int = Field(...)  # "heng" (horizontal) strokes -- inferred from field name
    shu_count: int = Field(...)  # "shu" (vertical) strokes
    pie_count: int = Field(...)  # "pie" (left-falling) strokes
    dian_count: int = Field(...)  # "dian" (dot) strokes
    zhe_count: int = Field(...)  # "zhe" (turning) strokes
# SQLite engine for the local strokes database; check_same_thread is disabled
# so the single connection can be used from more than one thread.
engine = create_engine('sqlite:///Data/strokes.db', connect_args={'check_same_thread': False})
SQLModel.metadata.create_all(engine)
# Module-level session shared by importers of this module.
session = Session(engine)
| [
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((474, 564), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///Data/strokes.db"""'], {'connect_args': "{'check_same_thread': False}"}), "('sqlite:///Data/strokes.db', connect_args={\n 'check_same_thread': False})\n", (487, 564), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((561, 597), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (589, 597), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((609, 624), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (616, 624), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((160, 197), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (165, 197), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((215, 225), 'sqlmodel.Field', 'Field', (['...'], {}), '(...)\n', (220, 225), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((251, 261), 'sqlmodel.Field', 'Field', (['...'], {}), '(...)\n', (256, 261), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((282, 292), 'sqlmodel.Field', 'Field', (['...'], {}), '(...)\n', (287, 292), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((316, 326), 'sqlmodel.Field', 'Field', (['...'], {}), '(...)\n', (321, 326), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((349, 359), 'sqlmodel.Field', 'Field', (['...'], {}), '(...)\n', (354, 359), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((382, 392), 'sqlmodel.Field', 'Field', (['...'], {}), '(...)\n', (387, 392), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((416, 426), 'sqlmodel.Field', 'Field', (['...'], {}), '(...)\n', (421, 426), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((449, 459), 'sqlmodel.Field', 
'Field', (['...'], {}), '(...)\n', (454, 459), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n')] |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
# NOTE(review): this module-level session is not used by the endpoints below
# (they all take a session via Depends); presumably kept for external
# importers -- confirm before removing.
session = Session(engine)
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
"""
Post a new role.
Parameters
----------
role : Role
Role that is to be added to the database.
session : Session
SQL session that is to be used to add the role.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
"""
Get list of all roles.
Parameters
----------
session : Session
SQL session that is to be used to get the roles.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Role)
results = session.exec(statement).all()
return results
@router.get("/active")
async def read_roles(session: Session = Depends(get_session)):
"""
Get list of active roles.
Parameters
----------
session : Session
SQL session that is to be used to get the roles.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Role).where(Role.is_active == True)
results = session.exec(statement).all()
return results
@router.put("/{role_id}/activate")
async def activate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
"""
Activate a role using the role ID as a key.
Parameters
----------
role_id : str
ID of role to be activated.
session : Session
SQL session that is to be used to activate the role.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Role).where(Role.id == role_id)
role_to_activate = session.exec(statement).one()
role_to_activate.is_active = True
role_to_activate.updated_at = datetime.now()
session.add(role_to_activate)
session.commit()
session.refresh(role_to_activate)
return role_to_activate
@router.put("/{role_id}/deactivate")
async def deactivate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
"""
Deactivate a role using the role ID as a key.
Parameters
----------
role_id : str
ID of role to be deactivated.
session : Session
SQL session that is to be used to deactivate the role.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Role).where(Role.id == role_id)
role_to_deactivate = session.exec(statement).one()
role_to_deactivate.is_active = False
role_to_deactivate.updated_at = datetime.now()
session.add(role_to_deactivate)
session.commit()
session.refresh(role_to_deactivate)
return role_to_deactivate
@router.put("/")
async def update_role(
id: str = None,
new_name: str = None,
new_short_name: str = None,
is_active: bool = None,
session: Session = Depends(get_session),
):
"""
Update a role.
Parameters
----------
id : str
ID of role to be updated.
new_name : str
New name of the role.
new_short_name : str
New short name of the role.
is_active : bool
New status of the role.
session : Session
SQL session that is to be used to update the role.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Role.is_active).where(Role.id == id)
result = session.exec(statement).first()
if result == True:
statement = select(Role).where(Role.id == id)
role_to_update = session.exec(statement).one()
if new_name != None:
role_to_update.name = new_name
if new_short_name != None:
role_to_update.short_name = new_short_name
if is_active != None:
role_to_update.is_active = is_active
session.add(role_to_update)
role_to_update.updated_at = datetime.now()
session.commit()
session.refresh(role_to_update)
return role_to_update
else:
return False
| [
"sqlmodel.select",
"sqlmodel.Session"
] | [((243, 288), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/api/roles"""', 'tags': "['role']"}), "(prefix='/api/roles', tags=['role'])\n", (252, 288), False, 'from fastapi import APIRouter, Depends\n'), ((299, 314), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (306, 314), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((389, 409), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (396, 409), False, 'from fastapi import APIRouter, Depends\n'), ((1018, 1038), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1025, 1038), False, 'from fastapi import APIRouter, Depends\n'), ((1286, 1298), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1292, 1298), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1427, 1447), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1434, 1447), False, 'from fastapi import APIRouter, Depends\n'), ((1914, 1934), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1921, 1934), False, 'from fastapi import APIRouter, Depends\n'), ((2427, 2441), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2439, 2441), False, 'from datetime import datetime\n'), ((2677, 2697), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2684, 2697), False, 'from fastapi import APIRouter, Depends\n'), ((3203, 3217), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3215, 3217), False, 'from datetime import datetime\n'), ((3516, 3536), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3523, 3536), False, 'from fastapi import APIRouter, Depends\n'), ((4524, 4538), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4536, 4538), False, 'from datetime import datetime\n'), ((716, 728), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (722, 728), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1698, 1710), 
'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1704, 1710), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((2263, 2275), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (2269, 2275), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((3032, 3044), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (3038, 3044), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((3990, 4012), 'sqlmodel.select', 'select', (['Role.is_active'], {}), '(Role.is_active)\n', (3996, 4012), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((4122, 4134), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (4128, 4134), False, 'from sqlmodel import Session, select, SQLModel, or_\n')] |
from datetime import datetime
from typing import Optional
from sqlmodel import Field, SQLModel
class YouTube(SQLModel, table=True):
    """Database table storing one row per YouTube video."""
    id: Optional[int] = Field(default=None, primary_key=True)
    video_id: str  # YouTube's own video identifier
    title: str
    description: str
    thumb: str  # presumably a thumbnail URL -- confirm against the writer
    published: datetime
class YouTubeRead(SQLModel):
    """Read/response schema mirroring YouTube with a required ``id``."""
    id: int
    video_id: str
    title: str
    description: str
    thumb: str
    published: datetime
| [
"sqlmodel.Field"
] | [((159, 196), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (164, 196), False, 'from sqlmodel import Field, SQLModel\n')] |
from time import sleep
from sqlmodel import select
from icon_governance.config import settings
from icon_governance.log import logger
from icon_governance.metrics import prom_metrics
from icon_governance.models.preps import Prep
from icon_governance.utils.rpc import convert_hex_int, getStake, post_rpc_json
def get_prep_stake(session):
    """Refresh the ``stake`` column of every stored P-Rep from the RPC node."""
    all_preps = session.execute(select(Prep)).scalars().all()
    for prep in all_preps:
        raw_stake = post_rpc_json(getStake(prep.address))["stake"]
        # Hex amount scaled down by 1e18 (presumably loop -> ICX; confirm).
        prep.stake = convert_hex_int(raw_stake) / 1e18
        session.merge(prep)
    session.commit()
def prep_stake_cron(session):
    """Run the stake refresh in an endless loop, sleeping between iterations."""
    while True:
        logger.info("Starting stake cron")
        get_prep_stake(session)
        logger.info("Prep stake ran.")
        # Heartbeat counter so monitoring can tell the cron is alive.
        prom_metrics.preps_stake_cron_ran.inc()
        sleep(settings.CRON_SLEEP_SEC)
if __name__ == "__main__":
from icon_governance.db import session_factory
get_prep_stake(session_factory())
| [
"sqlmodel.select"
] | [((370, 382), 'sqlmodel.select', 'select', (['Prep'], {}), '(Prep)\n', (376, 382), False, 'from sqlmodel import select\n'), ((645, 679), 'icon_governance.log.logger.info', 'logger.info', (['"""Starting stake cron"""'], {}), "('Starting stake cron')\n", (656, 679), False, 'from icon_governance.log import logger\n'), ((720, 750), 'icon_governance.log.logger.info', 'logger.info', (['"""Prep stake ran."""'], {}), "('Prep stake ran.')\n", (731, 750), False, 'from icon_governance.log import logger\n'), ((759, 798), 'icon_governance.metrics.prom_metrics.preps_stake_cron_ran.inc', 'prom_metrics.preps_stake_cron_ran.inc', ([], {}), '()\n', (796, 798), False, 'from icon_governance.metrics import prom_metrics\n'), ((807, 837), 'time.sleep', 'sleep', (['settings.CRON_SLEEP_SEC'], {}), '(settings.CRON_SLEEP_SEC)\n', (812, 837), False, 'from time import sleep\n'), ((938, 955), 'icon_governance.db.session_factory', 'session_factory', ([], {}), '()\n', (953, 955), False, 'from icon_governance.db import session_factory\n'), ((494, 516), 'icon_governance.utils.rpc.getStake', 'getStake', (['prep.address'], {}), '(prep.address)\n', (502, 516), False, 'from icon_governance.utils.rpc import convert_hex_int, getStake, post_rpc_json\n')] |
"""
dayong.impls
~~~~~~~~~~~~
Implementaion of interfaces and the logic for injecting them.
"""
import asyncio
from typing import Any
import tanjun
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import SQLModel, select
from sqlmodel.engine.result import ScalarResult
from sqlmodel.ext.asyncio.session import AsyncSession
from dayong.configs import DayongConfig, DayongConfigLoader
from dayong.models import Message
class MessageDBImpl:
    """Implementation of a database connection for transacting and interacting
    with message tables, those of which derive from message table models.

    The data to be selected, retrieved, and modified is determined by the table
    model object and its type. The type, in this case, is
    `dayong.models.Message`.
    """

    def __init__(self) -> None:
        self._conn: AsyncEngine

    async def connect(self, config: DayongConfig = tanjun.injected(type=DayongConfig)):
        """Create a database connection.

        If the `database_uri` is Falsy, the function will reattempt to get the
        url from the environment variables.

        Args:
            config (DayongConfig, optional): [description]. Defaults to
                tanjun.injected(type=DayongConfig).
        """
        loop = asyncio.get_running_loop()
        self._conn = await loop.run_in_executor(
            None,
            create_async_engine,
            config.database_uri if config.database_uri else DayongConfigLoader().load(),
        )

    async def create_table(self) -> None:
        """Create physical message tables for all the message table models
        stored in `SQLModel.metadata`.
        """
        async with self._conn.begin() as conn:
            await conn.run_sync(SQLModel.metadata.create_all)

    async def add_row(self, tabe_model_object: Message) -> None:
        """Insert a row in the message table.

        Args:
            tabe_model_object (Message): An instance of `dayong.models.Message`
                or one of its subclasses.
        """
        async with AsyncSession(self._conn) as session:
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(None, session.add, tabe_model_object)
            await session.commit()

    async def remove_row(self, tabe_model_object: Message) -> None:
        """Delete a row in the message table.

        Args:
            tabe_model_object (Message): An instance of `dayong.models.Message`
                or one of its subclasses.
        """
        table_model = type(tabe_model_object)
        async with AsyncSession(self._conn) as session:
            # Temp ignore incompatible type passed to `exec()`. See:
            # https://github.com/tiangolo/sqlmodel/issues/54
            # https://github.com/tiangolo/sqlmodel/pull/58
            result: ScalarResult[Any] = await session.exec(
                select(table_model).where(  # type: ignore
                    table_model.message_id == tabe_model_object.message_id
                )
            )
            # Bug fix: `session.delete` expects a mapped model instance, but
            # the original passed the ScalarResult wrapper itself. Unwrap the
            # matching row before deleting it.
            row = result.one()
            await session.delete(row)
            await session.commit()

    async def get_row(self, tabe_model_object: Message) -> ScalarResult[Any]:
        """Fetch a row from the message table.

        Args:
            tabe_model_object (Message): An instance of `dayong.models.Message`
                or one of its subclasses.

        Returns:
            ScalarResult: An `ScalarResult` object which contains a scalar
                value or sequence of scalar values.
        """
        table_model = type(tabe_model_object)
        async with AsyncSession(self._conn) as session:
            # Temp ignore incompatible type passed to `exec()`. See:
            # https://github.com/tiangolo/sqlmodel/issues/54
            # https://github.com/tiangolo/sqlmodel/pull/58
            row: ScalarResult[Any] = await session.exec(
                select(table_model).where(  # type: ignore
                    table_model.message_id == tabe_model_object.message_id
                )
            )
            return row
| [
"sqlmodel.ext.asyncio.session.AsyncSession",
"sqlmodel.select"
] | [((917, 951), 'tanjun.injected', 'tanjun.injected', ([], {'type': 'DayongConfig'}), '(type=DayongConfig)\n', (932, 951), False, 'import tanjun\n'), ((1285, 1311), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (1309, 1311), False, 'import asyncio\n'), ((2066, 2090), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (2078, 2090), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((2122, 2148), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (2146, 2148), False, 'import asyncio\n'), ((2587, 2611), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (2599, 2611), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((3590, 3614), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self._conn'], {}), '(self._conn)\n', (3602, 3614), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((1472, 1492), 'dayong.configs.DayongConfigLoader', 'DayongConfigLoader', ([], {}), '()\n', (1490, 1492), False, 'from dayong.configs import DayongConfig, DayongConfigLoader\n'), ((2886, 2905), 'sqlmodel.select', 'select', (['table_model'], {}), '(table_model)\n', (2892, 2905), False, 'from sqlmodel import SQLModel, select\n'), ((3889, 3908), 'sqlmodel.select', 'select', (['table_model'], {}), '(table_model)\n', (3895, 3908), False, 'from sqlmodel import SQLModel, select\n')] |
from sqlmodel import Field
from typing import Optional
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class LinkGroupUser(BaseUUIDModel, table=True):
group_id: Optional[UUID] = Field(default=None, nullable=False, foreign_key="group.id", primary_key=True)
user_id: Optional[UUID] = Field(default=None, nullable=False, foreign_key="user.id", primary_key=True)
| [
"sqlmodel.Field"
] | [((210, 287), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(False)', 'foreign_key': '"""group.id"""', 'primary_key': '(True)'}), "(default=None, nullable=False, foreign_key='group.id', primary_key=True)\n", (215, 287), False, 'from sqlmodel import Field\n'), ((318, 394), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(False)', 'foreign_key': '"""user.id"""', 'primary_key': '(True)'}), "(default=None, nullable=False, foreign_key='user.id', primary_key=True)\n", (323, 394), False, 'from sqlmodel import Field\n')] |
from typing import List, Optional
from functools import wraps
from uuid import UUID
from sqlalchemy.sql.schema import UniqueConstraint
from sqlmodel import Field, Relationship, SQLModel
# monkeypath from https://github.com/tiangolo/sqlmodel/issues/9
# without this all database fields are indexed be default
def set_default_index(func):
    """Wrap *func* so its ``index`` keyword defaults to ``False``.

    Can be removed when https://github.com/tiangolo/sqlmodel/pull/11 is merged
    """
    @wraps(func)
    def wrapper(*args, index=False, **kwargs):
        # Forward everything, but supply our own default for ``index``.
        return func(*args, index=index, **kwargs)
    return wrapper
# monkey patch field with default index=False
# this works as long as we always call Field()
Field = set_default_index(Field)
class TopicModelBase(SQLModel):
    """Shared fields for a topic model: external id plus a version counter."""
    model_id: UUID = Field()
    version: int = Field(default=1)
class TopicModel(TopicModelBase, table=True):
    """Topic-model table; (model_id, version) pairs are unique."""
    __tablename__ = "topic_model"
    __table_args__ = (UniqueConstraint("model_id", "version", name="_model_id_version_uc"),)
    id: Optional[int] = Field(primary_key=True, nullable=False)  # NOQA: A003
    # Deleting a model cascades to its topics.
    topics: List["Topic"] = Relationship(
        back_populates="topic_model", sa_relationship_kwargs={"cascade": "all,delete"}
    )
class WordBase(SQLModel):
    """A word and its relevance score within a topic."""
    name: str = Field()
    score: float = Field()
class Word(WordBase, table=True):
    """Word table row, linked to the topic it belongs to."""
    id: Optional[int] = Field(primary_key=True, nullable=False)  # NOQA: A003
    topic_id: int = Field(foreign_key="topic.id")
    topic: "Topic" = Relationship(
        back_populates="top_words", sa_relationship_kwargs={"cascade": "all,delete"}
    )
class TopicBase(SQLModel):
    """Shared topic fields: display name, document count and model-local index."""
    name: str = Field()
    count: int = Field()
    topic_index: int = Field()
class TopicWithWords(TopicBase):
    """Topic read schema that embeds its top words."""
    # NOTE(review): a mutable default is safe here because pydantic copies
    # field defaults per instance -- confirm, or use default_factory=list.
    top_words: List["WordBase"] = Field(default=[])
class Topic(TopicBase, table=True):
    """Topic table row, linked to its words and its owning topic model."""
    id: Optional[int] = Field(primary_key=True, nullable=False)  # NOQA: A003
    topic_model_id: int = Field(foreign_key="topic_model.id")
    top_words: List[Word] = Relationship(
        back_populates="topic", sa_relationship_kwargs={"cascade": "all,delete"}
    )
    topic_model: TopicModel = Relationship(
        back_populates="topics", sa_relationship_kwargs={"cascade": "all,delete"}
    )
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((485, 496), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (490, 496), False, 'from functools import wraps\n'), ((793, 800), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (798, 800), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((820, 836), 'sqlmodel.Field', 'Field', ([], {'default': '(1)'}), '(default=1)\n', (825, 836), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1037, 1076), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'nullable': '(False)'}), '(primary_key=True, nullable=False)\n', (1042, 1076), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1119, 1216), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""topic_model"""', 'sa_relationship_kwargs': "{'cascade': 'all,delete'}"}), "(back_populates='topic_model', sa_relationship_kwargs={\n 'cascade': 'all,delete'})\n", (1131, 1216), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1270, 1277), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (1275, 1277), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1297, 1304), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (1302, 1304), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1365, 1404), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'nullable': '(False)'}), '(primary_key=True, nullable=False)\n', (1370, 1404), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1439, 1468), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""topic.id"""'}), "(foreign_key='topic.id')\n", (1444, 1468), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1490, 1584), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""top_words"""', 'sa_relationship_kwargs': "{'cascade': 'all,delete'}"}), "(back_populates='top_words', sa_relationship_kwargs={'cascade':\n 'all,delete'})\n", (1502, 1584), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1640, 1647), 'sqlmodel.Field', 
'Field', ([], {}), '()\n', (1645, 1647), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1665, 1672), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (1670, 1672), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1696, 1703), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (1701, 1703), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1773, 1790), 'sqlmodel.Field', 'Field', ([], {'default': '[]'}), '(default=[])\n', (1778, 1790), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1853, 1892), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'nullable': '(False)'}), '(primary_key=True, nullable=False)\n', (1858, 1892), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1933, 1968), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""topic_model.id"""'}), "(foreign_key='topic_model.id')\n", (1938, 1968), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1997, 2087), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""topic"""', 'sa_relationship_kwargs': "{'cascade': 'all,delete'}"}), "(back_populates='topic', sa_relationship_kwargs={'cascade':\n 'all,delete'})\n", (2009, 2087), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((2128, 2219), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""topics"""', 'sa_relationship_kwargs': "{'cascade': 'all,delete'}"}), "(back_populates='topics', sa_relationship_kwargs={'cascade':\n 'all,delete'})\n", (2140, 2219), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((941, 1009), 'sqlalchemy.sql.schema.UniqueConstraint', 'UniqueConstraint', (['"""model_id"""', '"""version"""'], {'name': '"""_model_id_version_uc"""'}), "('model_id', 'version', name='_model_id_version_uc')\n", (957, 1009), False, 'from sqlalchemy.sql.schema import UniqueConstraint\n')] |
from typing import Optional, List
from fastapi import FastAPI, File, UploadFile, Request
from sqlmodel import Field, Session, SQLModel, create_engine, select
from pydantic import BaseModel
from network import Network
import requests
from PIL import Image as ImagePIL
import torchvision as tv
app = FastAPI()
# Load the classifier once at import time and switch it to inference mode.
network = Network()
network.model.eval()
class Image(SQLModel, table=True):
    """Table of uploaded images with an optional human-assigned label."""
    key: Optional[int] = Field(default=None, primary_key=True)
    image_name: str
    label: str
    image_url: str  # URL the image is served from (MinIO in upload_image)
engine = create_engine("sqlite:///image.db")
@app.get("/")
def read_images():
with Session(engine) as session:
statement = select(Image)
images = session.exec(statement).all()
return images
class Item(BaseModel):
    """Request body for labelling: the image key and its new label."""
    key: int
    label: str = ""
@app.post("/")
def update_heroes(item: Item):
with Session(engine) as session:
statement = select(Image).where(Image.key == item.key)
results = session.exec(statement)
image = results.one()
image.label = item.label
session.add(image)
session.commit()
@app.get("/predict/{item}")
def predict(item: int):
import torch
import numpy as np
with Session(engine) as session:
statement = select(Image).where(Image.key == item)
results = session.exec(statement)
image = results.one()
image_url = image.image_url
img = ImagePIL.open(requests.get(image_url, stream=True).raw)
img = tv.transforms.functional.pil_to_tensor(img).float().unsqueeze(0)
with torch.no_grad():
result = network.model(img)
result = torch.nn.functional.softmax(result)
ret = {
'1' : float(result[0][0]), '2': float(result[0][1])
}
return str(ret)
@app.post("/upload_image")
async def upload_image(files: List[UploadFile]):
# return {"filenames": [file.filename for file in files]}
from minio import Minio
import io
with Session(engine) as session:
client = Minio(
"localhost:9001",
secure=False,
access_key="<KEY>",
secret_key="<KEY>"
)
for item in files:
image_name = item.filename
cont = await item.read()
content = io.BytesIO(cont)
length = len(content.read())
content = io.BytesIO(cont)
client.put_object(
"image", image_name, content, length
)
image_url = f"http://localhost:9001/image/{image_name}"
data = Image(image_name=image_name[:-4], label="", image_url=image_url)
session.add(data)
session.commit() | [
"sqlmodel.create_engine",
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.Field"
] | [((299, 308), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (306, 308), False, 'from fastapi import FastAPI, File, UploadFile, Request\n'), ((319, 328), 'network.Network', 'Network', ([], {}), '()\n', (326, 328), False, 'from network import Network\n'), ((514, 549), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///image.db"""'], {}), "('sqlite:///image.db')\n", (527, 549), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((412, 449), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (417, 449), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((593, 608), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (600, 608), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((641, 654), 'sqlmodel.select', 'select', (['Image'], {}), '(Image)\n', (647, 654), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((837, 852), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (844, 852), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1187, 1202), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1194, 1202), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1979, 1994), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1986, 1994), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2024, 2101), 'minio.Minio', 'Minio', (['"""localhost:9001"""'], {'secure': '(False)', 'access_key': '"""<KEY>"""', 'secret_key': '"""<KEY>"""'}), "('localhost:9001', secure=False, access_key='<KEY>', secret_key='<KEY>')\n", (2029, 2101), False, 'from minio import Minio\n'), ((1544, 1559), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1557, 1559), False, 'import torch\n'), ((1622, 1657), 'torch.nn.functional.softmax', 
'torch.nn.functional.softmax', (['result'], {}), '(result)\n', (1649, 1657), False, 'import torch\n'), ((2286, 2302), 'io.BytesIO', 'io.BytesIO', (['cont'], {}), '(cont)\n', (2296, 2302), False, 'import io\n'), ((2366, 2382), 'io.BytesIO', 'io.BytesIO', (['cont'], {}), '(cont)\n', (2376, 2382), False, 'import io\n'), ((885, 898), 'sqlmodel.select', 'select', (['Image'], {}), '(Image)\n', (891, 898), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1235, 1248), 'sqlmodel.select', 'select', (['Image'], {}), '(Image)\n', (1241, 1248), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1410, 1446), 'requests.get', 'requests.get', (['image_url'], {'stream': '(True)'}), '(image_url, stream=True)\n', (1422, 1446), False, 'import requests\n'), ((1466, 1509), 'torchvision.transforms.functional.pil_to_tensor', 'tv.transforms.functional.pil_to_tensor', (['img'], {}), '(img)\n', (1504, 1509), True, 'import torchvision as tv\n')] |
from typing import List
from app.database import get_session
from app.models import Medication, MedicationUpdate
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import select
# All endpoints below are mounted under /medications and grouped under the
# "medications" tag in the generated OpenAPI docs.
router = APIRouter(prefix="/medications", tags=["medications"])
@router.post("", response_model=Medication)
async def create_medication(
    *, med: Medication, session: AsyncSession = Depends(get_session)
) -> Medication:
    """Persist a new medication and return it with DB-populated fields."""
    record = Medication.from_orm(med)
    session.add(record)
    await session.commit()
    await session.refresh(record)
    return record
@router.get("/{medication_id}", response_model=Medication)
async def retrieve_medication(
    *, medication_id: str, session: AsyncSession = Depends(get_session)
) -> Medication:
    """Fetch a single medication by primary key; 404 when no row matches."""
    query = select(Medication).where(Medication.id == medication_id)
    medication = (await session.execute(query)).scalar_one_or_none()
    if medication is None:
        raise HTTPException(
            status_code=404, detail=f"Medication {medication_id} not found"
        )
    return medication
@router.patch("/{medication_id}", response_model=Medication)
async def update_medication(
    *,
    medication_id: str,
    patch: MedicationUpdate,
    session: AsyncSession = Depends(get_session),
) -> Medication:
    """Apply a partial update to a medication; 404 when no row matches.

    Only fields the client actually sent are written (exclude_unset=True),
    so omitted columns keep their current values.
    """
    query = select(Medication).where(Medication.id == medication_id)
    medication = (await session.execute(query)).scalar_one_or_none()
    if medication is None:
        raise HTTPException(
            status_code=404, detail=f"Medication {medication_id} not found"
        )
    for field, new_value in patch.dict(exclude_unset=True).items():
        setattr(medication, field, new_value)
    session.add(medication)
    await session.commit()
    await session.refresh(medication)
    return medication
@router.post("/{medication_id}")
async def delete_medication(
    *, medication_id: str, session: AsyncSession = Depends(get_session)
):
    """Delete a medication by id; returns {"ok": True} on success, 404 otherwise.

    NOTE(review): registered as POST rather than DELETE — presumably for a
    client that cannot send DELETE; worth confirming before changing.
    """
    query = select(Medication).where(Medication.id == medication_id)
    medication = (await session.execute(query)).scalar_one_or_none()
    if medication is None:
        raise HTTPException(
            status_code=404, detail=f"Medication {medication_id} not found"
        )
    await session.delete(medication)
    await session.commit()
    return {"ok": True}
@router.get("", response_model=List[Medication])
async def list_medications(
    *, session: AsyncSession = Depends(get_session)
) -> List[Medication]:
    """Return every medication row in the table."""
    rows = await session.execute(select(Medication))
    return rows.scalars().all()
| [
"sqlmodel.select"
] | [((254, 308), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/medications"""', 'tags': "['medications']"}), "(prefix='/medications', tags=['medications'])\n", (263, 308), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((432, 452), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (439, 452), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((487, 511), 'app.models.Medication.from_orm', 'Medication.from_orm', (['med'], {}), '(med)\n', (506, 511), False, 'from app.models import Medication, MedicationUpdate\n'), ((770, 790), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (777, 790), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1300, 1320), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1307, 1320), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1989, 2009), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1996, 2009), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2501, 2521), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2508, 2521), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((997, 1075), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""Medication {medication_id} not found"""'}), "(status_code=404, detail=f'Medication {medication_id} not found')\n", (1010, 1075), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((1528, 1606), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""Medication {medication_id} not found"""'}), "(status_code=404, detail=f'Medication {medication_id} not found')\n", (1541, 1606), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2202, 2280), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': 'f"""Medication {medication_id} not found"""'}), 
"(status_code=404, detail=f'Medication {medication_id} not found')\n", (2215, 2280), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2580, 2598), 'sqlmodel.select', 'select', (['Medication'], {}), '(Medication)\n', (2586, 2598), False, 'from sqlmodel import select\n'), ((852, 870), 'sqlmodel.select', 'select', (['Medication'], {}), '(Medication)\n', (858, 870), False, 'from sqlmodel import select\n'), ((1383, 1401), 'sqlmodel.select', 'select', (['Medication'], {}), '(Medication)\n', (1389, 1401), False, 'from sqlmodel import select\n'), ((2057, 2075), 'sqlmodel.select', 'select', (['Medication'], {}), '(Medication)\n', (2063, 2075), False, 'from sqlmodel import select\n')] |
#!/usr/bin/env python3
from sqlalchemy.orm import sessionmaker
from sqlmodel import create_engine
from . import constants
# Prefer an explicitly configured database URL; otherwise fall back to a
# local postgres instance using the configured password.
postgres_url = (
    constants.POSTGRES_URL
    if constants.POSTGRES_URL is not None
    else f"postgresql://postgres:{constants.POSTGRES_PW}@localhost"
)
engine = create_engine(postgres_url, echo=True)
# Session factory: explicit commits, no autoflush, objects stay usable after commit.
SessionLocal = sessionmaker(
    bind=engine,
    autocommit=False,
    autoflush=False,
    expire_on_commit=False,
)
| [
"sqlmodel.create_engine"
] | [((379, 417), 'sqlmodel.create_engine', 'create_engine', (['postgres_url'], {'echo': '(True)'}), '(postgres_url, echo=True)\n', (392, 417), False, 'from sqlmodel import create_engine\n'), ((434, 522), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'autocommit': '(False)', 'autoflush': '(False)', 'expire_on_commit': '(False)', 'bind': 'engine'}), '(autocommit=False, autoflush=False, expire_on_commit=False,\n bind=engine)\n', (446, 522), False, 'from sqlalchemy.orm import sessionmaker\n')] |
import logging
from datetime import datetime
from typing import List
from sqlmodel import Session, select
from db import engine
from models import Social, User
def get_last_social() -> Social:
    """Return the newest Social row, or None when the table is empty."""
    with Session(engine) as session:
        newest_first = select(Social).order_by(Social.id.desc())
        result = session.exec(newest_first.limit(1)).one_or_none()
        logging.info(f"SELECT social row: {result}")
        return result
def get_previous_social() -> Social:
    """Return the second-newest Social row, or None if fewer than two exist."""
    with Session(engine) as session:
        newest_first = select(Social).order_by(Social.id.desc())
        result = session.exec(newest_first.offset(1).limit(1)).one_or_none()
        logging.info(f"SELECT previous social row: {result}")
        return result
def create_social(fb: int, ig: int, tw: int, sp: int, yt: int):
    """Insert one Social snapshot row stamped with the current local time."""
    dt_now = datetime.now().strftime("%Y%m%d_%H%M%S")
    logging.info(f"INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})")
    row = Social(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt)
    with Session(engine) as session:
        session.add(row)
        session.commit()
def create_user(telegram_id: int, username: str = None, first_name: str = None, last_name: str = None):
    """Insert a new Telegram user row."""
    logging.info(f"INSERT user: {first_name}")
    with Session(engine) as session:
        session.add(
            User(
                telegram_id=telegram_id,
                username=username,
                first_name=first_name,
                last_name=last_name,
            )
        )
        session.commit()
def get_user(telegram_id: int) -> User:
    """Look up a user by Telegram id; returns None when unknown."""
    with Session(engine) as session:
        by_telegram_id = select(User).where(User.telegram_id == telegram_id)
        result = session.exec(by_telegram_id).one_or_none()
        logging.info(f"SELECT user: {result}")
        return result
def get_all_users() -> List[User]:
    """Return every user row in the table."""
    with Session(engine) as session:
        result = session.exec(select(User)).fetchall()
        logging.info(f"SELECT all users: {result}")
        return result
| [
"sqlmodel.select",
"sqlmodel.Session"
] | [((364, 408), 'logging.info', 'logging.info', (['f"""SELECT social row: {result}"""'], {}), "(f'SELECT social row: {result}')\n", (376, 408), False, 'import logging\n'), ((643, 696), 'logging.info', 'logging.info', (['f"""SELECT previous social row: {result}"""'], {}), "(f'SELECT previous social row: {result}')\n", (655, 696), False, 'import logging\n'), ((839, 909), 'logging.info', 'logging.info', (['f"""INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})"""'], {}), "(f'INSERT social row ({dt_now},{fb},{ig},{tw},{sp},{yt})')\n", (851, 909), False, 'import logging\n'), ((927, 979), 'models.Social', 'Social', ([], {'dt': 'dt_now', 'fb': 'fb', 'ig': 'ig', 'tw': 'tw', 'sp': 'sp', 'yt': 'yt'}), '(dt=dt_now, fb=fb, ig=ig, tw=tw, sp=sp, yt=yt)\n', (933, 979), False, 'from models import Social, User\n'), ((1184, 1226), 'logging.info', 'logging.info', (['f"""INSERT user: {first_name}"""'], {}), "(f'INSERT user: {first_name}')\n", (1196, 1226), False, 'import logging\n'), ((1242, 1338), 'models.User', 'User', ([], {'telegram_id': 'telegram_id', 'username': 'username', 'first_name': 'first_name', 'last_name': 'last_name'}), '(telegram_id=telegram_id, username=username, first_name=first_name,\n last_name=last_name)\n', (1246, 1338), False, 'from models import Social, User\n'), ((1637, 1675), 'logging.info', 'logging.info', (['f"""SELECT user: {result}"""'], {}), "(f'SELECT user: {result}')\n", (1649, 1675), False, 'import logging\n'), ((1857, 1900), 'logging.info', 'logging.info', (['f"""SELECT all users: {result}"""'], {}), "(f'SELECT all users: {result}')\n", (1869, 1900), False, 'import logging\n'), ((206, 221), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (213, 221), False, 'from sqlmodel import Session, select\n'), ((475, 490), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (482, 490), False, 'from sqlmodel import Session, select\n'), ((989, 1004), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (996, 1004), False, 
'from sqlmodel import Session, select\n'), ((1344, 1359), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1351, 1359), False, 'from sqlmodel import Session, select\n'), ((1478, 1493), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1485, 1493), False, 'from sqlmodel import Session, select\n'), ((1740, 1755), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1747, 1755), False, 'from sqlmodel import Session, select\n'), ((1788, 1800), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1794, 1800), False, 'from sqlmodel import Session, select\n'), ((794, 808), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (806, 808), False, 'from datetime import datetime\n'), ((1526, 1538), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1532, 1538), False, 'from sqlmodel import Session, select\n'), ((278, 294), 'models.Social.id.desc', 'Social.id.desc', ([], {}), '()\n', (292, 294), False, 'from models import Social, User\n'), ((254, 268), 'sqlmodel.select', 'select', (['Social'], {}), '(Social)\n', (260, 268), False, 'from sqlmodel import Session, select\n'), ((547, 563), 'models.Social.id.desc', 'Social.id.desc', ([], {}), '()\n', (561, 563), False, 'from models import Social, User\n'), ((523, 537), 'sqlmodel.select', 'select', (['Social'], {}), '(Social)\n', (529, 537), False, 'from sqlmodel import Session, select\n')] |
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine
class Hero(SQLModel, table=True):
    """Table-backed model for heroes."""

    # None until the database assigns an autoincrement id on commit.
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    secret_name: str
    age: Optional[int] = None
# SQLite database file lives next to this script; echo=True logs emitted SQL.
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
    """Create all tables registered on SQLModel.metadata against the engine."""
    SQLModel.metadata.create_all(engine)
def create_heroes():
    """Insert three demo heroes, printing the objects at each lifecycle stage
    (before add, after add, after commit, after refresh, after session close)
    to show when the Session populates database-assigned fields."""
    heroes = [
        Hero(name="Deadpond", secret_name="<NAME>"),
        Hero(name="Spider-Boy", secret_name="<NAME>"),
        Hero(name="Rusty-Man", secret_name="<NAME>", age=48),
    ]

    def show(header):
        # Print a stage header followed by each hero's current repr.
        print(header)
        for i, hero in enumerate(heroes, start=1):
            print(f"Hero {i}:", hero)

    show("Before interacting with the database")
    with Session(engine) as session:
        for hero in heroes:
            session.add(hero)
        show("After adding to the session")
        session.commit()
        show("After committing the session")
        print("After committing the session, show IDs")
        for i, hero in enumerate(heroes, start=1):
            print(f"Hero {i} ID:", hero.id)
        print("After committing the session, show names")
        for i, hero in enumerate(heroes, start=1):
            print(f"Hero {i} name:", hero.name)
        for hero in heroes:
            session.refresh(hero)
        show("After refreshing the heroes")
    show("After the session closes")
def main():
    """Create the schema, then insert the demo heroes."""
    create_db_and_tables()
    create_heroes()
if __name__ == "__main__":
    main()
| [
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((343, 379), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (356, 379), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((150, 187), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (155, 187), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((414, 450), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (442, 450), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((804, 819), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (811, 819), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n')] |
import uuid
from datetime import datetime
from typing import Optional
from pydantic import UUID4
from sqlmodel import Field, SQLModel
class DocumentInput(SQLModel):
    """Input shape for a document; also the base of the table model below."""

    # A random UUID4 is generated per instance when the client omits id.
    id: UUID4 = Field(default_factory=uuid.uuid4, primary_key=True)
    content: str
class Document(DocumentInput, table=True):
    """Table-backed document row with creation/update timestamps."""

    created_at: datetime = Field(default_factory=datetime.utcnow)
    # NOTE(review): default_factory only fires at object creation — updated_at
    # is not refreshed automatically on UPDATE; confirm callers set it.
    updated_at: datetime = Field(default_factory=datetime.utcnow)
    category: Optional[str] = Field(default=None)
| [
"sqlmodel.Field"
] | [((184, 235), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'uuid.uuid4', 'primary_key': '(True)'}), '(default_factory=uuid.uuid4, primary_key=True)\n', (189, 235), False, 'from sqlmodel import Field, SQLModel\n'), ((325, 363), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'datetime.utcnow'}), '(default_factory=datetime.utcnow)\n', (330, 363), False, 'from sqlmodel import Field, SQLModel\n'), ((391, 429), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'datetime.utcnow'}), '(default_factory=datetime.utcnow)\n', (396, 429), False, 'from sqlmodel import Field, SQLModel\n'), ((460, 479), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (465, 479), False, 'from sqlmodel import Field, SQLModel\n')] |
"""record model
Revision ID: 6c2a16b349b1
Revises: 2bafd0d01ae2
Create Date: 2021-11-20 18:51:45.427996
"""
import sqlalchemy as sa
import sqlmodel
import sqlmodel.sql.sqltypes
from alembic import op
# revision identifiers, used by Alembic.
revision = "6c2a16b349b1"
down_revision = "2bafd0d01ae2"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the ``records`` table (timestamps, FK links to problem/problem_set/user,
    judge result fields) and its secondary indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "records",
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        # FKs are nullable with ON DELETE SET NULL so records survive parent deletion.
        sa.Column("problem_set_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("problem_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("status", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("score", sa.Integer(), nullable=True),
        sa.Column("time_ms", sa.Integer(), nullable=True),
        sa.Column("memory_kb", sa.Integer(), nullable=True),
        sa.Column("commit_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.ForeignKeyConstraint(["problem_id"], ["problems.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(
            ["problem_set_id"], ["problem_sets.id"], ondelete="SET NULL"
        ),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        op.f("ix_records_commit_id"), "records", ["commit_id"], unique=False
    )
    op.create_index(
        op.f("ix_records_created_at"), "records", ["created_at"], unique=False
    )
    op.create_index(op.f("ix_records_id"), "records", ["id"], unique=False)
    op.create_index(
        op.f("ix_records_memory_kb"), "records", ["memory_kb"], unique=False
    )
    op.create_index(op.f("ix_records_score"), "records", ["score"], unique=False)
    op.create_index(op.f("ix_records_status"), "records", ["status"], unique=False)
    op.create_index(op.f("ix_records_time_ms"), "records", ["time_ms"], unique=False)
    op.create_index(
        op.f("ix_records_updated_at"), "records", ["updated_at"], unique=False
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the ``records`` indexes and table (exact reverse of upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_records_updated_at"), table_name="records")
    op.drop_index(op.f("ix_records_time_ms"), table_name="records")
    op.drop_index(op.f("ix_records_status"), table_name="records")
    op.drop_index(op.f("ix_records_score"), table_name="records")
    op.drop_index(op.f("ix_records_memory_kb"), table_name="records")
    op.drop_index(op.f("ix_records_id"), table_name="records")
    op.drop_index(op.f("ix_records_created_at"), table_name="records")
    op.drop_index(op.f("ix_records_commit_id"), table_name="records")
    op.drop_table("records")
    # ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.sql.sqltypes.AutoString"
] | [((3269, 3293), 'alembic.op.drop_table', 'op.drop_table', (['"""records"""'], {}), "('records')\n", (3282, 3293), False, 'from alembic import op\n'), ((1521, 1598), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['problem_id']", "['problems.id']"], {'ondelete': '"""SET NULL"""'}), "(['problem_id'], ['problems.id'], ondelete='SET NULL')\n", (1544, 1598), True, 'import sqlalchemy as sa\n'), ((1608, 1698), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['problem_set_id']", "['problem_sets.id']"], {'ondelete': '"""SET NULL"""'}), "(['problem_set_id'], ['problem_sets.id'], ondelete=\n 'SET NULL')\n", (1631, 1698), True, 'import sqlalchemy as sa\n'), ((1725, 1796), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['users.id']"], {'ondelete': '"""SET NULL"""'}), "(['user_id'], ['users.id'], ondelete='SET NULL')\n", (1748, 1796), True, 'import sqlalchemy as sa\n'), ((1806, 1835), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1829, 1835), True, 'import sqlalchemy as sa\n'), ((1872, 1900), 'alembic.op.f', 'op.f', (['"""ix_records_commit_id"""'], {}), "('ix_records_commit_id')\n", (1876, 1900), False, 'from alembic import op\n'), ((1976, 2005), 'alembic.op.f', 'op.f', (['"""ix_records_created_at"""'], {}), "('ix_records_created_at')\n", (1980, 2005), False, 'from alembic import op\n'), ((2073, 2094), 'alembic.op.f', 'op.f', (['"""ix_records_id"""'], {}), "('ix_records_id')\n", (2077, 2094), False, 'from alembic import op\n'), ((2158, 2186), 'alembic.op.f', 'op.f', (['"""ix_records_memory_kb"""'], {}), "('ix_records_memory_kb')\n", (2162, 2186), False, 'from alembic import op\n'), ((2253, 2277), 'alembic.op.f', 'op.f', (['"""ix_records_score"""'], {}), "('ix_records_score')\n", (2257, 2277), False, 'from alembic import op\n'), ((2335, 2360), 'alembic.op.f', 'op.f', (['"""ix_records_status"""'], {}), "('ix_records_status')\n", (2339, 2360), False, 'from 
alembic import op\n'), ((2419, 2445), 'alembic.op.f', 'op.f', (['"""ix_records_time_ms"""'], {}), "('ix_records_time_ms')\n", (2423, 2445), False, 'from alembic import op\n'), ((2514, 2543), 'alembic.op.f', 'op.f', (['"""ix_records_updated_at"""'], {}), "('ix_records_updated_at')\n", (2518, 2543), False, 'from alembic import op\n'), ((2737, 2766), 'alembic.op.f', 'op.f', (['"""ix_records_updated_at"""'], {}), "('ix_records_updated_at')\n", (2741, 2766), False, 'from alembic import op\n'), ((2808, 2834), 'alembic.op.f', 'op.f', (['"""ix_records_time_ms"""'], {}), "('ix_records_time_ms')\n", (2812, 2834), False, 'from alembic import op\n'), ((2876, 2901), 'alembic.op.f', 'op.f', (['"""ix_records_status"""'], {}), "('ix_records_status')\n", (2880, 2901), False, 'from alembic import op\n'), ((2943, 2967), 'alembic.op.f', 'op.f', (['"""ix_records_score"""'], {}), "('ix_records_score')\n", (2947, 2967), False, 'from alembic import op\n'), ((3009, 3037), 'alembic.op.f', 'op.f', (['"""ix_records_memory_kb"""'], {}), "('ix_records_memory_kb')\n", (3013, 3037), False, 'from alembic import op\n'), ((3079, 3100), 'alembic.op.f', 'op.f', (['"""ix_records_id"""'], {}), "('ix_records_id')\n", (3083, 3100), False, 'from alembic import op\n'), ((3142, 3171), 'alembic.op.f', 'op.f', (['"""ix_records_created_at"""'], {}), "('ix_records_created_at')\n", (3146, 3171), False, 'from alembic import op\n'), ((3213, 3241), 'alembic.op.f', 'op.f', (['"""ix_records_commit_id"""'], {}), "('ix_records_commit_id')\n", (3217, 3241), False, 'from alembic import op\n'), ((528, 554), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (539, 554), True, 'import sqlalchemy as sa\n'), ((726, 752), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (737, 752), True, 'import sqlalchemy as sa\n'), ((903, 931), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (929, 931), False, 'import sqlmodel\n'), 
((981, 1009), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1007, 1009), False, 'import sqlmodel\n'), ((1056, 1084), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1082, 1084), False, 'import sqlmodel\n'), ((1126, 1154), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1152, 1154), False, 'import sqlmodel\n'), ((1201, 1235), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1233, 1235), False, 'import sqlmodel\n'), ((1280, 1292), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1290, 1292), True, 'import sqlalchemy as sa\n'), ((1339, 1351), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1349, 1351), True, 'import sqlalchemy as sa\n'), ((1400, 1412), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1410, 1412), True, 'import sqlalchemy as sa\n'), ((1461, 1495), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1493, 1495), False, 'import sqlmodel\n'), ((583, 628), 'sqlalchemy.text', 'sa.text', (['"""TIMEZONE(\'utc\', CURRENT_TIMESTAMP)"""'], {}), '("TIMEZONE(\'utc\', CURRENT_TIMESTAMP)")\n', (590, 628), True, 'import sqlalchemy as sa\n'), ((781, 826), 'sqlalchemy.text', 'sa.text', (['"""TIMEZONE(\'utc\', CURRENT_TIMESTAMP)"""'], {}), '("TIMEZONE(\'utc\', CURRENT_TIMESTAMP)")\n', (788, 826), True, 'import sqlalchemy as sa\n')] |
import uuid
from datetime import datetime
from typing import Optional
from sqlalchemy import UniqueConstraint
from sqlmodel import Field, Relationship
from pydantic_factories import ModelFactory, Use
from faker import Faker
from api.db.models.base import BaseModel, BaseTable
class StudentBase(BaseModel):
    """Shared fields for the student line-of-business record."""

    name: str = Field(index=True, nullable=False)
    sandbox_id: uuid.UUID = None
    # Faber line-of-business data backing student degree credentials.
    degree: Optional[str] = Field(default=None, nullable=True)
    age: Optional[int] = Field(default=None, nullable=True)
    student_id: Optional[str] = Field(default=None, nullable=True)
    date: Optional[datetime] = Field(default=None, nullable=True)
    # Invitation tracking: lets this LOB follow the entity's state in Traction.
    invitation_state: Optional[str] = Field(default=None, nullable=True)
    connection_id: Optional[uuid.UUID] = Field(default=None)
    # Links this student to their Traction tenant. This would not live in real
    # LOB data — the entity this record represents would track it in their own
    # system; it is kept here only for the sandbox/demo.
    wallet_id: Optional[uuid.UUID] = None
    alias: Optional[str] = Field(default=None, nullable=True)
class Student(StudentBase, BaseTable, table=True):
    """Table model; (name, sandbox_id) pairs are unique."""

    __table_args__ = (UniqueConstraint("name", "sandbox_id"),)
    sandbox: Optional["Sandbox"] = Relationship(back_populates="students")  # noqa: F821
    sandbox_id: uuid.UUID = Field(foreign_key="sandbox.id")
    # NOTE(review): annotated non-optional yet defaults to None / nullable in
    # the DB — consider Optional[uuid.UUID] if NULL wallet_id rows are expected.
    wallet_id: uuid.UUID = Field(default=None, nullable=True)
class StudentCreate(StudentBase):
    """Creation payload: exactly the shared base fields, nothing extra."""

    pass
class StudentRead(StudentBase):
    """Read shape: base fields plus server-generated id and timestamps."""

    id: uuid.UUID
    created_at: datetime
    updated_at: datetime
    degree: Optional[str] = None
    age: Optional[int] = None
    student_id: Optional[str] = None
    date: Optional[datetime] = None
class StudentUpdate(StudentBase):
    """Update payload: same as base but name becomes optional."""

    name: Optional[str] = None
# FACTORIES
class StudentCreateFactory(ModelFactory):
    """pydantic-factories factory for StudentCreate: Faker-generated name,
    every other optional field explicitly None."""

    __model__ = StudentCreate
    name = Use(Faker().name)
    degree = None
    age = None
    student_id = None
    date = None
    wallet_id = None
    alias = None
    invitation_state = None
    connection_id = None
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((325, 358), 'sqlmodel.Field', 'Field', ([], {'index': '(True)', 'nullable': '(False)'}), '(index=True, nullable=False)\n', (330, 358), False, 'from sqlmodel import Field, Relationship\n'), ((486, 520), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (491, 520), False, 'from sqlmodel import Field, Relationship\n'), ((546, 580), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (551, 580), False, 'from sqlmodel import Field, Relationship\n'), ((613, 647), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (618, 647), False, 'from sqlmodel import Field, Relationship\n'), ((679, 713), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (684, 713), False, 'from sqlmodel import Field, Relationship\n'), ((848, 882), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (853, 882), False, 'from sqlmodel import Field, Relationship\n'), ((924, 943), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (929, 943), False, 'from sqlmodel import Field, Relationship\n'), ((1235, 1269), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, nullable=True)\n', (1240, 1269), False, 'from sqlmodel import Field, Relationship\n'), ((1422, 1461), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""students"""'}), "(back_populates='students')\n", (1434, 1461), False, 'from sqlmodel import Field, Relationship\n'), ((1505, 1536), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""sandbox.id"""'}), "(foreign_key='sandbox.id')\n", (1510, 1536), False, 'from sqlmodel import Field, Relationship\n'), ((1564, 1598), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(True)'}), '(default=None, 
nullable=True)\n', (1569, 1598), False, 'from sqlmodel import Field, Relationship\n'), ((1345, 1383), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""name"""', '"""sandbox_id"""'], {}), "('name', 'sandbox_id')\n", (1361, 1383), False, 'from sqlalchemy import UniqueConstraint\n'), ((2053, 2060), 'faker.Faker', 'Faker', ([], {}), '()\n', (2058, 2060), False, 'from faker import Faker\n')] |
from typing import TYPE_CHECKING, List, Optional
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import ARRAY
from sqlmodel import AutoString, Field, Relationship, SQLModel
if TYPE_CHECKING:
from .application import Application, ApplicationList
class SchoolBase(SQLModel):
    """Shared school fields; name variants are stored as Postgres ARRAY columns."""

    name: str
    # NOTE(review): default=[] relies on pydantic copying the mutable default
    # per instance — confirm instances never share the list.
    abbreviations: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(AutoString()), nullable=False),
    )
    alternatives: List[str] = Field(
        default=[],
        sa_column=Column(ARRAY(AutoString()), nullable=False),
    )
class School(SchoolBase, table=True):
    """Table model for schools, one-to-many with applications."""

    __tablename__ = "schools"
    # String primary key; None until a value is assigned.
    id: Optional[str] = Field(default=None, primary_key=True, nullable=False)
    needs_review: bool = False
    applications: List["Application"] = Relationship(back_populates="school")
class SchoolCreate(SchoolBase):
    """Creation payload: exactly the shared base fields."""

    pass
class SchoolList(SQLModel):
    """Compact shape for listing schools."""

    id: str
    name: str
    needs_review: bool
class SchoolRead(SchoolBase):
    """Detail shape: base fields plus id, review flag, and applications."""

    id: str
    needs_review: bool
    applications: List["ApplicationList"]
class SchoolUpdate(SQLModel):
    """Partial-update payload: every field optional."""

    name: Optional[str]
    needs_review: Optional[bool]
    abbreviations: Optional[List[str]]
    alternatives: Optional[List[str]]
| [
"sqlmodel.Relationship",
"sqlmodel.AutoString",
"sqlmodel.Field"
] | [((662, 715), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'nullable': '(False)'}), '(default=None, primary_key=True, nullable=False)\n', (667, 715), False, 'from sqlmodel import AutoString, Field, Relationship, SQLModel\n'), ((788, 825), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""school"""'}), "(back_populates='school')\n", (800, 825), False, 'from sqlmodel import AutoString, Field, Relationship, SQLModel\n'), ((403, 415), 'sqlmodel.AutoString', 'AutoString', ([], {}), '()\n', (413, 415), False, 'from sqlmodel import AutoString, Field, Relationship, SQLModel\n'), ((529, 541), 'sqlmodel.AutoString', 'AutoString', ([], {}), '()\n', (539, 541), False, 'from sqlmodel import AutoString, Field, Relationship, SQLModel\n')] |
#!/usr/bin/env python3
import asyncio
import datetime
import json
import os
import time
from argparse import ArgumentParser
from collections.abc import Sequence
import httpx
import uvloop
from dotenv import load_dotenv
from loguru import logger
from sqlmodel import Session, create_engine
from steam2sqlite import APPIDS_URL, BATCH_SIZE, navigator, utils
from steam2sqlite.handler import (
get_appids_from_db,
get_apps_achievements,
get_apps_data,
get_error_appids,
store_apps_achievements,
store_apps_data,
)
load_dotenv()
# Optional path to a locally cached appid JSON dump; when unset the Steam API is used.
APPIDS_FILE = os.getenv("APPIDS_FILE")
sqlite_file_name = "database.db"
SQLITE_URL = f"sqlite:///{sqlite_file_name}"
async def get_appids_from_steam(local_file: str | None = None) -> dict[int, str]:
    """Return a mapping of Steam appid -> app name.

    Args:
        local_file: optional path to a previously downloaded appid JSON dump;
            when None (or empty) the list is fetched from the Steam web API.

    Raises:
        navigator.NavigatorError: if the Steam API request fails (logged, then
            re-raised).
    """
    # Fix: parameter was annotated `str = None` (implicit Optional, invalid
    # under PEP 484); the file already uses `X | None` unions, so use that.
    if local_file:
        logger.info(f"Loading appids from local file: {local_file}")
        with open(local_file) as steam_appids_fp:
            appid_data = json.load(steam_appids_fp)
    else:
        logger.info("Loading appids from Steam API")
        try:
            async with httpx.AsyncClient() as client:
                resp = await navigator.get(client, APPIDS_URL)
                appid_data = resp.json()
                # Brief pause to stay polite to the Steam API.
                await asyncio.sleep(1)
        except navigator.NavigatorError:
            logger.error("Error getting the appids from Steam")
            raise
    return {item["appid"]: item["name"] for item in appid_data["applist"]["apps"]}
def main(argv: Sequence[str] | None = None) -> int:
    """Sync the local SQLite database with Steam app data.

    Fetches the full appid list, decides which apps are missing or stale,
    then downloads and stores app data (and achievements) in batches.
    An optional ``--limit`` flag stops the run after that many minutes.

    Returns 0 on completion.
    """
    parser = ArgumentParser()
    parser.add_argument(
        "-l",
        "--limit",
        type=float,
        default=None,
        nargs="?",
        const=1,
        help="limit runtime (minutes)",
    )
    args = parser.parse_args(argv)
    logger.info("Starting...")
    start_time = time.monotonic()
    uvloop.install()
    engine = create_engine(SQLITE_URL, echo=False)
    # From steam api, dict of: {appids: names}
    steam_appids_names = asyncio.run(get_appids_from_steam(APPIDS_FILE))
    with Session(engine) as session:
        # query db for all appids we already have, sort by last_modified
        db_appids_updated = get_appids_from_db(session)
        # identify any missing appids -- these go on the top of our stack to process
        missing_appids = set(steam_appids_names.keys()) - {
            appid for appid, _ in db_appids_updated
        }
        # remove any appids that have been modified recently
        # (only apps last updated more than 3 days ago are refreshed)
        db_appids = [
            appid
            for appid, updated in db_appids_updated
            if ((datetime.datetime.utcnow().date() - updated.date()).days > 3)
        ]
        appids_missing_and_older = list(missing_appids) + db_appids
        # remove any appids that have been flagged as errors from previous runs
        error_appids = get_error_appids(session)
        appids_to_process = [
            appid
            for appid in appids_missing_and_older
            if appid not in set(error_appids)
        ]
        logger.info("Loading app data from Steam API and saving to db")
        # grouper pads the last batch with None fillvalues.
        for appids in utils.grouper(appids_to_process, BATCH_SIZE, fillvalue=None):
            apps_data = get_apps_data(session, steam_appids_names, appids)
            apps = store_apps_data(session, steam_appids_names, apps_data)
            apps_with_achievements = [app for app in apps if app.achievements_total > 0]
            if apps_with_achievements:
                # delay_by rate-limits the achievements fetch in proportion
                # to how many apps are queried.
                apps_achievements_data = utils.delay_by(len(apps_with_achievements))(
                    get_apps_achievements
                )(apps_with_achievements)
                store_apps_achievements(session, apps_achievements_data)
            if args.limit and (time.monotonic() - start_time) / 60 > args.limit:
                logger.info(f"Limit ({args.limit} min) reached shutting down...")
                break
    return 0
# Exit with main()'s return code when run as a script.
if __name__ == "__main__":
    exit(main())
| [
"sqlmodel.create_engine",
"sqlmodel.Session"
] | [((538, 551), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (549, 551), False, 'from dotenv import load_dotenv\n'), ((567, 591), 'os.getenv', 'os.getenv', (['"""APPIDS_FILE"""'], {}), "('APPIDS_FILE')\n", (576, 591), False, 'import os\n'), ((1478, 1494), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (1492, 1494), False, 'from argparse import ArgumentParser\n'), ((1717, 1743), 'loguru.logger.info', 'logger.info', (['"""Starting..."""'], {}), "('Starting...')\n", (1728, 1743), False, 'from loguru import logger\n'), ((1762, 1778), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (1776, 1778), False, 'import time\n'), ((1784, 1800), 'uvloop.install', 'uvloop.install', ([], {}), '()\n', (1798, 1800), False, 'import uvloop\n'), ((1815, 1852), 'sqlmodel.create_engine', 'create_engine', (['SQLITE_URL'], {'echo': '(False)'}), '(SQLITE_URL, echo=False)\n', (1828, 1852), False, 'from sqlmodel import Session, create_engine\n'), ((775, 835), 'loguru.logger.info', 'logger.info', (['f"""Loading appids from local file: {local_file}"""'], {}), "(f'Loading appids from local file: {local_file}')\n", (786, 835), False, 'from loguru import logger\n'), ((956, 1000), 'loguru.logger.info', 'logger.info', (['"""Loading appids from Steam API"""'], {}), "('Loading appids from Steam API')\n", (967, 1000), False, 'from loguru import logger\n'), ((1984, 1999), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1991, 1999), False, 'from sqlmodel import Session, create_engine\n'), ((2114, 2141), 'steam2sqlite.handler.get_appids_from_db', 'get_appids_from_db', (['session'], {}), '(session)\n', (2132, 2141), False, 'from steam2sqlite.handler import get_appids_from_db, get_apps_achievements, get_apps_data, get_error_appids, store_apps_achievements, store_apps_data\n'), ((2766, 2791), 'steam2sqlite.handler.get_error_appids', 'get_error_appids', (['session'], {}), '(session)\n', (2782, 2791), False, 'from steam2sqlite.handler import get_appids_from_db, 
get_apps_achievements, get_apps_data, get_error_appids, store_apps_achievements, store_apps_data\n'), ((2955, 3018), 'loguru.logger.info', 'logger.info', (['"""Loading app data from Steam API and saving to db"""'], {}), "('Loading app data from Steam API and saving to db')\n", (2966, 3018), False, 'from loguru import logger\n'), ((3042, 3102), 'steam2sqlite.utils.grouper', 'utils.grouper', (['appids_to_process', 'BATCH_SIZE'], {'fillvalue': 'None'}), '(appids_to_process, BATCH_SIZE, fillvalue=None)\n', (3055, 3102), False, 'from steam2sqlite import APPIDS_URL, BATCH_SIZE, navigator, utils\n'), ((911, 937), 'json.load', 'json.load', (['steam_appids_fp'], {}), '(steam_appids_fp)\n', (920, 937), False, 'import json\n'), ((3129, 3179), 'steam2sqlite.handler.get_apps_data', 'get_apps_data', (['session', 'steam_appids_names', 'appids'], {}), '(session, steam_appids_names, appids)\n', (3142, 3179), False, 'from steam2sqlite.handler import get_appids_from_db, get_apps_achievements, get_apps_data, get_error_appids, store_apps_achievements, store_apps_data\n'), ((3199, 3254), 'steam2sqlite.handler.store_apps_data', 'store_apps_data', (['session', 'steam_appids_names', 'apps_data'], {}), '(session, steam_appids_names, apps_data)\n', (3214, 3254), False, 'from steam2sqlite.handler import get_appids_from_db, get_apps_achievements, get_apps_data, get_error_appids, store_apps_achievements, store_apps_data\n'), ((1037, 1056), 'httpx.AsyncClient', 'httpx.AsyncClient', ([], {}), '()\n', (1054, 1056), False, 'import httpx\n'), ((1186, 1202), 'asyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1199, 1202), False, 'import asyncio\n'), ((1256, 1307), 'loguru.logger.error', 'logger.error', (['"""Error getting the appids from Steam"""'], {}), "('Error getting the appids from Steam')\n", (1268, 1307), False, 'from loguru import logger\n'), ((3570, 3626), 'steam2sqlite.handler.store_apps_achievements', 'store_apps_achievements', (['session', 'apps_achievements_data'], {}), '(session, 
apps_achievements_data)\n', (3593, 3626), False, 'from steam2sqlite.handler import get_appids_from_db, get_apps_achievements, get_apps_data, get_error_appids, store_apps_achievements, store_apps_data\n'), ((3725, 3790), 'loguru.logger.info', 'logger.info', (['f"""Limit ({args.limit} min) reached shutting down..."""'], {}), "(f'Limit ({args.limit} min) reached shutting down...')\n", (3736, 3790), False, 'from loguru import logger\n'), ((1097, 1130), 'steam2sqlite.navigator.get', 'navigator.get', (['client', 'APPIDS_URL'], {}), '(client, APPIDS_URL)\n', (1110, 1130), False, 'from steam2sqlite import APPIDS_URL, BATCH_SIZE, navigator, utils\n'), ((3659, 3675), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (3673, 3675), False, 'import time\n'), ((2521, 2547), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2545, 2547), False, 'import datetime\n')] |
from datetime import date, datetime
from typing import Any, Dict, Optional
from uuid import UUID, uuid4
from pydantic.class_validators import root_validator
from sqlmodel import Column, Enum, Field, SQLModel
from sqlmodel.sql.sqltypes import GUID
from ...utils.date import now_datetime
from ..constants import OperationType, PaymentType, SaleType
class BaseBalance(SQLModel):
    # Signed amount of the operation (sign is normalized by CreateBalance).
    value: float = Field(description="Value of operation")
    # Stored as a DB enum column; NOT NULL at the schema level.
    operation: OperationType = Field(
        description="Type of operation", sa_column=Column(Enum(OperationType), nullable=False)
    )
    # Free-text description; must be non-empty.
    description: str = Field(description="Description of operation", min_length=1)
    # Creation timestamp, filled in at instantiation time.
    created_at: datetime = Field(default_factory=now_datetime)
class CreateBalance(BaseBalance):
    @root_validator()
    def normalize_value(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Force the sign of ``value`` to match the operation type.

        Payment operations are stored as negative amounts and sale
        operations as positive ones, regardless of the sign supplied by
        the caller. Returns the (possibly mutated) values dict, as
        required by pydantic root validators — the original annotation
        of ``-> float`` was incorrect.
        """
        operation_type = values.get("operation")
        value = values.get("value")
        # NOTE(review): a value of exactly 0 is falsy and skips
        # normalization here — presumably harmless, but confirm.
        if not operation_type or not value:
            return values
        # Payments must be negative.
        if any(operation_type.name == payment_type.name for payment_type in PaymentType) and value > 0:
            values["value"] = value * -1
        # Sales must be positive.
        if any(operation_type.name == sale_type.name for sale_type in SaleType) and value < 0:
            values["value"] = value * -1
        return values
class QueryBalance(SQLModel):
    # Optional date-range bounds for filtering balance queries.
    start_date: Optional[date] = Field(description="Initial date for query")
    end_date: Optional[date] = Field(description="End date for query")
class Balance(BaseBalance, table=True):
    # Primary key: client-generated UUID4 stored via the GUID type.
    id: UUID = Field(default_factory=uuid4, sa_column=Column("id", GUID(), primary_key=True))
    # FK to the owning user row.
    owner_id: UUID = Field(description="User ID that owns the balance", foreign_key="users.id")
| [
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.Enum",
"sqlmodel.Field"
] | [((399, 438), 'sqlmodel.Field', 'Field', ([], {'description': '"""Value of operation"""'}), "(description='Value of operation')\n", (404, 438), False, 'from sqlmodel import Column, Enum, Field, SQLModel\n'), ((601, 660), 'sqlmodel.Field', 'Field', ([], {'description': '"""Description of operation"""', 'min_length': '(1)'}), "(description='Description of operation', min_length=1)\n", (606, 660), False, 'from sqlmodel import Column, Enum, Field, SQLModel\n'), ((688, 723), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'now_datetime'}), '(default_factory=now_datetime)\n', (693, 723), False, 'from sqlmodel import Column, Enum, Field, SQLModel\n'), ((765, 781), 'pydantic.class_validators.root_validator', 'root_validator', ([], {}), '()\n', (779, 781), False, 'from pydantic.class_validators import root_validator\n'), ((1372, 1415), 'sqlmodel.Field', 'Field', ([], {'description': '"""Initial date for query"""'}), "(description='Initial date for query')\n", (1377, 1415), False, 'from sqlmodel import Column, Enum, Field, SQLModel\n'), ((1447, 1486), 'sqlmodel.Field', 'Field', ([], {'description': '"""End date for query"""'}), "(description='End date for query')\n", (1452, 1486), False, 'from sqlmodel import Column, Enum, Field, SQLModel\n'), ((1644, 1718), 'sqlmodel.Field', 'Field', ([], {'description': '"""User ID that owns the balance"""', 'foreign_key': '"""users.id"""'}), "(description='User ID that owns the balance', foreign_key='users.id')\n", (1649, 1718), False, 'from sqlmodel import Column, Enum, Field, SQLModel\n'), ((535, 554), 'sqlmodel.Enum', 'Enum', (['OperationType'], {}), '(OperationType)\n', (539, 554), False, 'from sqlmodel import Column, Enum, Field, SQLModel\n'), ((1596, 1602), 'sqlmodel.sql.sqltypes.GUID', 'GUID', ([], {}), '()\n', (1600, 1602), False, 'from sqlmodel.sql.sqltypes import GUID\n')] |
import sys
import os
import click
from app import config
from sqlmodel import Session
from sqlmodel import create_engine
from app.models.server import Server
from sqlmodel import select
# Resolve the settings class named by $API_ENVIRONMENT from app.config
# (defaults to the Testing settings).
API_ENVIRONMENT = os.environ.get("API_ENVIRONMENT", "Testing")
settings = getattr(sys.modules[__name__].config, API_ENVIRONMENT)
engine = create_engine(settings.DATABASE_URI)
@click.group()
@click.pass_context
def main(ctx):
    """Root command group for the CLI."""
    # Bug fix: @click.pass_context injects the Context as the first
    # positional argument, so the callback must accept it. The original
    # ``def main():`` raised a TypeError whenever the group was invoked.
    pass
@main.command(name="settings")
def get_settings():
    """
    Prints current API settings from $API_ENVIRONMENT.
    """
    # settings is the module-level object resolved at import time.
    click.echo(settings)
@main.group(name="import")
def import_group():
    """Parent group for the data import subcommands."""
    pass
@import_group.command(name="catagories")
def import_catagories():
    """
    Import server catagories from a YAML file and link matching servers.
    """
    import yaml
    from app.models.server import Catagory, ServerCatagoryLink
    # NOTE(review): the message below mentions catagories.yml but the code
    # actually opens config.yml — confirm which file is intended.
    print("Looking for catagories.yml")
    with Session(engine) as session:
        with open("config.yml", "r") as stream:
            catagories = yaml.safe_load(stream)
        for name, data in catagories.get("catagories").items():
            print(data)
            # meta_ref is a slug derived from the display title.
            _catagory = Catagory(
                title = name,
                meta_ref = name.lower().replace(" ", "-"),
                color = f"#{data['color']}"
            )
            session.add(_catagory)
            session.commit()
            session.refresh(_catagory)
            # Link every server whose domain matches this catagory's
            # SQL LIKE pattern from the YAML ("match" key).
            _query = select(Server).where(Server.domain_name.like(data['match']))
            _result = session.exec(_query).all()
            for server in _result:
                _link = ServerCatagoryLink(
                    server_id = server.id,
                    catagory_id = _catagory.id
                )
                session.add(_link)
                session.commit()
@import_group.command(name="csv")
@click.argument("file")
def csv_file(file):
    """Import servers from a CSV export.

    Expects the columns "Domain Name", "Domain Type", "Agency" and
    "Organization"; each row is inserted as a Server and committed
    individually.
    """
    # Bug fix: the command took a ``file`` parameter but declared no
    # @click.argument for it (cf. the sibling "file" command), so invoking
    # it failed. Also removed the dead header-skip counter: DictReader
    # already consumes the header row, and the old counter never skipped
    # anything anyway.
    import csv

    with Session(engine) as session:
        with open(file, "r") as stream:
            csv_reader = csv.DictReader(stream)
            for row in csv_reader:
                _server = Server(
                    domain_name=row["Domain Name"],
                    domain_type=row["Domain Type"],
                    agency=row["Agency"],
                    organization=row["Organization"],
                )
                session.add(_server)
                session.commit()
@import_group.command(name="file")
@click.argument("file")
def basic_file(file):
    """Import servers from a plain text file, one domain name per line.

    Lines are stripped, lower-cased and de-duplicated before insertion.
    """
    with Session(engine) as session:
        with open(file, "r") as fp:
            unique_names = {line.strip().lower() for line in fp.readlines()}
        for domain in list(unique_names):
            session.add(Server(domain_name=domain))
        session.commit()
@main.group()
def tables():
    """
    Commands for handling database tables.
    """
    pass
@tables.command()
def drop():
    """
    Forcefully remove all tables within the database.
    """
    # Only the two names actually used are imported; the original also
    # pulled in sqlalchemy, inspect, sessionmaker and declarative_base,
    # none of which were referenced.
    from sqlalchemy import create_engine
    from sqlalchemy import MetaData

    # Reflect the live schema so even tables unknown to the models are
    # dropped; this intentionally shadows the module-level engine.
    engine = create_engine(settings.DATABASE_URI)
    meta = MetaData()
    meta.reflect(bind=engine)
    # checkfirst=False: raise instead of silently skipping missing tables.
    meta.drop_all(engine, checkfirst=False)
    print("Dropped tables.")
@tables.command()
def create():
    """
    Creates all tables within the API.
    """
    from sqlmodel import SQLModel
    # Imported for their side effect: loading the model modules registers
    # their tables on SQLModel.metadata so create_all can see them.
    from app.models.user import User, Log
    from app.models.server import Server, ServerLog
    SQLModel.metadata.create_all(engine)
    print("Created tables.")
# Aggregate entry point so the module can be executed directly.
cli = click.CommandCollection(sources=[main])
if __name__ == "__main__":
    cli()
| [
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((205, 249), 'os.environ.get', 'os.environ.get', (['"""API_ENVIRONMENT"""', '"""Testing"""'], {}), "('API_ENVIRONMENT', 'Testing')\n", (219, 249), False, 'import os\n'), ((326, 362), 'sqlalchemy.create_engine', 'create_engine', (['settings.DATABASE_URI'], {}), '(settings.DATABASE_URI)\n', (339, 362), False, 'from sqlalchemy import create_engine\n'), ((366, 379), 'click.group', 'click.group', ([], {}), '()\n', (377, 379), False, 'import click\n'), ((2575, 2597), 'click.argument', 'click.argument', (['"""file"""'], {}), "('file')\n", (2589, 2597), False, 'import click\n'), ((3878, 3917), 'click.CommandCollection', 'click.CommandCollection', ([], {'sources': '[main]'}), '(sources=[main])\n', (3901, 3917), False, 'import click\n'), ((549, 569), 'click.echo', 'click.echo', (['settings'], {}), '(settings)\n', (559, 569), False, 'import click\n'), ((3420, 3456), 'sqlalchemy.create_engine', 'create_engine', (['settings.DATABASE_URI'], {}), '(settings.DATABASE_URI)\n', (3433, 3456), False, 'from sqlalchemy import create_engine\n'), ((3468, 3478), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (3476, 3478), False, 'from sqlalchemy import MetaData\n'), ((3804, 3840), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (3832, 3840), False, 'from sqlmodel import SQLModel\n'), ((880, 895), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (887, 895), False, 'from sqlmodel import Session\n'), ((1972, 1987), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1979, 1987), False, 'from sqlmodel import Session\n'), ((2629, 2644), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2636, 2644), False, 'from sqlmodel import Session\n'), ((981, 1003), 'yaml.safe_load', 'yaml.safe_load', (['stream'], {}), '(stream)\n', (995, 1003), False, 'import yaml\n'), ((2065, 2087), 'csv.DictReader', 'csv.DictReader', (['stream'], {}), '(stream)\n', (2079, 2087), False, 'import csv\n'), ((2249, 
2379), 'app.models.server.Server', 'Server', ([], {'domain_name': "row['Domain Name']", 'domain_type': "row['Domain Type']", 'agency': "row['Agency']", 'organization': "row['Organization']"}), "(domain_name=row['Domain Name'], domain_type=row['Domain Type'],\n agency=row['Agency'], organization=row['Organization'])\n", (2255, 2379), False, 'from app.models.server import Server, ServerLog\n'), ((1465, 1503), 'app.models.server.Server.domain_name.like', 'Server.domain_name.like', (["data['match']"], {}), "(data['match'])\n", (1488, 1503), False, 'from app.models.server import Server, ServerLog\n'), ((1625, 1690), 'app.models.server.ServerCatagoryLink', 'ServerCatagoryLink', ([], {'server_id': 'server.id', 'catagory_id': '_catagory.id'}), '(server_id=server.id, catagory_id=_catagory.id)\n', (1643, 1690), False, 'from app.models.server import Catagory, ServerCatagoryLink\n'), ((2917, 2940), 'app.models.server.Server', 'Server', ([], {'domain_name': 'row'}), '(domain_name=row)\n', (2923, 2940), False, 'from app.models.server import Server, ServerLog\n'), ((1444, 1458), 'sqlmodel.select', 'select', (['Server'], {}), '(Server)\n', (1450, 1458), False, 'from sqlmodel import select\n')] |
from sqlmodel import SQLModel, create_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel.ext.asyncio.session import AsyncSession, AsyncEngine
from app.settings import Settings
settings = Settings()
# Wrap the sync Engine in AsyncEngine for use with the async session below.
engine = AsyncEngine(create_engine(settings.ASYNC_DATABASE_URI, echo=True, future=True))
async def init_db():
    """Create all tables registered on SQLModel.metadata."""
    async with engine.begin() as conn:
        # await conn.run_sync(SQLModel.metadata.drop_all)
        await conn.run_sync(SQLModel.metadata.create_all)
async def get_session() -> AsyncSession:
    """Yield an AsyncSession bound to the module-level engine.

    NOTE(review): this is an async generator, so the annotation is
    nominally AsyncGenerator[AsyncSession, None]; left as-is since
    dependency consumers only care about the yielded type.
    """
    # A new sessionmaker is built per call; expire_on_commit=False keeps
    # returned objects usable after commit.
    async_session = sessionmaker(
        engine, class_=AsyncSession, expire_on_commit=False
    )
    async with async_session() as session:
        yield session
| [
"sqlmodel.create_engine"
] | [((198, 208), 'app.settings.Settings', 'Settings', ([], {}), '()\n', (206, 208), False, 'from app.settings import Settings\n'), ((231, 297), 'sqlmodel.create_engine', 'create_engine', (['settings.ASYNC_DATABASE_URI'], {'echo': '(True)', 'future': '(True)'}), '(settings.ASYNC_DATABASE_URI, echo=True, future=True)\n', (244, 297), False, 'from sqlmodel import SQLModel, create_engine\n'), ((539, 604), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', (['engine'], {'class_': 'AsyncSession', 'expire_on_commit': '(False)'}), '(engine, class_=AsyncSession, expire_on_commit=False)\n', (551, 604), False, 'from sqlalchemy.orm import sessionmaker\n')] |
from datetime import datetime
from typing import Optional
from sqlmodel import Field, Enum, Column
from sqlmodel.main import SQLModel
from graphene_example.app.core.structures import TaskStatusEnum
class User(SQLModel, table=True):
    # Auto-increment surrogate key.
    id: Optional[int] = Field(default=None, primary_key=True)
    full_name: str
    email: Optional[str]
    # Password hash; plaintext is never stored on the model.
    hashed_password: str
    is_active: bool = True
    # NOTE(review): defaulting is_superuser to True grants every new user
    # superuser rights — confirm this is intentional.
    is_superuser: bool = True
class Task(SQLModel, table=True):
    # Auto-increment surrogate key.
    id: Optional[int] = Field(default=None, primary_key=True)
    # Bug fix: the original used ``Field(default=datetime.utcnow)``, which
    # stores the function object itself as the default instead of calling
    # it; ``default_factory`` invokes the callable per row.
    created_at: datetime = Field(default_factory=datetime.utcnow)
    title: str
    # Stored as a DB enum column; new tasks start in the draft state.
    status: TaskStatusEnum = Field(sa_column=Column(Enum(TaskStatusEnum)), default=TaskStatusEnum.draft)
    # Owning user; nullable so a task can exist unassigned.
    user_id: Optional[int] = Field(default=None, foreign_key="user.id")
| [
"sqlmodel.Enum",
"sqlmodel.Field"
] | [((260, 297), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (265, 297), False, 'from sqlmodel import Field, Enum, Column\n'), ((484, 521), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (489, 521), False, 'from sqlmodel import Field, Enum, Column\n'), ((549, 579), 'sqlmodel.Field', 'Field', ([], {'default': 'datetime.utcnow'}), '(default=datetime.utcnow)\n', (554, 579), False, 'from sqlmodel import Field, Enum, Column\n'), ((729, 771), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""user.id"""'}), "(default=None, foreign_key='user.id')\n", (734, 771), False, 'from sqlmodel import Field, Enum, Column\n'), ((647, 667), 'sqlmodel.Enum', 'Enum', (['TaskStatusEnum'], {}), '(TaskStatusEnum)\n', (651, 667), False, 'from sqlmodel import Field, Enum, Column\n')] |
from typing import TYPE_CHECKING, List, Optional, Type
from uuid import UUID
from sqlalchemy import event
from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import DomainURLORMModel, url_pre_save
from joj.horse.models.link_tables import ProblemProblemSetLink
from joj.horse.schemas.problem import ProblemDetail, WithLatestRecordType
from joj.horse.services.db import db_session
if TYPE_CHECKING:
from joj.horse.models import (
Domain,
ProblemConfig,
ProblemGroup,
ProblemSet,
Record,
User,
)
class Problem(DomainURLORMModel, ProblemDetail, table=True):  # type: ignore[call-arg]
    """ORM model for a problem; the URL slug is unique per domain."""
    __tablename__ = "problems"
    __table_args__ = (UniqueConstraint("domain_id", "url"),)
    # Owning domain; deleting the domain cascades to its problems.
    domain_id: UUID = Field(
        sa_column=Column(
            GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False
        )
    )
    domain: "Domain" = Relationship(back_populates="problems")
    # Creator; nulled out (not deleted) when the user is removed.
    owner_id: Optional[UUID] = Field(
        sa_column=Column(
            GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True
        )
    )
    owner: Optional["User"] = Relationship(back_populates="owned_problems")
    # Grouping for related problems; nulled out when the group is removed.
    problem_group_id: Optional[UUID] = Field(
        sa_column=Column(
            GUID, ForeignKey("problem_groups.id", ondelete="SET NULL"), nullable=True
        )
    )
    problem_group: Optional["ProblemGroup"] = Relationship(back_populates="problems")
    # Many-to-many with problem sets via the explicit link table.
    problem_sets: List["ProblemSet"] = Relationship(
        back_populates="problems",
        link_model=ProblemProblemSetLink,
    )
    problem_problem_set_links: List[ProblemProblemSetLink] = Relationship(
        back_populates="problem",
    )
    records: List["Record"] = Relationship(back_populates="problem")
    problem_configs: List["ProblemConfig"] = Relationship(back_populates="problem")
    @classmethod
    async def get_problems_with_record_states(
        cls,
        result_cls: Type[WithLatestRecordType],
        problem_set_id: Optional[UUID],
        problems: List["Problem"],
        user_id: UUID,
    ) -> List[WithLatestRecordType]:
        """Wrap each problem in ``result_cls`` together with the user's
        latest record for it (within ``problem_set_id`` when given)."""
        # Local import avoids a circular dependency at module load time.
        from joj.horse import models
        problem_ids = [problem.id for problem in problems]
        # records[i] corresponds to problems[i] (may be None per problem).
        records = await models.Record.get_user_latest_records(
            problem_set_id=problem_set_id, problem_ids=problem_ids, user_id=user_id
        )
        problems = [
            result_cls(**problems[i].dict(), latest_record=records[i])
            for i, record in enumerate(records)
        ]
        return problems
    async def get_latest_problem_config(self) -> Optional["ProblemConfig"]:
        """Return the most recently created config for this problem, if any."""
        from joj.horse import models
        statement = (
            models.ProblemConfig.sql_select()
            .where(models.ProblemConfig.problem_id == self.id)
            .order_by(models.ProblemConfig.created_at.desc())  # type: ignore
            .limit(1)
        )
        async with db_session() as session:
            results = await session.exec(statement)
            return results.one_or_none()
# Normalize/validate the URL slug before every insert and update.
event.listen(Problem, "before_insert", url_pre_save)
event.listen(Problem, "before_update", url_pre_save)
| [
"sqlmodel.Relationship"
] | [((3154, 3206), 'sqlalchemy.event.listen', 'event.listen', (['Problem', '"""before_insert"""', 'url_pre_save'], {}), "(Problem, 'before_insert', url_pre_save)\n", (3166, 3206), False, 'from sqlalchemy import event\n'), ((3207, 3259), 'sqlalchemy.event.listen', 'event.listen', (['Problem', '"""before_update"""', 'url_pre_save'], {}), "(Problem, 'before_update', url_pre_save)\n", (3219, 3259), False, 'from sqlalchemy import event\n'), ((1029, 1068), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problems"""'}), "(back_populates='problems')\n", (1041, 1068), False, 'from sqlmodel import Field, Relationship\n'), ((1257, 1302), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""owned_problems"""'}), "(back_populates='owned_problems')\n", (1269, 1302), False, 'from sqlmodel import Field, Relationship\n'), ((1524, 1563), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problems"""'}), "(back_populates='problems')\n", (1536, 1563), False, 'from sqlmodel import Field, Relationship\n'), ((1604, 1677), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problems"""', 'link_model': 'ProblemProblemSetLink'}), "(back_populates='problems', link_model=ProblemProblemSetLink)\n", (1616, 1677), False, 'from sqlmodel import Field, Relationship\n'), ((1762, 1800), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem"""'}), "(back_populates='problem')\n", (1774, 1800), False, 'from sqlmodel import Field, Relationship\n'), ((1847, 1885), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem"""'}), "(back_populates='problem')\n", (1859, 1885), False, 'from sqlmodel import Field, Relationship\n'), ((1931, 1969), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem"""'}), "(back_populates='problem')\n", (1943, 1969), False, 'from sqlmodel import Field, Relationship\n'), ((816, 852), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', 
(['"""domain_id"""', '"""url"""'], {}), "('domain_id', 'url')\n", (832, 852), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((2352, 2466), 'joj.horse.models.Record.get_user_latest_records', 'models.Record.get_user_latest_records', ([], {'problem_set_id': 'problem_set_id', 'problem_ids': 'problem_ids', 'user_id': 'user_id'}), '(problem_set_id=problem_set_id,\n problem_ids=problem_ids, user_id=user_id)\n', (2389, 2466), False, 'from joj.horse import models\n'), ((3034, 3046), 'joj.horse.services.db.db_session', 'db_session', ([], {}), '()\n', (3044, 3046), False, 'from joj.horse.services.db import db_session\n'), ((929, 973), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""domains.id"""'], {'ondelete': '"""CASCADE"""'}), "('domains.id', ondelete='CASCADE')\n", (939, 973), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((1152, 1195), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""users.id"""'], {'ondelete': '"""SET NULL"""'}), "('users.id', ondelete='SET NULL')\n", (1162, 1195), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((1394, 1446), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""problem_groups.id"""'], {'ondelete': '"""SET NULL"""'}), "('problem_groups.id', ondelete='SET NULL')\n", (1404, 1446), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((2927, 2965), 'joj.horse.models.ProblemConfig.created_at.desc', 'models.ProblemConfig.created_at.desc', ([], {}), '()\n', (2963, 2965), False, 'from joj.horse import models\n'), ((2808, 2841), 'joj.horse.models.ProblemConfig.sql_select', 'models.ProblemConfig.sql_select', ([], {}), '()\n', (2839, 2841), False, 'from joj.horse import models\n')] |
from datetime import date
from typing import List, Optional
from uuid import UUID
from api.ecoindex.models.responses import ApiEcoindex
from api.helper import new_uuid
from api.models.enums import Version
from ecoindex_scraper.models import Result
from sqlalchemy import func
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlalchemy.sql.expression import asc
from sqlmodel import select
from db.helper import date_filter
async def save_ecoindex_result_db(
    session: AsyncSession,
    ecoindex_result: Result,
    version: Optional[Version] = Version.v1,
) -> ApiEcoindex:
    """Persist an ecoindex analysis result and return the stored row.

    The row is stamped with its initial ranking among existing results and
    with the new total number of results for this API version.
    """
    ranking = await get_rank_analysis_db(
        ecoindex=ecoindex_result, session=session, version=version
    )
    total_results = await get_count_analysis_db(session=session, version=version)
    db_ecoindex = ApiEcoindex(
        id=new_uuid(),
        date=ecoindex_result.date,
        url=ecoindex_result.url,
        host=ecoindex_result.url.host,
        width=ecoindex_result.width,
        height=ecoindex_result.height,
        size=ecoindex_result.size,
        nodes=ecoindex_result.nodes,
        requests=ecoindex_result.requests,
        grade=ecoindex_result.grade,
        score=ecoindex_result.score,
        ges=ecoindex_result.ges,
        water=ecoindex_result.water,
        page_type=ecoindex_result.page_type,
        version=version.get_version_number(),
        # When no stored score is <= this one, the new row ranks last.
        initial_ranking=ranking if ranking else total_results + 1,
        initial_total_results=total_results + 1,
    )
    session.add(db_ecoindex)
    await session.commit()
    await session.refresh(db_ecoindex)
    return db_ecoindex
async def get_count_analysis_db(
    session: AsyncSession,
    version: Optional[Version] = Version.v1,
    host: Optional[str] = None,
    date_from: Optional[date] = None,
    date_to: Optional[date] = None,
) -> int:
    """Count analyses for an API version, optionally filtered by host
    and/or a date window.

    Security fix: the original interpolated the caller-supplied ``host``
    (and dates) directly into the SQL string; bound parameters are used
    instead so a crafted host value cannot inject SQL.
    """
    statement = "SELECT count(*) FROM apiecoindex WHERE version = :version"
    params = {"version": version.get_version_number()}
    if host:
        statement += " AND host = :host"
        params["host"] = host
    if date_from:
        statement += " AND date >= :date_from"
        params["date_from"] = date_from
    if date_to:
        statement += " AND date <= :date_to"
        params["date_to"] = date_to
    result = await session.execute(statement=statement, params=params)
    return result.scalar()
async def get_rank_analysis_db(
    ecoindex: Result, session: AsyncSession, version: Optional[Version] = Version.v1
) -> Optional[int]:
    """Return the rank the given result would take among stored analyses.

    Rows are ranked by score descending; returns the ranking of the first
    stored row whose score is <= the new result's score, or None when no
    such row exists.
    """
    # NOTE(review): ecoindex.score is interpolated into raw SQL; it comes
    # from our own computation (numeric), but bound parameters would be
    # safer — consider aligning with a parameterized query.
    result = await session.execute(
        (
            "SELECT ranking FROM ("
            "SELECT *, ROW_NUMBER() OVER (ORDER BY score DESC) ranking "
            "FROM apiecoindex "
            f"WHERE version={version.get_version_number()} "
            "ORDER BY score DESC) t "
            f"WHERE score <= {ecoindex.score} "
            "LIMIT 1;"
        )
    )
    return result.scalar()
async def get_ecoindex_result_list_db(
    session: AsyncSession,
    version: Optional[Version] = Version.v1,
    host: Optional[str] = None,
    date_from: Optional[date] = None,
    date_to: Optional[date] = None,
    page: Optional[int] = 1,
    size: Optional[int] = 50,
) -> List[ApiEcoindex]:
    """Return one page of analyses for an API version, ordered by date
    ascending.

    Optional filters: exact host match and a date window. ``page`` is
    1-based with ``size`` rows per page.
    """
    statement = (
        select(ApiEcoindex)
        .where(ApiEcoindex.version == version.get_version_number())
        .offset((page - 1) * size)
        .limit(size)
    )
    if host:
        statement = statement.where(ApiEcoindex.host == host)
    # date_filter appends the date-window WHERE clauses when bounds are set.
    statement = date_filter(statement=statement, date_from=date_from, date_to=date_to)
    ecoindexes = await session.execute(statement.order_by(asc("date")))
    return ecoindexes.scalars().all()
async def get_ecoindex_result_by_id_db(
    session: AsyncSession, id: UUID, version: Optional[Version] = Version.v1
) -> ApiEcoindex:
    """Fetch a single analysis by id for the given API version, or None."""
    # Both criteria are ANDed in a single where() call, equivalent to the
    # chained-where form.
    query = select(ApiEcoindex).where(
        ApiEcoindex.id == id,
        ApiEcoindex.version == version.get_version_number(),
    )
    row = await session.execute(query)
    return row.scalar_one_or_none()
async def get_count_daily_request_per_host(session: AsyncSession, host: str) -> int:
    """Count the analyses stored today for *host*."""
    statement = select(ApiEcoindex).where(
        func.date(ApiEcoindex.date) == date.today(), ApiEcoindex.host == host
    )
    results = await session.execute(statement)
    # Counts client-side by fetching all matching rows; a SELECT count(*)
    # would avoid transferring them, but the value is the same.
    return len(results.all())
| [
"sqlmodel.select"
] | [((3331, 3401), 'db.helper.date_filter', 'date_filter', ([], {'statement': 'statement', 'date_from': 'date_from', 'date_to': 'date_to'}), '(statement=statement, date_from=date_from, date_to=date_to)\n', (3342, 3401), False, 'from db.helper import date_filter\n'), ((833, 843), 'api.helper.new_uuid', 'new_uuid', ([], {}), '()\n', (841, 843), False, 'from api.helper import new_uuid\n'), ((4001, 4020), 'sqlmodel.select', 'select', (['ApiEcoindex'], {}), '(ApiEcoindex)\n', (4007, 4020), False, 'from sqlmodel import select\n'), ((4036, 4063), 'sqlalchemy.func.date', 'func.date', (['ApiEcoindex.date'], {}), '(ApiEcoindex.date)\n', (4045, 4063), False, 'from sqlalchemy import func\n'), ((4067, 4079), 'datetime.date.today', 'date.today', ([], {}), '()\n', (4077, 4079), False, 'from datetime import date\n'), ((3461, 3472), 'sqlalchemy.sql.expression.asc', 'asc', (['"""date"""'], {}), "('date')\n", (3464, 3472), False, 'from sqlalchemy.sql.expression import asc\n'), ((3677, 3696), 'sqlmodel.select', 'select', (['ApiEcoindex'], {}), '(ApiEcoindex)\n', (3683, 3696), False, 'from sqlmodel import select\n'), ((3089, 3108), 'sqlmodel.select', 'select', (['ApiEcoindex'], {}), '(ApiEcoindex)\n', (3095, 3108), False, 'from sqlmodel import select\n')] |
from typing import TYPE_CHECKING, List, Optional
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from .hero_model import Hero
class Team(SQLModel, table=True):
    # Auto-increment surrogate key.
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    headquarters: str
    # One-to-many: heroes whose Hero.team points back at this team.
    heroes: List["Hero"] = Relationship(back_populates="team")
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((213, 250), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (218, 250), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((315, 350), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""team"""'}), "(back_populates='team')\n", (327, 350), False, 'from sqlmodel import Field, Relationship, SQLModel\n')] |
from sqlalchemy import inspect
from sqlalchemy.engine.reflection import Inspector
from sqlmodel import create_engine
def test_tutorial001(clear_sqlmodel):
from docs_src.tutorial.connect.create_tables import tutorial001 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)
mod.main()
insp: Inspector = inspect(mod.engine)
assert insp.has_table(str(mod.Hero.__tablename__))
assert insp.has_table(str(mod.Team.__tablename__))
| [
"sqlmodel.create_engine"
] | [((283, 312), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (296, 312), False, 'from sqlmodel import create_engine\n'), ((317, 327), 'docs_src.tutorial.connect.create_tables.tutorial001.main', 'mod.main', ([], {}), '()\n', (325, 327), True, 'from docs_src.tutorial.connect.create_tables import tutorial001 as mod\n'), ((350, 369), 'sqlalchemy.inspect', 'inspect', (['mod.engine'], {}), '(mod.engine)\n', (357, 369), False, 'from sqlalchemy import inspect\n')] |
"""seed schools
Revision ID: 8d04b7943264
Revises: <PASSWORD>
Create Date: 2022-04-18 00:38:38.618682+00:00
"""
import json
from os import getcwd
from pathlib import Path
import sqlalchemy as sa
import sqlalchemy.sql as sql
import sqlmodel
from alembic import context, op
# revision identifiers, used by Alembic.
revision = "8d04b7943264"
down_revision = "0<PASSWORD>"
branch_labels = None
depends_on = None
# Ad-hoc schools table for bulk import
schools_table = sql.table(
"schools", sql.column("id", sa.String), sql.column("name", sa.String)
)
def load_schools():
migrations_dir = Path(getcwd(), context.script.dir)
schools_path = migrations_dir.joinpath("schools.json")
return json.load(open(schools_path, "r"))
def upgrade():
# Change schools.id to a string
op.drop_constraint(
"applications_school_id_fkey", "applications", type_="foreignkey"
)
op.alter_column(
"applications",
"school_id",
type_=sqlmodel.sql.sqltypes.AutoString(),
nullable=False,
)
op.alter_column(
"schools", "id", type_=sqlmodel.sql.sqltypes.AutoString(), nullable=False
)
op.create_foreign_key(
None, "applications", "schools", ["school_id"], ["id"], ondelete="CASCADE"
)
# Insert stuff
schools = load_schools()
op.bulk_insert(schools_table, [{"id": s["id"], "name": s["name"]} for s in schools])
def downgrade():
# Delete added records
schools = load_schools()
for school in schools:
op.execute(
schools_table.delete().where(
schools_table.c.id == op.inline_literal(school["id"])
)
)
# Change schools.id back to an integer
op.drop_constraint(
"applications_school_id_fkey", "applications", type_="foreignkey"
)
op.alter_column(
"applications",
"school_id",
type_=sa.Integer(),
nullable=False,
postgresql_using="school_id::integer",
)
op.alter_column(
"schools",
"id",
type_=sa.Integer(),
nullable=False,
postgresql_using="id::integer",
)
op.create_foreign_key(
None, "applications", "schools", ["school_id"], ["id"], ondelete="CASCADE"
)
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((494, 521), 'sqlalchemy.sql.column', 'sql.column', (['"""id"""', 'sa.String'], {}), "('id', sa.String)\n", (504, 521), True, 'import sqlalchemy.sql as sql\n'), ((523, 552), 'sqlalchemy.sql.column', 'sql.column', (['"""name"""', 'sa.String'], {}), "('name', sa.String)\n", (533, 552), True, 'import sqlalchemy.sql as sql\n'), ((795, 885), 'alembic.op.drop_constraint', 'op.drop_constraint', (['"""applications_school_id_fkey"""', '"""applications"""'], {'type_': '"""foreignkey"""'}), "('applications_school_id_fkey', 'applications', type_=\n 'foreignkey')\n", (813, 885), False, 'from alembic import context, op\n'), ((1154, 1256), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""schools"""', "['school_id']", "['id']"], {'ondelete': '"""CASCADE"""'}), "(None, 'applications', 'schools', ['school_id'], ['id'\n ], ondelete='CASCADE')\n", (1175, 1256), False, 'from alembic import context, op\n'), ((1319, 1407), 'alembic.op.bulk_insert', 'op.bulk_insert', (['schools_table', "[{'id': s['id'], 'name': s['name']} for s in schools]"], {}), "(schools_table, [{'id': s['id'], 'name': s['name']} for s in\n schools])\n", (1333, 1407), False, 'from alembic import context, op\n'), ((1710, 1800), 'alembic.op.drop_constraint', 'op.drop_constraint', (['"""applications_school_id_fkey"""', '"""applications"""'], {'type_': '"""foreignkey"""'}), "('applications_school_id_fkey', 'applications', type_=\n 'foreignkey')\n", (1728, 1800), False, 'from alembic import context, op\n'), ((2137, 2239), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""schools"""', "['school_id']", "['id']"], {'ondelete': '"""CASCADE"""'}), "(None, 'applications', 'schools', ['school_id'], ['id'\n ], ondelete='CASCADE')\n", (2158, 2239), False, 'from alembic import context, op\n'), ((603, 611), 'os.getcwd', 'getcwd', ([], {}), '()\n', (609, 611), False, 'from os import getcwd\n'), ((975, 1009), 'sqlmodel.sql.sqltypes.AutoString', 
'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1007, 1009), False, 'import sqlmodel\n'), ((1093, 1127), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1125, 1127), False, 'import sqlmodel\n'), ((1890, 1902), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1900, 1902), True, 'import sqlalchemy as sa\n'), ((2049, 2061), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2059, 2061), True, 'import sqlalchemy as sa\n'), ((1606, 1637), 'alembic.op.inline_literal', 'op.inline_literal', (["school['id']"], {}), "(school['id'])\n", (1623, 1637), False, 'from alembic import context, op\n')] |
from sqlmodel import create_engine, SQLModel, Session
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
connect_args = {"check_same_thread": False}
engine = create_engine(sqlite_url, echo=True, connect_args=connect_args)
def init_db():
SQLModel.metadata.create_all(engine)
def get_session():
with Session(engine) as session:
yield session | [
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine"
] | [((188, 251), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)', 'connect_args': 'connect_args'}), '(sqlite_url, echo=True, connect_args=connect_args)\n', (201, 251), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((273, 309), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (301, 309), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((340, 355), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (347, 355), False, 'from sqlmodel import create_engine, SQLModel, Session\n')] |
from typing import List, Optional
from sqlmodel import Field, Relationship, SQLModel, create_engine
class Weapon(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
hero: "Hero" = Relationship(back_populates="weapon")
class Power(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
hero_id: int = Field(foreign_key="hero.id")
hero: "Hero" = Relationship(back_populates="powers")
class Team(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
headquarters: str
heroes: List["Hero"] = Relationship(back_populates="team")
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
secret_name: str
age: Optional[int] = None
team_id: Optional[int] = Field(default=None, foreign_key="team.id")
team: Optional[Team] = Relationship(back_populates="heroes")
weapon_id: Optional[int] = Field(default=None, foreign_key="weapon.id")
weapon: Optional[Weapon] = Relationship(back_populates="owner")
powers: List[Power] = Relationship(back_populates="hero")
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def main():
create_db_and_tables()
if __name__ == "__main__":
main()
| [
"sqlmodel.Relationship",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((1289, 1325), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (1302, 1325), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((163, 200), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (168, 200), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((235, 272), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""weapon"""'}), "(back_populates='weapon')\n", (247, 272), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((334, 371), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (339, 371), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((406, 434), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""hero.id"""'}), "(foreign_key='hero.id')\n", (411, 434), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((454, 491), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""powers"""'}), "(back_populates='powers')\n", (466, 491), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((552, 589), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (557, 589), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((654, 689), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""team"""'}), "(back_populates='team')\n", (666, 689), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((750, 787), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (755, 787), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((883, 925), 'sqlmodel.Field', 'Field', 
([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (888, 925), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((953, 990), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""heroes"""'}), "(back_populates='heroes')\n", (965, 990), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((1023, 1067), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""weapon.id"""'}), "(default=None, foreign_key='weapon.id')\n", (1028, 1067), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((1099, 1135), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""owner"""'}), "(back_populates='owner')\n", (1111, 1135), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((1163, 1198), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""hero"""'}), "(back_populates='hero')\n", (1175, 1198), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n'), ((1360, 1396), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1388, 1396), False, 'from sqlmodel import Field, Relationship, SQLModel, create_engine\n')] |
from sqlmodel import SQLModel, Field
import datetime
from typing import Optional
class Orders(SQLModel, table=True):
invoice_no: Optional[int] = Field(default=None, primary_key=True)
stock_code: str
description: str
quantity: int
invoice_date: datetime.datetime
unit_price: float
cust_id: int
| [
"sqlmodel.Field"
] | [((151, 188), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (156, 188), False, 'from sqlmodel import SQLModel, Field\n')] |
# Imports from standard library
import os
from typing import Optional, Union
import asyncio
import time
import sqlite3
import hashlib
# Import these utilities
from utils.merkletree import MerkleTree, MerkleError
# Import the HTTP app server
from fastapi import FastAPI, BackgroundTasks
##################################
from sqlmodel import Field, SQLModel, create_engine
class MerkleTable(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
timestamp: int
ngsi_id_hash: str
ngsi_value_hash: str
ngsi_receipt: Optional[str] = None
# The table scripts
drop_table_script = """
DROP TABLE IF EXISTS testing;
"""
create_table_script = """
CREATE TABLE IF NOT EXISTS testing (
id INTEGER PRIMARY KEY,
timestamp INTEGER NOT NULL,
ngsi_id_hash TEXT NOT NULL,
ngsi_value_hash TEXT NOT NULL,
ngsi_receipt TEXT
);
"""
set_journal_wal = 'PRAGMA journal_mode=WAL'
query_journal_mode = """PRAGMA journal_mode"""
class MerkleBuffer:
def __init__(self,
db_name: str = 'mkbuffer.db', # Name of the database
db_max_elements: int = 10000, # Maximum size of database, in number of records
maxLeaves: int = 1024, # Maximum number of leaves of the Merkle Tree to notarize
maxInterval: int = 60, # Notarize every maxInterval (seconds) even if not enough leaves received yet
durability: int = 10 # Commit database every durability seconds, to make data permanent
) -> None:
self.db_name = db_name
self.maxLeaves = maxLeaves
self.db_max_elements = db_max_elements
self.maxInterval = maxInterval
self.durability = durability
self.next_record = 1
self.leaves = 0
self.last_notarization = time.time()
print(f'MaxLeaves: {maxLeaves}')
self.open()
# Create a background task which commits the db every durability secs (or 2 sec as a minimum),
# and registers the Merkle Tree even if not enough entries have been received
if durability > 0 or maxInterval > 0:
self.commit_task = asyncio.create_task(self.commit_background_task(min(durability, maxInterval)))
def db_name(self):
self.db_name
def open(self):
sqlite_file_name = "sqlmodel.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
# # Connect to db
# self.db = sqlite3.connect(self.db_name,
# detect_types=sqlite3.PARSE_DECLTYPES,)
# self.db.row_factory = sqlite3.Row
# self.db.close()
# self.db = sqlite3.connect(self.db_name,
# detect_types=sqlite3.PARSE_DECLTYPES,)
# self.db.row_factory = sqlite3.Row
def open_erase(self):
# Erase the database
if os.path.exists(self.db_name):
os.remove(self.db_name)
# Connect to db
self.db = sqlite3.connect(self.db_name,
detect_types=sqlite3.PARSE_DECLTYPES,)
self.db.row_factory = sqlite3.Row
# Create the table, dropping it before.
self.db.executescript(drop_table_script)
self.db.executescript(create_table_script)
# Set the db to WAL mode for better performance
self.db.execute(set_journal_wal)
def commit(self):
self.db.commit()
def close(self):
self.next_record = 1
self.leaves = 0
self.commit_task.cancel()
self.db.close()
def _hash(self, text: Union[str, bytes]) -> bytes:
if isinstance(text, str):
text = bytes(text, "utf-8")
h = hashlib.sha256()
h.update(text)
d = h.digest()
return d
def put(self, id: str, value: str):
need_process_batch = False
# calculate hashes
id_hash = self._hash(id)
value_hash = self._hash(value)
# Insert the record
try:
# Execute the INSERT or REPLACE
self.db.execute(
'''insert or replace into testing
(id, timestamp, ngsi_id_hash, ngsi_value_hash) values (?, ?, ?, ?)''',
(self.next_record, time.time_ns(), id_hash, value_hash))
# Increment the record number
self.next_record += 1
self.leaves += 1
except Exception as e:
raise e
# Check if we should create the Merkle Tree and notarize
if self.leaves >= self.maxLeaves:
# Process the batch of records, possibly asynchronously
need_process_batch = True
# self.processBatch()
self.leaves = 0
# Check if the database size has reached the maximum and start reusing rows
if self.next_record > self.db_max_elements:
print("Rotate the database")
self.next_record = 1
return need_process_batch
def processBatch(self, db):
# stmt = 'select * from testing where ngsi_receipt is null limit 100'
stmt = 'select * from testing'
result = db.execute(stmt)
rows = result.fetchall()
print(f'Rows: {len(rows)}')
# for row in rows:
# print(f'{row["timestamp"]}-{row["ngsi_id_hash"].hex()}-{row["ngsi_value_hash"].hex()}')
# Update last notarization
self.last_notarization = time.time()
# Create a background task to make sure commit is called for the last put,
# even if no more puts are coming
async def commit_background_task(self, frequency: int):
while True:
await asyncio.sleep(frequency)
print("BKG Task: committing")
# Commit the database
self.db.commit()
# Check if must notarize even though not enough records have arrived
now = time.time()
if now - self.last_notarization > self.maxInterval:
self.processBatch(self.db)
self.last_notarization = now
f: MerkleBuffer = None
app = FastAPI()
def processBatch():
global f
print(f'In return from call process')
# Connect to db
db = sqlite3.connect(f.db_name,
detect_types=sqlite3.PARSE_DECLTYPES,)
db.row_factory = sqlite3.Row
f.processBatch(db)
@app.on_event("startup")
async def startup_event():
global f
f = MerkleBuffer()
@app.on_event("shutdown")
def shutdown_event():
print("SHUTDOWN: closing the database")
global f
f.close()
@app.get("/store/initialize")
async def store_initialize():
global f
f = MerkleBuffer(maxLeaves=4)
return {"result": "OK"}
@app.get("/store/{item_id}/{value_id}")
async def store_item(item_id: str = "Hello", value_id: str = "Pepe", background_tasks: BackgroundTasks = None):
global f
result = f.put(item_id, item_id)
if result:
background_tasks.add_task(processBatch)
return {"result": "OK"}
import uvicorn
if __name__ == "__main__":
uvicorn.run("lserver:app", host="127.0.0.1", port=8000, log_level="warning") | [
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((6014, 6023), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (6021, 6023), False, 'from fastapi import FastAPI, BackgroundTasks\n'), ((443, 480), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (448, 480), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((6129, 6193), 'sqlite3.connect', 'sqlite3.connect', (['f.db_name'], {'detect_types': 'sqlite3.PARSE_DECLTYPES'}), '(f.db_name, detect_types=sqlite3.PARSE_DECLTYPES)\n', (6144, 6193), False, 'import sqlite3\n'), ((6950, 7026), 'uvicorn.run', 'uvicorn.run', (['"""lserver:app"""'], {'host': '"""127.0.0.1"""', 'port': '(8000)', 'log_level': '"""warning"""'}), "('lserver:app', host='127.0.0.1', port=8000, log_level='warning')\n", (6961, 7026), False, 'import uvicorn\n'), ((1790, 1801), 'time.time', 'time.time', ([], {}), '()\n', (1799, 1801), False, 'import time\n'), ((2390, 2426), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (2403, 2426), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((2843, 2871), 'os.path.exists', 'os.path.exists', (['self.db_name'], {}), '(self.db_name)\n', (2857, 2871), False, 'import os\n'), ((2952, 3019), 'sqlite3.connect', 'sqlite3.connect', (['self.db_name'], {'detect_types': 'sqlite3.PARSE_DECLTYPES'}), '(self.db_name, detect_types=sqlite3.PARSE_DECLTYPES)\n', (2967, 3019), False, 'import sqlite3\n'), ((3645, 3661), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (3659, 3661), False, 'import hashlib\n'), ((5359, 5370), 'time.time', 'time.time', ([], {}), '()\n', (5368, 5370), False, 'import time\n'), ((2885, 2908), 'os.remove', 'os.remove', (['self.db_name'], {}), '(self.db_name)\n', (2894, 2908), False, 'import os\n'), ((5818, 5829), 'time.time', 'time.time', ([], {}), '()\n', (5827, 5829), False, 'import time\n'), ((5587, 5611), 'asyncio.sleep', 'asyncio.sleep', (['frequency'], {}), '(frequency)\n', (5600, 
5611), False, 'import asyncio\n'), ((4190, 4204), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (4202, 4204), False, 'import time\n')] |
from enum import Enum
from typing import Optional
from sqlmodel import Field, SQLModel
class ExtensionType(str, Enum):
DOCKER = 'DOCKER'
WEBHOOK = 'WEBHOOK'
BINARY = 'BINARY'
class Extension(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
# condition is a small piece of code that can be eval by the interpreter
# i.e., ext-server will run if(eval(condition)): ...
# in the condition, the extension has access to the event object, i.e., they can access as e.event_type=='build_image'
condition: str
extension_type: ExtensionType
# ext-server will create new process to execute the entrypoint. For different types, different format of entrypoint will be needed.
# 1. for Docker image, it should be like: docker exec xxx
# 2. for webhook, it should be a url address.
# 3. for a local binary, it should be a shell command
entrypoint: str
remote_url: Optional[str]
vendor: str
name: str
| [
"sqlmodel.Field"
] | [((254, 291), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (259, 291), False, 'from sqlmodel import Field, SQLModel\n')] |
from typing import TYPE_CHECKING, List, Optional
from uuid import UUID
from sqlalchemy import event
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import DomainURLORMModel, url_pre_save
from joj.horse.models.link_tables import ProblemProblemSetLink
from joj.horse.schemas.base import Operation
from joj.horse.schemas.problem_set import ProblemSetDetail
from joj.horse.utils.errors import BizError, ErrorCode
if TYPE_CHECKING:
from joj.horse.models import Domain, Problem, Record, User
class ProblemSet(DomainURLORMModel, ProblemSetDetail, table=True): # type: ignore[call-arg]
__tablename__ = "problem_sets"
__table_args__ = (UniqueConstraint("domain_id", "url"),)
domain_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False
)
)
domain: "Domain" = Relationship(back_populates="problem_sets")
owner_id: Optional[UUID] = Field(
sa_column=Column(
GUID,
ForeignKey("users.id", ondelete="SET NULL"),
nullable=True,
)
)
owner: Optional["User"] = Relationship(back_populates="owned_problem_sets")
# problems_link: List["Problem"] = Relationship(
# back_populates="problem_problem_set_links",
# # link_model=ProblemProblemSetLink,
# sa_relationship_kwargs={
# "secondary": ProblemProblemSetLink,
# "order_by": "ProblemProblemSetLink.position",
# "collection_class": ordering_list("position"),
# },
# )
# maintain the order of many to many relationship
problem_problem_set_links: List[ProblemProblemSetLink] = Relationship(
back_populates="problem_set",
sa_relationship_kwargs={
"order_by": "ProblemProblemSetLink.position",
"collection_class": ordering_list("position"),
},
)
problems: List["Problem"] = Relationship(
back_populates="problem_sets",
link_model=ProblemProblemSetLink,
sa_relationship_kwargs={
"order_by": "ProblemProblemSetLink.position",
},
)
records: List["Record"] = Relationship(back_populates="problem_set")
async def operate_problem(
self, problem: "Problem", operation: Operation, position: Optional[int] = None
) -> None:
assert problem.domain_id == self.domain_id
link = await ProblemProblemSetLink.get_or_none(
problem_set_id=self.id, problem_id=problem.id
)
if operation == Operation.Create:
if link is not None:
raise BizError(ErrorCode.IntegrityError, "problem already added")
link = ProblemProblemSetLink(problem_set_id=self.id, problem_id=problem.id)
else:
if link is None:
raise BizError(ErrorCode.IntegrityError, "problem not added")
if operation == Operation.Read:
return
if operation in (Operation.Update, Operation.Delete):
self.problem_problem_set_links.remove(link)
if operation in (Operation.Create, Operation.Update):
if position is None:
self.problem_problem_set_links.append(link)
else:
self.problem_problem_set_links.insert(position, link)
if operation == Operation.Delete:
await link.delete_model(commit=False)
await self.save_model()
event.listen(ProblemSet, "before_insert", url_pre_save)
event.listen(ProblemSet, "before_update", url_pre_save)
| [
"sqlmodel.Relationship"
] | [((3592, 3647), 'sqlalchemy.event.listen', 'event.listen', (['ProblemSet', '"""before_insert"""', 'url_pre_save'], {}), "(ProblemSet, 'before_insert', url_pre_save)\n", (3604, 3647), False, 'from sqlalchemy import event\n'), ((3648, 3703), 'sqlalchemy.event.listen', 'event.listen', (['ProblemSet', '"""before_update"""', 'url_pre_save'], {}), "(ProblemSet, 'before_update', url_pre_save)\n", (3660, 3703), False, 'from sqlalchemy import event\n'), ((1038, 1081), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_sets"""'}), "(back_populates='problem_sets')\n", (1050, 1081), False, 'from sqlmodel import Field, Relationship\n'), ((1295, 1344), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""owned_problem_sets"""'}), "(back_populates='owned_problem_sets')\n", (1307, 1344), False, 'from sqlmodel import Field, Relationship\n'), ((2094, 2251), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_sets"""', 'link_model': 'ProblemProblemSetLink', 'sa_relationship_kwargs': "{'order_by': 'ProblemProblemSetLink.position'}"}), "(back_populates='problem_sets', link_model=\n ProblemProblemSetLink, sa_relationship_kwargs={'order_by':\n 'ProblemProblemSetLink.position'})\n", (2106, 2251), False, 'from sqlmodel import Field, Relationship\n'), ((2327, 2369), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""problem_set"""'}), "(back_populates='problem_set')\n", (2339, 2369), False, 'from sqlmodel import Field, Relationship\n'), ((825, 861), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['"""domain_id"""', '"""url"""'], {}), "('domain_id', 'url')\n", (841, 861), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((2576, 2661), 'joj.horse.models.link_tables.ProblemProblemSetLink.get_or_none', 'ProblemProblemSetLink.get_or_none', ([], {'problem_set_id': 'self.id', 'problem_id': 'problem.id'}), '(problem_set_id=self.id, problem_id=problem.id\n )\n', (2609, 
2661), False, 'from joj.horse.models.link_tables import ProblemProblemSetLink\n'), ((2855, 2923), 'joj.horse.models.link_tables.ProblemProblemSetLink', 'ProblemProblemSetLink', ([], {'problem_set_id': 'self.id', 'problem_id': 'problem.id'}), '(problem_set_id=self.id, problem_id=problem.id)\n', (2876, 2923), False, 'from joj.horse.models.link_tables import ProblemProblemSetLink\n'), ((938, 982), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""domains.id"""'], {'ondelete': '"""CASCADE"""'}), "('domains.id', ondelete='CASCADE')\n", (948, 982), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((1177, 1220), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""users.id"""'], {'ondelete': '"""SET NULL"""'}), "('users.id', ondelete='SET NULL')\n", (1187, 1220), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((2017, 2042), 'sqlalchemy.ext.orderinglist.ordering_list', 'ordering_list', (['"""position"""'], {}), "('position')\n", (2030, 2042), False, 'from sqlalchemy.ext.orderinglist import ordering_list\n'), ((2776, 2835), 'joj.horse.utils.errors.BizError', 'BizError', (['ErrorCode.IntegrityError', '"""problem already added"""'], {}), "(ErrorCode.IntegrityError, 'problem already added')\n", (2784, 2835), False, 'from joj.horse.utils.errors import BizError, ErrorCode\n'), ((2989, 3044), 'joj.horse.utils.errors.BizError', 'BizError', (['ErrorCode.IntegrityError', '"""problem not added"""'], {}), "(ErrorCode.IntegrityError, 'problem not added')\n", (2997, 3044), False, 'from joj.horse.utils.errors import BizError, ErrorCode\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from uuid import UUID
import sqlalchemy.types as types
from sqlalchemy import Column
from sqlalchemy.orm import registry
from sqlmodel import Field, select
from dbgen.core.decorators import transform
from dbgen.core.entity import Entity
from dbgen.core.generator import Generator
from dbgen.core.model import Model
from dbgen.core.node.extract import Extract
from dbgen.core.node.query import Query
my_registry = registry()
class Parent(Entity, registry=my_registry, table=True):
__identifying__ = {"label"}
label: str
myColumn: Optional[dict] = Field(None, sa_column=Column(types.JSON()))
class Child(Entity, registry=my_registry, table=True):
__identifying__ = {"label", "parent_id"}
label: str
new_col: str = "test"
parent_id: Optional[UUID] = Field(None, foreign_key="public.parent.id")
class CustomExtractor(Extract):
n: int = 1000
def extract(self):
for i in range(self.n):
yield {'out': str(i)}
def length(self, **_):
return self.n
@transform
def failing_func():
raise ValueError("Failed")
@transform
def inputs_skipped():
from dbgen.exceptions import DBgenSkipException
raise DBgenSkipException(msg="Skip!")
def make_model():
with Model(name='new_api', registry=my_registry) as model:
with Generator('add_parent'):
new_extract = CustomExtractor(n=1000)
Parent.load(insert=True, label=new_extract["out"], validation='strict', myColumn={'a': 1})
with Generator('add_parents_v2'):
Parent.load(insert=True, label="parentier")
with Generator('add_parents_v3'):
Parent.load(insert=True, label="parent")
@transform
def concise_func(label: str) -> str:
return f"{label}-test"
with Generator('add_child'):
query = Query(select(Parent.id, Parent.label))
parent_id, parent_label = query.results()
concise_pyblock = concise_func(query["label"])
Child.load(insert=True, label=concise_pyblock.results(), parent_id=query["id"])
with Generator('failing_gen'):
failing_func()
with Generator('skip_gen'):
inputs_skipped()
return model
| [
"sqlmodel.select",
"sqlmodel.Field"
] | [((1041, 1051), 'sqlalchemy.orm.registry', 'registry', ([], {}), '()\n', (1049, 1051), False, 'from sqlalchemy.orm import registry\n'), ((1407, 1450), 'sqlmodel.Field', 'Field', (['None'], {'foreign_key': '"""public.parent.id"""'}), "(None, foreign_key='public.parent.id')\n", (1412, 1450), False, 'from sqlmodel import Field, select\n'), ((1805, 1836), 'dbgen.exceptions.DBgenSkipException', 'DBgenSkipException', ([], {'msg': '"""Skip!"""'}), "(msg='Skip!')\n", (1823, 1836), False, 'from dbgen.exceptions import DBgenSkipException\n'), ((1866, 1909), 'dbgen.core.model.Model', 'Model', ([], {'name': '"""new_api"""', 'registry': 'my_registry'}), "(name='new_api', registry=my_registry)\n", (1871, 1909), False, 'from dbgen.core.model import Model\n'), ((1933, 1956), 'dbgen.core.generator.Generator', 'Generator', (['"""add_parent"""'], {}), "('add_parent')\n", (1942, 1956), False, 'from dbgen.core.generator import Generator\n'), ((2125, 2152), 'dbgen.core.generator.Generator', 'Generator', (['"""add_parents_v2"""'], {}), "('add_parents_v2')\n", (2134, 2152), False, 'from dbgen.core.generator import Generator\n'), ((2224, 2251), 'dbgen.core.generator.Generator', 'Generator', (['"""add_parents_v3"""'], {}), "('add_parents_v3')\n", (2233, 2251), False, 'from dbgen.core.generator import Generator\n'), ((2420, 2442), 'dbgen.core.generator.Generator', 'Generator', (['"""add_child"""'], {}), "('add_child')\n", (2429, 2442), False, 'from dbgen.core.generator import Generator\n'), ((2722, 2746), 'dbgen.core.generator.Generator', 'Generator', (['"""failing_gen"""'], {}), "('failing_gen')\n", (2731, 2746), False, 'from dbgen.core.generator import Generator\n'), ((2789, 2810), 'dbgen.core.generator.Generator', 'Generator', (['"""skip_gen"""'], {}), "('skip_gen')\n", (2798, 2810), False, 'from dbgen.core.generator import Generator\n'), ((1217, 1229), 'sqlalchemy.types.JSON', 'types.JSON', ([], {}), '()\n', (1227, 1229), True, 'import sqlalchemy.types as types\n'), ((2470, 2501), 
'sqlmodel.select', 'select', (['Parent.id', 'Parent.label'], {}), '(Parent.id, Parent.label)\n', (2476, 2501), False, 'from sqlmodel import Field, select\n')] |
from time import sleep
from sqlmodel import select
from icon_governance.config import settings
from icon_governance.log import logger
from icon_governance.models.preps import Prep
from icon_governance.utils.rpc import (
convert_hex_int,
get_preps_cps,
get_sponsors_record,
post_rpc_json,
)
def get_cps(session):
sponsors = post_rpc_json(get_sponsors_record())
if sponsors is None:
logger.info("No sponsors found from rpc.")
sleep(1)
return
for k, v in sponsors.items():
prep = session.get(Prep, k)
if prep is None:
logger.info("No preps found in db? Should not ever happen cuz of db_init.")
continue
prep.sponsored_cps_grants = convert_hex_int(v)
session.merge(prep)
session.commit()
result = session.execute(select(Prep))
preps = result.scalars().all()
cps_preps = post_rpc_json(get_preps_cps())
prep_list = [i["address"] for i in cps_preps]
for prep in preps:
if prep.address in prep_list:
prep.cps_governance = True
else:
prep.cps_governance = False
session.merge(prep)
session.commit()
def cps_cron(session):
while True:
logger.info("Starting cps cron")
get_cps(session)
logger.info("CPS cron ran.")
sleep(settings.CRON_SLEEP_SEC * 10)
if __name__ == "__main__":
from icon_governance.db import session_factory
get_cps(session_factory())
| [
"sqlmodel.select"
] | [((361, 382), 'icon_governance.utils.rpc.get_sponsors_record', 'get_sponsors_record', ([], {}), '()\n', (380, 382), False, 'from icon_governance.utils.rpc import convert_hex_int, get_preps_cps, get_sponsors_record, post_rpc_json\n'), ((418, 460), 'icon_governance.log.logger.info', 'logger.info', (['"""No sponsors found from rpc."""'], {}), "('No sponsors found from rpc.')\n", (429, 460), False, 'from icon_governance.log import logger\n'), ((469, 477), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (474, 477), False, 'from time import sleep\n'), ((735, 753), 'icon_governance.utils.rpc.convert_hex_int', 'convert_hex_int', (['v'], {}), '(v)\n', (750, 753), False, 'from icon_governance.utils.rpc import convert_hex_int, get_preps_cps, get_sponsors_record, post_rpc_json\n'), ((838, 850), 'sqlmodel.select', 'select', (['Prep'], {}), '(Prep)\n', (844, 850), False, 'from sqlmodel import select\n'), ((918, 933), 'icon_governance.utils.rpc.get_preps_cps', 'get_preps_cps', ([], {}), '()\n', (931, 933), False, 'from icon_governance.utils.rpc import convert_hex_int, get_preps_cps, get_sponsors_record, post_rpc_json\n'), ((1243, 1275), 'icon_governance.log.logger.info', 'logger.info', (['"""Starting cps cron"""'], {}), "('Starting cps cron')\n", (1254, 1275), False, 'from icon_governance.log import logger\n'), ((1309, 1337), 'icon_governance.log.logger.info', 'logger.info', (['"""CPS cron ran."""'], {}), "('CPS cron ran.')\n", (1320, 1337), False, 'from icon_governance.log import logger\n'), ((1346, 1381), 'time.sleep', 'sleep', (['(settings.CRON_SLEEP_SEC * 10)'], {}), '(settings.CRON_SLEEP_SEC * 10)\n', (1351, 1381), False, 'from time import sleep\n'), ((1475, 1492), 'icon_governance.db.session_factory', 'session_factory', ([], {}), '()\n', (1490, 1492), False, 'from icon_governance.db import session_factory\n'), ((601, 676), 'icon_governance.log.logger.info', 'logger.info', (['"""No preps found in db? 
Should not ever happen cuz of db_init."""'], {}), "('No preps found in db? Should not ever happen cuz of db_init.')\n", (612, 676), False, 'from icon_governance.log import logger\n')] |
from typing import Optional, List
from sqlmodel import (
SQLModel,
Field,
create_engine,
Relationship
)
engine = create_engine('sqlite:///database.db')
class Person(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
nome: str
idade: int
livros: List['Livro'] = Relationship(back_populates='person')
class Livro(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
titulo: str
pessoa_id: Optional[int] = Field(default=None, foreign_key='person.id')
pessoa: Optional[Person] = Relationship(back_populates='livros')
SQLModel.metadata.create_all(engine) | [
"sqlmodel.Relationship",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine",
"sqlmodel.Field"
] | [((131, 169), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {}), "('sqlite:///database.db')\n", (144, 169), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((629, 665), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (657, 665), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((232, 269), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (237, 269), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((328, 365), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""person"""'}), "(back_populates='person')\n", (340, 365), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((427, 464), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (432, 464), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((513, 557), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""person.id"""'}), "(default=None, foreign_key='person.id')\n", (518, 557), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n'), ((589, 626), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""livros"""'}), "(back_populates='livros')\n", (601, 626), False, 'from sqlmodel import SQLModel, Field, create_engine, Relationship\n')] |
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import session
from flask import current_app
from flask import render_template
from flask import request
from app.utils import render_markdown
from sqlmodel import Session as SQLSession
from sqlmodel import select
from app.models.server import Catagory, Organization
from app.utils.decorators import admin_required
bp = Blueprint("admin", __name__)
@bp.route("/")
@admin_required
def index():
return render_markdown(
"page.html",
file="admin.md",
session=session,
)
@bp.route("/organizations", methods=["GET"])
def get_organizations():
with SQLSession(current_app.engine) as s:
orgs = select(Organization)
results = s.exec(orgs).all()
return render_template(
"admin/organizations.html", session=session, organizations=results
)
@bp.route("/server/organization", methods=["POST", "DELETE"])
def post_server():
if request.method == "POST":
data = request.form
org_id = data.get("id").strip()
title = data.get("title").strip()
return jsonify({}), 200
@bp.route("/catagory", methods=["POST"])
@admin_required
def get_catagory():
data = request.form
cat_id = data.get("id")
title = data.get("title")
color = data.get("color")
with SQLSession(current_app.engine) as s:
if cat_id:
_catagory = s.exec(select(Catagory).where(Catagory.id == cat_id)).one()
_catagory.title = data.get("title")
_catagory.meta_ref = data.get("title").lower().replace(" ", "-")
_catagory.color = data.get("color")
s.add(_catagory)
s.commit()
else:
_catagory = Catagory(
title=data.get("title"),
meta_ref=data.get("title").lower().replace(" ", "-"),
color=data.get("color"),
)
s.add(_catagory)
s.commit()
return jsonify({"result": "Operate successfully"})
@bp.route("/catagories", methods=["GET", "POST"])
def get_post_catagories():
if request.method == "GET":
with SQLSession(current_app.engine) as s:
results = s.exec(select(Catagory)).all()
return render_template(
"admin/catagories.html", sesssion=session, catagories=results
)
else:
data = request.get_json()
| [
"sqlmodel.select",
"sqlmodel.Session"
] | [((413, 441), 'flask.Blueprint', 'Blueprint', (['"""admin"""', '__name__'], {}), "('admin', __name__)\n", (422, 441), False, 'from flask import Blueprint\n'), ((499, 561), 'app.utils.render_markdown', 'render_markdown', (['"""page.html"""'], {'file': '"""admin.md"""', 'session': 'session'}), "('page.html', file='admin.md', session=session)\n", (514, 561), False, 'from app.utils import render_markdown\n'), ((2011, 2054), 'flask.jsonify', 'jsonify', (["{'result': 'Operate successfully'}"], {}), "({'result': 'Operate successfully'})\n", (2018, 2054), False, 'from flask import jsonify\n'), ((674, 704), 'sqlmodel.Session', 'SQLSession', (['current_app.engine'], {}), '(current_app.engine)\n', (684, 704), True, 'from sqlmodel import Session as SQLSession\n'), ((726, 746), 'sqlmodel.select', 'select', (['Organization'], {}), '(Organization)\n', (732, 746), False, 'from sqlmodel import select\n'), ((800, 888), 'flask.render_template', 'render_template', (['"""admin/organizations.html"""'], {'session': 'session', 'organizations': 'results'}), "('admin/organizations.html', session=session, organizations=\n results)\n", (815, 888), False, 'from flask import render_template\n'), ((1367, 1397), 'sqlmodel.Session', 'SQLSession', (['current_app.engine'], {}), '(current_app.engine)\n', (1377, 1397), True, 'from sqlmodel import Session as SQLSession\n'), ((2423, 2441), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2439, 2441), False, 'from flask import request\n'), ((1148, 1159), 'flask.jsonify', 'jsonify', (['{}'], {}), '({})\n', (1155, 1159), False, 'from flask import jsonify\n'), ((2179, 2209), 'sqlmodel.Session', 'SQLSession', (['current_app.engine'], {}), '(current_app.engine)\n', (2189, 2209), True, 'from sqlmodel import Session as SQLSession\n'), ((2289, 2367), 'flask.render_template', 'render_template', (['"""admin/catagories.html"""'], {'sesssion': 'session', 'catagories': 'results'}), "('admin/catagories.html', sesssion=session, 
catagories=results)\n", (2304, 2367), False, 'from flask import render_template\n'), ((2245, 2261), 'sqlmodel.select', 'select', (['Catagory'], {}), '(Catagory)\n', (2251, 2261), False, 'from sqlmodel import select\n'), ((1454, 1470), 'sqlmodel.select', 'select', (['Catagory'], {}), '(Catagory)\n', (1460, 1470), False, 'from sqlmodel import select\n')] |
from typing import Optional
import pytest
from sqlmodel import Field, SQLModel, create_engine
def test_missing_sql_type():
class CustomType:
@classmethod
def __get_validators__(cls):
yield cls.validate
@classmethod
def validate(cls, v):
return v
with pytest.raises(ValueError):
class Item(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
item: CustomType
| [
"sqlmodel.Field"
] | [((332, 357), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (345, 357), False, 'import pytest\n'), ((433, 470), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (438, 470), False, 'from sqlmodel import Field, SQLModel, create_engine\n')] |
import uuid
from datetime import datetime
from sqlmodel import Field
from api.db.models.base import BaseModel, BaseTable
class TenantSchemaBase(BaseModel):
tenant_id: uuid.UUID = Field(nullable=False)
wallet_id: uuid.UUID = Field(nullable=False)
# workflow_id will be null until the tenant kcks it off
workflow_id: uuid.UUID = Field(nullable=True, default=None)
schema_id: str = Field(nullable=True, default=None)
schema_name: str = Field(nullable=True, default=None)
schema_version: str = Field(nullable=True, default=None)
schema_attrs: str = Field(nullable=True, default=None)
schema_txn_id: uuid.UUID = Field(nullable=True, default=None)
schema_state: str = Field(nullable=True, default=None)
cred_def_tag: str = Field(nullable=True, default=None)
cred_def_txn_id: uuid.UUID = Field(nullable=True, default=None)
cred_def_id: str = Field(nullable=True, default=None)
cred_def_state: str = Field(nullable=True, default=None)
cred_revocation: bool = Field(nullable=True, default=None)
cred_revoc_reg_size: int = Field(nullable=True, default=None)
revoc_reg_state: str = Field(nullable=True, default=None)
class TenantSchema(TenantSchemaBase, BaseTable, table=True):
# This is the class that represents the table
pass
class TenantSchemaCreate(TenantSchemaBase):
# This is the class that represents interface for creating a tenant
# we must set all the required fields,
# but do not need to set optional (and shouldn't)
pass
class TenantSchemaRead(TenantSchemaBase):
# This is the class that represents interface for reading a tenant
# here we indicate id, created_at and updated_at must be included
id: uuid.UUID
created_at: datetime
updated_at: datetime
class TenantSchemaUpdate(BaseModel):
# This is our update interface
# This does NOT inherit from TenantSchemaBase,
# so no need to worry about accidentally updating id or other fields
id: uuid.UUID
workflow_id: uuid.UUID = Field(nullable=True, default=None)
schema_id: str = Field(nullable=True, default=None)
schema_txn_id: uuid.UUID = Field(nullable=True, default=None)
schema_state: str = Field(nullable=True, default=None)
cred_def_txn_id: uuid.UUID = Field(nullable=True, default=None)
cred_def_id: str = Field(nullable=True, default=None)
cred_def_state: str = Field(nullable=True, default=None)
revoc_reg_state: str = Field(nullable=True, default=None)
| [
"sqlmodel.Field"
] | [((187, 208), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (192, 208), False, 'from sqlmodel import Field\n'), ((236, 257), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (241, 257), False, 'from sqlmodel import Field\n'), ((347, 381), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (352, 381), False, 'from sqlmodel import Field\n'), ((403, 437), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (408, 437), False, 'from sqlmodel import Field\n'), ((461, 495), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (466, 495), False, 'from sqlmodel import Field\n'), ((522, 556), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (527, 556), False, 'from sqlmodel import Field\n'), ((581, 615), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (586, 615), False, 'from sqlmodel import Field\n'), ((647, 681), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (652, 681), False, 'from sqlmodel import Field\n'), ((706, 740), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (711, 740), False, 'from sqlmodel import Field\n'), ((765, 799), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (770, 799), False, 'from sqlmodel import Field\n'), ((833, 867), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (838, 867), False, 'from sqlmodel import Field\n'), ((891, 925), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, 
default=None)\n', (896, 925), False, 'from sqlmodel import Field\n'), ((952, 986), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (957, 986), False, 'from sqlmodel import Field\n'), ((1015, 1049), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (1020, 1049), False, 'from sqlmodel import Field\n'), ((1081, 1115), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (1086, 1115), False, 'from sqlmodel import Field\n'), ((1143, 1177), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (1148, 1177), False, 'from sqlmodel import Field\n'), ((2022, 2056), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2027, 2056), False, 'from sqlmodel import Field\n'), ((2078, 2112), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2083, 2112), False, 'from sqlmodel import Field\n'), ((2144, 2178), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2149, 2178), False, 'from sqlmodel import Field\n'), ((2203, 2237), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2208, 2237), False, 'from sqlmodel import Field\n'), ((2271, 2305), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2276, 2305), False, 'from sqlmodel import Field\n'), ((2329, 2363), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2334, 2363), False, 'from sqlmodel import Field\n'), ((2390, 2424), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2395, 
2424), False, 'from sqlmodel import Field\n'), ((2452, 2486), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (2457, 2486), False, 'from sqlmodel import Field\n')] |
"""add remoteuser
Revision ID: 5c6d07e2a9c1
Revises: <KEY>
Create Date: 2022-02-13 01:54:01.310088
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '5c6d07e2a9c1'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('remoteuser',
sa.Column('username', sa.VARCHAR(), nullable=True),
sa.Column('id', sa.Integer(), nullable=True),
sa.Column('inbox', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('public_key', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('remoteuser')
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((880, 907), 'alembic.op.drop_table', 'op.drop_table', (['"""remoteuser"""'], {}), "('remoteuser')\n", (893, 907), False, 'from alembic import op\n'), ((683, 712), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (706, 712), True, 'import sqlalchemy as sa\n'), ((718, 749), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""username"""'], {}), "('username')\n", (737, 749), True, 'import sqlalchemy as sa\n'), ((443, 455), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (453, 455), True, 'import sqlalchemy as sa\n'), ((493, 505), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (503, 505), True, 'import sqlalchemy as sa\n'), ((546, 580), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (578, 580), False, 'import sqlmodel\n'), ((627, 661), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (659, 661), False, 'import sqlmodel\n')] |
"""Add schools
Revision ID: 423e059e8b64
Revises: 58d2280520b8
Create Date: 2022-02-12 07:44:42.189067+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "423e059e8b64"
down_revision = "58d2280520b8"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"schools",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.add_column("applications", sa.Column("school_id", sa.Integer(), nullable=False))
op.create_foreign_key(None, "applications", "schools", ["school_id"], ["id"])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, "applications", type_="foreignkey")
op.drop_column("applications", "school_id")
op.drop_table("schools")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((710, 787), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""schools"""', "['school_id']", "['id']"], {}), "(None, 'applications', 'schools', ['school_id'], ['id'])\n", (731, 787), False, 'from alembic import op\n'), ((912, 972), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""applications"""'], {'type_': '"""foreignkey"""'}), "(None, 'applications', type_='foreignkey')\n", (930, 972), False, 'from alembic import op\n'), ((977, 1020), 'alembic.op.drop_column', 'op.drop_column', (['"""applications"""', '"""school_id"""'], {}), "('applications', 'school_id')\n", (991, 1020), False, 'from alembic import op\n'), ((1025, 1049), 'alembic.op.drop_table', 'op.drop_table', (['"""schools"""'], {}), "('schools')\n", (1038, 1049), False, 'from alembic import op\n'), ((581, 610), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (604, 610), True, 'import sqlalchemy as sa\n'), ((463, 475), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (473, 475), True, 'import sqlalchemy as sa\n'), ((520, 554), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (552, 554), False, 'import sqlmodel\n'), ((675, 687), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (685, 687), True, 'import sqlalchemy as sa\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.