code stringlengths 110 18.9k | apis list | extract_api stringlengths 123 24.4k |
|---|---|---|
# from sqlmodel import select
from app.src.db.engine import get_db
from sqlmodel import Session, select
from app.src.db.manager import create_table
from app.src.models.db.product import Product
from app.src.models.db.product_type import ProductType
from app.src.models.db.tag import Tag
def test_data():
# create the tables (come renderle dipendenti da una sessione)
engine = get_db()
create_table()
with Session(engine) as session:
# define data
type_panini = ProductType(name="Panino", description="Tutto ciò che è panino")
cibo_tag = Tag(name="cibo")
panino = Product(
name="panino",
description="panino buono",
price=3.30,
available=True,
product_type=type_panini,
tags=[cibo_tag],
)
kebab = Product(
name="kebab",
description="senza cipolla",
price=4,
available=True,
product_type=type_panini,
tags=[cibo_tag],
)
session.add(type_panini)
session.add(cibo_tag)
session.add(panino)
session.add(kebab)
statement = select(Product)
results = session.exec(statement)
products = results.all()
assert len(products) >= 1
results = select(Product).where(Product.name == "kebab")
results = session.exec(statement)
products = results.all()
assert len(products) != 0
session.rollback()
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((387, 395), 'app.src.db.engine.get_db', 'get_db', ([], {}), '()\n', (393, 395), False, 'from app.src.db.engine import get_db\n'), ((400, 414), 'app.src.db.manager.create_table', 'create_table', ([], {}), '()\n', (412, 414), False, 'from app.src.db.manager import create_table\n'), ((425, 440), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (432, 440), False, 'from sqlmodel import Session, select\n'), ((497, 561), 'app.src.models.db.product_type.ProductType', 'ProductType', ([], {'name': '"""Panino"""', 'description': '"""Tutto ciò che è panino"""'}), "(name='Panino', description='Tutto ciò che è panino')\n", (508, 561), False, 'from app.src.models.db.product_type import ProductType\n'), ((581, 597), 'app.src.models.db.tag.Tag', 'Tag', ([], {'name': '"""cibo"""'}), "(name='cibo')\n", (584, 597), False, 'from app.src.models.db.tag import Tag\n'), ((615, 740), 'app.src.models.db.product.Product', 'Product', ([], {'name': '"""panino"""', 'description': '"""panino buono"""', 'price': '(3.3)', 'available': '(True)', 'product_type': 'type_panini', 'tags': '[cibo_tag]'}), "(name='panino', description='panino buono', price=3.3, available=\n True, product_type=type_panini, tags=[cibo_tag])\n", (622, 740), False, 'from app.src.models.db.product import Product\n'), ((836, 958), 'app.src.models.db.product.Product', 'Product', ([], {'name': '"""kebab"""', 'description': '"""senza cipolla"""', 'price': '(4)', 'available': '(True)', 'product_type': 'type_panini', 'tags': '[cibo_tag]'}), "(name='kebab', description='senza cipolla', price=4, available=True,\n product_type=type_panini, tags=[cibo_tag])\n", (843, 958), False, 'from app.src.models.db.product import Product\n'), ((1178, 1193), 'sqlmodel.select', 'select', (['Product'], {}), '(Product)\n', (1184, 1193), False, 'from sqlmodel import Session, select\n'), ((1322, 1337), 'sqlmodel.select', 'select', (['Product'], {}), '(Product)\n', (1328, 1337), False, 'from sqlmodel import Session, select\n')] |
from pydantic.types import List, Optional
from sqlmodel import Field, Relationship, SQLModel
class TeamBase(SQLModel):
name: str
headquarters: str
class Config:
schema_extra = {
"example": {
"name": "wonderful league",
"headquarters": "Fortress of Solitude",
}
}
class Team(TeamBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
heroes: List["Hero"] = Relationship(back_populates="team")
class TeamCreate(TeamBase):
pass
class TeamRead(TeamBase):
id: int
class TeamUpdate(TeamBase):
name: Optional[str] = None
headquarters: Optional[str] = None
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((410, 447), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (415, 447), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((476, 511), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""team"""'}), "(back_populates='team')\n", (488, 511), False, 'from sqlmodel import Field, Relationship, SQLModel\n')] |
from urllib.parse import urlparse
from datetime import datetime
import dramatiq
from dramatiq.brokers.redis import RedisBroker
from sqlmodel import Session
from app.db import engine
from app.models import Document, DocumentInput
from app.predict import CategoryPrediction
from app.settings import settings
redis_parameters = urlparse(settings.redis_url)
redis_broker = RedisBroker(
host=redis_parameters.hostname,
port=redis_parameters.port,
username=redis_parameters.username,
password=redis_parameters.password,
# Heroku Redis with TLS use self-signed certs, so we need to tinker a bit
ssl=redis_parameters.scheme == "rediss",
ssl_cert_reqs=None,
)
dramatiq.set_broker(redis_broker)
category_prediction = CategoryPrediction()
@dramatiq.actor
def ingest_document(document_json: str):
document = DocumentInput.parse_raw(document_json)
with Session(engine) as session:
document_db = session.get(Document, document.id)
if document_db is None:
document_db = Document(**document.dict())
else:
document_dict = document.dict(exclude_unset=True)
for key, value in document_dict.items():
setattr(document_db, key, value)
document_db.category = category_prediction.predict(document_db.content)
document_db.updated_at = datetime.utcnow()
session.add(document_db)
session.commit()
| [
"sqlmodel.Session"
] | [((328, 356), 'urllib.parse.urlparse', 'urlparse', (['settings.redis_url'], {}), '(settings.redis_url)\n', (336, 356), False, 'from urllib.parse import urlparse\n'), ((372, 584), 'dramatiq.brokers.redis.RedisBroker', 'RedisBroker', ([], {'host': 'redis_parameters.hostname', 'port': 'redis_parameters.port', 'username': 'redis_parameters.username', 'password': 'redis_parameters.password', 'ssl': "(redis_parameters.scheme == 'rediss')", 'ssl_cert_reqs': 'None'}), "(host=redis_parameters.hostname, port=redis_parameters.port,\n username=redis_parameters.username, password=redis_parameters.password,\n ssl=redis_parameters.scheme == 'rediss', ssl_cert_reqs=None)\n", (383, 584), False, 'from dramatiq.brokers.redis import RedisBroker\n'), ((682, 715), 'dramatiq.set_broker', 'dramatiq.set_broker', (['redis_broker'], {}), '(redis_broker)\n', (701, 715), False, 'import dramatiq\n'), ((740, 760), 'app.predict.CategoryPrediction', 'CategoryPrediction', ([], {}), '()\n', (758, 760), False, 'from app.predict import CategoryPrediction\n'), ((835, 873), 'app.models.DocumentInput.parse_raw', 'DocumentInput.parse_raw', (['document_json'], {}), '(document_json)\n', (858, 873), False, 'from app.models import Document, DocumentInput\n'), ((883, 898), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (890, 898), False, 'from sqlmodel import Session\n'), ((1346, 1363), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1361, 1363), False, 'from datetime import datetime\n')] |
import typing as t
from sqlmodel import Field, SQLModel
class Quotes(SQLModel, table=True):
id: t.Optional[int] = Field(default=None, primary_key=True)
quote: str
| [
"sqlmodel.Field"
] | [((121, 158), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (126, 158), False, 'from sqlmodel import Field, SQLModel\n')] |
from typing import Type
from sqlmodel import select, Session
from ..db import engine
from ..models.base import TSQLModelDB
class BaseRepository:
model: Type[TSQLModelDB]
@classmethod
def create(cls, **kwargs) -> TSQLModelDB:
db_model = cls.model(**kwargs)
db_model.save()
return db_model
@classmethod
def get_all(cls, offset: int = 0, limit: int = 100) -> list[TSQLModelDB] | None:
with Session(engine) as session:
return session.exec(select(cls.model)
.offset(offset)
.limit(limit)
).unique().all()
@classmethod
def get_model_by_id(cls, _id: int) -> TSQLModelDB | None:
with Session(engine) as session:
return session.get(cls.model, _id)
@classmethod
def get_model_by_attr(cls, **kwargs) -> TSQLModelDB | None:
with Session(engine) as session:
return session.exec(select(cls.model)
.filter_by(**kwargs)
).first()
@classmethod
def update_model(cls, db_model: TSQLModelDB, new_data: dict) -> TSQLModelDB:
for key, value in new_data.items():
setattr(db_model, key, value)
db_model.save()
return db_model
@classmethod
def delete_model(cls, db_model: TSQLModelDB) -> None:
db_model.delete()
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((445, 460), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (452, 460), False, 'from sqlmodel import select, Session\n'), ((759, 774), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (766, 774), False, 'from sqlmodel import select, Session\n'), ((929, 944), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (936, 944), False, 'from sqlmodel import select, Session\n'), ((989, 1006), 'sqlmodel.select', 'select', (['cls.model'], {}), '(cls.model)\n', (995, 1006), False, 'from sqlmodel import select, Session\n'), ((505, 522), 'sqlmodel.select', 'select', (['cls.model'], {}), '(cls.model)\n', (511, 522), False, 'from sqlmodel import select, Session\n')] |
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
def test_tutorial(clear_sqlmodel):
from docs_src.tutorial.where import tutorial010 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)
calls = []
new_print = get_testing_print_function(calls)
with patch("builtins.print", new=new_print):
mod.main()
assert calls == [
[{"name": "Tarantula", "secret_name": "<NAME>", "age": 32, "id": 4}],
[{"name": "<NAME>", "secret_name": "<NAME>", "age": 35, "id": 5}],
[
{
"name": "<NAME>",
"secret_name": "<NAME>",
"age": 93,
"id": 7,
}
],
]
| [
"sqlmodel.create_engine"
] | [((267, 296), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (280, 296), False, 'from sqlmodel import create_engine\n'), ((373, 411), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (378, 411), False, 'from unittest.mock import patch\n'), ((421, 431), 'docs_src.tutorial.where.tutorial010.main', 'mod.main', ([], {}), '()\n', (429, 431), True, 'from docs_src.tutorial.where import tutorial010 as mod\n')] |
from sqlmodel import Session, func, text, SQLModel, select
from typing import Any, List, Literal, Optional, Type, TypeVar, Generic
ReadType = TypeVar("ReadType", bound=SQLModel)
CreateType = TypeVar("CreateType", bound=SQLModel)
UpdateType = TypeVar("UpdateType", bound=SQLModel)
class BaseRepository(Generic[ReadType, CreateType, UpdateType]):
entity: Type[ReadType]
def __init__(self, db: Session):
self.db = db
def get_entity(self, *args: Any, **kwargs: Any) -> Optional[ReadType]:
result = self.db.exec(
select(self.entity)
.filter(*args)
.filter_by(**kwargs)
)
return result.first()
def get_entities(self, *args: Any, offset: int = 0, limit: int = 100, order_by: str = 'id', order: Literal['desc', 'asc'] = 'asc', **kwargs: Any) -> List[ReadType]:
result = self.db.exec(
select(self.entity)
.filter(*args)
.filter_by(**kwargs)
.offset(offset)
.limit(limit)
.order_by(text(f"{order_by} {order}"))
)
return result.all()
def count_entities(self) -> int:
return self.db.query(func.count(self.entity.id)).scalar()
def create_entity(self, entity: CreateType) -> ReadType:
entity = self.entity.from_orm(entity)
self.db.add(entity)
self.db.commit()
self.db.refresh(entity)
return entity
def create_entities(self, entities: List[CreateType]) -> ReadType:
entities_input = []
for entity in entities:
entity = self.entity.from_orm(entity)
entities_input.append(entity)
self.db.add_all(entities_input)
self.db.commit()
entities_output = []
for entity in entities:
self.db.refresh(entity)
entities_output.append(entity)
return entities_output
def delete_entity(self, *args, **kwargs) -> bool:
try:
entity = self.get_entity(*args, **kwargs)
if entity is None:
return False
self.db.delete(entity)
self.db.commit()
return True
except Exception:
return False
def delete_entities(self, *args, **kwargs) -> bool:
try:
entities = self.get_entities(*args, **kwargs)
if entities is None:
return False
for entity in entities:
self.db.delete(entity)
self.db.commit()
return True
except Exception:
return False
def update_entity(self, data: UpdateType, *args, **kwargs) -> ReadType:
entity = self.get_entity(*args, **kwargs)
if entity is None:
return None
data = data.dict(exclude_unset=True)
for key, value in data.items():
setattr(entity, key, value)
self.db.add(entity)
self.db.commit()
self.db.refresh(entity)
return entity
def update_entities(self, data: UpdateType, *args, **kwargs) -> ReadType:
entities = self.get_entities(*args, **kwargs)
if entities is None:
return None
data = data.dict(exclude_unset=True)
for entity in entities:
for key, value in data.items():
setattr(entity, key, value)
self.db.add(entity)
self.db.commit()
for entity in entities:
self.db.refresh(entity)
return entities
def update_entity_changes(self, entity: ReadType) -> ReadType:
self.db.add(entity)
self.db.commit()
self.db.refresh(entity)
return entity
| [
"sqlmodel.func.count",
"sqlmodel.text",
"sqlmodel.select"
] | [((143, 178), 'typing.TypeVar', 'TypeVar', (['"""ReadType"""'], {'bound': 'SQLModel'}), "('ReadType', bound=SQLModel)\n", (150, 178), False, 'from typing import Any, List, Literal, Optional, Type, TypeVar, Generic\n'), ((192, 229), 'typing.TypeVar', 'TypeVar', (['"""CreateType"""'], {'bound': 'SQLModel'}), "('CreateType', bound=SQLModel)\n", (199, 229), False, 'from typing import Any, List, Literal, Optional, Type, TypeVar, Generic\n'), ((243, 280), 'typing.TypeVar', 'TypeVar', (['"""UpdateType"""'], {'bound': 'SQLModel'}), "('UpdateType', bound=SQLModel)\n", (250, 280), False, 'from typing import Any, List, Literal, Optional, Type, TypeVar, Generic\n'), ((1043, 1070), 'sqlmodel.text', 'text', (['f"""{order_by} {order}"""'], {}), "(f'{order_by} {order}')\n", (1047, 1070), False, 'from sqlmodel import Session, func, text, SQLModel, select\n'), ((1178, 1204), 'sqlmodel.func.count', 'func.count', (['self.entity.id'], {}), '(self.entity.id)\n', (1188, 1204), False, 'from sqlmodel import Session, func, text, SQLModel, select\n'), ((553, 572), 'sqlmodel.select', 'select', (['self.entity'], {}), '(self.entity)\n', (559, 572), False, 'from sqlmodel import Session, func, text, SQLModel, select\n'), ((887, 906), 'sqlmodel.select', 'select', (['self.entity'], {}), '(self.entity)\n', (893, 906), False, 'from sqlmodel import Session, func, text, SQLModel, select\n')] |
from sqlmodel import SQLModel, create_engine
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
| [
"sqlmodel.create_engine",
"sqlmodel.SQLModel.metadata.create_all"
] | [((134, 159), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {}), '(sqlite_url)\n', (147, 159), False, 'from sqlmodel import SQLModel, create_engine\n'), ((194, 230), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (222, 230), False, 'from sqlmodel import SQLModel, create_engine\n')] |
"""add swag tiers
Revision ID: <KEY>
Revises: 02338256c6aa
Create Date: 2022-06-01 05:58:25.373228+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "02338256c6aa"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"swag_tiers",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("required_attendance", sa.Integer(), nullable=False),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.add_column(
"participants", sa.Column("swag_tier_id", sa.Integer(), nullable=True)
)
op.create_foreign_key(None, "participants", "swag_tiers", ["swag_tier_id"], ["id"])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, "participants", type_="foreignkey")
op.drop_column("participants", "swag_tier_id")
op.drop_table("swag_tiers")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((876, 963), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""participants"""', '"""swag_tiers"""', "['swag_tier_id']", "['id']"], {}), "(None, 'participants', 'swag_tiers', ['swag_tier_id'],\n ['id'])\n", (897, 963), False, 'from alembic import op\n'), ((1084, 1144), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""participants"""'], {'type_': '"""foreignkey"""'}), "(None, 'participants', type_='foreignkey')\n", (1102, 1144), False, 'from alembic import op\n'), ((1149, 1195), 'alembic.op.drop_column', 'op.drop_column', (['"""participants"""', '"""swag_tier_id"""'], {}), "('participants', 'swag_tier_id')\n", (1163, 1195), False, 'from alembic import op\n'), ((1200, 1227), 'alembic.op.drop_table', 'op.drop_table', (['"""swag_tiers"""'], {}), "('swag_tiers')\n", (1213, 1227), False, 'from alembic import op\n'), ((731, 760), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (754, 760), True, 'import sqlalchemy as sa\n'), ((457, 491), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (489, 491), False, 'import sqlmodel\n'), ((543, 577), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (575, 577), False, 'import sqlmodel\n'), ((637, 649), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (647, 649), True, 'import sqlalchemy as sa\n'), ((692, 704), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (702, 704), True, 'import sqlalchemy as sa\n'), ((837, 849), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (847, 849), True, 'import sqlalchemy as sa\n')] |
import hypothesis.strategies as st
from hypothesis import given
from hypothesis.strategies import DataObject
from sqlmodel import Session, select
from fastapi_server.models.user import User
from fastapi_server.test.base_test import BaseTest
class TestDatabase(BaseTest):
def test_user_add_single(self, method_session_fixture: Session):
session = method_session_fixture
assert isinstance(session, Session)
username = 'asd'
email = '<EMAIL>'
password = '<PASSWORD>'
assert session.exec(select(User)).all() == []
session.add(
User(
username=username,
email=email,
password_hashed=password,
is_admin=False,
is_disabled=False,
is_verified=False,
)
)
session.commit()
assert session.exec(select(User)).all() != []
@given(data=st.data())
def test_user_add_multiple(self, data: DataObject):
username = data.draw(st.from_regex('[a-zA-Z0-9]{1,20}', fullmatch=True))
email = data.draw(st.from_regex('[a-zA-Z]{1,20}@gmailcom', fullmatch=True))
password = data.draw(st.from_regex('[a-zA-Z0-9]{1,20}', fullmatch=True))
with self.example_session_context() as session:
assert session.exec(select(User)).all() == []
session.add(
User(
username=username,
email=email,
password_hashed=password,
is_admin=False,
is_disabled=False,
is_verified=False,
)
)
session.commit()
assert session.exec(select(User)).all() != []
| [
"sqlmodel.select"
] | [((599, 720), 'fastapi_server.models.user.User', 'User', ([], {'username': 'username', 'email': 'email', 'password_hashed': 'password', 'is_admin': '(False)', 'is_disabled': '(False)', 'is_verified': '(False)'}), '(username=username, email=email, password_hashed=password, is_admin=\n False, is_disabled=False, is_verified=False)\n', (603, 720), False, 'from fastapi_server.models.user import User\n'), ((1029, 1079), 'hypothesis.strategies.from_regex', 'st.from_regex', (['"""[a-zA-Z0-9]{1,20}"""'], {'fullmatch': '(True)'}), "('[a-zA-Z0-9]{1,20}', fullmatch=True)\n", (1042, 1079), True, 'import hypothesis.strategies as st\n'), ((1107, 1163), 'hypothesis.strategies.from_regex', 'st.from_regex', (['"""[a-zA-Z]{1,20}@gmailcom"""'], {'fullmatch': '(True)'}), "('[a-zA-Z]{1,20}@gmailcom', fullmatch=True)\n", (1120, 1163), True, 'import hypothesis.strategies as st\n'), ((1194, 1244), 'hypothesis.strategies.from_regex', 'st.from_regex', (['"""[a-zA-Z0-9]{1,20}"""'], {'fullmatch': '(True)'}), "('[a-zA-Z0-9]{1,20}', fullmatch=True)\n", (1207, 1244), True, 'import hypothesis.strategies as st\n'), ((933, 942), 'hypothesis.strategies.data', 'st.data', ([], {}), '()\n', (940, 942), True, 'import hypothesis.strategies as st\n'), ((1401, 1522), 'fastapi_server.models.user.User', 'User', ([], {'username': 'username', 'email': 'email', 'password_hashed': 'password', 'is_admin': '(False)', 'is_disabled': '(False)', 'is_verified': '(False)'}), '(username=username, email=email, password_hashed=password, is_admin=\n False, is_disabled=False, is_verified=False)\n', (1405, 1522), False, 'from fastapi_server.models.user import User\n'), ((540, 552), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (546, 552), False, 'from sqlmodel import Session, select\n'), ((890, 902), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (896, 902), False, 'from sqlmodel import Session, select\n'), ((1334, 1346), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1340, 1346), False, 
'from sqlmodel import Session, select\n'), ((1732, 1744), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1738, 1744), False, 'from sqlmodel import Session, select\n')] |
from select import select
from app.schemas.common import (
IGetResponseBase,
IPostResponseBase,
IDeleteResponseBase,
)
from app.utils.text_nlp import analyze_text
from app.schemas.text_inference import (
TextInferenceCreate,
TextInferenceRead,
)
from fastapi_pagination import Page, Params
from sqlmodel.ext.asyncio.session import AsyncSession
from fastapi import APIRouter, Depends, HTTPException, Query
from app.api import deps
from app import crud
from app.models import TextInference
from app.models import TextInferenceBase
from app.models.user import User
from sqlmodel import select
router = APIRouter()
@router.get(
"/text-classification-inferences/",
response_model=IGetResponseBase[Page[TextInferenceRead]],
)
async def get_text_classification_inferences(
params: Params = Depends(),
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
inferences = await crud.text_inference.get_multi_paginated(
db_session, params=params
)
return IGetResponseBase[Page[TextInferenceRead]](data=inferences)
@router.get(
"/text-classification-inferences/order_by_created_at/",
response_model=IGetResponseBase[Page[TextInferenceRead]],
)
async def text_classification_inferences_order_by_created_at(
params: Params = Depends(),
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
query = select(TextInference).order_by(TextInference.created_at)
inferences = await crud.text_inference.get_multi_paginated(
db_session, query=query, params=params
)
return IGetResponseBase[Page[TextInferenceRead]](data=inferences)
@router.post(
"/text-classification-predict/", response_model=IPostResponseBase[TextInferenceRead]
)
async def predict(
request: TextInferenceBase,
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
text = request.text
result = await analyze_text(text)
text = result[0]
res = result[1]
inference = TextInferenceCreate(text=text, result=res)
my_inference = await crud.text_inference.create_inference(
db_session, obj_in=inference, user_id=current_user.id
)
return IPostResponseBase(data=TextInferenceRead.from_orm(my_inference))
| [
"sqlmodel.select"
] | [((620, 631), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (629, 631), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((818, 827), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (825, 827), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((860, 880), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (867, 880), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((907, 944), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (914, 944), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1345, 1354), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (1352, 1354), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1387, 1407), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1394, 1407), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1434, 1471), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (1441, 1471), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1921, 1941), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1928, 1941), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1968, 2005), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (1975, 2005), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((2131, 2173), 'app.schemas.text_inference.TextInferenceCreate', 'TextInferenceCreate', ([], {'text': 'text', 'result': 'res'}), '(text=text, result=res)\n', (2150, 2173), False, 'from app.schemas.text_inference import TextInferenceCreate, TextInferenceRead\n'), ((972, 1038), 'app.crud.text_inference.get_multi_paginated', 'crud.text_inference.get_multi_paginated', (['db_session'], {'params': 
'params'}), '(db_session, params=params)\n', (1011, 1038), False, 'from app import crud\n'), ((1568, 1647), 'app.crud.text_inference.get_multi_paginated', 'crud.text_inference.get_multi_paginated', (['db_session'], {'query': 'query', 'params': 'params'}), '(db_session, query=query, params=params)\n', (1607, 1647), False, 'from app import crud\n'), ((2054, 2072), 'app.utils.text_nlp.analyze_text', 'analyze_text', (['text'], {}), '(text)\n', (2066, 2072), False, 'from app.utils.text_nlp import analyze_text\n'), ((2200, 2296), 'app.crud.text_inference.create_inference', 'crud.text_inference.create_inference', (['db_session'], {'obj_in': 'inference', 'user_id': 'current_user.id'}), '(db_session, obj_in=inference, user_id=\n current_user.id)\n', (2236, 2296), False, 'from app import crud\n'), ((1488, 1509), 'sqlmodel.select', 'select', (['TextInference'], {}), '(TextInference)\n', (1494, 1509), False, 'from sqlmodel import select\n'), ((2341, 2381), 'app.schemas.text_inference.TextInferenceRead.from_orm', 'TextInferenceRead.from_orm', (['my_inference'], {}), '(my_inference)\n', (2367, 2381), False, 'from app.schemas.text_inference import TextInferenceCreate, TextInferenceRead\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import AbstractSet, Any, Dict, Mapping, Optional, Sequence, Union
from pydantic.fields import Undefined, UndefinedType
from sqlalchemy import Column
from sqlmodel import Field
from dbgen.utils.typing import NoArgAnyCallable
def Attribute(
default: Any = Undefined,
*,
default_factory: Optional[NoArgAnyCallable] = None,
alias: str = None,
title: str = None,
description: str = None,
exclude: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any] = None,
include: Union[AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any] = None,
const: bool = None,
gt: float = None,
ge: float = None,
lt: float = None,
le: float = None,
multiple_of: float = None,
min_items: int = None,
max_items: int = None,
min_length: int = None,
max_length: int = None,
allow_mutation: bool = True,
regex: str = None,
primary_key: bool = False,
foreign_key: Optional[Any] = None,
nullable: Union[bool, UndefinedType] = Undefined,
index: Union[bool, UndefinedType] = Undefined,
sa_column: Union[Column, UndefinedType] = Undefined,
sa_column_args: Union[Sequence[Any], UndefinedType] = Undefined,
sa_column_kwargs: Union[Mapping[str, Any], UndefinedType] = Undefined,
schema_extra: Optional[Dict[str, Any]] = None,
) -> Any:
field = Field(
default,
default_factory=default_factory,
alias=alias,
title=title,
description=description,
exclude=exclude,
include=include,
const=const,
gt=gt,
ge=ge,
lt=lt,
le=le,
multiple_of=multiple_of,
min_items=min_items,
max_items=max_items,
min_length=min_length,
max_length=max_length,
allow_mutation=allow_mutation,
regex=regex,
primary_key=primary_key,
foreign_key=foreign_key,
nullable=nullable,
index=index,
sa_column=sa_column,
sa_column_args=sa_column_args,
sa_column_kwargs=sa_column_kwargs,
schema_extra=schema_extra,
)
return field
| [
"sqlmodel.Field"
] | [((1965, 2523), 'sqlmodel.Field', 'Field', (['default'], {'default_factory': 'default_factory', 'alias': 'alias', 'title': 'title', 'description': 'description', 'exclude': 'exclude', 'include': 'include', 'const': 'const', 'gt': 'gt', 'ge': 'ge', 'lt': 'lt', 'le': 'le', 'multiple_of': 'multiple_of', 'min_items': 'min_items', 'max_items': 'max_items', 'min_length': 'min_length', 'max_length': 'max_length', 'allow_mutation': 'allow_mutation', 'regex': 'regex', 'primary_key': 'primary_key', 'foreign_key': 'foreign_key', 'nullable': 'nullable', 'index': 'index', 'sa_column': 'sa_column', 'sa_column_args': 'sa_column_args', 'sa_column_kwargs': 'sa_column_kwargs', 'schema_extra': 'schema_extra'}), '(default, default_factory=default_factory, alias=alias, title=title,\n description=description, exclude=exclude, include=include, const=const,\n gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, min_items=\n min_items, max_items=max_items, min_length=min_length, max_length=\n max_length, allow_mutation=allow_mutation, regex=regex, primary_key=\n primary_key, foreign_key=foreign_key, nullable=nullable, index=index,\n sa_column=sa_column, sa_column_args=sa_column_args, sa_column_kwargs=\n sa_column_kwargs, schema_extra=schema_extra)\n', (1970, 2523), False, 'from sqlmodel import Field\n')] |
"""
DAG related functions.
"""
import operator
from collections import defaultdict
from io import StringIO
from typing import Any, DefaultDict, Dict, List, Optional, Set
import asciidag.graph
import asciidag.node
from sqlmodel import Session, select
from sqloxide import parse_sql
from datajunction.constants import DJ_DATABASE_ID
from datajunction.models.database import Database
from datajunction.models.node import Node
from datajunction.sql.parse import find_nodes_by_key
from datajunction.typing import ParseTree
def render_dag(dependencies: Dict[str, Set[str]], **kwargs: Any) -> str:
"""
Render the DAG of dependencies.
"""
out = StringIO()
graph = asciidag.graph.Graph(out, **kwargs)
asciidag_nodes: Dict[str, asciidag.node.Node] = {}
tips = sorted(
[build_asciidag(name, dependencies, asciidag_nodes) for name in dependencies],
key=lambda n: n.item,
)
graph.show_nodes(tips)
out.seek(0)
return out.getvalue()
def build_asciidag(
    name: str,
    dependencies: Dict[str, Set[str]],
    asciidag_nodes: Dict[str, asciidag.node.Node],
) -> asciidag.node.Node:
    """
    Build (or reuse) the ``asciidag`` node for ``name``, recursing into children.
    """
    node = asciidag_nodes.get(name)
    if node is None:
        # First time we see this name: create and register the node so
        # other branches of the recursion can share it.
        node = asciidag.node.Node(name)
        asciidag_nodes[name] = node

    children = [
        build_asciidag(child, dependencies, asciidag_nodes)
        for child in dependencies[name]
    ]
    node.parents = sorted(children, key=lambda child: child.item)
    return node
def get_computable_databases(
    node: Node,
    columns: Optional[Set[str]] = None,
) -> Set[Database]:
    """
    Return all the databases where a given node can be computed.

    This takes into consideration the node expression, since some of the
    columns might not be present in all databases.
    """
    if columns is None:
        columns = {column.name for column in node.columns}

    # Databases where the node is explicitly materialized in a table that
    # carries every required column.
    databases = {
        table.database
        for table in node.tables
        if columns <= {column.name for column in table.columns}
    }

    # Databases common to all parents, restricted to the columns the node's
    # expression actually references from each parent.
    parent_columns = get_referenced_columns_from_sql(node.expression, node.parents)
    if node.parents:
        per_parent = [
            get_computable_databases(parent, parent_columns[parent.name])
            for parent in node.parents
        ]
        databases |= set.intersection(*per_parent)
    return databases
def get_database_for_nodes(
    session: Session,
    nodes: List[Node],
    node_columns: Dict[str, Set[str]],
    database_id: Optional[int] = None,
) -> Database:
    """
    Given a list of nodes, return the best database to compute metric.

    When no nodes are passed, the database with the lowest cost is returned.

    Raises:
        Exception: if no candidate database exists, or if ``database_id``
            does not match any candidate.
    """
    if nodes:
        # Only databases able to compute *every* requested node qualify.
        databases = set.intersection(
            *[get_computable_databases(node, node_columns[node.name]) for node in nodes]
        )
    else:
        # No nodes: any database except the internal DJ metadata database.
        databases = session.exec(
            select(Database).where(Database.id != DJ_DATABASE_ID),
        ).all()
    if not databases:
        raise Exception("No valid database was found")
    if database_id is not None:
        for database in databases:
            if database.id == database_id:
                return database
        raise Exception(f"Database ID {database_id} is not valid")
    # Cheapest candidate; ``min`` is O(n) where ``sorted(...)[0]`` was
    # O(n log n), and picks the same element.
    return min(databases, key=operator.attrgetter("cost"))
def get_referenced_columns_from_sql(
    sql: Optional[str],
    parents: List[Node],
) -> DefaultDict[str, Set[str]]:
    """
    Given a SQL expression, return the referenced columns.

    Referenced columns are a dictionary mapping parent name to column name(s).
    """
    if sql:
        tree = parse_sql(sql, dialect="ansi")
        return get_referenced_columns_from_tree(tree, parents)
    # No expression: nothing is referenced.
    return defaultdict(set)
def get_referenced_columns_from_tree(
    tree: ParseTree,
    parents: List[Node],
) -> DefaultDict[str, Set[str]]:
    """
    Return the columns referenced in parents given a parse tree.
    """
    referenced_columns: DefaultDict[str, Set[str]] = defaultdict(set)
    parent_columns = {
        parent.name: {column.name for column in parent.columns} for parent in parents
    }

    # Compound identifiers carry their parent explicitly (``a.b.column``).
    for compound in find_nodes_by_key(tree, "CompoundIdentifier"):
        parent_name = ".".join(part["value"] for part in compound[:-1])
        referenced_columns[parent_name].add(compound[-1]["value"])

    # Bare identifiers must belong to exactly one parent to be resolvable.
    for identifier in find_nodes_by_key(tree, "Identifier"):
        column = identifier["value"]
        owners = [
            parent for parent, columns in parent_columns.items() if column in columns
        ]
        if not owners:
            raise Exception(f"Column {column} not found in any parent")
        if len(owners) > 1:
            raise Exception(f"Column {column} is ambiguous")
        referenced_columns[owners[0]].add(column)

    return referenced_columns
def get_dimensions(node: Node) -> List[str]:
    """
    Return the available dimensions in a given node.
    """
    dimensions: List[str] = []
    for parent in node.parents:
        for column in parent.columns:
            dimensions.append(f"{parent.name}.{column.name}")
            dimension = column.dimension
            if dimension:
                # Columns of a linked dimension are addressable too.
                dimensions.extend(
                    f"{dimension.name}.{dimension_column.name}"
                    for dimension_column in dimension.columns
                )
    return sorted(dimensions)
| [
"sqlmodel.select"
] | [((658, 668), 'io.StringIO', 'StringIO', ([], {}), '()\n', (666, 668), False, 'from io import StringIO\n'), ((3980, 4010), 'sqloxide.parse_sql', 'parse_sql', (['sql'], {'dialect': '"""ansi"""'}), "(sql, dialect='ansi')\n", (3989, 4010), False, 'from sqloxide import parse_sql\n'), ((4324, 4340), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (4335, 4340), False, 'from collections import defaultdict\n'), ((4536, 4581), 'datajunction.sql.parse.find_nodes_by_key', 'find_nodes_by_key', (['tree', '"""CompoundIdentifier"""'], {}), "(tree, 'CompoundIdentifier')\n", (4553, 4581), False, 'from datajunction.sql.parse import find_nodes_by_key\n'), ((4869, 4906), 'datajunction.sql.parse.find_nodes_by_key', 'find_nodes_by_key', (['tree', '"""Identifier"""'], {}), "(tree, 'Identifier')\n", (4886, 4906), False, 'from datajunction.sql.parse import find_nodes_by_key\n'), ((3951, 3967), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (3962, 3967), False, 'from collections import defaultdict\n'), ((3612, 3639), 'operator.attrgetter', 'operator.attrgetter', (['"""cost"""'], {}), "('cost')\n", (3631, 3639), False, 'import operator\n'), ((3219, 3235), 'sqlmodel.select', 'select', (['Database'], {}), '(Database)\n', (3225, 3235), False, 'from sqlmodel import Session, select\n')] |
from fastapi import FastAPI
from sqlmodel import SQLModel, create_engine, Session, select
from datetime import datetime
from datastore.model import Label, LabelAssignment, Meter, Measurement
from pydantic import BaseModel
# SQLite database file lives next to the application.
SQLITE_FILE_NAME = "database.db"
sqlite_url = f"sqlite:///{SQLITE_FILE_NAME}"
# check_same_thread=False lets the connection be used from FastAPI's
# worker threads (safe for SQLite when access is serialized per session).
connect_args = {"check_same_thread": False}
engine = create_engine(sqlite_url, echo=True, connect_args=connect_args)
def create_db_and_tables():
    """Create all tables registered on the SQLModel metadata."""
    SQLModel.metadata.create_all(engine)
app = FastAPI()
@app.on_event("startup")
def on_startup():
    """Create the database schema when the application starts."""
    create_db_and_tables()
@app.get("/meters")
def read_usage():
    """Return every meter in the database."""
    with Session(engine) as session:
        return session.query(Meter).all()
@app.get("/meters/{meter_id}/")
def read_meter(meter_id: int):
    """Return a single meter by id, or None if it does not exist."""
    with Session(engine) as session:
        query = session.query(Meter).filter(Meter.id == meter_id)
        return query.first()
@app.get("/meters/{meter_id}/measurements")
def read_measurements(meter_id: int, start_date: datetime = None, end_date: datetime = None):
    """Return a meter's measurements, optionally limited to a capture-time window."""
    with Session(engine) as session:
        # Collect filters first, then apply them in a single query.
        filters = [Measurement.meter_id == meter_id]
        if start_date:
            filters.append(Measurement.capture_time >= start_date)
        if end_date:
            filters.append(Measurement.capture_time <= end_date)
        return session.query(Measurement).filter(*filters).all()
class LabelAssignmentPostData(BaseModel):
    """Request body for assigning a label to a meter over a time window."""
    # Id of the label to assign (existence is not validated here).
    label_id: int
    # Time window the label applies to.
    start_time: datetime
    end_time: datetime
@app.post("/meters/{meter_id}/labels")
def assign_label(meter_id: int, data: LabelAssignmentPostData):
    """Attach a label to a meter for the time window given in the body.

    Fix: removed the hard-coded ``ass1`` LabelAssignment that was built on
    every request but never added to the session — leftover debug code.
    """
    with Session(engine) as session:
        assignment = LabelAssignment(
            meter_id=meter_id,
            label_id=data.label_id,
            start_time=data.start_time,
            end_time=data.end_time,
        )
        session.add(assignment)
        session.commit()
        return "Label assigned successfully!"
@app.get("/meters/{meter_id}/labels")
def get_assigned_labels(meter_id: int):
    """Return a meter's label assignments joined with label name/color."""
    with Session(engine) as session:
        # One joined query: assignment window + id, plus the label's
        # presentation fields, filtered to this meter.
        labels = session.query(LabelAssignment.start_time, LabelAssignment.end_time, Label.name, Label.color, LabelAssignment.id).filter(
            LabelAssignment.meter_id == meter_id).join(Label).all()
        return labels
@app.delete("/labels/assignments/{assignment_id}")
def delete_assignment(assignment_id: int):
    """Delete a single label assignment by its id."""
    with Session(engine) as session:
        matching = session.query(LabelAssignment).filter(
            LabelAssignment.id == assignment_id)
        matching.delete()
        session.commit()
        return "Label assignment deleted successfully!"
@app.get("/labels")
def get_labels():
    """Return every label defined in the database."""
    with Session(engine) as session:
        return session.query(Label).all()
@app.post("/db/setDefaults")
def set_defaults():
    """Wipe the database and seed it with two meters, demo measurements,
    two labels and one label assignment (all with fixed ids)."""
    clear_db()
    with Session(engine) as session:
        meter1 = Meter(serial_number="Meter one", id=1)
        meter2 = Meter(serial_number="Meter two", id=2)
        # 59 measurements (seconds 0..58 of 2020-01-01 00:00) for meter1 only;
        # meter2 intentionally gets no measurements.
        for i in range(0, 59):
            session.add(Measurement(meter_id=meter1.id, voltage_phase_1=10+i,
                        voltage_phase_2=(i*i) % 230, voltage_phase_3=30, capture_time=datetime(2020, 1, 1, 0, 0, i)))
        session.add(meter1)
        session.add(meter2)
        label1 = Label(name="Label one", id=1, color="red")
        label2 = Label(name="Label two", id=2, color="blue")
        # One demo assignment covering seconds 30-40 on meter1.
        ass1 = LabelAssignment(meter_id=meter1.id, label_id=label1.id, start_time=datetime(
            2020, 1, 1, 0, 0, 30), end_time=datetime(2020, 1, 1, 0, 0, 40))
        session.add(label1)
        session.add(label2)
        session.add(ass1)
        session.commit()
        return "Default set successfully!"
@app.post("/db/clear")
def clear_db():
    """Remove every row from all tables."""
    with Session(engine) as session:
        # Same deletion order as before: meters, measurements, labels, assignments.
        for model in (Meter, Measurement, Label, LabelAssignment):
            session.query(model).delete()
        session.commit()
        return "DB cleared successfuly!"
if __name__ == "__main__":
    import os
    import uvicorn
    # Host and port are overridable via environment variables for deployment.
    host = os.getenv("SMIC_HOST", "0.0.0.0")
    port = int(os.getenv("SMIC_PORT", 8081))
    uvicorn.run(app, host=host, port=port)
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((355, 418), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)', 'connect_args': 'connect_args'}), '(sqlite_url, echo=True, connect_args=connect_args)\n', (368, 418), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((498, 507), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (505, 507), False, 'from fastapi import FastAPI\n'), ((453, 489), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (481, 489), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((4336, 4369), 'os.getenv', 'os.getenv', (['"""SMIC_HOST"""', '"""0.0.0.0"""'], {}), "('SMIC_HOST', '0.0.0.0')\n", (4345, 4369), False, 'import os\n'), ((4420, 4458), 'uvicorn.run', 'uvicorn.run', (['app'], {'host': 'host', 'port': 'port'}), '(app, host=host, port=port)\n', (4431, 4458), False, 'import uvicorn\n'), ((629, 644), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (636, 644), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((797, 812), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (804, 812), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((1069, 1084), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1076, 1084), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((1670, 1685), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1677, 1685), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((1719, 1834), 'datastore.model.LabelAssignment', 'LabelAssignment', ([], {'meter_id': 'meter_id', 'label_id': 'data.label_id', 'start_time': 'data.start_time', 'end_time': 'data.end_time'}), '(meter_id=meter_id, label_id=data.label_id, start_time=data.\n start_time, end_time=data.end_time)\n', (1734, 1834), False, 'from datastore.model import Label, LabelAssignment, Meter, Measurement\n'), ((2282, 2297), 'sqlmodel.Session', 
'Session', (['engine'], {}), '(engine)\n', (2289, 2297), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((2643, 2658), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2650, 2658), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((2905, 2920), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2912, 2920), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((3074, 3089), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3081, 3089), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((3119, 3157), 'datastore.model.Meter', 'Meter', ([], {'serial_number': '"""Meter one"""', 'id': '(1)'}), "(serial_number='Meter one', id=1)\n", (3124, 3157), False, 'from datastore.model import Label, LabelAssignment, Meter, Measurement\n'), ((3175, 3213), 'datastore.model.Meter', 'Meter', ([], {'serial_number': '"""Meter two"""', 'id': '(2)'}), "(serial_number='Meter two', id=2)\n", (3180, 3213), False, 'from datastore.model import Label, LabelAssignment, Meter, Measurement\n'), ((3528, 3570), 'datastore.model.Label', 'Label', ([], {'name': '"""Label one"""', 'id': '(1)', 'color': '"""red"""'}), "(name='Label one', id=1, color='red')\n", (3533, 3570), False, 'from datastore.model import Label, LabelAssignment, Meter, Measurement\n'), ((3588, 3631), 'datastore.model.Label', 'Label', ([], {'name': '"""Label two"""', 'id': '(2)', 'color': '"""blue"""'}), "(name='Label two', id=2, color='blue')\n", (3593, 3631), False, 'from datastore.model import Label, LabelAssignment, Meter, Measurement\n'), ((4000, 4015), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (4007, 4015), False, 'from sqlmodel import SQLModel, create_engine, Session, select\n'), ((4385, 4413), 'os.getenv', 'os.getenv', (['"""SMIC_PORT"""', '(8081)'], {}), "('SMIC_PORT', 8081)\n", (4394, 4413), False, 'import os\n'), ((2004, 2034), 'datetime.datetime', 'datetime', 
(['(2020)', '(1)', '(1)', '(0)', '(0)', '(30)'], {}), '(2020, 1, 1, 0, 0, 30)\n', (2012, 2034), False, 'from datetime import datetime\n'), ((2058, 2088), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)', '(0)', '(0)', '(40)'], {}), '(2020, 1, 1, 0, 0, 40)\n', (2066, 2088), False, 'from datetime import datetime\n'), ((3714, 3744), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)', '(0)', '(0)', '(30)'], {}), '(2020, 1, 1, 0, 0, 30)\n', (3722, 3744), False, 'from datetime import datetime\n'), ((3768, 3798), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)', '(0)', '(0)', '(40)'], {}), '(2020, 1, 1, 0, 0, 40)\n', (3776, 3798), False, 'from datetime import datetime\n'), ((3421, 3450), 'datetime.datetime', 'datetime', (['(2020)', '(1)', '(1)', '(0)', '(0)', 'i'], {}), '(2020, 1, 1, 0, 0, i)\n', (3429, 3450), False, 'from datetime import datetime\n')] |
"""node state add
Revision ID: <KEY>
Revises: ad46aa4e<PASSWORD>
Create Date: 2021-11-15 21:08:52.990959
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable ``node_state`` string column to the ``preps`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "preps", sa.Column("node_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True)
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop ``node_state`` from ``preps``.

    NOTE(review): the ``proposals.id`` alter below has no counterpart in
    ``upgrade`` — presumably an autogenerate artifact; confirm it is intended.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "proposals", "id", existing_type=sa.INTEGER(), nullable=False, autoincrement=True
    )
    op.drop_column("preps", "node_state")
    # ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((739, 776), 'alembic.op.drop_column', 'op.drop_column', (['"""preps"""', '"""node_state"""'], {}), "('preps', 'node_state')\n", (753, 776), False, 'from alembic import op\n'), ((441, 475), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (473, 475), False, 'import sqlmodel\n'), ((680, 692), 'sqlalchemy.INTEGER', 'sa.INTEGER', ([], {}), '()\n', (690, 692), True, 'import sqlalchemy as sa\n')] |
import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.pool import StaticPool
from .main import Hero, app, get_session
@pytest.fixture(name="session")
def session_fixture():
    """Yield a Session bound to a fresh in-memory SQLite database."""
    engine = create_engine(
        "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
    )
    SQLModel.metadata.create_all(engine)
    session = Session(engine)
    with session:
        yield session
@pytest.fixture(name="client")
def client_fixture(session: Session):
    """Yield a TestClient whose app uses the test session instead of a real DB."""
    app.dependency_overrides[get_session] = lambda: session
    client = TestClient(app)
    yield client
    # Undo the override so other tests see the real dependency.
    app.dependency_overrides.clear()
def test_create_hero(client: TestClient):
    """Posting a valid hero returns it with a generated id and null age."""
    payload = {"name": "Deadpond", "secret_name": "<NAME>"}
    response = client.post("/heroes/", json=payload)
    assert response.status_code == 200
    body = response.json()
    assert body["name"] == "Deadpond"
    assert body["secret_name"] == "<NAME>"
    assert body["age"] is None
    assert body["id"] is not None
def test_create_hero_incomplete(client: TestClient):
    """A hero missing the required secret_name is rejected with 422."""
    payload = {"name": "Deadpond"}
    response = client.post("/heroes/", json=payload)
    assert response.status_code == 422
def test_create_hero_invalid(client: TestClient):
    """A hero whose secret_name has the wrong type is rejected with 422."""
    payload = {
        "name": "Deadpond",
        "secret_name": {"message": "Do you wanna know my secret identity?"},
    }
    response = client.post("/heroes/", json=payload)
    assert response.status_code == 422
def test_read_heroes(session: Session, client: TestClient):
    """Listing heroes returns every stored hero with all fields intact."""
    heroes = [
        Hero(name="Deadpond", secret_name="<NAME>"),
        Hero(name="Rusty-Man", secret_name="<NAME>", age=48),
    ]
    for hero in heroes:
        session.add(hero)
    session.commit()
    response = client.get("/heroes/")
    assert response.status_code == 200
    data = response.json()
    assert len(data) == 2
    # Response order matches insertion order; compare field by field.
    for entry, hero in zip(data, heroes):
        assert entry["name"] == hero.name
        assert entry["secret_name"] == hero.secret_name
        assert entry["age"] == hero.age
        assert entry["id"] == hero.id
def test_read_hero(session: Session, client: TestClient):
    """Fetching a hero by id returns exactly that hero."""
    hero = Hero(name="Deadpond", secret_name="<NAME>")
    session.add(hero)
    session.commit()
    response = client.get(f"/heroes/{hero.id}")
    assert response.status_code == 200
    data = response.json()
    assert data["name"] == hero.name
    assert data["secret_name"] == hero.secret_name
    assert data["age"] == hero.age
    assert data["id"] == hero.id
def test_update_hero(session: Session, client: TestClient):
    """Patching only the name changes it and leaves other fields untouched."""
    hero = Hero(name="Deadpond", secret_name="<NAME>")
    session.add(hero)
    session.commit()
    response = client.patch(f"/heroes/{hero.id}", json={"name": "Deadpuddle"})
    assert response.status_code == 200
    data = response.json()
    assert data["name"] == "Deadpuddle"
    assert data["secret_name"] == "<NAME>"
    assert data["age"] is None
    assert data["id"] == hero.id
def test_delete_hero(session: Session, client: TestClient):
    """Deleting a hero removes it from the database."""
    hero = Hero(name="Deadpond", secret_name="<NAME>")
    session.add(hero)
    session.commit()
    response = client.delete(f"/heroes/{hero.id}")
    assert response.status_code == 200
    # The row must be gone from the backing session as well.
    assert session.get(Hero, hero.id) is None
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((192, 222), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""session"""'}), "(name='session')\n", (206, 222), False, 'import pytest\n'), ((468, 497), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""client"""'}), "(name='client')\n", (482, 497), False, 'import pytest\n'), ((259, 354), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite://"""'], {'connect_args': "{'check_same_thread': False}", 'poolclass': 'StaticPool'}), "('sqlite://', connect_args={'check_same_thread': False},\n poolclass=StaticPool)\n", (272, 354), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((369, 405), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (397, 405), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((670, 685), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (680, 685), False, 'from fastapi.testclient import TestClient\n'), ((415, 430), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (422, 430), False, 'from sqlmodel import Session, SQLModel, create_engine\n')] |
"""Add playlist and item models
Revision ID: 979da9b7aff0
Revises: <PASSWORD>
Create Date: 2021-10-31 13:09:14.064217
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '979da9b7aff0'
down_revision = 'a<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
    """Create ``item``, ``playlist`` and ``playlistitemlink`` tables and
    replace the legacy ``category.playlists`` string column with the new
    link-table model."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('artist', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('id', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_item_artist'), 'item', ['artist'], unique=False)
    op.create_index(op.f('ix_item_id'), 'item', ['id'], unique=False)
    op.create_index(op.f('ix_item_title'), 'item', ['title'], unique=False)
    op.create_table('playlist',
    sa.Column('entity_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('spotify', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('amazon', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('apple_music', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('image', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('release_date', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=True),
    sa.Column('category_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['category_id'], ['category.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_playlist_amazon'), 'playlist', ['amazon'], unique=False)
    op.create_index(op.f('ix_playlist_apple_music'), 'playlist', ['apple_music'], unique=False)
    op.create_index(op.f('ix_playlist_category_id'), 'playlist', ['category_id'], unique=False)
    op.create_index(op.f('ix_playlist_entity_id'), 'playlist', ['entity_id'], unique=False)
    op.create_index(op.f('ix_playlist_id'), 'playlist', ['id'], unique=False)
    op.create_index(op.f('ix_playlist_image'), 'playlist', ['image'], unique=False)
    op.create_index(op.f('ix_playlist_name'), 'playlist', ['name'], unique=False)
    op.create_index(op.f('ix_playlist_release_date'), 'playlist', ['release_date'], unique=False)
    op.create_index(op.f('ix_playlist_spotify'), 'playlist', ['spotify'], unique=False)
    # Many-to-many association between playlists and items.
    op.create_table('playlistitemlink',
    sa.Column('playlist_id', sa.Integer(), nullable=True),
    sa.Column('item_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['item_id'], ['item.id'], ),
    sa.ForeignKeyConstraint(['playlist_id'], ['playlist.id'], ),
    sa.PrimaryKeyConstraint('playlist_id', 'item_id')
    )
    op.create_index(op.f('ix_playlistitemlink_item_id'), 'playlistitemlink', ['item_id'], unique=False)
    op.create_index(op.f('ix_playlistitemlink_playlist_id'), 'playlistitemlink', ['playlist_id'], unique=False)
    # NOTE(review): dropping 'category.playlists' is lossy — downgrade
    # recreates the column but cannot restore its data.
    op.drop_index('ix_category_playlists', table_name='category')
    op.drop_column('category', 'playlists')
    # ### end Alembic commands ###
def downgrade():
    """Drop the new tables and restore the ``category.playlists`` column.

    NOTE(review): the column is re-added as non-nullable without a server
    default — presumably this fails on a populated ``category`` table; confirm.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('category', sa.Column('playlists', sa.VARCHAR(), nullable=False))
    op.create_index('ix_category_playlists', 'category', ['playlists'], unique=False)
    op.drop_index(op.f('ix_playlistitemlink_playlist_id'), table_name='playlistitemlink')
    op.drop_index(op.f('ix_playlistitemlink_item_id'), table_name='playlistitemlink')
    op.drop_table('playlistitemlink')
    op.drop_index(op.f('ix_playlist_spotify'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_release_date'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_name'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_image'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_id'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_entity_id'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_category_id'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_apple_music'), table_name='playlist')
    op.drop_index(op.f('ix_playlist_amazon'), table_name='playlist')
    op.drop_table('playlist')
    op.drop_index(op.f('ix_item_title'), table_name='item')
    op.drop_index(op.f('ix_item_id'), table_name='item')
    op.drop_index(op.f('ix_item_artist'), table_name='item')
    op.drop_table('item')
    # ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((3030, 3091), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_category_playlists"""'], {'table_name': '"""category"""'}), "('ix_category_playlists', table_name='category')\n", (3043, 3091), False, 'from alembic import op\n'), ((3096, 3135), 'alembic.op.drop_column', 'op.drop_column', (['"""category"""', '"""playlists"""'], {}), "('category', 'playlists')\n", (3110, 3135), False, 'from alembic import op\n'), ((3344, 3430), 'alembic.op.create_index', 'op.create_index', (['"""ix_category_playlists"""', '"""category"""', "['playlists']"], {'unique': '(False)'}), "('ix_category_playlists', 'category', ['playlists'], unique=\n False)\n", (3359, 3430), False, 'from alembic import op\n'), ((3606, 3639), 'alembic.op.drop_table', 'op.drop_table', (['"""playlistitemlink"""'], {}), "('playlistitemlink')\n", (3619, 3639), False, 'from alembic import op\n'), ((4278, 4303), 'alembic.op.drop_table', 'op.drop_table', (['"""playlist"""'], {}), "('playlist')\n", (4291, 4303), False, 'from alembic import op\n'), ((4486, 4507), 'alembic.op.drop_table', 'op.drop_table', (['"""item"""'], {}), "('item')\n", (4499, 4507), False, 'from alembic import op\n'), ((643, 672), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (666, 672), True, 'import sqlalchemy as sa\n'), ((699, 721), 'alembic.op.f', 'op.f', (['"""ix_item_artist"""'], {}), "('ix_item_artist')\n", (703, 721), False, 'from alembic import op\n'), ((777, 795), 'alembic.op.f', 'op.f', (['"""ix_item_id"""'], {}), "('ix_item_id')\n", (781, 795), False, 'from alembic import op\n'), ((847, 868), 'alembic.op.f', 'op.f', (['"""ix_item_title"""'], {}), "('ix_item_title')\n", (851, 868), False, 'from alembic import op\n'), ((1573, 1630), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['category_id']", "['category.id']"], {}), "(['category_id'], ['category.id'])\n", (1596, 1630), True, 'import sqlalchemy as sa\n'), ((1638, 1667), 'sqlalchemy.PrimaryKeyConstraint', 
'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1661, 1667), True, 'import sqlalchemy as sa\n'), ((1694, 1720), 'alembic.op.f', 'op.f', (['"""ix_playlist_amazon"""'], {}), "('ix_playlist_amazon')\n", (1698, 1720), False, 'from alembic import op\n'), ((1780, 1811), 'alembic.op.f', 'op.f', (['"""ix_playlist_apple_music"""'], {}), "('ix_playlist_apple_music')\n", (1784, 1811), False, 'from alembic import op\n'), ((1876, 1907), 'alembic.op.f', 'op.f', (['"""ix_playlist_category_id"""'], {}), "('ix_playlist_category_id')\n", (1880, 1907), False, 'from alembic import op\n'), ((1972, 2001), 'alembic.op.f', 'op.f', (['"""ix_playlist_entity_id"""'], {}), "('ix_playlist_entity_id')\n", (1976, 2001), False, 'from alembic import op\n'), ((2064, 2086), 'alembic.op.f', 'op.f', (['"""ix_playlist_id"""'], {}), "('ix_playlist_id')\n", (2068, 2086), False, 'from alembic import op\n'), ((2142, 2167), 'alembic.op.f', 'op.f', (['"""ix_playlist_image"""'], {}), "('ix_playlist_image')\n", (2146, 2167), False, 'from alembic import op\n'), ((2226, 2250), 'alembic.op.f', 'op.f', (['"""ix_playlist_name"""'], {}), "('ix_playlist_name')\n", (2230, 2250), False, 'from alembic import op\n'), ((2308, 2340), 'alembic.op.f', 'op.f', (['"""ix_playlist_release_date"""'], {}), "('ix_playlist_release_date')\n", (2312, 2340), False, 'from alembic import op\n'), ((2406, 2433), 'alembic.op.f', 'op.f', (['"""ix_playlist_spotify"""'], {}), "('ix_playlist_spotify')\n", (2410, 2433), False, 'from alembic import op\n'), ((2632, 2681), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['item_id']", "['item.id']"], {}), "(['item_id'], ['item.id'])\n", (2655, 2681), True, 'import sqlalchemy as sa\n'), ((2689, 2746), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['playlist_id']", "['playlist.id']"], {}), "(['playlist_id'], ['playlist.id'])\n", (2712, 2746), True, 'import sqlalchemy as sa\n'), ((2754, 2803), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', 
(['"""playlist_id"""', '"""item_id"""'], {}), "('playlist_id', 'item_id')\n", (2777, 2803), True, 'import sqlalchemy as sa\n'), ((2830, 2865), 'alembic.op.f', 'op.f', (['"""ix_playlistitemlink_item_id"""'], {}), "('ix_playlistitemlink_item_id')\n", (2834, 2865), False, 'from alembic import op\n'), ((2934, 2973), 'alembic.op.f', 'op.f', (['"""ix_playlistitemlink_playlist_id"""'], {}), "('ix_playlistitemlink_playlist_id')\n", (2938, 2973), False, 'from alembic import op\n'), ((3444, 3483), 'alembic.op.f', 'op.f', (['"""ix_playlistitemlink_playlist_id"""'], {}), "('ix_playlistitemlink_playlist_id')\n", (3448, 3483), False, 'from alembic import op\n'), ((3534, 3569), 'alembic.op.f', 'op.f', (['"""ix_playlistitemlink_item_id"""'], {}), "('ix_playlistitemlink_item_id')\n", (3538, 3569), False, 'from alembic import op\n'), ((3658, 3685), 'alembic.op.f', 'op.f', (['"""ix_playlist_spotify"""'], {}), "('ix_playlist_spotify')\n", (3662, 3685), False, 'from alembic import op\n'), ((3728, 3760), 'alembic.op.f', 'op.f', (['"""ix_playlist_release_date"""'], {}), "('ix_playlist_release_date')\n", (3732, 3760), False, 'from alembic import op\n'), ((3803, 3827), 'alembic.op.f', 'op.f', (['"""ix_playlist_name"""'], {}), "('ix_playlist_name')\n", (3807, 3827), False, 'from alembic import op\n'), ((3870, 3895), 'alembic.op.f', 'op.f', (['"""ix_playlist_image"""'], {}), "('ix_playlist_image')\n", (3874, 3895), False, 'from alembic import op\n'), ((3938, 3960), 'alembic.op.f', 'op.f', (['"""ix_playlist_id"""'], {}), "('ix_playlist_id')\n", (3942, 3960), False, 'from alembic import op\n'), ((4003, 4032), 'alembic.op.f', 'op.f', (['"""ix_playlist_entity_id"""'], {}), "('ix_playlist_entity_id')\n", (4007, 4032), False, 'from alembic import op\n'), ((4075, 4106), 'alembic.op.f', 'op.f', (['"""ix_playlist_category_id"""'], {}), "('ix_playlist_category_id')\n", (4079, 4106), False, 'from alembic import op\n'), ((4149, 4180), 'alembic.op.f', 'op.f', (['"""ix_playlist_apple_music"""'], {}), 
"('ix_playlist_apple_music')\n", (4153, 4180), False, 'from alembic import op\n'), ((4223, 4249), 'alembic.op.f', 'op.f', (['"""ix_playlist_amazon"""'], {}), "('ix_playlist_amazon')\n", (4227, 4249), False, 'from alembic import op\n'), ((4322, 4343), 'alembic.op.f', 'op.f', (['"""ix_item_title"""'], {}), "('ix_item_title')\n", (4326, 4343), False, 'from alembic import op\n'), ((4382, 4400), 'alembic.op.f', 'op.f', (['"""ix_item_id"""'], {}), "('ix_item_id')\n", (4386, 4400), False, 'from alembic import op\n'), ((4439, 4461), 'alembic.op.f', 'op.f', (['"""ix_item_artist"""'], {}), "('ix_item_artist')\n", (4443, 4461), False, 'from alembic import op\n'), ((459, 493), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (491, 493), False, 'import sqlmodel\n'), ((536, 570), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (568, 570), False, 'import sqlmodel\n'), ((609, 621), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (619, 621), True, 'import sqlalchemy as sa\n'), ((962, 996), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (994, 996), False, 'import sqlmodel\n'), ((1037, 1071), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1069, 1071), False, 'import sqlmodel\n'), ((1115, 1149), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1147, 1149), False, 'import sqlmodel\n'), ((1191, 1225), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1223, 1225), False, 'import sqlmodel\n'), ((1272, 1306), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1304, 1306), False, 'import sqlmodel\n'), ((1347, 1381), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1379, 1381), False, 'import sqlmodel\n'), ((1429, 1442), 'sqlalchemy.DateTime', 
'sa.DateTime', ([], {}), '()\n', (1440, 1442), True, 'import sqlalchemy as sa\n'), ((1480, 1492), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1490, 1492), True, 'import sqlalchemy as sa\n'), ((1539, 1551), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1549, 1551), True, 'import sqlalchemy as sa\n'), ((2543, 2555), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2553, 2555), True, 'import sqlalchemy as sa\n'), ((2598, 2610), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2608, 2610), True, 'import sqlalchemy as sa\n'), ((3309, 3321), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (3319, 3321), True, 'import sqlalchemy as sa\n')] |
from datetime import datetime, date
from decimal import Decimal
from typing import Optional, List
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryProblem(SQLModel, table=True):
    """Table model (and response schema) for one history-problem record."""
    id: Optional[int] = Field(default=None, primary_key=True)  # assigned by the DB
    history_id: int  # NOTE(review): presumably references a history table — no foreign_key declared here; confirm
    detail: str  # free-text description of the problem
    created_at: datetime
    updated_at: datetime
    created_by: int  # NOTE(review): presumably a user id — confirm against callers
    updated_by: Optional[int] = None
@router.post("/history_problem", response_model=HistoryProblem)
async def create_history_problem(history_problem: HistoryProblem, session: AsyncSession = Depends(get_session)):
session.add(history_problem)
await session.commit()
await session.refresh(history_problem)
return history_problem
@router.get("/history_problem/{id}", response_model=HistoryProblem)
async def get_history_problem(id: int, session: AsyncSession = Depends(get_session)):
history_problems = await session.execute(select(HistoryProblem).where(HistoryProblem.id == id))
history_problem = history_problems.scalars().first()
return history_problem
@router.put("/history_problem/{id}", response_model=HistoryProblem)
async def update_history_problem(id: int, session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_problem/{id}")
async def delete_history_problem(session: AsyncSession = Depends(get_session)):
return None | [
"sqlmodel.Field"
] | [((295, 306), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (304, 306), False, 'from fastapi import APIRouter, Depends\n'), ((377, 414), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (382, 414), False, 'from sqlmodel import Field, SQLModel\n'), ((714, 734), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (721, 734), False, 'from fastapi import APIRouter, Depends\n'), ((1000, 1020), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1007, 1020), False, 'from fastapi import APIRouter, Depends\n'), ((1343, 1363), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1350, 1363), False, 'from fastapi import APIRouter, Depends\n'), ((1481, 1501), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1488, 1501), False, 'from fastapi import APIRouter, Depends\n'), ((1068, 1090), 'sqlalchemy.select', 'select', (['HistoryProblem'], {}), '(HistoryProblem)\n', (1074, 1090), False, 'from sqlalchemy import select\n')] |
from typing import Optional
from datetime import datetime
from sqlalchemy import DateTime, String
from sqlalchemy.sql.schema import Column
from sqlmodel import Field, SQLModel
class Test(SQLModel, table=True):
    """A named test entity; target of DataStorage.test_id."""
    id: Optional[int] = Field(default=None, primary_key=True)  # assigned by the DB
    name: str
class DataStorage(SQLModel, table=True):
    """Measurement row linked to a Test.

    NOTE(review): no primary_key field is declared; SQLAlchemy normally
    rejects a mapped table without one — confirm this model maps cleanly.
    """
    test_id: int = Field(foreign_key="test.id")
    distance: int  # NOTE(review): units not stated here — confirm
    created: Optional[datetime] = Field(
        default=None,
        # explicit Column so the DB column is named "created" with a DateTime type
        sa_column=Column("created", DateTime),
    )
| [
"sqlmodel.Field"
] | [((237, 274), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (242, 274), False, 'from sqlmodel import Field, SQLModel\n'), ((351, 379), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""test.id"""'}), "(foreign_key='test.id')\n", (356, 379), False, 'from sqlmodel import Field, SQLModel\n'), ((479, 506), 'sqlalchemy.sql.schema.Column', 'Column', (['"""created"""', 'DateTime'], {}), "('created', DateTime)\n", (485, 506), False, 'from sqlalchemy.sql.schema import Column\n')] |
# Create many-to-many (M:N) links between tables: Product and Tags
# https://sqlmodel.tiangolo.com/tutorial/many-to-many/
# and following pages
from typing import Optional, List
from sqlmodel import Field, SQLModel, Session,\
Relationship, create_engine, select
# Association (link) table between Tag and Product
class TagProductLink(SQLModel, table=True):
    """Link table backing the Tag <-> Product many-to-many relation."""
    # Composite primary key: one row per (tag, product) pair.
    tag_id: Optional[int] = Field(
        default=None, foreign_key="tag.id", primary_key=True
    )
    product_id: Optional[int] = Field(
        default=None, foreign_key="product.id", primary_key=True
    )
class Tag(SQLModel, table=True):
    """A label that can be attached to any number of products."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    # Many-to-many relation to Product via the link table
    products: List["Product"] =\
        Relationship(back_populates="tags", link_model=TagProductLink)
class ProductType(SQLModel, table=True):
    """Category a product belongs to (e.g. sandwich, drink)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class Product(SQLModel, table=True):
    """A sellable item, optionally typed and tagged."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    # plain integer FK to ProductType.id (no Relationship declared for it)
    product_type: Optional[int] = Field(default=None,
                                        foreign_key="producttype.id")
    # Many-to-many relation to Tag via the link table
    tags: List["Tag"] =\
        Relationship(back_populates="products", link_model=TagProductLink)
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def create_entities():
    """Seed the database with the sample tags, product types and products."""
    tag_offerta = Tag(name="Offerta")
    tag_maionese = Tag(name="Con Maionese")
    tag_no_maionese = Tag(name="Senza Maionese")
    tipo_panino = ProductType(name="panino")
    tipo_bibita = ProductType(name="bibita")
    with Session(engine) as session:
        # insert the base rows first so their ids exist for the products
        base_rows = [tag_offerta, tag_maionese, tag_no_maionese,
                     tipo_panino, tipo_bibita]
        session.add_all(base_rows)
        session.commit()
        for row in base_rows:
            session.refresh(row)
        hamburger = Product(
            name="hamburger",
            product_type=tipo_panino.id,
            tags=[tag_offerta, tag_maionese]
        )
        coke = Product(
            name="Coca Cola",
            product_type=tipo_bibita.id,
            tags=[tag_offerta]
        )
        session.add_all([hamburger, coke])
        session.commit()
        session.refresh(hamburger)
        session.refresh(coke)
        print("Created :", hamburger)
        print("Created :", coke)
def update_burger():
    """Swap the hamburger's mayo tag: remove "Con Maionese", add "Senza Maionese"."""
    with Session(engine) as session:
        tag_no_maionese = session.exec(
            select(Tag).where(Tag.name == "Senza Maionese")
        ).one()
        tag_maionese = session.exec(
            select(Tag).where(Tag.name == "Con Maionese")
        ).one()
        hamburger = session.exec(
            select(Product).where(Product.name == "hamburger")
        ).one()
        # mutate the relationship list; the link table rows follow on commit
        hamburger.tags.append(tag_no_maionese)
        hamburger.tags.remove(tag_maionese)
        session.add(hamburger)
        session.commit()
        print("Updated hamburger:", hamburger.tags)
        print("Updated tags:", tag_maionese.products, tag_no_maionese.products)
def select_products():
    """Print every product joined with its product type, plus its tags."""
    query = select(Product, ProductType).where(
        Product.product_type == ProductType.id
    )
    with Session(engine) as session:
        for product, product_type in session.exec(query):
            print("product:", product, "product_type:",
                  product_type, "tags:", product.tags)
def main():
    """Run the demo steps in order: create schema, seed data, update rows."""
    create_db_and_tables()
    create_entities()
    update_burger()
    # select_products()
if __name__ == "__main__":
    main()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.Field",
"sqlmodel.select",
"sqlmodel.create_engine",
"sqlmodel.Relationship"
] | [((1400, 1436), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (1413, 1436), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((392, 451), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""tag.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='tag.id', primary_key=True)\n", (397, 451), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((498, 561), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""product.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='product.id', primary_key=True)\n", (503, 561), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((635, 672), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (640, 672), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((761, 823), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""tags"""', 'link_model': 'TagProductLink'}), "(back_populates='tags', link_model=TagProductLink)\n", (773, 823), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((891, 928), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (896, 928), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((1006, 1043), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1011, 1043), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((1092, 1141), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""producttype.id"""'}), "(default=None, foreign_key='producttype.id')\n", 
(1097, 1141), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((1244, 1310), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""products"""', 'link_model': 'TagProductLink'}), "(back_populates='products', link_model=TagProductLink)\n", (1256, 1310), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((1471, 1507), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1499, 1507), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((1763, 1778), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1770, 1778), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((2717, 2732), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2724, 2732), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((3398, 3413), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3405, 3413), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((3446, 3474), 'sqlmodel.select', 'select', (['Product', 'ProductType'], {}), '(Product, ProductType)\n', (3452, 3474), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((2797, 2808), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (2803, 2808), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((2910, 2921), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (2916, 2921), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n'), ((3018, 3033), 'sqlmodel.select', 'select', (['Product'], {}), '(Product)\n', (3024, 3033), False, 'from sqlmodel import Field, SQLModel, Session, Relationship, create_engine, select\n')] |
"""Criação dos bancos de dados"""
from sqlmodel import SQLModel
from mitmirror.infra.entities import *
from .database_config import engine
def create_db():
"""Criando bancos de dados"""
base = SQLModel.metadata.create_all(engine)
return base
| [
"sqlmodel.SQLModel.metadata.create_all"
] | [((204, 240), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (232, 240), False, 'from sqlmodel import SQLModel\n')] |
from typer.testing import CliRunner
import os
from timerdo.main import app, sqlite_file_name
from timerdo.tables import ToDo, Timer
from sqlmodel import create_engine, Session, select
from datetime import datetime, timedelta
# Park the user's real database aside so the tests run against a fresh file;
# it is restored by test_return_db at the end of the module.
try:
    os.rename('/home/cmts/.config/timerdo/timerdo_db.db',
              '/home/cmts/.config/timerdo/timerdo_db_moved.db')
except FileNotFoundError:
    pass
sqlite_url = f'sqlite:///{sqlite_file_name}'
engine = create_engine(sqlite_url, echo=True)
runner = CliRunner()
def test_add_none():
    """``add`` with no task argument is a usage error."""
    result = runner.invoke(app, ['add'])
    # exit code 2 is Click/Typer's standard "usage error" for missing args
    assert result.exit_code == 2
def test_add_task():
    """``add <task>`` stores the task text with the default 'to do' status."""
    task = 'test add'
    result = runner.invoke(app, ['add', task])
    with Session(engine) as session:
        query = session.exec(select(ToDo).where(ToDo.task == task)).one()
        stored_task = query.task
        status = query.status
    assert result.exit_code == 0
    # BUG FIX: the original rebound ``task`` to the stored value and then
    # asserted ``task == task`` — vacuously true. Compare stored vs. input.
    assert stored_task == task
    assert status == 'to do'
def test_add_status():
    """An unknown --status value is rejected."""
    task = 'Test status'
    status = 'dif'
    result = runner.invoke(app, ['add', task, '--status', status])
    assert result.exit_code == 1
    assert 'status must be "to do" or "doing"\n' in result.stdout
def test_add_due_date():
    """A --due-date of today (not strictly in the future) is rejected."""
    task = 'Test due date'
    date = datetime.strftime(datetime.now(), '%Y-%m-%d')
    result = runner.invoke(app, ['add', task, '--due-date', date])
    assert result.exit_code == 1
    # "grater" matches the app's own (misspelled) error message — do not "fix" here
    assert f'due date must be grater than {datetime.today().date()}\n' in \
           result.stdout
def test_add_reminder():
    """A --reminder of today (not strictly in the future) is rejected."""
    task = 'Test reminder'
    date = datetime.strftime(datetime.now(), '%Y-%m-%d')
    result = runner.invoke(app, ['add', task, '--reminder', date])
    assert result.exit_code == 1
    # "grater" matches the app's own (misspelled) error message — do not "fix" here
    assert f'reminder must be grater than {datetime.today().date()}\n' in \
           result.stdout
def test_add_due_date_reminder():
    """A reminder on/after the due date is rejected."""
    task = 'Test due-date and reminder'
    due_date = datetime.strftime(
        datetime.now() + timedelta(days=2), '%Y-%m-%d')
    reminder = datetime.strftime(
        datetime.now() + timedelta(days=2), '%Y-%m-%d')
    result = runner.invoke(app, ['add', task, '--reminder', reminder,
                                 '--due-date', due_date])
    assert result.exit_code == 1
    assert f'reminder must be smaller than {due_date}\n' in \
           result.stdout
def test_add_full_entry():
    """``add`` with every option set stores all fields as given."""
    task = 'something'
    project = 'test project'
    due_date = datetime.strftime(
        datetime.now() + timedelta(days=2), '%Y-%m-%d')
    reminder = datetime.strftime(
        datetime.now() + timedelta(days=1), '%Y-%m-%d')
    status = 'doing'
    tag = 'tag'
    result = runner.invoke(app, ['add', task,
                                 '--project', project,
                                 '--due-date', due_date,
                                 '--reminder', reminder,
                                 '--status', status,
                                 '--tag', tag])
    assert result.exit_code == 0
    with Session(engine) as session:
        # .one() already raises if no matching row exists, so the assert
        # below is belt-and-braces
        query = session.exec(select(ToDo).where(ToDo.task == task,
                                                 ToDo.project == project,
                                                 ToDo.status == status,
                                                 ToDo.tag == tag)).one()
        assert query is not None
def test_start():
    """``start <id>`` flips the task's status to 'doing'."""
    todo_id = '1'
    result = runner.invoke(app, ['start', todo_id])
    assert result.exit_code == 0
    with Session(engine) as session:
        query = session.exec(select(ToDo.status).where(ToDo.id ==
                                                       todo_id)).one()
    assert query == 'doing'
def test_start_running():
    """``start`` while a timer is already running is an error."""
    todo_id = '1'
    result = runner.invoke(app, ['start', todo_id])
    assert result.exit_code == 1
    assert 'The Timer must be stopped first' in result.stdout
def test_stop():
    """``stop`` succeeds while a timer is running."""
    result = runner.invoke(app, ['stop'])
    assert result.exit_code == 0
def test_stop_no_run():
    """``stop`` with no running timer is an error."""
    result = runner.invoke(app, ['stop'])
    assert result.exit_code == 1
def test_duration():
    """The ToDo's recorded duration matches its single Timer entry."""
    todo_id = 1
    with Session(engine) as session:
        todo = session.exec(select(ToDo.duration).where(ToDo.id ==
                                                        todo_id)).one()
        timer = session.exec(select(Timer.duration).where(Timer.id_todo
                                                          == todo_id)).one()
        assert todo is not None and todo == timer
def test_return_db():
try:
os.remove('/home/cmts/.config/timerdo/timerdo_db.db')
os.rename('/home/cmts/.config/timerdo/timerdo_db_moved.db',
'/home/cmts/.config/timerdo/timerdo_db.db')
except FileNotFoundError:
pass | [
"sqlmodel.Session",
"sqlmodel.select",
"sqlmodel.create_engine"
] | [((447, 483), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (460, 483), False, 'from sqlmodel import create_engine, Session, select\n'), ((494, 505), 'typer.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (503, 505), False, 'from typer.testing import CliRunner\n'), ((237, 344), 'os.rename', 'os.rename', (['"""/home/cmts/.config/timerdo/timerdo_db.db"""', '"""/home/cmts/.config/timerdo/timerdo_db_moved.db"""'], {}), "('/home/cmts/.config/timerdo/timerdo_db.db',\n '/home/cmts/.config/timerdo/timerdo_db_moved.db')\n", (246, 344), False, 'import os\n'), ((797, 812), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (804, 812), False, 'from sqlmodel import create_engine, Session, select\n'), ((1407, 1421), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1419, 1421), False, 'from datetime import datetime, timedelta\n'), ((1744, 1758), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1756, 1758), False, 'from datetime import datetime, timedelta\n'), ((3204, 3219), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3211, 3219), False, 'from sqlmodel import create_engine, Session, select\n'), ((3703, 3718), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3710, 3718), False, 'from sqlmodel import create_engine, Session, select\n'), ((4450, 4465), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (4457, 4465), False, 'from sqlmodel import create_engine, Session, select\n'), ((4858, 4911), 'os.remove', 'os.remove', (['"""/home/cmts/.config/timerdo/timerdo_db.db"""'], {}), "('/home/cmts/.config/timerdo/timerdo_db.db')\n", (4867, 4911), False, 'import os\n'), ((4920, 5027), 'os.rename', 'os.rename', (['"""/home/cmts/.config/timerdo/timerdo_db_moved.db"""', '"""/home/cmts/.config/timerdo/timerdo_db.db"""'], {}), "('/home/cmts/.config/timerdo/timerdo_db_moved.db',\n '/home/cmts/.config/timerdo/timerdo_db.db')\n", (4929, 5027), False, 
'import os\n'), ((2129, 2143), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2141, 2143), False, 'from datetime import datetime, timedelta\n'), ((2146, 2163), 'datetime.timedelta', 'timedelta', ([], {'days': '(2)'}), '(days=2)\n', (2155, 2163), False, 'from datetime import datetime, timedelta\n'), ((2219, 2233), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2231, 2233), False, 'from datetime import datetime, timedelta\n'), ((2236, 2253), 'datetime.timedelta', 'timedelta', ([], {'days': '(2)'}), '(days=2)\n', (2245, 2253), False, 'from datetime import datetime, timedelta\n'), ((2668, 2682), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2680, 2682), False, 'from datetime import datetime, timedelta\n'), ((2685, 2702), 'datetime.timedelta', 'timedelta', ([], {'days': '(2)'}), '(days=2)\n', (2694, 2702), False, 'from datetime import datetime, timedelta\n'), ((2758, 2772), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2770, 2772), False, 'from datetime import datetime, timedelta\n'), ((2775, 2792), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2784, 2792), False, 'from datetime import datetime, timedelta\n'), ((1579, 1595), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1593, 1595), False, 'from datetime import datetime, timedelta\n'), ((1916, 1932), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1930, 1932), False, 'from datetime import datetime, timedelta\n'), ((854, 866), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (860, 866), False, 'from sqlmodel import create_engine, Session, select\n'), ((3261, 3273), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (3267, 3273), False, 'from sqlmodel import create_engine, Session, select\n'), ((3760, 3779), 'sqlmodel.select', 'select', (['ToDo.status'], {}), '(ToDo.status)\n', (3766, 3779), False, 'from sqlmodel import create_engine, Session, select\n'), ((4506, 4527), 'sqlmodel.select', 
'select', (['ToDo.duration'], {}), '(ToDo.duration)\n', (4512, 4527), False, 'from sqlmodel import create_engine, Session, select\n'), ((4646, 4668), 'sqlmodel.select', 'select', (['Timer.duration'], {}), '(Timer.duration)\n', (4652, 4668), False, 'from sqlmodel import create_engine, Session, select\n')] |
from pathlib import Path
from typing import List
import nonebot
import pytest
from nonebug import App
from .utils import make_fake_event, make_fake_message
@pytest.mark.asyncio
async def test_db(app: App):
    """Test the database: direct inserts and inserts via a matcher."""
    from sqlmodel import select
    from nonebot_plugin_datastore.db import create_session, init_db
    from .example import Example, test
    nonebot.load_plugin("tests.example")
    await init_db()
    # insert one row directly
    async with create_session() as session:
        session.add(Example(message="test"))
        await session.commit()
    async with create_session() as session:
        statement = select(Example)
        examples: List[Example] = (await session.exec(statement)).all()  # type: ignore
        assert len(examples) == 1
        assert examples[0].message == "test"
    # fire the example matcher, which inserts a second row
    message = make_fake_message()("/test")
    event = make_fake_event(_message=message)()
    async with app.test_matcher(test) as ctx:
        bot = ctx.create_bot()
        ctx.receive_event(bot, event)
    async with create_session() as session:
        statement = select(Example)
        examples: List[Example] = (await session.exec(statement)).all()  # type: ignore
        assert len(examples) == 2
        assert examples[1].message == "matcher"
@pytest.mark.asyncio
async def test_disable_db(nonebug_init: None, tmp_path: Path):
    """Test that disabling the database makes create_session raise."""
    import nonebot
    config = nonebot.get_driver().config
    # plugin data directories
    config.datastore_cache_dir = tmp_path / "cache"
    config.datastore_config_dir = tmp_path / "config"
    config.datastore_data_dir = tmp_path / "data"
    # disable the database
    config.datastore_enable_database = False
    # load the plugin
    nonebot.load_plugin("nonebot_plugin_datastore")
    from nonebot_plugin_datastore import create_session
    with pytest.raises(ValueError) as e:
        async with create_session() as session:
            pass
    # the error message is Chinese ("database not enabled") — asserted verbatim
    assert str(e.value) == "数据库未启用"
@pytest.mark.asyncio
async def test_default_db_url(nonebug_init: None):
    """Test the default database URL."""
    import nonebot
    # load the plugin
    nonebot.load_plugin("nonebot_plugin_datastore")
    from nonebot_plugin_datastore.config import BASE_DATA_DIR, plugin_config
    assert (
        plugin_config.datastore_database_url
        == f"sqlite+aiosqlite:///{BASE_DATA_DIR / 'data.db'}"
    )
| [
"sqlmodel.select"
] | [((372, 408), 'nonebot.load_plugin', 'nonebot.load_plugin', (['"""tests.example"""'], {}), "('tests.example')\n", (391, 408), False, 'import nonebot\n'), ((1667, 1714), 'nonebot.load_plugin', 'nonebot.load_plugin', (['"""nonebot_plugin_datastore"""'], {}), "('nonebot_plugin_datastore')\n", (1686, 1714), False, 'import nonebot\n'), ((2045, 2092), 'nonebot.load_plugin', 'nonebot.load_plugin', (['"""nonebot_plugin_datastore"""'], {}), "('nonebot_plugin_datastore')\n", (2064, 2092), False, 'import nonebot\n'), ((420, 429), 'nonebot_plugin_datastore.db.init_db', 'init_db', ([], {}), '()\n', (427, 429), False, 'from nonebot_plugin_datastore.db import create_session, init_db\n'), ((446, 462), 'nonebot_plugin_datastore.create_session', 'create_session', ([], {}), '()\n', (460, 462), False, 'from nonebot_plugin_datastore import create_session\n'), ((567, 583), 'nonebot_plugin_datastore.create_session', 'create_session', ([], {}), '()\n', (581, 583), False, 'from nonebot_plugin_datastore import create_session\n'), ((616, 631), 'sqlmodel.select', 'select', (['Example'], {}), '(Example)\n', (622, 631), False, 'from sqlmodel import select\n'), ((1024, 1040), 'nonebot_plugin_datastore.create_session', 'create_session', ([], {}), '()\n', (1038, 1040), False, 'from nonebot_plugin_datastore import create_session\n'), ((1073, 1088), 'sqlmodel.select', 'select', (['Example'], {}), '(Example)\n', (1079, 1088), False, 'from sqlmodel import select\n'), ((1396, 1416), 'nonebot.get_driver', 'nonebot.get_driver', ([], {}), '()\n', (1414, 1416), False, 'import nonebot\n'), ((1782, 1807), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1795, 1807), False, 'import pytest\n'), ((1833, 1849), 'nonebot_plugin_datastore.create_session', 'create_session', ([], {}), '()\n', (1847, 1849), False, 'from nonebot_plugin_datastore import create_session\n')] |
from datetime import timedelta
from enum import Enum
from tkinter import *
from tkinter import ttk
import typer
from sqlmodel import Session
class Status(str, Enum):
    """Allowed task states (str-valued, so members compare as plain strings)."""
    to_do = 'to do'
    doing = 'doing'
    done = 'done'
def round_timedelta(delta: timedelta) -> str:
    """Format *delta* as a zero-padded ``HH:MM`` string, rounded to the
    nearest minute.

    BUG FIX: the original rounded hours and minutes independently with
    ``round()``, so e.g. 1h30m was reported as ``02:00`` (the hour was
    rounded up and the leftover seconds went negative, zeroing the
    minutes).  Rounding once, on the total number of minutes, gives a
    consistent result.
    """
    total_minutes = round(delta.total_seconds() / 60)
    hours, minutes = divmod(total_minutes, 60)
    return f'{hours:02d}:{minutes:02d}'
def list_query(engine, query):
    """Yield ``(task, total_duration)`` for each task matched by *query*.

    If any timer of a task is still running its duration is None, the
    summation raises TypeError, and we exit with an error message.
    """
    with Session(engine) as session:
        tasks = session.exec(query).all()
        try:
            for task in tasks:
                total = timedelta()
                for timer in task.timers:
                    total += timer.duration
                yield task, total
        except TypeError:
            typer.secho(f'\nTask is running. Stop timer first.\n',
                        fg=typer.colors.RED)
            raise typer.Exit(code=1)
def make_table_view(engine, tasks):
    """Build a 2-D list (header row + one row per task) for tabular display."""
    rows = [['id', 'Task', 'Project', 'Status', 'Tag', 'hh:mm',
             'Due in']]
    try:
        for task, duration in list_query(engine, tasks):
            rows.append(
                [task.id, task.task, task.project, task.status, task.tag,
                 round_timedelta(duration), task.due_date])
    except UnboundLocalError:
        pass
    return rows
def pop_up_msg():
    """Show a modal "time is over" popup with a Quit button."""
    root = Tk()
    frm = ttk.Frame(root, padding=10)
    frm.grid()
    ttk.Label(frm, text="Your Time is Over! Well done!").grid(column=0, row=0)
    ttk.Button(frm, text="Quit", command=root.destroy).grid(column=1, row=0)
    # blocks until the user clicks Quit or closes the window
    root.mainloop()
def make_table_projects(engine, tasks):
    """Build a per-project task table plus the project's total 'HH:MM' duration."""
    rows = [['id', 'Task', 'Status', 'Tag', 'hh:mm', 'Due in']]
    try:
        total = timedelta()
        for task, duration in list_query(engine, tasks):
            total += duration
            rows.append(
                [task.id, task.task, task.status, task.tag,
                 round_timedelta(duration), task.due_date])
    except UnboundLocalError:
        pass
    return rows, round_timedelta(total)
| [
"sqlmodel.Session"
] | [((1814, 1841), 'tkinter.ttk.Frame', 'ttk.Frame', (['root'], {'padding': '(10)'}), '(root, padding=10)\n', (1823, 1841), False, 'from tkinter import ttk\n'), ((827, 842), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (834, 842), False, 'from sqlmodel import Session\n'), ((2176, 2187), 'datetime.timedelta', 'timedelta', ([], {}), '()\n', (2185, 2187), False, 'from datetime import timedelta\n'), ((1861, 1913), 'tkinter.ttk.Label', 'ttk.Label', (['frm'], {'text': '"""Your Time is Over! Well done!"""'}), "(frm, text='Your Time is Over! Well done!')\n", (1870, 1913), False, 'from tkinter import ttk\n'), ((1940, 1990), 'tkinter.ttk.Button', 'ttk.Button', (['frm'], {'text': '"""Quit"""', 'command': 'root.destroy'}), "(frm, text='Quit', command=root.destroy)\n", (1950, 1990), False, 'from tkinter import ttk\n'), ((978, 989), 'datetime.timedelta', 'timedelta', ([], {}), '()\n', (987, 989), False, 'from datetime import timedelta\n'), ((1150, 1227), 'typer.secho', 'typer.secho', (['f"""\nTask is running. Stop timer first.\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nTask is running. Stop timer first.\n""", fg=typer.colors.RED)\n', (1161, 1227), False, 'import typer\n'), ((1268, 1286), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1278, 1286), False, 'import typer\n')] |
from fastapi import *
from sqlmodel import Session, select, SQLModel
from sqlalchemy.exc import OperationalError
from backend.models.timelog import TimeLog
from backend.models.calendar import Calendar
from backend.utils import (
engine,
sqlite3_engine,
create_db,
tags_metadata,
execute_sample_sql,
)
from backend.api import (
user,
timelog,
forecast,
epic,
epic_area,
client,
rate,
team,
role,
sponsor,
capacity,
demand,
)
import csv
app = FastAPI(title="timeflow app API", openapi_tags=tags_metadata)
session = Session(engine)
app.include_router(timelog.router)
app.include_router(forecast.router)
app.include_router(user.router)
app.include_router(epic.router)
app.include_router(epic_area.router)
app.include_router(client.router)
app.include_router(rate.router)
app.include_router(team.router)
app.include_router(role.router)
app.include_router(sponsor.router)
app.include_router(capacity.router)
app.include_router(demand.router)
@app.on_event("startup")
def on_startup():
try:
statement = select(TimeLog)
results = session.exec(statement)
except OperationalError:
create_db()
execute_sample_sql(session)
@app.on_event("startup")
def implement_calendar_table():
try:
statement = select(Calendar.year_name).where(Calendar.id == 1)
result = session.exec(statement).one()
except Exception as e:
print(e)
values_sql = f"""INSERT INTO calendar (date, year_number, year_name, quarter_number, quarter_name
, month_number, month_name, week_number, week_name, week_day_number, week_day_name)
VALUES """
with open("backend/calendar.csv") as csvfile:
reader = csv.reader(csvfile, delimiter=",", quotechar="|")
values_list = []
for index, row in enumerate(reader):
if index > 0 and row[0] != "":
_row = [f"'{item}'" for item in row]
row_sql = ", ".join(_row)
values = f"({row_sql}),"
values_sql += values
values_sql += f"({row_sql});"
cur = sqlite3_engine.cursor()
cur.execute(values_sql)
sqlite3_engine.commit()
sqlite3_engine.close()
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((583, 598), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (590, 598), False, 'from sqlmodel import Session, select, SQLModel\n'), ((1080, 1095), 'sqlmodel.select', 'select', (['TimeLog'], {}), '(TimeLog)\n', (1086, 1095), False, 'from sqlmodel import Session, select, SQLModel\n'), ((1175, 1186), 'backend.utils.create_db', 'create_db', ([], {}), '()\n', (1184, 1186), False, 'from backend.utils import engine, sqlite3_engine, create_db, tags_metadata, execute_sample_sql\n'), ((1195, 1222), 'backend.utils.execute_sample_sql', 'execute_sample_sql', (['session'], {}), '(session)\n', (1213, 1222), False, 'from backend.utils import engine, sqlite3_engine, create_db, tags_metadata, execute_sample_sql\n'), ((1311, 1337), 'sqlmodel.select', 'select', (['Calendar.year_name'], {}), '(Calendar.year_name)\n', (1317, 1337), False, 'from sqlmodel import Session, select, SQLModel\n'), ((1769, 1818), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""', 'quotechar': '"""|"""'}), "(csvfile, delimiter=',', quotechar='|')\n", (1779, 1818), False, 'import csv\n'), ((2193, 2216), 'backend.utils.sqlite3_engine.cursor', 'sqlite3_engine.cursor', ([], {}), '()\n', (2214, 2216), False, 'from backend.utils import engine, sqlite3_engine, create_db, tags_metadata, execute_sample_sql\n'), ((2265, 2288), 'backend.utils.sqlite3_engine.commit', 'sqlite3_engine.commit', ([], {}), '()\n', (2286, 2288), False, 'from backend.utils import engine, sqlite3_engine, create_db, tags_metadata, execute_sample_sql\n'), ((2301, 2323), 'backend.utils.sqlite3_engine.close', 'sqlite3_engine.close', ([], {}), '()\n', (2321, 2323), False, 'from backend.utils import engine, sqlite3_engine, create_db, tags_metadata, execute_sample_sql\n')] |
"""
Database related APIs.
"""
import logging
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import Session, select
from datajunction.models.database import Database
from datajunction.utils import get_session
_logger = logging.getLogger(__name__)  # NOTE(review): appears unused in this module — confirm
router = APIRouter()
@router.get("/databases/", response_model=List[Database])
def read_databases(*, session: Session = Depends(get_session)) -> List[Database]:
"""
List the available databases.
"""
return session.exec(select(Database)).all()
| [
"sqlmodel.select"
] | [((253, 280), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (270, 280), False, 'import logging\n'), ((290, 301), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (299, 301), False, 'from fastapi import APIRouter, Depends\n'), ((403, 423), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (410, 423), False, 'from fastapi import APIRouter, Depends\n'), ((518, 534), 'sqlmodel.select', 'select', (['Database'], {}), '(Database)\n', (524, 534), False, 'from sqlmodel import Session, select\n')] |
import json
from sfm.utils import validate_signature, calc_signature
from sfm.dependencies import get_db
from sfm.models import WorkItemCreate, Project, CommitCreate, WorkItem, WorkItemUpdate
from typing import List, Optional
from sqlmodel import Session, select, and_
from fastapi import APIRouter, HTTPException, Depends, Path, Header, Request, Query
from opencensus.ext.azure.log_exporter import AzureLogHandler
from sfm.config import get_settings
from sfm.logger import create_logger
from .github_functions import (
webhook_project_processor,
deployment_processor,
pull_request_processor,
populate_past_github,
defect_processor,
reopened_processor,
unlabeled_processor,
)
app_settings = get_settings()

logger = create_logger(__name__)

router = APIRouter()


async def fetch_github_payload(request):
    """Read, validate and parse an incoming GitHub webhook request.

    Returns a tuple of ``(payload, event_type, proj_auth_token)``.
    Signature validation is delegated to ``validate_signature``.

    BUG FIX: Starlette's ``Request.body()`` and ``Request.json()`` are
    coroutines; the previous synchronous version called them without
    ``await``, handing coroutine objects (not bytes / parsed JSON) to
    ``validate_signature`` and to the caller.
    """
    raw = await request.body()
    signature = request.headers.get("X-Hub-Signature-256")
    proj_auth_token = validate_signature(signature, raw)
    payload = await request.json()
    event_type = request.headers.get("X-Github-Event")
    return payload, event_type, proj_auth_token


@router.post("/github_webhooks/")  # pragma: no cover
async def webhook_handler(request: Request, db: Session = Depends(get_db)):
    """
    ## Github Webhook Handler

    Awaits incoming payload from Github Webhooks and parses the data.
    Currently, endpoint processes two different event types: "Deployment" and "Pull Request".
    The payload data is parsed and data needed to calculate the DORA metrics is stored in the db tables.
    """
    # Refuse to process webhooks until a real secret is configured.
    if app_settings.GITHUB_WEBHOOK_SECRET in ["", "XXXXXXXXXXX"]:
        raise HTTPException(
            status_code=412,
            detail="Missing github webhook secret. Please specify GITHUB_WEBHOOK_SECRET and try again",
        )

    payload, event_type, proj_auth_token = await fetch_github_payload(request)

    # gather common payload object properties
    if event_type != "push":  # push events are the exception to common properties
        repository = payload.get("repository")
    else:
        # TODO: pull in push event information.
        # NOTE(review): falling through here leaves `repository` unbound, so a
        # "push" event would hit a NameError below -- confirm intended handling.
        pass

    if event_type != "repository":
        project_name = repository.get("name")
        print("THE PROJECT NAME: ", project_name)
        project_db = db.exec(
            select(Project).where(Project.name == project_name)
        ).first()
        if not project_db:
            logger.debug("A matching project was not found in the database")
            raise HTTPException(
                status_code=404, detail="Matching project not found in db"
            )

    if event_type == "repository":
        action = payload.get("action")
        webhook_project_processor(db, repository, action)
    elif event_type == "deployment":
        deployment = payload.get("deployment")
        deployment_processor(db, deployment, project_db, proj_auth_token)
    elif event_type == "pull_request":
        pull_request = payload.get("pull_request")
        if (
            pull_request["head"]["repo"]["default_branch"] == "main"
        ):  # process only pull requests to main
            pull_request_processor(db, pull_request, project_db, proj_auth_token)
    elif event_type == "issues":
        action = payload.get("action")
        issue = payload.get("issue")
        if action == "closed":
            defect_processor(db, issue, project_db, proj_auth_token, closed=True)
        elif action == "labeled" and "production defect" in [
            lbl["name"] for lbl in issue["labels"]
        ]:
            defect_processor(db, issue, project_db, proj_auth_token, closed=False)
        elif action == "reopened":
            reopened_processor(db, issue, proj_auth_token)
        elif action == "unlabeled" and "production defect" not in [
            lbl["name"] for lbl in issue["labels"]
        ]:
            unlabeled_processor(db, issue, proj_auth_token)
        else:
            logger.debug("Issues event type passed that is unhandled")
    else:
        logger.warning("Event type not handled")
        return {"code": "event type not handled"}
        # raise HTTPException(status_code=404, detail="Event type not handled.")

    return {"code": "success"}
@router.get("/github_populate")
def populate_past_data(
    org: str,
    db: Session = Depends(get_db),
    include_only_list: Optional[List[str]] = Query(None),
):
    """
    ## Github Backpopulate

    Queries the GitHub API to populate projects and work items that already exist
    in specified repos. "include_only_list" is a list of repo names (as strings)
    to use when populating the database; when provided, only projects in this
    list will be populated.
    """
    intended_but_missing = populate_past_github(db, org, include_only_list)
    names_in_db = [project.name for project in db.exec(select(Project)).all()]

    # Repos explicitly requested that still did not make it into the database.
    requested_not_found = []
    if include_only_list is not None:
        requested_not_found = [
            repo for repo in include_only_list if repo not in names_in_db
        ]

    if intended_but_missing == [] and requested_not_found == []:
        return {"code": "success"}

    # pragma: no cover -- partial-success report
    included = [
        repo
        for repo in include_only_list
        if repo not in intended_but_missing and repo in names_in_db
    ]
    return {
        "projects_included": included,
        "projects_not_included": intended_but_missing,
        "project_not_found": requested_not_found,
    }
| [
"sqlmodel.select"
] | [((721, 735), 'sfm.config.get_settings', 'get_settings', ([], {}), '()\n', (733, 735), False, 'from sfm.config import get_settings\n'), ((747, 770), 'sfm.logger.create_logger', 'create_logger', (['__name__'], {}), '(__name__)\n', (760, 770), False, 'from sfm.logger import create_logger\n'), ((782, 793), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (791, 793), False, 'from fastapi import APIRouter, HTTPException, Depends, Path, Header, Request, Query\n'), ((937, 971), 'sfm.utils.validate_signature', 'validate_signature', (['signature', 'raw'], {}), '(signature, raw)\n', (955, 971), False, 'from sfm.utils import validate_signature, calc_signature\n'), ((1221, 1236), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (1228, 1236), False, 'from fastapi import APIRouter, HTTPException, Depends, Path, Header, Request, Query\n'), ((4325, 4340), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (4332, 4340), False, 'from fastapi import APIRouter, HTTPException, Depends, Path, Header, Request, Query\n'), ((4387, 4398), 'fastapi.Query', 'Query', (['None'], {}), '(None)\n', (4392, 4398), False, 'from fastapi import APIRouter, HTTPException, Depends, Path, Header, Request, Query\n'), ((1636, 1768), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(412)', 'detail': '"""Missing github webhook secret. Please specify GITHUB_WEBHOOK_SECRET and try again"""'}), "(status_code=412, detail=\n 'Missing github webhook secret. 
Please specify GITHUB_WEBHOOK_SECRET and try again'\n )\n", (1649, 1768), False, 'from fastapi import APIRouter, HTTPException, Depends, Path, Header, Request, Query\n'), ((2513, 2586), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Matching project not found in db"""'}), "(status_code=404, detail='Matching project not found in db')\n", (2526, 2586), False, 'from fastapi import APIRouter, HTTPException, Depends, Path, Header, Request, Query\n'), ((4846, 4861), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (4852, 4861), False, 'from sqlmodel import Session, select, and_\n'), ((2320, 2335), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (2326, 2335), False, 'from sqlmodel import Session, select, and_\n')] |
import typing as ty
from nepali_dictionary.common.db import Dictionary
from sqlmodel import Session, select
class SearchService:
    """Looks up dictionary entries by exact word match."""

    def search(self, query: str, session: ty.Type[Session], engine) -> ty.Optional[dict]:
        """Return the matching entry as a dict, or None when no row matches."""
        with session(engine) as db:
            stmt = select(Dictionary).where(Dictionary.word == query)
            row: ty.Any = db.execute(stmt).fetchone()
            return row[0].dict() if row else None
| [
"sqlmodel.select"
] | [((282, 300), 'sqlmodel.select', 'select', (['Dictionary'], {}), '(Dictionary)\n', (288, 300), False, 'from sqlmodel import Session, select\n')] |
from __future__ import annotations
import inspect
from functools import wraps
from typing import Any, List, Type, TypeVar
from fastapi.encoders import jsonable_encoder
from sqlmodel import SQLModel, select
from sqlmodel.ext.asyncio.session import AsyncSession
Self = TypeVar("Self", bound="Base")
class InvalidTable(RuntimeError):
    """Raised when a SQLAlchemy-coupled method is invoked on a SQLModel
    class that is not actually backed by a table (``table=True``).
    """
def is_table(cls: Type[Self]) -> bool:
    """Return True when *cls* itself is declared as a SQLModel table.

    A class counts as a table when its own ``__config__`` has ``table=True``
    while none of its direct bases already declares itself a table.

    Robustness fix: ``getattr`` now takes an explicit ``None`` default so
    bases that lack a ``__config__`` attribute entirely (e.g. plain mixin
    classes) no longer raise ``AttributeError``.
    """
    base_is_table = False
    for base in cls.__bases__:
        config = getattr(base, "__config__", None)
        if config and getattr(config, "table", False):
            base_is_table = True
            break
    return getattr(cls.__config__, "table", False) and not base_is_table
def validate_table(func):
    """Decorator guarding SQLAlchemy-backed methods.

    Raises :class:`InvalidTable` when the decorated method is invoked on a
    SQLModel class (or an instance of one) not declared with ``table=True``.
    """

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        owner = self if inspect.isclass(self) else self.__class__
        if not is_table(owner):
            raise InvalidTable(
                f'"{owner.__name__}" is not a table. '
                "Add the class parameter `table=True` or don't use with this object."
            )
        return func(self, *args, **kwargs)

    return wrapper
class Base(SQLModel):
    """Async CRUD convenience mixin for SQLModel table classes."""

    @classmethod
    @validate_table
    async def get(
        cls: Type[Self], session: AsyncSession, *args: Any, **kwargs: Any
    ) -> Self:
        """Fetch the first row matching the given filter criteria."""
        statement = select(cls).filter(*args).filter_by(**kwargs)
        rows = await session.execute(statement)
        return rows.scalars().first()

    @classmethod
    @validate_table
    async def get_multi(
        cls: Type[Self],
        session: AsyncSession,
        *args,
        offset: int = 0,
        limit: int = 100,
        **kwargs,
    ) -> List[Self]:
        """Fetch up to *limit* matching rows, skipping the first *offset*."""
        statement = (
            select(cls)
            .filter(*args)
            .filter_by(**kwargs)
            .offset(offset)
            .limit(limit)
        )
        rows = await session.execute(statement)
        return rows.scalars().all()

    @classmethod
    @validate_table
    async def create(cls: Type[Self], session: AsyncSession, **kwargs: Any) -> Self:
        """Insert and commit a new row built from *kwargs*; return it."""
        instance = cls(**kwargs)
        session.add(instance)
        await session.commit()
        return instance

    @validate_table
    async def update(self: Self, session: AsyncSession, **kwargs: Any) -> Self:
        """Overwrite this row's fields with matching *kwargs* values, commit,
        refresh and return it."""
        for field in jsonable_encoder(self):
            if field in kwargs:
                setattr(self, field, kwargs[field])
        session.add(self)
        await session.commit()
        await session.refresh(self)
        return self

    @classmethod
    @validate_table
    async def delete(
        cls: Type[Self], session: AsyncSession, *args: Any, **kwargs: Any
    ) -> Self:
        """Delete (and return) the first row matching the filter criteria."""
        target = await cls.get(session, *args, **kwargs)
        await session.delete(target)
        await session.commit()
        return target
| [
"sqlmodel.select"
] | [((270, 299), 'typing.TypeVar', 'TypeVar', (['"""Self"""'], {'bound': '"""Base"""'}), "('Self', bound='Base')\n", (277, 299), False, 'from typing import Any, List, Type, TypeVar\n'), ((836, 847), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (841, 847), False, 'from functools import wraps\n'), ((2278, 2300), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['self'], {}), '(self)\n', (2294, 2300), False, 'from fastapi.encoders import jsonable_encoder\n'), ((910, 931), 'inspect.isclass', 'inspect.isclass', (['self'], {}), '(self)\n', (925, 931), False, 'import inspect\n'), ((1438, 1449), 'sqlmodel.select', 'select', (['cls'], {}), '(cls)\n', (1444, 1449), False, 'from sqlmodel import SQLModel, select\n'), ((1801, 1812), 'sqlmodel.select', 'select', (['cls'], {}), '(cls)\n', (1807, 1812), False, 'from sqlmodel import SQLModel, select\n')] |
from typing import Optional
from pydantic import EmailStr
from sqlmodel import Field, SQLModel
# define your database tables (models) here
class User(SQLModel, table=True):
    """An application user account."""

    id: Optional[int] = Field(default=None, nullable=False, primary_key=True)
    name: str = Field(nullable=False)
    email: EmailStr = Field(nullable=False)
    # NOTE(review): presumably stores a password hash, not plaintext -- confirm.
    password: str = Field(nullable=False)
| [
"sqlmodel.Field"
] | [((200, 253), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(False)', 'primary_key': '(True)'}), '(default=None, nullable=False, primary_key=True)\n', (205, 253), False, 'from sqlmodel import Field, SQLModel\n'), ((270, 291), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (275, 291), False, 'from sqlmodel import Field, SQLModel\n'), ((314, 335), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (319, 335), False, 'from sqlmodel import Field, SQLModel\n'), ((371, 392), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (376, 392), False, 'from sqlmodel import Field, SQLModel\n')] |
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
def test_tutorial(clear_sqlmodel):
    """Run the `where` tutorial against in-memory SQLite and check its output."""
    from docs_src.tutorial.where import tutorial002 as mod

    mod.sqlite_url = "sqlite://"
    mod.engine = create_engine(mod.sqlite_url)
    captured = []
    with patch("builtins.print", new=get_testing_print_function(captured)):
        mod.main()
    expected = [
        [
            {
                "name": "Spider-Boy",
                "secret_name": "<NAME>",
                "age": None,
                "id": 2,
            }
        ],
        [{"name": "Rusty-Man", "secret_name": "<NAME>", "age": 48, "id": 3}],
    ]
    assert captured == expected
| [
"sqlmodel.create_engine"
] | [((267, 296), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (280, 296), False, 'from sqlmodel import create_engine\n'), ((373, 411), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (378, 411), False, 'from unittest.mock import patch\n'), ((421, 431), 'docs_src.tutorial.where.tutorial002.main', 'mod.main', ([], {}), '()\n', (429, 431), True, 'from docs_src.tutorial.where import tutorial002 as mod\n')] |
from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR
from pydantic import BaseModel
from datetime import datetime
from typing import List, Optional
from sqlalchemy import String, Column
class ServerCatagoryLink(SQLModel, table=True):
    """Association table for the many-to-many server/catagory relation."""

    server_id: Optional[int] = Field(default=None, foreign_key="server.id", primary_key=True)
    catagory_id: Optional[int] = Field(default=None, foreign_key="catagory.id", primary_key=True)
class ServerOrganizationLink(SQLModel, table=True):
    """Association table for the many-to-many server/organization relation."""

    server_id: Optional[int] = Field(default=None, foreign_key="server.id", primary_key=True)
    organization_id: Optional[int] = Field(default=None, foreign_key="organization.id", primary_key=True)
class Catagory(SQLModel, table=True):
    """A colored category that servers can be tagged with."""

    id: int = Field(primary_key=True)
    # Unique display title and unique machine-readable reference.
    title: str = Field(sa_column=Column("title", String(255), unique=True))
    meta_ref: str = Field(sa_column=Column("meta_ref", String(255), unique=True))
    color: str
    servers: List["Server"] = Relationship(back_populates="catagories", link_model=ServerCatagoryLink)
class SaveCatagory(SQLModel):
    """Input schema for creating/updating a catagory (not a table)."""

    title: str
    color: str
class Organization(SQLModel, table=True):
    """An organization owning servers; may nest under a parent organization."""

    id: int = Field(primary_key=True)
    title: str = Field(sa_column=Column("title", String(255), unique=True))
    # Self-referencing FK enables an organization tree.
    parent_id: Optional[int] = Field(foreign_key="organization.id")
    ref_title: str
    description: str
    servers: List["Server"] = Relationship(back_populates="organizations", link_model=ServerOrganizationLink)
class Server(SQLModel, table=True):
    """A monitored server/endpoint plus its health-check bookkeeping."""

    id: int = Field(primary_key=True)
    scheme: str = Field(default="http")
    domain_name: str = Field(sa_column=Column("domain_name", String(255), unique=True))
    path: Optional[str]
    agency: Optional[int]
    organization: Optional[str]
    # Starts as LOADING -- presumably until the first check completes; confirm.
    status: str = "LOADING"
    server_log: List["ServerLog"] = Relationship(back_populates="server")
    server_reports: List["ServerReport"] = Relationship(back_populates="server")
    clicks: int = 0
    ipaddress: Optional[str]
    response_time: Optional[int] = None
    last_checked: Optional[datetime]
    catagories: List["Catagory"] = Relationship(back_populates="servers", link_model=ServerCatagoryLink)
    organizations: List["Organization"] = Relationship(back_populates="servers", link_model=ServerOrganizationLink)

    class Config:
        arbitrary_types_allowed = True
class ServerLog(SQLModel, table=True):
    """One raw health-check observation recorded for a server."""
    id: int = Field(primary_key=True)
    # Time of the observation (field name shadows the ``datetime`` type).
    datetime: datetime
    server_id: Optional[int] = Field(default=None, foreign_key="server.id")
    server: Optional[Server] = Relationship(back_populates="server_log")
    # HTTP outcome of the probe -- presumably None when the request failed; confirm.
    response_code: Optional[int]
    response_time: Optional[int]
    ipaddress: Optional[str]
    url: str
    # Error text captured when the check did not succeed.
    error: Optional[str]
class ServerReport(SQLModel, table=True):
    """A report entry linked to a server -- exact purpose not evident from this file."""
    id: int = Field(primary_key=True)
    # Report timestamp (field name shadows the ``datetime`` type).
    datetime: datetime
    server_id: Optional[int] = Field(default=None, foreign_key="server.id")
    server: Optional[Server] = Relationship(back_populates="server_reports")
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((280, 342), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""server.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='server.id', primary_key=True)\n", (285, 342), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((390, 454), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""catagory.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='catagory.id', primary_key=True)\n", (395, 454), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((554, 616), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""server.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='server.id', primary_key=True)\n", (559, 616), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((668, 736), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""organization.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='organization.id', primary_key=True)\n", (673, 736), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((805, 828), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (810, 828), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((1032, 1104), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""catagories"""', 'link_model': 'ServerCatagoryLink'}), "(back_populates='catagories', link_model=ServerCatagoryLink)\n", (1044, 1104), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((1239, 1262), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1244, 1262), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((1370, 1406), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""organization.id"""'}), "(foreign_key='organization.id')\n", (1375, 1406), False, 'from sqlmodel import SQLModel, 
Field, JSON, Relationship, VARCHAR\n'), ((1477, 1556), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""organizations"""', 'link_model': 'ServerOrganizationLink'}), "(back_populates='organizations', link_model=ServerOrganizationLink)\n", (1489, 1556), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((1623, 1646), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1628, 1646), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((1665, 1686), 'sqlmodel.Field', 'Field', ([], {'default': '"""http"""'}), "(default='http')\n", (1670, 1686), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((1921, 1958), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""server"""'}), "(back_populates='server')\n", (1933, 1958), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2002, 2039), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""server"""'}), "(back_populates='server')\n", (2014, 2039), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2201, 2270), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""servers"""', 'link_model': 'ServerCatagoryLink'}), "(back_populates='servers', link_model=ServerCatagoryLink)\n", (2213, 2270), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2327, 2400), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""servers"""', 'link_model': 'ServerOrganizationLink'}), "(back_populates='servers', link_model=ServerOrganizationLink)\n", (2339, 2400), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2528, 2551), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2533, 2551), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2606, 2650), 'sqlmodel.Field', 
'Field', ([], {'default': 'None', 'foreign_key': '"""server.id"""'}), "(default=None, foreign_key='server.id')\n", (2611, 2650), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2682, 2723), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""server_log"""'}), "(back_populates='server_log')\n", (2694, 2723), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2916, 2939), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2921, 2939), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((2994, 3038), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""server.id"""'}), "(default=None, foreign_key='server.id')\n", (2999, 3038), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((3070, 3115), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""server_reports"""'}), "(back_populates='server_reports')\n", (3082, 3115), False, 'from sqlmodel import SQLModel, Field, JSON, Relationship, VARCHAR\n'), ((878, 889), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (884, 889), False, 'from sqlalchemy import String, Column\n'), ((960, 971), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (966, 971), False, 'from sqlalchemy import String, Column\n'), ((1312, 1323), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (1318, 1323), False, 'from sqlalchemy import String, Column\n'), ((1748, 1759), 'sqlalchemy.String', 'String', (['(255)'], {}), '(255)\n', (1754, 1759), False, 'from sqlalchemy import String, Column\n')] |
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
# Exact print output expected from the offset/limit tutorial.
expected_calls = [
    [
        [
            {
                "id": 7,
                "name": "Captain North America",
                "secret_name": "<NAME>",
                "age": 93,
            }
        ]
    ]
]


def test_tutorial(clear_sqlmodel):
    """Run the offset/limit tutorial against in-memory SQLite and verify output."""
    from docs_src.tutorial.offset_and_limit import tutorial003 as mod

    mod.sqlite_url = "sqlite://"
    mod.engine = create_engine(mod.sqlite_url)
    captured = []
    with patch("builtins.print", new=get_testing_print_function(captured)):
        mod.main()
    assert captured == expected_calls
| [
"sqlmodel.create_engine"
] | [((502, 531), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (515, 531), False, 'from sqlmodel import create_engine\n'), ((608, 646), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (613, 646), False, 'from unittest.mock import patch\n'), ((656, 666), 'docs_src.tutorial.offset_and_limit.tutorial003.main', 'mod.main', ([], {}), '()\n', (664, 666), True, 'from docs_src.tutorial.offset_and_limit import tutorial003 as mod\n')] |
from sqlmodel import create_engine
from pyflarum.database.session import FlarumDatabase
from pyflarum.database.flarum.core.users import DB_User
# Engine/session wrapper around the example database used by the tests.
ENGINE = create_engine('sqlite:///tests/database/database.db')
DATABASE = FlarumDatabase(engine=ENGINE)


if __name__ == "__main__":
    with DATABASE:
        for user in DATABASE.generic_filter(DB_User, id=1).all():
            if not user.discussions:
                print(user.username, '(no discussions)')
                continue

            print(user.username, ':', sep='')
            for discussion in user.discussions:
                print('•', discussion.title)
| [
"sqlmodel.create_engine"
] | [((156, 209), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///tests/database/database.db"""'], {}), "('sqlite:///tests/database/database.db')\n", (169, 209), False, 'from sqlmodel import create_engine\n'), ((221, 250), 'pyflarum.database.session.FlarumDatabase', 'FlarumDatabase', ([], {'engine': 'ENGINE'}), '(engine=ENGINE)\n', (235, 250), False, 'from pyflarum.database.session import FlarumDatabase\n')] |
from sqlmodel import SQLModel, create_engine
from sqlalchemy.orm import sessionmaker
from opencensus.ext.azure.log_exporter import AzureLogHandler
from sfm.logger import create_logger
from sfm.config import get_settings
import psycopg2
# def generate_db_string(ENV: str, DBHOST: str, DBNAME: str, DBUSER: str, DBPASS: str):
# """Take in env variables and generate correct db string."""
# # if ENV == "test":
# # return "sqlite://" # in-memory database for unit tests
# if ENV == "local":
# return "postgres+asyncpg://postgres:postgres@db:5432/sfm" # local sqlite for local development
# if ENV == "development" or "production":
# # need all four parameters available
# if "unset" in [DBNAME, DBPASS]:
# raise ValueError(
# "Missing database parameter in the environment. Please specify DBHOST, DBNAME, DBUSER, and DBPASS"
# )
# conn = "host={0} user={1} dbname={2} password={3} sslmode={4}".format(
# DBHOST, DBUSER, DBNAME, DBPASS, "require"
# )
# conn = f"postgresql+asyncpg://{DBUSER}:{DBPASS}@{DBHOST}/{DBNAME}"
# # return conn
# return conn
# Application settings loaded once at import time.
app_settings = get_settings()

# NOTE(review): the commented-out code below documents an earlier setup that
# built the connection string by hand and an async engine/session variant.
# Kept for reference.
# CONN_STR = generate_db_string(
#     app_settings.ENV,
#     app_settings.DBHOST,
#     app_settings.DBNAME,
#     app_settings.DBUSER,
#     app_settings.DBPASS,
# )

# check_same_thread = false only works in sqlite, not postgres or others
# if "sqlite" in CONN_STR:
#     # print("Using a sqlite database")
#     connect_args = {"check_same_thread": False}
#     engine = create_engine(CONN_STR, connect_args=connect_args)
# else:
logger = create_logger(__name__)

# Synchronous engine; echo=False suppresses SQL statement logging.
engine = create_engine(app_settings.DATABASE_URL, echo=False)


# async def init_db():
#     async with engine.begin() as conn:
#         # await conn.run_sync(SQLModel.metadata.drop_all)
#         await conn.run_sync(SQLModel.metadata.create_all)
#         logger.info("Database tables have been created")


# async def get_session() -> AsyncSession:
#     async_session = sessionmaker(
#         engine, class_=AsyncSession, expire_on_commit=False
#     )
#     async with async_session() as session:
#         yield session


# def create_db_and_tables():
#     SQLModel.metadata.create_all(engine)
| [
"sqlmodel.create_engine"
] | [((1212, 1226), 'sfm.config.get_settings', 'get_settings', ([], {}), '()\n', (1224, 1226), False, 'from sfm.config import get_settings\n'), ((1671, 1694), 'sfm.logger.create_logger', 'create_logger', (['__name__'], {}), '(__name__)\n', (1684, 1694), False, 'from sfm.logger import create_logger\n'), ((1705, 1757), 'sqlmodel.create_engine', 'create_engine', (['app_settings.DATABASE_URL'], {'echo': '(False)'}), '(app_settings.DATABASE_URL, echo=False)\n', (1718, 1757), False, 'from sqlmodel import SQLModel, create_engine\n')] |
from typing import Callable
from sqlmodel import select, Session
from . import BaseRepository, engine
from ..models import ProfileDB, UserDB
from app.shared.exc import (
EmailAlreadyTakenError,
UserDoesNotExist,
UsernameAlreadyTakenError)
class UserRepository(BaseRepository):
    """Data-access layer for ``UserDB`` rows."""

    model = UserDB

    @classmethod
    def create_with_profile(cls, **kwargs) -> UserDB:
        """Create a user with an attached profile (gravatar included) and persist it."""
        user = cls.model(**kwargs)
        user.profile = ProfileDB()
        user.profile.create_gravatar()
        user.save()
        return user

    @classmethod
    def get_all_ilike_username(cls, search_for: str) -> list[UserDB]:
        """Return all users whose username contains *search_for* (SQL LIKE)."""
        with Session(engine) as session:
            statement = select(cls.model).where(
                cls.model.username.like(f"%{search_for}%")
            )
            return session.exec(statement).unique().all()

    @classmethod
    def get_model_by_username(cls, username: str) -> UserDB | None:
        """Look a user up by exact username."""
        return cls.get_model_by_attr(username=username)

    @classmethod
    def get_model_by_email(cls, email: str) -> UserDB | None:
        """Look a user up by exact email."""
        return cls.get_model_by_attr(email=email)

    @classmethod
    def is_credentials_valid(cls,
                             username: str,
                             password: str,
                             verify_hash_func: Callable
                             ) -> bool:
        """Check *password* against the stored hash for *username*.

        Returns False when the user does not exist; raises NameError when no
        hash-verification callable is supplied.
        """
        if not verify_hash_func:
            raise NameError("Hash Function is not defined.")
        user = cls.get_model_by_username(username)
        if not user:
            return False
        return verify_hash_func(password, user.hashed_password)

    @classmethod
    def change_username(cls, old_username: str, new_username: str) -> UserDB | None:
        """Rename a user; raises when the new name is taken or the user is missing."""
        # IF the new username is the same as the user' current
        # OR already taken by another user
        if old_username == new_username or cls.get_model_by_username(new_username):
            raise UsernameAlreadyTakenError("Please try a different username.")
        user = cls.get_model_by_username(old_username)
        if not user:
            raise UserDoesNotExist("User not found.")
        user.username = new_username
        user.save()
        return user

    @classmethod
    def change_email(cls, old_email: str, new_email: str) -> UserDB | None:
        """Change a user's email; raises when taken or when the user is missing."""
        # IF the new email is the same as the user' current
        # OR already taken by another user
        if old_email == new_email or cls.get_model_by_email(new_email):
            # BUG FIX: the message previously said "username" for an email error.
            raise EmailAlreadyTakenError("Please try a different email.")
        user = cls.get_model_by_email(old_email)
        if not user:
            raise UserDoesNotExist("User not found.")
        user.email = new_email
        user.save()
        return user

    @classmethod
    def change_password(cls, user_id: int, new_password: str) -> bool:
        """Not implemented yet."""
        pass
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((697, 712), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (704, 712), False, 'from sqlmodel import select, Session\n'), ((1973, 2034), 'app.shared.exc.UsernameAlreadyTakenError', 'UsernameAlreadyTakenError', (['"""Please try a different username."""'], {}), "('Please try a different username.')\n", (1998, 2034), False, 'from app.shared.exc import EmailAlreadyTakenError, UserDoesNotExist, UsernameAlreadyTakenError\n'), ((2120, 2155), 'app.shared.exc.UserDoesNotExist', 'UserDoesNotExist', (['"""User not found."""'], {}), "('User not found.')\n", (2136, 2155), False, 'from app.shared.exc import EmailAlreadyTakenError, UserDoesNotExist, UsernameAlreadyTakenError\n'), ((2521, 2579), 'app.shared.exc.EmailAlreadyTakenError', 'EmailAlreadyTakenError', (['"""Please try a different username."""'], {}), "('Please try a different username.')\n", (2543, 2579), False, 'from app.shared.exc import EmailAlreadyTakenError, UserDoesNotExist, UsernameAlreadyTakenError\n'), ((2659, 2694), 'app.shared.exc.UserDoesNotExist', 'UserDoesNotExist', (['"""User not found."""'], {}), "('User not found.')\n", (2675, 2694), False, 'from app.shared.exc import EmailAlreadyTakenError, UserDoesNotExist, UsernameAlreadyTakenError\n'), ((757, 774), 'sqlmodel.select', 'select', (['cls.model'], {}), '(cls.model)\n', (763, 774), False, 'from sqlmodel import select, Session\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel
from datetime import datetime
class Meter(SQLModel, table=True):
    """A metering device, identified by its serial number."""
    id: Optional[int] = Field(default=None, primary_key=True)
    serial_number: str
class Measurement(SQLModel, table=True):
    """A single sampled reading captured from a meter at ``capture_time``."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # Owning meter (nullable FK to meter.id).
    meter_id: Optional[int] = Field(default=None, foreign_key="meter.id")
    capture_time: datetime = Field()
    # Per-phase voltage readings; units not specified here -- presumably volts.
    voltage_phase_1: float = Field(default=0.0)
    voltage_phase_2: float = Field(default=0.0)
    voltage_phase_3: float = Field(default=0.0)
    power: float = Field(default=0.0)
    # thd_1..thd_10: harmonic-distortion components -- exact semantics
    # (per harmonic order vs. per channel) not evident here; confirm.
    thd_1: float = Field(default=0.0)
    thd_2: float = Field(default=0.0)
    thd_3: float = Field(default=0.0)
    thd_4: float = Field(default=0.0)
    thd_5: float = Field(default=0.0)
    thd_6: float = Field(default=0.0)
    thd_7: float = Field(default=0.0)
    thd_8: float = Field(default=0.0)
    thd_9: float = Field(default=0.0)
    thd_10: float = Field(default=0.0)
class Label(SQLModel, table=True):
    """A named, colored label (assigned to meters via LabelAssignment)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field()
    color: str = Field()
class LabelAssignment(SQLModel, table=True):
    """Assigns a label to a meter for a time window (start_time..end_time)."""
    id: Optional[int] = Field(default=None, primary_key=True)
    label_id: Optional[int] = Field(default=None, foreign_key="label.id")
    meter_id: Optional[int] = Field(default=None, foreign_key="meter.id")
    start_time: datetime = Field()
    end_time: datetime = Field()
| [
"sqlmodel.Field"
] | [((156, 193), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (161, 193), False, 'from sqlmodel import Field, SQLModel\n'), ((284, 321), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (289, 321), False, 'from sqlmodel import Field, SQLModel\n'), ((352, 395), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""meter.id"""'}), "(default=None, foreign_key='meter.id')\n", (357, 395), False, 'from sqlmodel import Field, SQLModel\n'), ((425, 432), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (430, 432), False, 'from sqlmodel import Field, SQLModel\n'), ((462, 480), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (467, 480), False, 'from sqlmodel import Field, SQLModel\n'), ((510, 528), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (515, 528), False, 'from sqlmodel import Field, SQLModel\n'), ((558, 576), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (563, 576), False, 'from sqlmodel import Field, SQLModel\n'), ((596, 614), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (601, 614), False, 'from sqlmodel import Field, SQLModel\n'), ((634, 652), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (639, 652), False, 'from sqlmodel import Field, SQLModel\n'), ((672, 690), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (677, 690), False, 'from sqlmodel import Field, SQLModel\n'), ((710, 728), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (715, 728), False, 'from sqlmodel import Field, SQLModel\n'), ((748, 766), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (753, 766), False, 'from sqlmodel import Field, SQLModel\n'), ((786, 804), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (791, 
804), False, 'from sqlmodel import Field, SQLModel\n'), ((824, 842), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (829, 842), False, 'from sqlmodel import Field, SQLModel\n'), ((862, 880), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (867, 880), False, 'from sqlmodel import Field, SQLModel\n'), ((900, 918), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (905, 918), False, 'from sqlmodel import Field, SQLModel\n'), ((938, 956), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (943, 956), False, 'from sqlmodel import Field, SQLModel\n'), ((977, 995), 'sqlmodel.Field', 'Field', ([], {'default': '(0.0)'}), '(default=0.0)\n', (982, 995), False, 'from sqlmodel import Field, SQLModel\n'), ((1057, 1094), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1062, 1094), False, 'from sqlmodel import Field, SQLModel\n'), ((1112, 1119), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (1117, 1119), False, 'from sqlmodel import Field, SQLModel\n'), ((1138, 1145), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (1143, 1145), False, 'from sqlmodel import Field, SQLModel\n'), ((1217, 1254), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1222, 1254), False, 'from sqlmodel import Field, SQLModel\n'), ((1285, 1328), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""label.id"""'}), "(default=None, foreign_key='label.id')\n", (1290, 1328), False, 'from sqlmodel import Field, SQLModel\n'), ((1359, 1402), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""meter.id"""'}), "(default=None, foreign_key='meter.id')\n", (1364, 1402), False, 'from sqlmodel import Field, SQLModel\n'), ((1430, 1437), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (1435, 1437), False, 'from sqlmodel import Field, SQLModel\n'), ((1463, 1470), 
'sqlmodel.Field', 'Field', ([], {}), '()\n', (1468, 1470), False, 'from sqlmodel import Field, SQLModel\n')] |
import typing as t
if t.TYPE_CHECKING:
from ..core.discussions import DB_Discussion
from ..core.users import DB_User
from datetime import datetime
from sqlmodel import SQLModel, Field, Relationship
class DB_TagUser(SQLModel, table=True):
    """
    Represents a tag-to-user relationship in the database.

    Link (association) table between ``users`` and ``tags``; the pair
    (user_id, tag_id) forms the composite primary key.
    """
    __tablename__ = 'tag_user'
    user_id: t.Optional[int] = Field(default=None, primary_key=True, foreign_key='users.id')
    """The ID of the user."""
    tag_id: t.Optional[int] = Field(default=None, primary_key=True, foreign_key='tags.id')
    """The ID of the tag."""
    # None presumably means the user has never marked the tag as read.
    marked_as_read_at: t.Optional[datetime]
    """When the user marked the tag as read?"""
    # NOTE(review): purpose not clear from this file — confirm against the
    # application layer before documenting further.
    is_hidden: bool = Field(default=False)
    """?"""
class DB_Tag(SQLModel, table=True):
    """
    Represents a tag in the database.

    Tags form a tree via the self-referential ``parent_id``/``parent_tag``
    relationship, and are linked many-to-many to users through DB_TagUser.
    """
    __tablename__ = 'tags'
    id: t.Optional[int] = Field(default=None, primary_key=True)
    """The ID of the tag. This is handled by the database."""
    name: str = Field(max_length=100)
    """The name of the tag."""
    slug: str = Field(max_length=100)
    """The tag's slug (will be used in URL)."""
    description: t.Optional[t.Text]
    """The description of the tag."""
    color: t.Optional[str] = Field(max_length=50)
    """The tag's color."""
    # NOTE(review): background_* semantics are not evident here — verify.
    background_path: t.Optional[str] = Field(max_length=100)
    """?"""
    background_mode: t.Optional[str] = Field(max_length=100)
    """?"""
    position: t.Optional[int]
    """The tag's position in the tag tree."""
    # Self-referential foreign key onto this same table.
    parent_id: t.Optional[int] = Field(default=None, foreign_key='tags.id')
    """The ID of the parent tag."""
    parent_tag: t.Optional['DB_Tag'] = Relationship(back_populates='children')
    """The tag's parent tag."""
    default_sort: t.Optional[str]
    """The default sorting behaviour of the tag."""
    is_restricted: bool = Field(default=False)
    """Whether or not the tag is restricted."""
    is_hidden: bool = Field(default=False)
    """Whether or not the tag is hidden."""
    # Denormalized counter, not computed from a query here.
    discussion_count: int = Field(default=0)
    """How many discussions are tagged with this tag?"""
    last_posted_at: t.Optional[datetime]
    """The datetime when was the last discussion posted in this tag."""
    last_posted_discussion_id: t.Optional[int] = Field(default=None, foreign_key='discussions.id')
    """The ID of the last posted discussion in this tag."""
    last_posted_discussion: t.Optional['DB_Discussion'] = Relationship(back_populates='tags')
    """The last posted discussion in this tag."""
    last_posted_user_id: t.Optional[int] = Field(default=None, foreign_key='users.id')
    """The ID of the user that last posted in this tag."""
    last_posted_user: t.Optional['DB_User'] = Relationship(back_populates='tags')
    """The user that last posted in this tag."""
    icon: t.Optional[str] = Field(max_length=100)
    """The [FontAwesome](https://fontawesome.com/v5.15/icons?d=gallery&m=free) icon for the tag."""
    users: t.List['DB_User'] = Relationship(back_populates='tags', link_model=DB_TagUser)
    """Users that have relationship with this tag."""
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((394, 455), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'foreign_key': '"""users.id"""'}), "(default=None, primary_key=True, foreign_key='users.id')\n", (399, 455), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((516, 576), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'foreign_key': '"""tags.id"""'}), "(default=None, primary_key=True, foreign_key='tags.id')\n", (521, 576), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((721, 741), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (726, 741), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((905, 942), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (910, 942), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1022, 1043), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1027, 1043), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1091, 1112), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1096, 1112), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1265, 1285), 'sqlmodel.Field', 'Field', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1270, 1285), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1353, 1374), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1358, 1374), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1426, 1447), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1431, 1447), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1570, 1612), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""tags.id"""'}), "(default=None, foreign_key='tags.id')\n", (1575, 1612), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1688, 
1727), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""children"""'}), "(back_populates='children')\n", (1700, 1727), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1874, 1894), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (1879, 1894), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1965, 1985), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (1970, 1985), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2059, 2075), 'sqlmodel.Field', 'Field', ([], {'default': '(0)'}), '(default=0)\n', (2064, 2075), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2296, 2345), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""discussions.id"""'}), "(default=None, foreign_key='discussions.id')\n", (2301, 2345), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2464, 2499), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""tags"""'}), "(back_populates='tags')\n", (2476, 2499), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2593, 2636), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""users.id"""'}), "(default=None, foreign_key='users.id')\n", (2598, 2636), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2742, 2777), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""tags"""'}), "(back_populates='tags')\n", (2754, 2777), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((2856, 2877), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)'}), '(max_length=100)\n', (2861, 2877), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((3010, 3068), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""tags"""', 'link_model': 'DB_TagUser'}), "(back_populates='tags', link_model=DB_TagUser)\n", (3022, 3068), False, 'from sqlmodel import SQLModel, Field, 
Relationship\n')] |
import os
import re
from pathlib import Path
from typing import List, Optional
import tmdbsimple as tmdb
from dotenv import load_dotenv
from models import (
Collection,
Genre,
Movie,
ProductionCompany,
ProductionCountry,
SpokenLanguage,
)
from sqlalchemy import extract
from sqlalchemy.exc import NoResultFound
from sqlmodel import Session, SQLModel, select
load_dotenv()
YEAR_PATTERN = re.compile(r"(\s\(\d{4}\))")
def create_model_obj(o: dict, model_type: SQLModel, session: Session) -> SQLModel:
    """Instantiate ``model_type`` from the mapping ``o``.

    ``session`` is accepted for interface parity with the other factory
    helpers but is not used here.
    """
    return model_type(**o)
def create_model_objs(
    data: dict, model_type: SQLModel, session: Session
) -> List[SQLModel]:
    """Instantiate one ``model_type`` object per mapping in ``data``.

    ``session`` is unused; it is kept so all factory helpers share the
    same call shape.
    """
    return [model_type(**record) for record in data]
def tmdb_info_to_movie(info: dict, session: Session) -> Movie:
    """Build a Movie row (plus related rows) from a TMDB ``info`` payload.

    Relationship payloads are split out of ``info``; the remaining keys
    are passed straight to the Movie constructor. The movie is added to
    ``session``, committed, refreshed and returned.
    """
    # Keys that map to related models rather than Movie columns.
    relationship_keys = {
        "genres",
        "belongs_to_collection",
        "production_companies",
        "production_countries",
        "spoken_languages",
    }
    movie_info = {k: v for k, v in info.items() if k not in relationship_keys}
    genres = create_model_objs(info["genres"], Genre, session)
    # belongs_to_collection may be None/empty for standalone movies.
    collection = None
    if info["belongs_to_collection"]:
        collection = create_model_obj(
            info["belongs_to_collection"], Collection, session
        )
    production_companies = create_model_objs(
        info["production_companies"], ProductionCompany, session
    )
    production_countries = create_model_objs(
        info["production_countries"], ProductionCountry, session
    )
    # languages
    spoken_languages = create_model_objs(
        info["spoken_languages"], SpokenLanguage, session
    )
    # create movie
    movie = Movie(**movie_info)
    movie.genres = genres
    movie.collection = collection
    movie.production_companies = production_companies
    movie.production_countries = production_countries
    movie.spoken_languages = spoken_languages
    session.add(movie)
    session.commit()
    session.refresh(movie)
    return movie
tmdb.API_KEY = os.getenv("TMDB_API_KEY", None)
def split_movie_path_title_and_year(path: str):
    """Split a movie file path into its title and optional year.

    A trailing " (YYYY)" suffix in the stem, if present, is stripped from
    the title and returned separately; otherwise year is None.
    """
    stem = Path(path).stem
    year_match = YEAR_PATTERN.search(stem)
    if not year_match:
        return stem, None
    matched = year_match.group()
    return stem.replace(matched, ""), matched.strip(" ()")
def get_movie_from_path(path: str, session: Session) -> Optional[Movie]:
    """Resolve a file path to a Movie, consulting the DB first, then TMDB.

    Returns None when neither the database nor the TMDB search yields a
    match.
    """
    movie = None
    movie_name, year = split_movie_path_title_and_year(path)
    # lookup in db
    statement = select(Movie).where(Movie.title == movie_name)
    if year is not None:
        # Narrow by release year when the filename carried one.
        statement = statement.filter(extract("year", Movie.release_date) == int(year))
    try:
        movie = session.exec(statement).one()
    except NoResultFound:
        # Fall back to the TMDB API and persist the first hit.
        search = tmdb.Search()
        search.movie(query=movie_name, year=year)
        # take the first result:
        if search.results:
            id = search.results[0]["id"]
            info = tmdb.Movies(id).info()
            movie = tmdb_info_to_movie(info, session)
    return movie
| [
"sqlmodel.select"
] | [((384, 397), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (395, 397), False, 'from dotenv import load_dotenv\n'), ((414, 445), 're.compile', 're.compile', (['"""(\\\\s\\\\(\\\\d{4}\\\\))"""'], {}), "('(\\\\s\\\\(\\\\d{4}\\\\))')\n", (424, 445), False, 'import re\n'), ((2087, 2118), 'os.getenv', 'os.getenv', (['"""TMDB_API_KEY"""', 'None'], {}), "('TMDB_API_KEY', None)\n", (2096, 2118), False, 'import os\n'), ((1746, 1765), 'models.Movie', 'Movie', ([], {}), '(**movie_info)\n', (1751, 1765), False, 'from models import Collection, Genre, Movie, ProductionCompany, ProductionCountry, SpokenLanguage\n'), ((2186, 2196), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (2190, 2196), False, 'from pathlib import Path\n'), ((2625, 2638), 'sqlmodel.select', 'select', (['Movie'], {}), '(Movie)\n', (2631, 2638), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2882, 2895), 'tmdbsimple.Search', 'tmdb.Search', ([], {}), '()\n', (2893, 2895), True, 'import tmdbsimple as tmdb\n'), ((2734, 2769), 'sqlalchemy.extract', 'extract', (['"""year"""', 'Movie.release_date'], {}), "('year', Movie.release_date)\n", (2741, 2769), False, 'from sqlalchemy import extract\n'), ((3066, 3081), 'tmdbsimple.Movies', 'tmdb.Movies', (['id'], {}), '(id)\n', (3077, 3081), True, 'import tmdbsimple as tmdb\n')] |
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine, select
class Hero(SQLModel, table=True):
    """A hero row; id is assigned by the database on insert."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    secret_name: str
    age: Optional[int] = None
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
    """Create all tables registered on SQLModel metadata."""
    SQLModel.metadata.create_all(engine)
def create_heroes():
    """Insert seven sample heroes in a single committed session."""
    hero_1 = Hero(name="Deadpond", secret_name="<NAME>")
    hero_2 = Hero(name="Spider-Boy", secret_name="<NAME>")
    hero_3 = Hero(name="Rusty-Man", secret_name="<NAME>", age=48)
    hero_4 = Hero(name="Tarantula", secret_name="<NAME>", age=32)
    hero_5 = Hero(name="<NAME>", secret_name="<NAME>", age=35)
    hero_6 = Hero(name="<NAME>", secret_name="<NAME>", age=36)
    hero_7 = Hero(name="Captain North America", secret_name="<NAME>", age=93)
    with Session(engine) as session:
        session.add(hero_1)
        session.add(hero_2)
        session.add(hero_3)
        session.add(hero_4)
        session.add(hero_5)
        session.add(hero_6)
        session.add(hero_7)
        session.commit()
def select_heroes():
    """Fetch and print the hero with id == 1 (or None if absent)."""
    with Session(engine) as session:
        statement = select(Hero).where(Hero.id == 1)
        results = session.exec(statement)
        hero = results.first()
        print("Hero:", hero)
def main():
    """Script entry point: set up the schema, seed data, run the query."""
    create_db_and_tables()
    create_heroes()
    select_heroes()
if __name__ == "__main__":
main()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.Field",
"sqlmodel.select",
"sqlmodel.create_engine"
] | [((351, 387), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (364, 387), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((158, 195), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (163, 195), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((422, 458), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (450, 458), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((944, 959), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (951, 959), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1226, 1241), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1233, 1241), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1274, 1286), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1280, 1286), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')] |
import uuid
from datetime import datetime, timedelta, timezone
from typing import AsyncGenerator
import pytest
from pydantic import UUID4
from sqlalchemy import exc
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel import Session, SQLModel, create_engine
from fastapi_users_db_sqlmodel import SQLModelUserDatabase, SQLModelUserDatabaseAsync
from fastapi_users_db_sqlmodel.access_token import (
SQLModelAccessTokenDatabase,
SQLModelAccessTokenDatabaseAsync,
SQLModelBaseAccessToken,
)
from tests.conftest import UserDB
class AccessToken(SQLModelBaseAccessToken, table=True):
    """Concrete access-token table used by these tests."""
    pass
@pytest.fixture
def user_id() -> UUID4:
    """Fixed user UUID shared by the database fixtures and tests."""
    return uuid.UUID("a9089e5d-2642-406d-a7c0-cbc641aca0ec")
async def init_sync_session(url: str) -> AsyncGenerator[Session, None]:
    """Yield a synchronous session against a fresh schema, then drop it."""
    # check_same_thread=False lets SQLite be used across pytest's threads.
    engine = create_engine(url, connect_args={"check_same_thread": False})
    SQLModel.metadata.create_all(engine)
    with Session(engine) as session:
        yield session
    SQLModel.metadata.drop_all(engine)
async def init_async_session(url: str) -> AsyncGenerator[AsyncSession, None]:
    """Yield an async session against a fresh schema, then drop it."""
    engine = create_async_engine(url, connect_args={"check_same_thread": False})
    make_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
    async with engine.begin() as conn:
        # Schema DDL must run through run_sync on the async connection.
        await conn.run_sync(SQLModel.metadata.create_all)
        async with make_session() as session:
            yield session
        await conn.run_sync(SQLModel.metadata.drop_all)
@pytest.fixture(
    params=[
        (
            init_sync_session,
            "sqlite:///./test-sqlmodel-access-token.db",
            SQLModelAccessTokenDatabase,
            SQLModelUserDatabase,
        ),
        (
            init_async_session,
            "sqlite+aiosqlite:///./test-sqlmodel-access-token.db",
            SQLModelAccessTokenDatabaseAsync,
            SQLModelUserDatabaseAsync,
        ),
    ],
    ids=["sync", "async"],
)
async def sqlmodel_access_token_db(
    request, user_id: UUID4
) -> AsyncGenerator[SQLModelAccessTokenDatabase, None]:
    """Parametrized fixture yielding a sync or async access-token DB.

    A user row is pre-created so access tokens can reference it via
    their foreign key.
    """
    create_session = request.param[0]
    database_url = request.param[1]
    access_token_database_class = request.param[2]
    user_database_class = request.param[3]
    async for session in create_session(database_url):
        user = UserDB(
            id=user_id, email="<EMAIL>", hashed_password="<PASSWORD>"
        )
        user_db = user_database_class(UserDB, session)
        await user_db.create(user)
        yield access_token_database_class(AccessToken, session)
@pytest.mark.asyncio
@pytest.mark.db
async def test_queries(
    sqlmodel_access_token_db: SQLModelAccessTokenDatabase[AccessToken],
    user_id: UUID4,
):
    """Exercise the full CRUD + lookup surface of the access-token DB."""
    access_token = AccessToken(token="TOKEN", user_id=user_id)
    # Create
    access_token_db = await sqlmodel_access_token_db.create(access_token)
    assert access_token_db.token == "TOKEN"
    assert access_token_db.user_id == user_id
    # Update
    access_token_db.created_at = datetime.now(timezone.utc)
    await sqlmodel_access_token_db.update(access_token_db)
    # Get by token
    access_token_by_token = await sqlmodel_access_token_db.get_by_token(
        access_token_db.token
    )
    assert access_token_by_token is not None
    # Get by token expired
    access_token_by_token = await sqlmodel_access_token_db.get_by_token(
        access_token_db.token, max_age=datetime.now(timezone.utc) + timedelta(hours=1)
    )
    assert access_token_by_token is None
    # Get by token not expired
    access_token_by_token = await sqlmodel_access_token_db.get_by_token(
        access_token_db.token, max_age=datetime.now(timezone.utc) - timedelta(hours=1)
    )
    assert access_token_by_token is not None
    # Get by token unknown
    access_token_by_token = await sqlmodel_access_token_db.get_by_token(
        "NOT_EXISTING_TOKEN"
    )
    assert access_token_by_token is None
    # Delete token
    await sqlmodel_access_token_db.delete(access_token_db)
    deleted_access_token = await sqlmodel_access_token_db.get_by_token(
        access_token_db.token
    )
    assert deleted_access_token is None
@pytest.mark.asyncio
@pytest.mark.db
async def test_insert_existing_token(
    sqlmodel_access_token_db: SQLModelAccessTokenDatabase[AccessToken], user_id: UUID4
):
    """Inserting a duplicate token must raise an IntegrityError."""
    access_token = AccessToken(token="TOKEN", user_id=user_id)
    await sqlmodel_access_token_db.create(access_token)
    with pytest.raises(exc.IntegrityError):
        await sqlmodel_access_token_db.create(
            AccessToken(token="TOKEN", user_id=user_id)
        )
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.SQLModel.metadata.drop_all",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((1537, 1859), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[(init_sync_session, 'sqlite:///./test-sqlmodel-access-token.db',\n SQLModelAccessTokenDatabase, SQLModelUserDatabase), (init_async_session,\n 'sqlite+aiosqlite:///./test-sqlmodel-access-token.db',\n SQLModelAccessTokenDatabaseAsync, SQLModelUserDatabaseAsync)]", 'ids': "['sync', 'async']"}), "(params=[(init_sync_session,\n 'sqlite:///./test-sqlmodel-access-token.db',\n SQLModelAccessTokenDatabase, SQLModelUserDatabase), (init_async_session,\n 'sqlite+aiosqlite:///./test-sqlmodel-access-token.db',\n SQLModelAccessTokenDatabaseAsync, SQLModelUserDatabaseAsync)], ids=[\n 'sync', 'async'])\n", (1551, 1859), False, 'import pytest\n'), ((725, 774), 'uuid.UUID', 'uuid.UUID', (['"""a9089e5d-2642-406d-a7c0-cbc641aca0ec"""'], {}), "('a9089e5d-2642-406d-a7c0-cbc641aca0ec')\n", (734, 774), False, 'import uuid\n'), ((862, 923), 'sqlmodel.create_engine', 'create_engine', (['url'], {'connect_args': "{'check_same_thread': False}"}), "(url, connect_args={'check_same_thread': False})\n", (875, 923), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((928, 964), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (956, 964), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((1028, 1062), 'sqlmodel.SQLModel.metadata.drop_all', 'SQLModel.metadata.drop_all', (['engine'], {}), '(engine)\n', (1054, 1062), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((1156, 1223), 'sqlalchemy.ext.asyncio.create_async_engine', 'create_async_engine', (['url'], {'connect_args': "{'check_same_thread': False}"}), "(url, connect_args={'check_same_thread': False})\n", (1175, 1223), False, 'from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine\n'), ((1243, 1308), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', (['engine'], {'class_': 'AsyncSession', 'expire_on_commit': '(False)'}), '(engine, class_=AsyncSession, 
expire_on_commit=False)\n', (1255, 1308), False, 'from sqlalchemy.orm import sessionmaker\n'), ((3037, 3063), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (3049, 3063), False, 'from datetime import datetime, timedelta, timezone\n'), ((974, 989), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (981, 989), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2349, 2414), 'tests.conftest.UserDB', 'UserDB', ([], {'id': 'user_id', 'email': '"""<EMAIL>"""', 'hashed_password': '"""<PASSWORD>"""'}), "(id=user_id, email='<EMAIL>', hashed_password='<PASSWORD>')\n", (2355, 2414), False, 'from tests.conftest import UserDB\n'), ((4475, 4508), 'pytest.raises', 'pytest.raises', (['exc.IntegrityError'], {}), '(exc.IntegrityError)\n', (4488, 4508), False, 'import pytest\n'), ((3437, 3463), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (3449, 3463), False, 'from datetime import datetime, timedelta, timezone\n'), ((3466, 3484), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (3475, 3484), False, 'from datetime import datetime, timedelta, timezone\n'), ((3676, 3702), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (3688, 3702), False, 'from datetime import datetime, timedelta, timezone\n'), ((3705, 3723), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (3714, 3723), False, 'from datetime import datetime, timedelta, timezone\n')] |
from sqlmodel import SQLModel
from sqlmodel import Field, Relationship
from sqlalchemy import String
from sqlalchemy.sql.schema import Column
from typing import TYPE_CHECKING, Optional, List
if TYPE_CHECKING:
from app.src.models.product import ProductRead
from app.src.models.product import Product
class ProductTypeBase(SQLModel):
    """Fields shared by the ProductType table and its API schemas."""
    name: str
    description: str
class ProductType(ProductTypeBase, table=True):
    """Database table for product types."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # Re-declared to attach an explicit unique column constraint.
    name: str = Field(sa_column=Column("name", String, unique=True))
    description: Optional[str] = Field(default=None)
    products: List["Product"] = Relationship(back_populates="product_type")
class ProductTypeReadwithProduct(ProductTypeBase):
    """Read schema that embeds a related product.

    NOTE(review): the field is named ``product_type`` but holds a
    ProductRead — confirm this naming against the API layer.
    """
    product_type: Optional["ProductRead"] = None
class ProductTypeCreate(ProductTypeBase):
    """Schema for creating a product type (all base fields required)."""
    pass
class ProductTypeRead(ProductTypeBase):
    """Read schema: base fields plus the database id."""
    id: int
# In the update model every attribute must be optional
class ProductTypeUpdate(SQLModel):
    """Partial-update schema; omitted fields are left unchanged."""
    name: Optional[str] = None
    description: Optional[str] = None
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((452, 489), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (457, 489), False, 'from sqlmodel import Field, Relationship\n'), ((592, 611), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (597, 611), False, 'from sqlmodel import Field, Relationship\n'), ((644, 687), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""product_type"""'}), "(back_populates='product_type')\n", (656, 687), False, 'from sqlmodel import Field, Relationship\n'), ((522, 557), 'sqlalchemy.sql.schema.Column', 'Column', (['"""name"""', 'String'], {'unique': '(True)'}), "('name', String, unique=True)\n", (528, 557), False, 'from sqlalchemy.sql.schema import Column\n')] |
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine, select
class Team(SQLModel, table=True):
    """A team row; name is indexed for lookup."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(index=True)
    headquarters: str
class Hero(SQLModel, table=True):
    """A hero row, optionally linked to a Team via team_id."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str = Field(index=True)
    secret_name: str
    age: Optional[int] = Field(default=None, index=True)
    team_id: Optional[int] = Field(default=None, foreign_key="team.id")
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
    """Create all tables registered on SQLModel metadata."""
    SQLModel.metadata.create_all(engine)
def create_heroes():
    """Seed two teams and three heroes, linking heroes by team_id."""
    with Session(engine) as session:
        team_preventers = Team(name="Preventers", headquarters="Sharp Tower")
        team_z_force = Team(name="Z-Force", headquarters="Sister Margaret’s Bar")
        session.add(team_preventers)
        session.add(team_z_force)
        # Commit first so the teams get database-assigned ids.
        session.commit()
        hero_deadpond = Hero(
            name="Deadpond", secret_name="<NAME>", team_id=team_z_force.id
        )
        hero_rusty_man = Hero(
            name="Rusty-Man",
            secret_name="<NAME>",
            age=48,
            team_id=team_preventers.id,
        )
        hero_spider_boy = Hero(name="Spider-Boy", secret_name="<NAME>")
        session.add(hero_deadpond)
        session.add(hero_rusty_man)
        session.add(hero_spider_boy)
        session.commit()
        # Refresh so the printed objects carry their persisted state.
        session.refresh(hero_deadpond)
        session.refresh(hero_rusty_man)
        session.refresh(hero_spider_boy)
        print("Created hero:", hero_deadpond)
        print("Created hero:", hero_rusty_man)
        print("Created hero:", hero_spider_boy)
def select_heroes():
    """Print each hero joined to its team via an implicit join condition."""
    with Session(engine) as session:
        statement = select(Hero, Team).where(Hero.team_id == Team.id)
        results = session.exec(statement)
        for hero, team in results:
            print("Hero:", hero, "Team:", team)
def main():
    """Script entry point: set up the schema, seed data, run the query."""
    create_db_and_tables()
    create_heroes()
    select_heroes()
if __name__ == "__main__":
main()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.Field",
"sqlmodel.select",
"sqlmodel.create_engine"
] | [((625, 661), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (638, 661), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((158, 195), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (163, 195), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((212, 229), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (217, 229), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((312, 349), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (317, 349), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((366, 383), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (371, 383), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((430, 461), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'index': '(True)'}), '(default=None, index=True)\n', (435, 461), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((492, 534), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (497, 534), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((696, 732), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (724, 732), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((765, 780), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (772, 780), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1830, 1845), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1837, 1845), False, 'from sqlmodel import Field, Session, 
SQLModel, create_engine, select\n'), ((1878, 1896), 'sqlmodel.select', 'select', (['Hero', 'Team'], {}), '(Hero, Team)\n', (1884, 1896), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')] |
from typing import Optional
from pydantic import EmailStr
from sqlmodel import Field, SQLModel
from sb_backend.app.models.base.base_model import TimeStampMixin
# Shared properties
class UserBase(SQLModel):
    """Fields shared by all user API schemas and the table model."""
    email: Optional[EmailStr] = None
    is_active: Optional[bool] = True
    is_superuser: bool = False
    full_name: Optional[str] = None
# Properties to receive via API on creation
class UserCreate(UserBase):
    """Creation schema: email and plaintext password are required."""
    email: EmailStr
    password: str
# Properties to receive via API on update
class UserUpdate(UserBase):
    """Update schema: all fields, including the password, are optional."""
    password: Optional[str] = None
class UserInDBBase(UserBase):
    """Base for DB-backed user schemas; adds the primary-key id."""
    id: Optional[int] = Field(default=None, primary_key=True)
# Additional properties to return via API
class User(UserInDBBase, TimeStampMixin, table=True):
    """User table model returned via the API (timestamps via mixin)."""
    # The body is non-empty, so the redundant trailing `pass` was removed.
    __tablename__ = "user"
# Additional properties stored in DB
class UserInDB(UserInDBBase):
    """Internal schema carrying the stored password hash."""
    hashed_password: str
| [
"sqlmodel.Field"
] | [((624, 661), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (629, 661), False, 'from sqlmodel import Field, SQLModel\n')] |
from enum import Enum
from typing import TYPE_CHECKING, Optional, Set
from sqlalchemy import Column
from sqlalchemy import Enum as SQLEnum
from sqlalchemy import ForeignKey, Integer
from sqlmodel import Field, Relationship, SQLModel
from .application import Status
if TYPE_CHECKING:
from .message import Message
class Group(Enum):
    """Audience groups a message can be addressed to."""

    EVERYONE = "Everyone"
    APPLICATION_COMPLETE = "Application - Complete"
    APPLICATION_INCOMPLETE = "Application - Incomplete"
    STATUS_ACCEPTED = "Status - Accepted"
    STATUS_DENIED = "Status - Denied"
    STATUS_PENDING = "Status - Pending"

    @staticmethod
    def completion_states() -> Set["Group"]:
        """The groups keyed off application completeness."""
        return {Group.APPLICATION_COMPLETE, Group.APPLICATION_INCOMPLETE}

    @staticmethod
    def statuses() -> Set["Group"]:
        """The groups keyed off application review status."""
        return {Group.STATUS_ACCEPTED, Group.STATUS_DENIED, Group.STATUS_PENDING}

    def to_status(self) -> Optional[Status]:
        """Map a status group onto the matching application Status.

        Groups that do not correspond to a status map to None.
        """
        translation = {
            Group.STATUS_ACCEPTED: Status.ACCEPTED,
            Group.STATUS_DENIED: Status.REJECTED,
            Group.STATUS_PENDING: Status.PENDING,
        }
        return translation.get(self)
class RecipientBase(SQLModel):
    """Shared field for recipient table and schemas.

    The group is stored as a SQL enum column and is part of the
    primary key.
    """
    group: Group = Field(
        sa_column=Column(
            SQLEnum(Group),
            nullable=False,
            primary_key=True,
        )
    )
class Recipient(RecipientBase, table=True):
    """Table linking a message to one recipient group.

    (message_id, group) is the composite primary key; rows are removed
    when the referenced message is deleted (ON DELETE CASCADE).
    """
    __tablename__ = "recipients"
    message_id: int = Field(
        sa_column=Column(
            Integer(),
            ForeignKey("messages.id", ondelete="CASCADE"),
            primary_key=True,
        )
    )
    message: "Message" = Relationship(back_populates="recipients")
class RecipientCreate(RecipientBase):
    """Creation schema: just the group (message_id comes from context)."""
    pass
class RecipientRead(RecipientBase):
    """Read schema mirroring the shared base fields."""
    pass
| [
"sqlmodel.Relationship"
] | [((1659, 1700), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""recipients"""'}), "(back_populates='recipients')\n", (1671, 1700), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1281, 1295), 'sqlalchemy.Enum', 'SQLEnum', (['Group'], {}), '(Group)\n', (1288, 1295), True, 'from sqlalchemy import Enum as SQLEnum\n'), ((1518, 1527), 'sqlalchemy.Integer', 'Integer', ([], {}), '()\n', (1525, 1527), False, 'from sqlalchemy import ForeignKey, Integer\n'), ((1541, 1586), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""messages.id"""'], {'ondelete': '"""CASCADE"""'}), "('messages.id', ondelete='CASCADE')\n", (1551, 1586), False, 'from sqlalchemy import ForeignKey, Integer\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine, func, select
class Item(SQLModel, table=True):
    """Demo table used to exercise the aggregate query below."""
    id: Optional[int] = Field(default=None, primary_key=True)
    created: datetime
    deleted: bool = False
    category: str
    version: float = 1
    data: str
# Seed rows that demonstrate the aggregate query below.
item_1 = Item(created=datetime.now(), category="category_1", data="❤️ I love SQLModel.")
item_2 = Item(
    created=datetime.now(),
    category="category_1",
    data="❤️ I love FastAPI.",
    deleted=True,
)
item_3 = Item(
    created=datetime.now(),
    category="category_2",
    data="🥰 I appreciate your work on all of it!",
)
engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([item_1, item_2, item_3])
    session.commit()
    # Build the per-category aggregates first, then apply distinct/group_by;
    # PyCharm may flag the chained select, but it is valid SQLModel/SQLAlchemy.
    aggregate_select = select(
        Item.category,
        func.count(Item.id).label("my_count"),
        func.total(Item.deleted).label("delete_count"),
        func.min(Item.created).label("oldest_timestamp"),
        func.max(Item.created).label("newest_timestamp"),
        func.group_concat(Item.version).label("version_list"),
    )
    statement = aggregate_select.distinct().group_by(Item.category)
    category_metadata = session.exec(statement)
    for result in category_metadata:
        print(dict(result))
| [
"sqlmodel.func.min",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.func.total",
"sqlmodel.func.group_concat",
"sqlmodel.Session",
"sqlmodel.Field",
"sqlmodel.func.count",
"sqlmodel.func.max",
"sqlmodel.create_engine"
] | [((1345, 1371), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite://"""'], {}), "('sqlite://')\n", (1358, 1371), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n'), ((1373, 1409), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1401, 1409), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n'), ((791, 828), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (796, 828), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n'), ((1416, 1431), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1423, 1431), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n'), ((1024, 1038), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1036, 1038), False, 'from datetime import datetime\n'), ((1118, 1132), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1130, 1132), False, 'from datetime import datetime\n'), ((1239, 1253), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1251, 1253), False, 'from datetime import datetime\n'), ((1680, 1699), 'sqlmodel.func.count', 'func.count', (['Item.id'], {}), '(Item.id)\n', (1690, 1699), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n'), ((1731, 1755), 'sqlmodel.func.total', 'func.total', (['Item.deleted'], {}), '(Item.deleted)\n', (1741, 1755), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n'), ((1791, 1813), 'sqlmodel.func.min', 'func.min', (['Item.created'], {}), '(Item.created)\n', (1799, 1813), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n'), ((1853, 1875), 'sqlmodel.func.max', 'func.max', (['Item.created'], {}), '(Item.created)\n', (1861, 1875), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, 
select\n'), ((1915, 1946), 'sqlmodel.func.group_concat', 'func.group_concat', (['Item.version'], {}), '(Item.version)\n', (1932, 1946), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, func, select\n')] |
from datetime import date
from typing import List
from sqlmodel import select
from config.config_utils import get_managed_teams_config
from src.api.fixtures_client import FixturesClient
from src.db.db_manager import NotifierDBManager
from src.db.notif_sql_models import Fixture as DBFixture
from src.db.notif_sql_models import League as DBLeague
from src.db.notif_sql_models import Team as DBTeam
from src.entities import Championship, Team
from src.utils.fixtures_utils import convert_fixture_response_to_db
# Shared database manager used by every helper in this module.
NOTIFIER_DB_MANAGER = NotifierDBManager()
def insert_league(fixture_league: Championship) -> DBLeague:
    """Return the stored league for ``fixture_league``, inserting it first if absent."""
    statement = select(DBLeague).where(DBLeague.id == fixture_league.league_id)
    found = NOTIFIER_DB_MANAGER.select_records(statement)
    if not found:
        NOTIFIER_DB_MANAGER.insert_record(
            DBLeague(
                id=fixture_league.league_id,
                name=fixture_league.name,
                logo=fixture_league.logo,
                country=fixture_league.country,
            )
        )
        # Re-query so the caller always receives the persisted record.
        found = NOTIFIER_DB_MANAGER.select_records(statement)
    return found
def insert_team(fixture_team: Team) -> DBTeam:
    """Return the stored team for ``fixture_team``, inserting it first if absent."""
    statement = select(DBTeam).where(DBTeam.id == fixture_team.id)
    found = NOTIFIER_DB_MANAGER.select_records(statement)
    if not found:
        NOTIFIER_DB_MANAGER.insert_record(
            DBTeam(
                id=fixture_team.id,
                name=fixture_team.name,
                picture=fixture_team.picture,
                aliases=fixture_team.aliases,
            )
        )
        # Re-query so the caller always receives the persisted record.
        found = NOTIFIER_DB_MANAGER.select_records(statement)
    return found
def save_fixtures(team_fixtures: List[dict]) -> None:
    """Convert raw fixture payloads and upsert them into the database.

    Each fixture's league and teams are inserted first (if missing) so the
    fixture row can reference their ids.
    """
    total = len(team_fixtures)
    converted_fixtures = []
    # enumerate replaces the manual fix_nr counter of the original code.
    for fix_nr, fixture in enumerate(team_fixtures, start=1):
        print(f"Converting & populating fixture {fix_nr}/{total}")
        converted_fixtures.append(convert_fixture_response_to_db(fixture))
    db_fixtures = []
    for conv_fix in converted_fixtures:
        # League and teams must exist before the fixture can reference them.
        retrieved_league = insert_league(conv_fix.championship)
        retrieved_home_team = insert_team(conv_fix.home_team)
        retrieved_away_team = insert_team(conv_fix.away_team)
        fixture_statement = select(DBFixture).where(DBFixture.id == conv_fix.id)
        retrieved_fixture = NOTIFIER_DB_MANAGER.select_records(fixture_statement)
        if not len(retrieved_fixture):
            # New fixture: create it with all fields...
            db_fixture = DBFixture(
                id=conv_fix.id,
                utc_date=conv_fix.utc_date,
                league=retrieved_league.pop().id,
                round=conv_fix.round,
                home_team=retrieved_home_team.pop().id,
                away_team=retrieved_away_team.pop().id,
                home_score=conv_fix.match_score.home_score,
                away_score=conv_fix.match_score.away_score,
            )
        else:
            # ...otherwise refresh the existing row's fields in place.
            db_fixture = retrieved_fixture.pop()
            db_fixture.id = conv_fix.id
            db_fixture.utc_date = conv_fix.utc_date
            db_fixture.league = retrieved_league.pop().id
            db_fixture.round = conv_fix.round
            db_fixture.home_team = retrieved_home_team.pop().id
            db_fixture.away_team = retrieved_away_team.pop().id
            db_fixture.home_score = conv_fix.match_score.home_score
            db_fixture.away_score = conv_fix.match_score.away_score
        db_fixtures.append(db_fixture)
    NOTIFIER_DB_MANAGER.insert_records(db_fixtures)
def populate_data(is_initial: bool = False) -> None:
    """Fetch fixtures for every managed team and persist them.

    On the initial run, last season's fixtures are backfilled as well.
    """
    managed_teams = get_managed_teams_config()
    fixtures_client = FixturesClient()
    current_year = date.today().year
    last_year = current_year - 1
    seasons = [last_year, current_year] if is_initial else [current_year]
    for team in managed_teams:
        for season in seasons:
            team_fixtures = fixtures_client.get_fixtures_by(str(season), team.id)
            if "response" in team_fixtures.as_dict:
                save_fixtures(team_fixtures.as_dict["response"])
if __name__ == "__main__":
    # Seed everything (including last season) when the fixtures table is empty.
    fixtures = NOTIFIER_DB_MANAGER.select_records(select(DBFixture))
    is_initial = not fixtures  # idiomatic: replaces `True if not len(...) else False`
    populate_data(is_initial)
| [
"sqlmodel.select"
] | [((534, 553), 'src.db.db_manager.NotifierDBManager', 'NotifierDBManager', ([], {}), '()\n', (551, 553), False, 'from src.db.db_manager import NotifierDBManager\n'), ((3634, 3660), 'config.config_utils.get_managed_teams_config', 'get_managed_teams_config', ([], {}), '()\n', (3658, 3660), False, 'from config.config_utils import get_managed_teams_config\n'), ((3683, 3699), 'src.api.fixtures_client.FixturesClient', 'FixturesClient', ([], {}), '()\n', (3697, 3699), False, 'from src.api.fixtures_client import FixturesClient\n'), ((832, 958), 'src.db.notif_sql_models.League', 'DBLeague', ([], {'id': 'fixture_league.league_id', 'name': 'fixture_league.name', 'logo': 'fixture_league.logo', 'country': 'fixture_league.country'}), '(id=fixture_league.league_id, name=fixture_league.name, logo=\n fixture_league.logo, country=fixture_league.country)\n', (840, 958), True, 'from src.db.notif_sql_models import League as DBLeague\n'), ((1413, 1528), 'src.db.notif_sql_models.Team', 'DBTeam', ([], {'id': 'fixture_team.id', 'name': 'fixture_team.name', 'picture': 'fixture_team.picture', 'aliases': 'fixture_team.aliases'}), '(id=fixture_team.id, name=fixture_team.name, picture=fixture_team.\n picture, aliases=fixture_team.aliases)\n', (1419, 1528), True, 'from src.db.notif_sql_models import Team as DBTeam\n'), ((3719, 3731), 'datetime.date.today', 'date.today', ([], {}), '()\n', (3729, 3731), False, 'from datetime import date\n'), ((4300, 4317), 'sqlmodel.select', 'select', (['DBFixture'], {}), '(DBFixture)\n', (4306, 4317), False, 'from sqlmodel import select\n'), ((640, 656), 'sqlmodel.select', 'select', (['DBLeague'], {}), '(DBLeague)\n', (646, 656), False, 'from sqlmodel import select\n'), ((1242, 1256), 'sqlmodel.select', 'select', (['DBTeam'], {}), '(DBTeam)\n', (1248, 1256), False, 'from sqlmodel import select\n'), ((1981, 2020), 'src.utils.fixtures_utils.convert_fixture_response_to_db', 'convert_fixture_response_to_db', (['fixture'], {}), '(fixture)\n', (2011, 2020), False, 
'from src.utils.fixtures_utils import convert_fixture_response_to_db\n'), ((2322, 2339), 'sqlmodel.select', 'select', (['DBFixture'], {}), '(DBFixture)\n', (2328, 2339), False, 'from sqlmodel import select\n')] |
from typing import TYPE_CHECKING
from pydantic import validator
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from .event import Event, EventList
from .participant import Participant, ParticipantList
class FeedbackBase(SQLModel):
    """Shared feedback fields: three 1-5 ratings plus free-form comments."""
    presentation: int
    content: int
    interest: int
    comments: str = Field(default="", nullable=False)
    again: bool = Field(default=True, nullable=False)
    @validator("presentation", "content", "interest")
    def between_1_and_5(cls, value: int):
        """Constrain every rating to the inclusive 1-5 scale."""
        if value < 1 or value > 5:
            raise ValueError("must be between 1 and 5 inclusive")
        return value
class Feedback(FeedbackBase, table=True):
    """Feedback table with a composite PK of (participant_id, event_id)."""
    __tablename__ = "feedback"
    # FK to participants.id; the row cascades away with its participant.
    participant_id: str = Field(
        sa_column=Column(
            String(),
            ForeignKey("participants.id", ondelete="CASCADE"),
            nullable=False,
            primary_key=True,
        )
    )
    participant: "Participant" = Relationship()
    # FK to events.id; the row cascades away with its event.
    event_id: int = Field(
        sa_column=Column(
            Integer(),
            ForeignKey("events.id", ondelete="CASCADE"),
            nullable=False,
            primary_key=True,
        )
    )
    event: "Event" = Relationship(back_populates="feedback")
class FeedbackCreate(FeedbackBase):
    """Payload schema for submitting feedback."""
    pass
class FeedbackList(SQLModel):
    """Compact feedback representation used in list responses."""
    participant: "ParticipantList"
    presentation: int
    content: int
    interest: int
class FeedbackRead(FeedbackBase):
    """Full feedback representation, including participant and event."""
    participant: "ParticipantList"
    event: "EventList"
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((402, 435), 'sqlmodel.Field', 'Field', ([], {'default': '""""""', 'nullable': '(False)'}), "(default='', nullable=False)\n", (407, 435), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((454, 489), 'sqlmodel.Field', 'Field', ([], {'default': '(True)', 'nullable': '(False)'}), '(default=True, nullable=False)\n', (459, 489), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((496, 544), 'pydantic.validator', 'validator', (['"""presentation"""', '"""content"""', '"""interest"""'], {}), "('presentation', 'content', 'interest')\n", (505, 544), False, 'from pydantic import validator\n'), ((1036, 1050), 'sqlmodel.Relationship', 'Relationship', ([], {}), '()\n', (1048, 1050), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1280, 1319), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""feedback"""'}), "(back_populates='feedback')\n", (1292, 1319), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((856, 864), 'sqlalchemy.String', 'String', ([], {}), '()\n', (862, 864), False, 'from sqlalchemy import Column, ForeignKey, Integer, String\n'), ((878, 927), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""participants.id"""'], {'ondelete': '"""CASCADE"""'}), "('participants.id', ondelete='CASCADE')\n", (888, 927), False, 'from sqlalchemy import Column, ForeignKey, Integer, String\n'), ((1117, 1126), 'sqlalchemy.Integer', 'Integer', ([], {}), '()\n', (1124, 1126), False, 'from sqlalchemy import Column, ForeignKey, Integer, String\n'), ((1140, 1183), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""events.id"""'], {'ondelete': '"""CASCADE"""'}), "('events.id', ondelete='CASCADE')\n", (1150, 1183), False, 'from sqlalchemy import Column, ForeignKey, Integer, String\n')] |
from typing import Optional
from loguru import logger
from sqlmodel import Field, Session, SQLModel, create_engine, or_, select
class Hero(SQLModel, table=True):
    """Demo table used to exercise where/or_/and query filters."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    secret_name: str = Field(max_length=30)
    age: Optional[int] = None
age: Optional[int] = None
def test_database_with_sqlmodel():
    """Exercise insert plus where/or_/and filtering against an in-memory DB."""
    deadpond = Hero(name='Deadpond', secret_name='<NAME>')
    spider_boy = Hero(name='Spider-Boy', secret_name='<NAME>')
    rusty_man = Hero(name='Rusty-Man', secret_name='<NAME>', age=48)
    engine = create_engine('sqlite:///:memory:')
    SQLModel.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all([deadpond, spider_boy, rusty_man])
        session.commit()
    with Session(engine) as session:
        # Single equality filter.
        statement = select(Hero).where(Hero.name == 'Spider-Boy')
        logger.info(session.exec(statement).first())
        # OR via the bitwise | operator.
        statement = select(Hero).where((Hero.name == 'Spider-Boy') | (Hero.name == 'Rusty-Man'))
        for hero in session.exec(statement):
            logger.info(hero)
        # OR via sqlmodel.or_.
        statement = select(Hero).where(or_(Hero.name == 'Spider-Boy', Hero.name == 'Rusty-Man'))
        for hero in session.exec(statement):
            logger.info(hero)
        # AND via two conditions in one where() call.
        statement = select(Hero).where(Hero.name == 'Spider-Boy', Hero.secret_name == '<NAME>')
        for hero in session.exec(statement):
            logger.info(hero)
        # AND via chained where() calls.
        statement = select(Hero).where(Hero.name == 'Spider-Boy').where(Hero.secret_name == '<NAME>')
        for hero in session.exec(statement):
            logger.info(hero)
if __name__ == '__main__':
    # Allow running the demo directly, outside a test runner.
    test_database_with_sqlmodel()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.or_",
"sqlmodel.Field",
"sqlmodel.select",
"sqlmodel.create_engine"
] | [((189, 226), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (194, 226), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((264, 284), 'sqlmodel.Field', 'Field', ([], {'max_length': '(30)'}), '(max_length=30)\n', (269, 284), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((598, 633), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///:memory:"""'], {}), "('sqlite:///:memory:')\n", (611, 633), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((639, 675), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (667, 675), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((686, 701), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (693, 701), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((825, 840), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (832, 840), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((974, 991), 'loguru.logger.info', 'logger.info', (['hero'], {}), '(hero)\n', (985, 991), False, 'from loguru import logger\n'), ((1194, 1211), 'loguru.logger.info', 'logger.info', (['hero'], {}), '(hero)\n', (1205, 1211), False, 'from loguru import logger\n'), ((1292, 1348), 'sqlmodel.or_', 'or_', (["(Hero.name == 'Spider-Boy')", "(Hero.name == 'Rusty-Man')"], {}), "(Hero.name == 'Spider-Boy', Hero.name == 'Rusty-Man')\n", (1295, 1348), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((1431, 1448), 'loguru.logger.info', 'logger.info', (['hero'], {}), '(hero)\n', (1442, 1448), False, 'from loguru import logger\n'), ((1651, 1668), 'loguru.logger.info', 'logger.info', (['hero'], {}), '(hero)\n', (1662, 1668), False, 'from loguru import 
logger\n'), ((1894, 1911), 'loguru.logger.info', 'logger.info', (['hero'], {}), '(hero)\n', (1905, 1911), False, 'from loguru import logger\n'), ((873, 885), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (879, 885), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((1036, 1048), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1042, 1048), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((1273, 1285), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1279, 1285), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((1494, 1506), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1500, 1506), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n'), ((1731, 1743), 'sqlmodel.select', 'select', (['Hero'], {}), '(Hero)\n', (1737, 1743), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, or_, select\n')] |
from __future__ import annotations
import inspect
from functools import wraps
from typing import Any, Callable, Dict, List, Literal, Type, TypeVar
from fastapi.encoders import jsonable_encoder
from sqlalchemy.orm import Query, noload, raiseload, selectinload, subqueryload
from sqlalchemy.sql.elements import BinaryExpression
from sqlmodel import SQLModel, select
from sqlmodel.ext.asyncio.session import AsyncSession
Self = TypeVar("Self", bound="Base")
# Accepted relationship-loading strategy names.
LoadStrategy = Literal["subquery", "selectin", "raise", "raise_on_sql", "noload"]
# Maps each strategy name to its SQLAlchemy relationship-loader factory.
load_strategy_map: Dict[LoadStrategy, Callable[..., Any]] = {
    "subquery": subqueryload,
    "selectin": selectinload,
    "raise": raiseload,
    "raise_on_sql": raiseload,
    "noload": noload,
}
class InvalidTable(RuntimeError):
    """Raised when a SQLAlchemy-backed method is called on a SQLModel class
    that is not a table (i.e. was not declared with ``table=True``).
    """
def is_table(cls: Type[Self]) -> bool:
    """Return True if ``cls`` is itself a SQLModel table (``table=True``)
    while none of its direct bases are.

    This distinguishes concrete table models from subclasses of a table
    model (which inherit ``table=True`` but are not tables themselves).
    """
    base_is_table = False
    for base in cls.__bases__:
        # Use a default so plain bases (e.g. ``object``) that have no
        # pydantic ``__config__`` do not raise AttributeError.
        config = getattr(base, "__config__", None)
        if config and getattr(config, "table", False):
            base_is_table = True
            break
    return getattr(cls.__config__, "table", False) and not base_is_table
def validate_table(func):
    """Decorator ensuring the receiver is a SQLModel table before running ``func``."""
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        # ``self`` may be an instance or (for classmethods) the class itself.
        target_cls = self if inspect.isclass(self) else type(self)
        if not is_table(target_cls):
            raise InvalidTable(
                f'"{target_cls.__name__}" is not a table. '
                "Add the class parameter `table=True` or don't use with this object."
            )
        return func(self, *args, **kwargs)
    return wrapper
def _prepare_query(
    cls: Type[Self], load_strategy: Dict[str, LoadStrategy] | None
) -> Query:
    """Build a SELECT for ``cls``, applying any requested relationship loaders."""
    query = select(cls)
    if load_strategy:
        for field_name, strategy_name in load_strategy.items():
            loader = load_strategy_map[strategy_name]
            query = query.options(loader(field_name))
    return query
class Base(SQLModel):
    """Base model providing generic async CRUD helpers for table subclasses."""
    @classmethod
    @validate_table
    async def get(
        cls: Type[Self],
        session: AsyncSession,
        *args: BinaryExpression,
        load_strategy: Dict[str, LoadStrategy] | None = None,
        **kwargs: Any,
    ) -> Self:
        """Return the first row matching ``args`` expressions and ``kwargs`` equality filters."""
        query = _prepare_query(cls, load_strategy)
        result = await session.execute(query.filter(*args).filter_by(**kwargs))
        return result.scalars().first()
    @classmethod
    @validate_table
    async def get_multi(
        cls: Type[Self],
        session: AsyncSession,
        *args: BinaryExpression,
        load_strategy: Dict[str, LoadStrategy] | None = None,
        offset: int = 0,
        limit: int = 100,
        **kwargs: Any,
    ) -> List[Self]:
        """Return a page of matching rows, bounded by ``offset``/``limit``."""
        query = _prepare_query(cls, load_strategy)
        result = await session.execute(
            query.filter(*args).filter_by(**kwargs).offset(offset).limit(limit)
        )
        return result.scalars().all()
    @classmethod
    @validate_table
    async def create(cls: Type[Self], session: AsyncSession, **kwargs: Any) -> Self:
        """Insert and commit a new row built from ``kwargs``."""
        db_obj = cls(**kwargs)
        session.add(db_obj)
        await session.commit()
        return db_obj
    @validate_table
    async def update(self: Self, session: AsyncSession, **kwargs: Any) -> Self:
        """Apply ``kwargs`` onto this row's known fields, commit and refresh it."""
        obj_data = jsonable_encoder(self)
        for field in obj_data:
            if field in kwargs:
                setattr(self, field, kwargs[field])
        session.add(self)
        await session.commit()
        await session.refresh(self)
        return self
    @classmethod
    @validate_table
    async def delete(
        cls: Type[Self], session: AsyncSession, *args: BinaryExpression, **kwargs: Any
    ) -> Self:
        """Delete and return the first row matching the given filters."""
        db_obj = await cls.get(session, *args, **kwargs)
        await session.delete(db_obj)
        await session.commit()
        return db_obj
| [
"sqlmodel.select"
] | [((428, 457), 'typing.TypeVar', 'TypeVar', (['"""Self"""'], {'bound': '"""Base"""'}), "('Self', bound='Base')\n", (435, 457), False, 'from typing import Any, Callable, Dict, List, Literal, Type, TypeVar\n'), ((1277, 1288), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (1282, 1288), False, 'from functools import wraps\n'), ((1824, 1835), 'sqlmodel.select', 'select', (['cls'], {}), '(cls)\n', (1830, 1835), False, 'from sqlmodel import SQLModel, select\n'), ((3274, 3296), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['self'], {}), '(self)\n', (3290, 3296), False, 'from fastapi.encoders import jsonable_encoder\n'), ((1351, 1372), 'inspect.isclass', 'inspect.isclass', (['self'], {}), '(self)\n', (1366, 1372), False, 'import inspect\n')] |
from typing import Optional
from sqlmodel import SQLModel, Field, create_engine, Session
# Module-wide engine backed by the local SQLite file.
engine = create_engine(url="sqlite:///users.db", echo=False)
class User(SQLModel, table=True):
    """User account row with plain credentials."""
    id: Optional[int] = Field(None, primary_key=True)
    username: str
    password: str
def get_session():
    """Yield a database session (FastAPI-style dependency)."""
    with Session(engine) as db_session:
        yield db_session
def init_db():
    # Create every table registered on the SQLModel metadata.
    SQLModel.metadata.create_all(engine)
| [
"sqlmodel.Session",
"sqlmodel.create_engine",
"sqlmodel.Field",
"sqlmodel.SQLModel.metadata.create_all"
] | [((100, 151), 'sqlmodel.create_engine', 'create_engine', ([], {'url': '"""sqlite:///users.db"""', 'echo': '(False)'}), "(url='sqlite:///users.db', echo=False)\n", (113, 151), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((212, 241), 'sqlmodel.Field', 'Field', (['None'], {'primary_key': '(True)'}), '(None, primary_key=True)\n', (217, 241), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((378, 414), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (406, 414), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((307, 322), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (314, 322), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n')] |
"""Initial 5
Revision ID: 0101e666f4e9
Revises: 6c98e82ae2b5
Create Date: 2021-11-14 01:40:19.792380
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '0101e666f4e9'
down_revision = '6c98e82ae2b5'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # The three point-of-interest tables share a byte-identical schema, so
    # they are created by a single helper instead of three copy-pasted blocks.
    for table_name in ('interest_points', 'places_nearby', 'routes'):
        _create_poi_table(table_name)
    # ### end Alembic commands ###


def _create_poi_table(table_name):
    """Create one point-of-interest table plus a non-unique index on every column."""
    op.create_table(
        table_name,
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('address', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('distance', sa.Integer(), nullable=False),
        sa.Column('website', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('website_domain', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('chain_name', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column('listing_id', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['listing_id'], ['listings.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Same index set (alphabetical) as the original generated migration.
    for column in ('address', 'chain_name', 'created_at', 'distance', 'id',
                   'latitude', 'listing_id', 'longitude', 'name', 'updated_at',
                   'website', 'website_domain'):
        op.create_index(op.f(f'ix_{table_name}_{column}'), table_name, [column], unique=False)
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Mirror of upgrade(): drop the three identical tables (and their
    # indexes) via one helper instead of three copy-pasted blocks.
    for table_name in ('routes', 'places_nearby', 'interest_points'):
        _drop_poi_table(table_name)
    # ### end Alembic commands ###


def _drop_poi_table(table_name):
    """Drop every index of one point-of-interest table, then the table itself."""
    # Same drop order (reverse alphabetical) as the original generated migration.
    for column in ('website_domain', 'website', 'updated_at', 'name',
                   'longitude', 'listing_id', 'latitude', 'id', 'distance',
                   'created_at', 'chain_name', 'address'):
        op.drop_index(op.f(f'ix_{table_name}_{column}'), table_name=table_name)
    op.drop_table(table_name)
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((7600, 7623), 'alembic.op.drop_table', 'op.drop_table', (['"""routes"""'], {}), "('routes')\n", (7613, 7623), False, 'from alembic import op\n'), ((8603, 8633), 'alembic.op.drop_table', 'op.drop_table', (['"""places_nearby"""'], {}), "('places_nearby')\n", (8616, 8633), False, 'from alembic import op\n'), ((9661, 9693), 'alembic.op.drop_table', 'op.drop_table', (['"""interest_points"""'], {}), "('interest_points')\n", (9674, 9693), False, 'from alembic import op\n'), ((1222, 1278), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['listing_id']", "['listings.id']"], {}), "(['listing_id'], ['listings.id'])\n", (1245, 1278), True, 'import sqlalchemy as sa\n'), ((1286, 1315), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1309, 1315), True, 'import sqlalchemy as sa\n'), ((1342, 1376), 'alembic.op.f', 'op.f', (['"""ix_interest_points_address"""'], {}), "('ix_interest_points_address')\n", (1346, 1376), False, 'from alembic import op\n'), ((1444, 1481), 'alembic.op.f', 'op.f', (['"""ix_interest_points_chain_name"""'], {}), "('ix_interest_points_chain_name')\n", (1448, 1481), False, 'from alembic import op\n'), ((1552, 1589), 'alembic.op.f', 'op.f', (['"""ix_interest_points_created_at"""'], {}), "('ix_interest_points_created_at')\n", (1556, 1589), False, 'from alembic import op\n'), ((1660, 1695), 'alembic.op.f', 'op.f', (['"""ix_interest_points_distance"""'], {}), "('ix_interest_points_distance')\n", (1664, 1695), False, 'from alembic import op\n'), ((1764, 1793), 'alembic.op.f', 'op.f', (['"""ix_interest_points_id"""'], {}), "('ix_interest_points_id')\n", (1768, 1793), False, 'from alembic import op\n'), ((1856, 1891), 'alembic.op.f', 'op.f', (['"""ix_interest_points_latitude"""'], {}), "('ix_interest_points_latitude')\n", (1860, 1891), False, 'from alembic import op\n'), ((1960, 1997), 'alembic.op.f', 'op.f', (['"""ix_interest_points_listing_id"""'], {}), "('ix_interest_points_listing_id')\n", (1964, 
1997), False, 'from alembic import op\n'), ((2068, 2104), 'alembic.op.f', 'op.f', (['"""ix_interest_points_longitude"""'], {}), "('ix_interest_points_longitude')\n", (2072, 2104), False, 'from alembic import op\n'), ((2174, 2205), 'alembic.op.f', 'op.f', (['"""ix_interest_points_name"""'], {}), "('ix_interest_points_name')\n", (2178, 2205), False, 'from alembic import op\n'), ((2270, 2307), 'alembic.op.f', 'op.f', (['"""ix_interest_points_updated_at"""'], {}), "('ix_interest_points_updated_at')\n", (2274, 2307), False, 'from alembic import op\n'), ((2378, 2412), 'alembic.op.f', 'op.f', (['"""ix_interest_points_website"""'], {}), "('ix_interest_points_website')\n", (2382, 2412), False, 'from alembic import op\n'), ((2480, 2521), 'alembic.op.f', 'op.f', (['"""ix_interest_points_website_domain"""'], {}), "('ix_interest_points_website_domain')\n", (2484, 2521), False, 'from alembic import op\n'), ((3404, 3460), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['listing_id']", "['listings.id']"], {}), "(['listing_id'], ['listings.id'])\n", (3427, 3460), True, 'import sqlalchemy as sa\n'), ((3468, 3497), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3491, 3497), True, 'import sqlalchemy as sa\n'), ((3524, 3556), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_address"""'], {}), "('ix_places_nearby_address')\n", (3528, 3556), False, 'from alembic import op\n'), ((3622, 3657), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_chain_name"""'], {}), "('ix_places_nearby_chain_name')\n", (3626, 3657), False, 'from alembic import op\n'), ((3726, 3761), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_created_at"""'], {}), "('ix_places_nearby_created_at')\n", (3730, 3761), False, 'from alembic import op\n'), ((3830, 3863), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_distance"""'], {}), "('ix_places_nearby_distance')\n", (3834, 3863), False, 'from alembic import op\n'), ((3930, 3957), 'alembic.op.f', 'op.f', 
(['"""ix_places_nearby_id"""'], {}), "('ix_places_nearby_id')\n", (3934, 3957), False, 'from alembic import op\n'), ((4018, 4051), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_latitude"""'], {}), "('ix_places_nearby_latitude')\n", (4022, 4051), False, 'from alembic import op\n'), ((4118, 4153), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_listing_id"""'], {}), "('ix_places_nearby_listing_id')\n", (4122, 4153), False, 'from alembic import op\n'), ((4222, 4256), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_longitude"""'], {}), "('ix_places_nearby_longitude')\n", (4226, 4256), False, 'from alembic import op\n'), ((4324, 4353), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_name"""'], {}), "('ix_places_nearby_name')\n", (4328, 4353), False, 'from alembic import op\n'), ((4416, 4451), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_updated_at"""'], {}), "('ix_places_nearby_updated_at')\n", (4420, 4451), False, 'from alembic import op\n'), ((4520, 4552), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_website"""'], {}), "('ix_places_nearby_website')\n", (4524, 4552), False, 'from alembic import op\n'), ((4618, 4657), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_website_domain"""'], {}), "('ix_places_nearby_website_domain')\n", (4622, 4657), False, 'from alembic import op\n'), ((5531, 5587), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['listing_id']", "['listings.id']"], {}), "(['listing_id'], ['listings.id'])\n", (5554, 5587), True, 'import sqlalchemy as sa\n'), ((5595, 5624), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (5618, 5624), True, 'import sqlalchemy as sa\n'), ((5651, 5676), 'alembic.op.f', 'op.f', (['"""ix_routes_address"""'], {}), "('ix_routes_address')\n", (5655, 5676), False, 'from alembic import op\n'), ((5735, 5763), 'alembic.op.f', 'op.f', (['"""ix_routes_chain_name"""'], {}), "('ix_routes_chain_name')\n", (5739, 5763), False, 'from alembic import op\n'), ((5825, 5853), 
'alembic.op.f', 'op.f', (['"""ix_routes_created_at"""'], {}), "('ix_routes_created_at')\n", (5829, 5853), False, 'from alembic import op\n'), ((5915, 5941), 'alembic.op.f', 'op.f', (['"""ix_routes_distance"""'], {}), "('ix_routes_distance')\n", (5919, 5941), False, 'from alembic import op\n'), ((6001, 6021), 'alembic.op.f', 'op.f', (['"""ix_routes_id"""'], {}), "('ix_routes_id')\n", (6005, 6021), False, 'from alembic import op\n'), ((6075, 6101), 'alembic.op.f', 'op.f', (['"""ix_routes_latitude"""'], {}), "('ix_routes_latitude')\n", (6079, 6101), False, 'from alembic import op\n'), ((6161, 6189), 'alembic.op.f', 'op.f', (['"""ix_routes_listing_id"""'], {}), "('ix_routes_listing_id')\n", (6165, 6189), False, 'from alembic import op\n'), ((6251, 6278), 'alembic.op.f', 'op.f', (['"""ix_routes_longitude"""'], {}), "('ix_routes_longitude')\n", (6255, 6278), False, 'from alembic import op\n'), ((6339, 6361), 'alembic.op.f', 'op.f', (['"""ix_routes_name"""'], {}), "('ix_routes_name')\n", (6343, 6361), False, 'from alembic import op\n'), ((6417, 6445), 'alembic.op.f', 'op.f', (['"""ix_routes_updated_at"""'], {}), "('ix_routes_updated_at')\n", (6421, 6445), False, 'from alembic import op\n'), ((6507, 6532), 'alembic.op.f', 'op.f', (['"""ix_routes_website"""'], {}), "('ix_routes_website')\n", (6511, 6532), False, 'from alembic import op\n'), ((6591, 6623), 'alembic.op.f', 'op.f', (['"""ix_routes_website_domain"""'], {}), "('ix_routes_website_domain')\n", (6595, 6623), False, 'from alembic import op\n'), ((6807, 6839), 'alembic.op.f', 'op.f', (['"""ix_routes_website_domain"""'], {}), "('ix_routes_website_domain')\n", (6811, 6839), False, 'from alembic import op\n'), ((6880, 6905), 'alembic.op.f', 'op.f', (['"""ix_routes_website"""'], {}), "('ix_routes_website')\n", (6884, 6905), False, 'from alembic import op\n'), ((6946, 6974), 'alembic.op.f', 'op.f', (['"""ix_routes_updated_at"""'], {}), "('ix_routes_updated_at')\n", (6950, 6974), False, 'from alembic import op\n'), ((7015, 
7037), 'alembic.op.f', 'op.f', (['"""ix_routes_name"""'], {}), "('ix_routes_name')\n", (7019, 7037), False, 'from alembic import op\n'), ((7078, 7105), 'alembic.op.f', 'op.f', (['"""ix_routes_longitude"""'], {}), "('ix_routes_longitude')\n", (7082, 7105), False, 'from alembic import op\n'), ((7146, 7174), 'alembic.op.f', 'op.f', (['"""ix_routes_listing_id"""'], {}), "('ix_routes_listing_id')\n", (7150, 7174), False, 'from alembic import op\n'), ((7215, 7241), 'alembic.op.f', 'op.f', (['"""ix_routes_latitude"""'], {}), "('ix_routes_latitude')\n", (7219, 7241), False, 'from alembic import op\n'), ((7282, 7302), 'alembic.op.f', 'op.f', (['"""ix_routes_id"""'], {}), "('ix_routes_id')\n", (7286, 7302), False, 'from alembic import op\n'), ((7343, 7369), 'alembic.op.f', 'op.f', (['"""ix_routes_distance"""'], {}), "('ix_routes_distance')\n", (7347, 7369), False, 'from alembic import op\n'), ((7410, 7438), 'alembic.op.f', 'op.f', (['"""ix_routes_created_at"""'], {}), "('ix_routes_created_at')\n", (7414, 7438), False, 'from alembic import op\n'), ((7479, 7507), 'alembic.op.f', 'op.f', (['"""ix_routes_chain_name"""'], {}), "('ix_routes_chain_name')\n", (7483, 7507), False, 'from alembic import op\n'), ((7548, 7573), 'alembic.op.f', 'op.f', (['"""ix_routes_address"""'], {}), "('ix_routes_address')\n", (7552, 7573), False, 'from alembic import op\n'), ((7642, 7681), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_website_domain"""'], {}), "('ix_places_nearby_website_domain')\n", (7646, 7681), False, 'from alembic import op\n'), ((7729, 7761), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_website"""'], {}), "('ix_places_nearby_website')\n", (7733, 7761), False, 'from alembic import op\n'), ((7809, 7844), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_updated_at"""'], {}), "('ix_places_nearby_updated_at')\n", (7813, 7844), False, 'from alembic import op\n'), ((7892, 7921), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_name"""'], {}), "('ix_places_nearby_name')\n", (7896, 7921), 
False, 'from alembic import op\n'), ((7969, 8003), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_longitude"""'], {}), "('ix_places_nearby_longitude')\n", (7973, 8003), False, 'from alembic import op\n'), ((8051, 8086), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_listing_id"""'], {}), "('ix_places_nearby_listing_id')\n", (8055, 8086), False, 'from alembic import op\n'), ((8134, 8167), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_latitude"""'], {}), "('ix_places_nearby_latitude')\n", (8138, 8167), False, 'from alembic import op\n'), ((8215, 8242), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_id"""'], {}), "('ix_places_nearby_id')\n", (8219, 8242), False, 'from alembic import op\n'), ((8290, 8323), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_distance"""'], {}), "('ix_places_nearby_distance')\n", (8294, 8323), False, 'from alembic import op\n'), ((8371, 8406), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_created_at"""'], {}), "('ix_places_nearby_created_at')\n", (8375, 8406), False, 'from alembic import op\n'), ((8454, 8489), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_chain_name"""'], {}), "('ix_places_nearby_chain_name')\n", (8458, 8489), False, 'from alembic import op\n'), ((8537, 8569), 'alembic.op.f', 'op.f', (['"""ix_places_nearby_address"""'], {}), "('ix_places_nearby_address')\n", (8541, 8569), False, 'from alembic import op\n'), ((8652, 8693), 'alembic.op.f', 'op.f', (['"""ix_interest_points_website_domain"""'], {}), "('ix_interest_points_website_domain')\n", (8656, 8693), False, 'from alembic import op\n'), ((8743, 8777), 'alembic.op.f', 'op.f', (['"""ix_interest_points_website"""'], {}), "('ix_interest_points_website')\n", (8747, 8777), False, 'from alembic import op\n'), ((8827, 8864), 'alembic.op.f', 'op.f', (['"""ix_interest_points_updated_at"""'], {}), "('ix_interest_points_updated_at')\n", (8831, 8864), False, 'from alembic import op\n'), ((8914, 8945), 'alembic.op.f', 'op.f', (['"""ix_interest_points_name"""'], {}), 
"('ix_interest_points_name')\n", (8918, 8945), False, 'from alembic import op\n'), ((8995, 9031), 'alembic.op.f', 'op.f', (['"""ix_interest_points_longitude"""'], {}), "('ix_interest_points_longitude')\n", (8999, 9031), False, 'from alembic import op\n'), ((9081, 9118), 'alembic.op.f', 'op.f', (['"""ix_interest_points_listing_id"""'], {}), "('ix_interest_points_listing_id')\n", (9085, 9118), False, 'from alembic import op\n'), ((9168, 9203), 'alembic.op.f', 'op.f', (['"""ix_interest_points_latitude"""'], {}), "('ix_interest_points_latitude')\n", (9172, 9203), False, 'from alembic import op\n'), ((9253, 9282), 'alembic.op.f', 'op.f', (['"""ix_interest_points_id"""'], {}), "('ix_interest_points_id')\n", (9257, 9282), False, 'from alembic import op\n'), ((9332, 9367), 'alembic.op.f', 'op.f', (['"""ix_interest_points_distance"""'], {}), "('ix_interest_points_distance')\n", (9336, 9367), False, 'from alembic import op\n'), ((9417, 9454), 'alembic.op.f', 'op.f', (['"""ix_interest_points_created_at"""'], {}), "('ix_interest_points_created_at')\n", (9421, 9454), False, 'from alembic import op\n'), ((9504, 9541), 'alembic.op.f', 'op.f', (['"""ix_interest_points_chain_name"""'], {}), "('ix_interest_points_chain_name')\n", (9508, 9541), False, 'from alembic import op\n'), ((9591, 9625), 'alembic.op.f', 'op.f', (['"""ix_interest_points_address"""'], {}), "('ix_interest_points_address')\n", (9595, 9625), False, 'from alembic import op\n'), ((451, 463), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (461, 463), True, 'import sqlalchemy as sa\n'), ((508, 518), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (516, 518), True, 'import sqlalchemy as sa\n'), ((563, 573), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (571, 573), True, 'import sqlalchemy as sa\n'), ((613, 647), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (645, 647), False, 'import sqlmodel\n'), ((691, 725), 'sqlmodel.sql.sqltypes.AutoString', 
'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (723, 725), False, 'import sqlmodel\n'), ((770, 782), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (780, 782), True, 'import sqlalchemy as sa\n'), ((826, 860), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (858, 860), False, 'import sqlmodel\n'), ((910, 944), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (942, 944), False, 'import sqlmodel\n'), ((990, 1024), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1022, 1024), False, 'import sqlmodel\n'), ((1070, 1082), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1080, 1082), True, 'import sqlalchemy as sa\n'), ((1128, 1141), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1139, 1141), True, 'import sqlalchemy as sa\n'), ((1187, 1200), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1198, 1200), True, 'import sqlalchemy as sa\n'), ((2633, 2645), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2643, 2645), True, 'import sqlalchemy as sa\n'), ((2690, 2700), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (2698, 2700), True, 'import sqlalchemy as sa\n'), ((2745, 2755), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (2753, 2755), True, 'import sqlalchemy as sa\n'), ((2795, 2829), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2827, 2829), False, 'import sqlmodel\n'), ((2873, 2907), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2905, 2907), False, 'import sqlmodel\n'), ((2952, 2964), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2962, 2964), True, 'import sqlalchemy as sa\n'), ((3008, 3042), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3040, 3042), False, 'import sqlmodel\n'), ((3092, 3126), 'sqlmodel.sql.sqltypes.AutoString', 
'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3124, 3126), False, 'import sqlmodel\n'), ((3172, 3206), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3204, 3206), False, 'import sqlmodel\n'), ((3252, 3264), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3262, 3264), True, 'import sqlalchemy as sa\n'), ((3310, 3323), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3321, 3323), True, 'import sqlalchemy as sa\n'), ((3369, 3382), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3380, 3382), True, 'import sqlalchemy as sa\n'), ((4760, 4772), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (4770, 4772), True, 'import sqlalchemy as sa\n'), ((4817, 4827), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (4825, 4827), True, 'import sqlalchemy as sa\n'), ((4872, 4882), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (4880, 4882), True, 'import sqlalchemy as sa\n'), ((4922, 4956), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (4954, 4956), False, 'import sqlmodel\n'), ((5000, 5034), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5032, 5034), False, 'import sqlmodel\n'), ((5079, 5091), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (5089, 5091), True, 'import sqlalchemy as sa\n'), ((5135, 5169), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5167, 5169), False, 'import sqlmodel\n'), ((5219, 5253), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5251, 5253), False, 'import sqlmodel\n'), ((5299, 5333), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5331, 5333), False, 'import sqlmodel\n'), ((5379, 5391), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (5389, 5391), True, 'import sqlalchemy as sa\n'), ((5437, 5450), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), 
'()\n', (5448, 5450), True, 'import sqlalchemy as sa\n'), ((5496, 5509), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (5507, 5509), True, 'import sqlalchemy as sa\n')] |
from __future__ import annotations
from fastapi import Depends
from fastapi.security import OAuth2PasswordBearer
from sqlmodel import Session, select
from src.database import engine
from src.models import User
from src.services.auth import decode_jwt
from .http_exceptions import credentials_exception
def get_db():
    """FastAPI dependency that yields a database session.

    A fresh session is opened per request and closed automatically when
    the ``with`` block exits after the response is produced.
    """
    with Session(engine) as db:
        yield db
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/auth/sign-in")
def get_current_user(
    db: Session = Depends(get_db), token: str = Depends(oauth2_scheme)
):
    """Resolve the request's bearer token to a ``User`` row.

    Raises the shared ``credentials_exception`` (HTTP 401) when the token
    cannot be decoded, carries no email, or matches no user.
    """
    payload = decode_jwt(token)
    if payload is None or payload.email is None:
        raise credentials_exception
    query = select(User).where(User.email == payload.email)
    matched = db.exec(query).first()
    if matched is None:
        raise credentials_exception
    return matched
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((398, 444), 'fastapi.security.OAuth2PasswordBearer', 'OAuth2PasswordBearer', ([], {'tokenUrl': '"""/auth/sign-in"""'}), "(tokenUrl='/auth/sign-in')\n", (418, 444), False, 'from fastapi.security import OAuth2PasswordBearer\n'), ((487, 502), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (494, 502), False, 'from fastapi import Depends\n'), ((517, 539), 'fastapi.Depends', 'Depends', (['oauth2_scheme'], {}), '(oauth2_scheme)\n', (524, 539), False, 'from fastapi import Depends\n'), ((560, 577), 'src.services.auth.decode_jwt', 'decode_jwt', (['token'], {}), '(token)\n', (570, 577), False, 'from src.services.auth import decode_jwt\n'), ((330, 345), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (337, 345), False, 'from sqlmodel import Session, select\n'), ((681, 693), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (687, 693), False, 'from sqlmodel import Session, select\n')] |
from sqlmodel import Field, Relationship, SQLModel
from typing import Optional
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class HeroBase(SQLModel):
    """Shared Hero fields (not a table); concrete models inherit from this."""

    # Indexed to allow fast lookups by name.
    name: str = Field(index=True)
    secret_name: str
    # Optional and indexed so heroes can be filtered/sorted by age.
    age: Optional[int] = Field(default=None, index=True)
    # FK to the owning team; NULL when the hero is unassigned.
    team_id: Optional[UUID] = Field(default=None, foreign_key="team.id")
class Hero(BaseUUIDModel, HeroBase, table=True):
    """Hero database table (UUID primary key comes from BaseUUIDModel)."""

    # Team relationship, eagerly loaded via selectin; Team.heroes is the inverse.
    team: Optional["Team"] = Relationship(back_populates="heroes", sa_relationship_kwargs={"lazy": "selectin"})
    # Audit column: id of the user that created this hero.
    created_by_id: Optional[UUID] = Field(default=None, foreign_key="user.id")
    # Explicit primaryjoin spells out the FK join to User (no back-reference declared here).
    created_by: "User" = Relationship(sa_relationship_kwargs={"lazy":"selectin", "primaryjoin":"Hero.created_by_id==User.id"})
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((197, 214), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (202, 214), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((261, 292), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'index': '(True)'}), '(default=None, index=True)\n', (266, 292), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((323, 365), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (328, 365), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((449, 535), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""heroes"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='heroes', sa_relationship_kwargs={'lazy':\n 'selectin'})\n", (461, 535), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((568, 610), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""user.id"""'}), "(default=None, foreign_key='user.id')\n", (573, 610), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((636, 743), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'lazy': 'selectin', 'primaryjoin': 'Hero.created_by_id==User.id'}"}), "(sa_relationship_kwargs={'lazy': 'selectin', 'primaryjoin':\n 'Hero.created_by_id==User.id'})\n", (648, 743), False, 'from sqlmodel import Field, Relationship, SQLModel\n')] |
from typing import Optional
from fastapi import FastAPI
from sqlmodel import (
SQLModel,
Field,
create_engine,
select,
Session
)
# Create the database engine (SQLite file "database.db" in the working directory).
engine = create_engine('sqlite:///database.db')
class Pessoa(SQLModel, table=True):
    """Person table: auto-generated integer primary key, name and age."""

    id: Optional[int] = Field(default=None, primary_key=True)
    nome: str
    idade: int
# Create the database tables, then instantiate the FastAPI application.
SQLModel.metadata.create_all(engine)

app = FastAPI()
@app.get('/')
def home():
    """Health-check endpoint: returns a static confirmation message."""
    return {"message": "Deu bom"}
@app.get('/pessoa')
def get_pessoa():
    """Return every Pessoa row as full objects."""
    stmt = select(Pessoa)
    with Session(engine) as session:
        return session.execute(stmt).scalars().all()
@app.get('/pessoa-nome')
def get_pessoa():
    """Return only the ``nome`` column for every Pessoa."""
    stmt = select(Pessoa.nome)
    with Session(engine) as session:
        return session.execute(stmt).scalars().all()
@app.get('/pessoa-nome-idade')
def get_pessoa():
    """Return ``(nome, idade)`` pairs for every Pessoa.

    Bug fix: the previous implementation called ``.scalars()`` on a
    two-column select; ``scalars()`` keeps only the first column, so
    ``idade`` was silently dropped from the response.  Plain ``.all()``
    returns row tuples that preserve both columns.
    """
    query = select(Pessoa.nome, Pessoa.idade)
    with Session(engine) as session:
        result = session.execute(query).all()
    return result
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.Field",
"sqlmodel.select",
"sqlmodel.create_engine"
] | [((183, 221), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {}), "('sqlite:///database.db')\n", (196, 221), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((377, 413), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (405, 413), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((422, 431), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (429, 431), False, 'from fastapi import FastAPI\n'), ((284, 321), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (289, 321), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((546, 560), 'sqlmodel.select', 'select', (['Pessoa'], {}), '(Pessoa)\n', (552, 560), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((730, 749), 'sqlmodel.select', 'select', (['Pessoa.nome'], {}), '(Pessoa.nome)\n', (736, 749), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((925, 958), 'sqlmodel.select', 'select', (['Pessoa.nome', 'Pessoa.idade'], {}), '(Pessoa.nome, Pessoa.idade)\n', (931, 958), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((570, 585), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (577, 585), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((759, 774), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (766, 774), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((968, 983), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (975, 983), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n')] |
# -*- coding: utf-8 -*-
from datetime import datetime
from typing import Optional
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
class Device(SQLModel, table=True):  # type: ignore
    """The Device model.

    It is used to model a device: hardware inventory (CPU, disk, swap),
    operating-system details and client version strings.
    """

    # setup the primary key of the table
    uuid: str = Field(primary_key=True)

    # setup the columns / properties
    name: Optional[str]
    type: Optional[str]
    arch: Optional[str]
    cpid: Optional[str]
    cpu_type: Optional[str]
    cpu_architecture: Optional[str]
    cpu_features: Optional[str]
    processor_count: Optional[int]
    coprocessor_count: Optional[int]
    product_name: Optional[str]
    floating_point_speed: Optional[float]
    integer_speed: Optional[float]
    total_disk_space: Optional[float]
    free_disk_space: Optional[float]
    swap_space: Optional[float]
    domain_name: Optional[str]
    operating_system_version: Optional[str]
    boinc_version: Optional[str]
    scitizen_version: Optional[str]
    platform: Optional[str]

    # setup the pseudo-columns (metadata related to the record)
    # _created_at is set once on INSERT; _updated_at refreshes on every UPDATE.
    created_at: Optional[datetime] = Field(
        sa_column=Column(DateTime(), default=datetime.utcnow, name="_created_at")
    )
    updated_at: Optional[datetime] = Field(
        sa_column=Column(DateTime(), onupdate=datetime.utcnow, name="_updated_at")
    )
class Project(SQLModel, table=True):  # type: ignore
    """The Project model.

    It is used to model a project (metadata, descriptive text, URLs) and
    links back to its tasks via the ``task`` relationship.
    """

    # setup the primary key of the table
    uuid: str = Field(primary_key=True)

    # setup the columns / properties
    # Fix: ``name`` was previously declared twice (the second declaration
    # carried a "# type: ignore" to silence the redefinition); the duplicate
    # has been removed so the column is defined exactly once.
    name: Optional[str]
    avatar: Optional[str]
    description: Optional[str]
    general_area: Optional[str]
    home: Optional[str]
    image: Optional[str]
    is_active: Optional[bool] = True
    keywords: Optional[str]
    platforms: Optional[str]
    specific_area: Optional[str]
    summary: Optional[str]
    url: Optional[str]
    web_url: Optional[str]
    weak_authenticator: Optional[str]

    # setup the pseudo-columns (metadata related to the record)
    created_at: Optional[datetime] = Field(
        sa_column=Column(DateTime(), default=datetime.utcnow, name="_created_at")
    )
    updated_at: Optional[datetime] = Field(
        sa_column=Column(DateTime(), onupdate=datetime.utcnow, name="_updated_at")
    )

    # setup the relationship
    task: Optional["Task"] = Relationship(back_populates="project_rel")
class Task(SQLModel, table=True):  # type: ignore
    """The Task model.

    It is used to model a task: execution state, timing/progress counters
    and a foreign key linking it to the owning Project.
    """

    # setup the primary key of the table
    uuid: str = Field(primary_key=True)

    # setup the columns / properties
    active_task_state: Optional[str]
    app_version_num: Optional[float]
    bytes_received: Optional[float]
    bytes_sent: Optional[float]
    checkpoint_cpu_time: Optional[float]
    completed_at: Optional[datetime]
    current_cpu_time: Optional[float]
    elapsed_time: Optional[float]
    estimated_cpu_time_remaining: Optional[float]
    exit_code: Optional[float]
    exit_statement: Optional[str]
    fraction_done: Optional[float]
    name: Optional[str]
    page_fault_rate: Optional[float]
    pid: Optional[float]
    plan_class: Optional[str]
    platform: Optional[str]
    progress_rate: Optional[float]
    received_at: Optional[datetime]
    report_deadline_at: Optional[datetime]
    scheduler_state: Optional[str]
    set_size: Optional[float]
    slot: Optional[float]
    slot_path: Optional[str]
    state: Optional[str]
    swap_size: Optional[float]
    version_num: Optional[float]
    wu_name: Optional[str]

    # setup the pseudo-columns (metadata related to the record)
    # _created_at is set once on INSERT; _updated_at refreshes on every UPDATE.
    created_at: Optional[datetime] = Field(
        sa_column=Column(DateTime(), default=datetime.utcnow, name="_created_at")
    )
    updated_at: Optional[datetime] = Field(
        sa_column=Column(DateTime(), onupdate=datetime.utcnow, name="_updated_at")
    )

    # setup the relationship
    # NOTE: the FK targets Project.url (not its uuid primary key).
    project_url: Optional[str] = Field(foreign_key="project.url")
    project_rel: Optional[Project] = Relationship(back_populates="task")
class TaskWithProject(Task):
    """The TaskWithProject model.

    It is used to model a task linked to a project,
    so the API will be able in one GET to fetch a task with the linked project.
    """

    # setup the columns / properties
    # Embedded Project payload populated alongside the task in API responses.
    project: Optional[Project]
| [
"sqlmodel.Field",
"sqlmodel.DateTime",
"sqlmodel.Relationship"
] | [((333, 356), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (338, 356), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((1560, 1583), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (1565, 1583), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((2455, 2497), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""project_rel"""'}), "(back_populates='project_rel')\n", (2467, 2497), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((2672, 2695), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (2677, 2695), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((4063, 4095), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""project.url"""'}), "(foreign_key='project.url')\n", (4068, 4095), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((4133, 4168), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""task"""'}), "(back_populates='task')\n", (4145, 4168), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((1181, 1191), 'sqlmodel.DateTime', 'DateTime', ([], {}), '()\n', (1189, 1191), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((1313, 1323), 'sqlmodel.DateTime', 'DateTime', ([], {}), '()\n', (1321, 1323), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((2200, 2210), 'sqlmodel.DateTime', 'DateTime', ([], {}), '()\n', (2208, 2210), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((2332, 2342), 'sqlmodel.DateTime', 'DateTime', ([], {}), '()\n', (2340, 2342), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((3804, 3814), 'sqlmodel.DateTime', 'DateTime', ([], {}), '()\n', (3812, 3814), False, 'from 
sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((3936, 3946), 'sqlmodel.DateTime', 'DateTime', ([], {}), '()\n', (3944, 3946), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n')] |
from datetime import datetime
from typing import Optional
from pydantic import BaseSettings, HttpUrl
from sqlmodel import Field, SQLModel # pyright: ignore[reportUnknownVariableType]
class Post(SQLModel):
    """A post payload (not a table): id, optional text, photo URLs and timestamp."""

    id: int
    text: Optional[str]
    photos: list[HttpUrl]
    date: datetime
class PostDB(SQLModel, table=True):
    """Persistence table holding only a post id primary key.

    Fix: ``id`` is now annotated ``Optional[int]`` to agree with
    ``default=None`` — the SQLModel idiom for an auto-assigned primary
    key.  The previous plain ``int`` annotation contradicted the ``None``
    default and fails static type checking.
    """

    id: Optional[int] = Field(default=None, primary_key=True)
class Settings(BaseSettings):
    """Application configuration, populated from environment variables."""

    # VK API credentials and the wall owner to read from.
    vk_token: str
    vk_owner_id: int
    # Telegram bot token and destination chat.
    tg_token: str
    tg_chat_id: int
    # Local SQLite database location.
    db_path: str = "/tmp/database.db"
    # Optional Sentry error-reporting DSN.
    sentry_dsn: Optional[str]
class LambdaSettings(Settings):
    """Settings extended with S3/AWS fields for running in a lambda environment."""

    s3_bucket: str
    s3_key: str
    s3_endpoint: str
    aws_access_key_id: str
    aws_secret_access_key: str
| [
"sqlmodel.Field"
] | [((342, 379), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (347, 379), False, 'from sqlmodel import Field, SQLModel\n')] |
import os
import pytest
import time
from fastapi.testclient import TestClient
from sqlmodel import Session, create_engine
from main import app
from database import get_db
from settings import Settings
from alembic.command import upgrade, downgrade
from alembic.config import Config
@pytest.fixture(autouse=True)
def slow_down_tests():
    """Autouse fixture that sleeps one second after every test.

    NOTE(review): the reason for the delay is not visible here -- presumably
    throttling against an external service or the shared DB file; confirm
    before removing.
    """
    yield
    time.sleep(1)
@pytest.fixture(scope="session")
def apply_migrations():
    """Session-scoped fixture: run all Alembic migrations to ``head`` before
    the test session, and downgrade back to ``base`` after it finishes."""
    config = Config("alembic.ini")
    upgrade(config, "head")
    yield
    downgrade(config, "base")
@pytest.fixture(name="session")
def session_fixture(apply_migrations: None):
    """Yield a SQLModel session on the test database.

    Depends on ``apply_migrations`` so the schema exists before any query.
    """
    cfg = Settings()
    test_engine = create_engine(
        cfg.db_uri_test, connect_args={"check_same_thread": False}
    )
    with Session(test_engine) as db_session:
        yield db_session
@pytest.fixture(name="client")
def client_fixture(session: Session):
    """Yield a FastAPI TestClient whose ``get_db`` dependency is overridden
    with the per-test session; the override is cleared on teardown."""
    app.dependency_overrides[get_db] = lambda: session
    test_client = TestClient(app)
    yield test_client
    app.dependency_overrides.clear()
def pytest_configure(config):
    """
    Allows plugins and conftest files to perform initial configuration.
    This hook is called for every plugin and initial conftest
    file after command line options have been parsed.
    """
    # Stash a shared slot on the pytest module so tests can pass auth state
    # between each other; presumably set by a login test later -- TODO confirm.
    pytest.access_token = None
def pytest_sessionstart(session):
    """
    Called after the Session object has been created and
    before performing collection and entering the run test loop.
    """
    # Intentionally empty: kept as a placeholder for future session setup.
def pytest_sessionfinish(session, exitstatus):
    """
    Called after whole test run finished, right before
    returning the exit status to the system.
    """
    # Intentionally empty: kept as a placeholder for future session teardown.
def pytest_unconfigure(config):
    """
    called before test process is exited.
    """
    # Best-effort removal of the SQLite test database left in the working
    # directory; a missing file is not an error (EAFP).
    try:
        os.remove(os.path.join('.', 'test.db'))
    except FileNotFoundError:
        pass
| [
"sqlmodel.create_engine",
"sqlmodel.Session"
] | [((285, 313), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (299, 313), False, 'import pytest\n'), ((368, 399), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (382, 399), False, 'import pytest\n'), ((530, 560), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""session"""'}), "(name='session')\n", (544, 560), False, 'import pytest\n'), ((800, 829), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""client"""'}), "(name='client')\n", (814, 829), False, 'import pytest\n'), ((351, 364), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (361, 364), False, 'import time\n'), ((437, 458), 'alembic.config.Config', 'Config', (['"""alembic.ini"""'], {}), "('alembic.ini')\n", (443, 458), False, 'from alembic.config import Config\n'), ((463, 486), 'alembic.command.upgrade', 'upgrade', (['config', '"""head"""'], {}), "(config, 'head')\n", (470, 486), False, 'from alembic.command import upgrade, downgrade\n'), ((501, 526), 'alembic.command.downgrade', 'downgrade', (['config', '"""base"""'], {}), "(config, 'base')\n", (510, 526), False, 'from alembic.command import upgrade, downgrade\n'), ((621, 631), 'settings.Settings', 'Settings', ([], {}), '()\n', (629, 631), False, 'from settings import Settings\n'), ((645, 723), 'sqlmodel.create_engine', 'create_engine', (['settings.db_uri_test'], {'connect_args': "{'check_same_thread': False}"}), "(settings.db_uri_test, connect_args={'check_same_thread': False})\n", (658, 723), False, 'from sqlmodel import Session, create_engine\n'), ((996, 1011), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (1006, 1011), False, 'from fastapi.testclient import TestClient\n'), ((1033, 1065), 'main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (1063, 1065), False, 'from main import app\n'), ((1801, 1829), 'os.path.join', 'os.path.join', (['"""."""', '"""test.db"""'], {}), "('.', 'test.db')\n", 
(1813, 1829), False, 'import os\n'), ((1837, 1860), 'os.path.exists', 'os.path.exists', (['db_file'], {}), '(db_file)\n', (1851, 1860), False, 'import os\n'), ((747, 762), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (754, 762), False, 'from sqlmodel import Session, create_engine\n'), ((1870, 1888), 'os.remove', 'os.remove', (['db_file'], {}), '(db_file)\n', (1879, 1888), False, 'import os\n')] |
import random
from datetime import datetime
from typing import List, Optional
from sqlmodel import or_, select
from config.notif_config import NotifConfig
from src.api.fixtures_client import FixturesClient
from src.db.db_manager import NotifierDBManager
from src.db.notif_sql_models import Fixture as DBFixture
from src.db.notif_sql_models import League as DBLeague
from src.db.notif_sql_models import Team as DBTeam
from src.emojis import Emojis
from src.entities import Fixture, TeamStanding
from src.senders.email_sender import send_email_html
from src.senders.telegram_sender import send_telegram_message
from src.utils.date_utils import get_date_spanish_text_format
from src.utils.fixtures_utils import (
get_image_search,
get_last_fixture,
get_last_fixture_db,
get_next_fixture,
get_next_fixture_db,
get_youtube_highlights_videos,
)
from src.utils.message_utils import (
get_first_phrase_msg,
get_team_intro_message,
is_subscripted_for_team,
)
class TeamFixturesManager:
    """Looks up a team's fixtures (from the API client or the notifier DB) and
    sends next-match / last-match / line-up notifications via Telegram and
    email.

    Fixes applied in this revision:
    * ``_telegram_next_fixture_notification``: ``is_on_demand`` was annotated
      with the *value* ``False`` instead of the type ``bool``; corrected and
      given a default (backward compatible for positional callers).
    * ``notify_next_fixture``: hard-coded 500-day window replaced with the
      same ``NotifConfig.NEXT_MATCH_THRESHOLD`` the DB variant uses.
    * ``_perform_line_up_confirmed_notification``: ``intro_message`` was only
      defined inside the Telegram loop but used in the email loop, causing a
      NameError when there are no Telegram recipients.
    * ``notify_last_fixture_db`` reuses ``get_last_team_fixture`` instead of
      duplicating its query.
    """

    def __init__(self, season: str, team_id: str) -> None:
        self._season = season
        self._team_id = team_id
        self._fixtures_client = FixturesClient()
        self._notifier_db_manager = NotifierDBManager()

    def get_next_team_fixture_text(self, user: str = "") -> tuple:
        """Return (message, image_url) for the next fixture, or fallback text."""
        next_team_fixture = self.get_next_team_fixture()
        return (
            self._telegram_next_fixture_notification(next_team_fixture, True, user)
            if next_team_fixture
            else ("Fixture para el equipo no encontrado", "")
        )

    def get_last_team_fixture_text(self, user: str = "") -> tuple:
        """Return (message, image_url) for the last fixture, or fallback text."""
        last_team_fixture = self.get_last_team_fixture()
        return (
            self._telegram_last_fixture_notification(last_team_fixture, user)
            if last_team_fixture
            else ("Fixture para el equipo no encontrado", "")
        )

    def _team_fixtures_from_db(self) -> list:
        """Select all stored fixtures where this team plays home or away."""
        fixtures_statement = select(DBFixture).where(
            or_(
                DBFixture.home_team == self._team_id,
                DBFixture.away_team == self._team_id,
            )
        )
        return self._notifier_db_manager.select_records(fixtures_statement)

    def get_next_team_fixture(self) -> Optional[Fixture]:
        """Next fixture for the team according to the notifier DB, or None."""
        team_fixtures = self._team_fixtures_from_db()
        return get_next_fixture_db(team_fixtures) if team_fixtures else None

    def notify_next_fixture_db(self) -> None:
        """Notify if the DB's next match is within the configured window."""
        next_team_fixture = self.get_next_team_fixture()
        if next_team_fixture:
            if next_team_fixture.remaining_time().days < NotifConfig.NEXT_MATCH_THRESHOLD:
                self._perform_fixture_notification(next_team_fixture)

    def notify_next_fixture(self) -> None:
        """API-based variant of :meth:`notify_next_fixture_db`."""
        team_fixtures = self._fixtures_client.get_fixtures_by(
            self._season, self._team_id
        )
        next_team_fixture = None
        if "response" in team_fixtures.as_dict:
            next_team_fixture = get_next_fixture(
                team_fixtures.as_dict["response"], self._team_id
            )
        if next_team_fixture:
            # FIX: was a hard-coded 500-day window (effectively always true);
            # use the same configured threshold as the DB-based variant.
            if next_team_fixture.remaining_time().days < NotifConfig.NEXT_MATCH_THRESHOLD:
                self._perform_fixture_notification(next_team_fixture)

    def notify_fixture_line_up_update(self) -> None:
        """Notify a confirmed line-up for a match starting within ~6 hours."""
        team_fixtures = self._fixtures_client.get_fixtures_by(
            self._season, self._team_id
        )
        next_team_fixture = None
        if "response" in team_fixtures.as_dict:
            next_team_fixture = get_next_fixture(
                team_fixtures.as_dict["response"], self._team_id
            )
        if next_team_fixture:
            if (
                next_team_fixture.remaining_time().days < 1
                and next_team_fixture.remaining_time().hours < 6
                and next_team_fixture.line_up
            ):
                self._perform_line_up_confirmed_notification(next_team_fixture)
            else:
                print(
                    f"There is still no line up for the match of {next_team_fixture.home_team} vs {next_team_fixture.away_team}"
                )
                print(str(next_team_fixture.remaining_time()))

    def get_last_team_fixture(self) -> Optional[Fixture]:
        """Last fixture for the team according to the notifier DB, or None."""
        team_fixtures = self._team_fixtures_from_db()
        return get_last_fixture_db(team_fixtures) if team_fixtures else None

    def notify_last_fixture_db(self) -> None:
        """Notify if the DB's last match finished within the window."""
        # FIX: previously duplicated the fixture query inline; reuse the getter.
        last_team_fixture = self.get_last_team_fixture()
        if last_team_fixture:
            if (
                NotifConfig.LAST_MATCH_THRESHOLD_DAYS
                <= last_team_fixture.remaining_time().days
                <= 0
            ):
                self._perform_last_fixture_notification(last_team_fixture)

    def notify_last_fixture(self) -> None:
        """API-based variant of :meth:`notify_last_fixture_db`.

        NOTE(review): the bounds here (``-1 .. LAST_MATCH_THRESHOLD_DAYS``) are
        ordered opposite to the DB variant, which implies the config value is
        negative there -- confirm which orientation is intended.
        """
        team_fixtures = self._fixtures_client.get_fixtures_by(
            self._season, self._team_id
        )
        last_team_fixture = get_last_fixture(
            team_fixtures.as_dict["response"], self._team_id
        )
        if last_team_fixture:
            if (
                -1
                <= last_team_fixture.remaining_time().days
                <= NotifConfig.LAST_MATCH_THRESHOLD_DAYS
            ):
                last_team_fixture.highlights = get_youtube_highlights_videos(
                    last_team_fixture.home_team, last_team_fixture.away_team
                )
                self._perform_last_fixture_notification(last_team_fixture)

    def _telegram_last_fixture_notification(
        self, team_fixture: Fixture, user: str = ""
    ) -> tuple:
        """Build the (text, image_url) Telegram payload for a played match."""
        match_images = self._get_match_images(team_fixture.championship.league_id)
        match_image_url = random.choice(match_images)
        spanish_format_date = get_date_spanish_text_format(team_fixture.bsas_date)
        team_intro_message = get_team_intro_message(
            team_fixture.home_team
            if str(team_fixture.home_team.id) == str(self._team_id)
            else team_fixture.away_team
        )["last_match"]
        highlights_yt_url = f"https://www.youtube.com/results?search_query={team_fixture.home_team.name}+vs+{team_fixture.away_team.name}+jugadas+resumen"
        highlights_text = f"{Emojis.FILM_PROJECTOR.value} <a href='{highlights_yt_url}'>HIGHLIGHTS</a>"
        telegram_message = (
            f"{Emojis.WAVING_HAND.value}Hola {user}!\n\n"
            f"{team_intro_message} "
            f"jugó el {spanish_format_date}! \nEste fue el resultado: \n\n"
            f"{team_fixture.matched_played_telegram_like_repr()}"
            f"{highlights_text}"
        )
        return (telegram_message, match_image_url)

    def _telegram_next_fixture_notification(
        self, team_fixture: Fixture, is_on_demand: bool = False, user: str = ""
    ) -> tuple:
        """Build the (text, image_url) Telegram payload for an upcoming match.

        FIX: ``is_on_demand`` had the literal ``False`` as its annotation and
        no default; corrected to ``bool = False`` (backward compatible).
        """
        spanish_format_date = get_date_spanish_text_format(team_fixture.bsas_date)
        match_images = self._get_match_images(team_fixture.championship.league_id)
        match_image_url = random.choice(match_images)
        date_text = (
            "es HOY!"
            if team_fixture.bsas_date.day == datetime.today().day
            else f"es el {Emojis.SPIRAL_CALENDAR.value} {spanish_format_date}."
        )
        first_phrase = get_first_phrase_msg(True, is_on_demand)
        team_intro_message = get_team_intro_message(
            team_fixture.home_team
            if str(team_fixture.home_team.id) == str(self._team_id)
            else team_fixture.away_team
        )["next_match"]
        intro_message = f"{first_phrase} {team_intro_message}"
        telegram_message = (
            f"{Emojis.WAVING_HAND.value}Hola {user}! "
            f"\n\n{intro_message} {date_text}\n\n{team_fixture.telegram_like_repr()}"
        )
        return (telegram_message, match_image_url)

    def _perform_last_fixture_notification(
        self, team_fixture: Fixture, team_standing: TeamStanding = None
    ) -> None:
        """Send last-match notifications to all Telegram/email recipients.

        NOTE(review): the email loop does not filter recipients with
        ``is_subscripted_for_team`` the way the Telegram loop does -- confirm
        whether that is intentional.
        """
        match_images = self._get_match_images(team_fixture.championship.league_id)
        match_image_url = random.choice(match_images)
        # telegram
        team_standing_msg = (
            f"{Emojis.RED_EXCLAMATION_MARK.value} Situación actual en el campeonato: \n\n{team_standing.telegram_like_repr()}\n"
            if team_standing
            else ""
        )
        team_intro_message = get_team_intro_message(
            team_fixture.home_team
            if str(team_fixture.home_team.id) == str(self._team_id)
            else team_fixture.away_team
        )["last_match"]
        highlights_yt_url = f"https://www.youtube.com/results?search_query={team_fixture.home_team.name}+vs+{team_fixture.away_team.name}+jugadas+resumen"
        highlights_text = f"{Emojis.FILM_PROJECTOR.value} <a href='{highlights_yt_url}'>HIGHLIGHTS</a>"
        FOOTBALL_TELEGRAM_RECIPIENTS = NotifConfig.TELEGRAM_RECIPIENTS
        for recipient in FOOTBALL_TELEGRAM_RECIPIENTS:
            if is_subscripted_for_team(recipient, self._team_id):
                telegram_message = (
                    f"{Emojis.WAVING_HAND.value}Hola {recipient.name}!\n\n"
                    f"{team_intro_message} "
                    f"jugó ayer! \nEste fue el resultado: \n\n"
                    f"{team_fixture.matched_played_telegram_like_repr()}"
                    f"\n{highlights_text}"
                )
                send_telegram_message(
                    recipient.telegram_id,
                    telegram_message,
                    match_image_url,
                )
        # email
        team_standing_email_msg = (
            f"Situación actual en el campeonato: \n\n{team_standing.email_like_repr()}"
            if team_standing
            else ""
        )
        match_image_text = f"<img src='{match_image_url}'>"
        email_standing_message = (
            f"{Emojis.RED_EXCLAMATION_MARK.value}{team_standing_email_msg}\n"
        )
        highlights_text = f"https://www.youtube.com/results?search_query={team_fixture.home_team.name}+vs+{team_fixture.away_team.name}+jugadas+resumen"
        EMAIL_RECIPIENTS = NotifConfig.EMAIL_RECIPIENTS
        for recipient in EMAIL_RECIPIENTS:
            message = (
                f"{Emojis.WAVING_HAND.value}Hola {recipient.name}!\n\n{team_intro_message} "
                f"jugó ayer!<br /><br />{match_image_text}<br /><br />Este fue el resultado: \n\n{team_fixture.matched_played_email_like_repr()}"
                f"<br /><br />{email_standing_message}<br /><br />{highlights_text}"
            )
            send_email_html(
                f"{team_fixture.home_team.name} ({team_fixture.match_score.home_score}) - "
                f"({team_fixture.match_score.away_score}) {team_fixture.away_team.name}",
                message,
                recipient.email,
            )

    def _perform_fixture_notification(self, team_fixture: Fixture) -> None:
        """Send next-match notifications to all Telegram/email recipients."""
        spanish_format_date = get_date_spanish_text_format(team_fixture.bsas_date)
        match_images = self._get_match_images(team_fixture.championship.league_id)
        match_image_url = random.choice(match_images)
        match_image_text = f"<img width='100%' height='100%' src='{match_image_url}'>"
        date_text = (
            "es HOY!"
            if team_fixture.bsas_date.day == datetime.today().day
            else f"es el {Emojis.SPIRAL_CALENDAR.value} {spanish_format_date}."
        )
        first_phrase = get_first_phrase_msg(True)
        team_intro_message = get_team_intro_message(
            team_fixture.home_team
            if str(team_fixture.home_team.id) == str(self._team_id)
            else team_fixture.away_team
        )["next_match"]
        intro_message = f"{first_phrase} {team_intro_message}"
        # telegram
        FOOTBALL_TELEGRAM_RECIPIENTS = NotifConfig.TELEGRAM_RECIPIENTS
        for recipient in FOOTBALL_TELEGRAM_RECIPIENTS:
            if is_subscripted_for_team(recipient, self._team_id):
                telegram_message = (
                    f"{Emojis.WAVING_HAND.value}Hola "
                    f"{recipient.name}!\n\n{intro_message} {date_text}\n\n{team_fixture.telegram_like_repr()}"
                )
                send_telegram_message(
                    recipient.telegram_id,
                    telegram_message,
                    photo=match_image_url,
                )
        # email
        EMAIL_RECIPIENTS = NotifConfig.EMAIL_RECIPIENTS
        for recipient in EMAIL_RECIPIENTS:
            message = f"{Emojis.WAVING_HAND.value}Hola {recipient.name}!\n\n{intro_message} {date_text}\n\n<br /><br />{match_image_text}<br /><br />{team_fixture.email_like_repr()}"
            send_email_html(
                f"{team_fixture.home_team.name} vs. {team_fixture.away_team.name}",
                message,
                recipient.email,
            )

    def _perform_line_up_confirmed_notification(self, team_fixture: Fixture) -> None:
        """Send line-up-confirmed notifications to all recipients."""
        match_teams = f"{team_fixture.home_team.name} vs {team_fixture.away_team.name}"
        match_image_url = get_image_search(match_teams)
        match_image_text = f"<img src='{match_image_url}'>"
        # FIX: intro_message was defined inside the Telegram loop but used in
        # the email loop below -> NameError with zero Telegram recipients.
        intro_message = f"Se actualizó la alineación para {match_teams}:"
        # telegram
        FOOTBALL_TELEGRAM_RECIPIENTS = NotifConfig.TELEGRAM_RECIPIENTS
        for recipient in FOOTBALL_TELEGRAM_RECIPIENTS:
            telegram_message = f"{Emojis.WAVING_HAND.value}Hola {recipient.name}!\n\n{intro_message}\n\n{team_fixture.telegram_like_repr()}"
            send_telegram_message(
                recipient.telegram_id,
                telegram_message,
                photo=match_image_url,
            )
        # email
        EMAIL_RECIPIENTS = NotifConfig.EMAIL_RECIPIENTS
        for recipient in EMAIL_RECIPIENTS:
            message = f"{Emojis.WAVING_HAND.value}Hola {recipient.name}!\n\n{intro_message}\n\n<br /><br />{match_image_text}<br /><br />{team_fixture.email_like_repr()}"
            send_email_html(
                f"{team_fixture.home_team.name} vs. {team_fixture.away_team.name}",
                message,
                recipient.email,
            )

    def _get_match_images(self, league_id: int) -> List[str]:
        """Return [team_picture_url, league_logo_url] from the notifier DB."""
        match_image_url_team_statement = select(DBTeam).where(
            DBTeam.id == self._team_id
        )
        match_image_url_league_statement = select(DBLeague).where(
            DBLeague.id == league_id
        )
        team_image_url = self._notifier_db_manager.select_records(
            match_image_url_team_statement
        )[0].picture
        league_image_url = self._notifier_db_manager.select_records(
            match_image_url_league_statement
        )[0].logo
        return [team_image_url, league_image_url]
| [
"sqlmodel.select",
"sqlmodel.or_"
] | [((1170, 1186), 'src.api.fixtures_client.FixturesClient', 'FixturesClient', ([], {}), '()\n', (1184, 1186), False, 'from src.api.fixtures_client import FixturesClient\n'), ((1223, 1242), 'src.db.db_manager.NotifierDBManager', 'NotifierDBManager', ([], {}), '()\n', (1240, 1242), False, 'from src.db.db_manager import NotifierDBManager\n'), ((5615, 5681), 'src.utils.fixtures_utils.get_last_fixture', 'get_last_fixture', (["team_fixtures.as_dict['response']", 'self._team_id'], {}), "(team_fixtures.as_dict['response'], self._team_id)\n", (5631, 5681), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((6373, 6400), 'random.choice', 'random.choice', (['match_images'], {}), '(match_images)\n', (6386, 6400), False, 'import random\n'), ((6431, 6483), 'src.utils.date_utils.get_date_spanish_text_format', 'get_date_spanish_text_format', (['team_fixture.bsas_date'], {}), '(team_fixture.bsas_date)\n', (6459, 6483), False, 'from src.utils.date_utils import get_date_spanish_text_format\n'), ((7492, 7544), 'src.utils.date_utils.get_date_spanish_text_format', 'get_date_spanish_text_format', (['team_fixture.bsas_date'], {}), '(team_fixture.bsas_date)\n', (7520, 7544), False, 'from src.utils.date_utils import get_date_spanish_text_format\n'), ((7654, 7681), 'random.choice', 'random.choice', (['match_images'], {}), '(match_images)\n', (7667, 7681), False, 'import random\n'), ((7906, 7946), 'src.utils.message_utils.get_first_phrase_msg', 'get_first_phrase_msg', (['(True)', 'is_on_demand'], {}), '(True, is_on_demand)\n', (7926, 7946), False, 'from src.utils.message_utils import get_first_phrase_msg, get_team_intro_message, is_subscripted_for_team\n'), ((8705, 8732), 'random.choice', 'random.choice', (['match_images'], {}), '(match_images)\n', (8718, 8732), False, 'import random\n'), ((11566, 11618), 'src.utils.date_utils.get_date_spanish_text_format', 
'get_date_spanish_text_format', (['team_fixture.bsas_date'], {}), '(team_fixture.bsas_date)\n', (11594, 11618), False, 'from src.utils.date_utils import get_date_spanish_text_format\n'), ((11728, 11755), 'random.choice', 'random.choice', (['match_images'], {}), '(match_images)\n', (11741, 11755), False, 'import random\n'), ((12067, 12093), 'src.utils.message_utils.get_first_phrase_msg', 'get_first_phrase_msg', (['(True)'], {}), '(True)\n', (12087, 12093), False, 'from src.utils.message_utils import get_first_phrase_msg, get_team_intro_message, is_subscripted_for_team\n'), ((13677, 13706), 'src.utils.fixtures_utils.get_image_search', 'get_image_search', (['match_teams'], {}), '(match_teams)\n', (13693, 13706), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((2024, 2103), 'sqlmodel.or_', 'or_', (['(DBFixture.home_team == self._team_id)', '(DBFixture.away_team == self._team_id)'], {}), '(DBFixture.home_team == self._team_id, DBFixture.away_team == self._team_id)\n', (2027, 2103), False, 'from sqlmodel import or_, select\n'), ((2344, 2378), 'src.utils.fixtures_utils.get_next_fixture_db', 'get_next_fixture_db', (['team_fixtures'], {}), '(team_fixtures)\n', (2363, 2378), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((2981, 3047), 'src.utils.fixtures_utils.get_next_fixture', 'get_next_fixture', (["team_fixtures.as_dict['response']", 'self._team_id'], {}), "(team_fixtures.as_dict['response'], self._team_id)\n", (2997, 3047), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((3523, 3589), 'src.utils.fixtures_utils.get_next_fixture', 'get_next_fixture', (["team_fixtures.as_dict['response']", 
'self._team_id'], {}), "(team_fixtures.as_dict['response'], self._team_id)\n", (3539, 3589), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((4310, 4389), 'sqlmodel.or_', 'or_', (['(DBFixture.home_team == self._team_id)', '(DBFixture.away_team == self._team_id)'], {}), '(DBFixture.home_team == self._team_id, DBFixture.away_team == self._team_id)\n', (4313, 4389), False, 'from sqlmodel import or_, select\n'), ((4625, 4659), 'src.utils.fixtures_utils.get_last_fixture_db', 'get_last_fixture_db', (['team_fixtures'], {}), '(team_fixtures)\n', (4644, 4659), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((4807, 4886), 'sqlmodel.or_', 'or_', (['(DBFixture.home_team == self._team_id)', '(DBFixture.away_team == self._team_id)'], {}), '(DBFixture.home_team == self._team_id, DBFixture.away_team == self._team_id)\n', (4810, 4886), False, 'from sqlmodel import or_, select\n'), ((5122, 5156), 'src.utils.fixtures_utils.get_last_fixture_db', 'get_last_fixture_db', (['team_fixtures'], {}), '(team_fixtures)\n', (5141, 5156), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((9594, 9643), 'src.utils.message_utils.is_subscripted_for_team', 'is_subscripted_for_team', (['recipient', 'self._team_id'], {}), '(recipient, self._team_id)\n', (9617, 9643), False, 'from src.utils.message_utils import get_first_phrase_msg, get_team_intro_message, is_subscripted_for_team\n'), ((11188, 11385), 'src.senders.email_sender.send_email_html', 'send_email_html', (['f"""{team_fixture.home_team.name} ({team_fixture.match_score.home_score}) - ({team_fixture.match_score.away_score}) {team_fixture.away_team.name}"""', 'message', 
'recipient.email'], {}), "(\n f'{team_fixture.home_team.name} ({team_fixture.match_score.home_score}) - ({team_fixture.match_score.away_score}) {team_fixture.away_team.name}'\n , message, recipient.email)\n", (11203, 11385), False, 'from src.senders.email_sender import send_email_html\n'), ((12539, 12588), 'src.utils.message_utils.is_subscripted_for_team', 'is_subscripted_for_team', (['recipient', 'self._team_id'], {}), '(recipient, self._team_id)\n', (12562, 12588), False, 'from src.utils.message_utils import get_first_phrase_msg, get_team_intro_message, is_subscripted_for_team\n'), ((13303, 13421), 'src.senders.email_sender.send_email_html', 'send_email_html', (['f"""{team_fixture.home_team.name} vs. {team_fixture.away_team.name}"""', 'message', 'recipient.email'], {}), "(\n f'{team_fixture.home_team.name} vs. {team_fixture.away_team.name}',\n message, recipient.email)\n", (13318, 13421), False, 'from src.senders.email_sender import send_email_html\n'), ((14144, 14234), 'src.senders.telegram_sender.send_telegram_message', 'send_telegram_message', (['recipient.telegram_id', 'telegram_message'], {'photo': 'match_image_url'}), '(recipient.telegram_id, telegram_message, photo=\n match_image_url)\n', (14165, 14234), False, 'from src.senders.telegram_sender import send_telegram_message\n'), ((14592, 14710), 'src.senders.email_sender.send_email_html', 'send_email_html', (['f"""{team_fixture.home_team.name} vs. {team_fixture.away_team.name}"""', 'message', 'recipient.email'], {}), "(\n f'{team_fixture.home_team.name} vs. 
{team_fixture.away_team.name}',\n message, recipient.email)\n", (14607, 14710), False, 'from src.senders.email_sender import send_email_html\n'), ((1987, 2004), 'sqlmodel.select', 'select', (['DBFixture'], {}), '(DBFixture)\n', (1993, 2004), False, 'from sqlmodel import or_, select\n'), ((4273, 4290), 'sqlmodel.select', 'select', (['DBFixture'], {}), '(DBFixture)\n', (4279, 4290), False, 'from sqlmodel import or_, select\n'), ((4770, 4787), 'sqlmodel.select', 'select', (['DBFixture'], {}), '(DBFixture)\n', (4776, 4787), False, 'from sqlmodel import or_, select\n'), ((5949, 6040), 'src.utils.fixtures_utils.get_youtube_highlights_videos', 'get_youtube_highlights_videos', (['last_team_fixture.home_team', 'last_team_fixture.away_team'], {}), '(last_team_fixture.home_team,\n last_team_fixture.away_team)\n', (5978, 6040), False, 'from src.utils.fixtures_utils import get_image_search, get_last_fixture, get_last_fixture_db, get_next_fixture, get_next_fixture_db, get_youtube_highlights_videos\n'), ((10018, 10097), 'src.senders.telegram_sender.send_telegram_message', 'send_telegram_message', (['recipient.telegram_id', 'telegram_message', 'match_image_url'], {}), '(recipient.telegram_id, telegram_message, match_image_url)\n', (10039, 10097), False, 'from src.senders.telegram_sender import send_telegram_message\n'), ((12827, 12917), 'src.senders.telegram_sender.send_telegram_message', 'send_telegram_message', (['recipient.telegram_id', 'telegram_message'], {'photo': 'match_image_url'}), '(recipient.telegram_id, telegram_message, photo=\n match_image_url)\n', (12848, 12917), False, 'from src.senders.telegram_sender import send_telegram_message\n'), ((14869, 14883), 'sqlmodel.select', 'select', (['DBTeam'], {}), '(DBTeam)\n', (14875, 14883), False, 'from sqlmodel import or_, select\n'), ((14983, 14999), 'sqlmodel.select', 'select', (['DBLeague'], {}), '(DBLeague)\n', (14989, 14999), False, 'from sqlmodel import or_, select\n'), ((7771, 7787), 'datetime.datetime.today', 
'datetime.today', ([], {}), '()\n', (7785, 7787), False, 'from datetime import datetime\n'), ((11932, 11948), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (11946, 11948), False, 'from datetime import datetime\n')] |
from datetime import datetime
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
from uuid import UUID
from fastapi_pagination.ext.async_sqlmodel import paginate
from fastapi_pagination import Params, Page
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from sqlmodel import SQLModel, select, func
from sqlalchemy.orm import selectinload
from sqlmodel.ext.asyncio.session import AsyncSession
from sqlmodel.sql.expression import Select, SelectOfScalar
ModelType = TypeVar("ModelType", bound=SQLModel)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
SchemaType = TypeVar("SchemaType", bound=BaseModel)
T = TypeVar("T", bound=SQLModel)
class CRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
    def __init__(self, model: Type[ModelType]):
        """
        CRUD object with default methods to Create, Read, Update, Delete (CRUD).

        **Parameters**

        * `model`: A SQLAlchemy model class
        * `schema`: A Pydantic model (schema) class
        """
        self.model = model

    async def get(
        self, db_session: AsyncSession, *, id: Union[UUID, str]
    ) -> Optional[ModelType]:
        """Fetch one row by primary key, eagerly loading all relationships."""
        statement = (
            select(self.model)
            .where(self.model.id == id)
            .options(selectinload('*'))
        )
        response = await db_session.exec(statement)
        return response.first()

    async def get_by_ids(
        self, db_session: AsyncSession, list_ids: List[Union[UUID, str]],
    ) -> Optional[List[ModelType]]:
        """Fetch all rows whose id is contained in ``list_ids``."""
        response = await db_session.exec(
            select(self.model).where(self.model.id.in_(list_ids))
        )
        return response.all()

    async def get_count(
        self, db_session: AsyncSession
    ) -> Optional[ModelType]:
        """Return the total row count for the model."""
        response = await db_session.exec(
            select(func.count()).select_from(select(self.model).subquery())
        )
        return response.one()

    async def get_multi(
        self, db_session: AsyncSession, *, skip: int = 0, limit: int = 100
    ) -> List[ModelType]:
        """Fetch a slice of rows, ordered by id (offset/limit pagination)."""
        response = await db_session.exec(
            select(self.model).offset(skip).limit(limit).order_by(self.model.id)
        )
        return response.all()

    async def get_multi_paginated(
        self,
        db_session: AsyncSession,
        *,
        params: Optional[Params] = Params(),
        query: Optional[Union[T, Select[T], SelectOfScalar[T]]] = None,
    ) -> Page[ModelType]:
        """Return a fastapi-pagination page for ``query`` (whole model by default).

        NOTE(review): ``params=Params()`` is evaluated once at import time; it
        is presumably immutable, but confirm before relying on it.
        """
        # FIX: was ``query == None`` -- on a SQLAlchemy Select/column object
        # ``==`` builds a SQL expression whose truth value raises; identity
        # comparison with ``is None`` is both correct and PEP 8 idiomatic.
        if query is None:
            query = self.model
        return await paginate(db_session, query, params)

    async def create(
        self,
        db_session: AsyncSession,
        *,
        obj_in: Union[CreateSchemaType, ModelType],
        created_by_id: Optional[Union[UUID, str]] = None,
    ) -> ModelType:
        """Insert a new row built from ``obj_in``; stamps created/updated times."""
        db_obj = self.model.from_orm(obj_in)  # type: ignore
        db_obj.created_at = datetime.utcnow()
        db_obj.updated_at = datetime.utcnow()
        if created_by_id:
            db_obj.created_by_id = created_by_id
        db_session.add(db_obj)
        await db_session.commit()
        await db_session.refresh(db_obj)
        return db_obj

    async def update(
        self,
        db_session: AsyncSession,
        *,
        obj_current: ModelType,
        obj_new: Union[UpdateSchemaType, Dict[str, Any], ModelType]
    ) -> ModelType:
        """Apply the fields present in ``obj_new`` onto ``obj_current`` and persist."""
        obj_data = jsonable_encoder(obj_current)
        if isinstance(obj_new, dict):
            update_data = obj_new
        else:
            # exclude_unset: only copy values the caller actually sent.
            update_data = obj_new.dict(exclude_unset=True)
        for field in obj_data:
            if field in update_data:
                setattr(obj_current, field, update_data[field])
        # FIX: updated_at was refreshed only when the caller happened to send
        # an "updated_at" key (whose value was then discarded anyway); stamp
        # it on every update, mirroring create().
        obj_current.updated_at = datetime.utcnow()
        db_session.add(obj_current)
        await db_session.commit()
        await db_session.refresh(obj_current)
        return obj_current

    async def remove(
        self, db_session: AsyncSession, *, id: Union[UUID, str]
    ) -> ModelType:
        """Delete the row with the given id and return it (raises if absent)."""
        response = await db_session.exec(select(self.model).where(self.model.id == id))
        obj = response.one()
        await db_session.delete(obj)
        await db_session.commit()
        return obj
| [
"sqlmodel.select",
"sqlmodel.func.count"
] | [((518, 554), 'typing.TypeVar', 'TypeVar', (['"""ModelType"""'], {'bound': 'SQLModel'}), "('ModelType', bound=SQLModel)\n", (525, 554), False, 'from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union\n'), ((574, 618), 'typing.TypeVar', 'TypeVar', (['"""CreateSchemaType"""'], {'bound': 'BaseModel'}), "('CreateSchemaType', bound=BaseModel)\n", (581, 618), False, 'from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union\n'), ((638, 682), 'typing.TypeVar', 'TypeVar', (['"""UpdateSchemaType"""'], {'bound': 'BaseModel'}), "('UpdateSchemaType', bound=BaseModel)\n", (645, 682), False, 'from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union\n'), ((696, 734), 'typing.TypeVar', 'TypeVar', (['"""SchemaType"""'], {'bound': 'BaseModel'}), "('SchemaType', bound=BaseModel)\n", (703, 734), False, 'from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union\n'), ((739, 767), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': 'SQLModel'}), "('T', bound=SQLModel)\n", (746, 767), False, 'from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union\n'), ((2277, 2285), 'fastapi_pagination.Params', 'Params', ([], {}), '()\n', (2283, 2285), False, 'from fastapi_pagination import Params, Page\n'), ((2759, 2776), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2774, 2776), False, 'from datetime import datetime\n'), ((2805, 2822), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2820, 2822), False, 'from datetime import datetime\n'), ((3257, 3286), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['obj_current'], {}), '(obj_current)\n', (3273, 3286), False, 'from fastapi.encoders import jsonable_encoder\n'), ((2454, 2489), 'fastapi_pagination.ext.async_sqlmodel.paginate', 'paginate', (['db_session', 'query', 'params'], {}), '(db_session, query, params)\n', (2462, 2489), False, 'from fastapi_pagination.ext.async_sqlmodel import paginate\n'), ((1349, 
1366), 'sqlalchemy.orm.selectinload', 'selectinload', (['"""*"""'], {}), "('*')\n", (1361, 1366), False, 'from sqlalchemy.orm import selectinload\n'), ((3713, 3730), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3728, 3730), False, 'from datetime import datetime\n'), ((1565, 1583), 'sqlmodel.select', 'select', (['self.model'], {}), '(self.model)\n', (1571, 1583), False, 'from sqlmodel import SQLModel, select, func\n'), ((4024, 4042), 'sqlmodel.select', 'select', (['self.model'], {}), '(self.model)\n', (4030, 4042), False, 'from sqlmodel import SQLModel, select, func\n'), ((1793, 1805), 'sqlmodel.func.count', 'func.count', ([], {}), '()\n', (1803, 1805), False, 'from sqlmodel import SQLModel, select, func\n'), ((1819, 1837), 'sqlmodel.select', 'select', (['self.model'], {}), '(self.model)\n', (1825, 1837), False, 'from sqlmodel import SQLModel, select, func\n'), ((1295, 1313), 'sqlmodel.select', 'select', (['self.model'], {}), '(self.model)\n', (1301, 1313), False, 'from sqlmodel import SQLModel, select, func\n'), ((2062, 2080), 'sqlmodel.select', 'select', (['self.model'], {}), '(self.model)\n', (2068, 2080), False, 'from sqlmodel import SQLModel, select, func\n')] |
"""v1-messages
Revision ID: b01986f67aa3
Revises: <KEY>
Create Date: 2022-06-01 16:00:25.954662
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "c9b007919a5d"
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``message`` table plus its tenant/contact indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "message",
        # Server-generated UUID primary key.
        # NOTE(review): gen_random_uuid() is built in from PostgreSQL 13;
        # earlier versions need the pgcrypto extension — confirm target version.
        sa.Column(
            "message_id",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("gen_random_uuid()"),
            nullable=False,
        ),
        sa.Column("tags", postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column("sent_time", postgresql.TIMESTAMP(), nullable=True),
        # Audit timestamps default to the database clock.
        sa.Column(
            "created_at",
            postgresql.TIMESTAMP(),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            postgresql.TIMESTAMP(),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("tenant_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("contact_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("role", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("deleted", sa.Boolean(), nullable=False),
        sa.Column("content", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column(
            "revocation_comment", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("state", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        # Messages belong to exactly one contact and one tenant.
        sa.ForeignKeyConstraint(
            ["contact_id"],
            ["contact.contact_id"],
        ),
        sa.ForeignKeyConstraint(
            ["tenant_id"],
            ["tenant.id"],
        ),
        sa.PrimaryKeyConstraint("message_id"),
    )
    # Non-unique lookup indexes on the two foreign keys.
    op.create_index(
        op.f("ix_message_contact_id"), "message", ["contact_id"], unique=False
    )
    op.create_index(
        op.f("ix_message_tenant_id"), "message", ["tenant_id"], unique=False
    )
    # ### end Alembic commands ###
def downgrade():
    """Reverse the v1-messages migration: drop both indexes, then the table."""
    # Indexes are removed before the table that owns them.
    for index_name in ("ix_message_tenant_id", "ix_message_contact_id"):
        op.drop_index(op.f(index_name), table_name="message")
    op.drop_table("message")
| [
"sqlmodel.sql.sqltypes.AutoString",
"sqlmodel.sql.sqltypes.GUID"
] | [((2506, 2530), 'alembic.op.drop_table', 'op.drop_table', (['"""message"""'], {}), "('message')\n", (2519, 2530), False, 'from alembic import op\n'), ((1780, 1843), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['contact_id']", "['contact.contact_id']"], {}), "(['contact_id'], ['contact.contact_id'])\n", (1803, 1843), True, 'import sqlalchemy as sa\n'), ((1888, 1941), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['tenant_id']", "['tenant.id']"], {}), "(['tenant_id'], ['tenant.id'])\n", (1911, 1941), True, 'import sqlalchemy as sa\n'), ((1986, 2023), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""message_id"""'], {}), "('message_id')\n", (2009, 2023), True, 'import sqlalchemy as sa\n'), ((2060, 2089), 'alembic.op.f', 'op.f', (['"""ix_message_contact_id"""'], {}), "('ix_message_contact_id')\n", (2064, 2089), False, 'from alembic import op\n'), ((2166, 2194), 'alembic.op.f', 'op.f', (['"""ix_message_tenant_id"""'], {}), "('ix_message_tenant_id')\n", (2170, 2194), False, 'from alembic import op\n'), ((2379, 2407), 'alembic.op.f', 'op.f', (['"""ix_message_tenant_id"""'], {}), "('ix_message_tenant_id')\n", (2383, 2407), False, 'from alembic import op\n'), ((2449, 2478), 'alembic.op.f', 'op.f', (['"""ix_message_contact_id"""'], {}), "('ix_message_contact_id')\n", (2453, 2478), False, 'from alembic import op\n'), ((519, 548), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (534, 548), False, 'from sqlalchemy.dialects import postgresql\n'), ((750, 772), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (770, 772), False, 'from sqlalchemy.dialects import postgresql\n'), ((847, 869), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', ([], {}), '()\n', (867, 869), False, 'from sqlalchemy.dialects import postgresql\n'), ((1012, 1034), 'sqlalchemy.dialects.postgresql.TIMESTAMP', 'postgresql.TIMESTAMP', 
([], {}), '()\n', (1032, 1034), False, 'from sqlalchemy.dialects import postgresql\n'), ((1151, 1179), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1177, 1179), False, 'import sqlmodel\n'), ((1230, 1258), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1256, 1258), False, 'import sqlmodel\n'), ((1305, 1339), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1337, 1339), False, 'import sqlmodel\n'), ((1384, 1418), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1416, 1418), False, 'import sqlmodel\n'), ((1466, 1478), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1476, 1478), True, 'import sqlalchemy as sa\n'), ((1526, 1560), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1558, 1560), False, 'import sqlmodel\n'), ((1631, 1665), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1663, 1665), False, 'import sqlmodel\n'), ((1719, 1753), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1751, 1753), False, 'import sqlmodel\n'), ((577, 605), 'sqlalchemy.text', 'sa.text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (584, 605), True, 'import sqlalchemy as sa\n'), ((689, 700), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (698, 700), True, 'import sqlalchemy as sa\n'), ((898, 914), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (905, 914), True, 'import sqlalchemy as sa\n'), ((1063, 1079), 'sqlalchemy.text', 'sa.text', (['"""now()"""'], {}), "('now()')\n", (1070, 1079), True, 'import sqlalchemy as sa\n')] |
# Lettura dati con SQLModel, all, first, one; update; delete
# https://sqlmodel.tiangolo.com/tutorial/select/ e seguito
from typing import Optional
from sqlmodel import Field, SQLModel, Session, create_engine, select
class Tag(SQLModel, table=True):
    """A label that can be attached to products (mapped to the ``tag`` table)."""
    # None until the database assigns the autoincremented primary key.
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class ProductType(SQLModel, table=True):
    """A product category (mapped to the ``producttype`` table)."""
    # None until the database assigns the autoincremented primary key.
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
# Local SQLite file next to the script; echo=True logs every emitted SQL statement.
sqlite_file_name = "database.db"
sqlite_url = f"sqlite:///{sqlite_file_name}"
engine = create_engine(sqlite_url, echo=True)
def create_db_and_tables():
    """Create every table registered on SQLModel.metadata in the SQLite database."""
    SQLModel.metadata.create_all(engine)
def create_entities():
    """Insert two tags and two product types, then demonstrate refresh semantics."""
    offer_tag = Tag(name="Offerta")
    mayo_tag = Tag(name="Con Maionese")
    sandwich_type = ProductType(name="panino")
    drink_type = ProductType(name="bibita")
    with Session(engine) as session:
        session.add_all([offer_tag, mayo_tag, sandwich_type, drink_type])
        session.commit()
        print("After committing the session")
        print("Tag 1:", offer_tag)
        # Printing the expired object does not show refreshed data.
        print("Product Type 1:", sandwich_type)
        # Attribute access triggers an implicit refresh from the database.
        print("Product Type 1:", sandwich_type.name)
        # Explicit refresh before the session closes.
        session.refresh(drink_type)
        session.refresh(mayo_tag)
        print("Product Type 2:", drink_type)
    print("After the session closes")
    print("Tag 2:", mayo_tag)
def select_product_types():
    """Print every ProductType row in the database."""
    with Session(engine) as session:
        for row in session.exec(select(ProductType)):
            print("product_type:", row)
def select_product_type_panino():
    """Print the ProductType rows whose name is exactly 'panino'."""
    with Session(engine) as session:
        query = select(ProductType).where(ProductType.name == "panino")
        for row in session.exec(query):
            print("panino:", row)
def select_first_row_tag():
    """Print the first Tag named 'Offerta' (None when no row matches)."""
    with Session(engine) as session:
        first_match = session.exec(select(Tag).where(Tag.name == "Offerta")).first()
        print("first:", first_match)
def select_all_tags():
    """Fetch every Tag row into a list and print it."""
    with Session(engine) as session:
        rows = session.exec(select(Tag)).all()
        print(rows)
def select_four_tags():
    """Print at most the first four Tag rows."""
    with Session(engine) as session:
        limited = select(Tag).limit(4)
        print(session.exec(limited).all())
def select_next_four_tags():
    """Print at most four Tag rows after skipping the first four (page two)."""
    with Session(engine) as session:
        page_two = select(Tag).offset(4).limit(4)
        print(session.exec(page_two).all())
def update_tag():
    """Rename the 'Con Maionese' tag to '<NAME>' and persist the change.

    Fix: ``results.first()`` may return None when no row matches; the
    original then crashed with AttributeError on ``mayo.name``. Now the
    function prints the (None) result and returns without updating.
    """
    with Session(engine) as session:
        statement = select(Tag).where(Tag.name == "Con Maionese")
        mayo = session.exec(statement).first()
        print("Tag:", mayo)
        if mayo is None:
            # Nothing to rename.
            return
        mayo.name = "<NAME>"
        session.add(mayo)
        session.commit()
        # Reload database state before printing.
        session.refresh(mayo)
        print(mayo)
def delete_tag():
    """Delete the '<NAME>' tag if present, then verify it is gone.

    Fix: when no row matches, ``first()`` returns None and the original
    crashed calling ``session.delete(None)``. The delete is now skipped
    in that case; the verification query still runs.
    """
    with Session(engine) as session:
        statement = select(Tag).where(Tag.name == "<NAME>")
        no_mayo = session.exec(statement).first()
        print("no_mayo: ", no_mayo)
        if no_mayo is not None:
            session.delete(no_mayo)
            session.commit()
            print("Deleted:", no_mayo)
        # Re-query to confirm the row no longer exists.
        no_mayo = session.exec(select(Tag).where(Tag.name == "<NAME>")).first()
        if no_mayo is None:
            print("There's no no_mayo")
def main():
    """Entry point: create the schema, insert demo data, run the delete demo."""
    create_db_and_tables()
    create_entities()
    # The other demo steps are disabled; enable one at a time to explore.
    # select_product_types()
    # select_product_type_panino()
    # select_first_row_tag()
    # select_all_tags()
    # select_four_tags()
    # select_next_four_tags()
    # update_tag()
    delete_tag()
if __name__ == "__main__":
    main()
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.select",
"sqlmodel.Field"
] | [((536, 572), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (549, 572), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((276, 313), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (281, 313), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((395, 432), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (400, 432), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((607, 643), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (635, 643), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((850, 865), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (857, 865), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((1536, 1551), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1543, 1551), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((1584, 1603), 'sqlmodel.select', 'select', (['ProductType'], {}), '(ProductType)\n', (1590, 1603), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((1777, 1792), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1784, 1792), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2042, 2057), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2049, 2057), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2266, 2281), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2273, 2281), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2314, 2325), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', 
(2320, 2325), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2452, 2467), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2459, 2467), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2652, 2667), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2659, 2667), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2851, 2866), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2858, 2866), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((3236, 3251), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3243, 3251), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((1825, 1844), 'sqlmodel.select', 'select', (['ProductType'], {}), '(ProductType)\n', (1831, 1844), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2090, 2101), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (2096, 2101), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2500, 2511), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (2506, 2511), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2899, 2910), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (2905, 2910), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((3284, 3295), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (3290, 3295), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((3548, 3559), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (3554, 3559), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n'), ((2700, 2711), 'sqlmodel.select', 'select', (['Tag'], {}), '(Tag)\n', (2706, 2711), False, 'from sqlmodel import Field, SQLModel, Session, create_engine, select\n')] |
from datetime import datetime
from os import getenv
from typing import Optional
from fastapi import FastAPI
import strawberry
from strawberry.asgi import GraphQL
from sqlmodel import create_engine, SQLModel
from api.models import PostQL
from api.crud import create_post, get_posts
ENGINE = create_engine(getenv("DATABASE_URL"))
@strawberry.type
class Query:
    """GraphQL root query type."""
    @strawberry.field
    def post(self, slug: Optional[str] = None) -> list[PostQL]:
        # Delegates to api.crud.get_posts; slug presumably narrows the result
        # to one post when given — confirm against the crud implementation.
        return get_posts(ENGINE, slug)
@strawberry.type
class Mutation:
    """GraphQL root mutation type."""
    @strawberry.field
    def add_post(self, slug: str, title: str, content: str, published: bool) -> PostQL:
        # Builds the post object here and delegates persistence to api.crud.create_post.
        return create_post(
            ENGINE,
            PostQL(
                slug=slug,
                title=title,
                content=content,
                published=published,
                # NOTE(review): datetime.now() is naive local time — consider UTC.
                published_at=datetime.now(),
            ),
        )
# Assemble the executable GraphQL schema from the root types above.
schema = strawberry.Schema(query=Query, mutation=Mutation)
# Create all SQLModel tables at import time.
SQLModel.metadata.create_all(ENGINE)
graphql_app = GraphQL(schema)
app = FastAPI()
# Serve GraphQL over both plain HTTP and WebSocket at the same path.
app.add_route("/graphql", graphql_app)
app.add_websocket_route("/graphql", graphql_app)
| [
"sqlmodel.SQLModel.metadata.create_all"
] | [((908, 957), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query', 'mutation': 'Mutation'}), '(query=Query, mutation=Mutation)\n', (925, 957), False, 'import strawberry\n'), ((959, 995), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['ENGINE'], {}), '(ENGINE)\n', (987, 995), False, 'from sqlmodel import create_engine, SQLModel\n'), ((1011, 1026), 'strawberry.asgi.GraphQL', 'GraphQL', (['schema'], {}), '(schema)\n', (1018, 1026), False, 'from strawberry.asgi import GraphQL\n'), ((1034, 1043), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (1041, 1043), False, 'from fastapi import FastAPI\n'), ((307, 329), 'os.getenv', 'getenv', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (313, 329), False, 'from os import getenv\n'), ((464, 487), 'api.crud.get_posts', 'get_posts', (['ENGINE', 'slug'], {}), '(ENGINE, slug)\n', (473, 487), False, 'from api.crud import create_post, get_posts\n'), ((856, 870), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (868, 870), False, 'from datetime import datetime\n')] |
from typing import List, Dict
from fastapi import APIRouter, Depends, HTTPException, Query
from sqlmodel import Session, select
from app import models
from app.api import deps
router = APIRouter()
@router.post("/", response_model=models.TeamRead)
def create_team(
    *, session: Session = Depends(deps.get_session), team: models.TeamCreate
) -> models.Team:
    """Create a team from the request payload and return the stored row."""
    db_team = models.Team.from_orm(team)
    session.add(db_team)
    session.commit()
    # Refresh to load database-generated fields (e.g. the primary key).
    session.refresh(db_team)
    return db_team
@router.get("/", response_model=List[models.TeamRead])
def read_teams(
    *,
    session: Session = Depends(deps.get_session),
    offset: int = 0,
    limit: int = Query(default=100, lte=100),
) -> List[models.Team]:
    """Return a page of teams controlled by ``offset`` and ``limit`` (max 100)."""
    statement = select(models.Team).offset(offset).limit(limit)
    return session.exec(statement).all()
@router.get("/{team_id}", response_model=models.TeamReadWithHeroes)
def read_team(
    *, team_id: int, session: Session = Depends(deps.get_session)
) -> models.Team:
    """Return one team (with its heroes) or raise 404 when the id is unknown."""
    found = session.get(models.Team, team_id)
    if found is None:
        raise HTTPException(status_code=404, detail="Team not found")
    return found
@router.patch("/{team_id}", response_model=models.TeamRead)
def update_team(
    *,
    session: Session = Depends(deps.get_session),
    team_id: int,
    team: models.TeamUpdate,
) -> models.Team:
    """Apply only the fields the client explicitly set to an existing team."""
    existing = session.get(models.Team, team_id)
    if existing is None:
        raise HTTPException(status_code=404, detail="Team not found")
    # exclude_unset keeps this a true PATCH: untouched fields are preserved.
    for field, value in team.dict(exclude_unset=True).items():
        setattr(existing, field, value)
    session.add(existing)
    session.commit()
    session.refresh(existing)
    return existing
@router.delete("/teams/{team_id}")
def delete_team(
    *, session: Session = Depends(deps.get_session), team_id: int
) -> Dict[str, bool]:
    """Delete a team by id; raise 404 when it does not exist."""
    target = session.get(models.Team, team_id)
    if target is None:
        raise HTTPException(status_code=404, detail="Team not found")
    session.delete(target)
    session.commit()
    return {"ok": True}
| [
"sqlmodel.select"
] | [((198, 209), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (207, 209), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((310, 335), 'fastapi.Depends', 'Depends', (['deps.get_session'], {}), '(deps.get_session)\n', (317, 335), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((395, 421), 'app.models.Team.from_orm', 'models.Team.from_orm', (['team'], {}), '(team)\n', (415, 421), False, 'from app import models\n'), ((629, 654), 'fastapi.Depends', 'Depends', (['deps.get_session'], {}), '(deps.get_session)\n', (636, 654), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((696, 723), 'fastapi.Query', 'Query', ([], {'default': '(100)', 'lte': '(100)'}), '(default=100, lte=100)\n', (701, 723), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((979, 1004), 'fastapi.Depends', 'Depends', (['deps.get_session'], {}), '(deps.get_session)\n', (986, 1004), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1291, 1316), 'fastapi.Depends', 'Depends', (['deps.get_session'], {}), '(deps.get_session)\n', (1298, 1316), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1837, 1862), 'fastapi.Depends', 'Depends', (['deps.get_session'], {}), '(deps.get_session)\n', (1844, 1862), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1103, 1158), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Team not found"""'}), "(status_code=404, detail='Team not found')\n", (1116, 1158), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1471, 1526), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Team not found"""'}), "(status_code=404, detail='Team not found')\n", (1484, 1526), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1979, 2034), 'fastapi.HTTPException', 'HTTPException', ([], 
{'status_code': '(404)', 'detail': '"""Team not found"""'}), "(status_code=404, detail='Team not found')\n", (1992, 2034), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((776, 795), 'sqlmodel.select', 'select', (['models.Team'], {}), '(models.Team)\n', (782, 795), False, 'from sqlmodel import Session, select\n')] |
from datetime import datetime
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Disease(SQLModel, table=True):
    """Lookup table of individual diseases."""
    # None until the database assigns the primary key.
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class DiseaseGroup(SQLModel, table=True):
    """Lookup table of disease groupings."""
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
class DiseaseGroupMap(SQLModel, table=True):
    """Many-to-many link between DiseaseGroup and Disease."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # NOTE(review): plain ints without foreign_key= — no FK constraint is enforced.
    disease_group_id: int
    disease_id: int
class PatientDisease(SQLModel, table=True):
    """Disease record attached to a patient, with audit columns."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # NOTE(review): plain ints without foreign_key= — referential integrity
    # is not enforced at the database level.
    patient_id: int
    doctor_id: Optional[int] = None
    # Disease-category flags.
    cleft: bool
    craniofacial: bool
    # NOTE(review): "syndronic" looks like a typo for "syndromic";
    # renaming would change the schema, so it is left as-is.
    syndronic: bool
    non: bool
    comorbidity: bool
    # Audit columns; no defaults, so callers must supply the timestamps.
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class PatientDiseaseList(SQLModel, table=True):
    """Individual disease entry belonging to a PatientDisease record."""
    id: Optional[int] = Field(default=None, primary_key=True)
    patient_disease_id: int
    disease_id: int
    detail: str
    # Audit columns; timestamps supplied by the caller.
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class PatientDiseaseCleft(SQLModel, table=True):
    """Cleft-specific details for a PatientDisease record."""
    id: Optional[int] = Field(default=None, primary_key=True)
    patient_disease_id: int
    # Free-form cleft classification fields; valid values not constrained here.
    cleft_type: str
    cleft_lateral: str
    cleft_side: str
    cleft_complete: str
    # Audit columns.
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class PatientDiseaseCraniofacial(SQLModel, table=True):
    """Craniofacial-specific details for a PatientDisease record."""
    id: Optional[int] = Field(default=None, primary_key=True)
    patient_disease_id: int
    micrognathia_detail: str
    # Tessier-style craniofacial cleft counts per region — TODO confirm semantics.
    craniofacial_cleft_right: int
    craniofacial_cleft_medial: int
    craniofacial_cleft_left: int
    # FEEM findings: flag, side, and (for mixed) free-form detail.
    feem_nasofrontal: bool
    feem_nasofrontal_side: str
    feem_nasoethmoidal: bool
    feem_nasoethmoidal_side: str
    feem_mix: bool
    feem_mix_side: str
    feem_mix_detail: str
    craniofacial_microsomia_side: str
    craniofacial_microsomia_detail: str
    microtia_side: str
    microtia_detail: str
    craniosynostosis_detail: str
    frontonasal_dysplasia_detail: str
    # Audit columns.
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class PatientDiseaseOther(SQLModel, table=True):
    """Catch-all disease entry for a PatientDisease record, tied to a group."""
    id: Optional[int] = Field(default=None, primary_key=True)
    patient_disease_id: int
    disease_group_id: int
    name: str
    detail: str
    # Audit columns.
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
| [
"sqlmodel.Field"
] | [((136, 147), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (145, 147), False, 'from fastapi import APIRouter\n'), ((211, 248), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (216, 248), False, 'from sqlmodel import Field, SQLModel\n'), ((331, 368), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (336, 368), False, 'from sqlmodel import Field, SQLModel\n'), ((454, 491), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (459, 491), False, 'from sqlmodel import Field, SQLModel\n'), ((608, 645), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (613, 645), False, 'from sqlmodel import Field, SQLModel\n'), ((978, 1015), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (983, 1015), False, 'from sqlmodel import Field, SQLModel\n'), ((1262, 1299), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1267, 1299), False, 'from sqlmodel import Field, SQLModel\n'), ((1604, 1641), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1609, 1641), False, 'from sqlmodel import Field, SQLModel\n'), ((2367, 2404), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2372, 2404), False, 'from sqlmodel import Field, SQLModel\n')] |
from worker import writer, scanTemplates
from utils.manager import TrainManager
import time
from db import engine
from sqlmodel import Session
from models import Logrun,Usefuel, Template
import cachetool
import inspect
from disclog import postLog,postGeneric
def filler(posrr,posm) -> str:
    """Polling daemon loop: fetch new actions and hand batches to the writer.

    ``posrr``/``posm`` seed the manager's starting action sequence numbers.
    NOTE(review): ``run`` is never set to False, so the loop only ends if an
    exception escapes outside the try block — the return below is
    effectively unreachable in normal operation.
    """
    start=time.time()
    manager = TrainManager(posrr=posrr,
posm=posm)
    run = True
    postGeneric([("info","API init success! Logger started.")],"Startup")
    while run:
        try:
            manager.fetch()
            if len(manager.out) > 0:
                # Hand the accumulated batch to the writer task
                # (.delay — presumably a Celery task) and reset the buffer.
                writer.delay(manager.out,"action")
                manager.out = []
        except Exception as e:
            # Log the failure with caller-frame info and keep polling.
            postLog(e,"error",f"{inspect.stack()[0][3]}:{inspect.stack()[0][2]}")
        time.sleep(30)
    return f"{(time.time()-start)} total time"
if __name__ == "__main__":
    # Retry forever: each pass re-reads the DB high-water marks, seeds the
    # caches/templates on first run, then blocks inside filler().
    # NOTE(review): ``startup`` is never set to False, so failures retry indefinitely.
    startup = True
    while startup:
        try:
            with Session(engine) as session:
                # Latest rows tell us where each stream left off.
                toptemp = session.query(Template).order_by(Template.template_id.desc()).first()
                posrrr = session.query(Logrun).order_by(Logrun.action_seq.desc()).first()
                posmr = session.query(Usefuel).order_by(Usefuel.action_seq.desc()).first()
            if toptemp:
                print("skipping init")
            else:
                # First run: seed cache timestamps (ms since epoch — TODO confirm
                # the significance of this particular value) and scan templates.
                cachetool.set_cache(f"last_templates",1622316652000)
                cachetool.set_cache(f"last_assets",1622316652000)
                scanTemplates()
                time.sleep(1200)
            # Fall back to hard-coded seed sequence numbers when tables are empty.
            if posrrr: posrr = posrrr.action_seq
            else: posrr = 1642127
            if posmr: posm = posmr.action_seq
            else: posm = 981927
            filler(posrr,posm)
        except Exception as e:
            postLog(e,"warn",f"{inspect.stack()[0][3]}:{inspect.stack()[0][2]}")
            time.sleep(30)
"sqlmodel.Session"
] | [((301, 312), 'time.time', 'time.time', ([], {}), '()\n', (310, 312), False, 'import time\n'), ((332, 368), 'utils.manager.TrainManager', 'TrainManager', ([], {'posrr': 'posrr', 'posm': 'posm'}), '(posrr=posrr, posm=posm)\n', (344, 368), False, 'from utils.manager import TrainManager\n'), ((416, 487), 'disclog.postGeneric', 'postGeneric', (["[('info', 'API init success! Logger started.')]", '"""Startup"""'], {}), "([('info', 'API init success! Logger started.')], 'Startup')\n", (427, 487), False, 'from disclog import postLog, postGeneric\n'), ((599, 634), 'worker.writer.delay', 'writer.delay', (['manager.out', '"""action"""'], {}), "(manager.out, 'action')\n", (611, 634), False, 'from worker import writer, scanTemplates\n'), ((793, 807), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (803, 807), False, 'import time\n'), ((824, 835), 'time.time', 'time.time', ([], {}), '()\n', (833, 835), False, 'import time\n'), ((954, 969), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (961, 969), False, 'from sqlmodel import Session\n'), ((1386, 1439), 'cachetool.set_cache', 'cachetool.set_cache', (['f"""last_templates"""', '(1622316652000)'], {}), "(f'last_templates', 1622316652000)\n", (1405, 1439), False, 'import cachetool\n'), ((1455, 1505), 'cachetool.set_cache', 'cachetool.set_cache', (['f"""last_assets"""', '(1622316652000)'], {}), "(f'last_assets', 1622316652000)\n", (1474, 1505), False, 'import cachetool\n'), ((1521, 1536), 'worker.scanTemplates', 'scanTemplates', ([], {}), '()\n', (1534, 1536), False, 'from worker import writer, scanTemplates\n'), ((1553, 1569), 'time.sleep', 'time.sleep', (['(1200)'], {}), '(1200)\n', (1563, 1569), False, 'import time\n'), ((1932, 1946), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (1942, 1946), False, 'import time\n'), ((1058, 1085), 'models.Template.template_id.desc', 'Template.template_id.desc', ([], {}), '()\n', (1083, 1085), False, 'from models import Logrun, Usefuel, Template\n'), ((1151, 
1175), 'models.Logrun.action_seq.desc', 'Logrun.action_seq.desc', ([], {}), '()\n', (1173, 1175), False, 'from models import Logrun, Usefuel, Template\n'), ((1241, 1266), 'models.Usefuel.action_seq.desc', 'Usefuel.action_seq.desc', ([], {}), '()\n', (1264, 1266), False, 'from models import Logrun, Usefuel, Template\n'), ((732, 747), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (745, 747), False, 'import inspect\n'), ((756, 771), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (769, 771), False, 'import inspect\n'), ((1867, 1882), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (1880, 1882), False, 'import inspect\n'), ((1891, 1906), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (1904, 1906), False, 'import inspect\n')] |
from typing import Optional
import strawberry
from sqlmodel import (
SQLModel,
Field,
create_engine,
select,
Session
)
engine = create_engine('sqlite:///database.db')
class Person(SQLModel, table=True):
    """Database table of people (name and age)."""
    # None until the database assigns the primary key.
    id: Optional[int] = Field(default=None, primary_key=True)
    nome: str
    idade: int
SQLModel.metadata.create_all(engine)
def create_app(nome: str, idade:int):
    """Persist a new Person row and return it with DB-generated fields loaded."""
    novo = Person(nome=nome, idade=idade)
    with Session(engine) as session:
        session.add(novo)
        session.commit()
        # Reload the database-assigned id before the session closes.
        session.refresh(novo)
        return novo
@strawberry.type
class Pessoa:
    """GraphQL view of a Person row."""
    id: Optional[int]
    nome: str
    idade: int
@strawberry.type
class Query:
    """GraphQL root query type."""
    @strawberry.field
    def all_pessoa(self) -> list[Pessoa]:
        # Returns every Person row; strawberry presumably maps the ORM objects
        # onto the Pessoa type by matching attribute names — confirm.
        query = select(Person)
        with Session(engine) as session:
            result = session.execute(query).scalars().all()
            return result
@strawberry.type
class Mutation:
    """GraphQL root mutation type."""
    # Field resolver delegates directly to create_app.
    create_pessoa: Pessoa = strawberry.field(resolver=create_app)
schema = strawberry.Schema(query=Query, mutation=Mutation) | [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field",
"sqlmodel.select"
] | [((150, 188), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {}), "('sqlite:///database.db')\n", (163, 188), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((320, 356), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (348, 356), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((1026, 1075), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query', 'mutation': 'Mutation'}), '(query=Query, mutation=Mutation)\n', (1043, 1075), False, 'import strawberry\n'), ((251, 288), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (256, 288), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((978, 1015), 'strawberry.field', 'strawberry.field', ([], {'resolver': 'create_app'}), '(resolver=create_app)\n', (994, 1015), False, 'import strawberry\n'), ((449, 464), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (456, 464), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((777, 791), 'sqlmodel.select', 'select', (['Person'], {}), '(Person)\n', (783, 791), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n'), ((805, 820), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (812, 820), False, 'from sqlmodel import SQLModel, Field, create_engine, select, Session\n')] |
from fastapi.exceptions import HTTPException
from sfm.models import Project, WorkItem
from sqlmodel import Session, select
from opencensus.ext.azure.log_exporter import AzureLogHandler
from sfm.config import get_settings
from sfm.logger import create_logger
from sfm.utils import (
create_project_auth_token,
hash_project_auth_token,
verify_admin_key,
)
# Settings and module logger, resolved once at import time.
app_settings = get_settings()
logger = create_logger(__name__)
def get_all(db: Session, skip: int = None, limit: int = None):
    """Return all projects ordered by id, honouring optional pagination; 404 when empty."""
    statement = select(Project).order_by(Project.id)
    statement = statement.offset(skip).limit(limit)
    projects = db.exec(statement).all()
    if not projects:
        logger.debug("Projects not found")
        raise HTTPException(status_code=404, detail="Projects not found")
    return projects
def get_by_id(db: Session, project_id: int):
    """Fetch one project by primary key; raise 404 when absent."""
    found = db.get(Project, project_id)
    if found is None:
        logger.debug("Projects not found")
        raise HTTPException(status_code=404, detail="Project not found")
    return found
def create_project(db: Session, project_data, admin_key):
"""Take data from request and create a new project in the database."""
project_name_repeat = db.exec(
select(Project).where(Project.name == project_data.name)
).first()
if project_name_repeat is not None:
logger.debug("Database entry already exists")
raise HTTPException(status_code=409, detail="Database entry already exists")
verified_admin = verify_admin_key(admin_key)
if verified_admin:
project_temp = project_data.dict()
token = create_project_auth_token()
hashed_token = hash_project_auth_token(token)
project_temp.update({"project_auth_token_hashed": hashed_token})
project_db = Project(**project_temp)
db.add(project_db)
db.commit()
else:
logger.warning("Attempted to verify as an admin with incorrect credentials")
raise HTTPException(status_code=401, detail="Credentials are incorrect")
# Check the new record
db.refresh(project_db)
new_project = db.get(Project, project_db.id)
if new_project.name == project_data.name:
return [new_project, token] # successfully created record
else:
logger.error("Project did not store correctly in database")
return False # didn't store correctly
def delete_project(db: Session, project_id, admin_key):
"""Take a project_name and remove the row from the database."""
verified_admin = verify_admin_key(admin_key)
if verified_admin:
project = db.get(Project, project_id)
if not project:
logger.debug("Project not found")
raise HTTPException(status_code=404, detail="Project not found")
for item in project.work_items:
db.delete(item)
db.delete(project)
db.commit()
else:
logger.warning("Attempted to verify as admin with incorrect credentials")
raise HTTPException(status_code=401, detail="Credentials are incorrect")
# Check our work
row = db.get(Project, project_id)
if row:
logger.error("Project did not delete correctly")
return False # Row didn't successfully delete or another one exists
else:
return True # Successful deletion
def refresh_project_key(db: Session, project_id, admin_key):
verified_admin = verify_admin_key(admin_key)
if verified_admin:
project_db = db.get(Project, project_id)
if not project_db:
logger.debug("Project with matching id not found")
raise HTTPException(
status_code=404, detail="Project with matching id not found"
)
new_token = create_project_auth_token()
hashed_token = hash_project_auth_token(new_token)
project_db.project_auth_token_hashed = hashed_token
db.add(project_db)
db.commit()
else:
logger.warning("Attempted to verify as admin with incorrect credentials")
raise HTTPException(status_code=401, detail="Credentials are incorrect")
check = db.exec(
select(Project).where(Project.project_auth_token_hashed == hashed_token)
)
if check:
return new_token
else:
logger.error("Project auth token did not update correctly")
return False
def update_project(db: Session, project_id, project_data, admin_key):
"""Take data from request and update an existing Project in the database."""
verified_admin = verify_admin_key(admin_key)
if verified_admin:
project = db.get(Project, project_id)
if not project:
logger.debug("Project not found")
raise HTTPException(status_code=404, detail="Project not found")
project_newdata = project_data.dict(exclude_unset=True, exclude_defaults=True)
for key, value in project_newdata.items():
setattr(project, key, value)
db.add(project)
db.commit()
else:
logger.warning("Attempted to verify as admin with incorrect credentials")
raise HTTPException(status_code=401, detail="Credentials are incorrect")
# return updated item
db.refresh(project)
if project:
return project # updated record
else:
logger.error("Project did not store correctly")
return False # didn't store correctly
| [
"sqlmodel.select"
] | [((382, 396), 'sfm.config.get_settings', 'get_settings', ([], {}), '()\n', (394, 396), False, 'from sfm.config import get_settings\n'), ((408, 431), 'sfm.logger.create_logger', 'create_logger', (['__name__'], {}), '(__name__)\n', (421, 431), False, 'from sfm.logger import create_logger\n'), ((1568, 1595), 'sfm.utils.verify_admin_key', 'verify_admin_key', (['admin_key'], {}), '(admin_key)\n', (1584, 1595), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((2590, 2617), 'sfm.utils.verify_admin_key', 'verify_admin_key', (['admin_key'], {}), '(admin_key)\n', (2606, 2617), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((3466, 3493), 'sfm.utils.verify_admin_key', 'verify_admin_key', (['admin_key'], {}), '(admin_key)\n', (3482, 3493), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((4589, 4616), 'sfm.utils.verify_admin_key', 'verify_admin_key', (['admin_key'], {}), '(admin_key)\n', (4605, 4616), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((730, 789), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Projects not found"""'}), "(status_code=404, detail='Projects not found')\n", (743, 789), False, 'from fastapi.exceptions import HTTPException\n'), ((1039, 1097), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Project not found"""'}), "(status_code=404, detail='Project not found')\n", (1052, 1097), False, 'from fastapi.exceptions import HTTPException\n'), ((1475, 1545), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(409)', 'detail': '"""Database entry already exists"""'}), "(status_code=409, detail='Database entry already exists')\n", (1488, 1545), False, 'from fastapi.exceptions import HTTPException\n'), ((1678, 1705), 
'sfm.utils.create_project_auth_token', 'create_project_auth_token', ([], {}), '()\n', (1703, 1705), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((1729, 1759), 'sfm.utils.hash_project_auth_token', 'hash_project_auth_token', (['token'], {}), '(token)\n', (1752, 1759), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((1854, 1877), 'sfm.models.Project', 'Project', ([], {}), '(**project_temp)\n', (1861, 1877), False, 'from sfm.models import Project, WorkItem\n'), ((2034, 2100), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Credentials are incorrect"""'}), "(status_code=401, detail='Credentials are incorrect')\n", (2047, 2100), False, 'from fastapi.exceptions import HTTPException\n'), ((3056, 3122), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Credentials are incorrect"""'}), "(status_code=401, detail='Credentials are incorrect')\n", (3069, 3122), False, 'from fastapi.exceptions import HTTPException\n'), ((3801, 3828), 'sfm.utils.create_project_auth_token', 'create_project_auth_token', ([], {}), '()\n', (3826, 3828), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((3852, 3886), 'sfm.utils.hash_project_auth_token', 'hash_project_auth_token', (['new_token'], {}), '(new_token)\n', (3875, 3886), False, 'from sfm.utils import create_project_auth_token, hash_project_auth_token, verify_admin_key\n'), ((4100, 4166), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Credentials are incorrect"""'}), "(status_code=401, detail='Credentials are incorrect')\n", (4113, 4166), False, 'from fastapi.exceptions import HTTPException\n'), ((5165, 5231), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': '"""Credentials are 
incorrect"""'}), "(status_code=401, detail='Credentials are incorrect')\n", (5178, 5231), False, 'from fastapi.exceptions import HTTPException\n'), ((2775, 2833), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Project not found"""'}), "(status_code=404, detail='Project not found')\n", (2788, 2833), False, 'from fastapi.exceptions import HTTPException\n'), ((3675, 3750), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Project with matching id not found"""'}), "(status_code=404, detail='Project with matching id not found')\n", (3688, 3750), False, 'from fastapi.exceptions import HTTPException\n'), ((4774, 4832), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Project not found"""'}), "(status_code=404, detail='Project not found')\n", (4787, 4832), False, 'from fastapi.exceptions import HTTPException\n'), ((4197, 4212), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (4203, 4212), False, 'from sqlmodel import Session, select\n'), ((1295, 1310), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (1301, 1310), False, 'from sqlmodel import Session, select\n'), ((577, 592), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (583, 592), False, 'from sqlmodel import Session, select\n')] |
from sqlmodel import Session, select
from sqlalchemy.orm import joinedload
from .models import Pessoa, Livro, engine
def create_livros(titulo: str, pessoa_id: int):
livro = Livro(titulo=titulo, pessoa_id=pessoa_id)
with Session(engine) as session:
session.add(livro)
session.commit()
session.refresh(livro)
return livro
def get_livros():
query = select(Livro).options(joinedload('*'))
with Session(engine) as session:
result = session.execute(query).scalars().unique().all()
return result
def create_pessoas(idade: int, nome: str):
person = Pessoa(nome=nome, idade=idade)
with Session(engine) as session:
session.add(person)
session.commit()
session.refresh(person)
return person
def get_pessoas(
id: int = None,
idade: int = None,
limit: int = 5,
):
query = select(Pessoa)
if id:
query = query.where(Pessoa.id == id)
if idade:
query = query.where(Pessoa.idade == idade)
if limit:
query = query.limit(limit)
with Session(engine) as session:
result = session.execute(query).scalars().all()
return result
| [
"sqlmodel.select",
"sqlmodel.Session"
] | [((878, 892), 'sqlmodel.select', 'select', (['Pessoa'], {}), '(Pessoa)\n', (884, 892), False, 'from sqlmodel import Session, select\n'), ((231, 246), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (238, 246), False, 'from sqlmodel import Session, select\n'), ((413, 428), 'sqlalchemy.orm.joinedload', 'joinedload', (['"""*"""'], {}), "('*')\n", (423, 428), False, 'from sqlalchemy.orm import joinedload\n'), ((439, 454), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (446, 454), False, 'from sqlmodel import Session, select\n'), ((650, 665), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (657, 665), False, 'from sqlmodel import Session, select\n'), ((1074, 1089), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1081, 1089), False, 'from sqlmodel import Session, select\n'), ((391, 404), 'sqlmodel.select', 'select', (['Livro'], {}), '(Livro)\n', (397, 404), False, 'from sqlmodel import Session, select\n')] |
from datetime import date
from typing import Optional, Iterator
from sqlmodel import SQLModel, Field, create_engine, Session # type: ignore[import]
from my_feed.log import logger
from app.settings import settings
# base non-table sql model
class FeedBase(SQLModel): # type: ignore[misc]
model_id: str
ftype: str # feed item type
title: str
score: Optional[float] = Field(default=None)
# more metadata
subtitle: Optional[str] = Field(default=None)
creator: Optional[str] = Field(default=None)
part: Optional[int] = Field(default=None)
subpart: Optional[int] = Field(default=None)
collection: Optional[str] = Field(default=None)
# dates
when: int
release_date: Optional[date] = Field(default=None)
# urls
image_url: Optional[str] = Field(default=None)
url: Optional[str] = Field(default=None)
# feedbase, with an ID/table
class FeedModel(FeedBase, table=True): # type: ignore
id: int = Field(index=True, primary_key=True)
# store JSON as strings, these are only used on the frontend anyways
tags: str = Field(default=r"[]") # List[str]
data: Optional[bytes] = Field(default=None) # Dict[str, Any]
feed_engine = create_engine(
settings.SQLITE_DB_PATH,
echo=settings.SQL_ECHO,
)
def init_db() -> None:
logger.info("Creating tables...")
SQLModel.metadata.create_all(feed_engine)
def get_db() -> Iterator[Session]:
with Session(feed_engine) as session:
yield session
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field"
] | [((1207, 1269), 'sqlmodel.create_engine', 'create_engine', (['settings.SQLITE_DB_PATH'], {'echo': 'settings.SQL_ECHO'}), '(settings.SQLITE_DB_PATH, echo=settings.SQL_ECHO)\n', (1220, 1269), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((388, 407), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (393, 407), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((459, 478), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (464, 478), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((508, 527), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (513, 527), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((554, 573), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (559, 573), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((603, 622), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (608, 622), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((655, 674), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (660, 674), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((737, 756), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (742, 756), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((800, 819), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (805, 819), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((845, 864), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (850, 864), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((965, 1000), 'sqlmodel.Field', 'Field', ([], {'index': '(True)', 'primary_key': '(True)'}), '(index=True, primary_key=True)\n', (970, 1000), False, 'from sqlmodel import 
SQLModel, Field, create_engine, Session\n'), ((1091, 1110), 'sqlmodel.Field', 'Field', ([], {'default': '"""[]"""'}), "(default='[]')\n", (1096, 1110), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((1153, 1172), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (1158, 1172), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((1310, 1343), 'my_feed.log.logger.info', 'logger.info', (['"""Creating tables..."""'], {}), "('Creating tables...')\n", (1321, 1343), False, 'from my_feed.log import logger\n'), ((1348, 1389), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['feed_engine'], {}), '(feed_engine)\n', (1376, 1389), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((1436, 1456), 'sqlmodel.Session', 'Session', (['feed_engine'], {}), '(feed_engine)\n', (1443, 1456), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n')] |
"""
Utility functions.
"""
import logging
import os
from functools import lru_cache
from pathlib import Path
from typing import Iterator, List, Optional
from dotenv import load_dotenv
from rich.logging import RichHandler
from sqlalchemy.engine import Engine
from sqlmodel import Session, SQLModel, create_engine
from yarl import URL
from datajunction.config import Settings
from datajunction.typing import ColumnType
def setup_logging(loglevel: str) -> None:
"""
Setup basic logging.
"""
level = getattr(logging, loglevel.upper(), None)
if not isinstance(level, int):
raise ValueError(f"Invalid log level: {loglevel}")
logformat = "[%(asctime)s] %(levelname)s: %(name)s: %(message)s"
logging.basicConfig(
level=level,
format=logformat,
datefmt="[%X]",
handlers=[RichHandler(rich_tracebacks=True)],
force=True,
)
def get_project_repository() -> Path:
"""
Return the project repository.
This is used for unit tests.
"""
return Path(__file__).parent.parent.parent
@lru_cache
def get_settings() -> Settings:
"""
Return a cached settings object.
"""
dotenv_file = os.environ.get("DOTENV_FILE", ".env")
load_dotenv(dotenv_file)
return Settings()
def get_engine() -> Engine:
"""
Create the metadata engine.
"""
settings = get_settings()
engine = create_engine(settings.index)
return engine
def create_db_and_tables() -> None:
"""
Create the database and tables.
"""
engine = get_engine()
SQLModel.metadata.create_all(engine)
def get_session() -> Iterator[Session]:
"""
Per-request session.
"""
engine = get_engine()
with Session(engine, autoflush=False) as session: # pragma: no cover
yield session
def get_name_from_path(repository: Path, path: Path) -> str:
"""
Compute the name of a node given its path and the repository path.
"""
# strip anything before the repository
relative_path = path.relative_to(repository)
if len(relative_path.parts) < 2 or relative_path.parts[0] not in {
"nodes",
"databases",
}:
raise Exception(f"Invalid path: {path}")
# remove the "nodes" directory from the path
relative_path = relative_path.relative_to(relative_path.parts[0])
# remove extension
relative_path = relative_path.with_suffix("")
# encode percent symbols and periods
encoded = (
str(relative_path)
.replace("%", "%25")
.replace(".", "%2E")
.replace(os.path.sep, ".")
)
return encoded
def get_more_specific_type(
current_type: Optional[ColumnType],
new_type: ColumnType,
) -> ColumnType:
"""
Given two types, return the most specific one.
Different databases might store the same column as different types. For example, Hive
might store timestamps as strings, while Postgres would store the same data as a
datetime.
>>> get_more_specific_type(ColumnType.STR, ColumnType.DATETIME)
<ColumnType.DATETIME: 'DATETIME'>
>>> get_more_specific_type(ColumnType.STR, ColumnType.INT)
<ColumnType.INT: 'INT'>
"""
if current_type is None:
return new_type
hierarchy = [
ColumnType.BYTES,
ColumnType.STR,
ColumnType.FLOAT,
ColumnType.INT,
ColumnType.DECIMAL,
ColumnType.BOOL,
ColumnType.DATETIME,
ColumnType.DATE,
ColumnType.TIME,
ColumnType.TIMEDELTA,
ColumnType.LIST,
ColumnType.DICT,
]
return sorted([current_type, new_type], key=hierarchy.index)[1]
def get_issue_url(
baseurl: URL = URL("https://github.com/DataJunction/datajunction/issues/new"),
title: Optional[str] = None,
body: Optional[str] = None,
labels: Optional[List[str]] = None,
) -> URL:
"""
Return the URL to file an issue on GitHub.
https://docs.github.com/en/issues/tracking-your-work-with-issues/creating-an-issue#creating-an-issue-from-a-url-query
"""
query_arguments = {
"title": title,
"body": body,
"labels": ",".join(label.strip() for label in labels) if labels else None,
}
query_arguments = {k: v for k, v in query_arguments.items() if v is not None}
return baseurl % query_arguments
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((1186, 1223), 'os.environ.get', 'os.environ.get', (['"""DOTENV_FILE"""', '""".env"""'], {}), "('DOTENV_FILE', '.env')\n", (1200, 1223), False, 'import os\n'), ((1228, 1252), 'dotenv.load_dotenv', 'load_dotenv', (['dotenv_file'], {}), '(dotenv_file)\n', (1239, 1252), False, 'from dotenv import load_dotenv\n'), ((1264, 1274), 'datajunction.config.Settings', 'Settings', ([], {}), '()\n', (1272, 1274), False, 'from datajunction.config import Settings\n'), ((1396, 1425), 'sqlmodel.create_engine', 'create_engine', (['settings.index'], {}), '(settings.index)\n', (1409, 1425), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((1565, 1601), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1593, 1601), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((3698, 3760), 'yarl.URL', 'URL', (['"""https://github.com/DataJunction/datajunction/issues/new"""'], {}), "('https://github.com/DataJunction/datajunction/issues/new')\n", (3701, 3760), False, 'from yarl import URL\n'), ((1721, 1753), 'sqlmodel.Session', 'Session', (['engine'], {'autoflush': '(False)'}), '(engine, autoflush=False)\n', (1728, 1753), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((836, 869), 'rich.logging.RichHandler', 'RichHandler', ([], {'rich_tracebacks': '(True)'}), '(rich_tracebacks=True)\n', (847, 869), False, 'from rich.logging import RichHandler\n'), ((1034, 1048), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1038, 1048), False, 'from pathlib import Path\n')] |
import textwrap
from typing import List, Optional
import pytest
from sqlmodel import Field, Relationship, SQLModel
import strawberry
class City(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field()
population: int = Field()
class Manager(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field()
managed_team: "Team" = Relationship(back_populates="manager")
class Team(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field(index=True)
headquarters: Optional[str] = Field(default=None)
manager_id: int = Field(nullable=False, foreign_key="manager.id")
manager: Manager = Relationship(back_populates="managed_team")
heroes: List["Hero"] = Relationship(back_populates="team")
class Hero(SQLModel, table=True):
id: Optional[int] = Field(primary_key=True, default=None)
name: str = Field(index=True)
secret_name: str
age: Optional[int] = Field(default=None, index=True)
team_id: Optional[int] = Field(default=None, foreign_key="team.id")
team: Optional[Team] = Relationship(back_populates="heroes")
@pytest.fixture
def clear_types():
for model in (Team, Hero, Manager, City):
if hasattr(model, "_strawberry_type"):
delattr(model, "_strawberry_type")
def test_all_fields(clear_types):
@strawberry.experimental.pydantic.type(City, all_fields=True)
class CityType:
pass
@strawberry.type
class Query:
@strawberry.field
def city(self) -> CityType:
return CityType(id=1, name="Gotham", population=100000)
schema = strawberry.Schema(query=Query)
expected_schema = """
type CityType {
name: String!
population: Int!
id: Int
}
type Query {
city: CityType!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ city { name } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["city"]["name"] == "Gotham"
def test_basic_type_field_list(clear_types):
@strawberry.experimental.pydantic.type(Team, fields=["name", "headquarters"])
class TeamType:
pass
@strawberry.type
class Query:
@strawberry.field
def team(self) -> TeamType:
return TeamType(name="hobbits", headquarters="The Shire")
schema = strawberry.Schema(query=Query)
expected_schema = """
type Query {
team: TeamType!
}
type TeamType {
name: String!
headquarters: String
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ team { name } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["team"]["name"] == "hobbits"
def test_one_to_one_optional(clear_types):
@strawberry.experimental.pydantic.type(Team, fields=["name"])
class TeamType:
pass
@strawberry.experimental.pydantic.type(Hero, fields=["team"])
class HeroType:
pass
@strawberry.type
class Query:
@strawberry.field
def hero(self) -> HeroType:
return HeroType(team=TeamType(name="Skii"))
schema = strawberry.Schema(query=Query)
expected_schema = """
type HeroType {
team: TeamType
}
type Query {
hero: HeroType!
}
type TeamType {
name: String!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ hero { team { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["hero"]["team"]["name"] == "Skii"
def test_one_to_one_required(clear_types):
@strawberry.experimental.pydantic.type(Manager, fields=["name"])
class ManagerType:
pass
@strawberry.experimental.pydantic.type(Team, fields=["manager"])
class TeamType:
pass
@strawberry.type
class Query:
@strawberry.field
def team(self) -> TeamType:
return TeamType(manager=ManagerType(name="Skii"))
schema = strawberry.Schema(query=Query)
expected_schema = """
type ManagerType {
name: String!
}
type Query {
team: TeamType!
}
type TeamType {
manager: ManagerType!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ team { manager { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["team"]["manager"]["name"] == "Skii"
def test_nested_type_unordered(clear_types):
@strawberry.experimental.pydantic.type(Hero, fields=["team"])
class HeroType:
pass
@strawberry.experimental.pydantic.type(Team, fields=["name"])
class TeamType:
pass
@strawberry.type
class Query:
@strawberry.field
def hero(self) -> HeroType:
return HeroType(team=TeamType(name="Skii"))
schema = strawberry.Schema(query=Query)
expected_schema = """
type HeroType {
team: TeamType
}
type Query {
hero: HeroType!
}
type TeamType {
name: String!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ hero { team { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["hero"]["team"]["name"] == "Skii"
def test_one_to_many(clear_types):
@strawberry.experimental.pydantic.type(Team, fields=["heroes"])
class TeamType:
pass
@strawberry.experimental.pydantic.type(Hero, fields=["name"])
class HeroType:
pass
@strawberry.type
class Query:
@strawberry.field
def team(self) -> TeamType:
return TeamType(heroes=[HeroType(name="Skii"), HeroType(name="Chris")])
schema = strawberry.Schema(query=Query)
expected_schema = """
type HeroType {
name: String!
}
type Query {
team: TeamType!
}
type TeamType {
heroes: [HeroType!]!
}
"""
assert str(schema) == textwrap.dedent(expected_schema).strip()
query = "{ team { heroes { name } } }"
result = schema.execute_sync(query)
assert not result.errors
assert result.data["team"]["heroes"][0]["name"] == "Skii"
assert result.data["team"]["heroes"][1]["name"] == "Chris"
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((196, 233), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (201, 233), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((250, 257), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (255, 257), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((280, 287), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (285, 287), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((351, 388), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (356, 388), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((405, 412), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (410, 412), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((440, 478), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""manager"""'}), "(back_populates='manager')\n", (452, 478), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((539, 576), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (544, 576), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((593, 610), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (598, 610), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((645, 664), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (650, 664), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((687, 734), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)', 'foreign_key': '"""manager.id"""'}), "(nullable=False, foreign_key='manager.id')\n", (692, 734), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((758, 801), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""managed_team"""'}), "(back_populates='managed_team')\n", (770, 801), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), 
((829, 864), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""team"""'}), "(back_populates='team')\n", (841, 864), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((925, 962), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)', 'default': 'None'}), '(primary_key=True, default=None)\n', (930, 962), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((979, 996), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (984, 996), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1043, 1074), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'index': '(True)'}), '(default=None, index=True)\n', (1048, 1074), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1104, 1146), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""team.id"""'}), "(default=None, foreign_key='team.id')\n", (1109, 1146), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1174, 1211), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""heroes"""'}), "(back_populates='heroes')\n", (1186, 1211), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((1430, 1490), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['City'], {'all_fields': '(True)'}), '(City, all_fields=True)\n', (1467, 1490), False, 'import strawberry\n'), ((1707, 1737), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (1724, 1737), False, 'import strawberry\n'), ((2177, 2253), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['name', 'headquarters']"}), "(Team, fields=['name', 'headquarters'])\n", (2214, 2253), False, 'import strawberry\n'), ((2472, 2502), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (2489, 2502), False, 'import strawberry\n'), ((2931, 2991), 'strawberry.experimental.pydantic.type', 
'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['name']"}), "(Team, fields=['name'])\n", (2968, 2991), False, 'import strawberry\n'), ((3031, 3091), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Hero'], {'fields': "['team']"}), "(Hero, fields=['team'])\n", (3068, 3091), False, 'import strawberry\n'), ((3296, 3326), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (3313, 3326), False, 'import strawberry\n'), ((3790, 3853), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Manager'], {'fields': "['name']"}), "(Manager, fields=['name'])\n", (3827, 3853), False, 'import strawberry\n'), ((3896, 3959), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['manager']"}), "(Team, fields=['manager'])\n", (3933, 3959), False, 'import strawberry\n'), ((4170, 4200), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (4187, 4200), False, 'import strawberry\n'), ((4682, 4742), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Hero'], {'fields': "['team']"}), "(Hero, fields=['team'])\n", (4719, 4742), False, 'import strawberry\n'), ((4782, 4842), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['name']"}), "(Team, fields=['name'])\n", (4819, 4842), False, 'import strawberry\n'), ((5047, 5077), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (5064, 5077), False, 'import strawberry\n'), ((5533, 5595), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Team'], {'fields': "['heroes']"}), "(Team, fields=['heroes'])\n", (5570, 5595), False, 'import strawberry\n'), ((5635, 5695), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['Hero'], {'fields': "['name']"}), "(Hero, 
fields=['name'])\n", (5672, 5695), False, 'import strawberry\n'), ((5928, 5958), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (5945, 5958), False, 'import strawberry\n'), ((1929, 1961), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (1944, 1961), False, 'import textwrap\n'), ((2684, 2716), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (2699, 2716), False, 'import textwrap\n'), ((3529, 3561), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (3544, 3561), False, 'import textwrap\n'), ((4413, 4445), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (4428, 4445), False, 'import textwrap\n'), ((5280, 5312), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (5295, 5312), False, 'import textwrap\n'), ((6167, 6199), 'textwrap.dedent', 'textwrap.dedent', (['expected_schema'], {}), '(expected_schema)\n', (6182, 6199), False, 'import textwrap\n')] |
"""Initial
Revision ID: d63ccd5484d7
Revises:
Create Date: 2021-11-14 00:28:55.123695
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
# ``revision`` must equal the "Revision ID" stated in the module docstring
# (d63ccd5484d7); the '<KEY>' placeholder left here would make Alembic unable
# to locate this script in its revision map.
revision = 'd63ccd5484d7'
down_revision = None  # first migration in the chain (docstring: "Revises:" is empty)
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema.

    Tables are created in dependency order (referenced tables first):
    ``facilities``, ``increment``, ``listings``, ``song``, then the
    dependent ``images`` (FK -> listings) and the ``listingfacilitylink``
    association table (FKs -> listings and facilities).  Every column also
    gets a non-unique index named via ``op.f`` (Alembic's naming-convention
    helper).  String columns use sqlmodel's ``AutoString`` type.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- facilities: lookup table of property facilities ---
    op.create_table('facilities',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('category', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_facilities_category'), 'facilities', ['category'], unique=False)
    op.create_index(op.f('ix_facilities_created_at'), 'facilities', ['created_at'], unique=False)
    op.create_index(op.f('ix_facilities_id'), 'facilities', ['id'], unique=False)
    op.create_index(op.f('ix_facilities_name'), 'facilities', ['name'], unique=False)
    op.create_index(op.f('ix_facilities_notes'), 'facilities', ['notes'], unique=False)
    op.create_index(op.f('ix_facilities_updated_at'), 'facilities', ['updated_at'], unique=False)
    # --- increment: single-column counter table ---
    op.create_table('increment',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_increment_id'), 'increment', ['id'], unique=False)
    # --- listings: main table of scraped property listings ---
    op.create_table('listings',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('is_active', sa.Boolean(), nullable=False),
    sa.Column('title', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('description', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('source', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('source_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('source_code', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('address', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('short_postal_code', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('property_type', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('postal_code', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('ber_code', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('bedrooms', sa.Integer(), nullable=True),
    sa.Column('bathrooms', sa.Integer(), nullable=True),
    sa.Column('price', sa.Integer(), nullable=True),
    sa.Column('rating_auto', sa.Integer(), nullable=True),
    sa.Column('rating_user', sa.Integer(), nullable=True),
    sa.Column('telegram_sent_at', sa.DateTime(), nullable=True),
    sa.Column('images_count', sa.Integer(), nullable=True),
    sa.Column('latitude', sa.Float(), nullable=True),
    sa.Column('longitude', sa.Float(), nullable=True),
    sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
    sa.Column('publish_date', sa.DateTime(), nullable=True),
    sa.Column('last_updated', sa.DateTime(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_listings_address'), 'listings', ['address'], unique=False)
    op.create_index(op.f('ix_listings_bathrooms'), 'listings', ['bathrooms'], unique=False)
    op.create_index(op.f('ix_listings_bedrooms'), 'listings', ['bedrooms'], unique=False)
    op.create_index(op.f('ix_listings_ber_code'), 'listings', ['ber_code'], unique=False)
    op.create_index(op.f('ix_listings_created_at'), 'listings', ['created_at'], unique=False)
    op.create_index(op.f('ix_listings_description'), 'listings', ['description'], unique=False)
    op.create_index(op.f('ix_listings_id'), 'listings', ['id'], unique=False)
    op.create_index(op.f('ix_listings_images_count'), 'listings', ['images_count'], unique=False)
    op.create_index(op.f('ix_listings_is_active'), 'listings', ['is_active'], unique=False)
    op.create_index(op.f('ix_listings_last_updated'), 'listings', ['last_updated'], unique=False)
    op.create_index(op.f('ix_listings_latitude'), 'listings', ['latitude'], unique=False)
    op.create_index(op.f('ix_listings_longitude'), 'listings', ['longitude'], unique=False)
    op.create_index(op.f('ix_listings_notes'), 'listings', ['notes'], unique=False)
    op.create_index(op.f('ix_listings_postal_code'), 'listings', ['postal_code'], unique=False)
    op.create_index(op.f('ix_listings_price'), 'listings', ['price'], unique=False)
    op.create_index(op.f('ix_listings_property_type'), 'listings', ['property_type'], unique=False)
    op.create_index(op.f('ix_listings_publish_date'), 'listings', ['publish_date'], unique=False)
    op.create_index(op.f('ix_listings_rating_auto'), 'listings', ['rating_auto'], unique=False)
    op.create_index(op.f('ix_listings_rating_user'), 'listings', ['rating_user'], unique=False)
    op.create_index(op.f('ix_listings_short_postal_code'), 'listings', ['short_postal_code'], unique=False)
    op.create_index(op.f('ix_listings_source'), 'listings', ['source'], unique=False)
    op.create_index(op.f('ix_listings_source_code'), 'listings', ['source_code'], unique=False)
    op.create_index(op.f('ix_listings_source_id'), 'listings', ['source_id'], unique=False)
    op.create_index(op.f('ix_listings_telegram_sent_at'), 'listings', ['telegram_sent_at'], unique=False)
    op.create_index(op.f('ix_listings_title'), 'listings', ['title'], unique=False)
    op.create_index(op.f('ix_listings_updated_at'), 'listings', ['updated_at'], unique=False)
    op.create_index(op.f('ix_listings_url'), 'listings', ['url'], unique=False)
    # --- song: standalone table, unrelated to the listings domain ---
    op.create_table('song',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('artist', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('year', sa.Integer(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_song_artist'), 'song', ['artist'], unique=False)
    op.create_index(op.f('ix_song_created_at'), 'song', ['created_at'], unique=False)
    op.create_index(op.f('ix_song_id'), 'song', ['id'], unique=False)
    op.create_index(op.f('ix_song_name'), 'song', ['name'], unique=False)
    op.create_index(op.f('ix_song_updated_at'), 'song', ['updated_at'], unique=False)
    op.create_index(op.f('ix_song_year'), 'song', ['year'], unique=False)
    # --- images: per-listing images; FK requires listings to exist first ---
    op.create_table('images',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('size_x', sa.Float(), nullable=True),
    sa.Column('size_y', sa.Float(), nullable=True),
    sa.Column('listing_id', sa.Integer(), nullable=True),
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['listing_id'], ['listings.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_images_created_at'), 'images', ['created_at'], unique=False)
    op.create_index(op.f('ix_images_id'), 'images', ['id'], unique=False)
    op.create_index(op.f('ix_images_listing_id'), 'images', ['listing_id'], unique=False)
    op.create_index(op.f('ix_images_size_x'), 'images', ['size_x'], unique=False)
    op.create_index(op.f('ix_images_size_y'), 'images', ['size_y'], unique=False)
    op.create_index(op.f('ix_images_updated_at'), 'images', ['updated_at'], unique=False)
    op.create_index(op.f('ix_images_url'), 'images', ['url'], unique=False)
    # --- listingfacilitylink: many-to-many association (composite PK) ---
    op.create_table('listingfacilitylink',
    sa.Column('listing_id', sa.Integer(), nullable=False),
    sa.Column('facility_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['facility_id'], ['facilities.id'], ),
    sa.ForeignKeyConstraint(['listing_id'], ['listings.id'], ),
    sa.PrimaryKeyConstraint('listing_id', 'facility_id')
    )
    op.create_index(op.f('ix_listingfacilitylink_facility_id'), 'listingfacilitylink', ['facility_id'], unique=False)
    op.create_index(op.f('ix_listingfacilitylink_listing_id'), 'listingfacilitylink', ['listing_id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the initial schema: drop every index, then its table.

    Tables are removed in reverse dependency order so foreign keys never
    dangle: the ``listingfacilitylink`` association table and ``images``
    (which reference ``listings``/``facilities``) go first, the referenced
    tables last.
    """
    # (table, columns whose ``ix_<table>_<column>`` index is dropped, in the
    # exact order the autogenerated script used)
    drop_schedule = (
        ('listingfacilitylink', ('listing_id', 'facility_id')),
        ('images', ('url', 'updated_at', 'size_y', 'size_x', 'listing_id',
                    'id', 'created_at')),
        ('song', ('year', 'updated_at', 'name', 'id', 'created_at',
                  'artist')),
        ('listings', ('url', 'updated_at', 'title', 'telegram_sent_at',
                      'source_id', 'source_code', 'source',
                      'short_postal_code', 'rating_user', 'rating_auto',
                      'publish_date', 'property_type', 'price',
                      'postal_code', 'notes', 'longitude', 'latitude',
                      'last_updated', 'is_active', 'images_count', 'id',
                      'description', 'created_at', 'ber_code', 'bedrooms',
                      'bathrooms', 'address')),
        ('increment', ('id',)),
        ('facilities', ('updated_at', 'notes', 'name', 'id', 'created_at',
                        'category')),
    )
    for table, columns in drop_schedule:
        for column in columns:
            op.drop_index(op.f('ix_%s_%s' % (table, column)), table_name=table)
        op.drop_table(table)
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((8907, 8943), 'alembic.op.drop_table', 'op.drop_table', (['"""listingfacilitylink"""'], {}), "('listingfacilitylink')\n", (8920, 8943), False, 'from alembic import op\n'), ((9408, 9431), 'alembic.op.drop_table', 'op.drop_table', (['"""images"""'], {}), "('images')\n", (9421, 9431), False, 'from alembic import op\n'), ((9802, 9823), 'alembic.op.drop_table', 'op.drop_table', (['"""song"""'], {}), "('song')\n", (9815, 9823), False, 'from alembic import op\n'), ((11779, 11804), 'alembic.op.drop_table', 'op.drop_table', (['"""listings"""'], {}), "('listings')\n", (11792, 11804), False, 'from alembic import op\n'), ((11876, 11902), 'alembic.op.drop_table', 'op.drop_table', (['"""increment"""'], {}), "('increment')\n", (11889, 11902), False, 'from alembic import op\n'), ((12348, 12375), 'alembic.op.drop_table', 'op.drop_table', (['"""facilities"""'], {}), "('facilities')\n", (12361, 12375), False, 'from alembic import op\n'), ((796, 825), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (819, 825), True, 'import sqlalchemy as sa\n'), ((852, 882), 'alembic.op.f', 'op.f', (['"""ix_facilities_category"""'], {}), "('ix_facilities_category')\n", (856, 882), False, 'from alembic import op\n'), ((946, 978), 'alembic.op.f', 'op.f', (['"""ix_facilities_created_at"""'], {}), "('ix_facilities_created_at')\n", (950, 978), False, 'from alembic import op\n'), ((1044, 1068), 'alembic.op.f', 'op.f', (['"""ix_facilities_id"""'], {}), "('ix_facilities_id')\n", (1048, 1068), False, 'from alembic import op\n'), ((1126, 1152), 'alembic.op.f', 'op.f', (['"""ix_facilities_name"""'], {}), "('ix_facilities_name')\n", (1130, 1152), False, 'from alembic import op\n'), ((1212, 1239), 'alembic.op.f', 'op.f', (['"""ix_facilities_notes"""'], {}), "('ix_facilities_notes')\n", (1216, 1239), False, 'from alembic import op\n'), ((1300, 1332), 'alembic.op.f', 'op.f', (['"""ix_facilities_updated_at"""'], {}), "('ix_facilities_updated_at')\n", (1304, 1332), 
False, 'from alembic import op\n'), ((1466, 1495), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1489, 1495), True, 'import sqlalchemy as sa\n'), ((1522, 1545), 'alembic.op.f', 'op.f', (['"""ix_increment_id"""'], {}), "('ix_increment_id')\n", (1526, 1545), False, 'from alembic import op\n'), ((3436, 3465), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3459, 3465), True, 'import sqlalchemy as sa\n'), ((3492, 3519), 'alembic.op.f', 'op.f', (['"""ix_listings_address"""'], {}), "('ix_listings_address')\n", (3496, 3519), False, 'from alembic import op\n'), ((3580, 3609), 'alembic.op.f', 'op.f', (['"""ix_listings_bathrooms"""'], {}), "('ix_listings_bathrooms')\n", (3584, 3609), False, 'from alembic import op\n'), ((3672, 3700), 'alembic.op.f', 'op.f', (['"""ix_listings_bedrooms"""'], {}), "('ix_listings_bedrooms')\n", (3676, 3700), False, 'from alembic import op\n'), ((3762, 3790), 'alembic.op.f', 'op.f', (['"""ix_listings_ber_code"""'], {}), "('ix_listings_ber_code')\n", (3766, 3790), False, 'from alembic import op\n'), ((3852, 3882), 'alembic.op.f', 'op.f', (['"""ix_listings_created_at"""'], {}), "('ix_listings_created_at')\n", (3856, 3882), False, 'from alembic import op\n'), ((3946, 3977), 'alembic.op.f', 'op.f', (['"""ix_listings_description"""'], {}), "('ix_listings_description')\n", (3950, 3977), False, 'from alembic import op\n'), ((4042, 4064), 'alembic.op.f', 'op.f', (['"""ix_listings_id"""'], {}), "('ix_listings_id')\n", (4046, 4064), False, 'from alembic import op\n'), ((4120, 4152), 'alembic.op.f', 'op.f', (['"""ix_listings_images_count"""'], {}), "('ix_listings_images_count')\n", (4124, 4152), False, 'from alembic import op\n'), ((4218, 4247), 'alembic.op.f', 'op.f', (['"""ix_listings_is_active"""'], {}), "('ix_listings_is_active')\n", (4222, 4247), False, 'from alembic import op\n'), ((4310, 4342), 'alembic.op.f', 'op.f', (['"""ix_listings_last_updated"""'], 
{}), "('ix_listings_last_updated')\n", (4314, 4342), False, 'from alembic import op\n'), ((4408, 4436), 'alembic.op.f', 'op.f', (['"""ix_listings_latitude"""'], {}), "('ix_listings_latitude')\n", (4412, 4436), False, 'from alembic import op\n'), ((4498, 4527), 'alembic.op.f', 'op.f', (['"""ix_listings_longitude"""'], {}), "('ix_listings_longitude')\n", (4502, 4527), False, 'from alembic import op\n'), ((4590, 4615), 'alembic.op.f', 'op.f', (['"""ix_listings_notes"""'], {}), "('ix_listings_notes')\n", (4594, 4615), False, 'from alembic import op\n'), ((4674, 4705), 'alembic.op.f', 'op.f', (['"""ix_listings_postal_code"""'], {}), "('ix_listings_postal_code')\n", (4678, 4705), False, 'from alembic import op\n'), ((4770, 4795), 'alembic.op.f', 'op.f', (['"""ix_listings_price"""'], {}), "('ix_listings_price')\n", (4774, 4795), False, 'from alembic import op\n'), ((4854, 4887), 'alembic.op.f', 'op.f', (['"""ix_listings_property_type"""'], {}), "('ix_listings_property_type')\n", (4858, 4887), False, 'from alembic import op\n'), ((4954, 4986), 'alembic.op.f', 'op.f', (['"""ix_listings_publish_date"""'], {}), "('ix_listings_publish_date')\n", (4958, 4986), False, 'from alembic import op\n'), ((5052, 5083), 'alembic.op.f', 'op.f', (['"""ix_listings_rating_auto"""'], {}), "('ix_listings_rating_auto')\n", (5056, 5083), False, 'from alembic import op\n'), ((5148, 5179), 'alembic.op.f', 'op.f', (['"""ix_listings_rating_user"""'], {}), "('ix_listings_rating_user')\n", (5152, 5179), False, 'from alembic import op\n'), ((5244, 5281), 'alembic.op.f', 'op.f', (['"""ix_listings_short_postal_code"""'], {}), "('ix_listings_short_postal_code')\n", (5248, 5281), False, 'from alembic import op\n'), ((5352, 5378), 'alembic.op.f', 'op.f', (['"""ix_listings_source"""'], {}), "('ix_listings_source')\n", (5356, 5378), False, 'from alembic import op\n'), ((5438, 5469), 'alembic.op.f', 'op.f', (['"""ix_listings_source_code"""'], {}), "('ix_listings_source_code')\n", (5442, 5469), False, 'from 
alembic import op\n'), ((5534, 5563), 'alembic.op.f', 'op.f', (['"""ix_listings_source_id"""'], {}), "('ix_listings_source_id')\n", (5538, 5563), False, 'from alembic import op\n'), ((5626, 5662), 'alembic.op.f', 'op.f', (['"""ix_listings_telegram_sent_at"""'], {}), "('ix_listings_telegram_sent_at')\n", (5630, 5662), False, 'from alembic import op\n'), ((5732, 5757), 'alembic.op.f', 'op.f', (['"""ix_listings_title"""'], {}), "('ix_listings_title')\n", (5736, 5757), False, 'from alembic import op\n'), ((5816, 5846), 'alembic.op.f', 'op.f', (['"""ix_listings_updated_at"""'], {}), "('ix_listings_updated_at')\n", (5820, 5846), False, 'from alembic import op\n'), ((5910, 5933), 'alembic.op.f', 'op.f', (['"""ix_listings_url"""'], {}), "('ix_listings_url')\n", (5914, 5933), False, 'from alembic import op\n'), ((6375, 6404), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (6398, 6404), True, 'import sqlalchemy as sa\n'), ((6431, 6453), 'alembic.op.f', 'op.f', (['"""ix_song_artist"""'], {}), "('ix_song_artist')\n", (6435, 6453), False, 'from alembic import op\n'), ((6509, 6535), 'alembic.op.f', 'op.f', (['"""ix_song_created_at"""'], {}), "('ix_song_created_at')\n", (6513, 6535), False, 'from alembic import op\n'), ((6595, 6613), 'alembic.op.f', 'op.f', (['"""ix_song_id"""'], {}), "('ix_song_id')\n", (6599, 6613), False, 'from alembic import op\n'), ((6665, 6685), 'alembic.op.f', 'op.f', (['"""ix_song_name"""'], {}), "('ix_song_name')\n", (6669, 6685), False, 'from alembic import op\n'), ((6739, 6765), 'alembic.op.f', 'op.f', (['"""ix_song_updated_at"""'], {}), "('ix_song_updated_at')\n", (6743, 6765), False, 'from alembic import op\n'), ((6825, 6845), 'alembic.op.f', 'op.f', (['"""ix_song_year"""'], {}), "('ix_song_year')\n", (6829, 6845), False, 'from alembic import op\n'), ((7318, 7374), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['listing_id']", "['listings.id']"], {}), "(['listing_id'], 
['listings.id'])\n", (7341, 7374), True, 'import sqlalchemy as sa\n'), ((7382, 7411), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (7405, 7411), True, 'import sqlalchemy as sa\n'), ((7438, 7466), 'alembic.op.f', 'op.f', (['"""ix_images_created_at"""'], {}), "('ix_images_created_at')\n", (7442, 7466), False, 'from alembic import op\n'), ((7528, 7548), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (7532, 7548), False, 'from alembic import op\n'), ((7602, 7630), 'alembic.op.f', 'op.f', (['"""ix_images_listing_id"""'], {}), "('ix_images_listing_id')\n", (7606, 7630), False, 'from alembic import op\n'), ((7692, 7716), 'alembic.op.f', 'op.f', (['"""ix_images_size_x"""'], {}), "('ix_images_size_x')\n", (7696, 7716), False, 'from alembic import op\n'), ((7774, 7798), 'alembic.op.f', 'op.f', (['"""ix_images_size_y"""'], {}), "('ix_images_size_y')\n", (7778, 7798), False, 'from alembic import op\n'), ((7856, 7884), 'alembic.op.f', 'op.f', (['"""ix_images_updated_at"""'], {}), "('ix_images_updated_at')\n", (7860, 7884), False, 'from alembic import op\n'), ((7946, 7967), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (7950, 7967), False, 'from alembic import op\n'), ((8168, 8227), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['facility_id']", "['facilities.id']"], {}), "(['facility_id'], ['facilities.id'])\n", (8191, 8227), True, 'import sqlalchemy as sa\n'), ((8235, 8291), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['listing_id']", "['listings.id']"], {}), "(['listing_id'], ['listings.id'])\n", (8258, 8291), True, 'import sqlalchemy as sa\n'), ((8299, 8351), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""listing_id"""', '"""facility_id"""'], {}), "('listing_id', 'facility_id')\n", (8322, 8351), True, 'import sqlalchemy as sa\n'), ((8378, 8420), 'alembic.op.f', 'op.f', 
(['"""ix_listingfacilitylink_facility_id"""'], {}), "('ix_listingfacilitylink_facility_id')\n", (8382, 8420), False, 'from alembic import op\n'), ((8496, 8537), 'alembic.op.f', 'op.f', (['"""ix_listingfacilitylink_listing_id"""'], {}), "('ix_listingfacilitylink_listing_id')\n", (8500, 8537), False, 'from alembic import op\n'), ((8730, 8771), 'alembic.op.f', 'op.f', (['"""ix_listingfacilitylink_listing_id"""'], {}), "('ix_listingfacilitylink_listing_id')\n", (8734, 8771), False, 'from alembic import op\n'), ((8825, 8867), 'alembic.op.f', 'op.f', (['"""ix_listingfacilitylink_facility_id"""'], {}), "('ix_listingfacilitylink_facility_id')\n", (8829, 8867), False, 'from alembic import op\n'), ((8962, 8983), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (8966, 8983), False, 'from alembic import op\n'), ((9024, 9052), 'alembic.op.f', 'op.f', (['"""ix_images_updated_at"""'], {}), "('ix_images_updated_at')\n", (9028, 9052), False, 'from alembic import op\n'), ((9093, 9117), 'alembic.op.f', 'op.f', (['"""ix_images_size_y"""'], {}), "('ix_images_size_y')\n", (9097, 9117), False, 'from alembic import op\n'), ((9158, 9182), 'alembic.op.f', 'op.f', (['"""ix_images_size_x"""'], {}), "('ix_images_size_x')\n", (9162, 9182), False, 'from alembic import op\n'), ((9223, 9251), 'alembic.op.f', 'op.f', (['"""ix_images_listing_id"""'], {}), "('ix_images_listing_id')\n", (9227, 9251), False, 'from alembic import op\n'), ((9292, 9312), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (9296, 9312), False, 'from alembic import op\n'), ((9353, 9381), 'alembic.op.f', 'op.f', (['"""ix_images_created_at"""'], {}), "('ix_images_created_at')\n", (9357, 9381), False, 'from alembic import op\n'), ((9450, 9470), 'alembic.op.f', 'op.f', (['"""ix_song_year"""'], {}), "('ix_song_year')\n", (9454, 9470), False, 'from alembic import op\n'), ((9509, 9535), 'alembic.op.f', 'op.f', (['"""ix_song_updated_at"""'], {}), "('ix_song_updated_at')\n", 
(9513, 9535), False, 'from alembic import op\n'), ((9574, 9594), 'alembic.op.f', 'op.f', (['"""ix_song_name"""'], {}), "('ix_song_name')\n", (9578, 9594), False, 'from alembic import op\n'), ((9633, 9651), 'alembic.op.f', 'op.f', (['"""ix_song_id"""'], {}), "('ix_song_id')\n", (9637, 9651), False, 'from alembic import op\n'), ((9690, 9716), 'alembic.op.f', 'op.f', (['"""ix_song_created_at"""'], {}), "('ix_song_created_at')\n", (9694, 9716), False, 'from alembic import op\n'), ((9755, 9777), 'alembic.op.f', 'op.f', (['"""ix_song_artist"""'], {}), "('ix_song_artist')\n", (9759, 9777), False, 'from alembic import op\n'), ((9842, 9865), 'alembic.op.f', 'op.f', (['"""ix_listings_url"""'], {}), "('ix_listings_url')\n", (9846, 9865), False, 'from alembic import op\n'), ((9908, 9938), 'alembic.op.f', 'op.f', (['"""ix_listings_updated_at"""'], {}), "('ix_listings_updated_at')\n", (9912, 9938), False, 'from alembic import op\n'), ((9981, 10006), 'alembic.op.f', 'op.f', (['"""ix_listings_title"""'], {}), "('ix_listings_title')\n", (9985, 10006), False, 'from alembic import op\n'), ((10049, 10085), 'alembic.op.f', 'op.f', (['"""ix_listings_telegram_sent_at"""'], {}), "('ix_listings_telegram_sent_at')\n", (10053, 10085), False, 'from alembic import op\n'), ((10128, 10157), 'alembic.op.f', 'op.f', (['"""ix_listings_source_id"""'], {}), "('ix_listings_source_id')\n", (10132, 10157), False, 'from alembic import op\n'), ((10200, 10231), 'alembic.op.f', 'op.f', (['"""ix_listings_source_code"""'], {}), "('ix_listings_source_code')\n", (10204, 10231), False, 'from alembic import op\n'), ((10274, 10300), 'alembic.op.f', 'op.f', (['"""ix_listings_source"""'], {}), "('ix_listings_source')\n", (10278, 10300), False, 'from alembic import op\n'), ((10343, 10380), 'alembic.op.f', 'op.f', (['"""ix_listings_short_postal_code"""'], {}), "('ix_listings_short_postal_code')\n", (10347, 10380), False, 'from alembic import op\n'), ((10423, 10454), 'alembic.op.f', 'op.f', 
(['"""ix_listings_rating_user"""'], {}), "('ix_listings_rating_user')\n", (10427, 10454), False, 'from alembic import op\n'), ((10497, 10528), 'alembic.op.f', 'op.f', (['"""ix_listings_rating_auto"""'], {}), "('ix_listings_rating_auto')\n", (10501, 10528), False, 'from alembic import op\n'), ((10571, 10603), 'alembic.op.f', 'op.f', (['"""ix_listings_publish_date"""'], {}), "('ix_listings_publish_date')\n", (10575, 10603), False, 'from alembic import op\n'), ((10646, 10679), 'alembic.op.f', 'op.f', (['"""ix_listings_property_type"""'], {}), "('ix_listings_property_type')\n", (10650, 10679), False, 'from alembic import op\n'), ((10722, 10747), 'alembic.op.f', 'op.f', (['"""ix_listings_price"""'], {}), "('ix_listings_price')\n", (10726, 10747), False, 'from alembic import op\n'), ((10790, 10821), 'alembic.op.f', 'op.f', (['"""ix_listings_postal_code"""'], {}), "('ix_listings_postal_code')\n", (10794, 10821), False, 'from alembic import op\n'), ((10864, 10889), 'alembic.op.f', 'op.f', (['"""ix_listings_notes"""'], {}), "('ix_listings_notes')\n", (10868, 10889), False, 'from alembic import op\n'), ((10932, 10961), 'alembic.op.f', 'op.f', (['"""ix_listings_longitude"""'], {}), "('ix_listings_longitude')\n", (10936, 10961), False, 'from alembic import op\n'), ((11004, 11032), 'alembic.op.f', 'op.f', (['"""ix_listings_latitude"""'], {}), "('ix_listings_latitude')\n", (11008, 11032), False, 'from alembic import op\n'), ((11075, 11107), 'alembic.op.f', 'op.f', (['"""ix_listings_last_updated"""'], {}), "('ix_listings_last_updated')\n", (11079, 11107), False, 'from alembic import op\n'), ((11150, 11179), 'alembic.op.f', 'op.f', (['"""ix_listings_is_active"""'], {}), "('ix_listings_is_active')\n", (11154, 11179), False, 'from alembic import op\n'), ((11222, 11254), 'alembic.op.f', 'op.f', (['"""ix_listings_images_count"""'], {}), "('ix_listings_images_count')\n", (11226, 11254), False, 'from alembic import op\n'), ((11297, 11319), 'alembic.op.f', 'op.f', 
(['"""ix_listings_id"""'], {}), "('ix_listings_id')\n", (11301, 11319), False, 'from alembic import op\n'), ((11362, 11393), 'alembic.op.f', 'op.f', (['"""ix_listings_description"""'], {}), "('ix_listings_description')\n", (11366, 11393), False, 'from alembic import op\n'), ((11436, 11466), 'alembic.op.f', 'op.f', (['"""ix_listings_created_at"""'], {}), "('ix_listings_created_at')\n", (11440, 11466), False, 'from alembic import op\n'), ((11509, 11537), 'alembic.op.f', 'op.f', (['"""ix_listings_ber_code"""'], {}), "('ix_listings_ber_code')\n", (11513, 11537), False, 'from alembic import op\n'), ((11580, 11608), 'alembic.op.f', 'op.f', (['"""ix_listings_bedrooms"""'], {}), "('ix_listings_bedrooms')\n", (11584, 11608), False, 'from alembic import op\n'), ((11651, 11680), 'alembic.op.f', 'op.f', (['"""ix_listings_bathrooms"""'], {}), "('ix_listings_bathrooms')\n", (11655, 11680), False, 'from alembic import op\n'), ((11723, 11750), 'alembic.op.f', 'op.f', (['"""ix_listings_address"""'], {}), "('ix_listings_address')\n", (11727, 11750), False, 'from alembic import op\n'), ((11823, 11846), 'alembic.op.f', 'op.f', (['"""ix_increment_id"""'], {}), "('ix_increment_id')\n", (11827, 11846), False, 'from alembic import op\n'), ((11921, 11953), 'alembic.op.f', 'op.f', (['"""ix_facilities_updated_at"""'], {}), "('ix_facilities_updated_at')\n", (11925, 11953), False, 'from alembic import op\n'), ((11998, 12025), 'alembic.op.f', 'op.f', (['"""ix_facilities_notes"""'], {}), "('ix_facilities_notes')\n", (12002, 12025), False, 'from alembic import op\n'), ((12070, 12096), 'alembic.op.f', 'op.f', (['"""ix_facilities_name"""'], {}), "('ix_facilities_name')\n", (12074, 12096), False, 'from alembic import op\n'), ((12141, 12165), 'alembic.op.f', 'op.f', (['"""ix_facilities_id"""'], {}), "('ix_facilities_id')\n", (12145, 12165), False, 'from alembic import op\n'), ((12210, 12242), 'alembic.op.f', 'op.f', (['"""ix_facilities_created_at"""'], {}), "('ix_facilities_created_at')\n", (12214, 
12242), False, 'from alembic import op\n'), ((12287, 12317), 'alembic.op.f', 'op.f', (['"""ix_facilities_category"""'], {}), "('ix_facilities_category')\n", (12291, 12317), False, 'from alembic import op\n'), ((415, 427), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (425, 427), True, 'import sqlalchemy as sa\n'), ((468, 502), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (500, 502), False, 'import sqlmodel\n'), ((547, 581), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (579, 581), False, 'import sqlmodel\n'), ((622, 656), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (654, 656), False, 'import sqlmodel\n'), ((702, 715), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (713, 715), True, 'import sqlalchemy as sa\n'), ((761, 774), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (772, 774), True, 'import sqlalchemy as sa\n'), ((1431, 1443), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1441, 1443), True, 'import sqlalchemy as sa\n'), ((1634, 1646), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1644, 1646), True, 'import sqlalchemy as sa\n'), ((1692, 1704), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1702, 1704), True, 'import sqlalchemy as sa\n'), ((1746, 1780), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1778, 1780), False, 'import sqlmodel\n'), ((1827, 1861), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1859, 1861), False, 'import sqlmodel\n'), ((1900, 1934), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1932, 1934), False, 'import sqlmodel\n'), ((1977, 2011), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2009, 2011), False, 'import sqlmodel\n'), ((2057, 2091), 'sqlmodel.sql.sqltypes.AutoString', 
'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2089, 2091), False, 'import sqlmodel\n'), ((2139, 2173), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2171, 2173), False, 'import sqlmodel\n'), ((2217, 2251), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2249, 2251), False, 'import sqlmodel\n'), ((2305, 2339), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2337, 2339), False, 'import sqlmodel\n'), ((2388, 2422), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2420, 2422), False, 'import sqlmodel\n'), ((2469, 2503), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2501, 2503), False, 'import sqlmodel\n'), ((2547, 2581), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2579, 2581), False, 'import sqlmodel\n'), ((2625, 2637), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2635, 2637), True, 'import sqlalchemy as sa\n'), ((2682, 2694), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2692, 2694), True, 'import sqlalchemy as sa\n'), ((2735, 2747), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2745, 2747), True, 'import sqlalchemy as sa\n'), ((2794, 2806), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2804, 2806), True, 'import sqlalchemy as sa\n'), ((2853, 2865), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2863, 2865), True, 'import sqlalchemy as sa\n'), ((2917, 2930), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (2928, 2930), True, 'import sqlalchemy as sa\n'), ((2978, 2990), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2988, 2990), True, 'import sqlalchemy as sa\n'), ((3034, 3044), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (3042, 3044), True, 'import sqlalchemy as sa\n'), ((3089, 3099), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', 
(3097, 3099), True, 'import sqlalchemy as sa\n'), ((3140, 3174), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3172, 3174), False, 'import sqlmodel\n'), ((3222, 3235), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3233, 3235), True, 'import sqlalchemy as sa\n'), ((3283, 3296), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3294, 3296), True, 'import sqlalchemy as sa\n'), ((3342, 3355), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3353, 3355), True, 'import sqlalchemy as sa\n'), ((3401, 3414), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (3412, 3414), True, 'import sqlalchemy as sa\n'), ((6018, 6030), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (6028, 6030), True, 'import sqlalchemy as sa\n'), ((6071, 6105), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6103, 6105), False, 'import sqlmodel\n'), ((6148, 6182), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (6180, 6182), False, 'import sqlmodel\n'), ((6223, 6235), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (6233, 6235), True, 'import sqlalchemy as sa\n'), ((6281, 6294), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (6292, 6294), True, 'import sqlalchemy as sa\n'), ((6340, 6353), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (6351, 6353), True, 'import sqlalchemy as sa\n'), ((6929, 6941), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (6939, 6941), True, 'import sqlalchemy as sa\n'), ((6981, 7015), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (7013, 7015), False, 'import sqlmodel\n'), ((7058, 7068), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (7066, 7068), True, 'import sqlalchemy as sa\n'), ((7110, 7120), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (7118, 7120), True, 'import sqlalchemy as sa\n'), ((7166, 7178), 'sqlalchemy.Integer', 
'sa.Integer', ([], {}), '()\n', (7176, 7178), True, 'import sqlalchemy as sa\n'), ((7224, 7237), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (7235, 7237), True, 'import sqlalchemy as sa\n'), ((7283, 7296), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (7294, 7296), True, 'import sqlalchemy as sa\n'), ((8073, 8085), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (8083, 8085), True, 'import sqlalchemy as sa\n'), ((8133, 8145), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (8143, 8145), True, 'import sqlalchemy as sa\n')] |
from sqlite3.dbapi2 import Timestamp, adapt
from typing import Optional
from sqlmodel import Field, SQLModel
from pydantic import validator
from datetime import datetime, date
class Rate(SQLModel, table=True):
    """Billing rate tying a user to a client over a validity window."""

    id: Optional[int] = Field(default=None, primary_key=True)  # surrogate key, assigned by the DB
    user_id: int = Field(foreign_key="user.id")  # references user.id
    client_id: int = Field(foreign_key="client.id")  # references client.id
    valid_from: date  # first day the rate applies — inclusivity not enforced here; TODO confirm in callers
    valid_to: date  # last day the rate applies — TODO confirm inclusivity in callers
    amount: float  # currency: EUR
    created_at: datetime  # set by application code; no DB default is declared
    updated_at: datetime  # set by application code; no DB default is declared
    is_active: bool  # activation flag; semantics defined by querying code
| [
"sqlmodel.Field"
] | [((236, 273), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (241, 273), False, 'from sqlmodel import Field, SQLModel\n'), ((293, 321), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""user.id"""'}), "(foreign_key='user.id')\n", (298, 321), False, 'from sqlmodel import Field, SQLModel\n'), ((343, 373), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""client.id"""'}), "(foreign_key='client.id')\n", (348, 373), False, 'from sqlmodel import Field, SQLModel\n')] |
import uuid
from typing import List
from typing import Optional
from sqlalchemy.engine import Engine
from sqlmodel import Field
from sqlmodel import Relationship
from sqlmodel import Session
from sqlmodel import SQLModel
from sqlmodel import create_engine
# Module-wide engine singleton: populated by init(), read via get_engine().
_engine: Optional[Engine] = None
class FileTagAssociation(SQLModel, table=True):
    """Join table realizing the many-to-many link between File and Tag."""

    # Composite primary key: one row per (file, tag) pair.
    file_id: Optional[uuid.UUID] = Field(
        default=None, foreign_key="file.id", primary_key=True
    )
    tag_id: Optional[uuid.UUID] = Field(
        default=None, foreign_key="tag.id", primary_key=True
    )
class File(SQLModel, table=True):
    """A stored file record with a name, a path, and any number of tags."""

    id: Optional[uuid.UUID] = Field(default_factory=uuid.uuid4, primary_key=True)
    name: str  # display name of the file
    path: str  # location string; format not constrained here
    tags: List["Tag"] = Relationship(
        back_populates="files", link_model=FileTagAssociation
    )
class Tag(SQLModel, table=True):
    """A label (optionally namespaced by category) attachable to many files."""

    id: Optional[uuid.UUID] = Field(default_factory=uuid.uuid4, primary_key=True)
    category: Optional[str]  # optional grouping for the tag value
    value: str  # the tag text itself
    files: List[File] = Relationship(
        back_populates="tags", link_model=FileTagAssociation
    )
def init(url: str) -> None:
    """Create the global engine for *url* and create all mapped tables."""
    global _engine
    _engine = create_engine(url)
    # Issues DDL for every SQLModel table registered above.
    SQLModel.metadata.create_all(_engine)
def get_engine() -> Engine:
    """Get the global database engine."""
    if _engine is not None:
        return _engine
    raise ValueError("Engine must be initialized with `db.init()`")  # pragma: no cover
def get_session() -> Session:
    """Create a new database session to use as a context manager."""
    engine = get_engine()
    return Session(engine)
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Relationship",
"sqlmodel.Field",
"sqlmodel.create_engine"
] | [((376, 436), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""file.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='file.id', primary_key=True)\n", (381, 436), False, 'from sqlmodel import Field\n'), ((485, 544), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""tag.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='tag.id', primary_key=True)\n", (490, 544), False, 'from sqlmodel import Field\n'), ((625, 676), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'uuid.uuid4', 'primary_key': '(True)'}), '(default_factory=uuid.uuid4, primary_key=True)\n', (630, 676), False, 'from sqlmodel import Field\n'), ((730, 797), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""files"""', 'link_model': 'FileTagAssociation'}), "(back_populates='files', link_model=FileTagAssociation)\n", (742, 797), False, 'from sqlmodel import Relationship\n'), ((877, 928), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'uuid.uuid4', 'primary_key': '(True)'}), '(default_factory=uuid.uuid4, primary_key=True)\n', (882, 928), False, 'from sqlmodel import Field\n'), ((997, 1063), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""tags"""', 'link_model': 'FileTagAssociation'}), "(back_populates='tags', link_model=FileTagAssociation)\n", (1009, 1063), False, 'from sqlmodel import Relationship\n'), ((1141, 1159), 'sqlmodel.create_engine', 'create_engine', (['url'], {}), '(url)\n', (1154, 1159), False, 'from sqlmodel import create_engine\n'), ((1165, 1202), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['_engine'], {}), '(_engine)\n', (1193, 1202), False, 'from sqlmodel import SQLModel\n')] |
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine
def test_query(clear_sqlmodel):
    """Exercise the legacy ``Session.query`` interface against a SQLModel table."""

    class Hero(SQLModel, table=True):
        id: Optional[int] = Field(default=None, primary_key=True)
        name: str
        secret_name: str
        age: Optional[int] = None

    deadpond = Hero(name="Deadpond", secret_name="<NAME>")
    engine = create_engine("sqlite://")
    SQLModel.metadata.create_all(engine)

    # Persist one hero so there is something to query back.
    with Session(engine) as session:
        session.add(deadpond)
        session.commit()
        session.refresh(deadpond)

    # Fetch it back through session.query and compare against the original.
    with Session(engine) as session:
        fetched = session.query(Hero).first()
        assert fetched
        assert fetched.name == deadpond.name
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field",
"sqlmodel.create_engine",
"sqlmodel.Session"
] | [((377, 403), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite://"""'], {}), "('sqlite://')\n", (390, 403), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((409, 445), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (437, 445), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((190, 227), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (195, 227), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((455, 470), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (462, 470), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((578, 593), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (585, 593), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n')] |
import typing as t
from sqlmodel import SQLModel, Field, Relationship
from datetime import datetime
from .users import DB_User
class DB_AccessToken(SQLModel, table=True):
    """ORM model for the ``access_tokens`` table: per-user login/session tokens."""

    __tablename__ = 'access_tokens'

    id: t.Optional[int] = Field(default=None, primary_key=True)
    """The ID of the access token. This is handled by the database."""

    token: str = Field(max_length=40, sa_column_kwargs={'unique': True})
    """The unique access token."""

    user_id: t.Optional[int] = Field(default=None, foreign_key="users.id")
    user: DB_User = Relationship(back_populates="access_tokens")
    """To what user does this token belong to."""

    last_activity_at: datetime
    """When was the access token last active."""

    created_at: datetime
    """When was this token created at?"""

    type: str = Field(max_length=100)
    """The type of the access token (example: `'session_remember'`)"""

    title: t.Optional[str] = Field(max_length=150)
    """The title of the access token."""

    last_ip_address: t.Optional[str] = Field(max_length=45)
    """The last IP address associated with this access token."""

    last_user_agent: t.Optional[str] = Field(max_length=255)
    """The last browser's user agent that used this token."""
class DB_APIKey(SQLModel, table=True):
    """ORM model for the ``api_keys`` table: long-lived keys acting as a user."""

    __tablename__ = 'api_keys'

    id: t.Optional[int] = Field(default=None, primary_key=True)
    """The ID of the API key. This is handled by the database."""

    key: str = Field(max_length=100)
    """The unique API key."""

    allowedips: t.Optional[str] = Field(max_length=255)
    """The IP addresses that are allowed to use this API key."""

    scopes: t.Optional[str] = Field(max_length=255)
    """The scopes that this API key has access to."""

    user_id: t.Optional[int] = Field(default=None, foreign_key="users.id")
    user: DB_User = Relationship(back_populates="api_keys")
    """As what user to perform actions when using this API key."""

    # BUG FIX: the original used `Field(default=datetime.utcnow())`, which calls
    # utcnow() once at import time, so every row created by the process shared
    # the same timestamp. `default_factory` defers the call to instance creation,
    # giving each new key its own creation time.
    created_at: datetime = Field(default_factory=datetime.utcnow)
    """When was this API key created at?"""

    last_activity_at: t.Optional[datetime]
    """When was the API key last active?"""
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((238, 275), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (243, 275), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((365, 420), 'sqlmodel.Field', 'Field', ([], {'max_length': '(40)', 'sa_column_kwargs': "{'unique': True}"}), "(max_length=40, sa_column_kwargs={'unique': True})\n", (370, 420), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((487, 530), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""users.id"""'}), "(default=None, foreign_key='users.id')\n", (492, 530), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((551, 595), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""access_tokens"""'}), "(back_populates='access_tokens')\n", (563, 595), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((811, 832), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)'}), '(max_length=100)\n', (816, 832), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((934, 955), 'sqlmodel.Field', 'Field', ([], {'max_length': '(150)'}), '(max_length=150)\n', (939, 955), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1036, 1056), 'sqlmodel.Field', 'Field', ([], {'max_length': '(45)'}), '(max_length=45)\n', (1041, 1056), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1161, 1182), 'sqlmodel.Field', 'Field', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1166, 1182), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1344, 1381), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1349, 1381), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1464, 1485), 'sqlmodel.Field', 'Field', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1469, 1485), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1551, 1572), 'sqlmodel.Field', 'Field', 
([], {'max_length': '(255)'}), '(max_length=255)\n', (1556, 1572), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1668, 1689), 'sqlmodel.Field', 'Field', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1673, 1689), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1775, 1818), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""users.id"""'}), "(default=None, foreign_key='users.id')\n", (1780, 1818), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1839, 1878), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""api_keys"""'}), "(back_populates='api_keys')\n", (1851, 1878), False, 'from sqlmodel import SQLModel, Field, Relationship\n'), ((1988, 2005), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2003, 2005), False, 'from datetime import datetime\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Objects related to the running of Models and Generators."""
from bdb import BdbQuit
from datetime import datetime, timedelta
from time import time
from typing import Any, Dict, List, Mapping, Optional, Set, Tuple
from uuid import UUID
from psycopg import connect as pg3_connect
from pydantic.fields import Field, PrivateAttr
from pydasher import hasher
from sqlalchemy.future import Engine
from sqlmodel import Session, select
from tqdm import tqdm
import dbgen.exceptions as exceptions
from dbgen.core.base import Base, encoders
from dbgen.core.generator import Generator
from dbgen.core.metadata import (
GeneratorEntity,
GeneratorRunEntity,
GensToRun,
ModelEntity,
Repeats,
RunEntity,
Status,
)
from dbgen.core.model import Model
from dbgen.core.node.extract import Extract
from dbgen.core.node.query import BaseQuery
from dbgen.exceptions import DBgenExternalError, DBgenSkipException, RepeatException, SerializationError
from dbgen.utils.log import LogLevel
class RunConfig(Base):
    """User-facing knobs controlling how a Model/Generator run proceeds."""

    retry: bool = False
    include: Set[str] = Field(default_factory=set)
    exclude: Set[str] = Field(default_factory=set)
    start: Optional[str]
    until: Optional[str]
    batch_size: Optional[int]
    progress_bar: bool = True
    log_level: LogLevel = LogLevel.INFO

    def should_gen_run(self, generator: Generator) -> bool:
        """Check a generator against include/exclude to see if it should run."""
        markers = [generator.name, *generator.tags]
        # A marker passes when the include set is empty or contains it,
        # and the exclude set does not; any passing marker enables the run.
        return any(
            (not self.include or marker in self.include) and marker not in self.exclude
            for marker in markers
        )

    def get_invalid_markers(self, model: Model) -> Dict[str, List[str]]:
        """Check that all inputs to RunConfig are meaningful for the model."""
        invalid_marks: Dict[str, List[str]] = {}
        gen_names = model.gens().keys()
        # start/until must each name an existing generator
        for attr in ("start", "until"):
            marker: str = getattr(self, attr)
            if marker is not None and marker not in gen_names:
                invalid_marks[attr] = [marker]
        # include/exclude entries must each be a valid marker on the model
        for attr in ("include", "exclude"):
            marker_set: Set[str] = getattr(self, attr)
            bad_markers = [m for m in marker_set if not model.validate_marker(m)]
            if bad_markers:
                invalid_marks[attr] = bad_markers
        return invalid_marks
def update_run_by_id(run_id, status: Status, session: Session):
    """Fetch the RunEntity identified by *run_id*, set its status, and commit."""
    run_row = session.get(RunEntity, run_id)
    assert run_row, f"No run found with id {run_id}"
    run_row.status = status
    session.commit()
class RunInitializer(Base):
    """Initializes a run by syncing the database and getting the run_id."""

    def execute(self, engine: Engine, run_config: RunConfig) -> int:
        """Insert a RunEntity row, flip it to running, and return its id."""
        # Record the run in the metadatabase so downstream GeneratorRuns
        # can pick up its details.
        with Session(engine) as session:
            new_run = RunEntity(status=Status.initialized)
            session.add(new_run)
            session.commit()
            session.refresh(new_run)
            assert isinstance(new_run.id, int)
            new_run.status = Status.running
            session.commit()
            run_id = new_run.id
        return run_id
class BaseGeneratorRun(Base):
    """A lightweight wrapper for the Generator that grabs a specific Generator from metadatabase and runs it."""

    # Input hashes already seen (loaded from the Repeats table / prior batches)
    # and hashes newly seen during the current batch.
    _old_repeats: Set[UUID] = PrivateAttr(default_factory=set)
    _new_repeats: Set[UUID] = PrivateAttr(default_factory=set)

    def get_gen(self, meta_engine: Engine, *args, **kwargs) -> Generator:
        """Resolve the Generator to run; subclasses decide where it comes from."""
        raise NotImplementedError

    def execute(
        self,
        main_engine: Engine,
        meta_engine: Engine,
        run_id: Optional[int],
        run_config: Optional[RunConfig],
        ordering: Optional[int],
    ):
        """Run one generator end to end, recording progress in the metadatabase.

        Returns 0 on success, 2 on a DBgenExternalError; uncaught errors are
        re-raised after the run row is marked failed.
        """
        # Set default values for run_config if none provided
        if run_config is None:
            run_config = RunConfig()

        generator = self.get_gen(meta_engine=meta_engine)
        # Initialize the generator_row in the meta database
        meta_session = Session(meta_engine)
        gen_run = self._initialize_gen_run(
            generator=generator, session=meta_session, run_id=run_id, ordering=ordering
        )
        # Check if our run config excludes our generator
        if not run_config.should_gen_run(generator):
            self._logger.info(f'Excluding generator {generator.name!r}')
            gen_run.status = Status.excluded
            meta_session.commit()
            return
        # Start the Generator
        self._logger.info(f'Running generator {generator.name!r}...')
        gen_run.status = Status.running
        meta_session.commit()
        start = time()
        # Set the extractor; BaseQuery extracts need a live DB connection.
        self._logger.debug('Initializing extractor')
        extractor_connection = main_engine.connect()
        extract = generator.extract
        if isinstance(extract, BaseQuery):
            extract.set_extractor(connection=extractor_connection)
        else:
            extract.set_extractor()
        self._logger.debug('Fetching extractor length')
        row_count = extract.length(connection=extractor_connection)
        gen_run.inputs_extracted = row_count
        meta_session.commit()
        self._logger.debug('Fetching repeats')
        # Query the repeats table for input_hashes that match this generator's hash
        self._old_repeats = set(
            meta_session.exec(select(Repeats.input_hash).where(Repeats.generator_id == generator.uuid)).all()
        )
        # The batch_size is set either on the run_config or the generator
        batch_size = run_config.batch_size or generator.batch_size
        assert batch_size is None or batch_size > 0, f"Invalid batch size batch_size must be >0: {batch_size}"
        # Open raw connections for fast loading
        main_raw_connection = pg3_connect(str(main_engine.url))
        meta_raw_connection = meta_engine.raw_connection()
        # With no batch_size configured, batches are flushed only at end-of-input.
        batch_done = lambda x: x % batch_size == 0 if batch_size is not None else False
        # Start while loop to iterate through the nodes
        self._logger.info('Looping through extracted rows...')
        progress_bar = tqdm(
            total=row_count,
            position=1,
            leave=False,
            desc="Transforming...",
            disable=not run_config.progress_bar,
        )
        try:
            while True:
                gen_run.inputs_processed += 1
                row: Dict[str, Mapping[str, Any]] = {}
                try:
                    for node in generator._sort_graph():
                        output = node.run(row)
                        # Extract outputs need to be fed to our repeat checker and need to be checked for stop iterations
                        if isinstance(node, Extract):
                            # Flush the accumulated batch when full or at end-of-input.
                            if output is None or batch_done(gen_run.inputs_processed):
                                self._logger.debug('loading batch...')
                                self._load_repeats(meta_raw_connection, generator)
                                rows_inserted, rows_updated = self._load(main_raw_connection, generator)
                                gen_run.rows_inserted += rows_inserted
                                gen_run.rows_updated += rows_updated
                                meta_session.commit()
                                self._logger.debug('done loading batch.')
                                self._logger.debug(f'inserted {rows_inserted} rows.')
                                self._logger.debug(f'updated {rows_updated} rows.')
                            # if we are out of rows break out of while loop
                            if output is None:
                                raise StopIteration
                            is_repeat, input_hash = self._check_repeat(output, generator.uuid)
                            if not run_config.retry and is_repeat:
                                raise RepeatException()
                        row[node.hash] = output  # type: ignore
                    if not is_repeat:
                        self._new_repeats.add(input_hash)
                        gen_run.unique_inputs += 1
                    progress_bar.update()
                # Stop iteration is used to catch the empty extractor
                except StopIteration:
                    break
                # A repeated input from the extract will also cause a row to be skipped
                except RepeatException:
                    continue
                # Any node can raise a skip exception to skip the input before loading
                except DBgenSkipException as exc:
                    self._logger.debug(f"Skipped Row: {exc.msg}")
                    gen_run.inputs_skipped += 1
        # External errors are raised whenever a node fails due to internal logic
        except DBgenExternalError as e:
            msg = f"\n\nError when running generator {generator.name}\n"
            self._logger.error(msg)
            self._logger.error(f"\n{e}")
            gen_run.status = Status.failed
            gen_run.error = str(e)
            run = meta_session.get(RunEntity, run_id)
            assert run
            run.errors = run.errors + 1 if run.errors else 1
            meta_session.commit()
            meta_session.close()
            return 2
        except (
            Exception,
            KeyboardInterrupt,
            SystemExit,
            BdbQuit,
        ) as e:
            gen_run.status = Status.failed
            gen_run.error = (
                f"Uncaught Error encountered during running of generator {generator.name}: {e!r}"
            )
            update_run_by_id(run_id, Status.failed, meta_session)
            raise
        # Close all connections
        finally:
            gen_run.runtime = round(time() - start, 3)
            meta_session.commit()
            main_raw_connection.close()
            meta_raw_connection.close()
            extractor_connection.close()

        gen_run.status = Status.completed
        gen_run.runtime = round(time() - start, 3)
        self._logger.info(
            f"Finished running generator {generator.name}({generator.uuid}) in {gen_run.runtime}(s)."
        )
        self._logger.info(f"Loaded approximately {gen_run.rows_inserted} rows")
        meta_session.commit()
        meta_session.close()
        return 0

    def _initialize_gen_run(
        self,
        session: Session,
        generator: Generator,
        run_id: Optional[int],
        ordering: Optional[int],
    ) -> GeneratorRunEntity:
        """Insert (or reuse) the run row and create this generator's run record."""
        # if no run_id is provided create one and mark it as a testing run
        if run_id is None:
            run = RunEntity(status='testing')
            session.add(run)
            session.commit()
            session.refresh(run)
            ordering = 0
            run_id = run.id
        gen_row = generator._get_gen_row()
        session.merge(gen_row)
        session.commit()
        # Only query-backed extracts have SQL text worth recording.
        query = generator.extract.query if isinstance(generator.extract, BaseQuery) else ''
        gen_run = GeneratorRunEntity(
            run_id=run_id,
            generator_id=gen_row.id,
            status=Status.initialized,
            ordering=ordering,
            query=query,
        )
        session.add(gen_run)
        session.commit()
        session.refresh(gen_run)
        return gen_run

    def _load(self, connection, generator: Generator) -> Tuple[int, int]:
        """Flush every load node's buffered output; return (inserted, updated) counts."""
        rows_inserted = 0
        rows_updated = 0
        for load in generator._sorted_loads():
            if load.insert:
                rows_inserted += len(load._output)
            else:
                rows_updated += len(load._output)
            load.load(connection, gen_id=self.uuid)
        return (rows_inserted, rows_updated)

    def _load_repeats(self, connection, generator: Generator) -> None:
        """Persist the batch's new input hashes and fold them into the seen set."""
        rows = ((generator.uuid, input_hash) for input_hash in self._new_repeats)
        Repeats._quick_load(connection, rows, column_names=["generator_id", "input_hash"])
        self._old_repeats = self._old_repeats.union(self._new_repeats)
        self._new_repeats = set()

    def _check_repeat(self, extracted_dict: Dict[str, Any], generator_uuid: UUID) -> Tuple[bool, UUID]:
        """Hash an extracted row and report whether it has been seen before."""
        # Convert Row to a dictionary so we can hash it for repeat-checking
        input_hash = UUID(hasher((generator_uuid, extracted_dict), encoders=encoders))
        # If the input_hash has been seen and we don't have retry=True skip row
        is_repeat = input_hash in self._old_repeats or input_hash in self._new_repeats
        return (is_repeat, input_hash)
class GeneratorRun(BaseGeneratorRun):
    """Generator run that carries the Generator object itself (in-process use)."""

    generator: Generator

    def get_gen(self, meta_engine: Engine, *args, **kwargs):
        # The generator is already in memory; no metadatabase lookup needed.
        return self.generator
class RemoteGeneratorRun(BaseGeneratorRun):
    """Generator run that holds only a UUID and re-loads the Generator from the metadatabase."""

    generator_id: UUID

    def get_gen(self, meta_engine, *args, **kwargs):
        """Fetch and deserialize the generator whose id is ``self.generator_id``."""
        with Session(meta_engine) as sess:
            # Pull the serialized generator JSON stored for our UUID.
            gen_json = sess.exec(
                select(GeneratorEntity.gen_json).where(GeneratorEntity.id == self.generator_id)
            ).one()
            try:
                generator = Generator.deserialize(gen_json)
            except ModuleNotFoundError as exc:
                import os
                # A module referenced by the serialized generator is not importable here.
                raise SerializationError(
                    f"While deserializing generator id {self.generator_id} an unknown module was encountered. Are you using custom dbgen objects reachable by your python environment? Make sure any custom extractors or code can be found in your PYTHONPATH environment variable\nError: {exc}\nPYTHONPATH={os.environ.get('PYTHONPATH')}"
                ) from exc
            # Guard against the generator definition having drifted since serialization.
            if generator.uuid != self.generator_id:
                error = f"Deserialization Failed the generator hash has changed for generator named {generator.name}!\n{generator}\n{self.generator_id}"
                raise exceptions.SerializationError(error)
        return generator
class ModelRun(Base):
    """Orchestrates a full run: syncs the model, then executes each generator in order."""

    model: Model

    def get_gen_run(self, generator: Generator) -> BaseGeneratorRun:
        # In-process variant; RemoteModelRun overrides this to go through the DB.
        return GeneratorRun(generator=generator)

    def execute(
        self,
        main_engine: Engine,
        meta_engine: Engine,
        run_config: Optional[RunConfig] = None,
        nuke: bool = False,
        rerun_failed: bool = False,
    ) -> RunEntity:
        """Run all generators of the model and return the finished RunEntity row."""
        start = time()
        if run_config is None:
            run_config = RunConfig()
        # Sync the Database state with the model state
        self.model.sync(main_engine, meta_engine, nuke=nuke)

        # If doing last failed run query for gens to run and add to include
        if rerun_failed:
            with meta_engine.connect() as conn:
                result = conn.execute(select(GensToRun.__table__.c.name))
                for (gen_name,) in result:
                    run_config.include.add(gen_name)

        # Initialize the run
        run_init = RunInitializer()
        run_id = run_init.execute(meta_engine, run_config)
        sorted_generators = self.model._sort_graph()
        # Add generators to metadb, updating the model's last_run stamp.
        with Session(meta_engine) as meta_session:
            model_row = self.model._get_model_row()
            model_row.last_run = datetime.now()
            existing_model = meta_session.get(ModelEntity, model_row.id)
            if not existing_model:
                meta_session.merge(model_row)
            else:
                existing_model.last_run = datetime.now()
            meta_session.commit()

        # Apply start and until to exclude generators not between start_idx and until_idx
        if run_config.start or run_config.until:
            gen_names = [gen.name for gen in sorted_generators]
            start_idx = gen_names.index(run_config.start) if run_config.start else 0
            until_idx = gen_names.index(run_config.until) + 1 if run_config.until else len(gen_names)
            # Modify include to only include the gen_names that pass the test
            run_config.include = run_config.include.union(gen_names[start_idx:until_idx])
            print(f"Only running generators: {gen_names[start_idx:until_idx]} due to start/until")
            self._logger.debug(
                f"Only running generators: {gen_names[start_idx:until_idx]} due to start/until"
            )
        # Execute the generators in topological order, one progress-bar slot each.
        with tqdm(total=len(sorted_generators), position=0, disable=not run_config.progress_bar) as tq:
            for i, generator in enumerate(sorted_generators):
                tq.set_description(generator.name)
                gen_run = self.get_gen_run(generator)
                gen_run.execute(main_engine, meta_engine, run_id, run_config, ordering=i)
                tq.update()
        # Complete run: stamp final status and total runtime on the run row.
        with Session(meta_engine) as session:
            update_run_by_id(run_id, Status.completed, session)
            run = session.get(RunEntity, run_id)
            assert run
            run.runtime = timedelta(seconds=time() - start)
            session.commit()
            session.refresh(run)
        return run
class RemoteModelRun(ModelRun):
    """ModelRun variant whose generator runs re-load each Generator by UUID (see RemoteGeneratorRun)."""

    def get_gen_run(self, generator):
        # Hand over only the UUID; the Generator is deserialized from the metadatabase.
        return RemoteGeneratorRun(generator_id=generator.uuid)
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((1732, 1758), 'pydantic.fields.Field', 'Field', ([], {'default_factory': 'set'}), '(default_factory=set)\n', (1737, 1758), False, 'from pydantic.fields import Field, PrivateAttr\n'), ((1783, 1809), 'pydantic.fields.Field', 'Field', ([], {'default_factory': 'set'}), '(default_factory=set)\n', (1788, 1809), False, 'from pydantic.fields import Field, PrivateAttr\n'), ((4267, 4299), 'pydantic.fields.PrivateAttr', 'PrivateAttr', ([], {'default_factory': 'set'}), '(default_factory=set)\n', (4278, 4299), False, 'from pydantic.fields import Field, PrivateAttr\n'), ((4330, 4362), 'pydantic.fields.PrivateAttr', 'PrivateAttr', ([], {'default_factory': 'set'}), '(default_factory=set)\n', (4341, 4362), False, 'from pydantic.fields import Field, PrivateAttr\n'), ((4946, 4966), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (4953, 4966), False, 'from sqlmodel import Session, select\n'), ((5577, 5583), 'time.time', 'time', ([], {}), '()\n', (5581, 5583), False, 'from time import time\n'), ((7051, 7162), 'tqdm.tqdm', 'tqdm', ([], {'total': 'row_count', 'position': '(1)', 'leave': '(False)', 'desc': '"""Transforming..."""', 'disable': '(not run_config.progress_bar)'}), "(total=row_count, position=1, leave=False, desc='Transforming...',\n disable=not run_config.progress_bar)\n", (7055, 7162), False, 'from tqdm import tqdm\n'), ((12159, 12281), 'dbgen.core.metadata.GeneratorRunEntity', 'GeneratorRunEntity', ([], {'run_id': 'run_id', 'generator_id': 'gen_row.id', 'status': 'Status.initialized', 'ordering': 'ordering', 'query': 'query'}), '(run_id=run_id, generator_id=gen_row.id, status=Status.\n initialized, ordering=ordering, query=query)\n', (12177, 12281), False, 'from dbgen.core.metadata import GeneratorEntity, GeneratorRunEntity, GensToRun, ModelEntity, Repeats, RunEntity, Status\n'), ((13037, 13123), 'dbgen.core.metadata.Repeats._quick_load', 'Repeats._quick_load', (['connection', 'rows'], {'column_names': "['generator_id', 'input_hash']"}), 
"(connection, rows, column_names=['generator_id',\n 'input_hash'])\n", (13056, 13123), False, 'from dbgen.core.metadata import GeneratorEntity, GeneratorRunEntity, GensToRun, ModelEntity, Repeats, RunEntity, Status\n'), ((15403, 15409), 'time.time', 'time', ([], {}), '()\n', (15407, 15409), False, 'from time import time\n'), ((3754, 3769), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3761, 3769), False, 'from sqlmodel import Session, select\n'), ((3800, 3836), 'dbgen.core.metadata.RunEntity', 'RunEntity', ([], {'status': 'Status.initialized'}), '(status=Status.initialized)\n', (3809, 3836), False, 'from dbgen.core.metadata import GeneratorEntity, GeneratorRunEntity, GensToRun, ModelEntity, Repeats, RunEntity, Status\n'), ((11778, 11805), 'dbgen.core.metadata.RunEntity', 'RunEntity', ([], {'status': '"""testing"""'}), "(status='testing')\n", (11787, 11805), False, 'from dbgen.core.metadata import GeneratorEntity, GeneratorRunEntity, GensToRun, ModelEntity, Repeats, RunEntity, Status\n'), ((13432, 13491), 'pydasher.hasher', 'hasher', (['(generator_uuid, extracted_dict)'], {'encoders': 'encoders'}), '((generator_uuid, extracted_dict), encoders=encoders)\n', (13438, 13491), False, 'from pydasher import hasher\n'), ((13992, 14012), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (13999, 14012), False, 'from sqlmodel import Session, select\n'), ((16141, 16161), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (16148, 16161), False, 'from sqlmodel import Session, select\n'), ((16264, 16278), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (16276, 16278), False, 'from datetime import datetime, timedelta\n'), ((17768, 17788), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (17775, 17788), False, 'from sqlmodel import Session, select\n'), ((11151, 11157), 'time.time', 'time', ([], {}), '()\n', (11155, 11157), False, 'from time import time\n'), ((14217, 14248), 
'dbgen.core.generator.Generator.deserialize', 'Generator.deserialize', (['gen_json'], {}), '(gen_json)\n', (14238, 14248), False, 'from dbgen.core.generator import Generator\n'), ((14953, 14989), 'dbgen.exceptions.SerializationError', 'exceptions.SerializationError', (['error'], {}), '(error)\n', (14982, 14989), True, 'import dbgen.exceptions as exceptions\n'), ((16493, 16507), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (16505, 16507), False, 'from datetime import datetime, timedelta\n'), ((10902, 10908), 'time.time', 'time', ([], {}), '()\n', (10906, 10908), False, 'from time import time\n'), ((15783, 15817), 'sqlmodel.select', 'select', (['GensToRun.__table__.c.name'], {}), '(GensToRun.__table__.c.name)\n', (15789, 15817), False, 'from sqlmodel import Session, select\n'), ((17981, 17987), 'time.time', 'time', ([], {}), '()\n', (17985, 17987), False, 'from time import time\n'), ((6308, 6334), 'sqlmodel.select', 'select', (['Repeats.input_hash'], {}), '(Repeats.input_hash)\n', (6314, 6334), False, 'from sqlmodel import Session, select\n'), ((8828, 8845), 'dbgen.exceptions.RepeatException', 'RepeatException', ([], {}), '()\n', (8843, 8845), False, 'from dbgen.exceptions import DBgenExternalError, DBgenSkipException, RepeatException, SerializationError\n'), ((14072, 14104), 'sqlmodel.select', 'select', (['GeneratorEntity.gen_json'], {}), '(GeneratorEntity.gen_json)\n', (14078, 14104), False, 'from sqlmodel import Session, select\n'), ((14668, 14696), 'os.environ.get', 'os.environ.get', (['"""PYTHONPATH"""'], {}), "('PYTHONPATH')\n", (14682, 14696), False, 'import os\n')] |
from create_db import Student
from sqlmodel import Session, create_engine, select
sqlite_url = "sqlite:///school.db"
engine = create_engine(sqlite_url, echo=True)
# Read database
with Session(engine) as session:
statement = select(Student)
results = session.exec(statement)
for student in results:
print(student)
print("-" * 100)
# # Read one row
# with Session(engine) as session:
# statement = select(Student).where(Student.first_name=="Misal")
# results = session.exec(statement)
# for student in results:
# print(student)
| [
"sqlmodel.select",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((127, 163), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (140, 163), False, 'from sqlmodel import Session, create_engine, select\n'), ((186, 201), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (193, 201), False, 'from sqlmodel import Session, create_engine, select\n'), ((230, 245), 'sqlmodel.select', 'select', (['Student'], {}), '(Student)\n', (236, 245), False, 'from sqlmodel import Session, create_engine, select\n')] |
from sqlmodel import create_engine, Session
from sqlmodel.main import SQLModel
from core.config import settings
engine = create_engine(settings.SQLALCHEMY_DATABASE_URI, pool_pre_ping=True)
def init_db():
SQLModel.metadata.create_all(engine)
def get_session():
with Session(engine) as session:
yield session
| [
"sqlmodel.main.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((123, 190), 'sqlmodel.create_engine', 'create_engine', (['settings.SQLALCHEMY_DATABASE_URI'], {'pool_pre_ping': '(True)'}), '(settings.SQLALCHEMY_DATABASE_URI, pool_pre_ping=True)\n', (136, 190), False, 'from sqlmodel import create_engine, Session\n'), ((212, 248), 'sqlmodel.main.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (240, 248), False, 'from sqlmodel.main import SQLModel\n'), ((279, 294), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (286, 294), False, 'from sqlmodel import create_engine, Session\n')] |
from typing import Union
from fastapi import Request
from fastapi.param_functions import Depends, Header
from fastapi.security import OAuth2PasswordBearer
from sqlmodel import Session, select
from ..core.constants import AccessLevel, ContextEnum
from ..core.helpers.database import make_session
from ..core.helpers.exceptions import NotAuthorizedError
from ..core.models import Context, ParsedToken, User
from ..core.security import load_jwt_token
reusable_oauth2 = OAuth2PasswordBearer(tokenUrl="api/v1/auth/access-token")
def get_string_token(token: str = Header(None, alias="Authorization")) -> Union[None, str]:
if token:
_, _, token = token.partition(" ")
return token
def load_access_token(request: Request, token: str = Depends(reusable_oauth2)) -> ParsedToken:
return load_jwt_token(token)
async def get_current_user(
session: Session = Depends(make_session), token: ParsedToken = Depends(load_access_token)
) -> User:
user = session.exec(select(User).where(User.id == token.sub)).first()
if not user:
raise NotAuthorizedError("Usuário não localizado")
return user
async def login_required(current_user: User = Depends(get_current_user)) -> None:
if not current_user.is_active:
raise NotAuthorizedError("Sua licença expirou! Entre em contato com um administrador.")
async def validate_super_user(user: User = Depends(get_current_user)) -> None:
if not user.is_super_user:
raise NotAuthorizedError("Essa página só está disponível para administradores")
class ContextManager:
context: ContextEnum
def __init__(self, context: Union[str, ContextEnum]) -> None:
self.context = context if isinstance(context, ContextEnum) else ContextEnum(context)
def __call__(self, request: Request, token: str = Depends(get_string_token)) -> Context:
try:
parsed_token = load_jwt_token(token=token)
user_id = parsed_token.sub
access_level = parsed_token.access_level
authenticated = True
except NotAuthorizedError:
user_id = None
authenticated = False
access_level = AccessLevel.ANONIMOUS
return Context(
context=self.context,
user_id=user_id,
method=request.url.path,
authenticated=authenticated,
access_level=access_level,
)
context_manager = ContextManager(ContextEnum.API)
| [
"sqlmodel.select"
] | [((469, 526), 'fastapi.security.OAuth2PasswordBearer', 'OAuth2PasswordBearer', ([], {'tokenUrl': '"""api/v1/auth/access-token"""'}), "(tokenUrl='api/v1/auth/access-token')\n", (489, 526), False, 'from fastapi.security import OAuth2PasswordBearer\n'), ((563, 598), 'fastapi.param_functions.Header', 'Header', (['None'], {'alias': '"""Authorization"""'}), "(None, alias='Authorization')\n", (569, 598), False, 'from fastapi.param_functions import Depends, Header\n'), ((751, 775), 'fastapi.param_functions.Depends', 'Depends', (['reusable_oauth2'], {}), '(reusable_oauth2)\n', (758, 775), False, 'from fastapi.param_functions import Depends, Header\n'), ((879, 900), 'fastapi.param_functions.Depends', 'Depends', (['make_session'], {}), '(make_session)\n', (886, 900), False, 'from fastapi.param_functions import Depends, Header\n'), ((923, 949), 'fastapi.param_functions.Depends', 'Depends', (['load_access_token'], {}), '(load_access_token)\n', (930, 949), False, 'from fastapi.param_functions import Depends, Header\n'), ((1177, 1202), 'fastapi.param_functions.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (1184, 1202), False, 'from fastapi.param_functions import Depends, Header\n'), ((1389, 1414), 'fastapi.param_functions.Depends', 'Depends', (['get_current_user'], {}), '(get_current_user)\n', (1396, 1414), False, 'from fastapi.param_functions import Depends, Header\n'), ((1808, 1833), 'fastapi.param_functions.Depends', 'Depends', (['get_string_token'], {}), '(get_string_token)\n', (1815, 1833), False, 'from fastapi.param_functions import Depends, Header\n'), ((985, 997), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (991, 997), False, 'from sqlmodel import Session, select\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel, create_engine
class Student(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
first_name: str
last_name: str
email: str
# dob:
sqlite_url = "sqlite:///school.db"
engine = create_engine(sqlite_url)
SQLModel.metadata.create_all(engine)
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field",
"sqlmodel.create_engine"
] | [((294, 319), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {}), '(sqlite_url)\n', (307, 319), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((320, 356), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (348, 356), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((144, 181), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (149, 181), False, 'from sqlmodel import Field, SQLModel, create_engine\n')] |
import os
from dotenv import load_dotenv
from dateutil.parser import parse
from sqlmodel import Session, select, SQLModel, create_engine
import requests
from youtube.models import YouTube
load_dotenv()
YT_CHANNEL = os.environ["YT_CHANNEL"]
YOUTUBE_API_KEY = os.environ["YOUTUBE_API_KEY"]
DATABASE_URL = os.environ["DATABASE_URL"]
YOUTUBE_VIDEO = "youtube#video"
BASE_URL = (
"https://www.googleapis.com/youtube/v3/search?key={key}"
"&channelId={channel}&part=snippet,id&order=date&maxResults=20"
)
engine = create_engine(DATABASE_URL, echo=False)
def get_session():
with Session(engine) as session:
yield session
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def get_videos_from_channel(channel: str = YT_CHANNEL) -> list[dict]:
base_url = BASE_URL.format(key=YOUTUBE_API_KEY,
channel=channel)
next_page, url = None, base_url
videos = []
while True:
if next_page is not None:
url = base_url + f"&pageToken={next_page}"
response = requests.get(url).json()
for vid in response["items"]:
if vid["id"]["kind"] != "youtube#video":
continue
videos.append(vid)
if "nextPageToken" not in response:
break
next_page = response["nextPageToken"]
return videos
def insert_youtube_videos(session: Session, videos: list[dict]) -> None:
num_inserted = 0
for video in videos:
video_id = video["id"]["videoId"]
title = video["snippet"]["title"]
description = video["snippet"]["description"]
thumb = video["snippet"]["thumbnails"]["medium"]["url"]
published = video["snippet"]["publishTime"]
statement = select(YouTube).where(YouTube.video_id == video_id)
results = session.exec(statement)
if results.first() is not None:
continue
youtube = YouTube(
video_id=video_id,
title=title,
description=description,
thumb=thumb,
published=parse(published),
)
session.add(youtube)
num_inserted += 1
session.commit()
statement = select(YouTube)
results = session.exec(statement)
total_records = len(results.all())
print(f"Total records: {total_records} (newly inserted: {num_inserted})")
if __name__ == "__main__":
create_db_and_tables()
videos = get_videos_from_channel()
with Session(engine) as session:
insert_youtube_videos(session, videos)
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.select"
] | [((191, 204), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (202, 204), False, 'from dotenv import load_dotenv\n'), ((522, 561), 'sqlmodel.create_engine', 'create_engine', (['DATABASE_URL'], {'echo': '(False)'}), '(DATABASE_URL, echo=False)\n', (535, 561), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((676, 712), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (704, 712), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2206, 2221), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (2212, 2221), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((592, 607), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (599, 607), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2481, 2496), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2488, 2496), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((1062, 1079), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1074, 1079), False, 'import requests\n'), ((1760, 1775), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (1766, 1775), False, 'from sqlmodel import Session, select, SQLModel, create_engine\n'), ((2083, 2099), 'dateutil.parser.parse', 'parse', (['published'], {}), '(published)\n', (2088, 2099), False, 'from dateutil.parser import parse\n')] |
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryEntPlasticConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id_order: int
history_id_conference: int
ent_plastic_conference_id: int
state: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class EntPlasticConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
problem: str
question: str
ent_plan: str
surgeon_plant: str
post_plan: str
surgeon_post_plan: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class EntPlasticConferenceDoctorMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
ent_plastic_conference_id: int
doctor_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
@router.post("/history_ent_conference", response_model=HistoryEntPlasticConference)
async def create_history_ent_conference(history_ent_conference: HistoryEntPlasticConference, session: AsyncSession = Depends(get_session)):
session.add(history_ent_conference)
await session.commit()
await session.refresh(history_ent_conference)
return history_ent_conference
@router.post("/ent_conference", response_model=EntPlasticConference)
async def create_ent_conference(ent_conference: EntPlasticConference, session: AsyncSession = Depends(get_session)):
session.add(ent_conference)
await session.commit()
await session.refresh(ent_conference)
return ent_conference
@router.get("/history_ent_conference/{id}", response_model=HistoryEntPlasticConference)
async def get_history_ent_conference(id: int, session: AsyncSession = Depends(get_session)):
history_ent_conferences = await session.execute(select(HistoryEntPlasticConference).where(HistoryEntPlasticConference.id == id))
history_ent_conference = history_ent_conferences.scalars().first()
return history_ent_conference
@router.put("/history_ent_conference/{id}", response_model=HistoryEntPlasticConference)
async def update_history_ent_conference(id: int, session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_ent_conference/{id}")
async def delete_history_ent_conference(session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_ent_conference/{id}")
async def delete_ent_conference(session: AsyncSession = Depends(get_session)):
return None | [
"sqlmodel.Field"
] | [((256, 267), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (265, 267), False, 'from fastapi import APIRouter, Depends\n'), ((351, 388), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (356, 388), False, 'from sqlmodel import Field, SQLModel\n'), ((679, 716), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (684, 716), False, 'from sqlmodel import Field, SQLModel\n'), ((1031, 1068), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1036, 1068), False, 'from sqlmodel import Field, SQLModel\n'), ((1433, 1453), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1440, 1453), False, 'from fastapi import APIRouter, Depends\n'), ((1772, 1792), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1779, 1792), False, 'from fastapi import APIRouter, Depends\n'), ((2082, 2102), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2089, 2102), False, 'from fastapi import APIRouter, Depends\n'), ((2507, 2527), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2514, 2527), False, 'from fastapi import APIRouter, Depends\n'), ((2659, 2679), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2666, 2679), False, 'from fastapi import APIRouter, Depends\n'), ((2803, 2823), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2810, 2823), False, 'from fastapi import APIRouter, Depends\n'), ((2157, 2192), 'sqlalchemy.select', 'select', (['HistoryEntPlasticConference'], {}), '(HistoryEntPlasticConference)\n', (2163, 2192), False, 'from sqlalchemy import select\n')] |
from fastapi.exceptions import HTTPException
import pytest
from sqlmodel import select, Session
from sqlmodel.main import SQLModel
from sfm.routes.projects import crud
from tests.conftest import hashed_token1, hashed_token2
from sfm.models import Project, ProjectCreate, ProjectUpdate
# get_all
def test_get_all(db, session: Session):
"""test that the crud function works as expected"""
response = crud.get_all(db)
assert response is not None
assert response[0].name == "Test Project 1"
assert response[0].lead_name == "<NAME>"
assert response[0].lead_email == "<EMAIL>"
assert response[0].description == "A test project for testing"
assert response[0].location == "Strangeville"
assert response[0].repo_url == "github.com/starkEnterprises"
assert response[0].on_prem is False
assert response[0].project_auth_token_hashed == hashed_token1
assert response[1].name == "Test Project 2"
assert response[1].lead_name == "<NAME>"
assert response[1].lead_email == "<EMAIL>"
assert response[1].description == "A second test project for testing"
assert response[1].location == "Kohler"
assert response[1].repo_url == "github.com/pgaGolf"
assert response[1].on_prem is False
assert response[1].project_auth_token_hashed == hashed_token2
"""
Test that the function raises an error when there are
no projects in the table
"""
session.delete(session.get(Project, 1))
session.delete(session.get(Project, 2))
session.commit()
# SQLModel.metadata.drop_all(engine)
with pytest.raises(Exception) as ex:
crud.get_all(db)
assert ex.value.message == "Projects not found"
# get_by_id
def test_get_by_id(db):
"""test that the crud function works as expected"""
response = crud.get_by_id(db, project_id=1)
assert response is not None
assert response.name == "Test Project 1"
assert response.lead_name == "<NAME>"
assert response.lead_email == "<EMAIL>"
assert response.description == "A test project for testing"
assert response.location == "Strangeville"
assert response.repo_url == "github.com/starkEnterprises"
assert response.on_prem is False
assert response.project_auth_token_hashed == hashed_token1
"""
Testing that the crud function raises exception when the project
does with matching id does not exist in DB
"""
with pytest.raises(Exception) as ex:
crud.get_by_id(db, project_id=15)
assert ex.value.message == "Project not found"
# create_project
def test_create(db):
"""Testing that the project works as expected"""
project_data = ProjectCreate(
**{
"name": "Test Project 3",
"lead_name": "<NAME>",
"lead_email": "<EMAIL>",
"description": "A test project for testing creation",
"location": "Bikini Gotham",
"repo_url": "github.com/crustyEnterprises",
"on_prem": True,
}
)
response = crud.create_project(db, project_data, admin_key="admin_key")
assert len(response) == 2
assert response[0].name == "Test Project 3"
assert response[0].lead_name == "<NAME>"
assert response[0].lead_email == "<EMAIL>"
assert response[0].description == "A test project for testing creation"
assert response[0].location == "Bikini Gotham"
assert response[0].repo_url == "github.com/crustyEnterprises"
assert response[0].on_prem is True
assert response[0].project_auth_token_hashed is not None
"""
Testing that the function raises an error if the project name already
exists in the database
"""
with pytest.raises(Exception) as ex:
response = crud.create_project(db, project_data, admin_key="admin_key")
assert ex.value.message == "Database entry already exists"
"""
Testing that the project raises an exception when the admin_key
is incorrect
"""
with pytest.raises(Exception) as ex:
crud.create_project(db, project_data, admin_key="Shmadmin_key")
assert ex.value.message == "Credentials are incorrect"
# delete_project
def test_delete_project(db):
"""Testing that the crud function works as expected"""
response = crud.delete_project(db, project_id=1, admin_key="admin_key")
assert response is True
projects = db.exec(select(Project)).all()
for project in projects:
assert project.id != 1
"""
Testing that the crud function raises an exception when the project
with matching id does not exist in the database
"""
with pytest.raises(Exception) as ex:
crud.delete_project(db, project_id=15, admin_key="admin_key")
assert ex.value.message == "Project not found"
"""
Testing that the project raises an exception when the admin_key
is incorrect
"""
with pytest.raises(Exception) as ex:
crud.delete_project(db, project_id=1, admin_key="Shmadmin_key")
assert ex.value.message == "Credentials are incorrect"
# refresh_project_key
def test_refresh_project_key(db):
"""Testing that the crud function works as expected"""
response = crud.refresh_project_key(db, project_id=1, admin_key="admin_key")
assert response is not False
assert response != "Catalyst"
# testing that refreshing key did not change project details
project_test = db.get(Project, 1)
assert project_test.name == "Test Project 1"
assert project_test.lead_name == "<NAME>"
assert project_test.lead_email == "<EMAIL>"
assert project_test.description == "A test project for testing"
assert project_test.location == "Strangeville"
assert project_test.repo_url == "github.com/starkEnterprises"
assert project_test.on_prem is False
"""
Testing that the crud function raises an exception when the project
with matching id does not exist in the database
"""
with pytest.raises(Exception) as ex:
crud.refresh_project_key(db, project_id=15, admin_key="admin_key")
assert ex.value.message == "Project not found"
"""
Testing that the project raises an exception when the admin_key
is incorrect
"""
with pytest.raises(Exception) as ex:
crud.refresh_project_key(db, project_id=1, admin_key="Shmadmin_key")
assert ex.value.message == "Credentials are incorrect"
# update_project
def test_update_project(db):
"""Testing that the project works as expected"""
update_dict = {
"name": "New Test Project 1",
"lead_name": "Strong Squid",
"repo_url": "github.com/SquidEnterprises",
}
# vvv causes unset params to become default (exclude_unset didnt help)
updated_project = ProjectUpdate(**update_dict)
response = crud.update_project(
db, project_id=1, project_data=updated_project, admin_key="admin_key"
)
assert response is not None
assert response.name == "New Test Project 1"
assert response.lead_name == "Strong Squid"
assert response.lead_email == "<EMAIL>"
assert response.description == "A test project for testing"
assert response.location == "Strangeville"
assert response.repo_url == "github.com/SquidEnterprises"
assert response.on_prem is False
assert response.project_auth_token_hashed == hashed_token1
"""
Testing that the crud function raises an exception when the
project with matching id does not exist in the database
"""
with pytest.raises(Exception) as ex:
crud.update_project(
db,
project_id=15,
project_data="placeholder",
admin_key="admin_key",
)
assert ex.value.message == "Project not found"
"""
Testing that the project raises an exception when the admin_key
is incorrect
"""
with pytest.raises(Exception) as ex:
crud.update_project(
db,
project_id=1,
project_data="placeholder",
admin_key="Shmadmin_key",
)
assert ex.value.message == "Credentials are incorrect"
| [
"sqlmodel.select"
] | [((409, 425), 'sfm.routes.projects.crud.get_all', 'crud.get_all', (['db'], {}), '(db)\n', (421, 425), False, 'from sfm.routes.projects import crud\n'), ((1792, 1824), 'sfm.routes.projects.crud.get_by_id', 'crud.get_by_id', (['db'], {'project_id': '(1)'}), '(db, project_id=1)\n', (1806, 1824), False, 'from sfm.routes.projects import crud\n'), ((2644, 2891), 'sfm.models.ProjectCreate', 'ProjectCreate', ([], {}), "(**{'name': 'Test Project 3', 'lead_name': '<NAME>',\n 'lead_email': '<EMAIL>', 'description':\n 'A test project for testing creation', 'location': 'Bikini Gotham',\n 'repo_url': 'github.com/crustyEnterprises', 'on_prem': True})\n", (2657, 2891), False, 'from sfm.models import Project, ProjectCreate, ProjectUpdate\n'), ((3005, 3065), 'sfm.routes.projects.crud.create_project', 'crud.create_project', (['db', 'project_data'], {'admin_key': '"""admin_key"""'}), "(db, project_data, admin_key='admin_key')\n", (3024, 3065), False, 'from sfm.routes.projects import crud\n'), ((4236, 4296), 'sfm.routes.projects.crud.delete_project', 'crud.delete_project', (['db'], {'project_id': '(1)', 'admin_key': '"""admin_key"""'}), "(db, project_id=1, admin_key='admin_key')\n", (4255, 4296), False, 'from sfm.routes.projects import crud\n'), ((5148, 5213), 'sfm.routes.projects.crud.refresh_project_key', 'crud.refresh_project_key', (['db'], {'project_id': '(1)', 'admin_key': '"""admin_key"""'}), "(db, project_id=1, admin_key='admin_key')\n", (5172, 5213), False, 'from sfm.routes.projects import crud\n'), ((6699, 6727), 'sfm.models.ProjectUpdate', 'ProjectUpdate', ([], {}), '(**update_dict)\n', (6712, 6727), False, 'from sfm.models import Project, ProjectCreate, ProjectUpdate\n'), ((6743, 6837), 'sfm.routes.projects.crud.update_project', 'crud.update_project', (['db'], {'project_id': '(1)', 'project_data': 'updated_project', 'admin_key': '"""admin_key"""'}), "(db, project_id=1, project_data=updated_project,\n admin_key='admin_key')\n", (6762, 6837), False, 'from 
sfm.routes.projects import crud\n'), ((1570, 1594), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (1583, 1594), False, 'import pytest\n'), ((1610, 1626), 'sfm.routes.projects.crud.get_all', 'crud.get_all', (['db'], {}), '(db)\n', (1622, 1626), False, 'from sfm.routes.projects import crud\n'), ((2403, 2427), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (2416, 2427), False, 'import pytest\n'), ((2443, 2476), 'sfm.routes.projects.crud.get_by_id', 'crud.get_by_id', (['db'], {'project_id': '(15)'}), '(db, project_id=15)\n', (2457, 2476), False, 'from sfm.routes.projects import crud\n'), ((3657, 3681), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (3670, 3681), False, 'import pytest\n'), ((3708, 3768), 'sfm.routes.projects.crud.create_project', 'crud.create_project', (['db', 'project_data'], {'admin_key': '"""admin_key"""'}), "(db, project_data, admin_key='admin_key')\n", (3727, 3768), False, 'from sfm.routes.projects import crud\n'), ((3947, 3971), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (3960, 3971), False, 'import pytest\n'), ((3987, 4050), 'sfm.routes.projects.crud.create_project', 'crud.create_project', (['db', 'project_data'], {'admin_key': '"""Shmadmin_key"""'}), "(db, project_data, admin_key='Shmadmin_key')\n", (4006, 4050), False, 'from sfm.routes.projects import crud\n'), ((4581, 4605), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4594, 4605), False, 'import pytest\n'), ((4621, 4682), 'sfm.routes.projects.crud.delete_project', 'crud.delete_project', (['db'], {'project_id': '(15)', 'admin_key': '"""admin_key"""'}), "(db, project_id=15, admin_key='admin_key')\n", (4640, 4682), False, 'from sfm.routes.projects import crud\n'), ((4849, 4873), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (4862, 4873), False, 'import pytest\n'), ((4889, 4952), 'sfm.routes.projects.crud.delete_project', 
'crud.delete_project', (['db'], {'project_id': '(1)', 'admin_key': '"""Shmadmin_key"""'}), "(db, project_id=1, admin_key='Shmadmin_key')\n", (4908, 4952), False, 'from sfm.routes.projects import crud\n'), ((5904, 5928), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (5917, 5928), False, 'import pytest\n'), ((5944, 6010), 'sfm.routes.projects.crud.refresh_project_key', 'crud.refresh_project_key', (['db'], {'project_id': '(15)', 'admin_key': '"""admin_key"""'}), "(db, project_id=15, admin_key='admin_key')\n", (5968, 6010), False, 'from sfm.routes.projects import crud\n'), ((6177, 6201), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (6190, 6201), False, 'import pytest\n'), ((6217, 6285), 'sfm.routes.projects.crud.refresh_project_key', 'crud.refresh_project_key', (['db'], {'project_id': '(1)', 'admin_key': '"""Shmadmin_key"""'}), "(db, project_id=1, admin_key='Shmadmin_key')\n", (6241, 6285), False, 'from sfm.routes.projects import crud\n'), ((7445, 7469), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (7458, 7469), False, 'import pytest\n'), ((7485, 7578), 'sfm.routes.projects.crud.update_project', 'crud.update_project', (['db'], {'project_id': '(15)', 'project_data': '"""placeholder"""', 'admin_key': '"""admin_key"""'}), "(db, project_id=15, project_data='placeholder',\n admin_key='admin_key')\n", (7504, 7578), False, 'from sfm.routes.projects import crud\n'), ((7800, 7824), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (7813, 7824), False, 'import pytest\n'), ((7840, 7936), 'sfm.routes.projects.crud.update_project', 'crud.update_project', (['db'], {'project_id': '(1)', 'project_data': '"""placeholder"""', 'admin_key': '"""Shmadmin_key"""'}), "(db, project_id=1, project_data='placeholder', admin_key\n ='Shmadmin_key')\n", (7859, 7936), False, 'from sfm.routes.projects import crud\n'), ((4348, 4363), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', 
(4354, 4363), False, 'from sqlmodel import select, Session\n')] |
from typing import List, Union
from fastapi import APIRouter, Request
from fastapi.exceptions import HTTPException
from sqlmodel import Session, or_, select
from ..db import ActiveSession
from ..security import (
AdminUser,
AuthenticatedUser,
HashedPassword,
User,
UserCreate,
UserPasswordPatch,
UserResponse,
get_current_user,
)
router = APIRouter()
@router.get("/", response_model=List[UserResponse], dependencies=[AdminUser])
async def list_users(*, session: Session = ActiveSession):
users = session.exec(select(User)).all()
return users
@router.post("/", response_model=UserResponse, dependencies=[AdminUser])
async def create_user(*, session: Session = ActiveSession, user: UserCreate):
db_user = User.from_orm(user)
session.add(db_user)
session.commit()
session.refresh(db_user)
return db_user
@router.patch(
"/{user_id}/password/",
response_model=UserResponse,
dependencies=[AuthenticatedUser],
)
async def update_user_password(
*,
user_id: int,
session: Session = ActiveSession,
request: Request,
patch: UserPasswordPatch,
):
# Query the content
user = session.get(User, user_id)
if not user:
raise HTTPException(status_code=404, detail="User not found")
# Check the user can update the password
current_user: User = get_current_user(request=request)
if user.id != current_user.id and not current_user.superuser:
raise HTTPException(status_code=403, detail="You can't update this user password")
if not patch.password == patch.password_confirm:
raise HTTPException(status_code=400, detail="Passwords don't match")
# Update the password
user.password = HashedPassword(patch.password)
# Commit the session
session.commit()
session.refresh(user)
return user
@router.get("/me/", response_model=UserResponse)
async def my_profile(current_user: User = AuthenticatedUser):
return current_user
@router.get(
"/{user_id_or_username}/",
response_model=UserResponse,
dependencies=[AuthenticatedUser],
)
async def query_user(*, session: Session = ActiveSession, user_id_or_username: Union[str, int]):
user = session.query(User).where(
or_(
User.id == user_id_or_username,
User.username == user_id_or_username,
)
)
if not user:
raise HTTPException(status_code=404, detail="User not found")
return user.first()
@router.delete("/{user_id}/", dependencies=[AdminUser])
def delete_user(*, session: Session = ActiveSession, request: Request, user_id: int):
user = session.get(User, user_id)
if not user:
raise HTTPException(status_code=404, detail="Content not found")
# Check the user is not deleting himself
current_user = get_current_user(request=request)
if user.id == current_user.id:
raise HTTPException(status_code=403, detail="You can't delete yourself")
session.delete(user)
session.commit()
return {"ok": True}
| [
"sqlmodel.select",
"sqlmodel.or_"
] | [((374, 385), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (383, 385), False, 'from fastapi import APIRouter, Request\n'), ((1229, 1284), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""User not found"""'}), "(status_code=404, detail='User not found')\n", (1242, 1284), False, 'from fastapi.exceptions import HTTPException\n'), ((1470, 1546), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""You can\'t update this user password"""'}), '(status_code=403, detail="You can\'t update this user password")\n', (1483, 1546), False, 'from fastapi.exceptions import HTTPException\n'), ((1615, 1677), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""Passwords don\'t match"""'}), '(status_code=400, detail="Passwords don\'t match")\n', (1628, 1677), False, 'from fastapi.exceptions import HTTPException\n'), ((2244, 2317), 'sqlmodel.or_', 'or_', (['(User.id == user_id_or_username)', '(User.username == user_id_or_username)'], {}), '(User.id == user_id_or_username, User.username == user_id_or_username)\n', (2247, 2317), False, 'from sqlmodel import Session, or_, select\n'), ((2391, 2446), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""User not found"""'}), "(status_code=404, detail='User not found')\n", (2404, 2446), False, 'from fastapi.exceptions import HTTPException\n'), ((2684, 2742), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Content not found"""'}), "(status_code=404, detail='Content not found')\n", (2697, 2742), False, 'from fastapi.exceptions import HTTPException\n'), ((2890, 2956), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""You can\'t delete yourself"""'}), '(status_code=403, detail="You can\'t delete yourself")\n', (2903, 2956), False, 'from fastapi.exceptions import 
HTTPException\n'), ((550, 562), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (556, 562), False, 'from sqlmodel import Session, or_, select\n')] |
"""init
Revision ID: f9c634db477d
Revises:
Create Date: 2021-09-10 00:24:32.718895
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'f9c634db477d'  # unique ID of this migration
down_revision = None  # no parent revision ("Revises:" is empty — first migration)
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``song`` table and a non-unique index on each column."""
    op.create_table(
        'song',
        sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('artist', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column('id', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Same three indexes the autogenerate step produced, built in one loop.
    for column_name in ('artist', 'id', 'name'):
        op.create_index(
            op.f('ix_song_{}'.format(column_name)),
            'song',
            [column_name],
            unique=False,
        )
def downgrade():
    """Drop the ``song`` table and its indexes (exact reverse of upgrade)."""
    # Drop order mirrors the autogenerated commands: name, id, artist, table.
    for column_name in ('name', 'id', 'artist'):
        op.drop_index(op.f('ix_song_{}'.format(column_name)), table_name='song')
    op.drop_table('song')
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((1158, 1179), 'alembic.op.drop_table', 'op.drop_table', (['"""song"""'], {}), "('song')\n", (1171, 1179), False, 'from alembic import op\n'), ((599, 628), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (622, 628), True, 'import sqlalchemy as sa\n'), ((655, 677), 'alembic.op.f', 'op.f', (['"""ix_song_artist"""'], {}), "('ix_song_artist')\n", (659, 677), False, 'from alembic import op\n'), ((733, 751), 'alembic.op.f', 'op.f', (['"""ix_song_id"""'], {}), "('ix_song_id')\n", (737, 751), False, 'from alembic import op\n'), ((803, 823), 'alembic.op.f', 'op.f', (['"""ix_song_name"""'], {}), "('ix_song_name')\n", (807, 823), False, 'from alembic import op\n'), ((995, 1015), 'alembic.op.f', 'op.f', (['"""ix_song_name"""'], {}), "('ix_song_name')\n", (999, 1015), False, 'from alembic import op\n'), ((1054, 1072), 'alembic.op.f', 'op.f', (['"""ix_song_id"""'], {}), "('ix_song_id')\n", (1058, 1072), False, 'from alembic import op\n'), ((1111, 1133), 'alembic.op.f', 'op.f', (['"""ix_song_artist"""'], {}), "('ix_song_artist')\n", (1115, 1133), False, 'from alembic import op\n'), ((415, 449), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (447, 449), False, 'import sqlmodel\n'), ((492, 526), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (524, 526), False, 'import sqlmodel\n'), ((565, 577), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (575, 577), True, 'import sqlalchemy as sa\n')] |
import asyncio
import strawberry
from sqlmodel import Session, select
from strawberry.types import Info
from fastapi_server.models.user import User
from fastapi_server.routes.graph_ql.broadcaster import Broadcast
broadcast = Broadcast()
@strawberry.type
class UserSystemQuery:
    @strawberry.field
    def user_login(self, info: Info, email: str, password: str) -> str:
        """Validate an email/password pair against the user table."""
        # TODO Replace with actual password hash function
        session: Session = info.context['session']
        query = select(User).where(User.email == email, User.password_hashed == password)
        matched_user = session.exec(query).first()
        if matched_user is None:
            raise FileNotFoundError('Email and password do not match')
        return f'Login successful for {email}'
@strawberry.type
class UserSystemMutation:
    @strawberry.mutation
    def user_register(self, info: Info, username: str, email: str, password: str, password_repeated: str) -> bool:
        """Create a new user after checking password match and uniqueness."""
        if password != password_repeated:
            raise KeyError('not same pw')
        # TODO Replace with actual password hash function
        password_hashed = hash(password)
        session: Session = info.context['session']
        existing_username = session.exec(select(User).where(User.username == username)).first()
        if existing_username is not None:
            raise KeyError('username taken')
        existing_email = session.exec(select(User).where(User.email == email)).first()
        if existing_email is not None:
            raise KeyError('email taken')
        new_user = User(
            username=username,
            email=email,
            password_hashed=password_hashed,
            is_admin=False,
            is_disabled=False,
            is_verified=False,
        )
        session.add(new_user)
        session.commit()
        return True

    @strawberry.mutation
    def user_send_password_reset_email(self, info: Info, email: str) -> bool:
        """Check if email exists in db, send password reset with token."""

    @strawberry.mutation
    def user_reset_password(self, info: Info, token: str) -> bool:
        """Decypher email from token; if valid, reset the password and email a generated one."""

    @strawberry.mutation
    def user_check_logged_in(self, info: Info) -> bool:
        """Read request cookies to check the user is logged in and the token is valid."""
async def main():
    """Placeholder async entry point; currently does nothing."""
    pass
if __name__ == '__main__':
    asyncio.run(main())
| [
"sqlmodel.select"
] | [((228, 239), 'fastapi_server.routes.graph_ql.broadcaster.Broadcast', 'Broadcast', ([], {}), '()\n', (237, 239), False, 'from fastapi_server.routes.graph_ql.broadcaster import Broadcast\n'), ((1560, 1687), 'fastapi_server.models.user.User', 'User', ([], {'username': 'username', 'email': 'email', 'password_hashed': 'password_hashed', 'is_admin': '(False)', 'is_disabled': '(False)', 'is_verified': '(False)'}), '(username=username, email=email, password_hashed=password_hashed,\n is_admin=False, is_disabled=False, is_verified=False)\n', (1564, 1687), False, 'from fastapi_server.models.user import User\n'), ((505, 517), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (511, 517), False, 'from sqlmodel import Session, select\n'), ((1226, 1238), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1232, 1238), False, 'from sqlmodel import Session, select\n'), ((1400, 1412), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1406, 1412), False, 'from sqlmodel import Session, select\n')] |
"""
Functions for building queries, from nodes or SQL.
"""
import ast
import datetime
import operator
import re
from typing import Any, Callable, Dict, List, Literal, Optional, Set, Tuple, cast
from dateutil.parser import parse
from sqlalchemy.engine.url import make_url
from sqlalchemy.schema import Column as SqlaColumn
from sqlalchemy.sql.elements import BinaryExpression
from sqlalchemy.sql.expression import ClauseElement
from sqlmodel import Session, select
from sqloxide import parse_sql
from datajunction.constants import DEFAULT_DIMENSION_COLUMN
from datajunction.errors import DJError, DJInvalidInputException, ErrorCode
from datajunction.models.node import Node, NodeType
from datajunction.models.query import QueryCreate
from datajunction.sql.dag import (
get_database_for_nodes,
get_dimensions,
get_referenced_columns_from_sql,
get_referenced_columns_from_tree,
)
from datajunction.sql.parse import (
find_nodes_by_key,
find_nodes_by_key_with_parent,
get_expression_from_projection,
)
from datajunction.sql.transpile import get_query, get_select_for_node
from datajunction.typing import (
Expression,
Identifier,
Join,
Projection,
Relation,
Select,
)
from datajunction.utils import get_session
# Matches "<dimension><operator><value>", e.g. "core.users.age>=21".
FILTER_RE = re.compile(r"([\w\./_]+)(<=|<|>=|>|!=|=)(.+)")
# Operators accepted in filter expressions.
FilterOperator = Literal[">", ">=", "<", "<=", "=", "!="]
# Maps each operator token to the corresponding comparison callable.
COMPARISONS: Dict[FilterOperator, Callable[[Any, Any], bool]] = {
    ">": operator.gt,
    ">=": operator.ge,
    "<": operator.lt,
    "<=": operator.le,
    "=": operator.eq,
    "!=": operator.ne,
}
def parse_filter(filter_: str) -> Tuple[str, FilterOperator, str]:
    """
    Parse a filter expression into its ``(name, operator, value)`` parts.

    :param filter_: an expression such as ``"core.users.age>=21"``.
    :raises DJInvalidInputException: if the expression doesn't match
        ``FILTER_RE`` (dimension name, operator, value).
    """
    match = FILTER_RE.match(filter_)
    if not match:
        raise DJInvalidInputException(
            message=f'The filter "{filter_}" is invalid',
            errors=[
                DJError(
                    code=ErrorCode.INVALID_FILTER_PATTERN,
                    message=(
                        # fixed grammar: "follow by" -> "followed by"
                        f'The filter "{filter_}" is not a valid filter. Filters should '
                        "consist of a dimension name, followed by a valid operator "
                        "(<=|<|>=|>|!=|=), followed by a value. If the value is a "
                        "string or date/time it should be enclosed in single quotes."
                    ),
                    debug={"context": {"filter": filter_}},
                ),
            ],
        )
    name, operator_, value = match.groups()
    # narrow the regex-captured string to the Literal type for type-checkers
    operator_ = cast(FilterOperator, operator_)
    return name, operator_, value
def get_filter(columns: Dict[str, SqlaColumn], filter_: str) -> BinaryExpression:
    """
    Turn a string filter expression into a SQLAlchemy binary expression.
    """
    name, operator_, value = parse_filter(filter_)
    if name not in columns:
        raise Exception(f"Invalid column name: {name}")
    column = columns[name]
    comparison = COMPARISONS[operator_]
    is_temporal = column.type.python_type in (datetime.date, datetime.datetime)
    if is_temporal:
        # dates/datetimes are parsed leniently with dateutil
        try:
            value = str(parse(value))
        except Exception as ex:
            raise Exception(f"Invalid date or datetime value: {value}") from ex
    else:
        # everything else must be a Python literal (number, quoted string, ...)
        try:
            value = ast.literal_eval(value)
        except Exception as ex:
            raise Exception(f"Invalid value: {value}") from ex
    return comparison(column, value)
def get_dimensions_from_filters(filters: List[str]) -> Set[str]:
    """
    Return the set of dimension names referenced by the given filters.
    """
    dimension_names = set()
    for filter_ in filters:
        name, _, _ = parse_filter(filter_)
        dimension_names.add(name)
    return dimension_names
def get_query_for_node(  # pylint: disable=too-many-locals
    session: Session,
    node: Node,
    groupbys: List[str],
    filters: List[str],
    database_id: Optional[int] = None,
) -> QueryCreate:
    """
    Return a DJ QueryCreate object from a given node.

    :param session: DB session used to look up dimension nodes.
    :param node: the node whose query is being built.
    :param groupbys: dimension names (``node.column``) to group by.
    :param filters: filter expressions parseable by ``parse_filter``.
    :param database_id: optional database to force the query to run on.
    :raises Exception: if a requested dimension is not valid for ``node``.
    """
    # check that groupbys and filters are valid dimensions
    requested_dimensions = set(groupbys) | get_dimensions_from_filters(filters)
    valid_dimensions = set(get_dimensions(node))
    if not requested_dimensions <= valid_dimensions:
        invalid = sorted(requested_dimensions - valid_dimensions)
        plural = "s" if len(invalid) > 1 else ""
        raise Exception(f"Invalid dimension{plural}: {', '.join(invalid)}")
    # which columns are needed from the parents; this is used to determine the database
    # where the query will run
    referenced_columns = get_referenced_columns_from_sql(node.expression, node.parents)
    # extract all referenced dimensions so we can join the node with them
    dimensions: Dict[str, Node] = {}
    for dimension in requested_dimensions:
        name, column = dimension.rsplit(".", 1)
        if (
            name not in {parent.name for parent in node.parents}
            and name not in dimensions
        ):
            # load the dimension node and record which of its columns we need
            dimensions[name] = session.exec(select(Node).where(Node.name == name)).one()
            referenced_columns[name].add(column)
    # find database
    nodes = [node]
    nodes.extend(dimensions.values())
    database = get_database_for_nodes(session, nodes, referenced_columns, database_id)
    # base query
    node_select = get_select_for_node(node, database)
    source = node_select.froms[0]
    # join with dimensions
    for dimension in dimensions.values():
        subquery = get_select_for_node(
            dimension,
            database,
            referenced_columns[dimension.name],
        ).alias(dimension.name)
        condition = find_on_clause(node, source, dimension, subquery)
        node_select = node_select.select_from(source.join(subquery, condition))
    # map fully qualified names ("table.column") to SQLAlchemy columns
    columns = {
        f"{column.table.name}.{column.name}": column
        for from_ in node_select.froms
        for column in from_.columns
    }
    # filter
    node_select = node_select.filter(
        *[get_filter(columns, filter_) for filter_ in filters]
    )
    # groupby
    node_select = node_select.group_by(*[columns[groupby] for groupby in groupbys])
    # add groupbys to projection as well
    for groupby in groupbys:
        node_select.append_column(columns[groupby])
    # render the query as a literal SQL string for the target dialect
    dialect = make_url(database.URI).get_dialect()
    sql = str(
        node_select.compile(dialect=dialect(), compile_kwargs={"literal_binds": True}),
    )
    return QueryCreate(database_id=database.id, submitted_query=sql)
def find_on_clause(
    node: Node,
    node_select: Select,
    dimension: Node,
    subquery: Select,
) -> ClauseElement:
    """
    Build the ON condition that joins a node select to a dimension subquery.
    """
    for parent in node.parents:
        for column in parent.columns:
            if column.dimension != dimension:
                continue
            dimension_column = column.dimension_column or DEFAULT_DIMENSION_COLUMN
            left = node_select.columns[column.name]  # type: ignore
            right = subquery.columns[dimension_column]  # type: ignore
            return left == right
    raise Exception(f"Node {node.name} has no columns with dimension {dimension.name}")
# pylint: disable=too-many-branches, too-many-locals, too-many-statements
def get_query_for_sql(sql: str) -> QueryCreate:
    """
    Return a query given a SQL expression querying the repo.

    Eg::

        SELECT
            "core.users.gender", "core.num_comments"
        FROM metrics
        WHERE "core.comments.user_id" > 1
        GROUP BY
            "core.users.gender"

    This works by converting metrics (``core.num_comments``) into their selection
    definition (``COUNT(*)``), updating the sources to include the metrics parents
    (including joining with dimensions), and updating column references in the
    ``WHERE``, ``GROUP BY``, etc.

    The sqloxide parse tree is mutated in place before being re-rendered.
    """
    session = next(get_session())
    tree = parse_sql(sql, dialect="ansi")
    query_select = tree[0]["Query"]["body"]["Select"]
    # fetch all metric and dimension nodes
    nodes = {node.name: node for node in session.exec(select(Node))}
    # extract metrics and dimensions from the query
    identifiers = {
        identifier["value"]
        for identifier in find_nodes_by_key(query_select, "Identifier")
    }
    for compound_identifier in find_nodes_by_key(query_select, "CompoundIdentifier"):
        identifiers.add(".".join(part["value"] for part in compound_identifier))
    # classify each identifier as a requested metric or a dimension column
    requested_metrics: Set[Node] = set()
    requested_dimensions: Set[Node] = set()
    for identifier in identifiers:
        if identifier in nodes and nodes[identifier].type == NodeType.METRIC:
            requested_metrics.add(nodes[identifier])
            continue
        if "." not in identifier:
            raise Exception(f"Invalid dimension: {identifier}")
        name, column = identifier.rsplit(".", 1)
        if name not in nodes:
            raise Exception(f"Invalid dimension: {identifier}")
        node = nodes[name]
        if node.type != NodeType.DIMENSION:
            continue
        column_names = {column.name for column in node.columns}
        if column not in column_names:
            raise Exception(f"Invalid dimension: {identifier}")
        requested_dimensions.add(node)
    # update ``FROM``/``JOIN`` based on requests metrics and dimensions
    parents = process_metrics(query_select, requested_metrics, requested_dimensions)
    # update metric references in the projection
    projection = query_select["projection"]
    metric_names = {metric.name for metric in requested_metrics}
    for expression, parent in list(
        find_nodes_by_key_with_parent(projection, "UnnamedExpr"),
    ):
        replace_metric_identifier(expression, parent, nodes, metric_names)
    for expression_with_alias, parent in list(
        find_nodes_by_key_with_parent(projection, "ExprWithAlias"),
    ):
        alias = expression_with_alias["alias"]
        expression = expression_with_alias["expr"]
        replace_metric_identifier(expression, parent, nodes, metric_names, alias)
    # update metric references in ``ORDER BY`` and ``HAVING``
    for part in (tree[0]["Query"]["order_by"], query_select["having"]):
        for identifier, parent in list(
            find_nodes_by_key_with_parent(part, "Identifier"),
        ):
            name = identifier["value"]
            if name not in nodes:
                if "." in name and name.rsplit(".", 1)[0] in nodes:
                    # not a metric, but a column reference
                    continue
                raise Exception(f"Invalid identifier: {name}")
            node = nodes[name]
            # inline the metric's defining expression in place of its name
            metric_tree = parse_sql(node.expression, dialect="ansi")
            parent.pop("Identifier")
            parent.update(
                get_expression_from_projection(
                    metric_tree[0]["Query"]["body"]["Select"]["projection"][0],
                ),
            )
    # replace dimension references
    parts = [
        query_select[part]
        for part in ("projection", "selection", "group_by", "sort_by")
    ]
    parts.append(tree[0]["Query"]["order_by"])
    for part in parts:
        for identifier, parent in list(
            find_nodes_by_key_with_parent(part, "Identifier"),
        ):
            if identifier["value"] not in identifiers:
                continue
            # rewrite "node.column" into a quoted compound identifier
            name, column = identifier["value"].rsplit(".", 1)
            parent.pop("Identifier")
            parent["CompoundIdentifier"] = [
                {"quote_style": '"', "value": name},
                {"quote_style": '"', "value": column},
            ]
    parents.extend(requested_dimensions)
    referenced_columns = get_referenced_columns_from_tree(tree, parents)
    database = get_database_for_nodes(session, parents, referenced_columns)
    dialect = make_url(database.URI).get_dialect()
    query = get_query(None, parents, tree, database, dialect.name)
    sql = str(query.compile(dialect=dialect(), compile_kwargs={"literal_binds": True}))
    return QueryCreate(database_id=database.id, submitted_query=sql)
def process_metrics(
    query_select: Select,
    requested_metrics: Set[Node],
    requested_dimensions: Set[Node],
) -> List[Node]:
    """
    Process metrics in the query, updating ``FROM`` and adding any joins.

    Modifies ``query_select`` in place and returns the parent nodes the
    rewritten query selects from.

    :raises Exception: if no metric is requested but multiple dimensions are,
        or if the requested metrics don't share a common set of parents.
    """
    if not requested_metrics:
        if not requested_dimensions:
            return []
        if len(requested_dimensions) > 1:
            raise Exception(
                "Cannot query from multiple dimensions when no metric is specified",
            )
        # dimension-only query: select straight from the dimension table
        dimension = list(requested_dimensions)[0]
        query_select["from"] = [
            {
                "joins": [],
                "relation": {
                    "Table": {
                        "alias": None,
                        "args": [],
                        "name": [{"quote_style": '"', "value": dimension.name}],
                        "with_hints": [],
                    },
                },
            },
        ]
        return [dimension]
    # check that there is a metric with the superset of parents from all metrics
    main_metric = sorted(
        requested_metrics,
        key=lambda metric: (len(metric.parents), metric.name),
        reverse=True,
    )[0]
    for metric in requested_metrics:
        if not set(metric.parents) <= set(main_metric.parents):
            raise Exception(
                f"Metrics {metric.name} and {main_metric.name} have non-shared parents",
            )
    # replace the ``from`` part of the parse tree with the ``from`` from the metric that
    # has all the necessary parents
    metric_tree = parse_sql(main_metric.expression, dialect="ansi")
    query_select["from"] = metric_tree[0]["Query"]["body"]["Select"]["from"]
    # join to any dimensions
    for dimension in requested_dimensions:
        query_select["from"][0]["joins"].append(
            get_dimension_join(main_metric, dimension),
        )
    return main_metric.parents
def replace_metric_identifier(
    expression: Expression,
    parent: Projection,
    nodes: Dict[str, Node],
    metric_names: Set[str],
    alias: Optional[Identifier] = None,
) -> None:
    """
    Replace any metric reference in ``expression`` with its SQL.

    Mutates ``expression`` and ``parent`` in place; no-op when the expression
    is not an identifier or does not name a known metric.

    :param alias: optional alias to use; defaults to the metric node's name.
    """
    if "CompoundIdentifier" in expression:
        # normalize a compound identifier into a single dotted identifier
        expression["Identifier"] = {
            "quote_style": None,
            "value": ".".join(
                part["value"] for part in expression.pop("CompoundIdentifier")
            ),
        }
    elif "Identifier" not in expression:
        return
    name = expression["Identifier"]["value"]
    if name not in metric_names:
        return
    # if this is an unnamed expression remove the key from the parent, since it will be
    # replaced with an expression with alias
    parent.pop("UnnamedExpr", None)
    node = nodes[name]
    metric_tree = parse_sql(node.expression, dialect="ansi")
    parent["ExprWithAlias"] = {
        "alias": alias or {"quote_style": '"', "value": node.name},
        "expr": get_expression_from_projection(
            metric_tree[0]["Query"]["body"]["Select"]["projection"][0],
        ),
    }
def get_join_columns(node: Node, dimension: Node) -> Tuple[str, str, str]:
    """
    Find the (parent name, node column, dimension column) triple that links
    a node to a dimension.
    """
    for parent in node.parents:
        for column in parent.columns:
            if column.dimension != dimension:
                continue
            dimension_column = column.dimension_column or DEFAULT_DIMENSION_COLUMN
            return parent.name, column.name, dimension_column
    raise Exception(f"Node {node.name} has no columns with dimension {dimension.name}")
def get_dimension_join(node: Node, dimension: Node) -> Join:
    """
    Build the sqloxide JOIN fragment between a node and a dimension.
    """
    parent_name, node_column, dimension_column = get_join_columns(node, dimension)
    # the two sides of the ON condition, as compound identifiers
    left_operand = {
        "CompoundIdentifier": [
            {"quote_style": None, "value": parent_name},
            {"quote_style": None, "value": node_column},
        ],
    }
    right_operand = {
        "CompoundIdentifier": [
            {"quote_style": None, "value": dimension.name},
            {"quote_style": None, "value": dimension_column},
        ],
    }
    relation: Relation = {
        "Table": {
            "alias": None,
            "args": [],
            "name": [{"quote_style": None, "value": dimension.name}],
            "with_hints": [],
        },
    }
    return {
        "join_operator": {
            "Inner": {
                "On": {
                    "BinaryOp": {
                        "left": left_operand,
                        "op": "Eq",
                        "right": right_operand,
                    },
                },
                "Using": [],
            },
        },
        "relation": relation,
    }
| [
"sqlmodel.select"
] | [((1275, 1322), 're.compile', 're.compile', (['"""([\\\\w\\\\./_]+)(<=|<|>=|>|!=|=)(.+)"""'], {}), "('([\\\\w\\\\./_]+)(<=|<|>=|>|!=|=)(.+)')\n", (1285, 1322), False, 'import re\n'), ((2526, 2557), 'typing.cast', 'cast', (['FilterOperator', 'operator_'], {}), '(FilterOperator, operator_)\n', (2530, 2557), False, 'from typing import Any, Callable, Dict, List, Literal, Optional, Set, Tuple, cast\n'), ((4418, 4480), 'datajunction.sql.dag.get_referenced_columns_from_sql', 'get_referenced_columns_from_sql', (['node.expression', 'node.parents'], {}), '(node.expression, node.parents)\n', (4449, 4480), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((5043, 5114), 'datajunction.sql.dag.get_database_for_nodes', 'get_database_for_nodes', (['session', 'nodes', 'referenced_columns', 'database_id'], {}), '(session, nodes, referenced_columns, database_id)\n', (5065, 5114), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((5151, 5186), 'datajunction.sql.transpile.get_select_for_node', 'get_select_for_node', (['node', 'database'], {}), '(node, database)\n', (5170, 5186), False, 'from datajunction.sql.transpile import get_query, get_select_for_node\n'), ((6273, 6330), 'datajunction.models.query.QueryCreate', 'QueryCreate', ([], {'database_id': 'database.id', 'submitted_query': 'sql'}), '(database_id=database.id, submitted_query=sql)\n', (6284, 6330), False, 'from datajunction.models.query import QueryCreate\n'), ((7722, 7752), 'sqloxide.parse_sql', 'parse_sql', (['sql'], {'dialect': '"""ansi"""'}), "(sql, dialect='ansi')\n", (7731, 7752), False, 'from sqloxide import parse_sql\n'), ((8130, 8183), 'datajunction.sql.parse.find_nodes_by_key', 'find_nodes_by_key', (['query_select', '"""CompoundIdentifier"""'], {}), "(query_select, 'CompoundIdentifier')\n", (8147, 8183), False, 
'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((11505, 11552), 'datajunction.sql.dag.get_referenced_columns_from_tree', 'get_referenced_columns_from_tree', (['tree', 'parents'], {}), '(tree, parents)\n', (11537, 11552), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((11569, 11629), 'datajunction.sql.dag.get_database_for_nodes', 'get_database_for_nodes', (['session', 'parents', 'referenced_columns'], {}), '(session, parents, referenced_columns)\n', (11591, 11629), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, get_referenced_columns_from_tree\n'), ((11693, 11747), 'datajunction.sql.transpile.get_query', 'get_query', (['None', 'parents', 'tree', 'database', 'dialect.name'], {}), '(None, parents, tree, database, dialect.name)\n', (11702, 11747), False, 'from datajunction.sql.transpile import get_query, get_select_for_node\n'), ((11848, 11905), 'datajunction.models.query.QueryCreate', 'QueryCreate', ([], {'database_id': 'database.id', 'submitted_query': 'sql'}), '(database_id=database.id, submitted_query=sql)\n', (11859, 11905), False, 'from datajunction.models.query import QueryCreate\n'), ((13543, 13592), 'sqloxide.parse_sql', 'parse_sql', (['main_metric.expression'], {'dialect': '"""ansi"""'}), "(main_metric.expression, dialect='ansi')\n", (13552, 13592), False, 'from sqloxide import parse_sql\n'), ((14773, 14815), 'sqloxide.parse_sql', 'parse_sql', (['node.expression'], {'dialect': '"""ansi"""'}), "(node.expression, dialect='ansi')\n", (14782, 14815), False, 'from sqloxide import parse_sql\n'), ((4007, 4027), 'datajunction.sql.dag.get_dimensions', 'get_dimensions', (['node'], {}), '(node)\n', (4021, 4027), False, 'from datajunction.sql.dag import get_database_for_nodes, get_dimensions, get_referenced_columns_from_sql, 
get_referenced_columns_from_tree\n'), ((7695, 7708), 'datajunction.utils.get_session', 'get_session', ([], {}), '()\n', (7706, 7708), False, 'from datajunction.utils import get_session\n'), ((9444, 9500), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['projection', '"""UnnamedExpr"""'], {}), "(projection, 'UnnamedExpr')\n", (9473, 9500), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((9639, 9697), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['projection', '"""ExprWithAlias"""'], {}), "(projection, 'ExprWithAlias')\n", (9668, 9697), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((14932, 15027), 'datajunction.sql.parse.get_expression_from_projection', 'get_expression_from_projection', (["metric_tree[0]['Query']['body']['Select']['projection'][0]"], {}), "(metric_tree[0]['Query']['body']['Select'][\n 'projection'][0])\n", (14962, 15027), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((3203, 3226), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (3219, 3226), False, 'import ast\n'), ((6115, 6137), 'sqlalchemy.engine.url.make_url', 'make_url', (['database.URI'], {}), '(database.URI)\n', (6123, 6137), False, 'from sqlalchemy.engine.url import make_url\n'), ((8047, 8092), 'datajunction.sql.parse.find_nodes_by_key', 'find_nodes_by_key', (['query_select', '"""Identifier"""'], {}), "(query_select, 'Identifier')\n", (8064, 8092), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((10073, 10122), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['part', '"""Identifier"""'], {}), "(part, 'Identifier')\n", 
(10102, 10122), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((10485, 10527), 'sqloxide.parse_sql', 'parse_sql', (['node.expression'], {'dialect': '"""ansi"""'}), "(node.expression, dialect='ansi')\n", (10494, 10527), False, 'from sqloxide import parse_sql\n'), ((11029, 11078), 'datajunction.sql.parse.find_nodes_by_key_with_parent', 'find_nodes_by_key_with_parent', (['part', '"""Identifier"""'], {}), "(part, 'Identifier')\n", (11058, 11078), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((11644, 11666), 'sqlalchemy.engine.url.make_url', 'make_url', (['database.URI'], {}), '(database.URI)\n', (11652, 11666), False, 'from sqlalchemy.engine.url import make_url\n'), ((3034, 3046), 'dateutil.parser.parse', 'parse', (['value'], {}), '(value)\n', (3039, 3046), False, 'from dateutil.parser import parse\n'), ((5310, 5386), 'datajunction.sql.transpile.get_select_for_node', 'get_select_for_node', (['dimension', 'database', 'referenced_columns[dimension.name]'], {}), '(dimension, database, referenced_columns[dimension.name])\n', (5329, 5386), False, 'from datajunction.sql.transpile import get_query, get_select_for_node\n'), ((7905, 7917), 'sqlmodel.select', 'select', (['Node'], {}), '(Node)\n', (7911, 7917), False, 'from sqlmodel import Session, select\n'), ((10608, 10703), 'datajunction.sql.parse.get_expression_from_projection', 'get_expression_from_projection', (["metric_tree[0]['Query']['body']['Select']['projection'][0]"], {}), "(metric_tree[0]['Query']['body']['Select'][\n 'projection'][0])\n", (10638, 10703), False, 'from datajunction.sql.parse import find_nodes_by_key, find_nodes_by_key_with_parent, get_expression_from_projection\n'), ((1898, 2240), 'datajunction.errors.DJError', 'DJError', ([], {'code': 'ErrorCode.INVALID_FILTER_PATTERN', 'message': 'f"""The filter "{filter_}" is not a valid filter. 
Filters should consist of a dimension name, follow by a valid operator (<=|<|>=|>|!=|=), followed by a value. If the value is a string or date/time it should be enclosed in single quotes."""', 'debug': "{'context': {'filter': filter_}}"}), '(code=ErrorCode.INVALID_FILTER_PATTERN, message=\n f\'The filter "{filter_}" is not a valid filter. Filters should consist of a dimension name, follow by a valid operator (<=|<|>=|>|!=|=), followed by a value. If the value is a string or date/time it should be enclosed in single quotes.\'\n , debug={\'context\': {\'filter\': filter_}})\n', (1905, 2240), False, 'from datajunction.errors import DJError, DJInvalidInputException, ErrorCode\n'), ((4856, 4868), 'sqlmodel.select', 'select', (['Node'], {}), '(Node)\n', (4862, 4868), False, 'from sqlmodel import Session, select\n')] |
from collections import deque
from time import sleep
import pytest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import WebDriverException
from sqlmodel import Session, select
from youtube.db import engine
from youtube.models import YouTube
@pytest.fixture(scope="session")
def driver():
    """Session-wide Chrome driver pointed at the local FastAPI app."""
    browser = webdriver.Chrome()
    try:
        browser.get("http://localhost:8000/")
        yield browser
    except WebDriverException:
        raise RuntimeError("Cannot get to localhost:8000, did you start FastAPI?")
    finally:
        browser.quit()
@pytest.fixture(scope="session")
def scroll_to_end(driver):
    """Keep scrolling until the table row count stops changing."""
    window = 5
    recent_counts = deque(maxlen=window)
    iterations = 0
    while True:
        # scroll the last visible subhead cell into view to trigger more rows
        target = driver.find_elements_by_class_name("mui--text-subhead")[-1]
        webdriver.ActionChains(driver).move_to_element(target).perform()
        iterations += 1
        recent_counts.append(len(driver.find_elements_by_tag_name("tr")))
        if iterations > window and recent_counts.count(recent_counts[-1]) == len(recent_counts):
            print("num rows stable, seems I hit the end of infinite scroll")
            break
def test_number_of_rows_on_page(session, driver, scroll_to_end):
    """Page shows exactly one table row per YouTube record in the database."""
    # NOTE(review): the ``session`` fixture parameter is immediately shadowed
    # by the local ``with`` binding below — confirm whether the fixture is needed.
    with Session(engine) as session:
        db_row_count = len(session.exec(select(YouTube)).all())
        page_row_count = len(driver.find_elements_by_tag_name("tbody tr"))
        assert page_row_count == db_row_count
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((311, 342), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (325, 342), False, 'import pytest\n'), ((616, 647), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (630, 647), False, 'import pytest\n'), ((370, 388), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (386, 388), False, 'from selenium import webdriver\n'), ((709, 733), 'collections.deque', 'deque', ([], {'maxlen': 'cache_size'}), '(maxlen=cache_size)\n', (714, 733), False, 'from collections import deque\n'), ((862, 892), 'selenium.webdriver.ActionChains', 'webdriver.ActionChains', (['driver'], {}), '(driver)\n', (884, 892), False, 'from selenium import webdriver\n'), ((1283, 1298), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1290, 1298), False, 'from sqlmodel import Session, select\n'), ((1358, 1373), 'sqlmodel.select', 'select', (['YouTube'], {}), '(YouTube)\n', (1364, 1373), False, 'from sqlmodel import Session, select\n')] |
from typing import List
from uuid import UUID
import inject
from sqlmodel import Session, select
from src.core.events import EventDescription
from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError
from src.core.models import Client, Context, CreateClient, QueryClient, UpdateClient
from src.core.services import Streamer
@inject.params(streamer=Streamer)
def create(session: Session, schema: CreateClient, context: Context, streamer: Streamer) -> Client:
    """Persist a new client after checking email/phone uniqueness."""
    email_clash = session.exec(select(Client).where(Client.email == schema.email)).first()
    if email_clash:
        raise DatabaseError("Já existe um cliente cadastrado com o email: %s" % schema.email)
    phone_clash = session.exec(select(Client).where(Client.phone == schema.phone)).first()
    if phone_clash:
        raise DatabaseError("Já existe um cliente cadastrado com o telefone: %s" % schema.phone)
    client = Client(**schema.dict(), owner_id=context.user_id)
    session.add(client)
    session.commit()
    # broadcast the creation so other services can react
    streamer.send_event(EventDescription.CREATE_USER, context=context, client=client.dict())
    return client
def get_all(session: Session, query_schema: QueryClient, context: Context) -> List[Client]:
    """List clients; non-superusers only see clients they own."""
    filters = []
    if not context.user_is_super_user:
        filters.append(Client.owner_id == context.user_id)
    return session.exec(select(Client).where(*filters)).all()
def get_by_id(session: Session, client_id: UUID, context: Context) -> Client:
    """Fetch a single client by id, enforcing ownership for non-superusers.

    Raises:
        NotFoundError: when no client has the given id.
        NotAuthorizedError: when the caller neither owns the client nor is a superuser.
    """
    statement = select(Client).where(Client.id == client_id)
    client = session.exec(statement).first()

    if not client:
        raise NotFoundError(f"Não foi possível localizar o Client com ID: {client_id}")

    if not context.user_is_super_user and client.owner_id != context.user_id:
        raise NotAuthorizedError(f"Você não possui permissão para consultar os dados do cliente com ID {client_id}!")

    return client
@inject.params(streamer=Streamer)
def delete(session: Session, client_id: UUID, context: Context, streamer: Streamer) -> Client:
    """Delete a client (owner or superuser only) and emit a deletion event.

    Raises:
        NotFoundError: when no client has the given id.
        NotAuthorizedError: when the caller neither owns the client nor is a superuser.
    """
    statement = select(Client).where(Client.id == client_id)
    client = session.exec(statement).first()

    if not client:
        raise NotFoundError(f"Não foi possível localizar o Cliente com ID: {client_id}")

    if not context.user_is_super_user and client.owner_id != context.user_id:
        raise NotAuthorizedError(f"Você não possui permissão para excluir o Cliente com ID: {client_id}")

    session.delete(client)
    session.commit()

    streamer.send_event(description=EventDescription.DELETE_CLIENT, context=context, client=client.dict())

    return client
@inject.params(streamer=Streamer)
def update(session: Session, data: UpdateClient, context: Context, streamer: Streamer) -> Client:
    """Apply the fields of ``data`` onto an existing client and emit an update event.

    Attributes of ``data`` that do not map onto a real table column are ignored.

    Raises:
        NotFoundError: when no client has ``data.id``.
        NotAuthorizedError: when the caller neither owns the client nor is a superuser.
    """
    client = session.exec(select(Client).where(Client.id == data.id)).first()

    if not client:
        raise NotFoundError(f"Não foi possível localizar o Client com ID: {data.id}")

    if not context.user_is_super_user and client.owner_id != context.user_id:
        # NOTE(review): the message says "excluir" (delete) although this is an
        # update — confirm the intended wording.
        raise NotAuthorizedError(f"Você não possui permissão para excluir o Cliente com ID: {data.id}")

    # Only copy attributes that correspond to actual table columns.
    valid_columns = client.__table__.columns.keys()
    for field_name, field_value in data:
        if field_name in valid_columns:
            setattr(client, field_name, field_value)

    session.add(client)
    session.commit()

    streamer.send_event(
        description=EventDescription.UPDATE_CLIENT,
        context=context,
        data={"client_data": client.dict(), "update_schema": data.dict()},
    )

    return client
| [
"sqlmodel.select"
] | [((360, 392), 'inject.params', 'inject.params', ([], {'streamer': 'Streamer'}), '(streamer=Streamer)\n', (373, 392), False, 'import inject\n'), ((1819, 1851), 'inject.params', 'inject.params', ([], {'streamer': 'Streamer'}), '(streamer=Streamer)\n', (1832, 1851), False, 'import inject\n'), ((2500, 2532), 'inject.params', 'inject.params', ([], {'streamer': 'Streamer'}), '(streamer=Streamer)\n', (2513, 2532), False, 'import inject\n'), ((588, 667), 'src.core.helpers.exceptions.DatabaseError', 'DatabaseError', (["('Já existe um cliente cadastrado com o email: %s' % schema.email)"], {}), "('Já existe um cliente cadastrado com o email: %s' % schema.email)\n", (601, 667), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((764, 851), 'src.core.helpers.exceptions.DatabaseError', 'DatabaseError', (["('Já existe um cliente cadastrado com o telefone: %s' % schema.phone)"], {}), "('Já existe um cliente cadastrado com o telefone: %s' % schema\n .phone)\n", (777, 851), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((1526, 1599), 'src.core.helpers.exceptions.NotFoundError', 'NotFoundError', (['f"""Não foi possível localizar o Client com ID: {client_id}"""'], {}), "(f'Não foi possível localizar o Client com ID: {client_id}')\n", (1539, 1599), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((1693, 1806), 'src.core.helpers.exceptions.NotAuthorizedError', 'NotAuthorizedError', (['f"""Você não possui permissão para consultar os dados do cliente com ID {client_id}!"""'], {}), "(\n f'Você não possui permissão para consultar os dados do cliente com ID {client_id}!'\n )\n", (1711, 1806), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((2061, 2135), 'src.core.helpers.exceptions.NotFoundError', 'NotFoundError', (['f"""Não foi possível localizar o Cliente com ID: 
{client_id}"""'], {}), "(f'Não foi possível localizar o Cliente com ID: {client_id}')\n", (2074, 2135), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((2229, 2325), 'src.core.helpers.exceptions.NotAuthorizedError', 'NotAuthorizedError', (['f"""Você não possui permissão para excluir o Cliente com ID: {client_id}"""'], {}), "(\n f'Você não possui permissão para excluir o Cliente com ID: {client_id}')\n", (2247, 2325), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((2743, 2814), 'src.core.helpers.exceptions.NotFoundError', 'NotFoundError', (['f"""Não foi possível localizar o Client com ID: {data.id}"""'], {}), "(f'Não foi possível localizar o Client com ID: {data.id}')\n", (2756, 2814), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((2908, 3002), 'src.core.helpers.exceptions.NotAuthorizedError', 'NotAuthorizedError', (['f"""Você não possui permissão para excluir o Cliente com ID: {data.id}"""'], {}), "(\n f'Você não possui permissão para excluir o Cliente com ID: {data.id}')\n", (2926, 3002), False, 'from src.core.helpers.exceptions import DatabaseError, NotAuthorizedError, NotFoundError\n'), ((513, 527), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (519, 527), False, 'from sqlmodel import Session, select\n'), ((689, 703), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (695, 703), False, 'from sqlmodel import Session, select\n'), ((1297, 1311), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (1303, 1311), False, 'from sqlmodel import Session, select\n'), ((1438, 1452), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (1444, 1452), False, 'from sqlmodel import Session, select\n'), ((1973, 1987), 'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (1979, 1987), False, 'from sqlmodel import Session, select\n'), ((2657, 2671), 
'sqlmodel.select', 'select', (['Client'], {}), '(Client)\n', (2663, 2671), False, 'from sqlmodel import Session, select\n')] |
from datetime import datetime, date , time
from typing import Optional, List
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryAppointmentOr(SQLModel, table=True):
    """History row recording a state transition of an OR appointment.

    NOTE(review): "OR" presumably means operating room (ICU/tool fields on the
    related AppointmentOr model suggest so) — confirm.
    """

    id: Optional[int] = Field(default=None, primary_key=True)
    history_id: int
    appointment_or_id: int
    # The state transition captured by this entry.
    state_from: str
    state_to: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class HistoryAppointmentOrMap(SQLModel, table=True):
    """Mapping row linking an OR appointment to a procedure."""

    id: Optional[int] = Field(default=None, primary_key=True)
    appointment_or_id: int
    procedure_id: int
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class AppointmentOr(SQLModel, table=True):
    """An OR appointment with its schedule, logistics flags and notes."""

    id: Optional[int] = Field(default=None, primary_key=True)
    state: str
    # Key dates of the appointment lifecycle.
    date_procedure: date
    date_admission: date
    date_confirmation: date
    # Time window of the procedure itself.
    time_start: time
    time_end: time
    disease: str
    detail: str
    # Logistics flags and their free-text notes.
    is_special_tool_required: bool
    is_icu_reserved: bool
    is_date_recorded: bool
    tool_note: str
    icu_note: str
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class AppointmentOrReschedule(SQLModel, table=True):
    """Records a reschedule of an OR appointment (date_from -> date_to)."""

    id: Optional[int] = Field(default=None, primary_key=True)
    appointment_or_id: int
    date_from: date
    date_to: date
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
class AppointmentOrDoctorMap(SQLModel, table=True):
    """Mapping row assigning a doctor to an OR appointment."""

    id: Optional[int] = Field(default=None, primary_key=True)
    appointment_or_id: int
    doctor_id: int
    created_at: datetime
    updated_at: datetime
    created_by: int
    updated_by: Optional[int] = None
@router.post("/history_appointment_or", response_model=HistoryAppointmentOr)
async def create_appointment_or(history_appointment_or: HistoryAppointmentOr, session: AsyncSession = Depends(get_session)):
    # NOTE(review): despite its name this handler creates a *history* record;
    # the name looks swapped with create_history_appointment_or — confirm.
    session.add(history_appointment_or)
    await session.commit()
    # Refresh so DB-generated fields (e.g. the primary key) appear in the response.
    await session.refresh(history_appointment_or)
    return history_appointment_or
@router.post("/appointment_or", response_model=AppointmentOr)
async def create_history_appointment_or(appointment_or: AppointmentOr, session: AsyncSession = Depends(get_session)):
    # NOTE(review): despite its name this handler creates an *appointment*
    # record; the name looks swapped with create_appointment_or — confirm.
    session.add(appointment_or)
    await session.commit()
    # Refresh so DB-generated fields (e.g. the primary key) appear in the response.
    await session.refresh(appointment_or)
    return appointment_or
@router.get("/history_appointment_or/{id}", response_model=HistoryAppointmentOr)
async def get_history_appointment_or(id: int, session: AsyncSession = Depends(get_session)):
    """Return the history record with the given primary key (null if absent)."""
    statement = select(HistoryAppointmentOr).where(HistoryAppointmentOr.id == id)
    result = await session.execute(statement)
    return result.scalars().first()
@router.get("/history_appointment_or/user/{user_id}", response_model=HistoryAppointmentOr)
async def get_history_appointment_or_user(user_id: int, session: AsyncSession = Depends(get_session)):
    """Return the first history record created by the given user (null if none)."""
    statement = select(HistoryAppointmentOr).where(HistoryAppointmentOr.created_by == user_id)
    result = await session.execute(statement)
    return result.scalars().first()
@router.put("/history_appointment_or/{id}", response_model=HistoryAppointmentOr)
async def update_history_appointment_or(id: int, session: AsyncSession = Depends(get_session)):
    # TODO: unimplemented stub — always responds with null.
    return None
@router.delete("/history_appointment_or/{id}")
async def delete_history_appointment_or(id: int, session: AsyncSession = Depends(get_session)):
    """Delete endpoint stub for a history record.

    Bug fix: the route declares an ``{id}`` path parameter but the handler did
    not accept it, so the target record could never be identified. The
    parameter is now declared; the deletion itself is still unimplemented.
    """
    # TODO: implement deletion of the HistoryAppointmentOr row with this id.
    return None
@router.delete("/appointment_or/{id}")
async def delete_appointment_or(id: int, session: AsyncSession = Depends(get_session)):
    """Delete endpoint stub for an appointment record.

    Bug fix: the route declares an ``{id}`` path parameter but the handler did
    not accept it, so the target record could never be identified. The
    parameter is now declared; the deletion itself is still unimplemented.
    """
    # TODO: implement deletion of the AppointmentOr row with this id.
    return None
# @router.get("/history_appointment_or/history/{patient_id}", response_model=HistoryTravelReimburse)
# async def get_history_travel_reimburse_patient(patient_id: int, session: AsyncSession = Depends(get_session)):
# history_id = await session.execute(select(HistoryTravelReimburse.id).where(HistoryTravelReimburse.patient_id == patient_id))
# history_travel_reimburses = await session.execute(select(HistoryTravelReimburse).where(HistoryTravelReimburse.history_id == history_id))
# history_travel_reimburse = history_travel_reimburses.scalars().first()
# return history_travel_reimburse
| [
"sqlmodel.Field"
] | [((274, 285), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (283, 285), False, 'from fastapi import APIRouter, Depends\n'), ((362, 399), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (367, 399), False, 'from sqlmodel import Field, SQLModel\n'), ((671, 708), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (676, 708), False, 'from sqlmodel import Field, SQLModel\n'), ((934, 971), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (939, 971), False, 'from sqlmodel import Field, SQLModel\n'), ((1449, 1486), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1454, 1486), False, 'from sqlmodel import Field, SQLModel\n'), ((1737, 1774), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1742, 1774), False, 'from sqlmodel import Field, SQLModel\n'), ((2109, 2129), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2116, 2129), False, 'from fastapi import APIRouter, Depends\n'), ((2442, 2462), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2449, 2462), False, 'from fastapi import APIRouter, Depends\n'), ((2745, 2765), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2752, 2765), False, 'from fastapi import APIRouter, Depends\n'), ((3165, 3185), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3172, 3185), False, 'from fastapi import APIRouter, Depends\n'), ((3581, 3601), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3588, 3601), False, 'from fastapi import APIRouter, Depends\n'), ((3733, 3753), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3740, 3753), False, 'from fastapi import 
APIRouter, Depends\n'), ((3869, 3889), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3876, 3889), False, 'from fastapi import APIRouter, Depends\n'), ((2820, 2848), 'sqlalchemy.select', 'select', (['HistoryAppointmentOr'], {}), '(HistoryAppointmentOr)\n', (2826, 2848), False, 'from sqlalchemy import select\n'), ((3240, 3268), 'sqlalchemy.select', 'select', (['HistoryAppointmentOr'], {}), '(HistoryAppointmentOr)\n', (3246, 3268), False, 'from sqlalchemy import select\n')] |
from sqlmodel import SQLModel, create_engine
from aot_quotes.common.db.quotes import Quotes
engine = create_engine("sqlite:///database.db", echo=True)
def migrate():
    # Create every table registered on SQLModel.metadata (importing Quotes
    # above registers its table); existing tables are left untouched.
    SQLModel.metadata.create_all(engine)
if __name__ == "__main__":
migrate()
__all__ = ["Quotes"]
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine"
] | [((103, 152), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {'echo': '(True)'}), "('sqlite:///database.db', echo=True)\n", (116, 152), False, 'from sqlmodel import SQLModel, create_engine\n'), ((174, 210), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (202, 210), False, 'from sqlmodel import SQLModel, create_engine\n')] |
"""
Models for columns.
"""
from typing import TYPE_CHECKING, Optional, TypedDict
from sqlalchemy.sql.schema import Column as SqlaColumn
from sqlalchemy.types import Enum
from sqlmodel import Field, Relationship, SQLModel
from datajunction.typing import ColumnType
if TYPE_CHECKING:
from datajunction.models.node import Node
class ColumnYAML(TypedDict, total=False):
    """
    Schema of a column in the YAML file.

    ``total=False``: both keys are optional in the serialized form.
    """

    type: str
    dimension: str
class Column(SQLModel, table=True):  # type: ignore
    """
    A column.

    Columns can be physical (associated with ``Table`` objects) or abstract (associated
    with ``Node`` objects).
    """

    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    # Persisted as a SQLAlchemy Enum so only known ColumnType values are stored.
    type: ColumnType = Field(sa_column=SqlaColumn(Enum(ColumnType)))

    # Optional link to the Node that serves as this column's dimension.
    dimension_id: Optional[int] = Field(default=None, foreign_key="node.id")
    dimension: "Node" = Relationship()
    dimension_column: Optional[str] = None

    def to_yaml(self) -> ColumnYAML:
        """
        Serialize the column for YAML.
        """
        return {
            "type": self.type.value,  # pylint: disable=no-member
        }

    def __hash__(self) -> int:
        return hash(self.id)
| [
"sqlmodel.Relationship",
"sqlmodel.Field"
] | [((694, 731), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (699, 731), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((850, 892), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""node.id"""'}), "(default=None, foreign_key='node.id')\n", (855, 892), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((917, 931), 'sqlmodel.Relationship', 'Relationship', ([], {}), '()\n', (929, 931), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((796, 812), 'sqlalchemy.types.Enum', 'Enum', (['ColumnType'], {}), '(ColumnType)\n', (800, 812), False, 'from sqlalchemy.types import Enum\n')] |
import os
from venv import create
from sqlmodel import SQLModel, create_engine
from .base_variables import APPNAME,DEBUG
from utils import print_warning
import sys
# Resolve the database connection string at import time.
if DEBUG:
    # Development: assemble a Postgres URL from the individual PG* variables,
    # falling back to local defaults.
    PG_HOST: str = os.getenv("PGHOST", "localhost")
    PG_USER: str = os.getenv("PGUSER", "postgres")
    PG_PASSWORD: str = os.getenv("PGPASSWORD", "<PASSWORD>")
    PG_PORT: str = os.getenv("PGPORT", "5432")
    PG_DATABASE:str = os.getenv("PGDATABASE", APPNAME)
    # An explicit DATABASE_URL still takes precedence over the assembled one.
    DATABASE_URL:str = os.getenv("DATABASE_URL", f"postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}")
else:
    # Production: DATABASE_URL must be supplied explicitly; abort otherwise.
    DATABASE_URL = os.getenv("DATABASE_URL", "")
    if not DATABASE_URL:
        print_warning(
            (
                "You are not setting the DATABASE_URL in your environment!",
            )
        )
        sys.exit("[ERROR] Default DATABASE_URL is not set\n")

# NOTE(review): this prints the full connection string (password included) to
# stdout on every import — consider removing or masking it.
print(DATABASE_URL)
dbengine = create_engine(DATABASE_URL)
| [
"sqlmodel.create_engine"
] | [((847, 874), 'sqlmodel.create_engine', 'create_engine', (['DATABASE_URL'], {}), '(DATABASE_URL)\n', (860, 874), False, 'from sqlmodel import SQLModel, create_engine\n'), ((193, 225), 'os.getenv', 'os.getenv', (['"""PGHOST"""', '"""localhost"""'], {}), "('PGHOST', 'localhost')\n", (202, 225), False, 'import os\n'), ((245, 276), 'os.getenv', 'os.getenv', (['"""PGUSER"""', '"""postgres"""'], {}), "('PGUSER', 'postgres')\n", (254, 276), False, 'import os\n'), ((300, 337), 'os.getenv', 'os.getenv', (['"""PGPASSWORD"""', '"""<PASSWORD>"""'], {}), "('PGPASSWORD', '<PASSWORD>')\n", (309, 337), False, 'import os\n'), ((357, 384), 'os.getenv', 'os.getenv', (['"""PGPORT"""', '"""5432"""'], {}), "('PGPORT', '5432')\n", (366, 384), False, 'import os\n'), ((407, 439), 'os.getenv', 'os.getenv', (['"""PGDATABASE"""', 'APPNAME'], {}), "('PGDATABASE', APPNAME)\n", (416, 439), False, 'import os\n'), ((463, 567), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""', 'f"""postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}"""'], {}), "('DATABASE_URL',\n f'postgresql://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}')\n", (472, 567), False, 'import os\n'), ((589, 618), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""', '""""""'], {}), "('DATABASE_URL', '')\n", (598, 618), False, 'import os\n'), ((644, 721), 'utils.print_warning', 'print_warning', (["('You are not setting the DATABASE_URL in your environment!',)"], {}), "(('You are not setting the DATABASE_URL in your environment!',))\n", (657, 721), False, 'from utils import print_warning\n'), ((762, 815), 'sys.exit', 'sys.exit', (['"""[ERROR] Default DATABASE_URL is not set\n"""'], {}), "('[ERROR] Default DATABASE_URL is not set\\n')\n", (770, 815), False, 'import sys\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel
from datetime import datetime, date
class Rate(SQLModel, table=True):
    """Rate (amount in EUR) that a user charges a client over a validity window."""

    id: Optional[int] = Field(default=None, primary_key=True)
    # Owning user and the client this rate applies to.
    user_id: int = Field(foreign_key="app_db.appuser.id")
    client_id: int = Field(foreign_key="app_db.client.id")
    # Validity window of the rate.
    valid_from: date
    valid_to: date
    amount: float  # currency: EUR
    created_at: datetime
    updated_at: datetime
    # Deactivation flag (soft disable rather than delete).
    is_active: bool

    # This app's tables live in the dedicated "app_db" schema.
    __table_args__ = {"schema": "app_db"}
| [
"sqlmodel.Field"
] | [((201, 238), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (206, 238), False, 'from sqlmodel import Field, SQLModel\n'), ((258, 296), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.appuser.id"""'}), "(foreign_key='app_db.appuser.id')\n", (263, 296), False, 'from sqlmodel import Field, SQLModel\n'), ((318, 355), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.client.id"""'}), "(foreign_key='app_db.client.id')\n", (323, 355), False, 'from sqlmodel import Field, SQLModel\n')] |
"""user latest record
Revision ID: 7c2a518ed636
Revises: fe2df95ee61a
Create Date: 2021-11-27 15:37:54.561822
"""
import sqlalchemy as sa
import sqlmodel
import sqlmodel.sql.sqltypes
from alembic import op
# revision identifiers, used by Alembic.
revision = "7c2a518ed636"
down_revision = "fe2df95ee61a"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the user_latest_records table (plus indexes) and add the
    problem_configs.commit_message column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "user_latest_records",
        # Timestamps default to the current UTC time on the server side.
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("TIMEZONE('utc', CURRENT_TIMESTAMP)"),
            nullable=False,
        ),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("problem_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("problem_set_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("record_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        # Rows are removed together with their referenced entities.
        sa.ForeignKeyConstraint(["problem_id"], ["problems.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["problem_set_id"], ["problem_sets.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["record_id"], ["records.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # At most one "latest" row per (user, problem, problem set, record).
        sa.UniqueConstraint("user_id", "problem_id", "problem_set_id", "record_id"),
    )
    op.create_index(
        op.f("ix_user_latest_records_created_at"),
        "user_latest_records",
        ["created_at"],
        unique=False,
    )
    op.create_index(
        op.f("ix_user_latest_records_id"), "user_latest_records", ["id"], unique=False
    )
    op.create_index(
        op.f("ix_user_latest_records_updated_at"),
        "user_latest_records",
        ["updated_at"],
        unique=False,
    )
    # Existing rows get an empty commit message via the server default.
    op.add_column(
        "problem_configs",
        sa.Column(
            "commit_message",
            sqlmodel.sql.sqltypes.AutoString(),
            server_default="",
            nullable=False,
        ),
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Reverse upgrade(): drop commit_message, then the indexes and table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("problem_configs", "commit_message")
    op.drop_index(
        op.f("ix_user_latest_records_updated_at"), table_name="user_latest_records"
    )
    op.drop_index(op.f("ix_user_latest_records_id"), table_name="user_latest_records")
    op.drop_index(
        op.f("ix_user_latest_records_created_at"), table_name="user_latest_records"
    )
    op.drop_table("user_latest_records")
    # ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString",
"sqlmodel.sql.sqltypes.GUID"
] | [((2542, 2593), 'alembic.op.drop_column', 'op.drop_column', (['"""problem_configs"""', '"""commit_message"""'], {}), "('problem_configs', 'commit_message')\n", (2556, 2593), False, 'from alembic import op\n'), ((2903, 2939), 'alembic.op.drop_table', 'op.drop_table', (['"""user_latest_records"""'], {}), "('user_latest_records')\n", (2916, 2939), False, 'from alembic import op\n'), ((1279, 1355), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['problem_id']", "['problems.id']"], {'ondelete': '"""CASCADE"""'}), "(['problem_id'], ['problems.id'], ondelete='CASCADE')\n", (1302, 1355), True, 'import sqlalchemy as sa\n'), ((1365, 1454), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['problem_set_id']", "['problem_sets.id']"], {'ondelete': '"""CASCADE"""'}), "(['problem_set_id'], ['problem_sets.id'], ondelete=\n 'CASCADE')\n", (1388, 1454), True, 'import sqlalchemy as sa\n'), ((1481, 1555), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['record_id']", "['records.id']"], {'ondelete': '"""CASCADE"""'}), "(['record_id'], ['records.id'], ondelete='CASCADE')\n", (1504, 1555), True, 'import sqlalchemy as sa\n'), ((1565, 1635), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['users.id']"], {'ondelete': '"""CASCADE"""'}), "(['user_id'], ['users.id'], ondelete='CASCADE')\n", (1588, 1635), True, 'import sqlalchemy as sa\n'), ((1645, 1674), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1668, 1674), True, 'import sqlalchemy as sa\n'), ((1684, 1759), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""user_id"""', '"""problem_id"""', '"""problem_set_id"""', '"""record_id"""'], {}), "('user_id', 'problem_id', 'problem_set_id', 'record_id')\n", (1703, 1759), True, 'import sqlalchemy as sa\n'), ((1796, 1837), 'alembic.op.f', 'op.f', (['"""ix_user_latest_records_created_at"""'], {}), "('ix_user_latest_records_created_at')\n", (1800, 1837), 
False, 'from alembic import op\n'), ((1951, 1984), 'alembic.op.f', 'op.f', (['"""ix_user_latest_records_id"""'], {}), "('ix_user_latest_records_id')\n", (1955, 1984), False, 'from alembic import op\n'), ((2065, 2106), 'alembic.op.f', 'op.f', (['"""ix_user_latest_records_updated_at"""'], {}), "('ix_user_latest_records_updated_at')\n", (2069, 2106), False, 'from alembic import op\n'), ((2621, 2662), 'alembic.op.f', 'op.f', (['"""ix_user_latest_records_updated_at"""'], {}), "('ix_user_latest_records_updated_at')\n", (2625, 2662), False, 'from alembic import op\n'), ((2721, 2754), 'alembic.op.f', 'op.f', (['"""ix_user_latest_records_id"""'], {}), "('ix_user_latest_records_id')\n", (2725, 2754), False, 'from alembic import op\n'), ((2817, 2858), 'alembic.op.f', 'op.f', (['"""ix_user_latest_records_created_at"""'], {}), "('ix_user_latest_records_created_at')\n", (2821, 2858), False, 'from alembic import op\n'), ((546, 572), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (557, 572), True, 'import sqlalchemy as sa\n'), ((744, 770), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (755, 770), True, 'import sqlalchemy as sa\n'), ((914, 942), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (940, 942), False, 'import sqlmodel\n'), ((993, 1021), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1019, 1021), False, 'import sqlmodel\n'), ((1076, 1104), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1102, 1104), False, 'import sqlmodel\n'), ((1153, 1181), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1179, 1181), False, 'import sqlmodel\n'), ((1224, 1252), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1250, 1252), False, 'import sqlmodel\n'), ((2298, 2332), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', 
(2330, 2332), False, 'import sqlmodel\n'), ((601, 646), 'sqlalchemy.text', 'sa.text', (['"""TIMEZONE(\'utc\', CURRENT_TIMESTAMP)"""'], {}), '("TIMEZONE(\'utc\', CURRENT_TIMESTAMP)")\n', (608, 646), True, 'import sqlalchemy as sa\n'), ((799, 844), 'sqlalchemy.text', 'sa.text', (['"""TIMEZONE(\'utc\', CURRENT_TIMESTAMP)"""'], {}), '("TIMEZONE(\'utc\', CURRENT_TIMESTAMP)")\n', (806, 844), True, 'import sqlalchemy as sa\n')] |
import os
from typing import List
from sqlmodel.sql.expression import select
from utilities.filepath import get_home_dir
from sqlmodel import create_engine, SQLModel, Session
# these are imported so that the initialization of the database can be done
from schemas.common.event import Event
from schemas.common.extension import Extension
class Singleton(type):
    """Metaclass caching exactly one instance per class.

    The first instantiation of a class using this metaclass constructs the
    object; every later call returns the same cached object (constructor
    arguments of later calls are ignored).
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
class DB(metaclass=Singleton):
    """Process-wide database facade backed by a SQLite file in the home dir."""

    def __init__(self) -> None:
        # The SQLite file lives in the user's home directory.
        self.engine = create_engine("sqlite:///{}".format(os.path.join(get_home_dir(), "extensions.db")))

    def initialize(self) -> None:
        """Create all tables registered on SQLModel's metadata (idempotent)."""
        SQLModel.metadata.create_all(self.engine)

    def save(self, obj: SQLModel) -> SQLModel:
        """Persist ``obj`` and return it refreshed with DB-generated fields.

        Fixes: the session was never closed (resource leak) and the return
        annotation said ``None`` although the object is returned.
        """
        with Session(self.engine) as session:
            session.add(obj)
            session.commit()
            session.refresh(obj)
            return obj

    def fetch_extensions(self) -> List[Extension]:
        """Return every persisted Extension row."""
        with Session(self.engine) as session:
            return session.exec(select(Extension)).all()
if __name__=="__main__":
DB().initialize() | [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.sql.expression.select"
] | [((793, 834), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['self.engine'], {}), '(self.engine)\n', (821, 834), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((897, 917), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (904, 917), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((1080, 1100), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (1087, 1100), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((715, 729), 'utilities.filepath.get_home_dir', 'get_home_dir', ([], {}), '()\n', (727, 729), False, 'from utilities.filepath import get_home_dir\n'), ((1148, 1165), 'sqlmodel.sql.expression.select', 'select', (['Extension'], {}), '(Extension)\n', (1154, 1165), False, 'from sqlmodel.sql.expression import select\n')] |
from typing import AsyncGenerator, Generator
from aiobotocore.client import AioBaseClient
from aiobotocore.session import get_session
from sqlmodel import Session
from sqlmodel.ext.asyncio.session import AsyncSession
from ..core.config import settings
from ..db.db import engine, engine_async
async def get_s3() -> AsyncGenerator[AioBaseClient, None]:
    """Dependency-style async generator yielding an S3 client bound to MinIO.

    The client is closed automatically when the ``async with`` block exits.
    """
    session = get_session()
    async with session.create_client(
        "s3",
        region_name=settings.MINIO_REGION_NAME,
        endpoint_url=settings.MINIO_URL,
        # NOTE(review): TLS is disabled — presumably MinIO runs on a trusted
        # internal network; confirm.
        use_ssl=False,
        aws_secret_access_key=settings.MINIO_SECRET_KEY,
        aws_access_key_id=settings.MINIO_ACCESS_KEY,
    ) as client:
        yield client
async def get_db_async() -> AsyncGenerator[AsyncSession, None]:
    """Yield an async database session; it is closed when the caller is done."""
    async with AsyncSession(engine_async) as db_session:
        yield db_session
def get_db() -> Generator[Session, None, None]:
    """Yield a synchronous database session bound to the default engine."""
    with Session(engine) as db_session:
        yield db_session
| [
"sqlmodel.ext.asyncio.session.AsyncSession",
"sqlmodel.Session"
] | [((370, 383), 'aiobotocore.session.get_session', 'get_session', ([], {}), '()\n', (381, 383), False, 'from aiobotocore.session import get_session\n'), ((777, 803), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['engine_async'], {}), '(engine_async)\n', (789, 803), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((897, 912), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (904, 912), False, 'from sqlmodel import Session\n')] |
from typing import Optional
from sqlmodel import SQLModel, Field, create_engine, Session
engine = create_engine(url="sqlite:///users.db", echo=True)
class User(SQLModel, table=True):
    """Persisted application user."""

    id:Optional[int] = Field(None, primary_key=True)
    username: str
    # NOTE(review): stored as-is — no hashing visible here; confirm the
    # password is hashed before reaching this model.
    password:str
def get_session():
    """Yield a database session tied to the module-level engine."""
    with Session(engine) as db_session:
        yield db_session
def init_db():
SQLModel.metadata.create_all(engine) | [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Field",
"sqlmodel.create_engine",
"sqlmodel.Session"
] | [((99, 149), 'sqlmodel.create_engine', 'create_engine', ([], {'url': '"""sqlite:///users.db"""', 'echo': '(True)'}), "(url='sqlite:///users.db', echo=True)\n", (112, 149), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((208, 237), 'sqlmodel.Field', 'Field', (['None'], {'primary_key': '(True)'}), '(None, primary_key=True)\n', (213, 237), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((372, 408), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (400, 408), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n'), ((302, 317), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (309, 317), False, 'from sqlmodel import SQLModel, Field, create_engine, Session\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.