code stringlengths 110 18.9k | apis list | extract_api stringlengths 123 24.4k |
|---|---|---|
import os
from flask import Flask
import os
import requests
from app import config
from app.routes import default
from app.routes import debug
from app.routes import auth
from app.routes import admin
from app.routes import profile
from app.routes import server
from app.pages import create_pages
from sqlmodel import SQLModel
from sqlmodel import create_engine
from app.models.user import Log, User
APP_SETTINGS = os.getenv("APP_SETTINGS", "Testing")
def drop_database(config):
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy import inspect
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine(config["DATABASE_URI"])
meta = MetaData()
meta.reflect(bind=engine)
meta.drop_all(engine, checkfirst=False)
def create_app():
app = Flask(__name__, template_folder="app/templates/", static_folder="app/static/")
app.count_requests = 0
app.config.from_object(f"app.config.{APP_SETTINGS}")
app.secret_key = os.urandom(256)
app.url_map.strict_slashes = False
app.register_blueprint(default.bp)
app.register_blueprint(debug.bp, url_prefix="/debug")
app.register_blueprint(auth.bp, url_prefix="/auth")
app.register_blueprint(admin.bp, url_prefix="/admin")
app.register_blueprint(profile.bp, url_prefix="/user")
app.register_blueprint(server.bp, url_prefix="/server")
app.register_error_handler(Exception, default.handle_exception)
return app
app = create_app()
app.pages = create_pages()
with app.app_context():
from app.database import engine
app.engine = engine
if os.getenv("DROP_DATABASE", False):
drop_database(app.config)
SQLModel.metadata.create_all(engine)
if __name__ == "__main__":
app.run(host=app.config.get("HOST"), port=app.config.get("PORT"))
| [
"sqlmodel.SQLModel.metadata.create_all"
] | [((417, 453), 'os.getenv', 'os.getenv', (['"""APP_SETTINGS"""', '"""Testing"""'], {}), "('APP_SETTINGS', 'Testing')\n", (426, 453), False, 'import os\n'), ((1587, 1601), 'app.pages.create_pages', 'create_pages', ([], {}), '()\n', (1599, 1601), False, 'from app.pages import create_pages\n'), ((1691, 1724), 'os.getenv', 'os.getenv', (['"""DROP_DATABASE"""', '(False)'], {}), "('DROP_DATABASE', False)\n", (1700, 1724), False, 'import os\n'), ((1757, 1793), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1785, 1793), False, 'from sqlmodel import SQLModel\n'), ((735, 772), 'sqlalchemy.create_engine', 'create_engine', (["config['DATABASE_URI']"], {}), "(config['DATABASE_URI'])\n", (748, 772), False, 'from sqlalchemy import create_engine\n'), ((784, 794), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (792, 794), False, 'from sqlalchemy import MetaData\n'), ((899, 977), 'flask.Flask', 'Flask', (['__name__'], {'template_folder': '"""app/templates/"""', 'static_folder': '"""app/static/"""'}), "(__name__, template_folder='app/templates/', static_folder='app/static/')\n", (904, 977), False, 'from flask import Flask\n'), ((1084, 1099), 'os.urandom', 'os.urandom', (['(256)'], {}), '(256)\n', (1094, 1099), False, 'import os\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from string import ascii_lowercase
from typing import Any, Dict, Tuple, Type, TypeVar
from uuid import UUID
from hypothesis import strategies as st
from hypothesis.strategies._internal.strategies import SearchStrategy
from sqlalchemy.orm import registry
from sqlmodel import Field
from dbgen.core.entity import BaseEntity, EntityMetaclass
protected_words = {"mro"}
uni_text = lambda x: st.text(ascii_lowercase, min_size=x)
non_private_attr = uni_text(1).filter(lambda x: x[0] != "_").filter(lambda x: x not in protected_words)
pydantic_type_strat = st.sampled_from((str, float, bool, int))
id_field = Field(
default=None,
primary_key=True,
sa_column_kwargs={"autoincrement": False, "unique": True},
)
fk_field = lambda x: Field(default=None, foreign_key=x)
ID_TYPE = UUID
reserved_words = {'hex', 'uuid', 'hash'}
@st.composite
def example_entity(
draw,
class_name: str = None,
fks: Dict[str, str] = None,
attrs: Dict[str, Tuple[type, Any]] = None,
draw_attrs: bool = True,
registry_: registry = None,
) -> SearchStrategy[Type[BaseEntity]]:
class_name = class_name or draw(uni_text(1))
if fks is None:
fks = draw(
st.dictionaries(
non_private_attr.filter(
lambda x: attrs and x not in attrs and x != 'id' and x not in reserved_words
),
non_private_attr,
)
)
annotations: Dict[str, type] = {"id": UUID}
if draw_attrs:
annotations.update(
draw(
st.dictionaries(
non_private_attr.filter(lambda x: x not in fks and x not in reserved_words),
pydantic_type_strat,
min_size=1,
)
)
)
added_attrs = {"id": id_field}
for fk_name, fk_col_reference in fks.items():
annotations[fk_name] = UUID
added_attrs[fk_name] = fk_field(fk_col_reference)
attrs = attrs or {}
for attr_name, attr_dets in attrs.items():
if len(attr_dets) == 1:
type_ = attr_dets[0]
else:
type_, default = attr_dets
added_attrs[attr_name] = default
annotations[attr_name] = type_
identifying = draw(st.sets(st.sampled_from(list(annotations.keys()))))
data = {
"__annotations__": annotations,
"__identifying__": identifying,
"__module__": "tests.strategies.entity",
"__qualname__": class_name,
"__tablename__": f"table_{class_name}",
**added_attrs,
}
new_class = EntityMetaclass(
class_name,
(BaseEntity,),
data,
table=True,
registry=registry_ or registry(),
force_validation=True,
)
return new_class
T = TypeVar("T")
def fill_required_fields(
entity_class: Type[BaseEntity],
default_values={},
):
required_fields = [
(name, default_values.get(val.type_, val.type_))
for name, val in entity_class.__fields__.items()
if val.required
]
return entity_class.validate({x: y() for x, y in required_fields})
| [
"sqlmodel.Field"
] | [((1149, 1189), 'hypothesis.strategies.sampled_from', 'st.sampled_from', (['(str, float, bool, int)'], {}), '((str, float, bool, int))\n', (1164, 1189), True, 'from hypothesis import strategies as st\n'), ((1202, 1303), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'sa_column_kwargs': "{'autoincrement': False, 'unique': True}"}), "(default=None, primary_key=True, sa_column_kwargs={'autoincrement': \n False, 'unique': True})\n", (1207, 1303), False, 'from sqlmodel import Field\n'), ((3375, 3387), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (3382, 3387), False, 'from typing import Any, Dict, Tuple, Type, TypeVar\n'), ((986, 1022), 'hypothesis.strategies.text', 'st.text', (['ascii_lowercase'], {'min_size': 'x'}), '(ascii_lowercase, min_size=x)\n', (993, 1022), True, 'from hypothesis import strategies as st\n'), ((1335, 1369), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': 'x'}), '(default=None, foreign_key=x)\n', (1340, 1369), False, 'from sqlmodel import Field\n'), ((3299, 3309), 'sqlalchemy.orm.registry', 'registry', ([], {}), '()\n', (3307, 3309), False, 'from sqlalchemy.orm import registry\n')] |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.user import User
from typing import Optional
from datetime import datetime
router = APIRouter(prefix="/api/users", tags=["user"])
session = Session(engine)
@router.post("/")
async def post_user(
user: User,
session: Session = Depends(get_session),
):
"""
Post a new user.
Parameters
----------
user : User
User that is to be added to the database.
session : Session
SQL session that is to be used to add the user.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(User).where(User.short_name == user.short_name)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(user)
session.commit()
session.refresh(user)
return user
@router.get("/")
async def get_users(
session: Session = Depends(get_session),
is_active: bool = None,
short_name: str = None,
):
"""
Get list of user(s).
Parameters
----------
session : Session
SQL session that is to be used to get the users.
Defaults to creating a dependency on the running SQL model session.
is_active : bool
Status of users to be pulled.
short_name : str
Short name of user to be pulled.
"""
statement = select(User)
if is_active != None:
statement = (
select(User)
.where(User.is_active == is_active)
.order_by(User.start_date.desc())
)
result = session.exec(statement).all()
return result
@router.put("/{user_id}/")
async def update_user(
user_id: int,
is_active: Optional[bool] = None,
new_short_name: Optional[str] = None,
new_first_name: Optional[str] = None,
new_last_name: Optional[str] = None,
new_email: Optional[str] = None,
new_team_id: Optional[str] = None,
session: Session = Depends(get_session),
):
"""
Update a user.
Parameters
----------
user_id : int
ID of user to be updated.
is_active : Optional[bool]
Updated status of user.
new_short_name : Optional[bool]
Updated short name of user.
new_first_name : Optional[bool]
Updated first name of user.
new_last_name : Optional[bool]
Updated last name of user.
new_email : Optional[bool]
Updated email of user
new_team_id : Optional[bool]
Updated team id.
session : Session
SQL session that is to be used to update the user.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(User).where(User.id == user_id)
user_to_update = session.exec(statement).one()
if is_active != None:
user_to_update.is_active = is_active
if new_short_name != None:
user_to_update.short_name = new_short_name
if new_first_name != None:
user_to_update.first_name = new_first_name
if new_last_name != None:
user_to_update.last_name = new_last_name
if new_email != None:
user_to_update.email = new_email
if new_team_id != None:
user_to_update.team_id = new_team_id
user_to_update.updated_at = datetime.now()
session.add(user_to_update)
session.commit()
session.refresh(user_to_update)
return user_to_update
| [
"sqlmodel.select",
"sqlmodel.Session"
] | [((271, 316), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/api/users"""', 'tags': "['user']"}), "(prefix='/api/users', tags=['user'])\n", (280, 316), False, 'from fastapi import APIRouter, Depends\n'), ((327, 342), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (334, 342), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((423, 443), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (430, 443), False, 'from fastapi import APIRouter, Depends\n'), ((1074, 1094), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1081, 1094), False, 'from fastapi import APIRouter, Depends\n'), ((1519, 1531), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1525, 1531), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((2102, 2122), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2109, 2122), False, 'from fastapi import APIRouter, Depends\n'), ((3390, 3404), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3402, 3404), False, 'from datetime import datetime\n'), ((752, 764), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (758, 764), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((2814, 2826), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (2820, 2826), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1592, 1604), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (1598, 1604), False, 'from sqlmodel import Session, select, SQLModel, or_\n')] |
from fastapi import Path, Depends, HTTPException
from sqlmodel import Session, select
from db import get_db_session
from model.warehouse import Warehouse
from service.base_crud import BaseCRUD
async def validate_warehouse_id(
warehouse_id: int = Path(...),
db_session: Session = Depends(get_db_session)
) -> Warehouse:
"""Validates the if a warehouse is present with the given id.
Args:
warehouse_id: int. The id of the warehouse.
db_session: Session. The database session used to interact with the DB.
Returns:
Warehouse. The warehouse corresponding to the warehouse_id.
Raises:
HTTPException. Warehouse does not exist in the DB.
"""
warehouse: Warehouse = warehouse_crud.get(db_session, warehouse_id)
if not warehouse or not warehouse.active:
raise HTTPException(status_code=404, detail='Warehouse does not exist')
return warehouse
class WarehouseCRUD(BaseCRUD):
model = Warehouse
def get_by_name(self, db_session: Session, name: str) -> Warehouse:
"""Fetches warehouse by name.
Args:
db_session: Session. The database session used to interact
with the DB.
name: str. The name of the warehouse.
Returns:
Warehouse. The warehouse having the given name.
"""
statement = select(self.model).where(self.model.name == name)
return db_session.exec(statement).first()
warehouse_crud = WarehouseCRUD()
| [
"sqlmodel.select"
] | [((257, 266), 'fastapi.Path', 'Path', (['...'], {}), '(...)\n', (261, 266), False, 'from fastapi import Path, Depends, HTTPException\n'), ((298, 321), 'fastapi.Depends', 'Depends', (['get_db_session'], {}), '(get_db_session)\n', (305, 321), False, 'from fastapi import Path, Depends, HTTPException\n'), ((843, 908), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Warehouse does not exist"""'}), "(status_code=404, detail='Warehouse does not exist')\n", (856, 908), False, 'from fastapi import Path, Depends, HTTPException\n'), ((1371, 1389), 'sqlmodel.select', 'select', (['self.model'], {}), '(self.model)\n', (1377, 1389), False, 'from sqlmodel import Session, select\n')] |
import pytest
from typing import Generator, Dict
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.pool import StaticPool
from fastapi.testclient import TestClient
from app.models import *
from app.main import app
from app.api.deps import get_db
@pytest.fixture(name="session")
def session_fixture() -> Generator:
engine = create_engine(
"sqlite://",
echo=False,
connect_args={"check_same_thread": False},
poolclass=StaticPool,
)
SQLModel.metadata.create_all(engine)
with Session(engine) as session:
yield session
@pytest.fixture(name="client")
def client_fixture(session: Session):
def get_db_override():
return session
app.dependency_overrides[get_db] = get_db_override
client = TestClient(app)
yield client
app.dependency_overrides.clear()
@pytest.fixture(scope="module")
def random_product() -> Dict[str, str]:
return {
"id": 1,
"name": "<NAME>",
"price": 80,
}
| [
"sqlmodel.create_engine",
"sqlmodel.Session",
"sqlmodel.SQLModel.metadata.create_all"
] | [((267, 297), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""session"""'}), "(name='session')\n", (281, 297), False, 'import pytest\n'), ((593, 622), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""client"""'}), "(name='client')\n", (607, 622), False, 'import pytest\n'), ((853, 883), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (867, 883), False, 'import pytest\n'), ((347, 455), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite://"""'], {'echo': '(False)', 'connect_args': "{'check_same_thread': False}", 'poolclass': 'StaticPool'}), "('sqlite://', echo=False, connect_args={'check_same_thread': \n False}, poolclass=StaticPool)\n", (360, 455), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((494, 530), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (522, 530), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((780, 795), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (790, 795), False, 'from fastapi.testclient import TestClient\n'), ((817, 849), 'app.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (847, 849), False, 'from app.main import app\n'), ((540, 555), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (547, 555), False, 'from sqlmodel import Session, SQLModel, create_engine\n')] |
import typing as t
from sqlalchemy.orm import RelationshipProperty, selectinload
from sqlmodel import select
def get_schema_context(info):
schema_context = info.context["auto_schema"]
return schema_context
def get_model_for_type(info, type_):
schema_context = get_schema_context(info)
model = schema_context["type_to_model"][type_]
return model
def get_strawberry_fields_for_type(info, type_):
schema_context = get_schema_context(info)
strawberry_fields = schema_context["type_to_type_definition"][type_].fields
return strawberry_fields
def get_mapper_for_column(info, column):
return column.property.mapper
def get_type_for_column(info, column):
schema_context = get_schema_context(info)
return schema_context["mapper_to_type"][get_mapper_for_column(info, column)]
def get_graphql_python_name_map_for_type(info, type_):
"""Create a mapping from graphql field names to python attribute names"""
strawberry_fields = get_strawberry_fields_for_type(info, type_)
name_map = {
f.get_graphql_name(info.schema.config.auto_camel_case): f.python_name
for f in strawberry_fields
}
return name_map
def get_selected_field_columns(info, type_, selected_fields, model=None):
model = get_model_for_type(info, type_) if model is None else model
name_map = get_graphql_python_name_map_for_type(info, type_)
selected_field_columns = [
(s, getattr(model, name_map[s.name])) for s in selected_fields
]
return selected_field_columns
def get_selected_scalar_non_scalar_field_columns(
info, type_, selected_fields, model=None
):
selected_field_columns = get_selected_field_columns(
info, type_, selected_fields, model
)
scalar_field_columns = [
fc
for fc in selected_field_columns
if not isinstance(fc[1].property, RelationshipProperty)
]
non_scalar_field_columns = [
c
for c in selected_field_columns
if isinstance(c[1].property, RelationshipProperty)
]
return scalar_field_columns, non_scalar_field_columns
def do_nested_select(info, type_, query, selected_field, column, parent_model):
selected_fields = [s for s in selected_field.selections]
model = get_model_for_type(info, type_)
(
scalar_field_columns,
non_scalar_field_columns,
) = get_selected_scalar_non_scalar_field_columns(
info, type_, selected_fields, model
)
# TODO: selectinload is good for one to many relationships because it does
# not create cartesian product issues.
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#what-kind-of-loading-to-use
# however we probably want joined loading for many to one relationships
# and we can set innerjoin to true if the relationship is nonnullable
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#joined-eager-loading
# one issue with selectinload is it will not work with nested relationships
# that have compositie primary keys. this shows up on sql server
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#select-in-loading
subquery = selectinload(column)
query = query.options(subquery)
# TODO: this nested select is untested and probably doesn't work we want to
# use chained loading to specify futher levels
# https://docs.sqlalchemy.org/en/14/orm/loading_relationships.html#relationship-loading-with-loader-options
if non_scalar_field_columns:
for field_column in non_scalar_field_columns:
field, column = field_column
column_type = get_type_for_column(info, column)
do_nested_select(info, column_type, subquery, field, column, model)
return query
def eq_filter(column, value):
return column == value
def neq_filter(column, value):
return column != value
def lt_filter(column, value):
return column < value
def lte_filter(column, value):
return column <= value
def gt_filter(column, value):
return column > value
def gte_filter(column, value):
return column >= value
def contains_filter(column, value):
return column.contains(value)
# TODO: write more filters
filter_map = {
"eq": eq_filter,
"neq": neq_filter,
"lt": lt_filter,
"lte": lte_filter,
"gt": gt_filter,
"gte": gte_filter,
"contains": contains_filter,
}
def do_filter(info, type_, query, column, filter):
from api.strawberry_sqlalchemy.schema_generation import (
NonScalarComparison,
ScalarComparison,
)
scalar = isinstance(filter, ScalarComparison)
non_scalar = isinstance(filter, NonScalarComparison)
if scalar:
for filter_key in filter.__dict__.keys():
value = getattr(filter, filter_key)
if value is not None:
filter_func = filter_map[filter_key]
query = query.where(filter_func(column, value))
elif non_scalar:
# TODO: implement non scalar filter processing
raise NotImplementedError("Non scalar filters are not yet implemented.")
return query
def do_where(info, type_, query, where_clause):
from api.strawberry_sqlalchemy.schema_generation import (
NonScalarComparison,
ScalarComparison,
)
if where_clause is None:
return query
isinstance(where_clause, ScalarComparison)
non_scalar = isinstance(where_clause, NonScalarComparison)
model = get_model_for_type(info, type_)
name_map = get_graphql_python_name_map_for_type(info, type_)
if non_scalar:
for name in where_clause.__dict__.keys():
filter_ = getattr(where_clause, name)
if filter_ is not None:
column = (
None if name not in name_map else getattr(model, name_map[name])
)
query = do_filter(info, type_, query, column, filter_)
return query
def create_all_type_resolver(type_: type):
"""create a resolver for all instances of a type. Supports various filters"""
from api.strawberry_sqlalchemy.schema_generation import (
create_non_scalar_comparison_expression,
create_non_scalar_order_by_expression,
create_non_scalar_select_columns_enum,
)
def all_type_resolver(
self,
info,
where: t.Optional[create_non_scalar_comparison_expression(type_)] = None,
limit: t.Optional[int] = None,
offset: t.Optional[int] = None,
orderBy: t.Optional[create_non_scalar_order_by_expression(type_)] = None,
distinctOn: t.Optional[
t.List[create_non_scalar_select_columns_enum(type_)]
] = None,
) -> t.List[type_]:
# handle the case where we are querying a many attribute
# in a one to many relationship
# the many attribute uses an all_type_query resolver so that the user
# can supply filters. but strawberry.field.get_result tries to
# load the nested attribute using the resolver.
# because we are using eager loading we actually just want to access
# the attribute on the parent using get_attr(model, python_name)
# we don't want to generate a nested query
# TODO: to check that we are not at the root we check that the prev
# path is not None. Not sure if this is always true!
if info.path.prev is not None:
return getattr(self, info.python_name)
model = get_model_for_type(info, type_)
db = info.context["db"]
selected_fields = [s for s in info.selected_fields[0].selections]
(
scalar_field_columns,
non_scalar_field_columns,
) = get_selected_scalar_non_scalar_field_columns(info, type_, selected_fields)
query = select(model)
query = do_where(info, type_, query, where)
if non_scalar_field_columns:
for field_column in non_scalar_field_columns:
field, column = field_column
column_type = get_type_for_column(info, column)
query = do_nested_select(info, column_type, query, field, column, model)
rows = db.exec(query).all()
return rows
return all_type_resolver
| [
"sqlmodel.select"
] | [((3185, 3205), 'sqlalchemy.orm.selectinload', 'selectinload', (['column'], {}), '(column)\n', (3197, 3205), False, 'from sqlalchemy.orm import RelationshipProperty, selectinload\n'), ((7813, 7826), 'sqlmodel.select', 'select', (['model'], {}), '(model)\n', (7819, 7826), False, 'from sqlmodel import select\n'), ((6373, 6419), 'api.strawberry_sqlalchemy.schema_generation.create_non_scalar_comparison_expression', 'create_non_scalar_comparison_expression', (['type_'], {}), '(type_)\n', (6412, 6419), False, 'from api.strawberry_sqlalchemy.schema_generation import create_non_scalar_comparison_expression, create_non_scalar_order_by_expression, create_non_scalar_select_columns_enum\n'), ((6536, 6580), 'api.strawberry_sqlalchemy.schema_generation.create_non_scalar_order_by_expression', 'create_non_scalar_order_by_expression', (['type_'], {}), '(type_)\n', (6573, 6580), False, 'from api.strawberry_sqlalchemy.schema_generation import create_non_scalar_comparison_expression, create_non_scalar_order_by_expression, create_non_scalar_select_columns_enum\n'), ((6641, 6685), 'api.strawberry_sqlalchemy.schema_generation.create_non_scalar_select_columns_enum', 'create_non_scalar_select_columns_enum', (['type_'], {}), '(type_)\n', (6678, 6685), False, 'from api.strawberry_sqlalchemy.schema_generation import create_non_scalar_comparison_expression, create_non_scalar_order_by_expression, create_non_scalar_select_columns_enum\n')] |
from datetime import datetime, date
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class History(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
patient_id: int
hospital_id: Optional[int]
hospital_node_id: Optional[int]
hospital_room: str
discipline_group_id: int
discipline_id: int
date: date
source: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class HistoryDoctor(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id: int
doctor_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class HistoryModuleMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id: int
module_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class HistoryTag(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id: int
tag_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
| [
"sqlmodel.Field"
] | [((142, 153), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (151, 153), False, 'from fastapi import APIRouter\n'), ((217, 254), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (222, 254), False, 'from sqlmodel import Field, SQLModel\n'), ((624, 661), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (629, 661), False, 'from sqlmodel import Field, SQLModel\n'), ((880, 917), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (885, 917), False, 'from sqlmodel import Field, SQLModel\n'), ((1130, 1167), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1135, 1167), False, 'from sqlmodel import Field, SQLModel\n')] |
"""initial3
Revision ID: 01b6c8ce3965
Revises: <PASSWORD>
Create Date: 2021-11-01 04:29:57.210756
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '01b6c8ce3965'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('images', sa.Column('size_x', sa.Float(), nullable=False))
op.add_column('images', sa.Column('size_y', sa.Float(), nullable=False))
op.create_index(op.f('ix_images_size_x'), 'images', ['size_x'], unique=False)
op.create_index(op.f('ix_images_size_y'), 'images', ['size_y'], unique=False)
op.add_column('listings', sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=False))
op.create_index(op.f('ix_listings_url'), 'listings', ['url'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_listings_url'), table_name='listings')
op.drop_column('listings', 'url')
op.drop_index(op.f('ix_images_size_y'), table_name='images')
op.drop_index(op.f('ix_images_size_x'), table_name='images')
op.drop_column('images', 'size_y')
op.drop_column('images', 'size_x')
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((1075, 1108), 'alembic.op.drop_column', 'op.drop_column', (['"""listings"""', '"""url"""'], {}), "('listings', 'url')\n", (1089, 1108), False, 'from alembic import op\n'), ((1243, 1277), 'alembic.op.drop_column', 'op.drop_column', (['"""images"""', '"""size_y"""'], {}), "('images', 'size_y')\n", (1257, 1277), False, 'from alembic import op\n'), ((1282, 1316), 'alembic.op.drop_column', 'op.drop_column', (['"""images"""', '"""size_x"""'], {}), "('images', 'size_x')\n", (1296, 1316), False, 'from alembic import op\n'), ((561, 585), 'alembic.op.f', 'op.f', (['"""ix_images_size_x"""'], {}), "('ix_images_size_x')\n", (565, 585), False, 'from alembic import op\n'), ((643, 667), 'alembic.op.f', 'op.f', (['"""ix_images_size_y"""'], {}), "('ix_images_size_y')\n", (647, 667), False, 'from alembic import op\n'), ((825, 848), 'alembic.op.f', 'op.f', (['"""ix_listings_url"""'], {}), "('ix_listings_url')\n", (829, 848), False, 'from alembic import op\n'), ((1023, 1046), 'alembic.op.f', 'op.f', (['"""ix_listings_url"""'], {}), "('ix_listings_url')\n", (1027, 1046), False, 'from alembic import op\n'), ((1127, 1151), 'alembic.op.f', 'op.f', (['"""ix_images_size_y"""'], {}), "('ix_images_size_y')\n", (1131, 1151), False, 'from alembic import op\n'), ((1192, 1216), 'alembic.op.f', 'op.f', (['"""ix_images_size_x"""'], {}), "('ix_images_size_x')\n", (1196, 1216), False, 'from alembic import op\n'), ((435, 445), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (443, 445), True, 'import sqlalchemy as sa\n'), ((512, 522), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (520, 522), True, 'import sqlalchemy as sa\n'), ((752, 786), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (784, 786), False, 'import sqlmodel\n')] |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, or_
from ..models.team import Team
from ..models.user import User
from sqlalchemy.exc import NoResultFound
from datetime import datetime
router = APIRouter(prefix="/api/teams", tags=["team"])
session = Session(engine)
@router.post("/")
async def post_team(
*,
team: Team,
session: Session = Depends(get_session),
):
"""Post new team"""
statement = select(Team).where(or_(Team.name == team.name, Team.id == team.id))
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(team)
session.commit()
session.refresh(team)
return team
@router.get("/")
async def get_team_list(session: Session = Depends(get_session)):
"""Get team list"""
statement = select(Team)
results = session.exec(statement).all()
return results
@router.get("/active")
async def get_active_team_list(session: Session = Depends(get_session)):
"""Get list of active teams"""
statement = (
select(
Team.id,
Team.lead_user_id,
Team.name.label("team_name"),
Team.short_name.label("team_short_name"),
User.id,
User.short_name.label("user_name"),
)
.join(User)
.where(Team.is_active == True)
)
results = session.exec(statement).all()
return results
@router.get("/{team_name}")
async def read_teams(team_name: str = None, session: Session = Depends(get_session)):
"""Read the contents of a given team"""
statement = select(Team).where(Team.name == team_name)
try:
result = session.exec(statement).one()
return result
except NoResultFound:
msg = f"""There is no team named {team_name}"""
return msg
@router.get("/{team_id}/user-name")
async def get_user_name_by_team_id(
team_id: int, session: Session = Depends(get_session)
):
"""Get user name by team id"""
statement = (
select(Team.id, User.id, User.name)
.join(User)
.where(Team.id == team_id)
.where(User.active == True)
)
result = session.exec(statement).one()
return result
@router.put("/{team_name}/activate")
async def activate_team(
team_name: str = None,
session: Session = Depends(get_session),
):
"""Activate team"""
statement = select(Team).where(Team.name == team_name)
team_to_activate = session.exec(statement).one()
team_to_activate.is_active = True
team_to_activate.updated_at = datetime.now()
session.add(team_to_activate)
session.commit()
session.refresh(team_to_activate)
return team_to_activate
@router.put("/{team_name}/deactivate")
async def deactivate_team(
team_name: str = None,
session: Session = Depends(get_session),
):
"""Deactivate team"""
statement = select(Team).where(Team.name == team_name)
team_to_deactivate = session.exec(statement).one()
team_to_deactivate.is_active = False
team_to_deactivate.updated_at = datetime.now()
session.add(team_to_deactivate)
session.commit()
session.refresh(team_to_deactivate)
return team_to_deactivate
@router.put("/")
async def update_team(
id: str = None,
lead_user_id: str = None,
name: str = None,
is_active: bool = None,
session: Session = Depends(get_session),
):
"""Update team"""
statement = select(Team).where(or_(Team.name == name, Team.id == id))
team_to_update = session.exec(statement).one()
team_to_update.lead_user_id = lead_user_id
team_to_update.name = name
team_to_update.is_active = is_active
session.add(team_to_update)
team_to_update.updated_at = datetime.now()
session.commit()
session.refresh(team_to_update)
return team_to_update
| [
"sqlmodel.Session",
"sqlmodel.or_",
"sqlmodel.select"
] | [((264, 309), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/api/teams"""', 'tags': "['team']"}), "(prefix='/api/teams', tags=['team'])\n", (273, 309), False, 'from fastapi import APIRouter, Depends\n'), ((320, 335), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (327, 335), False, 'from sqlmodel import Session, select, or_\n'), ((423, 443), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (430, 443), False, 'from fastapi import APIRouter, Depends\n'), ((822, 842), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (829, 842), False, 'from fastapi import APIRouter, Depends\n'), ((885, 897), 'sqlmodel.select', 'select', (['Team'], {}), '(Team)\n', (891, 897), False, 'from sqlmodel import Session, select, or_\n'), ((1036, 1056), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1043, 1056), False, 'from fastapi import APIRouter, Depends\n'), ((1576, 1596), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1583, 1596), False, 'from fastapi import APIRouter, Depends\n'), ((1992, 2012), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1999, 2012), False, 'from fastapi import APIRouter, Depends\n'), ((2385, 2405), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2392, 2405), False, 'from fastapi import APIRouter, Depends\n'), ((2618, 2632), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2630, 2632), False, 'from datetime import datetime\n'), ((2872, 2892), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2879, 2892), False, 'from fastapi import APIRouter, Depends\n'), ((3114, 3128), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3126, 3128), False, 'from datetime import datetime\n'), ((3421, 3441), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (3428, 3441), False, 'from fastapi import APIRouter, Depends\n'), ((3776, 3790), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3788, 3790), False, 'from datetime import datetime\n'), ((507, 554), 'sqlmodel.or_', 'or_', (['(Team.name == team.name)', '(Team.id == team.id)'], {}), '(Team.name == team.name, Team.id == team.id)\n', (510, 554), False, 'from sqlmodel import Session, select, or_\n'), ((3503, 3540), 'sqlmodel.or_', 'or_', (['(Team.name == name)', '(Team.id == id)'], {}), '(Team.name == name, Team.id == id)\n', (3506, 3540), False, 'from sqlmodel import Session, select, or_\n'), ((488, 500), 'sqlmodel.select', 'select', (['Team'], {}), '(Team)\n', (494, 500), False, 'from sqlmodel import Session, select, or_\n'), ((1659, 1671), 'sqlmodel.select', 'select', (['Team'], {}), '(Team)\n', (1665, 1671), False, 'from sqlmodel import Session, select, or_\n'), ((2450, 2462), 'sqlmodel.select', 'select', (['Team'], {}), '(Team)\n', (2456, 2462), False, 'from sqlmodel import Session, select, or_\n'), ((2939, 2951), 'sqlmodel.select', 'select', (['Team'], {}), '(Team)\n', (2945, 2951), False, 'from sqlmodel import Session, select, or_\n'), ((3484, 3496), 'sqlmodel.select', 'select', (['Team'], {}), '(Team)\n', (3490, 3496), False, 'from sqlmodel import Session, select, or_\n'), ((2077, 2112), 'sqlmodel.select', 'select', (['Team.id', 'User.id', 'User.name'], {}), '(Team.id, User.id, User.name)\n', (2083, 2112), False, 'from sqlmodel import Session, select, or_\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Optional
from uuid import UUID, uuid4
import requests
from pydantic import HttpUrl
from pydantic.tools import parse_obj_as
from sqlalchemy.sql.expression import text
from sqlmodel import Session, select
from dbgen import Const, Entity, Extract, Generator, Model, Query
from dbgen.configuration import config, get_engines
from dbgen.core.node.transforms import PyBlock
class CustomJsonExtract(Extract):
url: HttpUrl = parse_obj_as(HttpUrl, 'https://jsonplaceholder.typicode.com/posts')
outputs: List[str] = ['out', 'uuid']
def setup(self, **_):
self._response = requests.get(self.url).json()
self._response += [{}]
def extract(self):
for row in self._response:
row['uuid'] = uuid4()
yield {'out': row, 'uuid': row['uuid']}
def length(self, **_):
return len(self._response)
class JSONEntityBase(Entity):
__tablename__ = 'json_entity'
tags: Optional[List[dict]]
my_uuid: Optional[UUID]
class JSONEntity(JSONEntityBase, table=True):
__tablename__ = 'json_entity'
__identifying__ = {
'json_val',
}
json_val: Optional[dict]
model = Model(name='test_json')
load_json = Generator(name='load_json', loads=[JSONEntity.load(insert=True, json_val=Const({}))])
model.add_gen(load_json)
extract = CustomJsonExtract()
load = JSONEntity.load(insert=True, json_val=extract['out'], my_uuid=extract['uuid'])
load_http_json = Generator(name='load_http_json', extract=extract, loads=[load])
model.add_gen(load_http_json)
query = Query(select(JSONEntity.id, JSONEntity.json_val.op('->')(text("'title'")).label('title')))
def get_title_words(text: str):
if text:
return [{'word': word} for word in text.split(' ')]
pb = PyBlock(function=get_title_words, inputs=[query['title']])
load = JSONEntity.load(json_entity=query['id'], tags=pb['out'])
add_tags = Generator(name='add_tags', extract=query, transforms=[pb], loads=[load])
model.add_gen(add_tags)
if __name__ == '__main__':
main_engine, _ = get_engines(config)
with Session(main_engine) as session:
json_entity = session.exec(select(JSONEntity)).first()
if json_entity:
print(json_entity.dict().keys())
print(json_entity.json_val)
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((1773, 1796), 'dbgen.Model', 'Model', ([], {'name': '"""test_json"""'}), "(name='test_json')\n", (1778, 1796), False, 'from dbgen import Const, Entity, Extract, Generator, Model, Query\n'), ((2054, 2117), 'dbgen.Generator', 'Generator', ([], {'name': '"""load_http_json"""', 'extract': 'extract', 'loads': '[load]'}), "(name='load_http_json', extract=extract, loads=[load])\n", (2063, 2117), False, 'from dbgen import Const, Entity, Extract, Generator, Model, Query\n'), ((2362, 2420), 'dbgen.core.node.transforms.PyBlock', 'PyBlock', ([], {'function': 'get_title_words', 'inputs': "[query['title']]"}), "(function=get_title_words, inputs=[query['title']])\n", (2369, 2420), False, 'from dbgen.core.node.transforms import PyBlock\n'), ((2496, 2568), 'dbgen.Generator', 'Generator', ([], {'name': '"""add_tags"""', 'extract': 'query', 'transforms': '[pb]', 'loads': '[load]'}), "(name='add_tags', extract=query, transforms=[pb], loads=[load])\n", (2505, 2568), False, 'from dbgen import Const, Entity, Extract, Generator, Model, Query\n'), ((1047, 1114), 'pydantic.tools.parse_obj_as', 'parse_obj_as', (['HttpUrl', '"""https://jsonplaceholder.typicode.com/posts"""'], {}), "(HttpUrl, 'https://jsonplaceholder.typicode.com/posts')\n", (1059, 1114), False, 'from pydantic.tools import parse_obj_as\n'), ((2642, 2661), 'dbgen.configuration.get_engines', 'get_engines', (['config'], {}), '(config)\n', (2653, 2661), False, 'from dbgen.configuration import config, get_engines\n'), ((2671, 2691), 'sqlmodel.Session', 'Session', (['main_engine'], {}), '(main_engine)\n', (2678, 2691), False, 'from sqlmodel import Session, select\n'), ((1354, 1361), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1359, 1361), False, 'from uuid import UUID, uuid4\n'), ((1208, 1230), 'requests.get', 'requests.get', (['self.url'], {}), '(self.url)\n', (1220, 1230), False, 'import requests\n'), ((2338, 2353), 'sqlalchemy.sql.expression.text.split', 'text.split', (['""" """'], {}), "(' ')\n", (2348, 2353), False, 'from sqlalchemy.sql.expression import text\n'), ((1882, 1891), 'dbgen.Const', 'Const', (['{}'], {}), '({})\n', (1887, 1891), False, 'from dbgen import Const, Entity, Extract, Generator, Model, Query\n'), ((2214, 2229), 'sqlalchemy.sql.expression.text', 'text', (['"""\'title\'"""'], {}), '("\'title\'")\n', (2218, 2229), False, 'from sqlalchemy.sql.expression import text\n'), ((2739, 2757), 'sqlmodel.select', 'select', (['JSONEntity'], {}), '(JSONEntity)\n', (2745, 2757), False, 'from sqlmodel import Session, select\n')] |
from datetime import date
from typing import Optional
from sqlmodel import SQLModel, Field
# if TYPE_CHECKING:
# from app.src.models.db.order import Order
class AppUserBase(SQLModel):
username: str
name: Optional[str]
surname: Optional[str]
birth_date: Optional[date]
email: str
password: str
isAdmin: bool = False
class AppUser(AppUserBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
class AppUserCreate(AppUserBase):
pass
class AppUserRead(AppUserBase):
id: int
# Nel modello update tutti gli attributi devono essere opzionali
class AppUserUpdate(AppUserBase):
name: Optional[str] = None
surname: Optional[str] = None
birth_date: Optional[date] = None
username: Optional[str] = None
email: str
password: str
isAdmin: Optional[bool] = None
| [
"sqlmodel.Field"
] | [((415, 452), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (420, 452), False, 'from sqlmodel import SQLModel, Field\n')] |
"""Implementing balance and fiscal_note_items tables
Revision ID: 6099ed2a58e0
Revises: <KEY>
Create Date: 2021-10-11 14:52:44.126077
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6099ed2a58e0"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"balance",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("value", sa.Float(), nullable=False),
sa.Column("type", sa.Enum("DEBT", "CREDIT", name="balancetype"), nullable=False),
sa.Column(
"operation",
sa.Enum(
"PAYMENT_OF_EMPLOYEES",
"PAYMENT_OF_SUPPLIERS",
"ANOTHER_PAYMENTS",
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
name="operationtype",
),
nullable=False,
),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_balance_created_at"), "balance", ["created_at"], unique=False)
op.create_index(op.f("ix_balance_operation"), "balance", ["operation"], unique=False)
op.create_index(op.f("ix_balance_owner_id"), "balance", ["owner_id"], unique=False)
op.create_index(op.f("ix_balance_type"), "balance", ["type"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("buy_value", sa.Float(), nullable=False),
sa.Column("sugested_sell_value", sa.Float(), nullable=False),
sa.Column("owner_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("fiscal_note_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("item_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("file_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.ForeignKeyConstraint(
["file_id"],
["files.bucket_key"],
),
sa.ForeignKeyConstraint(
["fiscal_note_id"],
["fiscal_notes.id"],
),
sa.ForeignKeyConstraint(
["item_id"],
["items.id"],
),
sa.ForeignKeyConstraint(
["owner_id"],
["users.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_fiscal_note_items_fiscal_note_id"), "fiscal_note_items", ["fiscal_note_id"], unique=False)
op.create_index(op.f("ix_fiscal_note_items_item_id"), "fiscal_note_items", ["item_id"], unique=False)
op.create_index(op.f("ix_fiscal_note_items_owner_id"), "fiscal_note_items", ["owner_id"], unique=False)
op.create_index(
op.f("ix_fiscal_note_items_sugested_sell_value"), "fiscal_note_items", ["sugested_sell_value"], unique=False
)
op.create_index(op.f("ix_clients_owner_id"), "clients", ["owner_id"], unique=False)
op.create_index(op.f("ix_fiscal_notes_owner_id"), "fiscal_notes", ["owner_id"], unique=False)
op.alter_column("items", "cost", existing_type=postgresql.DOUBLE_PRECISION(precision=53), nullable=True)
op.create_index(op.f("ix_items_amount"), "items", ["amount"], unique=False)
op.alter_column("order_details", "order_id", existing_type=postgresql.UUID(), nullable=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f("ix_users_password_hash"), table_name="users")
op.drop_index(op.f("ix_users_name"), table_name="users")
op.drop_index(op.f("ix_orders_description"), table_name="orders")
op.drop_index(op.f("ix_order_details_sell_value"), table_name="order_details")
op.drop_index(op.f("ix_order_details_item_name"), table_name="order_details")
op.drop_index(op.f("ix_order_details_cost"), table_name="order_details")
op.alter_column("order_details", "order_id", existing_type=postgresql.UUID(), nullable=False)
op.drop_index(op.f("ix_items_value"), table_name="items")
op.drop_index(op.f("ix_items_cost"), table_name="items")
op.drop_index(op.f("ix_items_amount"), table_name="items")
op.alter_column("items", "cost", existing_type=postgresql.DOUBLE_PRECISION(precision=53), nullable=False)
op.drop_index(op.f("ix_fiscal_notes_owner_id"), table_name="fiscal_notes")
op.drop_index(op.f("ix_fiscal_notes_file_id"), table_name="fiscal_notes")
op.drop_index(op.f("ix_fiscal_notes_description"), table_name="fiscal_notes")
op.drop_index(op.f("ix_files_uploaded_at"), table_name="files")
op.drop_index(op.f("ix_clients_phone"), table_name="clients")
op.drop_index(op.f("ix_clients_owner_id"), table_name="clients")
op.drop_index(op.f("ix_clients_created_at"), table_name="clients")
op.drop_index(op.f("ix_fiscal_note_items_sugested_sell_value"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_owner_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_item_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_fiscal_note_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_file_id"), table_name="fiscal_note_items")
op.drop_index(op.f("ix_fiscal_note_items_buy_value"), table_name="fiscal_note_items")
op.drop_table("fiscal_note_items")
op.drop_index(op.f("ix_balance_value"), table_name="balance")
op.drop_index(op.f("ix_balance_type"), table_name="balance")
op.drop_index(op.f("ix_balance_owner_id"), table_name="balance")
op.drop_index(op.f("ix_balance_operation"), table_name="balance")
op.drop_index(op.f("ix_balance_description"), table_name="balance")
op.drop_index(op.f("ix_balance_created_at"), table_name="balance")
op.drop_table("balance")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString",
"sqlmodel.sql.sqltypes.GUID"
] | [((5828, 5862), 'alembic.op.drop_table', 'op.drop_table', (['"""fiscal_note_items"""'], {}), "('fiscal_note_items')\n", (5841, 5862), False, 'from alembic import op\n'), ((6280, 6304), 'alembic.op.drop_table', 'op.drop_table', (['"""balance"""'], {}), "('balance')\n", (6293, 6304), False, 'from alembic import op\n'), ((1354, 1405), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {}), "(['owner_id'], ['users.id'])\n", (1377, 1405), True, 'import sqlalchemy as sa\n'), ((1450, 1479), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1473, 1479), True, 'import sqlalchemy as sa\n'), ((1507, 1536), 'alembic.op.f', 'op.f', (['"""ix_balance_created_at"""'], {}), "('ix_balance_created_at')\n", (1511, 1536), False, 'from alembic import op\n'), ((1599, 1627), 'alembic.op.f', 'op.f', (['"""ix_balance_operation"""'], {}), "('ix_balance_operation')\n", (1603, 1627), False, 'from alembic import op\n'), ((1689, 1716), 'alembic.op.f', 'op.f', (['"""ix_balance_owner_id"""'], {}), "('ix_balance_owner_id')\n", (1693, 1716), False, 'from alembic import op\n'), ((1777, 1800), 'alembic.op.f', 'op.f', (['"""ix_balance_type"""'], {}), "('ix_balance_type')\n", (1781, 1800), False, 'from alembic import op\n'), ((2414, 2472), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['file_id']", "['files.bucket_key']"], {}), "(['file_id'], ['files.bucket_key'])\n", (2437, 2472), True, 'import sqlalchemy as sa\n'), ((2517, 2581), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['fiscal_note_id']", "['fiscal_notes.id']"], {}), "(['fiscal_note_id'], ['fiscal_notes.id'])\n", (2540, 2581), True, 'import sqlalchemy as sa\n'), ((2626, 2676), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['item_id']", "['items.id']"], {}), "(['item_id'], ['items.id'])\n", (2649, 2676), True, 'import sqlalchemy as sa\n'), ((2721, 2772), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {}), "(['owner_id'], ['users.id'])\n", (2744, 2772), True, 'import sqlalchemy as sa\n'), ((2817, 2846), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2840, 2846), True, 'import sqlalchemy as sa\n'), ((2874, 2917), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_fiscal_note_id"""'], {}), "('ix_fiscal_note_items_fiscal_note_id')\n", (2878, 2917), False, 'from alembic import op\n'), ((2994, 3030), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_item_id"""'], {}), "('ix_fiscal_note_items_item_id')\n", (2998, 3030), False, 'from alembic import op\n'), ((3100, 3137), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_owner_id"""'], {}), "('ix_fiscal_note_items_owner_id')\n", (3104, 3137), False, 'from alembic import op\n'), ((3217, 3265), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_sugested_sell_value"""'], {}), "('ix_fiscal_note_items_sugested_sell_value')\n", (3221, 3265), False, 'from alembic import op\n'), ((3352, 3379), 'alembic.op.f', 'op.f', (['"""ix_clients_owner_id"""'], {}), "('ix_clients_owner_id')\n", (3356, 3379), False, 'from alembic import op\n'), ((3440, 3472), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_owner_id"""'], {}), "('ix_fiscal_notes_owner_id')\n", (3444, 3472), False, 'from alembic import op\n'), ((3647, 3670), 'alembic.op.f', 'op.f', (['"""ix_items_amount"""'], {}), "('ix_items_amount')\n", (3651, 3670), False, 'from alembic import op\n'), ((3942, 3972), 'alembic.op.f', 'op.f', (['"""ix_users_password_hash"""'], {}), "('ix_users_password_hash')\n", (3946, 3972), False, 'from alembic import op\n'), ((4012, 4033), 'alembic.op.f', 'op.f', (['"""ix_users_name"""'], {}), "('ix_users_name')\n", (4016, 4033), False, 'from alembic import op\n'), ((4073, 4102), 'alembic.op.f', 'op.f', (['"""ix_orders_description"""'], {}), "('ix_orders_description')\n", (4077, 4102), False, 'from alembic import op\n'), ((4143, 4178), 'alembic.op.f', 'op.f', (['"""ix_order_details_sell_value"""'], {}), "('ix_order_details_sell_value')\n", (4147, 4178), False, 'from alembic import op\n'), ((4226, 4260), 'alembic.op.f', 'op.f', (['"""ix_order_details_item_name"""'], {}), "('ix_order_details_item_name')\n", (4230, 4260), False, 'from alembic import op\n'), ((4308, 4337), 'alembic.op.f', 'op.f', (['"""ix_order_details_cost"""'], {}), "('ix_order_details_cost')\n", (4312, 4337), False, 'from alembic import op\n'), ((4483, 4505), 'alembic.op.f', 'op.f', (['"""ix_items_value"""'], {}), "('ix_items_value')\n", (4487, 4505), False, 'from alembic import op\n'), ((4545, 4566), 'alembic.op.f', 'op.f', (['"""ix_items_cost"""'], {}), "('ix_items_cost')\n", (4549, 4566), False, 'from alembic import op\n'), ((4606, 4629), 'alembic.op.f', 'op.f', (['"""ix_items_amount"""'], {}), "('ix_items_amount')\n", (4610, 4629), False, 'from alembic import op\n'), ((4779, 4811), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_owner_id"""'], {}), "('ix_fiscal_notes_owner_id')\n", (4783, 4811), False, 'from alembic import op\n'), ((4858, 4889), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_file_id"""'], {}), "('ix_fiscal_notes_file_id')\n", (4862, 4889), False, 'from alembic import op\n'), ((4936, 4971), 'alembic.op.f', 'op.f', (['"""ix_fiscal_notes_description"""'], {}), "('ix_fiscal_notes_description')\n", (4940, 4971), False, 'from alembic import op\n'), ((5018, 5046), 'alembic.op.f', 'op.f', (['"""ix_files_uploaded_at"""'], {}), "('ix_files_uploaded_at')\n", (5022, 5046), False, 'from alembic import op\n'), ((5086, 5110), 'alembic.op.f', 'op.f', (['"""ix_clients_phone"""'], {}), "('ix_clients_phone')\n", (5090, 5110), False, 'from alembic import op\n'), ((5152, 5179), 'alembic.op.f', 'op.f', (['"""ix_clients_owner_id"""'], {}), "('ix_clients_owner_id')\n", (5156, 5179), False, 'from alembic import op\n'), ((5221, 5250), 'alembic.op.f', 'op.f', (['"""ix_clients_created_at"""'], {}), "('ix_clients_created_at')\n", (5225, 5250), False, 'from alembic import op\n'), ((5292, 5340), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_sugested_sell_value"""'], {}), "('ix_fiscal_note_items_sugested_sell_value')\n", (5296, 5340), False, 'from alembic import op\n'), ((5392, 5429), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_owner_id"""'], {}), "('ix_fiscal_note_items_owner_id')\n", (5396, 5429), False, 'from alembic import op\n'), ((5481, 5517), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_item_id"""'], {}), "('ix_fiscal_note_items_item_id')\n", (5485, 5517), False, 'from alembic import op\n'), ((5569, 5612), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_fiscal_note_id"""'], {}), "('ix_fiscal_note_items_fiscal_note_id')\n", (5573, 5612), False, 'from alembic import op\n'), ((5664, 5700), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_file_id"""'], {}), "('ix_fiscal_note_items_file_id')\n", (5668, 5700), False, 'from alembic import op\n'), ((5752, 5790), 'alembic.op.f', 'op.f', (['"""ix_fiscal_note_items_buy_value"""'], {}), "('ix_fiscal_note_items_buy_value')\n", (5756, 5790), False, 'from alembic import op\n'), ((5881, 5905), 'alembic.op.f', 'op.f', (['"""ix_balance_value"""'], {}), "('ix_balance_value')\n", (5885, 5905), False, 'from alembic import op\n'), ((5947, 5970), 'alembic.op.f', 'op.f', (['"""ix_balance_type"""'], {}), "('ix_balance_type')\n", (5951, 5970), False, 'from alembic import op\n'), ((6012, 6039), 'alembic.op.f', 'op.f', (['"""ix_balance_owner_id"""'], {}), "('ix_balance_owner_id')\n", (6016, 6039), False, 'from alembic import op\n'), ((6081, 6109), 'alembic.op.f', 'op.f', (['"""ix_balance_operation"""'], {}), "('ix_balance_operation')\n", (6085, 6109), False, 'from alembic import op\n'), ((6151, 6181), 'alembic.op.f', 'op.f', (['"""ix_balance_description"""'], {}), "('ix_balance_description')\n", (6155, 6181), False, 'from alembic import op\n'), ((6223, 6252), 'alembic.op.f', 'op.f', (['"""ix_balance_created_at"""'], {}), "('ix_balance_created_at')\n", (6227, 6252), False, 'from alembic import op\n'), ((524, 552), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (550, 552), False, 'import sqlmodel\n'), ((598, 608), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (606, 608), True, 'import sqlalchemy as sa\n'), ((653, 698), 'sqlalchemy.Enum', 'sa.Enum', (['"""DEBT"""', '"""CREDIT"""'], {'name': '"""balancetype"""'}), "('DEBT', 'CREDIT', name='balancetype')\n", (660, 698), True, 'import sqlalchemy as sa\n'), ((773, 945), 'sqlalchemy.Enum', 'sa.Enum', (['"""PAYMENT_OF_EMPLOYEES"""', '"""PAYMENT_OF_SUPPLIERS"""', '"""ANOTHER_PAYMENTS"""', '"""SALE_IN_PIX"""', '"""SALE_IN_DEBT"""', '"""SALE_IN_CREDIT"""', '"""SALE_IN_MONEY"""'], {'name': '"""operationtype"""'}), "('PAYMENT_OF_EMPLOYEES', 'PAYMENT_OF_SUPPLIERS', 'ANOTHER_PAYMENTS',\n 'SALE_IN_PIX', 'SALE_IN_DEBT', 'SALE_IN_CREDIT', 'SALE_IN_MONEY', name=\n 'operationtype')\n", (780, 945), True, 'import sqlalchemy as sa\n'), ((1153, 1187), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1185, 1187), False, 'import sqlmodel\n'), ((1238, 1251), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1249, 1251), True, 'import sqlalchemy as sa\n'), ((1299, 1327), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1325, 1327), False, 'import sqlmodel\n'), ((1911, 1939), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (1937, 1939), False, 'import sqlmodel\n'), ((1989, 1999), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (1997, 1999), True, 'import sqlalchemy as sa\n'), ((2059, 2069), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (2067, 2069), True, 'import sqlalchemy as sa\n'), ((2118, 2146), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2144, 2146), False, 'import sqlmodel\n'), ((2201, 2229), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2227, 2229), False, 'import sqlmodel\n'), ((2277, 2305), 'sqlmodel.sql.sqltypes.GUID', 'sqlmodel.sql.sqltypes.GUID', ([], {}), '()\n', (2303, 2305), False, 'import sqlmodel\n'), ((2353, 2387), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2385, 2387), False, 'import sqlmodel\n'), ((3569, 3610), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (3596, 3610), False, 'from sqlalchemy.dialects import postgresql\n'), ((3770, 3787), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (3785, 3787), False, 'from sqlalchemy.dialects import postgresql\n'), ((4430, 4447), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (4445, 4447), False, 'from sqlalchemy.dialects import postgresql\n'), ((4702, 4743), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (4729, 4743), False, 'from sqlalchemy.dialects import postgresql\n')] |
from sqlalchemy.orm import Session
from sqlmodel import select
from sqlalchemy.exc import SQLAlchemyError
from typing import Any
# import sys
#
# sys.path.append("..")
from app.db import models, pagination,session_scope
from app.util import passutil, schemas
from app.logs import fastapi_logger
from app.crud import get_user,get_user_password
class CRUDLogin:
def check_username_password(self, email: str, password: str) -> Any:
""" Verify Password"""
db_user_info = get_user_password(email=email)
return passutil.verify_password(str(password),
str(db_user_info.password))
def check_active_session(self, session_id: str):
""" check for active session """
try:
with session_scope() as db:
statement = select(models.UsersLoginAttempt).where(
models.UsersLoginAttempt.session_id == session_id)
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("check_active_session")
return None
def login_user(self, user: schemas.UserLogIn, session_id: str) -> Any:
""" Login Attempt Record """
try:
with session_scope() as db:
db_session = models.UsersLoginAttempt(
email=user.email,
session_id=session_id,
ip_address=user.ip_address,
browser=user.browser,
status="logged_in")
db.add(db_session)
db.commit()
db.refresh(db_session)
return db_session
except SQLAlchemyError as e:
fastapi_logger.exception("login_user")
return None
def active_user(self, session_id: str) -> Any:
""" check for active user"""
try:
with session_scope() as db:
statement = select(models.UsersLoginAttempt).where(
models.UsersLoginAttempt.session_id == session_id)
results = db.exec(statement)
db_session = results.one()
db_session.status = "active"
db.add(db_session)
db.commit()
db.refresh(db_session)
return db_session
except SQLAlchemyError as e:
fastapi_logger.exception("active_user")
return None
def logoff_user(self, session_id: str) -> Any:
""" Logging off Record"""
try:
with session_scope() as db:
statement = select(models.UsersLoginAttempt).where(
models.UsersLoginAttempt.session_id == session_id)
results = db.exec(statement)
db_session = results.one()
db_session.status = "logged_off"
db.add(db_session)
db.commit()
db.refresh(db_session)
return db_session
except SQLAlchemyError as e:
fastapi_logger.exception("logoff_user")
return None
crud_login = CRUDLogin()
| [
"sqlmodel.select"
] | [((511, 541), 'app.crud.get_user_password', 'get_user_password', ([], {'email': 'email'}), '(email=email)\n', (528, 541), False, 'from app.crud import get_user, get_user_password\n'), ((799, 814), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (812, 814), False, 'from app.db import models, pagination, session_scope\n'), ((1127, 1175), 'app.logs.fastapi_logger.exception', 'fastapi_logger.exception', (['"""check_active_session"""'], {}), "('check_active_session')\n", (1151, 1175), False, 'from app.logs import fastapi_logger\n'), ((1349, 1364), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (1362, 1364), False, 'from app.db import models, pagination, session_scope\n'), ((1402, 1541), 'app.db.models.UsersLoginAttempt', 'models.UsersLoginAttempt', ([], {'email': 'user.email', 'session_id': 'session_id', 'ip_address': 'user.ip_address', 'browser': 'user.browser', 'status': '"""logged_in"""'}), "(email=user.email, session_id=session_id,\n ip_address=user.ip_address, browser=user.browser, status='logged_in')\n", (1426, 1541), False, 'from app.db import models, pagination, session_scope\n'), ((1835, 1873), 'app.logs.fastapi_logger.exception', 'fastapi_logger.exception', (['"""login_user"""'], {}), "('login_user')\n", (1859, 1873), False, 'from app.logs import fastapi_logger\n'), ((2023, 2038), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (2036, 2038), False, 'from app.db import models, pagination, session_scope\n'), ((2516, 2555), 'app.logs.fastapi_logger.exception', 'fastapi_logger.exception', (['"""active_user"""'], {}), "('active_user')\n", (2540, 2555), False, 'from app.logs import fastapi_logger\n'), ((2702, 2717), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (2715, 2717), False, 'from app.db import models, pagination, session_scope\n'), ((3199, 3238), 'app.logs.fastapi_logger.exception', 'fastapi_logger.exception', (['"""logoff_user"""'], {}), "('logoff_user')\n", (3223, 3238), False, 'from app.logs import fastapi_logger\n'), ((851, 883), 'sqlmodel.select', 'select', (['models.UsersLoginAttempt'], {}), '(models.UsersLoginAttempt)\n', (857, 883), False, 'from sqlmodel import select\n'), ((2075, 2107), 'sqlmodel.select', 'select', (['models.UsersLoginAttempt'], {}), '(models.UsersLoginAttempt)\n', (2081, 2107), False, 'from sqlmodel import select\n'), ((2754, 2786), 'sqlmodel.select', 'select', (['models.UsersLoginAttempt'], {}), '(models.UsersLoginAttempt)\n', (2760, 2786), False, 'from sqlmodel import select\n')] |
from typing import Optional # (1)
from sqlmodel import Field, SQLModel, create_engine # (2)
class Hero(SQLModel, table=True): # (3)
id: Optional[int] = Field(default=None, primary_key=True) # (4)
name: str # (5)
secret_name: str # (6)
age: Optional[int] = None # (7)
sqlite_file_name = "database.db" # (8)
sqlite_url = f"sqlite:///{sqlite_file_name}" # (9)
engine = create_engine(sqlite_url, echo=True) # (10)
def create_db_and_tables(): # (11)
SQLModel.metadata.create_all(engine) # (12)
if __name__ == "__main__": # (13)
create_db_and_tables() # (14)
| [
"sqlmodel.create_engine",
"sqlmodel.Field",
"sqlmodel.SQLModel.metadata.create_all"
] | [((397, 433), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (410, 433), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((162, 199), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (167, 199), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((484, 520), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (512, 520), False, 'from sqlmodel import Field, SQLModel, create_engine\n')] |
from datetime import datetime
from decimal import Decimal
from typing import Optional
from fastapi import APIRouter
from sqlmodel import Field, SQLModel
router = APIRouter()
class Guardian(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
email = str
email_verified_at: Optional[datetime] = None
password: str
remember_token: str
first_name_thai: str
last_name_thai: str
first_name_english: str
last_name_english: str
occupation_id: Optional[int] = None
gender: str
is_thai_address: bool
address_house_number: str
address_moo: str
address_soi: str
address_road: str
address_tambon_id: Optional[int] = None
address_amphoe_id: Optional[int] = None
address_province_id: Optional[int] = None
address_other: str
latitude: Decimal
longitude: Decimal
latitude_custom: Decimal
longitude_custom: Decimal
alive: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPhone(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
number: str
detail: str
receive_sms: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianIdNumber(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
id_type_id: int
number: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianPatientMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
patient_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class GuardianNotification(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
guardian_id: int
name: str
detail: str
is_read: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
| [
"sqlmodel.Field"
] | [((164, 175), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (173, 175), False, 'from fastapi import APIRouter\n'), ((240, 277), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (245, 277), False, 'from sqlmodel import Field, SQLModel\n'), ((1118, 1155), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1123, 1155), False, 'from sqlmodel import Field, SQLModel\n'), ((1410, 1447), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1415, 1447), False, 'from sqlmodel import Field, SQLModel\n'), ((1686, 1723), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1691, 1723), False, 'from sqlmodel import Field, SQLModel\n'), ((1948, 1985), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1953, 1985), False, 'from sqlmodel import Field, SQLModel\n')] |
from datetime import date, datetime
from typing import Optional
from pydantic import BaseModel, validator
from sqlmodel import Field, SQLModel
# Simple classes for access control tokens
class Token(BaseModel):
access_token: str
token_type: str
expiry: datetime
class TokenData(BaseModel):
username: Optional[str] = None
# Default user class, this is the one to interact with.
class User(SQLModel):
id: Optional[int] = Field(default=None, primary_key=True)
full_name: str
username: str
email: str
disabled: Optional[bool] = Field(default=False)
roles: Optional[str] = Field(default="appuser")
created: Optional[datetime] = Field(default=datetime.utcnow())
# Don't ever return FullUser instances - ALWAYS return 'User' at maximum, since FullUser includes hashedpasword.
# FullUser is only need during creation or resetting of password.
class FullUser(User, table=True):
__tablename__ = "Users"
hashedpassword: str
# Opservation class is used for both storage and retrieval operations.
class Observation(SQLModel, table=True):
__tablename__ = "Observations"
id: Optional[int] = Field(default=None, primary_key=True)
indoortempf: float
tempf: float
dewptf: float
windchillf: float
indoorhumidity: float
humidity: float
windspeedmph: float
windgustmph: float
winddir: int
absbaromin: float
baromin: float
rainin: float
dailyrainin: float
weeklyrainin: float
monthlyrainin: float
solarradiation: float
UV: int
dateutc: datetime
realtime: int
rtfreq: int
# Reappropriate @validator decorators to perform convertions from imperial to metric on each datapoint as they are created for output.
# This saves logic/ressources in the endpoint and/or in the client looping datasets or during mapping.
class Metric_Observation(Observation):
@validator('indoortempf', 'tempf', 'dewptf', 'windchillf', allow_reuse=True)
def convertf(cls, v: float):
# convert to Celcius
v = (v - 32) * 5.0/9.0
return round(v, 2)
@validator('windspeedmph', 'windgustmph', allow_reuse=True)
def convertmph(cls, v: float):
# convert to m/s
v = v*0.44704
return round(v, 2)
@validator('absbaromin', 'baromin', allow_reuse=True)
def converthpa(cls, v: float):
# convert to hPa
v = v * 33.86
return round(v, 2)
@validator('rainin', 'dailyrainin', 'weeklyrainin', 'monthlyrainin', allow_reuse=True)
def convertin(cls, v: float):
# convert to hPa
v = v * 25.4
return round(v, 2)
# Dependency function to map an ugly pile of params to a cleaner Observation object
def create_observation(ID: str, PASSWORD: str, indoortempf: float, tempf: float, dewptf: float,
windchillf: float, indoorhumidity: float, humidity: float, windspeedmph: float,
windgustmph: float, winddir: int, absbaromin: float, baromin: float, rainin: float,
dailyrainin: float, weeklyrainin: float, monthlyrainin: float, solarradiation: float,
UV: int, dateutc: str, softwaretype: str, action: str, realtime: int, rtfreq: int):
return Observation(**locals())
| [
"sqlmodel.Field"
] | [((445, 482), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (450, 482), False, 'from sqlmodel import Field, SQLModel\n'), ((566, 586), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (571, 586), False, 'from sqlmodel import Field, SQLModel\n'), ((614, 638), 'sqlmodel.Field', 'Field', ([], {'default': '"""appuser"""'}), "(default='appuser')\n", (619, 638), False, 'from sqlmodel import Field, SQLModel\n'), ((1146, 1183), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1151, 1183), False, 'from sqlmodel import Field, SQLModel\n'), ((1883, 1958), 'pydantic.validator', 'validator', (['"""indoortempf"""', '"""tempf"""', '"""dewptf"""', '"""windchillf"""'], {'allow_reuse': '(True)'}), "('indoortempf', 'tempf', 'dewptf', 'windchillf', allow_reuse=True)\n", (1892, 1958), False, 'from pydantic import BaseModel, validator\n'), ((2085, 2143), 'pydantic.validator', 'validator', (['"""windspeedmph"""', '"""windgustmph"""'], {'allow_reuse': '(True)'}), "('windspeedmph', 'windgustmph', allow_reuse=True)\n", (2094, 2143), False, 'from pydantic import BaseModel, validator\n'), ((2259, 2311), 'pydantic.validator', 'validator', (['"""absbaromin"""', '"""baromin"""'], {'allow_reuse': '(True)'}), "('absbaromin', 'baromin', allow_reuse=True)\n", (2268, 2311), False, 'from pydantic import BaseModel, validator\n'), ((2427, 2516), 'pydantic.validator', 'validator', (['"""rainin"""', '"""dailyrainin"""', '"""weeklyrainin"""', '"""monthlyrainin"""'], {'allow_reuse': '(True)'}), "('rainin', 'dailyrainin', 'weeklyrainin', 'monthlyrainin',\n allow_reuse=True)\n", (2436, 2516), False, 'from pydantic import BaseModel, validator\n'), ((687, 704), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (702, 704), False, 'from datetime import date, datetime\n')] |
from datetime import datetime, timezone
from typing import Generic, Optional, Type, TypeVar
from fastapi_users.authentication.strategy.db import AccessTokenDatabase
from fastapi_users.authentication.strategy.db.models import BaseAccessToken
from pydantic import UUID4
from sqlalchemy import Column, types
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import Field, Session, SQLModel, select
def now_utc():
return datetime.now(timezone.utc)
class SQLModelBaseAccessToken(BaseAccessToken, SQLModel):
__tablename__ = "accesstoken"
token: str = Field(
sa_column=Column("token", types.String(length=43), primary_key=True)
)
created_at: datetime = Field(
default_factory=now_utc,
sa_column=Column(
"created_at", types.DateTime(timezone=True), nullable=False, index=True
),
)
user_id: UUID4 = Field(foreign_key="user.id", nullable=False)
class Config:
orm_mode = True
A = TypeVar("A", bound=SQLModelBaseAccessToken)
class SQLModelAccessTokenDatabase(Generic[A], AccessTokenDatabase[A]):
"""
Access token database adapter for SQLModel.
:param user_db_model: SQLModel model of a DB representation of an access token.
:param session: SQLAlchemy session.
"""
def __init__(self, access_token_model: Type[A], session: Session):
self.access_token_model = access_token_model
self.session = session
async def get_by_token(
self, token: str, max_age: Optional[datetime] = None
) -> Optional[A]:
statement = select(self.access_token_model).where(
self.access_token_model.token == token
)
if max_age is not None:
statement = statement.where(self.access_token_model.created_at >= max_age)
results = self.session.exec(statement)
return results.first()
async def create(self, access_token: A) -> A:
self.session.add(access_token)
self.session.commit()
self.session.refresh(access_token)
return access_token
async def update(self, access_token: A) -> A:
self.session.add(access_token)
self.session.commit()
self.session.refresh(access_token)
return access_token
async def delete(self, access_token: A) -> None:
self.session.delete(access_token)
self.session.commit()
class SQLModelAccessTokenDatabaseAsync(Generic[A], AccessTokenDatabase[A]):
"""
Access token database adapter for SQLModel working purely asynchronously.
:param user_db_model: SQLModel model of a DB representation of an access token.
:param session: SQLAlchemy async session.
"""
def __init__(self, access_token_model: Type[A], session: AsyncSession):
self.access_token_model = access_token_model
self.session = session
async def get_by_token(
self, token: str, max_age: Optional[datetime] = None
) -> Optional[A]:
statement = select(self.access_token_model).where(
self.access_token_model.token == token
)
if max_age is not None:
statement = statement.where(self.access_token_model.created_at >= max_age)
results = await self.session.execute(statement)
object = results.first()
if object is None:
return None
return object[0]
async def create(self, access_token: A) -> A:
self.session.add(access_token)
await self.session.commit()
await self.session.refresh(access_token)
return access_token
async def update(self, access_token: A) -> A:
self.session.add(access_token)
await self.session.commit()
await self.session.refresh(access_token)
return access_token
async def delete(self, access_token: A) -> None:
await self.session.delete(access_token)
await self.session.commit()
| [
"sqlmodel.Field",
"sqlmodel.select"
] | [((974, 1017), 'typing.TypeVar', 'TypeVar', (['"""A"""'], {'bound': 'SQLModelBaseAccessToken'}), "('A', bound=SQLModelBaseAccessToken)\n", (981, 1017), False, 'from typing import Generic, Optional, Type, TypeVar\n'), ((436, 462), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (448, 462), False, 'from datetime import datetime, timezone\n'), ((880, 924), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""user.id"""', 'nullable': '(False)'}), "(foreign_key='user.id', nullable=False)\n", (885, 924), False, 'from sqlmodel import Field, Session, SQLModel, select\n'), ((616, 639), 'sqlalchemy.types.String', 'types.String', ([], {'length': '(43)'}), '(length=43)\n', (628, 639), False, 'from sqlalchemy import Column, types\n'), ((784, 813), 'sqlalchemy.types.DateTime', 'types.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (798, 813), False, 'from sqlalchemy import Column, types\n'), ((1568, 1599), 'sqlmodel.select', 'select', (['self.access_token_model'], {}), '(self.access_token_model)\n', (1574, 1599), False, 'from sqlmodel import Field, Session, SQLModel, select\n'), ((2970, 3001), 'sqlmodel.select', 'select', (['self.access_token_model'], {}), '(self.access_token_model)\n', (2976, 3001), False, 'from sqlmodel import Field, Session, SQLModel, select\n')] |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship, Column, DateTime
from app.models.links import LinkGroupUser
from typing import List, Optional
from pydantic import EmailStr
from app.models.base_uuid_model import BaseUUIDModel
from uuid import UUID
class UserBase(SQLModel):
first_name: str
last_name: str
email: EmailStr = Field(nullable=True, index=True, sa_column_kwargs={"unique": True})
is_active: bool = Field(default=True)
is_superuser: bool = Field(default=False)
birthdate: Optional[datetime] = Field(sa_column=Column(DateTime(timezone=True), nullable=True)) #birthday with timezone
phone: Optional[str]
state: Optional[str]
country: Optional[str]
address: Optional[str]
class User(BaseUUIDModel, UserBase, table=True):
hashed_password: str = Field(
nullable=False, index=True
)
role_id: Optional[UUID] = Field(default=None, foreign_key="role.id")
role: Optional["Role"] = Relationship(back_populates="users", sa_relationship_kwargs={"lazy": "selectin"})
groups: List["Group"] = Relationship(back_populates="users", link_model=LinkGroupUser, sa_relationship_kwargs={"lazy": "selectin"})
| [
"sqlmodel.Field",
"sqlmodel.DateTime",
"sqlmodel.Relationship"
] | [((369, 436), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'index': '(True)', 'sa_column_kwargs': "{'unique': True}"}), "(nullable=True, index=True, sa_column_kwargs={'unique': True})\n", (374, 436), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n'), ((463, 482), 'sqlmodel.Field', 'Field', ([], {'default': '(True)'}), '(default=True)\n', (468, 482), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n'), ((508, 528), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (513, 528), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n'), ((842, 875), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)', 'index': '(True)'}), '(nullable=False, index=True)\n', (847, 875), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n'), ((920, 962), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""role.id"""'}), "(default=None, foreign_key='role.id')\n", (925, 962), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n'), ((992, 1077), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""users"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='users', sa_relationship_kwargs={'lazy':\n 'selectin'})\n", (1004, 1077), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n'), ((1102, 1213), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""users"""', 'link_model': 'LinkGroupUser', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='users', link_model=LinkGroupUser,\n sa_relationship_kwargs={'lazy': 'selectin'})\n", (1114, 1213), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n'), ((588, 611), 'sqlmodel.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (596, 611), False, 'from sqlmodel import Field, SQLModel, Relationship, Column, DateTime\n')] |
"""(Basic) Message Tables/Models.
Models of the Traction tables for Messages (layer over AcaPy basic messaging).
"""
import uuid
from datetime import datetime
from typing import List, Optional
from sqlalchemy.orm import selectinload
from sqlmodel import Field, Relationship
from sqlalchemy import (
Column,
func,
String,
select,
desc,
text,
)
from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY
from sqlmodel.ext.asyncio.session import AsyncSession
from api.db.models.base import BaseModel
from api.db.models.v1.contact import Contact
from api.endpoints.models.v1.errors import (
NotFoundError,
)
class Message(BaseModel, table=True):
"""Message.
Model for the Message table (postgresql specific dialects in use).
This will track Messages for the Tenants (between contacts).
Attributes:
message_id: Traction ID for message OR when receiving, it is the AcaPy message_id
tenant_id: Traction Tenant ID
contact_id: Traction Contact ID
status: Business and Tenant indicator for Credential state; independent of AcaPy
Basic Message Exchange state
role: sender or recipient
deleted: Issuer Credential "soft" delete indicator.
tags: Set by tenant for arbitrary grouping of Credentials
content: actual content of the message
state: The underlying AcaPy message exchange state
sent_time: sent_time data in AcaPy payload
created_at: Timestamp when record was created in Traction
updated_at: Timestamp when record was last modified in Traction
"""
message_id: uuid.UUID = Field(
sa_column=Column(
UUID(as_uuid=True),
primary_key=True,
server_default=text("gen_random_uuid()"),
)
)
tenant_id: uuid.UUID = Field(foreign_key="tenant.id", index=True)
contact_id: uuid.UUID = Field(foreign_key="contact.contact_id", index=True)
status: str = Field(nullable=False)
role: str = Field(nullable=False)
deleted: bool = Field(nullable=False, default=False)
tags: List[str] = Field(sa_column=Column(ARRAY(String)))
content: str = Field(nullable=True)
revocation_comment: str = Field(nullable=True)
# acapy data ---
state: str = Field(nullable=False)
sent_time: datetime = Field(sa_column=Column(TIMESTAMP, nullable=True))
# --- acapy data
# relationships ---
contact: Optional[Contact] = Relationship(back_populates="messages")
# --- relationships
created_at: datetime = Field(
sa_column=Column(TIMESTAMP, nullable=False, server_default=func.now())
)
updated_at: datetime = Field(
sa_column=Column(
TIMESTAMP, nullable=False, server_default=func.now(), onupdate=func.now()
)
)
@classmethod
async def get_by_id(
cls: "Message",
db: AsyncSession,
tenant_id: uuid.UUID,
message_id: uuid.UUID,
deleted: bool | None = False,
) -> "Message":
"""Get Message by id.
Find and return the database Message record
Args:
db: database session
tenant_id: Traction ID of tenant making the call
message_id: Traction ID of Message
Returns: The Traction Message (db) record
Raises:
NotFoundError: if the Message cannot be found by ID and deleted
flag
"""
q = (
select(cls)
.where(cls.tenant_id == tenant_id)
.where(cls.message_id == message_id)
.where(cls.deleted == deleted)
.options(selectinload(cls.contact))
)
q_result = await db.execute(q)
db_rec = q_result.scalar_one_or_none()
if not db_rec:
raise NotFoundError(
code="message.id_not_found",
title="Message does not exist",
detail=f"Message does not exist for id<{message_id}>",
)
return db_rec
@classmethod
async def list_by_contact_id(
cls: "Message",
db: AsyncSession,
tenant_id: uuid.UUID,
contact_id: uuid.UUID,
) -> List["Message"]:
"""List by Contact ID.
Find and return list of Message records for Contact.
tenant_id: Traction ID of tenant making the call
contact_id: Traction ID of Contact
Returns: List of Traction Message (db) records in descending order
"""
q = (
select(cls)
.where(cls.contact_id == contact_id)
.where(cls.tenant_id == tenant_id)
.options(selectinload(cls.contact))
.order_by(desc(cls.updated_at))
)
q_result = await db.execute(q)
db_recs = q_result.scalars()
return db_recs
@classmethod
async def list_by_tenant_id(
cls: "Message",
db: AsyncSession,
tenant_id: uuid.UUID,
) -> List["Message"]:
"""List by Tenant ID.
Find and return list of Message records for Tenant.
tenant_id: Traction ID of tenant making the call
Returns: List of Traction Message (db) records in descending order
"""
q = (
select(cls)
.where(cls.tenant_id == tenant_id)
.options(selectinload(cls.contact))
.order_by(desc(cls.updated_at))
)
q_result = await db.execute(q)
db_recs = q_result.scalars()
return db_recs
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((1808, 1850), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""tenant.id"""', 'index': '(True)'}), "(foreign_key='tenant.id', index=True)\n", (1813, 1850), False, 'from sqlmodel import Field, Relationship\n'), ((1879, 1930), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""contact.contact_id"""', 'index': '(True)'}), "(foreign_key='contact.contact_id', index=True)\n", (1884, 1930), False, 'from sqlmodel import Field, Relationship\n'), ((1949, 1970), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (1954, 1970), False, 'from sqlmodel import Field, Relationship\n'), ((1987, 2008), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (1992, 2008), False, 'from sqlmodel import Field, Relationship\n'), ((2029, 2065), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)', 'default': '(False)'}), '(nullable=False, default=False)\n', (2034, 2065), False, 'from sqlmodel import Field, Relationship\n'), ((2146, 2166), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)'}), '(nullable=True)\n', (2151, 2166), False, 'from sqlmodel import Field, Relationship\n'), ((2197, 2217), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)'}), '(nullable=True)\n', (2202, 2217), False, 'from sqlmodel import Field, Relationship\n'), ((2257, 2278), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (2262, 2278), False, 'from sqlmodel import Field, Relationship\n'), ((2434, 2473), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""messages"""'}), "(back_populates='messages')\n", (2446, 2473), False, 'from sqlmodel import Field, Relationship\n'), ((2321, 2353), 'sqlalchemy.Column', 'Column', (['TIMESTAMP'], {'nullable': '(True)'}), '(TIMESTAMP, nullable=True)\n', (2327, 2353), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((3593, 3618), 'sqlalchemy.orm.selectinload', 'selectinload', (['cls.contact'], {}), '(cls.contact)\n', (3605, 3618), False, 'from sqlalchemy.orm import selectinload\n'), ((3757, 3890), 'api.endpoints.models.v1.errors.NotFoundError', 'NotFoundError', ([], {'code': '"""message.id_not_found"""', 'title': '"""Message does not exist"""', 'detail': 'f"""Message does not exist for id<{message_id}>"""'}), "(code='message.id_not_found', title='Message does not exist',\n detail=f'Message does not exist for id<{message_id}>')\n", (3770, 3890), False, 'from api.endpoints.models.v1.errors import NotFoundError\n'), ((4652, 4672), 'sqlalchemy.desc', 'desc', (['cls.updated_at'], {}), '(cls.updated_at)\n', (4656, 4672), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((5335, 5355), 'sqlalchemy.desc', 'desc', (['cls.updated_at'], {}), '(cls.updated_at)\n', (5339, 5355), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((1661, 1679), 'sqlalchemy.dialects.postgresql.UUID', 'UUID', ([], {'as_uuid': '(True)'}), '(as_uuid=True)\n', (1665, 1679), False, 'from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY\n'), ((2111, 2124), 'sqlalchemy.dialects.postgresql.ARRAY', 'ARRAY', (['String'], {}), '(String)\n', (2116, 2124), False, 'from sqlalchemy.dialects.postgresql import UUID, TIMESTAMP, ARRAY\n'), ((1738, 1763), 'sqlalchemy.text', 'text', (['"""gen_random_uuid()"""'], {}), "('gen_random_uuid()')\n", (1742, 1763), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((2600, 2610), 'sqlalchemy.func.now', 'func.now', ([], {}), '()\n', (2608, 2610), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((2732, 2742), 'sqlalchemy.func.now', 'func.now', ([], {}), '()\n', (2740, 2742), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((2753, 2763), 'sqlalchemy.func.now', 'func.now', ([], {}), '()\n', (2761, 2763), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((4603, 4628), 'sqlalchemy.orm.selectinload', 'selectinload', (['cls.contact'], {}), '(cls.contact)\n', (4615, 4628), False, 'from sqlalchemy.orm import selectinload\n'), ((5286, 5311), 'sqlalchemy.orm.selectinload', 'selectinload', (['cls.contact'], {}), '(cls.contact)\n', (5298, 5311), False, 'from sqlalchemy.orm import selectinload\n'), ((5206, 5217), 'sqlalchemy.select', 'select', (['cls'], {}), '(cls)\n', (5212, 5217), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((3421, 3432), 'sqlalchemy.select', 'select', (['cls'], {}), '(cls)\n', (3427, 3432), False, 'from sqlalchemy import Column, func, String, select, desc, text\n'), ((4474, 4485), 'sqlalchemy.select', 'select', (['cls'], {}), '(cls)\n', (4480, 4485), False, 'from sqlalchemy import Column, func, String, select, desc, text\n')] |
from typing import Union
from fastapi import FastAPI
from pydantic import BaseSettings
from ...utils import get_settings
try:
from sqlalchemy.engine import Engine
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.ext.asyncio.session import AsyncSession
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Database:
"""
A class to wrap the sqlalchemy engine and open a connection session to the db.
"""
def __init__(self, engine: Union[Engine, AsyncEngine], is_async: bool = False):
self.engine = engine
self.is_async = is_async
def open(self) -> Union[Session, AsyncSession]:
if self.is_async:
return AsyncSession(self.engine)
else:
return Session(self.engine)
def setup(app: FastAPI, settings: BaseSettings = None) -> Database:
"""
Install the sqlmodel plugin to the app.
This will attach 1 attribute to `app.state` i.e:
* `db` - `popol.sqlmodel.Database` instance object to open db connection.
Args:
app: FastAPI app.
settings: The settings (can be pydantic.BaseSettings).
Returns:
Database: The database.
"""
settings = get_settings(app, settings)
prefix = "SQLALCHEMY_"
db_uri = getattr(settings, f"{prefix}DATABASE_URI", None)
if not db_uri:
raise RuntimeError(f"{prefix}DATABASE_URI is not set")
async_mode = getattr(settings, f"{prefix}ASYNC_MODE", False)
options = getattr(settings, f"{prefix}OPTIONS", {})
if async_mode:
engine = create_async_engine(db_uri, **options)
else:
engine = create_engine(db_uri, **options)
db = Database(engine, async_mode)
app.state.db = db
async def startup():
# reference: https://github.com/tiangolo/sqlmodel/issues/54#issue-981884262
if async_mode:
async with engine.begin() as conn:
await conn.run_sync(SQLModel.metadata.create_all)
else:
SQLModel.metadata.create_all(engine)
app.add_event_handler("startup", startup)
return db
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.create_engine",
"sqlmodel.ext.asyncio.session.AsyncSession"
] | [((1723, 1761), 'sqlalchemy.ext.asyncio.create_async_engine', 'create_async_engine', (['db_uri'], {}), '(db_uri, **options)\n', (1742, 1761), False, 'from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine\n'), ((1789, 1821), 'sqlmodel.create_engine', 'create_engine', (['db_uri'], {}), '(db_uri, **options)\n', (1802, 1821), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((862, 887), 'sqlmodel.ext.asyncio.session.AsyncSession', 'AsyncSession', (['self.engine'], {}), '(self.engine)\n', (874, 887), False, 'from sqlmodel.ext.asyncio.session import AsyncSession\n'), ((921, 941), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (928, 941), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((2155, 2191), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (2183, 2191), False, 'from sqlmodel import Session, SQLModel, create_engine\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from datetime import datetime
from functools import partial
from io import StringIO
from typing import (
Any,
Callable,
ClassVar,
Dict,
Iterable,
List,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
from uuid import UUID
from sqlalchemy import Column, DateTime
from sqlalchemy.orm import registry
from sqlalchemy.sql import func
from sqlalchemy.sql.base import ImmutableColumnCollection
from sqlalchemy.sql.schema import Table
from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass
from dbgen.core.args import ArgLike, Const
from dbgen.core.attribute import Attribute
from dbgen.core.base import Base, BaseMeta
from dbgen.core.node.load import Load, LoadEntity
from dbgen.core.type_registry import column_registry
from dbgen.exceptions import DBgenInvalidArgument, DBgenMissingInfo
def inherit_field(
bases, field_name: str, initial_value=set(), joiner=lambda x, y: x.union(y), type_check: bool = True
):
field_val = initial_value
for base in reversed(bases):
curr_id = getattr(base, field_name, initial_value)
if curr_id is not None:
if type_check and not isinstance(curr_id, type(initial_value)):
raise TypeError(f"Invalid {field_name} val: {curr_id}")
field_val = joiner(field_val, curr_id)
return field_val
overwrite_parent = partial(inherit_field, initial_value="", joiner=lambda x, y: y)
DEFAULT_ENTITY_REGISTRY = registry()
logger = logging.getLogger('dbgen.core.entity')
_T = TypeVar("_T")
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]:
return lambda a: a
@__dataclass_transform__(
kw_only_default=True,
field_descriptors=(
Field,
FieldInfo,
Attribute,
),
)
class EntityMetaclass(SQLModelMetaclass, BaseMeta):
def __new__(mcs, name, bases, attrs, **kwargs):
# Join the keys from all parents for __identifying__, _hashinclude_, and _hashexclude_
new_attrs = attrs.copy()
for value in ("__identifying__", "_hashexclude_", "_hashinclude_"):
starting = new_attrs.get(value, set())
if isinstance(starting, list):
starting = set(starting)
new_attrs[value] = starting.union(inherit_field(bases, value))
if kwargs.get('all_id', False):
assert (
"__identifying__" not in attrs
), f"Error with Entity {name}. Can't supply both all_id kwarg and __identifying__ attr"
new_attrs['__identifying__'] = new_attrs['__identifying__'].union(
{key for key in attrs.get('__annotations__', {})}
)
# Automatically add identifying attributes to the hashinclude
new_attrs["_hashinclude_"].update(new_attrs.get("__identifying__"))
# Set the default registry to be the default_registry
if "registry" not in kwargs:
kwargs["registry"] = DEFAULT_ENTITY_REGISTRY
# Call SQLModelMetaclass.__new__
cls = super().__new__(mcs, name, bases, new_attrs, **kwargs)
# Validate that we don't have table=True on current class and a base
current_cls_is_table = getattr(cls.__config__, "table", False) and kwargs.get("table")
setattr(cls, "_is_table", current_cls_is_table)
if current_cls_is_table:
base_is_table = False
for base in bases:
config = getattr(base, "__config__", None)
if config and getattr(config, "table", False):
base_is_table = True
offending_base_name = base.__name__
break
if base_is_table:
raise ValueError(
"Can't use table=True when inheriting from another table.\n"
f"Both {offending_base_name} and {name} have table=True set.\n"
"Create a common ancestor with table=False and mutaually inherit from that."
)
# Need to look into parents to find schema, only using most recent
schema_key = "__schema__"
schema = getattr(cls, schema_key, "") or overwrite_parent(bases, schema_key)
table_args = getattr(cls, "__table_args__", None) or dict().copy()
if not schema:
schema = "public"
if schema:
setattr(cls, schema_key, schema)
table_args = table_args.copy()
table_args.update({"schema": schema})
setattr(cls, "__table_args__", table_args)
setattr(
cls,
"__fulltablename__",
f"{schema}.{cls.__tablename__}" if schema else cls.__tablename__,
)
# Validate __identifying__ by making sure all attribute exists on Entity
unknown_ids = list(
filter(
lambda x: x not in cls.__fields__,
new_attrs["__identifying__"],
)
)
if unknown_ids:
raise ValueError(
f"Invalid Entity Class Definition. Identifying attributes not found on class: {unknown_ids}"
)
return cls
def __init__(cls, name, bases, attrs, **kwargs):
if cls._is_table:
registry = cls._sa_registry
if cls.__fulltablename__ in registry.metadata.tables:
raise ValueError(
f"The Class {attrs.get('__module__','')}.{name}'s __table_name__ {cls.__tablename__!r} already present in the registry's metadata.\n"
"This can occur if two Entity sub-classes share a case-insensitive name or if the same table has been added to the registry twice.\n"
"To address this you can set a different __tablename__ attribute for one or to clear the registry, you can call Entity.clear_registry() prior to declaring this class."
)
super().__init__(name, bases, attrs, **kwargs)
class BaseEntity(Base, SQLModel, metaclass=EntityMetaclass):
__identifying__: ClassVar[Set[str]]
__fulltablename__: ClassVar[str]
__schema__: ClassVar[str]
__table__: ClassVar[Table]
_is_table: ClassVar[bool]
_sa_registry: ClassVar[registry]
class Config:
"""Pydantic Config"""
force_validation = True
@classmethod
def _columns(cls) -> ImmutableColumnCollection:
if isinstance(cls.__fulltablename__, str):
table = cls.metadata.tables.get(cls.__fulltablename__)
if table is not None:
return table.c
raise ValueError(
f"{cls.__fulltablename__} not in metadata, is table=True set? {cls.metadata.tables}"
)
raise ValueError(f"Can't read __fulltablename__ {cls.__fulltablename__}")
@classmethod
def _get_load_entity(cls) -> LoadEntity:
"""Returns a LoadEntity which has the bare-minimum needed to load into this table."""
# Check that entity is a table
if not cls._is_table:
raise ValueError(f"{cls.__qualname__} is not a table. Can't get LoadEntity of a non-table Entity")
columns = cls._columns()
# Search for primary key name
primary_keys = [x.name for x in cls.__table__.primary_key]
if len(primary_keys) > 1:
raise NotImplementedError(f"Multiple primary_keys found: {primary_keys}")
elif not primary_keys:
raise ValueError(f"No primary key found on {cls.__name__}'s columns:\n{columns}")
primary_key_name = primary_keys[0]
all_attrs = {col.name: col for col in columns if not col.foreign_keys}
all_fks = {col.name: col for col in columns if col.foreign_keys}
# Create the attribute dict which maps attribute name to column type
attributes = {}
for col_name, col in columns.items():
try:
dt = column_registry[col.type]
attributes[col_name] = (
f"{dt.type_name}[]" if getattr(col.type, '_is_array', False) else dt.type_name
)
except KeyError:
raise TypeError(
f"Cannot parse column {col_name} on table {cls.__tablename__} due to its unknown type {type(col.type)}"
)
foreign_keys = set(all_fks.keys())
identifying_attributes = {x for x in all_attrs if x in cls.__identifying__}
identifying_fks = [x for x in all_fks if x in cls.__identifying__]
return LoadEntity(
name=cls.__tablename__ or cls.__name__,
schema_=cls.__schema__,
entity_class_str=f"{cls.__module__}.{cls.__qualname__}",
primary_key_name=primary_key_name,
attributes=attributes,
foreign_keys=foreign_keys,
identifying_attributes=identifying_attributes,
identifying_foreign_keys=identifying_fks,
)
@classmethod
def load(cls, insert: bool = False, validation: Optional[str] = None, **kwargs) -> Load[UUID]:
name = cls.__tablename__
assert isinstance(name, str)
# TODO check if we need this anymore
key_filter = lambda keyval: keyval[0] != "insert" and not isinstance(keyval[1], (ArgLike, Load))
invalid_args = list(filter(key_filter, kwargs.items()))
JSONAble = (str, int, float, dict, tuple)
for arg_name, invalid_arg in invalid_args:
# Check Invalid args to see if a const block would be appropriate
if isinstance(invalid_arg, JSONAble):
kwargs[arg_name] = Const(invalid_arg)
else:
raise ValueError(f"Non-jsonable constant value found: {arg_name}\n{invalid_arg}")
# get PK
pk = kwargs.pop(name, None)
# if we don't have a PK reference check for missing ID info
if not pk:
missing = cls.__identifying__ - set(kwargs)
if missing:
err = (
"Cannot refer to a row in {} without a PK or essential data."
" Missing essential data: {}"
)
raise DBgenMissingInfo(err.format(name, missing))
# Iterate through the columns to ensure we have no unknown kwargs
class_columns: List[Column] = list(cls._columns()) or []
all_attrs = {col.name: col for col in class_columns if not col.foreign_keys}
all_fks = {col.name: col for col in class_columns if col.foreign_keys}
attrs = {key: val for key, val in kwargs.items() if key in all_attrs}
fks = {key: col for key, col in kwargs.items() if key not in attrs}
for fk in fks:
if fk not in all_fks:
raise DBgenInvalidArgument(f'unknown "{fk}" kwarg in Load of {name}')
for k, v in fks.items():
if isinstance(v, Load):
fks[k] = v[v.outputs[0]]
return Load(
load_entity=cls._get_load_entity(),
primary_key=pk,
inputs={**attrs, **fks},
insert=insert,
validation=validation,
)
@classmethod
def _quick_load(cls, connection, rows: Iterable[Iterable[Any]], column_names: List[str]) -> None:
"""Bulk load many rows into entity"""
from dbgen.templates import jinja_env
# Assemble rows into stringio for copy_from statement
io_obj = StringIO()
for row in rows:
io_obj.write("\t".join(map(str, row)) + "\n")
io_obj.seek(0)
# Temporary table to copy data into
# Set name to be hash of input rows to ensure uniqueness for parallelization
temp_table_name = f"{cls.__tablename__}_temp_load_table"
load_entity = cls._get_load_entity()
# Need to create a temp table to copy data into
# Add an auto_inc column so that data can be ordered by its insert location
drop_temp_table = f"DROP TABLE IF EXISTS {temp_table_name};"
create_temp_table = """
CREATE TEMPORARY TABLE {temp_table_name} AS
TABLE {schema}.{obj}
WITH NO DATA;
ALTER TABLE {temp_table_name}
ADD COLUMN auto_inc SERIAL NOT NULL;
""".format(
obj=load_entity.name,
schema=load_entity.schema_,
temp_table_name=temp_table_name,
)
insert_template = jinja_env.get_template("insert.sql.jinja")
template_args = dict(
obj=load_entity.name,
obj_pk_name=load_entity.primary_key_name,
temp_table_name=temp_table_name,
all_column_names=column_names,
schema=load_entity.schema_,
first=False,
update=True,
)
insert_statement = insert_template.render(**template_args)
with connection.cursor() as curs:
curs.execute(drop_temp_table)
connection.commit()
with connection.cursor() as curs:
curs.execute(create_temp_table)
curs.copy_from(io_obj, temp_table_name, null="None", columns=column_names)
curs.execute(insert_statement)
connection.commit()
with connection.cursor() as curs:
curs.execute(drop_temp_table)
connection.commit()
@classmethod
def clear_registry(cls):
"""Removes all Entity classes from the Entity registry"""
cls.metadata.clear()
cls._sa_registry.dispose()
@classmethod
def foreign_key(cls, primary_key: bool = False):
"""Removes all Entity classes from the Entity registry"""
load_entity = cls._get_load_entity()
return Field(
None,
foreign_key=f"{cls.__fulltablename__}.{load_entity.primary_key_name}",
primary_key=primary_key,
)
id_field = Field(
default=None,
primary_key=True,
sa_column_kwargs={"autoincrement": False, "unique": True},
)
gen_id_field = Field(
default=None,
)
get_created_at_field = lambda: Field(
None, sa_column=Column(DateTime(timezone=True), server_default=func.now())
)
class Entity(BaseEntity):
id: Optional[UUID] = id_field
gen_id: Optional[UUID]
created_at: Optional[datetime] = get_created_at_field()
Model = TypeVar("Model", bound="BaseEntity")
@overload
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]],
base: None = None,
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[BaseEntity]:
...
@overload
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]],
base: Type[Model],
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[Model]:
...
def create_entity(
model_name: str,
field_definitions: Dict[str, Union[Tuple[type, Any], type, Tuple[type, ...]]] = None,
base: Optional[Type[Model]] = None,
identifying: Set[str] = None,
schema: Optional[str] = None,
__module__: str = __name__,
**kwargs,
) -> Type[Model]:
"""
Dynamically create a model, similar to the Pydantic `create_model()` method
:param model_name: name of the created model
:param field_definitions: data fields of the create model
:param base: base to inherit from
:param __module__: module of the created model
:param **kwargs: Other keyword arguments to pass to the metaclass constructor, e.g. table=True
"""
if base is None:
base = cast(Type["Model"], BaseEntity)
field_definitions = field_definitions or {}
fields = {}
annotations = {}
identifying = identifying or set()
for f_name, f_def in field_definitions.items():
if f_name.startswith("_"):
raise ValueError("Field names may not start with an underscore")
try:
if isinstance(f_def, tuple) and len(f_def) > 1:
f_annotation, f_value = f_def
elif isinstance(f_def, tuple):
f_annotation, f_value = f_def[0], Field(nullable=False)
else:
f_annotation, f_value = f_def, Field(nullable=False)
except ValueError as e:
raise ValueError(
"field_definitions values must be either a tuple of (<type_annotation>, <default_value>)"
"or just a type annotation [or a 1-tuple of (<type_annotation>,)]"
) from e
if f_annotation:
annotations[f_name] = f_annotation
fields[f_name] = f_value
namespace = {
"__annotations__": annotations,
"__identifying__": identifying,
"__module__": __module__,
}
if schema is not None:
namespace.update({"__schema__": schema})
if "registry" in kwargs:
assert isinstance(kwargs.get("registry"), registry), "Invalid type for registry:"
namespace.update(fields) # type: ignore
return EntityMetaclass(model_name, (base,), namespace, **kwargs) # type: ignore
| [
"sqlmodel.main.Field"
] | [((2008, 2071), 'functools.partial', 'partial', (['inherit_field'], {'initial_value': '""""""', 'joiner': '(lambda x, y: y)'}), "(inherit_field, initial_value='', joiner=lambda x, y: y)\n", (2015, 2071), False, 'from functools import partial\n'), ((2098, 2108), 'sqlalchemy.orm.registry', 'registry', ([], {}), '()\n', (2106, 2108), False, 'from sqlalchemy.orm import registry\n'), ((2118, 2156), 'logging.getLogger', 'logging.getLogger', (['"""dbgen.core.entity"""'], {}), "('dbgen.core.entity')\n", (2135, 2156), False, 'import logging\n'), ((2163, 2176), 'typing.TypeVar', 'TypeVar', (['"""_T"""'], {}), "('_T')\n", (2170, 2176), False, 'from typing import Any, Callable, ClassVar, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union, cast, overload\n'), ((14519, 14620), 'sqlmodel.main.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'sa_column_kwargs': "{'autoincrement': False, 'unique': True}"}), "(default=None, primary_key=True, sa_column_kwargs={'autoincrement': \n False, 'unique': True})\n", (14524, 14620), False, 'from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass\n'), ((14646, 14665), 'sqlmodel.main.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (14651, 14665), False, 'from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass\n'), ((14952, 14988), 'typing.TypeVar', 'TypeVar', (['"""Model"""'], {'bound': '"""BaseEntity"""'}), "('Model', bound='BaseEntity')\n", (14959, 14988), False, 'from typing import Any, Callable, ClassVar, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union, cast, overload\n'), ((9247, 9569), 'dbgen.core.node.load.LoadEntity', 'LoadEntity', ([], {'name': '(cls.__tablename__ or cls.__name__)', 'schema_': 'cls.__schema__', 'entity_class_str': 'f"""{cls.__module__}.{cls.__qualname__}"""', 'primary_key_name': 'primary_key_name', 'attributes': 'attributes', 'foreign_keys': 'foreign_keys', 'identifying_attributes': 'identifying_attributes', 'identifying_foreign_keys': 'identifying_fks'}), "(name=cls.__tablename__ or cls.__name__, schema_=cls.__schema__,\n entity_class_str=f'{cls.__module__}.{cls.__qualname__}',\n primary_key_name=primary_key_name, attributes=attributes, foreign_keys=\n foreign_keys, identifying_attributes=identifying_attributes,\n identifying_foreign_keys=identifying_fks)\n", (9257, 9569), False, 'from dbgen.core.node.load import Load, LoadEntity\n'), ((12132, 12142), 'io.StringIO', 'StringIO', ([], {}), '()\n', (12140, 12142), False, 'from io import StringIO\n'), ((13093, 13135), 'dbgen.templates.jinja_env.get_template', 'jinja_env.get_template', (['"""insert.sql.jinja"""'], {}), "('insert.sql.jinja')\n", (13115, 13135), False, 'from dbgen.templates import jinja_env\n'), ((14351, 14468), 'sqlmodel.main.Field', 'Field', (['None'], {'foreign_key': 'f"""{cls.__fulltablename__}.{load_entity.primary_key_name}"""', 'primary_key': 'primary_key'}), "(None, foreign_key=\n f'{cls.__fulltablename__}.{load_entity.primary_key_name}', primary_key=\n primary_key)\n", (14356, 14468), False, 'from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass\n'), ((16326, 16357), 'typing.cast', 'cast', (["Type['Model']", 'BaseEntity'], {}), "(Type['Model'], BaseEntity)\n", (16330, 16357), False, 'from typing import Any, Callable, ClassVar, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union, cast, overload\n'), ((10325, 10343), 'dbgen.core.args.Const', 'Const', (['invalid_arg'], {}), '(invalid_arg)\n', (10330, 10343), False, 'from dbgen.core.args import ArgLike, Const\n'), ((11458, 11521), 'dbgen.exceptions.DBgenInvalidArgument', 'DBgenInvalidArgument', (['f"""unknown "{fk}" kwarg in Load of {name}"""'], {}), '(f\'unknown "{fk}" kwarg in Load of {name}\')\n', (11478, 11521), False, 'from dbgen.exceptions import DBgenInvalidArgument, DBgenMissingInfo\n'), ((14739, 14762), 'sqlalchemy.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (14747, 14762), False, 'from sqlalchemy import Column, DateTime\n'), ((14779, 14789), 'sqlalchemy.sql.func.now', 'func.now', ([], {}), '()\n', (14787, 14789), False, 'from sqlalchemy.sql import func\n'), ((16860, 16881), 'sqlmodel.main.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (16865, 16881), False, 'from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass\n'), ((16947, 16968), 'sqlmodel.main.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (16952, 16968), False, 'from sqlmodel.main import Field, FieldInfo, SQLModel, SQLModelMetaclass\n')] |
from datetime import datetime, date
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryVpi(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id: int
detail: str
vpi_method: str
velum_structure: str
tonsil_enlargement_right: str
tonsil_enlargement_left: str
adenoid_hypertrophy_percent: int
tonsilectomy_right: bool
tonsilectomy_left: bool
ademoidectomy: bool
tongue_tie: bool
tongue_tie_frenectomy: bool
veloadenoid_clodure: str
gap_type: str
gap_length: str
vpi: str
speech_therapy: bool
furlow_palatoplasty: bool
furlow_palatoplasty_date: date
sphincteroplasty: bool
sphicteroplasty_date: date
obturator: bool
obturator_date: date
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
@router.post("/history_vpi", response_model=HistoryVpi)
async def create_history_vpi(history_vpi: HistoryVpi, session: AsyncSession = Depends(get_session)):
session.add(history_vpi)
await session.commit()
await session.refresh(history_vpi)
return history_vpi
@router.get("/history_vpi/{id}", response_model=HistoryVpi)
async def get_history_vpi(id: int, session: AsyncSession = Depends(get_session)):
history_vpis = await session.execute(select(HistoryVpi).where(HistoryVpi.id == id))
history_vpi = history_vpis.scalars().first()
return history_vpi
@router.put("/history_vpi/{id}", response_model=HistoryVpi)
async def update_history_vpi(id: int, session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_vpi/{id}")
async def delete_history_vpi(session: AsyncSession = Depends(get_session)):
return None | [
"sqlmodel.Field"
] | [((261, 272), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (270, 272), False, 'from fastapi import APIRouter, Depends\n'), ((339, 376), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (344, 376), False, 'from sqlmodel import Field, SQLModel\n'), ((1212, 1232), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1219, 1232), False, 'from fastapi import APIRouter, Depends\n'), ((1474, 1494), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1481, 1494), False, 'from fastapi import APIRouter, Depends\n'), ((1781, 1801), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1788, 1801), False, 'from fastapi import APIRouter, Depends\n'), ((1911, 1931), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1918, 1931), False, 'from fastapi import APIRouter, Depends\n'), ((1538, 1556), 'sqlalchemy.select', 'select', (['HistoryVpi'], {}), '(HistoryVpi)\n', (1544, 1556), False, 'from sqlalchemy import select\n')] |
from datetime import datetime
from typing import List, Optional
from pydantic import BaseModel
from pydantic.networks import HttpUrl
from sqlmodel import Field, SQLModel
PageType = str
class Ecoindex(SQLModel):
grade: Optional[str] = Field(
default=None,
title="Ecoindex grade",
description="Is the corresponding ecoindex grade of the page (from A to G)",
)
score: Optional[float] = Field(
default=None,
title="Ecoindex score",
description="Is the corresponding ecoindex score of the page (0 to 100)",
ge=0,
le=100,
)
ges: Optional[float] = Field(
default=None,
title="Ecoindex GES equivalent",
description="Is the equivalent of greenhouse gases emission (in `gCO2e`) of the page",
ge=0,
)
water: Optional[float] = Field(
default=None,
title="Ecoindex Water equivalent",
description="Is the equivalent water consumption (in `cl`) of the page",
ge=0,
)
class Page(BaseModel):
logs: List
outer_html: str
nodes: List
class PageMetrics(SQLModel):
size: float = Field(
default=...,
title="Page size",
description="Is the size of the page and of the downloaded elements of the page in KB",
ge=0,
)
nodes: int = Field(
default=...,
title="Page nodes",
description="Is the number of the DOM elements in the page",
ge=0,
)
requests: int = Field(
default=...,
title="Page requests",
description="Is the number of external requests made by the page",
ge=0,
)
class WindowSize(BaseModel):
height: int = Field(
default=...,
title="Window height",
description="Height of the simulated window in pixel",
)
width: int = Field(
default=...,
title="Window width",
description="Width of the simulated window in pixel",
)
def __str__(self) -> str:
return f"{self.width},{self.height}"
class WebPage(SQLModel):
width: Optional[int] = Field(
default=None,
title="Page Width",
description="Width of the simulated window in pixel",
)
height: Optional[int] = Field(
default=None,
title="Page Height",
description="Height of the simulated window in pixel",
)
url: Optional[HttpUrl] = Field(
default=None, title="Page url", description="Url of the analysed page"
)
class Result(Ecoindex, PageMetrics, WebPage):
date: Optional[datetime] = Field(
default=None, title="Analysis datetime", description="Date of the analysis"
)
page_type: Optional[PageType] = Field(
default=None,
title="Page type",
description="Is the type of the page, based ton the [opengraph type tag](https://ogp.me/#types)",
)
| [
"sqlmodel.Field"
] | [((242, 367), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex grade"""', 'description': '"""Is the corresponding ecoindex grade of the page (from A to G)"""'}), "(default=None, title='Ecoindex grade', description=\n 'Is the corresponding ecoindex grade of the page (from A to G)')\n", (247, 367), False, 'from sqlmodel import Field, SQLModel\n'), ((423, 559), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex score"""', 'description': '"""Is the corresponding ecoindex score of the page (0 to 100)"""', 'ge': '(0)', 'le': '(100)'}), "(default=None, title='Ecoindex score', description=\n 'Is the corresponding ecoindex score of the page (0 to 100)', ge=0, le=100)\n", (428, 559), False, 'from sqlmodel import Field, SQLModel\n'), ((629, 783), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex GES equivalent"""', 'description': '"""Is the equivalent of greenhouse gases emission (in `gCO2e`) of the page"""', 'ge': '(0)'}), "(default=None, title='Ecoindex GES equivalent', description=\n 'Is the equivalent of greenhouse gases emission (in `gCO2e`) of the page',\n ge=0)\n", (634, 783), False, 'from sqlmodel import Field, SQLModel\n'), ((843, 981), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Ecoindex Water equivalent"""', 'description': '"""Is the equivalent water consumption (in `cl`) of the page"""', 'ge': '(0)'}), "(default=None, title='Ecoindex Water equivalent', description=\n 'Is the equivalent water consumption (in `cl`) of the page', ge=0)\n", (848, 981), False, 'from sqlmodel import Field, SQLModel\n'), ((1141, 1281), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Page size"""', 'description': '"""Is the size of the page and of the downloaded elements of the page in KB"""', 'ge': '(0)'}), "(default=..., title='Page size', description=\n 'Is the size of the page and of the downloaded elements of the page in KB',\n ge=0)\n", (1146, 1281), False, 'from sqlmodel import Field, SQLModel\n'), ((1329, 1439), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Page nodes"""', 'description': '"""Is the number of the DOM elements in the page"""', 'ge': '(0)'}), "(default=..., title='Page nodes', description=\n 'Is the number of the DOM elements in the page', ge=0)\n", (1334, 1439), False, 'from sqlmodel import Field, SQLModel\n'), ((1494, 1613), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Page requests"""', 'description': '"""Is the number of external requests made by the page"""', 'ge': '(0)'}), "(default=..., title='Page requests', description=\n 'Is the number of external requests made by the page', ge=0)\n", (1499, 1613), False, 'from sqlmodel import Field, SQLModel\n'), ((1697, 1798), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Window height"""', 'description': '"""Height of the simulated window in pixel"""'}), "(default=..., title='Window height', description=\n 'Height of the simulated window in pixel')\n", (1702, 1798), False, 'from sqlmodel import Field, SQLModel\n'), ((1842, 1941), 'sqlmodel.Field', 'Field', ([], {'default': '...', 'title': '"""Window width"""', 'description': '"""Width of the simulated window in pixel"""'}), "(default=..., title='Window width', description=\n 'Width of the simulated window in pixel')\n", (1847, 1941), False, 'from sqlmodel import Field, SQLModel\n'), ((2098, 2196), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page Width"""', 'description': '"""Width of the simulated window in pixel"""'}), "(default=None, title='Page Width', description=\n 'Width of the simulated window in pixel')\n", (2103, 2196), False, 'from sqlmodel import Field, SQLModel\n'), ((2251, 2351), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page Height"""', 'description': '"""Height of the simulated window in pixel"""'}), "(default=None, title='Page Height', description=\n 'Height of the simulated window in pixel')\n", (2256, 2351), False, 'from sqlmodel import Field, SQLModel\n'), ((2407, 2484), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page url"""', 'description': '"""Url of the analysed page"""'}), "(default=None, title='Page url', description='Url of the analysed page')\n", (2412, 2484), False, 'from sqlmodel import Field, SQLModel\n'), ((2578, 2665), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Analysis datetime"""', 'description': '"""Date of the analysis"""'}), "(default=None, title='Analysis datetime', description=\n 'Date of the analysis')\n", (2583, 2665), False, 'from sqlmodel import Field, SQLModel\n'), ((2711, 2857), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'title': '"""Page type"""', 'description': '"""Is the type of the page, based ton the [opengraph type tag](https://ogp.me/#types)"""'}), "(default=None, title='Page type', description=\n 'Is the type of the page, based ton the [opengraph type tag](https://ogp.me/#types)'\n )\n", (2716, 2857), False, 'from sqlmodel import Field, SQLModel\n')] |
"""initial2
Revision ID: 9d9a<PASSWORD>dbfd7
Revises: <PASSWORD>
Create Date: 2021-11-01 04:28:38.426261
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '9d9a746db<PASSWORD>'
down_revision = 'a<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('images',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('url', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_images_id'), 'images', ['id'], unique=False)
op.create_index(op.f('ix_images_url'), 'images', ['url'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_images_url'), table_name='images')
op.drop_index(op.f('ix_images_id'), table_name='images')
op.drop_table('images')
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((994, 1017), 'alembic.op.drop_table', 'op.drop_table', (['"""images"""'], {}), "('images')\n", (1007, 1017), False, 'from alembic import op\n'), ((561, 590), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (584, 590), True, 'import sqlalchemy as sa\n'), ((617, 637), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (621, 637), False, 'from alembic import op\n'), ((691, 712), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (695, 712), False, 'from alembic import op\n'), ((885, 906), 'alembic.op.f', 'op.f', (['"""ix_images_url"""'], {}), "('ix_images_url')\n", (889, 906), False, 'from alembic import op\n'), ((947, 967), 'alembic.op.f', 'op.f', (['"""ix_images_id"""'], {}), "('ix_images_id')\n", (951, 967), False, 'from alembic import op\n'), ((452, 464), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (462, 464), True, 'import sqlalchemy as sa\n'), ((504, 538), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (536, 538), False, 'import sqlmodel\n')] |
"""
Node related APIs.
"""
import logging
from datetime import datetime
from typing import List, Optional
from fastapi import APIRouter, Depends
from sqlmodel import Session, SQLModel, select
from datajunction.models.column import ColumnType
from datajunction.models.node import Node, NodeType
from datajunction.utils import get_session
_logger = logging.getLogger(__name__)
router = APIRouter()
class SimpleColumn(SQLModel):
"""
A simplified column schema, without ID or dimensions.
"""
name: str
type: ColumnType
class NodeMetadata(SQLModel):
"""
A node with information about columns and if it is a metric.
"""
id: int
name: str
description: str = ""
created_at: datetime
updated_at: datetime
type: NodeType
expression: Optional[str] = None
columns: List[SimpleColumn]
@router.get("/nodes/", response_model=List[NodeMetadata])
def read_nodes(*, session: Session = Depends(get_session)) -> List[NodeMetadata]:
"""
List the available nodes.
"""
return session.exec(select(Node)).all()
| [
"sqlmodel.select"
] | [((351, 378), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (368, 378), False, 'import logging\n'), ((388, 399), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (397, 399), False, 'from fastapi import APIRouter, Depends\n'), ((946, 966), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (953, 966), False, 'from fastapi import APIRouter, Depends\n'), ((1061, 1073), 'sqlmodel.select', 'select', (['Node'], {}), '(Node)\n', (1067, 1073), False, 'from sqlmodel import Session, SQLModel, select\n')] |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from sqlmodel import Session, select, SQLModel, or_
from sqlalchemy.exc import NoResultFound
from ..models.role import Role
from datetime import datetime
router = APIRouter(prefix="/api/roles", tags=["role"])
session = Session(engine)
# Post new role
@router.post("/")
async def post_role(*, role: Role, session: Session = Depends(get_session)):
statement = select(Role).where(Role.id == role.id)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(role)
session.commit()
session.refresh(role)
return role
# Get list of all roles
@router.get("/")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role)
results = session.exec(statement).all()
return results
# Get list of active roles
@router.get("/active")
async def read_roles(session: Session = Depends(get_session)):
statement = select(Role).where(Role.is_active == True)
results = session.exec(statement).all()
return results
@router.put("/{role_id}/activate")
async def activate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_activate = session.exec(statement).one()
role_to_activate.is_active = True
role_to_activate.updated_at = datetime.now()
session.add(role_to_activate)
session.commit()
session.refresh(role_to_activate)
return role_to_activate
# Deactivate role
@router.put("/{role_id}/deactivate")
async def deactivate_role(
role_id: str = None,
session: Session = Depends(get_session),
):
statement = select(Role).where(Role.id == role_id)
role_to_deactivate = session.exec(statement).one()
role_to_deactivate.is_active = False
role_to_deactivate.updated_at = datetime.now()
session.add(role_to_deactivate)
session.commit()
session.refresh(role_to_deactivate)
return role_to_deactivate
# Update role
@router.put("/")
async def update_role(
id: str = None,
new_name: str = None,
new_short_name: str = None,
is_active: bool = None,
session: Session = Depends(get_session),
):
statement = select(Role.is_active).where(Role.id == id)
result = session.exec(statement).first()
if result == True:
statement = select(Role).where(Role.id == id)
role_to_update = session.exec(statement).one()
if new_name != None:
role_to_update.name = new_name
if new_short_name != None:
role_to_update.short_name = new_short_name
if is_active != None:
role_to_update.is_active = is_active
session.add(role_to_update)
role_to_update.updated_at = datetime.now()
session.commit()
session.refresh(role_to_update)
return role_to_update
else:
return False
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((243, 288), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/api/roles"""', 'tags': "['role']"}), "(prefix='/api/roles', tags=['role'])\n", (252, 288), False, 'from fastapi import APIRouter, Depends\n'), ((299, 314), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (306, 314), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((404, 424), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (411, 424), False, 'from fastapi import APIRouter, Depends\n'), ((769, 789), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (776, 789), False, 'from fastapi import APIRouter, Depends\n'), ((808, 820), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (814, 820), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((976, 996), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (983, 996), False, 'from fastapi import APIRouter, Depends\n'), ((1231, 1251), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1238, 1251), False, 'from fastapi import APIRouter, Depends\n'), ((1436, 1450), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1448, 1450), False, 'from datetime import datetime\n'), ((1704, 1724), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1711, 1724), False, 'from fastapi import APIRouter, Depends\n'), ((1916, 1930), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1928, 1930), False, 'from datetime import datetime\n'), ((2243, 2263), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2250, 2263), False, 'from fastapi import APIRouter, Depends\n'), ((2818, 2832), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2830, 2832), False, 'from datetime import datetime\n'), ((443, 455), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (449, 455), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1015, 1027), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1021, 1027), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1272, 1284), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1278, 1284), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((1745, 1757), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (1751, 1757), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((2284, 2306), 'sqlmodel.select', 'select', (['Role.is_active'], {}), '(Role.is_active)\n', (2290, 2306), False, 'from sqlmodel import Session, select, SQLModel, or_\n'), ((2416, 2428), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (2422, 2428), False, 'from sqlmodel import Session, select, SQLModel, or_\n')] |
from sqlmodel import Session, select
from config.Database import Database
class UserDatabase(Database):
def __init__(self) -> None:
super(UserDatabase, self).__init__()
async def get_by_params(self, object: object, email: str):
with Session(self._engine) as session:
statement = select(object).where(object.email == email)
return session.exec(statement).first() | [
"sqlmodel.Session",
"sqlmodel.select"
] | [((259, 280), 'sqlmodel.Session', 'Session', (['self._engine'], {}), '(self._engine)\n', (266, 280), False, 'from sqlmodel import Session, select\n'), ((317, 331), 'sqlmodel.select', 'select', (['object'], {}), '(object)\n', (323, 331), False, 'from sqlmodel import Session, select\n')] |
"""empty message
Revision ID: de316f0831f9
Revises: 6<PASSWORD>1e462e9
Create Date: 2021-11-19 23:38:39.754126
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "de316f0831f9"
down_revision = "60f151e462e9"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
sale_type = postgresql.ENUM(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
)
sale_type.create(op.get_bind())
op.add_column(
"orders",
sa.Column(
"sale_type",
sa.Enum(
"SALE_IN_PIX",
"SALE_IN_DEBT",
"SALE_IN_CREDIT",
"SALE_IN_MONEY",
"SALE_IN_TRANSFER",
"SALE_IN_BILLET",
"SALE_OTHERS",
name="saletype",
),
nullable=True,
),
)
op.drop_index("ix_balance_operation", table_name="balance")
op.drop_index("ix_balance_type", table_name="balance")
op.drop_column("balance", "type")
op.alter_column("clients", "email", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "phone", existing_type=sa.VARCHAR(), nullable=False)
op.alter_column("clients", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.alter_column("orders", "owner_id", existing_type=postgresql.UUID(), nullable=False)
op.add_column("clients", sa.Column("zip_code", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
op.add_column("clients", sa.Column("address", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
op.add_column("order_details", sa.Column("value", sa.Float(), nullable=True))
op.drop_column("order_details", "sell_value")
op.drop_column("order_details", "cost")
op.drop_index("ix_fiscal_note_items_fiscal_note_id", table_name="fiscal_note_items")
op.drop_index("ix_fiscal_note_items_item_id", table_name="fiscal_note_items")
op.drop_index("ix_fiscal_note_items_owner_id", table_name="fiscal_note_items")
op.drop_index("ix_fiscal_note_items_sugested_sell_value", table_name="fiscal_note_items")
op.drop_table("fiscal_note_items")
op.drop_index("ix_fiscal_notes_owner_id", table_name="fiscal_notes")
op.drop_index("ix_fiscal_notes_purchase_date", table_name="fiscal_notes")
op.drop_table("fiscal_notes")
op.drop_index("ix_users_email", table_name="users")
op.create_index(op.f("ix_users_email"), "users", ["email"], unique=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column("orders", "owner_id", existing_type=postgresql.UUID(), nullable=True)
op.drop_column("orders", "sale_type")
op.alter_column("clients", "owner_id", existing_type=postgresql.UUID(), nullable=True)
op.alter_column("clients", "phone", existing_type=sa.VARCHAR(), nullable=True)
op.alter_column("clients", "email", existing_type=sa.VARCHAR(), nullable=True)
op.add_column(
"balance",
sa.Column("type", postgresql.ENUM("DEBT", "CREDIT", name="balancetype"), autoincrement=False, nullable=True),
)
op.create_index("ix_balance_type", "balance", ["type"], unique=False)
op.create_index("ix_balance_operation", "balance", ["operation"], unique=False)
op.drop_column("clients", "address")
op.drop_column("clients", "zip_code")
op.add_column(
"order_details",
sa.Column("cost", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False),
)
op.add_column(
"order_details",
sa.Column("sell_value", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False),
)
op.drop_column("order_details", "value")
op.drop_index(op.f("ix_users_email"), table_name="users")
op.create_index("ix_users_email", "users", ["email"], unique=False)
op.create_table(
"fiscal_notes",
sa.Column("id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("description", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.Column("purchase_date", sa.DATE(), autoincrement=False, nullable=False),
sa.Column("owner_id", postgresql.UUID(), autoincrement=False, nullable=True),
sa.Column("file_id", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(["file_id"], ["files.bucket_key"], name="fiscal_notes_file_id_fkey"),
sa.ForeignKeyConstraint(["owner_id"], ["users.id"], name="fiscal_notes_owner_id_fkey"),
sa.PrimaryKeyConstraint("id", name="fiscal_notes_pkey"),
postgresql_ignore_search_path=False,
)
op.create_index("ix_fiscal_notes_purchase_date", "fiscal_notes", ["purchase_date"], unique=False)
op.create_index("ix_fiscal_notes_owner_id", "fiscal_notes", ["owner_id"], unique=False)
op.create_table(
"fiscal_note_items",
sa.Column("id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("buy_value", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False),
sa.Column(
"sugested_sell_value", postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False
),
sa.Column("owner_id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("fiscal_note_id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("item_id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("file_id", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(["file_id"], ["files.bucket_key"], name="fiscal_note_items_file_id_fkey"),
sa.ForeignKeyConstraint(["fiscal_note_id"], ["fiscal_notes.id"], name="fiscal_note_items_fiscal_note_id_fkey"),
sa.ForeignKeyConstraint(["item_id"], ["items.id"], name="fiscal_note_items_item_id_fkey"),
sa.ForeignKeyConstraint(["owner_id"], ["users.id"], name="fiscal_note_items_owner_id_fkey"),
sa.PrimaryKeyConstraint("id", name="fiscal_note_items_pkey"),
)
op.create_index(
"ix_fiscal_note_items_sugested_sell_value", "fiscal_note_items", ["sugested_sell_value"], unique=False
)
op.create_index("ix_fiscal_note_items_owner_id", "fiscal_note_items", ["owner_id"], unique=False)
op.create_index("ix_fiscal_note_items_item_id", "fiscal_note_items", ["item_id"], unique=False)
op.create_index("ix_fiscal_note_items_fiscal_note_id", "fiscal_note_items", ["fiscal_note_id"], unique=False)
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((461, 620), 'sqlalchemy.dialects.postgresql.ENUM', 'postgresql.ENUM', (['"""SALE_IN_PIX"""', '"""SALE_IN_DEBT"""', '"""SALE_IN_CREDIT"""', '"""SALE_IN_MONEY"""', '"""SALE_IN_TRANSFER"""', '"""SALE_IN_BILLET"""', '"""SALE_OTHERS"""'], {'name': '"""saletype"""'}), "('SALE_IN_PIX', 'SALE_IN_DEBT', 'SALE_IN_CREDIT',\n 'SALE_IN_MONEY', 'SALE_IN_TRANSFER', 'SALE_IN_BILLET', 'SALE_OTHERS',\n name='saletype')\n", (476, 620), False, 'from sqlalchemy.dialects import postgresql\n'), ((1150, 1209), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_balance_operation"""'], {'table_name': '"""balance"""'}), "('ix_balance_operation', table_name='balance')\n", (1163, 1209), False, 'from alembic import op\n'), ((1214, 1268), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_balance_type"""'], {'table_name': '"""balance"""'}), "('ix_balance_type', table_name='balance')\n", (1227, 1268), False, 'from alembic import op\n'), ((1273, 1306), 'alembic.op.drop_column', 'op.drop_column', (['"""balance"""', '"""type"""'], {}), "('balance', 'type')\n", (1287, 1306), False, 'from alembic import op\n'), ((1950, 1995), 'alembic.op.drop_column', 'op.drop_column', (['"""order_details"""', '"""sell_value"""'], {}), "('order_details', 'sell_value')\n", (1964, 1995), False, 'from alembic import op\n'), ((2000, 2039), 'alembic.op.drop_column', 'op.drop_column', (['"""order_details"""', '"""cost"""'], {}), "('order_details', 'cost')\n", (2014, 2039), False, 'from alembic import op\n'), ((2045, 2134), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_fiscal_note_id"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_fiscal_note_id', table_name=\n 'fiscal_note_items')\n", (2058, 2134), False, 'from alembic import op\n'), ((2134, 2211), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_item_id"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_item_id', table_name='fiscal_note_items')\n", (2147, 2211), False, 'from alembic import op\n'), ((2216, 2294), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_owner_id"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_owner_id', table_name='fiscal_note_items')\n", (2229, 2294), False, 'from alembic import op\n'), ((2299, 2393), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_note_items_sugested_sell_value"""'], {'table_name': '"""fiscal_note_items"""'}), "('ix_fiscal_note_items_sugested_sell_value', table_name=\n 'fiscal_note_items')\n", (2312, 2393), False, 'from alembic import op\n'), ((2393, 2427), 'alembic.op.drop_table', 'op.drop_table', (['"""fiscal_note_items"""'], {}), "('fiscal_note_items')\n", (2406, 2427), False, 'from alembic import op\n'), ((2432, 2500), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_notes_owner_id"""'], {'table_name': '"""fiscal_notes"""'}), "('ix_fiscal_notes_owner_id', table_name='fiscal_notes')\n", (2445, 2500), False, 'from alembic import op\n'), ((2505, 2578), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_fiscal_notes_purchase_date"""'], {'table_name': '"""fiscal_notes"""'}), "('ix_fiscal_notes_purchase_date', table_name='fiscal_notes')\n", (2518, 2578), False, 'from alembic import op\n'), ((2583, 2612), 'alembic.op.drop_table', 'op.drop_table', (['"""fiscal_notes"""'], {}), "('fiscal_notes')\n", (2596, 2612), False, 'from alembic import op\n'), ((2617, 2668), 'alembic.op.drop_index', 'op.drop_index', (['"""ix_users_email"""'], {'table_name': '"""users"""'}), "('ix_users_email', table_name='users')\n", (2630, 2668), False, 'from alembic import op\n'), ((2961, 2998), 'alembic.op.drop_column', 'op.drop_column', (['"""orders"""', '"""sale_type"""'], {}), "('orders', 'sale_type')\n", (2975, 2998), False, 'from alembic import op\n'), ((3422, 3491), 'alembic.op.create_index', 'op.create_index', (['"""ix_balance_type"""', '"""balance"""', "['type']"], {'unique': '(False)'}), "('ix_balance_type', 'balance', ['type'], unique=False)\n", (3437, 3491), False, 'from alembic import op\n'), ((3496, 3575), 'alembic.op.create_index', 'op.create_index', (['"""ix_balance_operation"""', '"""balance"""', "['operation']"], {'unique': '(False)'}), "('ix_balance_operation', 'balance', ['operation'], unique=False)\n", (3511, 3575), False, 'from alembic import op\n'), ((3580, 3616), 'alembic.op.drop_column', 'op.drop_column', (['"""clients"""', '"""address"""'], {}), "('clients', 'address')\n", (3594, 3616), False, 'from alembic import op\n'), ((3621, 3658), 'alembic.op.drop_column', 'op.drop_column', (['"""clients"""', '"""zip_code"""'], {}), "('clients', 'zip_code')\n", (3635, 3658), False, 'from alembic import op\n'), ((3983, 4023), 'alembic.op.drop_column', 'op.drop_column', (['"""order_details"""', '"""value"""'], {}), "('order_details', 'value')\n", (3997, 4023), False, 'from alembic import op\n'), ((4091, 4158), 'alembic.op.create_index', 'op.create_index', (['"""ix_users_email"""', '"""users"""', "['email']"], {'unique': '(False)'}), "('ix_users_email', 'users', ['email'], unique=False)\n", (4106, 4158), False, 'from alembic import op\n'), ((4939, 5041), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_notes_purchase_date"""', '"""fiscal_notes"""', "['purchase_date']"], {'unique': '(False)'}), "('ix_fiscal_notes_purchase_date', 'fiscal_notes', [\n 'purchase_date'], unique=False)\n", (4954, 5041), False, 'from alembic import op\n'), ((5041, 5132), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_notes_owner_id"""', '"""fiscal_notes"""', "['owner_id']"], {'unique': '(False)'}), "('ix_fiscal_notes_owner_id', 'fiscal_notes', ['owner_id'],\n unique=False)\n", (5056, 5132), False, 'from alembic import op\n'), ((6370, 6493), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_sugested_sell_value"""', '"""fiscal_note_items"""', "['sugested_sell_value']"], {'unique': '(False)'}), "('ix_fiscal_note_items_sugested_sell_value',\n 'fiscal_note_items', ['sugested_sell_value'], unique=False)\n", (6385, 6493), False, 'from alembic import op\n'), ((6508, 6610), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_owner_id"""', '"""fiscal_note_items"""', "['owner_id']"], {'unique': '(False)'}), "('ix_fiscal_note_items_owner_id', 'fiscal_note_items', [\n 'owner_id'], unique=False)\n", (6523, 6610), False, 'from alembic import op\n'), ((6610, 6710), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_item_id"""', '"""fiscal_note_items"""', "['item_id']"], {'unique': '(False)'}), "('ix_fiscal_note_items_item_id', 'fiscal_note_items', [\n 'item_id'], unique=False)\n", (6625, 6710), False, 'from alembic import op\n'), ((6710, 6823), 'alembic.op.create_index', 'op.create_index', (['"""ix_fiscal_note_items_fiscal_note_id"""', '"""fiscal_note_items"""', "['fiscal_note_id']"], {'unique': '(False)'}), "('ix_fiscal_note_items_fiscal_note_id', 'fiscal_note_items',\n ['fiscal_note_id'], unique=False)\n", (6725, 6823), False, 'from alembic import op\n'), ((705, 718), 'alembic.op.get_bind', 'op.get_bind', ([], {}), '()\n', (716, 718), False, 'from alembic import op\n'), ((2689, 2711), 'alembic.op.f', 'op.f', (['"""ix_users_email"""'], {}), "('ix_users_email')\n", (2693, 2711), False, 'from alembic import op\n'), ((4043, 4065), 'alembic.op.f', 'op.f', (['"""ix_users_email"""'], {}), "('ix_users_email')\n", (4047, 4065), False, 'from alembic import op\n'), ((4629, 4726), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['file_id']", "['files.bucket_key']"], {'name': '"""fiscal_notes_file_id_fkey"""'}), "(['file_id'], ['files.bucket_key'], name=\n 'fiscal_notes_file_id_fkey')\n", (4652, 4726), True, 'import sqlalchemy as sa\n'), ((4731, 4822), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {'name': '"""fiscal_notes_owner_id_fkey"""'}), "(['owner_id'], ['users.id'], name=\n 'fiscal_notes_owner_id_fkey')\n", (4754, 4822), True, 'import sqlalchemy as sa\n'), ((4827, 4882), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {'name': '"""fiscal_notes_pkey"""'}), "('id', name='fiscal_notes_pkey')\n", (4850, 4882), True, 'import sqlalchemy as sa\n'), ((5871, 5973), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['file_id']", "['files.bucket_key']"], {'name': '"""fiscal_note_items_file_id_fkey"""'}), "(['file_id'], ['files.bucket_key'], name=\n 'fiscal_note_items_file_id_fkey')\n", (5894, 5973), True, 'import sqlalchemy as sa\n'), ((5978, 6093), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['fiscal_note_id']", "['fiscal_notes.id']"], {'name': '"""fiscal_note_items_fiscal_note_id_fkey"""'}), "(['fiscal_note_id'], ['fiscal_notes.id'], name=\n 'fiscal_note_items_fiscal_note_id_fkey')\n", (6001, 6093), True, 'import sqlalchemy as sa\n'), ((6098, 6192), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['item_id']", "['items.id']"], {'name': '"""fiscal_note_items_item_id_fkey"""'}), "(['item_id'], ['items.id'], name=\n 'fiscal_note_items_item_id_fkey')\n", (6121, 6192), True, 'import sqlalchemy as sa\n'), ((6197, 6293), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['owner_id']", "['users.id']"], {'name': '"""fiscal_note_items_owner_id_fkey"""'}), "(['owner_id'], ['users.id'], name=\n 'fiscal_note_items_owner_id_fkey')\n", (6220, 6293), True, 'import sqlalchemy as sa\n'), ((6298, 6358), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {'name': '"""fiscal_note_items_pkey"""'}), "('id', name='fiscal_note_items_pkey')\n", (6321, 6358), True, 'import sqlalchemy as sa\n'), ((813, 960), 'sqlalchemy.Enum', 'sa.Enum', (['"""SALE_IN_PIX"""', '"""SALE_IN_DEBT"""', '"""SALE_IN_CREDIT"""', '"""SALE_IN_MONEY"""', '"""SALE_IN_TRANSFER"""', '"""SALE_IN_BILLET"""', '"""SALE_OTHERS"""'], {'name': '"""saletype"""'}), "('SALE_IN_PIX', 'SALE_IN_DEBT', 'SALE_IN_CREDIT', 'SALE_IN_MONEY',\n 'SALE_IN_TRANSFER', 'SALE_IN_BILLET', 'SALE_OTHERS', name='saletype')\n", (820, 960), True, 'import sqlalchemy as sa\n'), ((1361, 1373), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (1371, 1373), True, 'import sqlalchemy as sa\n'), ((1445, 1457), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (1455, 1457), True, 'import sqlalchemy as sa\n'), ((1532, 1549), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (1547, 1549), False, 'from sqlalchemy.dialects import postgresql\n'), ((1624, 1641), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (1639, 1641), False, 'from sqlalchemy.dialects import postgresql\n'), ((1710, 1744), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1742, 1744), False, 'import sqlmodel\n'), ((1812, 1846), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1844, 1846), False, 'import sqlmodel\n'), ((1918, 1928), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (1926, 1928), True, 'import sqlalchemy as sa\n'), ((2923, 2940), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (2938, 2940), False, 'from sqlalchemy.dialects import postgresql\n'), ((3056, 3073), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (3071, 3073), False, 'from sqlalchemy.dialects import postgresql\n'), ((3144, 3156), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (3154, 3156), True, 'import sqlalchemy as sa\n'), ((3227, 3239), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (3237, 3239), True, 'import sqlalchemy as sa\n'), ((3320, 3373), 'sqlalchemy.dialects.postgresql.ENUM', 'postgresql.ENUM', (['"""DEBT"""', '"""CREDIT"""'], {'name': '"""balancetype"""'}), "('DEBT', 'CREDIT', name='balancetype')\n", (3335, 3373), False, 'from sqlalchemy.dialects import postgresql\n'), ((3729, 3770), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (3756, 3770), False, 'from sqlalchemy.dialects import postgresql\n'), ((3892, 3933), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (3919, 3933), False, 'from sqlalchemy.dialects import postgresql\n'), ((4228, 4245), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (4243, 4245), False, 'from sqlalchemy.dialects import postgresql\n'), ((4318, 4330), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (4328, 4330), True, 'import sqlalchemy as sa\n'), ((4405, 4414), 'sqlalchemy.DATE', 'sa.DATE', ([], {}), '()\n', (4412, 4414), True, 'import sqlalchemy as sa\n'), ((4484, 4501), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (4499, 4501), False, 'from sqlalchemy.dialects import postgresql\n'), ((4569, 4581), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (4579, 4581), True, 'import sqlalchemy as sa\n'), ((5203, 5220), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5218, 5220), False, 'from sqlalchemy.dialects import postgresql\n'), ((5291, 5332), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (5318, 5332), False, 'from sqlalchemy.dialects import postgresql\n'), ((5426, 5467), 'sqlalchemy.dialects.postgresql.DOUBLE_PRECISION', 'postgresql.DOUBLE_PRECISION', ([], {'precision': '(53)'}), '(precision=53)\n', (5453, 5467), False, 'from sqlalchemy.dialects import postgresql\n'), ((5546, 5563), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5561, 5563), False, 'from sqlalchemy.dialects import postgresql\n'), ((5639, 5656), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5654, 5656), False, 'from sqlalchemy.dialects import postgresql\n'), ((5725, 5742), 'sqlalchemy.dialects.postgresql.UUID', 'postgresql.UUID', ([], {}), '()\n', (5740, 5742), False, 'from sqlalchemy.dialects import postgresql\n'), ((5811, 5823), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {}), '()\n', (5821, 5823), True, 'import sqlalchemy as sa\n')] |
import scrapy
from imdb_rating.dependencies.models import Movie
from pydantic import ValidationError
from scrapy.crawler import CrawlerProcess
from sqlmodel import Session, select
class IMDBSpider(scrapy.Spider):
name = "imdb"
custom_settings = {"FEED_EXPORT_ENCODING": "utf-8"}
def start_requests(self):
"""
This method is called by Scrapy to start the crawl.
"""
self.start = self.start.strftime("%Y-%m-%d")
self.end = self.end.strftime("%Y-%m-%d")
yield scrapy.Request(
url=f"https://www.imdb.com/search/title/?title_type=feature&year={self.start},{self.end}&start=1",
callback=self.parse,
)
def parse(self, response):
"""
This method is called by Scrapy to parse the response.
Parameters
----------
response : scrapy.http.Response
The response from the server.
Yields
------
scrapy.http.Request
The next request to be crawled.
"""
for film in response.xpath('//*[@id="main"]/div/div[3]/div/div'):
try:
title = film.xpath(".//div[3]/h3/a/text()").get()
except:
title = None
try:
year = (
film.xpath(".//div[3]/h3/span[2]/text()")
.get()
.split(" ")[-1]
.replace("(", "")
.replace(")", "")
)
except:
year = None
try:
rating = film.xpath(".//div[3]/div/div/strong/text()").get()
except:
rating = None
try:
duration = film.css("span.runtime::text").get().replace(" min", "")
except:
duration = None
try:
votes = film.css(".//div[3]/p[4]/span[2]/@data-value").get()
except:
votes = None
try:
genres = film.css("span.genre::text").get().split(", ")
genres = [genre.strip() for genre in genres]
genres.extend([None for _ in range(3 - len(genres))])
genre1, genre2, genre3 = genres[:3]
except:
genre1, genre2, genre3 = None, None, None
try:
certificate = film.css("span.certificate::text").get()
except:
certificate = None
try:
synopsis = film.xpath(".//div[3]/p[2]/text()").get().strip()
except:
synopsis = None
try:
image = film.xpath(".//div[2]/a/img/@loadlate").get().split("._V1_")[0]
except:
image = None
try:
cast = film.xpath(".//div[3]/p[3]/*/text()").getall()
split = cast.index("|")
directors = cast[:split]
directors.extend([None for _ in range(3 - len(directors))])
director1, director2, director3 = directors[:3]
actors = cast[split + 1 :]
actors.extend([None for _ in range(3 - len(actors))])
actor1, actor2, actor3 = actors[:3]
except:
actor1, actor2, actor3 = None, None, None
director1, director2, director3 = None, None, None
try:
movie = Movie.validate(
dict(
title=title,
year=year,
actual_rating=rating,
votes=votes,
duration=duration,
certificate=certificate,
synopsis=synopsis,
image=image,
actor1=actor1,
actor2=actor2,
actor3=actor3,
director1=director1,
director2=director2,
director3=director3,
genre1=genre1,
genre2=genre2,
genre3=genre3,
)
)
with Session(self.engine) as session:
statement = select(Movie).where(
Movie.title == movie.title and Movie.year == movie.year
)
results = session.exec(statement)
movie_orig = results.first()
if movie_orig:
movie_orig_values = dict(movie_orig)
movie_orig_values.pop("id")
movie_new_values = dict(movie)
movie_new_values.pop("id")
if movie_orig_values != movie_new_values:
for key, value in movie_new_values.items():
setattr(movie_orig, key, value)
session.add(movie_orig)
session.commit()
else:
session.add(movie)
session.commit()
except ValidationError:
continue
try:
next_page = response.css("a.next-page::attr(href)").get()
yield response.follow(next_page, callback=self.parse)
except:
pass
if __name__ == "__main__":
from datetime import datetime, timedelta
start = datetime.today() - timedelta(days=90)
end = datetime.today() + timedelta(days=30)
process = CrawlerProcess({"FEED_FORMAT": "json", "FEED_URI": f"data/test.json"})
process.crawl(IMDBSpider, start=start, end=end)
process.start()
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((5642, 5712), 'scrapy.crawler.CrawlerProcess', 'CrawlerProcess', (["{'FEED_FORMAT': 'json', 'FEED_URI': f'data/test.json'}"], {}), "({'FEED_FORMAT': 'json', 'FEED_URI': f'data/test.json'})\n", (5656, 5712), False, 'from scrapy.crawler import CrawlerProcess\n'), ((5541, 5557), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (5555, 5557), False, 'from datetime import datetime, timedelta\n'), ((5560, 5578), 'datetime.timedelta', 'timedelta', ([], {'days': '(90)'}), '(days=90)\n', (5569, 5578), False, 'from datetime import datetime, timedelta\n'), ((5589, 5605), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (5603, 5605), False, 'from datetime import datetime, timedelta\n'), ((5608, 5626), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (5617, 5626), False, 'from datetime import datetime, timedelta\n'), ((520, 664), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'f"""https://www.imdb.com/search/title/?title_type=feature&year={self.start},{self.end}&start=1"""', 'callback': 'self.parse'}), "(url=\n f'https://www.imdb.com/search/title/?title_type=feature&year={self.start},{self.end}&start=1'\n , callback=self.parse)\n", (534, 664), False, 'import scrapy\n'), ((4252, 4272), 'sqlmodel.Session', 'Session', (['self.engine'], {}), '(self.engine)\n', (4259, 4272), False, 'from sqlmodel import Session, select\n'), ((4317, 4330), 'sqlmodel.select', 'select', (['Movie'], {}), '(Movie)\n', (4323, 4330), False, 'from sqlmodel import Session, select\n')] |
from fastapi import Depends, Response
from fastapi.routing import APIRouter
from pydantic import BaseModel # pylint: disable=E0611
from sqlmodel import Session, select
from starlette.responses import JSONResponse
from fastapi_server.database.database import get_session
from fastapi_server.models.user import User
login_router = APIRouter()
class LoginModel(BaseModel):
email: str
password: str
# TODO: Replace /login endpoint when Response is available in strawberry query info-context
@login_router.post('/login')
async def login(login_data: LoginModel, session: Session = Depends(get_session)) -> Response:
statement = select(User).where(User.email == login_data.email, User.password_hashed == login_data.password)
user = session.exec(statement).first()
if user is None:
raise FileNotFoundError('Email and password do not match')
# Set message and cookies in frontend
content = {'message': 'Come to the dark side, we have cookies'}
response = JSONResponse(content=content)
response.set_cookie(key='fakesession', value='fake-cookie-session-value', httponly=True, secure=True, expires=3600)
return response
| [
"sqlmodel.select"
] | [((332, 343), 'fastapi.routing.APIRouter', 'APIRouter', ([], {}), '()\n', (341, 343), False, 'from fastapi.routing import APIRouter\n'), ((590, 610), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (597, 610), False, 'from fastapi import Depends, Response\n'), ((994, 1023), 'starlette.responses.JSONResponse', 'JSONResponse', ([], {'content': 'content'}), '(content=content)\n', (1006, 1023), False, 'from starlette.responses import JSONResponse\n'), ((641, 653), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (647, 653), False, 'from sqlmodel import Session, select\n')] |
from sqlmodel import Session
from sfm.database import engine
from sfm.config import get_settings
from sfm.utils import verify_api_auth_token
from fastapi import Depends, HTTPException
from fastapi.security import HTTPBearer, HTTPBasicCredentials
from passlib.context import CryptContext
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
app_settings = get_settings()
security = HTTPBearer()
def get_db(): # pragma: no cover
db = Session(engine)
try:
yield db
finally:
db.close()
def has_access(
credentials: HTTPBasicCredentials = Depends(security),
): # pragma: no cover
token = credentials.credentials
verified = verify_api_auth_token(token)
if verified:
return True
else:
raise HTTPException(status_code=403, detail="Incorrect Credentials")
| [
"sqlmodel.Session"
] | [((302, 353), 'passlib.context.CryptContext', 'CryptContext', ([], {'schemes': "['bcrypt']", 'deprecated': '"""auto"""'}), "(schemes=['bcrypt'], deprecated='auto')\n", (314, 353), False, 'from passlib.context import CryptContext\n'), ((369, 383), 'sfm.config.get_settings', 'get_settings', ([], {}), '()\n', (381, 383), False, 'from sfm.config import get_settings\n'), ((395, 407), 'fastapi.security.HTTPBearer', 'HTTPBearer', ([], {}), '()\n', (405, 407), False, 'from fastapi.security import HTTPBearer, HTTPBasicCredentials\n'), ((453, 468), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (460, 468), False, 'from sqlmodel import Session\n'), ((585, 602), 'fastapi.Depends', 'Depends', (['security'], {}), '(security)\n', (592, 602), False, 'from fastapi import Depends, HTTPException\n'), ((678, 706), 'sfm.utils.verify_api_auth_token', 'verify_api_auth_token', (['token'], {}), '(token)\n', (699, 706), False, 'from sfm.utils import verify_api_auth_token\n'), ((768, 830), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""Incorrect Credentials"""'}), "(status_code=403, detail='Incorrect Credentials')\n", (781, 830), False, 'from fastapi import Depends, HTTPException\n')] |
import asyncio
import logging
import os
import time
from datetime import datetime
from sqlmodel import Session, SQLModel, select
from starlette.concurrency import run_in_threadpool
from ..datatypes import ArtmuseumTimeLabel
from ..scraping.artmuseum import scrap_artmuseum
from ..scraping.philharmonia import scrap_philharmonia
from .models import ArtmuseumExhibition, PhilharmoniaConcert
def refresh_data(engine):
"""
Scrap all the data sources for up-to-date info. Drop local values and replace them with the new data.
We are trying to be an exact mirror of our data sources.
The easiest way to achieve this is to regularly throw out all the data we have and scrap up-to-date info.
The cost of this approach in performance/resources is neglectable and is much preffered over complications
brought by trying to maintain a local copy by continuously patching it up with UPDATEs.
(there can be edits in the source info, urls can change, etc. - it's not worth it to consider all such corner cases)
"""
logging.info("Started scraping up-to-date info.")
known_addrs = {}
with Session(engine) as session:
stmt = select(ArtmuseumExhibition.url, ArtmuseumExhibition.address).where(
ArtmuseumExhibition.address != None
)
known_addrs = dict(session.exec(stmt).all())
exhibitions = scrap_artmuseum(known_addrs)
concerts = scrap_philharmonia()
logging.info("Finished scraping up-to-date info.")
logging.info("Started updating the database.")
with Session(engine) as session:
session.query(PhilharmoniaConcert).delete()
session.query(ArtmuseumExhibition).delete()
session.bulk_save_objects(concerts)
session.bulk_save_objects(exhibitions)
session.commit()
logging.info("Finished updating the database.")
async def loop_refreshing_data(engine, update_interval, initial_sleep_time: int = 0):
if initial_sleep_time > 0:
await asyncio.sleep(initial_sleep_time)
while True:
await run_in_threadpool(refresh_data, engine)
await asyncio.sleep(update_interval)
def init_db(engine):
update_interval = 60 * 60 * 8 # 8 hours
initial_sleep_time = 0
if os.path.isfile(engine.url.database):
last_modified = os.path.getmtime(engine.url.database)
dt = time.time() - last_modified
if dt <= update_interval:
initial_sleep_time = update_interval - dt
last_update = datetime.fromtimestamp(last_modified).replace(microsecond=0)
logging.info(
f"Last database update - {last_update}, the next one is scheduled in ...[N]h [N]m.... (at h:m)"
)
SQLModel.metadata.create_all(engine)
asyncio.create_task(
loop_refreshing_data(engine, update_interval, initial_sleep_time)
)
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.select"
] | [((1043, 1092), 'logging.info', 'logging.info', (['"""Started scraping up-to-date info."""'], {}), "('Started scraping up-to-date info.')\n", (1055, 1092), False, 'import logging\n'), ((1432, 1482), 'logging.info', 'logging.info', (['"""Finished scraping up-to-date info."""'], {}), "('Finished scraping up-to-date info.')\n", (1444, 1482), False, 'import logging\n'), ((1488, 1534), 'logging.info', 'logging.info', (['"""Started updating the database."""'], {}), "('Started updating the database.')\n", (1500, 1534), False, 'import logging\n'), ((1798, 1845), 'logging.info', 'logging.info', (['"""Finished updating the database."""'], {}), "('Finished updating the database.')\n", (1810, 1845), False, 'import logging\n'), ((2231, 2266), 'os.path.isfile', 'os.path.isfile', (['engine.url.database'], {}), '(engine.url.database)\n', (2245, 2266), False, 'import os\n'), ((2705, 2741), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (2733, 2741), False, 'from sqlmodel import Session, SQLModel, select\n'), ((1123, 1138), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1130, 1138), False, 'from sqlmodel import Session, SQLModel, select\n'), ((1544, 1559), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1551, 1559), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2292, 2329), 'os.path.getmtime', 'os.path.getmtime', (['engine.url.database'], {}), '(engine.url.database)\n', (2308, 2329), False, 'import os\n'), ((1979, 2012), 'asyncio.sleep', 'asyncio.sleep', (['initial_sleep_time'], {}), '(initial_sleep_time)\n', (1992, 2012), False, 'import asyncio\n'), ((2043, 2082), 'starlette.concurrency.run_in_threadpool', 'run_in_threadpool', (['refresh_data', 'engine'], {}), '(refresh_data, engine)\n', (2060, 2082), False, 'from starlette.concurrency import run_in_threadpool\n'), ((2097, 2127), 'asyncio.sleep', 'asyncio.sleep', (['update_interval'], {}), '(update_interval)\n', (2110, 2127), False, 'import asyncio\n'), ((2343, 2354), 'time.time', 'time.time', ([], {}), '()\n', (2352, 2354), False, 'import time\n'), ((2560, 2679), 'logging.info', 'logging.info', (['f"""Last database update - {last_update}, the next one is scheduled in ...[N]h [N]m.... (at h:m)"""'], {}), "(\n f'Last database update - {last_update}, the next one is scheduled in ...[N]h [N]m.... (at h:m)'\n )\n", (2572, 2679), False, 'import logging\n'), ((1166, 1226), 'sqlmodel.select', 'select', (['ArtmuseumExhibition.url', 'ArtmuseumExhibition.address'], {}), '(ArtmuseumExhibition.url, ArtmuseumExhibition.address)\n', (1172, 1226), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2487, 2524), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['last_modified'], {}), '(last_modified)\n', (2509, 2524), False, 'from datetime import datetime\n')] |
import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session, SQLModel, create_engine
from sqlmodel.pool import StaticPool
from api.main import app, get_session
from api.models import Measurement, Observer
@pytest.fixture(name="session")
def session_fixture():
engine = create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
)
SQLModel.metadata.create_all(engine)
with Session(engine) as session:
yield session
@pytest.fixture(name="client")
def client_fixture(session: Session):
def get_session_override():
return session
app.dependency_overrides[get_session] = get_session_override
client = TestClient(app)
yield client
app.dependency_overrides.clear()
@pytest.fixture(name="observer_1")
def observer_fixture(session: Session):
observer = Observer(phone="+1555-555-5555", email="<EMAIL>")
session.add(observer)
session.commit()
yield observer
session.delete(observer)
def test_create_observer(client: TestClient):
response = client.post(
"/observers/", json={"phone": "+1555-555-5555", "email": "<EMAIL>"}
)
data = response.json()
assert response.status_code == 200
assert data["phone"] == "+1555-555-5555"
assert data["email"] == "<EMAIL>"
assert data["id"] is not None
def test_create_observer_incomplete(client: TestClient):
# No hande
response = client.post("/observers/", json={"phone": "+1555-555-5555"})
assert response.status_code == 422
def test_create_observer_invalid(client: TestClient):
# email has an invalid type
response = client.post(
"/observers/", json={"phone": "+1555-555-5555", "email": {"key": "value"}}
)
assert response.status_code == 422
def test_delete_observer(session: Session, client: TestClient, observer_1: Observer):
response = client.delete(f"/observers/{observer_1.id}")
observer_in_db = session.get(Observer, observer_1.id)
assert response.status_code == 200
assert observer_in_db is None
def test_create_measurement_wrong_observer(client: TestClient, observer_1: Observer):
response = client.post(
"/measurements/",
json={
"temperaturescale": "C",
"temperature": 4,
"organizationid": 876543,
"siteid": 65432,
"observer_id": 8,
},
)
data = response.json()
assert response.status_code == 400
assert data["detail"] == "Not a valid observer id"
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((234, 264), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""session"""'}), "(name='session')\n", (248, 264), False, 'import pytest\n'), ((510, 539), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""client"""'}), "(name='client')\n", (524, 539), False, 'import pytest\n'), ((785, 818), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""observer_1"""'}), "(name='observer_1')\n", (799, 818), False, 'import pytest\n'), ((301, 396), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite://"""'], {'connect_args': "{'check_same_thread': False}", 'poolclass': 'StaticPool'}), "('sqlite://', connect_args={'check_same_thread': False},\n poolclass=StaticPool)\n", (314, 396), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((411, 447), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (439, 447), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((712, 727), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (722, 727), False, 'from fastapi.testclient import TestClient\n'), ((749, 781), 'api.main.app.dependency_overrides.clear', 'app.dependency_overrides.clear', ([], {}), '()\n', (779, 781), False, 'from api.main import app, get_session\n'), ((874, 923), 'api.models.Observer', 'Observer', ([], {'phone': '"""+1555-555-5555"""', 'email': '"""<EMAIL>"""'}), "(phone='+1555-555-5555', email='<EMAIL>')\n", (882, 923), False, 'from api.models import Measurement, Observer\n'), ((457, 472), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (464, 472), False, 'from sqlmodel import Session, SQLModel, create_engine\n')] |
"""Add countries
Revision ID: <KEY>
Revises: 423e059e8b64
Create Date: 2022-02-12 07:51:13.003045+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "423e059e8b64"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"countries",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.add_column("applications", sa.Column("country_id", sa.Integer(), nullable=False))
op.create_foreign_key(None, "applications", "countries", ["country_id"], ["id"])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, "applications", type_="foreignkey")
op.drop_column("applications", "country_id")
op.drop_table("countries")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((701, 786), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""countries"""', "['country_id']", "['id']"], {}), "(None, 'applications', 'countries', ['country_id'], ['id']\n )\n", (722, 786), False, 'from alembic import op\n'), ((906, 966), 'alembic.op.drop_constraint', 'op.drop_constraint', (['None', '"""applications"""'], {'type_': '"""foreignkey"""'}), "(None, 'applications', type_='foreignkey')\n", (924, 966), False, 'from alembic import op\n'), ((971, 1015), 'alembic.op.drop_column', 'op.drop_column', (['"""applications"""', '"""country_id"""'], {}), "('applications', 'country_id')\n", (985, 1015), False, 'from alembic import op\n'), ((1020, 1046), 'alembic.op.drop_table', 'op.drop_table', (['"""countries"""'], {}), "('countries')\n", (1033, 1046), False, 'from alembic import op\n'), ((571, 600), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (594, 600), True, 'import sqlalchemy as sa\n'), ((453, 465), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (463, 465), True, 'import sqlalchemy as sa\n'), ((510, 544), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (542, 544), False, 'import sqlmodel\n'), ((666, 678), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (676, 678), True, 'import sqlalchemy as sa\n')] |
from typing import TYPE_CHECKING, Any, Dict, Optional
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship, select
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.services.db import db_session
from joj.horse.services.oauth import OAuth2Profile
if TYPE_CHECKING:
from joj.horse.models import User
class UserOAuthAccount(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "user_oauth_accounts"
oauth_name: str = Field()
access_token: str = Field()
refresh_token: Optional[str] = Field(None, nullable=True)
expires_at: Optional[int] = Field(None, nullable=True)
account_id: str = Field(index=True)
account_name: Optional[str] = Field(None, index=True, nullable=True)
account_email: str = Field(index=True)
user_id: Optional[UUID] = Field(
sa_column=Column(GUID, ForeignKey("users.id", ondelete="CASCADE"))
)
user: Optional["User"] = Relationship(back_populates="oauth_accounts")
@staticmethod
async def create_or_update(
oauth_name: str, token: Dict[str, Any], profile: OAuth2Profile
) -> "UserOAuthAccount":
access_token = token["access_token"]
refresh_token = token.get("refresh_token", None)
expires_at = token.get("expires_at", None)
async with db_session() as session:
statement = (
select(UserOAuthAccount)
.where(UserOAuthAccount.oauth_name == oauth_name)
.where(UserOAuthAccount.account_id == profile.account_id)
)
results = await session.exec(statement)
oauth_account: Optional[UserOAuthAccount] = results.one_or_none()
if oauth_account:
oauth_account.access_token = access_token
oauth_account.refresh_token = refresh_token
oauth_account.expires_at = expires_at
oauth_account.account_name = profile.account_name
else:
oauth_account = UserOAuthAccount(
oauth_name=oauth_name,
access_token=access_token,
refresh_token=refresh_token,
expires_at=expires_at,
account_id=profile.account_id,
account_name=profile.account_name,
account_email=profile.account_email,
)
session.sync_session.add(oauth_account)
await session.commit()
await session.refresh(oauth_account)
return oauth_account
| [
"sqlmodel.Field",
"sqlmodel.select",
"sqlmodel.Relationship"
] | [((558, 565), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (563, 565), False, 'from sqlmodel import Field, Relationship, select\n'), ((590, 597), 'sqlmodel.Field', 'Field', ([], {}), '()\n', (595, 597), False, 'from sqlmodel import Field, Relationship, select\n'), ((633, 659), 'sqlmodel.Field', 'Field', (['None'], {'nullable': '(True)'}), '(None, nullable=True)\n', (638, 659), False, 'from sqlmodel import Field, Relationship, select\n'), ((692, 718), 'sqlmodel.Field', 'Field', (['None'], {'nullable': '(True)'}), '(None, nullable=True)\n', (697, 718), False, 'from sqlmodel import Field, Relationship, select\n'), ((741, 758), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (746, 758), False, 'from sqlmodel import Field, Relationship, select\n'), ((793, 831), 'sqlmodel.Field', 'Field', (['None'], {'index': '(True)', 'nullable': '(True)'}), '(None, index=True, nullable=True)\n', (798, 831), False, 'from sqlmodel import Field, Relationship, select\n'), ((857, 874), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (862, 874), False, 'from sqlmodel import Field, Relationship, select\n'), ((1023, 1068), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""oauth_accounts"""'}), "(back_populates='oauth_accounts')\n", (1035, 1068), False, 'from sqlmodel import Field, Relationship, select\n'), ((1393, 1405), 'joj.horse.services.db.db_session', 'db_session', ([], {}), '()\n', (1403, 1405), False, 'from joj.horse.services.db import db_session\n'), ((944, 986), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""users.id"""'], {'ondelete': '"""CASCADE"""'}), "('users.id', ondelete='CASCADE')\n", (954, 986), False, 'from sqlalchemy.schema import Column, ForeignKey\n'), ((1460, 1484), 'sqlmodel.select', 'select', (['UserOAuthAccount'], {}), '(UserOAuthAccount)\n', (1466, 1484), False, 'from sqlmodel import Field, Relationship, select\n')] |
from fastapi import APIRouter, Depends
from ..utils import engine, get_session
from ..models.capacity import Capacity
from sqlmodel import Session, select, SQLModel, and_
from sqlalchemy.exc import NoResultFound
from ..models.user import User
from ..models.team import Team
router = APIRouter(prefix="/api/capacities", tags=["capacity"])
session = Session(engine)
@router.post("/")
async def post_capacity(*, capacity: Capacity, session: Session = Depends(get_session)):
"""
Post new capacity.
Parameters
----------
capacity : Capacity
Capacity that is to be added to the database.
session : Session
SQL session that is to be used to add the capacity.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Capacity).where(
and_(
Capacity.user_id == capacity.user_id,
Capacity.team_id == capacity.team_id,
capacity.year == capacity.year,
Capacity.month == capacity.month,
)
)
try:
result = session.exec(statement).one()
return False
except NoResultFound:
session.add(capacity)
session.commit()
session.refresh(capacity)
return capacity
@router.get("/")
async def get_capacities(
session: Session = Depends(get_session),
is_locked: bool = None,
user_id: int = None,
team_id: int = None,
month: int = None,
year: int = None,
):
"""
Get list of all capacities.
Parameters
----------
session : Session
SQL session that is to be used to get a list of the epic areas.
Defaults to creating a dependency on the running SQL model session.
is_locked : bool
Whether or not the capacity is locked or not.
user_id : int
User id of the user in question.
team_id : int
Team id of the user's team.
month : int
Month of capacity in question.
year : int
Year of capacity in question.
"""
statement = select(Capacity)
# Select capacity by user_id, team_id, month, year
if (user_id and team_id and month and year) != None:
statement = (
select(
Capacity.id.label("capacity_id"),
User.short_name.label("user_short_name"),
Team.short_name.label("team_short_name"),
Capacity.year,
Capacity.month,
Capacity.days,
)
.select_from(Capacity)
.join(User, Capacity.user_id == User.id)
.join(Team, Capacity.team_id == Team.id)
.where(Capacity.user_id == user_id)
.where(Capacity.team_id == team_id)
.where(Capacity.month == month)
.where(Capacity.year == year)
)
result = session.exec(statement).all()
return result
@router.delete("/")
async def delete_capacities(
capacity_id: str = None,
session: Session = Depends(get_session),
):
"""
Delete a capacity
Parameters
----------
capacity_id : str
ID of the capacity that is to be removed from the database.
session : Session
SQL session that is to be used to delete the capacity.
Defaults to creating a dependency on the running SQL model session.
"""
statement = select(Capacity).where(
Capacity.id == capacity_id,
)
capacity_to_delete = session.exec(statement).one()
session.delete(capacity_to_delete)
session.commit()
return True
| [
"sqlmodel.Session",
"sqlmodel.select",
"sqlmodel.and_"
] | [((284, 338), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/api/capacities"""', 'tags': "['capacity']"}), "(prefix='/api/capacities', tags=['capacity'])\n", (293, 338), False, 'from fastapi import APIRouter, Depends\n'), ((349, 364), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (356, 364), False, 'from sqlmodel import Session, select, SQLModel, and_\n'), ((451, 471), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (458, 471), False, 'from fastapi import APIRouter, Depends\n'), ((1324, 1344), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1331, 1344), False, 'from fastapi import APIRouter, Depends\n'), ((2033, 2049), 'sqlmodel.select', 'select', (['Capacity'], {}), '(Capacity)\n', (2039, 2049), False, 'from sqlmodel import Session, select, SQLModel, and_\n'), ((2976, 2996), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (2983, 2996), False, 'from fastapi import APIRouter, Depends\n'), ((828, 979), 'sqlmodel.and_', 'and_', (['(Capacity.user_id == capacity.user_id)', '(Capacity.team_id == capacity.team_id)', '(capacity.year == capacity.year)', '(Capacity.month == capacity.month)'], {}), '(Capacity.user_id == capacity.user_id, Capacity.team_id == capacity.\n team_id, capacity.year == capacity.year, Capacity.month == capacity.month)\n', (832, 979), False, 'from sqlmodel import Session, select, SQLModel, and_\n'), ((796, 812), 'sqlmodel.select', 'select', (['Capacity'], {}), '(Capacity)\n', (802, 812), False, 'from sqlmodel import Session, select, SQLModel, and_\n'), ((3337, 3353), 'sqlmodel.select', 'select', (['Capacity'], {}), '(Capacity)\n', (3343, 3353), False, 'from sqlmodel import Session, select, SQLModel, and_\n')] |
from datetime import datetime
from typing import Optional
import typer
from sqlalchemy.orm.exc import UnmappedInstanceError
from sqlmodel import Session, select
from .database import engine
from .functions_aux import Status
from .tables import ToDo, Timer
app = typer.Typer()
@app.command()
def task(id: str, task: str = None,
status: Optional[Status] = typer.Option(None),
tag: str = None, remarks: str = None, project: str = None,
due_date: datetime = typer.Option(None, formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, formats=['%Y-%m-%d'])):
"""Edit record from to-do list"""
with Session(engine) as session:
try:
query = session.get(ToDo, id)
if task is not None:
query.task = task
if tag is not None:
query.tag = tag
if remarks is not None:
query.remarks = remarks
if project is not None:
query.project = project
if status is None or status == query.status:
pass
elif status == 'done':
query.status = status
query.date_end = datetime.now().date()
elif status == 'doing' and query.status == 'done':
query.status = status
query.date_end = None
elif status == 'to do':
timer = session.exec(select(Timer).where(
Timer.id_todo == id)).all()
if len(timer) > 0:
typer.secho(f'\nTask already started\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
else:
query.status = status
query.date_end = None
else:
query.status = status
today = datetime.today()
if due_date is not None and reminder \
is not None and reminder >= due_date:
typer.secho(
f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and due_date <= today:
typer.secho(f'\ndue date must be grater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and reminder <= today:
typer.secho(
f'\nreminder must be grater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif due_date is not None and query.reminder \
is not None and due_date < query.reminder:
typer.secho(
f'\ndue date must be grater than {query.reminder.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None and query.due_date \
is not None and reminder >= query.due_date:
typer.secho(
f'\nreminder must be smaller than {query.due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
elif reminder is not None:
query.reminder = reminder
elif due_date is not None:
query.due_date = due_date
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def project(project: str, new_project: str):
"""Edit project name in tasks"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
task.project = new_project
session.add(task)
edit = typer.confirm(f"""Are you sure you want to edit:
{tasks}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_task(id: str):
"""Delete task"""
try:
with Session(engine) as session:
task = session.get(ToDo, id)
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
for timer in timers:
session.delete(timer)
session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{task}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Deleting it!",
fg=typer.colors.RED)
session.commit()
except UnmappedInstanceError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def del_project(project: str):
"""Delete all tasks from a project"""
with Session(engine) as session:
tasks = session.exec(select(ToDo).where(
ToDo.project == project)).all()
if len(tasks) > 0:
for task in tasks:
timers = session.exec(select(Timer).where(
Timer.id_todo == task.id)).all()
for timer in timers:
session.delete(timer)
session.delete(task)
session.delete(task)
edit = typer.confirm(f"""Are you sure you want to delete:
{tasks}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("deleting it!",
fg=typer.colors.RED)
session.commit()
else:
typer.secho(f'\nInvalid project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def timer(id: int,
end: datetime = typer.Option('', formats=['%Y-%m-%d %H:%M:%S'])):
"""Edit record from Timer"""
with Session(engine) as session:
try:
query = session.get(Timer, id)
if end <= query.start:
typer.secho(
f'\nEnd must be >= {query.start}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if end >= datetime.now():
typer.secho(
f'\nEnd must be < {datetime.now()}'
)
raise typer.Exit(code=1)
query.end = end
session.add(query)
edit = typer.confirm(f"""Are you sure you want to edit:
{query}""")
if not edit:
typer.secho("Not editing",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("Editing it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid timer id\n',
fg=typer.colors.RED)
@app.command()
def del_timer(id: int):
"""Delete record from Timer"""
with Session(engine) as session:
try:
query = session.get(Timer, id)
session.delete(query)
edit = typer.confirm(f"""Are you sure you want to delete:
{query}""")
if not edit:
typer.secho("Not deleting",
fg=typer.colors.RED)
raise typer.Abort()
typer.secho("deleting it!",
fg=typer.colors.RED)
session.commit()
except AttributeError:
typer.secho(f'\nInvalid timer id\n',
fg=typer.colors.RED) | [
"sqlmodel.Session",
"sqlmodel.select"
] | [((265, 278), 'typer.Typer', 'typer.Typer', ([], {}), '()\n', (276, 278), False, 'import typer\n'), ((368, 386), 'typer.Option', 'typer.Option', (['None'], {}), '(None)\n', (380, 386), False, 'import typer\n'), ((486, 526), 'typer.Option', 'typer.Option', (['None'], {'formats': "['%Y-%m-%d']"}), "(None, formats=['%Y-%m-%d'])\n", (498, 526), False, 'import typer\n'), ((558, 598), 'typer.Option', 'typer.Option', (['None'], {'formats': "['%Y-%m-%d']"}), "(None, formats=['%Y-%m-%d'])\n", (570, 598), False, 'import typer\n'), ((7039, 7086), 'typer.Option', 'typer.Option', (['""""""'], {'formats': "['%Y-%m-%d %H:%M:%S']"}), "('', formats=['%Y-%m-%d %H:%M:%S'])\n", (7051, 7086), False, 'import typer\n'), ((648, 663), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (655, 663), False, 'from sqlmodel import Session, select\n'), ((4296, 4311), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (4303, 4311), False, 'from sqlmodel import Session, select\n'), ((6027, 6042), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6034, 6042), False, 'from sqlmodel import Session, select\n'), ((7131, 7146), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (7138, 7146), False, 'from sqlmodel import Session, select\n'), ((8240, 8255), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (8247, 8255), False, 'from sqlmodel import Session, select\n'), ((1880, 1896), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (1894, 1896), False, 'from datetime import datetime\n'), ((3516, 3592), 'typer.confirm', 'typer.confirm', (['f"""Are you sure you want to edit:\n {query}"""'], {}), '(f"""Are you sure you want to edit:\n {query}""")\n', (3529, 3592), False, 'import typer\n'), ((3758, 3805), 'typer.secho', 'typer.secho', (['"""Editing it!"""'], {'fg': 'typer.colors.RED'}), "('Editing it!', fg=typer.colors.RED)\n", (3769, 3805), False, 'import typer\n'), ((4571, 4643), 'typer.confirm', 'typer.confirm', (['f"""Are you sure you want to edit:\n {tasks}"""'], {}), '(f"""Are you sure you want to edit:\n {tasks}""")\n', (4584, 4643), False, 'import typer\n'), ((4809, 4856), 'typer.secho', 'typer.secho', (['"""Editing it!"""'], {'fg': 'typer.colors.RED'}), "('Editing it!', fg=typer.colors.RED)\n", (4820, 4856), False, 'import typer\n'), ((4936, 4994), 'typer.secho', 'typer.secho', (['f"""\nInvalid project\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid project\n""", fg=typer.colors.RED)\n', (4947, 4994), False, 'import typer\n'), ((5035, 5053), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (5045, 5053), False, 'import typer\n'), ((5138, 5153), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (5145, 5153), False, 'from sqlmodel import Session, select\n'), ((5434, 5507), 'typer.confirm', 'typer.confirm', (['f"""Are you sure you want to delete:\n {task}"""'], {}), '(f"""Are you sure you want to delete:\n {task}""")\n', (5447, 5507), False, 'import typer\n'), ((5674, 5722), 'typer.secho', 'typer.secho', (['"""Deleting it!"""'], {'fg': 'typer.colors.RED'}), "('Deleting it!', fg=typer.colors.RED)\n", (5685, 5722), False, 'import typer\n'), ((5818, 5876), 'typer.secho', 'typer.secho', (['f"""\nInvalid task id\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid task id\n""", fg=typer.colors.RED)\n', (5829, 5876), False, 'import typer\n'), ((5909, 5927), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (5919, 5927), False, 'import typer\n'), ((6490, 6564), 'typer.confirm', 'typer.confirm', (['f"""Are you sure you want to delete:\n {tasks}"""'], {}), '(f"""Are you sure you want to delete:\n {tasks}""")\n', (6503, 6564), False, 'import typer\n'), ((6731, 6779), 'typer.secho', 'typer.secho', (['"""deleting it!"""'], {'fg': 'typer.colors.RED'}), "('deleting it!', fg=typer.colors.RED)\n", (6742, 6779), False, 'import typer\n'), ((6859, 6917), 'typer.secho', 'typer.secho', (['f"""\nInvalid project\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid project\n""", fg=typer.colors.RED)\n', (6870, 6917), False, 'import typer\n'), ((6958, 6976), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (6968, 6976), False, 'import typer\n'), ((7679, 7768), 'typer.confirm', 'typer.confirm', (['f"""Are you sure you want to edit:\n {query}"""'], {}), '(\n f"""Are you sure you want to edit:\n {query}""")\n', (7692, 7768), False, 'import typer\n'), ((7929, 7976), 'typer.secho', 'typer.secho', (['"""Editing it!"""'], {'fg': 'typer.colors.RED'}), "('Editing it!', fg=typer.colors.RED)\n", (7940, 7976), False, 'import typer\n'), ((8377, 8451), 'typer.confirm', 'typer.confirm', (['f"""Are you sure you want to delete:\n {query}"""'], {}), '(f"""Are you sure you want to delete:\n {query}""")\n', (8390, 8451), False, 'import typer\n'), ((8618, 8666), 'typer.secho', 'typer.secho', (['"""deleting it!"""'], {'fg': 'typer.colors.RED'}), "('deleting it!', fg=typer.colors.RED)\n", (8629, 8666), False, 'import typer\n'), ((2174, 2192), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (2184, 2192), False, 'import typer\n'), ((3634, 3681), 'typer.secho', 'typer.secho', (['"""Not editing"""'], {'fg': 'typer.colors.RED'}), "('Not editing', fg=typer.colors.RED)\n", (3645, 3681), False, 'import typer\n'), ((3732, 3745), 'typer.Abort', 'typer.Abort', ([], {}), '()\n', (3743, 3745), False, 'import typer\n'), ((3902, 3960), 'typer.secho', 'typer.secho', (['f"""\nInvalid task id\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid task id\n""", fg=typer.colors.RED)\n', (3913, 3960), False, 'import typer\n'), ((4001, 4019), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (4011, 4019), False, 'import typer\n'), ((4070, 4128), 'typer.secho', 'typer.secho', (['f"""\nInvalid task id\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid task id\n""", fg=typer.colors.RED)\n', (4081, 4128), False, 'import typer\n'), ((4169, 4187), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (4179, 4187), False, 'import typer\n'), ((4685, 4732), 'typer.secho', 'typer.secho', (['"""Not editing"""'], {'fg': 'typer.colors.RED'}), "('Not editing', fg=typer.colors.RED)\n", (4696, 4732), False, 'import typer\n'), ((4783, 4796), 'typer.Abort', 'typer.Abort', ([], {}), '()\n', (4794, 4796), False, 'import typer\n'), ((5549, 5597), 'typer.secho', 'typer.secho', (['"""Not deleting"""'], {'fg': 'typer.colors.RED'}), "('Not deleting', fg=typer.colors.RED)\n", (5560, 5597), False, 'import typer\n'), ((5648, 5661), 'typer.Abort', 'typer.Abort', ([], {}), '()\n', (5659, 5661), False, 'import typer\n'), ((6606, 6654), 'typer.secho', 'typer.secho', (['"""Not deleting"""'], {'fg': 'typer.colors.RED'}), "('Not deleting', fg=typer.colors.RED)\n", (6617, 6654), False, 'import typer\n'), ((6705, 6718), 'typer.Abort', 'typer.Abort', ([], {}), '()\n', (6716, 6718), False, 'import typer\n'), ((7266, 7337), 'typer.secho', 'typer.secho', (['f"""\nEnd must be >= {query.start}\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nEnd must be >= {query.start}\n""", fg=typer.colors.RED)\n', (7277, 7337), False, 'import typer\n'), ((7399, 7417), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (7409, 7417), False, 'import typer\n'), ((7440, 7454), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7452, 7454), False, 'from datetime import datetime\n'), ((7581, 7599), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (7591, 7599), False, 'import typer\n'), ((7805, 7852), 'typer.secho', 'typer.secho', (['"""Not editing"""'], {'fg': 'typer.colors.RED'}), "('Not editing', fg=typer.colors.RED)\n", (7816, 7852), False, 'import typer\n'), ((7903, 7916), 'typer.Abort', 'typer.Abort', ([], {}), '()\n', (7914, 7916), False, 'import typer\n'), ((8073, 8132), 'typer.secho', 'typer.secho', (['f"""\nInvalid timer id\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid timer id\n""", fg=typer.colors.RED)\n', (8084, 8132), False, 'import typer\n'), ((8493, 8541), 'typer.secho', 'typer.secho', (['"""Not deleting"""'], {'fg': 'typer.colors.RED'}), "('Not deleting', fg=typer.colors.RED)\n", (8504, 8541), False, 'import typer\n'), ((8592, 8605), 'typer.Abort', 'typer.Abort', ([], {}), '()\n', (8603, 8605), False, 'import typer\n'), ((8764, 8823), 'typer.secho', 'typer.secho', (['f"""\nInvalid timer id\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nInvalid timer id\n""", fg=typer.colors.RED)\n', (8775, 8823), False, 'import typer\n'), ((2406, 2424), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (2416, 2424), False, 'import typer\n'), ((2651, 2669), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (2661, 2669), False, 'import typer\n'), ((1200, 1214), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1212, 1214), False, 'from datetime import datetime\n'), ((2966, 2984), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (2976, 2984), False, 'import typer\n'), ((4353, 4365), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (4359, 4365), False, 'from sqlmodel import Session, select\n'), ((6084, 6096), 'sqlmodel.select', 'select', (['ToDo'], {}), '(ToDo)\n', (6090, 6096), False, 'from sqlmodel import Session, select\n'), ((7524, 7538), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7536, 7538), False, 'from datetime import datetime\n'), ((1558, 1621), 'typer.secho', 'typer.secho', (['f"""\nTask already started\n"""'], {'fg': 'typer.colors.RED'}), '(f"""\nTask already started\n""", fg=typer.colors.RED)\n', (1569, 1621), False, 'import typer\n'), ((1678, 1696), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1688, 1696), False, 'import typer\n'), ((3283, 3301), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (3293, 3301), False, 'import typer\n'), ((5241, 5254), 'sqlmodel.select', 'select', (['Timer'], {}), '(Timer)\n', (5247, 5254), False, 'from sqlmodel import Session, select\n'), ((6244, 6257), 'sqlmodel.select', 'select', (['Timer'], {}), '(Timer)\n', (6250, 6257), False, 'from sqlmodel import Session, select\n'), ((1434, 1447), 'sqlmodel.select', 'select', (['Timer'], {}), '(Timer)\n', (1440, 1447), False, 'from sqlmodel import Session, select\n')] |
from datetime import datetime
from sqlmodel import Session, SQLModel, create_engine, text
import sqlite3
database_loc = "backend/database.sqlite"
con_str = f"sqlite:///{database_loc}"
engine = create_engine(con_str, echo=True)
sqlite3_engine = sqlite3.connect(f"{database_loc}")
def get_session():
session = Session(engine)
return session
def create_db():
SQLModel.metadata.create_all(engine)
def execute_sample_sql(session):
"""Read sample sql database and import it."""
with open("backend/tests/sample.sql") as f:
content = f.read()
queries = filter(None, content.split(";\n"))
queries = [text(query) for query in queries]
for query in queries:
session.exec(query)
session.commit()
session.expire_all()
session = Session(engine)
tags_metadata = [
{
"name": "user",
"description": "Operations with users",
},
{
"name": "epic",
"description": "operations with epics",
},
{
"name": "epic_area",
"description": "operations with epic areas",
},
{
"name": "team",
"description": "operations with teams",
},
{
"name": "sponsor",
"description": "operations with sponsors",
},
{
"name": "client",
"description": "operations with clients",
},
{
"name": "forecast",
"description": "operations with forecasts",
},
{
"name": "rate",
"description": "operations with rates",
},
{
"name": "timelog",
"description": "operations with timelogs",
},
]
def string_to_datetime(date_string):
date = datetime.strptime(date_string, "%Y-%m-%d %H:%M")
return date
def string_to_datetime_hm(date_string):
date = datetime.strptime(date_string, "%H:%M")
return date
def string_to_datetime_GMT(date_string):
date = datetime.strptime(date_string, "%a %b %d %Y %H:%M:%S %Z%z")
return date
def string_to_datetime_work(date_string):
date = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S.%fZ")
return date
def datetime_to_string(date_date):
date_string = date_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
return date_string
def time_period(time_of_start, time_of_end):
starting_time = string_to_datetime_work(time_of_start)
ending_time = string_to_datetime_work(time_of_end)
working_time = ending_time - starting_time
return working_time
def date_str_to_date(date: str):
date_date = datetime.strptime(date, "%Y-%m-%d").date()
return date_date
far_date = date_str_to_date("9999-12-31")
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.text",
"sqlmodel.create_engine"
] | [((195, 228), 'sqlmodel.create_engine', 'create_engine', (['con_str'], {'echo': '(True)'}), '(con_str, echo=True)\n', (208, 228), False, 'from sqlmodel import Session, SQLModel, create_engine, text\n'), ((246, 280), 'sqlite3.connect', 'sqlite3.connect', (['f"""{database_loc}"""'], {}), "(f'{database_loc}')\n", (261, 280), False, 'import sqlite3\n'), ((784, 799), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (791, 799), False, 'from sqlmodel import Session, SQLModel, create_engine, text\n'), ((316, 331), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (323, 331), False, 'from sqlmodel import Session, SQLModel, create_engine, text\n'), ((374, 410), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (402, 410), False, 'from sqlmodel import Session, SQLModel, create_engine, text\n'), ((1670, 1718), 'datetime.datetime.strptime', 'datetime.strptime', (['date_string', '"""%Y-%m-%d %H:%M"""'], {}), "(date_string, '%Y-%m-%d %H:%M')\n", (1687, 1718), False, 'from datetime import datetime\n'), ((1788, 1827), 'datetime.datetime.strptime', 'datetime.strptime', (['date_string', '"""%H:%M"""'], {}), "(date_string, '%H:%M')\n", (1805, 1827), False, 'from datetime import datetime\n'), ((1898, 1957), 'datetime.datetime.strptime', 'datetime.strptime', (['date_string', '"""%a %b %d %Y %H:%M:%S %Z%z"""'], {}), "(date_string, '%a %b %d %Y %H:%M:%S %Z%z')\n", (1915, 1957), False, 'from datetime import datetime\n'), ((2029, 2084), 'datetime.datetime.strptime', 'datetime.strptime', (['date_string', '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(date_string, '%Y-%m-%dT%H:%M:%S.%fZ')\n", (2046, 2084), False, 'from datetime import datetime\n'), ((636, 647), 'sqlmodel.text', 'text', (['query'], {}), '(query)\n', (640, 647), False, 'from sqlmodel import Session, SQLModel, create_engine, text\n'), ((2506, 2541), 'datetime.datetime.strptime', 'datetime.strptime', (['date', '"""%Y-%m-%d"""'], {}), "(date, '%Y-%m-%d')\n", (2523, 2541), False, 'from datetime import datetime\n')] |
import os
import pathlib
from datetime import datetime, date, time
from decimal import Decimal
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile
from fastapi.encoders import jsonable_encoder
from sqlmodel import Field, SQLModel
from ..db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
cert_path = '/uploads/user/{user_id}/cert/'
avatar_path = '/uploads/user/{user_id}/avatar/'
class User(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
state: str # Statue: pending, active, inactive
id_type_id: int
id_number: str = None
email = str
email_verified_at: Optional[datetime] = None
password: str
remember_token: str
hospital_id: Optional[int] = None
hospital_node_id: Optional[int] = None
discipline_id: int
first_name_thai: str
last_name_thai: str
first_name_english: str
last_name_english: str
nickname: str
birth_date: date
gender: str
academic_degree: str
is_thai_address: bool
address_house_number: str
address_moo: str
address_soi: str
address_road: str
address_tambon_id: Optional[int] = None
address_amphoe_id: Optional[int] = None
address_province_id: Optional[int] = None
address_other: str
latitude: Decimal
longitude: Decimal
potential: str
avatar_path: str
document_path: str
policy_accept: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserPhone(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
number: str
detail: str
receive_sms: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserFeedback(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
feedback_type_id: Optional[int] = None
detail: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserNotification(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
name: str
detail: str
is_read: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserRole(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
role_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class UserSource(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
created_at: datetime
created_by: int
class UserLog(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
class Doctor(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int
hospital_id: int
discipline_id: int
prefix: str
first_name: str
last_name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class DoctorProcedureMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
doctor_id: int
procedure_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class Discipline(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
discipline_group_id: int
name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class DisciplineGroup(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class Role(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class RoleModuleFunctionMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
role_id: int
module_function_id: int
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
@router.post("/user", response_model=User)
async def create_user(user: User, session: AsyncSession = Depends(get_session)):
# Add user
session.add(user)
await session.commit()
await session.refresh(user)
return user
@router.get("/user", response_model=List[User])
async def get_users(session: AsyncSession = Depends(get_session)):
# Select all users
statement = select(User)
result = await session.execute(statement)
users = result.scalars().all()
return users
@router.post("/user/check_id/{id_type_id}/{id_number}", response_model=bool)
async def check_id_available(id_type_id: int, id_number: str, session: AsyncSession = Depends(get_session)):
# Check id existence
users = await session.execute(select(User).where(User.id_type_id == id_type_id).where(User.id_number == id_number))
if users is None:
return True
return False
@router.get("/user/pending_num", response_model=int)
async def get_pending_user_num(session: AsyncSession = Depends(get_session)):
# Get user with pending status
users = await session.execute(select(User).where(User.state == "pending"))
return len(users.scalars().all())
@router.get("/user/{user_id}", response_model=User)
async def get_user(user_id: int, session: AsyncSession = Depends(get_session)):
# Get user with id
users = await session.execute(select(User).where(User.id == user_id))
user = users.scalars().first()
return user
@router.put("/user/{user_id}", response_model=User)
async def update_user(user_id: int, user: User, session: AsyncSession = Depends(get_session)):
# Get user with id
statement = select(User).where(User.id == user_id)
users = await session.execute(statement)
# Update detail
user_old = users.one()
model = User(**user_old)
update_data = user.dict(exclude_unset=True)
updated_user = model.copy(update=update_data)
user_old = jsonable_encoder(updated_user)
# Commit to database
await session.commit()
return user_old
@router.delete("/user/{user_id}")
async def delete_user(user_id: int, session: AsyncSession = Depends(get_session)):
# Check user existence
statement = select(User).where(User.id == user_id)
user = await session.execute(statement)
# Not found error
if not user:
raise HTTPException(status_code=404, detail='User not found')
user = user.scalars().one()
# Delete
await session.delete(user)
await session.commit()
return status.HTTP_200_OK
@router.post("/user/{user_id}/document")
async def upload_document(user_id: int, document: UploadFile = File(...), session: AsyncSession = Depends(get_session)):
# File name
file_dir = os.getcwd() + cert_path.format(user_id=user_id)
file_path = file_dir + document.filename
# Make directory if not exist
try:
if not os.path.exists(file_dir):
pathlib.Path(file_dir).mkdir(parents=True, exist_ok=True)
except Exception as e:
print(e)
# Write file
with open(file_path, 'wb') as f:
f.write(document.file.read())
f.close()
# Update user document path
statement = select(User).where(User.id == user_id)
users = await session.execute(statement)
user_old = users.scalars().one()
user_old.document_path = file_path
await session.commit()
return {'document_path': file_path}
@router.post("/user/{user_id}/accept")
async def accept_policy(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/{user_id}/reset")
async def reset_password(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/{user_id}/avatar")
async def upload_avatar(session: AsyncSession = Depends(get_session)):
return None
@router.delete("/user/{user_id}/avatar")
async def delete_avatar(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/{user_id}/role")
async def set_role(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/feedback")
async def create_user_feedback(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/feedback")
async def get_user_feedbacks(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/feedback/{feedback_id}")
async def get_user_feedback(session: AsyncSession = Depends(get_session)):
return None
@router.put("/user/feedback/{feedback_id}")
async def update_user_feedback(session: AsyncSession = Depends(get_session)):
return None
@router.post("/user/doctor")
async def create_doctor(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/doctor")
async def get_doctors(session: AsyncSession = Depends(get_session)):
return None
@router.get("/user/doctor/{doctor_id}")
async def get_doctor(session: AsyncSession = Depends(get_session)):
return None
@router.put("/user/doctor/{doctor_id}")
async def update_doctor(session: AsyncSession = Depends(get_session)):
return None
@router.delete("/user/doctor/{doctor_id}")
async def delete_doctor(session: AsyncSession = Depends(get_session)):
return None
| [
"sqlmodel.Field"
] | [((412, 423), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (421, 423), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((577, 614), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (582, 614), False, 'from sqlmodel import Field, SQLModel\n'), ((1689, 1726), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1694, 1726), False, 'from sqlmodel import Field, SQLModel\n'), ((1973, 2010), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1978, 2010), False, 'from sqlmodel import Field, SQLModel\n'), ((2266, 2303), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2271, 2303), False, 'from sqlmodel import Field, SQLModel\n'), ((2540, 2577), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2545, 2577), False, 'from sqlmodel import Field, SQLModel\n'), ((2785, 2822), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2790, 2822), False, 'from sqlmodel import Field, SQLModel\n'), ((2945, 2982), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (2950, 2982), False, 'from sqlmodel import Field, SQLModel\n'), ((3062, 3099), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3067, 3099), False, 'from sqlmodel import Field, SQLModel\n'), ((3397, 3434), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3402, 3434), False, 'from sqlmodel import Field, SQLModel\n'), ((3649, 3686), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3654, 3686), False, 'from sqlmodel import Field, SQLModel\n'), ((3908, 3945), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (3913, 3945), False, 'from sqlmodel import Field, SQLModel\n'), ((4127, 4164), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (4132, 4164), False, 'from sqlmodel import Field, SQLModel\n'), ((4363, 4400), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (4368, 4400), False, 'from sqlmodel import Field, SQLModel\n'), ((4656, 4676), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (4663, 4676), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((4886, 4906), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (4893, 4906), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((4948, 4960), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (4954, 4960), False, 'from sqlalchemy import select\n'), ((5224, 5244), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5231, 5244), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((5561, 5581), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5568, 5581), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((5847, 5867), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (5854, 5867), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((6144, 6164), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (6151, 6164), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((6480, 6510), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['updated_user'], {}), '(updated_user)\n', (6496, 6510), False, 'from fastapi.encoders import jsonable_encoder\n'), ((6681, 6701), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (6688, 6701), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((7182, 7191), 'fastapi.File', 'File', (['...'], {}), '(...)\n', (7186, 7191), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((7217, 7237), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (7224, 7237), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8042, 8062), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8049, 8062), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8170, 8190), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8177, 8190), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8298, 8318), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8305, 8318), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8428, 8448), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8435, 8448), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8549, 8569), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8556, 8569), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8676, 8696), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8683, 8696), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8800, 8820), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8807, 8820), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((8937, 8957), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (8944, 8957), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9077, 9097), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9084, 9097), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9195, 9215), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9202, 9215), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9310, 9330), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9317, 9330), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9436, 9456), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9443, 9456), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9565, 9585), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9572, 9585), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((9697, 9717), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (9704, 9717), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((6884, 6939), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""User not found"""'}), "(status_code=404, detail='User not found')\n", (6897, 6939), False, 'from fastapi import APIRouter, Depends, HTTPException, status, File, UploadFile\n'), ((7272, 7283), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7281, 7283), False, 'import os\n'), ((6206, 6218), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (6212, 6218), False, 'from sqlalchemy import select\n'), ((6747, 6759), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (6753, 6759), False, 'from sqlalchemy import select\n'), ((7424, 7448), 'os.path.exists', 'os.path.exists', (['file_dir'], {}), '(file_dir)\n', (7438, 7448), False, 'import os\n'), ((7724, 7736), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (7730, 7736), False, 'from sqlalchemy import select\n'), ((5653, 5665), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (5659, 5665), False, 'from sqlalchemy import select\n'), ((5927, 5939), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (5933, 5939), False, 'from sqlalchemy import select\n'), ((7462, 7484), 'pathlib.Path', 'pathlib.Path', (['file_dir'], {}), '(file_dir)\n', (7474, 7484), False, 'import pathlib\n'), ((5306, 5318), 'sqlalchemy.select', 'select', (['User'], {}), '(User)\n', (5312, 5318), False, 'from sqlalchemy import select\n')] |
"""Initial migration
Revision ID: 5f31ff8814e7
Revises:
Create Date: 2022-04-30 19:39:20.164043+00:00
"""
# pylint: disable=no-member, invalid-name, missing-function-docstring, unused-import
import sqlalchemy as sa
import sqlalchemy_utils
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "5f31ff8814e7"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"database",
sa.Column("name", sa.String(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("id", sa.Integer(), nullable=True),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("URI", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("read_only", sa.Boolean(), nullable=True),
sa.Column("async", sa.Boolean(), nullable=True),
sa.Column("cost", sa.Float(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
op.create_index(op.f("ix_database_URI"), "database", ["URI"], unique=False)
op.create_index(op.f("ix_database_async"), "database", ["async"], unique=False)
op.create_index(op.f("ix_database_cost"), "database", ["cost"], unique=False)
op.create_index(
op.f("ix_database_description"),
"database",
["description"],
unique=False,
)
op.create_index(op.f("ix_database_id"), "database", ["id"], unique=False)
op.create_index(
op.f("ix_database_read_only"),
"database",
["read_only"],
unique=False,
)
op.create_table(
"node",
sa.Column("name", sa.String(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=True),
sa.Column(
"type",
sa.Enum("SOURCE", "TRANSFORM", "METRIC", "DIMENSION", name="nodetype"),
nullable=True,
),
sa.Column("id", sa.Integer(), nullable=True),
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("expression", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
op.create_index(op.f("ix_node_description"), "node", ["description"], unique=False)
op.create_index(op.f("ix_node_expression"), "node", ["expression"], unique=False)
op.create_index(op.f("ix_node_id"), "node", ["id"], unique=False)
op.create_table(
"column",
sa.Column(
"type",
sa.Enum(
"BYTES",
"STR",
"FLOAT",
"INT",
"DECIMAL",
"BOOL",
"DATETIME",
"DATE",
"TIME",
"TIMEDELTA",
"LIST",
"DICT",
name="columntype",
),
nullable=True,
),
sa.Column("id", sa.Integer(), nullable=True),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("dimension_id", sa.Integer(), nullable=True),
sa.Column(
"dimension_column",
sqlmodel.sql.sqltypes.AutoString(),
nullable=True,
),
sa.ForeignKeyConstraint(
["dimension_id"],
["node.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
op.f("ix_column_dimension_column"),
"column",
["dimension_column"],
unique=False,
)
op.create_index(
op.f("ix_column_dimension_id"),
"column",
["dimension_id"],
unique=False,
)
op.create_index(op.f("ix_column_id"), "column", ["id"], unique=False)
op.create_index(op.f("ix_column_name"), "column", ["name"], unique=False)
op.create_table(
"noderelationship",
sa.Column("parent_id", sa.Integer(), nullable=True),
sa.Column("child_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["child_id"],
["node.id"],
),
sa.ForeignKeyConstraint(
["parent_id"],
["node.id"],
),
sa.PrimaryKeyConstraint("parent_id", "child_id"),
)
op.create_index(
op.f("ix_noderelationship_child_id"),
"noderelationship",
["child_id"],
unique=False,
)
op.create_index(
op.f("ix_noderelationship_parent_id"),
"noderelationship",
["parent_id"],
unique=False,
)
op.create_table(
"query",
sa.Column("id", sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
sa.Column("database_id", sa.Integer(), nullable=False),
sa.Column("catalog", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("schema_", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column(
"submitted_query",
sqlmodel.sql.sqltypes.AutoString(),
nullable=False,
),
sa.Column("executed_query", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("scheduled", sa.DateTime(), nullable=True),
sa.Column("started", sa.DateTime(), nullable=True),
sa.Column("finished", sa.DateTime(), nullable=True),
sa.Column("state", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("progress", sa.Float(), nullable=True),
sa.ForeignKeyConstraint(
["database_id"],
["database.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_query_catalog"), "query", ["catalog"], unique=False)
op.create_index(
op.f("ix_query_database_id"),
"query",
["database_id"],
unique=False,
)
op.create_index(
op.f("ix_query_executed_query"),
"query",
["executed_query"],
unique=False,
)
op.create_index(op.f("ix_query_finished"), "query", ["finished"], unique=False)
op.create_index(op.f("ix_query_progress"), "query", ["progress"], unique=False)
op.create_index(op.f("ix_query_scheduled"), "query", ["scheduled"], unique=False)
op.create_index(op.f("ix_query_schema_"), "query", ["schema_"], unique=False)
op.create_index(op.f("ix_query_started"), "query", ["started"], unique=False)
op.create_index(op.f("ix_query_state"), "query", ["state"], unique=False)
op.create_index(
op.f("ix_query_submitted_query"),
"query",
["submitted_query"],
unique=False,
)
op.create_table(
"table",
sa.Column("id", sa.Integer(), nullable=True),
sa.Column("node_id", sa.Integer(), nullable=False),
sa.Column("database_id", sa.Integer(), nullable=False),
sa.Column("catalog", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("schema_", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("table", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("cost", sa.Float(), nullable=True),
sa.ForeignKeyConstraint(
["database_id"],
["database.id"],
),
sa.ForeignKeyConstraint(
["node_id"],
["node.id"],
),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(op.f("ix_table_catalog"), "table", ["catalog"], unique=False)
op.create_index(op.f("ix_table_cost"), "table", ["cost"], unique=False)
op.create_index(
op.f("ix_table_database_id"),
"table",
["database_id"],
unique=False,
)
op.create_index(op.f("ix_table_id"), "table", ["id"], unique=False)
op.create_index(op.f("ix_table_node_id"), "table", ["node_id"], unique=False)
op.create_index(op.f("ix_table_schema_"), "table", ["schema_"], unique=False)
op.create_index(op.f("ix_table_table"), "table", ["table"], unique=False)
op.create_table(
"nodecolumns",
sa.Column("node_id", sa.Integer(), nullable=True),
sa.Column("column_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["column_id"],
["column.id"],
),
sa.ForeignKeyConstraint(
["node_id"],
["node.id"],
),
sa.PrimaryKeyConstraint("node_id", "column_id"),
)
op.create_index(
op.f("ix_nodecolumns_column_id"),
"nodecolumns",
["column_id"],
unique=False,
)
op.create_index(
op.f("ix_nodecolumns_node_id"),
"nodecolumns",
["node_id"],
unique=False,
)
op.create_table(
"tablecolumns",
sa.Column("table_id", sa.Integer(), nullable=True),
sa.Column("column_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["column_id"],
["column.id"],
),
sa.ForeignKeyConstraint(
["table_id"],
["table.id"],
),
sa.PrimaryKeyConstraint("table_id", "column_id"),
)
op.create_index(
op.f("ix_tablecolumns_column_id"),
"tablecolumns",
["column_id"],
unique=False,
)
op.create_index(
op.f("ix_tablecolumns_table_id"),
"tablecolumns",
["table_id"],
unique=False,
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f("ix_tablecolumns_table_id"), table_name="tablecolumns")
op.drop_index(op.f("ix_tablecolumns_column_id"), table_name="tablecolumns")
op.drop_table("tablecolumns")
op.drop_index(op.f("ix_nodecolumns_node_id"), table_name="nodecolumns")
op.drop_index(op.f("ix_nodecolumns_column_id"), table_name="nodecolumns")
op.drop_table("nodecolumns")
op.drop_index(op.f("ix_table_table"), table_name="table")
op.drop_index(op.f("ix_table_schema_"), table_name="table")
op.drop_index(op.f("ix_table_node_id"), table_name="table")
op.drop_index(op.f("ix_table_id"), table_name="table")
op.drop_index(op.f("ix_table_database_id"), table_name="table")
op.drop_index(op.f("ix_table_cost"), table_name="table")
op.drop_index(op.f("ix_table_catalog"), table_name="table")
op.drop_table("table")
op.drop_index(op.f("ix_query_submitted_query"), table_name="query")
op.drop_index(op.f("ix_query_state"), table_name="query")
op.drop_index(op.f("ix_query_started"), table_name="query")
op.drop_index(op.f("ix_query_schema_"), table_name="query")
op.drop_index(op.f("ix_query_scheduled"), table_name="query")
op.drop_index(op.f("ix_query_progress"), table_name="query")
op.drop_index(op.f("ix_query_finished"), table_name="query")
op.drop_index(op.f("ix_query_executed_query"), table_name="query")
op.drop_index(op.f("ix_query_database_id"), table_name="query")
op.drop_index(op.f("ix_query_catalog"), table_name="query")
op.drop_table("query")
op.drop_index(op.f("ix_noderelationship_parent_id"), table_name="noderelationship")
op.drop_index(op.f("ix_noderelationship_child_id"), table_name="noderelationship")
op.drop_table("noderelationship")
op.drop_index(op.f("ix_column_name"), table_name="column")
op.drop_index(op.f("ix_column_id"), table_name="column")
op.drop_index(op.f("ix_column_dimension_id"), table_name="column")
op.drop_index(op.f("ix_column_dimension_column"), table_name="column")
op.drop_table("column")
op.drop_index(op.f("ix_node_id"), table_name="node")
op.drop_index(op.f("ix_node_expression"), table_name="node")
op.drop_index(op.f("ix_node_description"), table_name="node")
op.drop_table("node")
op.drop_index(op.f("ix_database_read_only"), table_name="database")
op.drop_index(op.f("ix_database_id"), table_name="database")
op.drop_index(op.f("ix_database_description"), table_name="database")
op.drop_index(op.f("ix_database_cost"), table_name="database")
op.drop_index(op.f("ix_database_async"), table_name="database")
op.drop_index(op.f("ix_database_URI"), table_name="database")
op.drop_table("database")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((9906, 9935), 'alembic.op.drop_table', 'op.drop_table', (['"""tablecolumns"""'], {}), "('tablecolumns')\n", (9919, 9935), False, 'from alembic import op\n'), ((10094, 10122), 'alembic.op.drop_table', 'op.drop_table', (['"""nodecolumns"""'], {}), "('nodecolumns')\n", (10107, 10122), False, 'from alembic import op\n'), ((10569, 10591), 'alembic.op.drop_table', 'op.drop_table', (['"""table"""'], {}), "('table')\n", (10582, 10591), False, 'from alembic import op\n'), ((11257, 11279), 'alembic.op.drop_table', 'op.drop_table', (['"""query"""'], {}), "('query')\n", (11270, 11279), False, 'from alembic import op\n'), ((11459, 11492), 'alembic.op.drop_table', 'op.drop_table', (['"""noderelationship"""'], {}), "('noderelationship')\n", (11472, 11492), False, 'from alembic import op\n'), ((11767, 11790), 'alembic.op.drop_table', 'op.drop_table', (['"""column"""'], {}), "('column')\n", (11780, 11790), False, 'from alembic import op\n'), ((11983, 12004), 'alembic.op.drop_table', 'op.drop_table', (['"""node"""'], {}), "('node')\n", (11996, 12004), False, 'from alembic import op\n'), ((12421, 12446), 'alembic.op.drop_table', 'op.drop_table', (['"""database"""'], {}), "('database')\n", (12434, 12446), False, 'from alembic import op\n'), ((1138, 1167), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1161, 1167), True, 'import sqlalchemy as sa\n'), ((1177, 1204), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""'], {}), "('name')\n", (1196, 1204), True, 'import sqlalchemy as sa\n'), ((1232, 1255), 'alembic.op.f', 'op.f', (['"""ix_database_URI"""'], {}), "('ix_database_URI')\n", (1236, 1255), False, 'from alembic import op\n'), ((1312, 1337), 'alembic.op.f', 'op.f', (['"""ix_database_async"""'], {}), "('ix_database_async')\n", (1316, 1337), False, 'from alembic import op\n'), ((1396, 1420), 'alembic.op.f', 'op.f', (['"""ix_database_cost"""'], {}), "('ix_database_cost')\n", (1400, 1420), False, 'from alembic import op\n'), ((1487, 1518), 'alembic.op.f', 'op.f', (['"""ix_database_description"""'], {}), "('ix_database_description')\n", (1491, 1518), False, 'from alembic import op\n'), ((1613, 1635), 'alembic.op.f', 'op.f', (['"""ix_database_id"""'], {}), "('ix_database_id')\n", (1617, 1635), False, 'from alembic import op\n'), ((1700, 1729), 'alembic.op.f', 'op.f', (['"""ix_database_read_only"""'], {}), "('ix_database_read_only')\n", (1704, 1729), False, 'from alembic import op\n'), ((2438, 2467), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2461, 2467), True, 'import sqlalchemy as sa\n'), ((2477, 2504), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""'], {}), "('name')\n", (2496, 2504), True, 'import sqlalchemy as sa\n'), ((2532, 2559), 'alembic.op.f', 'op.f', (['"""ix_node_description"""'], {}), "('ix_node_description')\n", (2536, 2559), False, 'from alembic import op\n'), ((2620, 2646), 'alembic.op.f', 'op.f', (['"""ix_node_expression"""'], {}), "('ix_node_expression')\n", (2624, 2646), False, 'from alembic import op\n'), ((2706, 2724), 'alembic.op.f', 'op.f', (['"""ix_node_id"""'], {}), "('ix_node_id')\n", (2710, 2724), False, 'from alembic import op\n'), ((3585, 3639), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['dimension_id']", "['node.id']"], {}), "(['dimension_id'], ['node.id'])\n", (3608, 3639), True, 'import sqlalchemy as sa\n'), ((3684, 3713), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (3707, 3713), True, 'import sqlalchemy as sa\n'), ((3750, 3784), 'alembic.op.f', 'op.f', (['"""ix_column_dimension_column"""'], {}), "('ix_column_dimension_column')\n", (3754, 3784), False, 'from alembic import op\n'), ((3891, 3921), 'alembic.op.f', 'op.f', (['"""ix_column_dimension_id"""'], {}), "('ix_column_dimension_id')\n", (3895, 3921), False, 'from alembic import op\n'), ((4015, 4035), 'alembic.op.f', 'op.f', (['"""ix_column_id"""'], {}), "('ix_column_id')\n", (4019, 4035), False, 'from alembic import op\n'), ((4089, 4111), 'alembic.op.f', 'op.f', (['"""ix_column_name"""'], {}), "('ix_column_name')\n", (4093, 4111), False, 'from alembic import op\n'), ((4325, 4375), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['child_id']", "['node.id']"], {}), "(['child_id'], ['node.id'])\n", (4348, 4375), True, 'import sqlalchemy as sa\n'), ((4420, 4471), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['parent_id']", "['node.id']"], {}), "(['parent_id'], ['node.id'])\n", (4443, 4471), True, 'import sqlalchemy as sa\n'), ((4516, 4564), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""parent_id"""', '"""child_id"""'], {}), "('parent_id', 'child_id')\n", (4539, 4564), True, 'import sqlalchemy as sa\n'), ((4601, 4637), 'alembic.op.f', 'op.f', (['"""ix_noderelationship_child_id"""'], {}), "('ix_noderelationship_child_id')\n", (4605, 4637), False, 'from alembic import op\n'), ((4746, 4783), 'alembic.op.f', 'op.f', (['"""ix_noderelationship_parent_id"""'], {}), "('ix_noderelationship_parent_id')\n", (4750, 4783), False, 'from alembic import op\n'), ((5762, 5819), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['database_id']", "['database.id']"], {}), "(['database_id'], ['database.id'])\n", (5785, 5819), True, 'import sqlalchemy as sa\n'), ((5864, 5893), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (5887, 5893), True, 'import sqlalchemy as sa\n'), ((5921, 5945), 'alembic.op.f', 'op.f', (['"""ix_query_catalog"""'], {}), "('ix_query_catalog')\n", (5925, 5945), False, 'from alembic import op\n'), ((6012, 6040), 'alembic.op.f', 'op.f', (['"""ix_query_database_id"""'], {}), "('ix_query_database_id')\n", (6016, 6040), False, 'from alembic import op\n'), ((6141, 6172), 'alembic.op.f', 'op.f', (['"""ix_query_executed_query"""'], {}), "('ix_query_executed_query')\n", (6145, 6172), False, 'from alembic import op\n'), ((6267, 6292), 'alembic.op.f', 'op.f', (['"""ix_query_finished"""'], {}), "('ix_query_finished')\n", (6271, 6292), False, 'from alembic import op\n'), ((6351, 6376), 'alembic.op.f', 'op.f', (['"""ix_query_progress"""'], {}), "('ix_query_progress')\n", (6355, 6376), False, 'from alembic import op\n'), ((6435, 6461), 'alembic.op.f', 'op.f', (['"""ix_query_scheduled"""'], {}), "('ix_query_scheduled')\n", (6439, 6461), False, 'from alembic import op\n'), ((6521, 6545), 'alembic.op.f', 'op.f', (['"""ix_query_schema_"""'], {}), "('ix_query_schema_')\n", (6525, 6545), False, 'from alembic import op\n'), ((6603, 6627), 'alembic.op.f', 'op.f', (['"""ix_query_started"""'], {}), "('ix_query_started')\n", (6607, 6627), False, 'from alembic import op\n'), ((6685, 6707), 'alembic.op.f', 'op.f', (['"""ix_query_state"""'], {}), "('ix_query_state')\n", (6689, 6707), False, 'from alembic import op\n'), ((6772, 6804), 'alembic.op.f', 'op.f', (['"""ix_query_submitted_query"""'], {}), "('ix_query_submitted_query')\n", (6776, 6804), False, 'from alembic import op\n'), ((7400, 7457), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['database_id']", "['database.id']"], {}), "(['database_id'], ['database.id'])\n", (7423, 7457), True, 'import sqlalchemy as sa\n'), ((7502, 7551), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['node_id']", "['node.id']"], {}), "(['node_id'], ['node.id'])\n", (7525, 7551), True, 'import sqlalchemy as sa\n'), ((7596, 7625), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (7619, 7625), True, 'import sqlalchemy as sa\n'), ((7653, 7677), 'alembic.op.f', 'op.f', (['"""ix_table_catalog"""'], {}), "('ix_table_catalog')\n", (7657, 7677), False, 'from alembic import op\n'), ((7735, 7756), 'alembic.op.f', 'op.f', (['"""ix_table_cost"""'], {}), "('ix_table_cost')\n", (7739, 7756), False, 'from alembic import op\n'), ((7820, 7848), 'alembic.op.f', 'op.f', (['"""ix_table_database_id"""'], {}), "('ix_table_database_id')\n", (7824, 7848), False, 'from alembic import op\n'), ((7940, 7959), 'alembic.op.f', 'op.f', (['"""ix_table_id"""'], {}), "('ix_table_id')\n", (7944, 7959), False, 'from alembic import op\n'), ((8012, 8036), 'alembic.op.f', 'op.f', (['"""ix_table_node_id"""'], {}), "('ix_table_node_id')\n", (8016, 8036), False, 'from alembic import op\n'), ((8094, 8118), 'alembic.op.f', 'op.f', (['"""ix_table_schema_"""'], {}), "('ix_table_schema_')\n", (8098, 8118), False, 'from alembic import op\n'), ((8176, 8198), 'alembic.op.f', 'op.f', (['"""ix_table_table"""'], {}), "('ix_table_table')\n", (8180, 8198), False, 'from alembic import op\n'), ((8406, 8459), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['column_id']", "['column.id']"], {}), "(['column_id'], ['column.id'])\n", (8429, 8459), True, 'import sqlalchemy as sa\n'), ((8504, 8553), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['node_id']", "['node.id']"], {}), "(['node_id'], ['node.id'])\n", (8527, 8553), True, 'import sqlalchemy as sa\n'), ((8598, 8645), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""node_id"""', '"""column_id"""'], {}), "('node_id', 'column_id')\n", (8621, 8645), True, 'import sqlalchemy as sa\n'), ((8682, 8714), 'alembic.op.f', 'op.f', (['"""ix_nodecolumns_column_id"""'], {}), "('ix_nodecolumns_column_id')\n", (8686, 8714), False, 'from alembic import op\n'), ((8819, 8849), 'alembic.op.f', 'op.f', (['"""ix_nodecolumns_node_id"""'], {}), "('ix_nodecolumns_node_id')\n", (8823, 8849), False, 'from alembic import op\n'), ((9097, 9150), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['column_id']", "['column.id']"], {}), "(['column_id'], ['column.id'])\n", (9120, 9150), True, 'import sqlalchemy as sa\n'), ((9195, 9246), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['table_id']", "['table.id']"], {}), "(['table_id'], ['table.id'])\n", (9218, 9246), True, 'import sqlalchemy as sa\n'), ((9291, 9339), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""table_id"""', '"""column_id"""'], {}), "('table_id', 'column_id')\n", (9314, 9339), True, 'import sqlalchemy as sa\n'), ((9376, 9409), 'alembic.op.f', 'op.f', (['"""ix_tablecolumns_column_id"""'], {}), "('ix_tablecolumns_column_id')\n", (9380, 9409), False, 'from alembic import op\n'), ((9515, 9547), 'alembic.op.f', 'op.f', (['"""ix_tablecolumns_table_id"""'], {}), "('ix_tablecolumns_table_id')\n", (9519, 9547), False, 'from alembic import op\n'), ((9761, 9793), 'alembic.op.f', 'op.f', (['"""ix_tablecolumns_table_id"""'], {}), "('ix_tablecolumns_table_id')\n", (9765, 9793), False, 'from alembic import op\n'), ((9840, 9873), 'alembic.op.f', 'op.f', (['"""ix_tablecolumns_column_id"""'], {}), "('ix_tablecolumns_column_id')\n", (9844, 9873), False, 'from alembic import op\n'), ((9954, 9984), 'alembic.op.f', 'op.f', (['"""ix_nodecolumns_node_id"""'], {}), "('ix_nodecolumns_node_id')\n", (9958, 9984), False, 'from alembic import op\n'), ((10030, 10062), 'alembic.op.f', 'op.f', (['"""ix_nodecolumns_column_id"""'], {}), "('ix_nodecolumns_column_id')\n", (10034, 10062), False, 'from alembic import op\n'), ((10141, 10163), 'alembic.op.f', 'op.f', (['"""ix_table_table"""'], {}), "('ix_table_table')\n", (10145, 10163), False, 'from alembic import op\n'), ((10203, 10227), 'alembic.op.f', 'op.f', (['"""ix_table_schema_"""'], {}), "('ix_table_schema_')\n", (10207, 10227), False, 'from alembic import op\n'), ((10267, 10291), 'alembic.op.f', 'op.f', (['"""ix_table_node_id"""'], {}), "('ix_table_node_id')\n", (10271, 10291), False, 'from alembic import op\n'), ((10331, 10350), 'alembic.op.f', 'op.f', (['"""ix_table_id"""'], {}), "('ix_table_id')\n", (10335, 10350), False, 'from alembic import op\n'), ((10390, 10418), 'alembic.op.f', 'op.f', (['"""ix_table_database_id"""'], {}), "('ix_table_database_id')\n", (10394, 10418), False, 'from alembic import op\n'), ((10458, 10479), 'alembic.op.f', 'op.f', (['"""ix_table_cost"""'], {}), "('ix_table_cost')\n", (10462, 10479), False, 'from alembic import op\n'), ((10519, 10543), 'alembic.op.f', 'op.f', (['"""ix_table_catalog"""'], {}), "('ix_table_catalog')\n", (10523, 10543), False, 'from alembic import op\n'), ((10610, 10642), 'alembic.op.f', 'op.f', (['"""ix_query_submitted_query"""'], {}), "('ix_query_submitted_query')\n", (10614, 10642), False, 'from alembic import op\n'), ((10682, 10704), 'alembic.op.f', 'op.f', (['"""ix_query_state"""'], {}), "('ix_query_state')\n", (10686, 10704), False, 'from alembic import op\n'), ((10744, 10768), 'alembic.op.f', 'op.f', (['"""ix_query_started"""'], {}), "('ix_query_started')\n", (10748, 10768), False, 'from alembic import op\n'), ((10808, 10832), 'alembic.op.f', 'op.f', (['"""ix_query_schema_"""'], {}), "('ix_query_schema_')\n", (10812, 10832), False, 'from alembic import op\n'), ((10872, 10898), 'alembic.op.f', 'op.f', (['"""ix_query_scheduled"""'], {}), "('ix_query_scheduled')\n", (10876, 10898), False, 'from alembic import op\n'), ((10938, 10963), 'alembic.op.f', 'op.f', (['"""ix_query_progress"""'], {}), "('ix_query_progress')\n", (10942, 10963), False, 'from alembic import op\n'), ((11003, 11028), 'alembic.op.f', 'op.f', (['"""ix_query_finished"""'], {}), "('ix_query_finished')\n", (11007, 11028), False, 'from alembic import op\n'), ((11068, 11099), 'alembic.op.f', 'op.f', (['"""ix_query_executed_query"""'], {}), "('ix_query_executed_query')\n", (11072, 11099), False, 'from alembic import op\n'), ((11139, 11167), 'alembic.op.f', 'op.f', (['"""ix_query_database_id"""'], {}), "('ix_query_database_id')\n", (11143, 11167), False, 'from alembic import op\n'), ((11207, 11231), 'alembic.op.f', 'op.f', (['"""ix_query_catalog"""'], {}), "('ix_query_catalog')\n", (11211, 11231), False, 'from alembic import op\n'), ((11298, 11335), 'alembic.op.f', 'op.f', (['"""ix_noderelationship_parent_id"""'], {}), "('ix_noderelationship_parent_id')\n", (11302, 11335), False, 'from alembic import op\n'), ((11386, 11422), 'alembic.op.f', 'op.f', (['"""ix_noderelationship_child_id"""'], {}), "('ix_noderelationship_child_id')\n", (11390, 11422), False, 'from alembic import op\n'), ((11511, 11533), 'alembic.op.f', 'op.f', (['"""ix_column_name"""'], {}), "('ix_column_name')\n", (11515, 11533), False, 'from alembic import op\n'), ((11574, 11594), 'alembic.op.f', 'op.f', (['"""ix_column_id"""'], {}), "('ix_column_id')\n", (11578, 11594), False, 'from alembic import op\n'), ((11635, 11665), 'alembic.op.f', 'op.f', (['"""ix_column_dimension_id"""'], {}), "('ix_column_dimension_id')\n", (11639, 11665), False, 'from alembic import op\n'), ((11706, 11740), 'alembic.op.f', 'op.f', (['"""ix_column_dimension_column"""'], {}), "('ix_column_dimension_column')\n", (11710, 11740), False, 'from alembic import op\n'), ((11809, 11827), 'alembic.op.f', 'op.f', (['"""ix_node_id"""'], {}), "('ix_node_id')\n", (11813, 11827), False, 'from alembic import op\n'), ((11866, 11892), 'alembic.op.f', 'op.f', (['"""ix_node_expression"""'], {}), "('ix_node_expression')\n", (11870, 11892), False, 'from alembic import op\n'), ((11931, 11958), 'alembic.op.f', 'op.f', (['"""ix_node_description"""'], {}), "('ix_node_description')\n", (11935, 11958), False, 'from alembic import op\n'), ((12023, 12052), 'alembic.op.f', 'op.f', (['"""ix_database_read_only"""'], {}), "('ix_database_read_only')\n", (12027, 12052), False, 'from alembic import op\n'), ((12095, 12117), 'alembic.op.f', 'op.f', (['"""ix_database_id"""'], {}), "('ix_database_id')\n", (12099, 12117), False, 'from alembic import op\n'), ((12160, 12191), 'alembic.op.f', 'op.f', (['"""ix_database_description"""'], {}), "('ix_database_description')\n", (12164, 12191), False, 'from alembic import op\n'), ((12234, 12258), 'alembic.op.f', 'op.f', (['"""ix_database_cost"""'], {}), "('ix_database_cost')\n", (12238, 12258), False, 'from alembic import op\n'), ((12301, 12326), 'alembic.op.f', 'op.f', (['"""ix_database_async"""'], {}), "('ix_database_async')\n", (12305, 12326), False, 'from alembic import op\n'), ((12369, 12392), 'alembic.op.f', 'op.f', (['"""ix_database_URI"""'], {}), "('ix_database_URI')\n", (12373, 12392), False, 'from alembic import op\n'), ((560, 571), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (569, 571), True, 'import sqlalchemy as sa\n'), ((621, 647), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (632, 647), True, 'import sqlalchemy as sa\n'), ((697, 723), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (708, 723), True, 'import sqlalchemy as sa\n'), ((765, 777), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (775, 777), True, 'import sqlalchemy as sa\n'), ((828, 862), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (860, 862), False, 'import sqlmodel\n'), ((905, 939), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (937, 939), False, 'import sqlmodel\n'), ((989, 1001), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (999, 1001), True, 'import sqlalchemy as sa\n'), ((1046, 1058), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1056, 1058), True, 'import sqlalchemy as sa\n'), ((1102, 1112), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (1110, 1112), True, 'import sqlalchemy as sa\n'), ((1865, 1876), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1874, 1876), True, 'import sqlalchemy as sa\n'), ((1926, 1952), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (1937, 1952), True, 'import sqlalchemy as sa\n'), ((2002, 2028), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (2013, 2028), True, 'import sqlalchemy as sa\n'), ((2097, 2167), 'sqlalchemy.Enum', 'sa.Enum', (['"""SOURCE"""', '"""TRANSFORM"""', '"""METRIC"""', '"""DIMENSION"""'], {'name': '"""nodetype"""'}), "('SOURCE', 'TRANSFORM', 'METRIC', 'DIMENSION', name='nodetype')\n", (2104, 2167), True, 'import sqlalchemy as sa\n'), ((2231, 2243), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2241, 2243), True, 'import sqlalchemy as sa\n'), ((2294, 2328), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2326, 2328), False, 'import sqlmodel\n'), ((2378, 2412), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (2410, 2412), False, 'import sqlmodel\n'), ((2846, 2984), 'sqlalchemy.Enum', 'sa.Enum', (['"""BYTES"""', '"""STR"""', '"""FLOAT"""', '"""INT"""', '"""DECIMAL"""', '"""BOOL"""', '"""DATETIME"""', '"""DATE"""', '"""TIME"""', '"""TIMEDELTA"""', '"""LIST"""', '"""DICT"""'], {'name': '"""columntype"""'}), "('BYTES', 'STR', 'FLOAT', 'INT', 'DECIMAL', 'BOOL', 'DATETIME',\n 'DATE', 'TIME', 'TIMEDELTA', 'LIST', 'DICT', name='columntype')\n", (2853, 2984), True, 'import sqlalchemy as sa\n'), ((3267, 3279), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3277, 3279), True, 'import sqlalchemy as sa\n'), ((3323, 3357), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3355, 3357), False, 'import sqlmodel\n'), ((3410, 3422), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (3420, 3422), True, 'import sqlalchemy as sa\n'), ((3503, 3537), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (3535, 3537), False, 'import sqlmodel\n'), ((4227, 4239), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (4237, 4239), True, 'import sqlalchemy as sa\n'), ((4287, 4299), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (4297, 4299), True, 'import sqlalchemy as sa\n'), ((4926, 4964), 'sqlalchemy_utils.types.uuid.UUIDType', 'sqlalchemy_utils.types.uuid.UUIDType', ([], {}), '()\n', (4962, 4964), False, 'import sqlalchemy_utils\n'), ((5016, 5028), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (5026, 5028), True, 'import sqlalchemy as sa\n'), ((5076, 5110), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5108, 5110), False, 'import sqlmodel\n'), ((5157, 5191), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5189, 5191), False, 'import sqlmodel\n'), ((5271, 5305), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5303, 5305), False, 'import sqlmodel\n'), ((5382, 5416), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5414, 5416), False, 'import sqlmodel\n'), ((5465, 5478), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (5476, 5478), True, 'import sqlalchemy as sa\n'), ((5525, 5538), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (5536, 5538), True, 'import sqlalchemy as sa\n'), ((5586, 5599), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (5597, 5599), True, 'import sqlalchemy as sa\n'), ((5644, 5678), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (5676, 5678), False, 'import sqlmodel\n'), ((5726, 5736), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (5734, 5736), True, 'import sqlalchemy as sa\n'), ((6942, 6954), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (6952, 6954), True, 'import sqlalchemy as sa\n'), ((7001, 7013), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (7011, 7013), True, 'import sqlalchemy as sa\n'), ((7065, 7077), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (7075, 7077), True, 'import sqlalchemy as sa\n'), ((7125, 7159), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (7157, 7159), False, 'import sqlmodel\n'), ((7206, 7240), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (7238, 7240), False, 'import sqlmodel\n'), ((7285, 7319), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (7317, 7319), False, 'import sqlmodel\n'), ((7364, 7374), 'sqlalchemy.Float', 'sa.Float', ([], {}), '()\n', (7372, 7374), True, 'import sqlalchemy as sa\n'), ((8307, 8319), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (8317, 8319), True, 'import sqlalchemy as sa\n'), ((8368, 8380), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (8378, 8380), True, 'import sqlalchemy as sa\n'), ((8998, 9010), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (9008, 9010), True, 'import sqlalchemy as sa\n'), ((9059, 9071), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (9069, 9071), True, 'import sqlalchemy as sa\n')] |
# -*- coding: utf-8 -*-
from typing import List
from database import engine
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from models import Device, Project, Task, TaskWithProject
from sqlmodel import Session, SQLModel, select
# init fastapi
app: FastAPI = FastAPI()
# init CORS
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"])
# mount api endpoint
api: FastAPI = FastAPI(title="Scitizen")
app.mount("/api", api)
@app.on_event("startup")
def on_startup() -> None:
"""Setup event on app.
It is used to create the database and the tables.
"""
SQLModel.metadata.create_all(engine)
@api.post("/devices/{device_uuid}", response_model=Device)
def upsert_device(device_uuid: str, device: Device) -> Device:
"""Upsert a device.
It is used to create a device in the database if it does not already exists,
else it is used to update the existing one.
Args:
device_uuid:
The uuid of the device to upsert.
device:
The device data.
Returns:
The upserted device.
"""
with Session(engine) as session:
# check if the device exists
statement = select(Device).where(Device.uuid == device_uuid)
result = session.exec(statement).first()
# if not, create it
if result is None:
result = device
# sync the data
for key, value in device.dict(exclude_unset=True).items():
setattr(result, key, value)
# persist the data to the database
session.add(result)
session.commit()
session.refresh(result)
return result
@api.get("/devices/{device_uuid}", response_model=Device)
def select_device(device_uuid: str):
"""Select a device.
It is used to get a device data from the database.
Args:
device_uuid:
The uuid of the device to get the data from.
Returns:
The device data.
"""
with Session(engine) as session:
statement = select(Device).where(Device.uuid == device_uuid)
result = session.exec(statement).first()
return result
@api.get("/devices", response_model=List[Device])
def select_devices():
"""Select all devices.
It is used to get all devices data from the database.
Returns:
All devices data.
"""
with Session(engine) as session:
statement = select(Device)
results = session.exec(statement).all()
return results
@api.post("/projects/{project_uuid}", response_model=Project)
def upsert_project(project_uuid: str, project: Project) -> Project:
"""Upsert a project.
It is used to create a project in the database if it does not already exists,
else it is used to update the existing one.
Args:
project_uuid:
The uuid of the project to upsert.
project:
The project data.
Returns:
The upserted project.
"""
with Session(engine) as session:
# check if the project exists
statement = select(Project).where(Project.uuid == project_uuid)
result = session.exec(statement).first()
# if not, create it
if result is None:
result = project
# sync the data
for key, value in project.dict(exclude_unset=True).items():
setattr(result, key, value)
# persist the data to the database
session.add(result)
session.commit()
session.refresh(result)
return result
@api.get("/projects/{project_uuid}", response_model=Project)
def select_project(project_uuid: str):
"""Select a project.
It is used to get a project data from the database.
Args:
project_uuid:
The uuid of the project to get the data from.
Returns:
The project data.
"""
with Session(engine) as session:
statement = select(Project).where(Project.uuid == project_uuid)
result = session.exec(statement).first()
return result
@api.get("/projects", response_model=List[Project])
def select_projects():
"""Select all projects.
It is used to get all projects data from the database.
Returns:
All projects data.
"""
with Session(engine) as session:
statement = select(Project)
results = session.exec(statement).all()
return results
@api.post("/tasks/{task_uuid}", response_model=Task)
def upsert_task(task_uuid: str, task: Task) -> Task:
"""Upsert a task.
It is used to create a task in the database if it does not already exists,
else it is used to update the existing one.
Args:
task_uuid:
The uuid of the task to upsert.
task:
The task data.
Returns:
The upserted task.
"""
with Session(engine) as session:
# check if the task exists
statement = select(Task).where(Task.uuid == task_uuid)
result = session.exec(statement).first()
# if not, create it
if result is None:
result = task
# sync the data
for key, value in task.dict(exclude_unset=True).items():
setattr(result, key, value)
# persist the data to the database
session.add(result)
session.commit()
session.refresh(result)
return result
@api.get("/tasks/{task_uuid}", response_model=TaskWithProject)
def select_task(task_uuid: str):
"""Select a task.
It is used to get a task data from the database.
Args:
task_uuid:
The uuid of the task to get the data from.
Returns:
The task data.
"""
with Session(engine) as session:
statement = select(Task, Project).join(Project).where(Task.uuid == task_uuid)
task, project = session.exec(statement).first() # type: ignore
result = TaskWithProject()
for key, value in task.dict().items():
setattr(result, key, value)
result.project = project
return result
@api.get("/tasks", response_model=List[TaskWithProject])
def select_tasks():
"""Select all tasks.
It is used to get all tasks data from the database.
Returns:
All tasks data.
"""
with Session(engine) as session:
statement = select(Task, Project).join(Project)
results = session.exec(statement).all()
tasks = []
for task, project in results:
result = TaskWithProject()
for key, value in task.dict().items():
setattr(result, key, value)
result.project = project
tasks.append(result)
return tasks
@api.put("/tasks/clean")
def clean_tasks():
"""Clean all tasks.
It is used to run maintenance queries on the database in order to keep
consistent data on tasks.
"""
with Session(engine) as session:
with open("./data/clean_failed_tasks.sql", "r", encoding="utf-8") as stream:
statement = stream.read()
session.exec(statement)
with open("./data/clean_succeeded_tasks.sql", "r", encoding="utf-8") as stream:
statement = stream.read()
session.exec(statement)
session.commit()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.select"
] | [((293, 302), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (300, 302), False, 'from fastapi import FastAPI\n'), ((430, 455), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Scitizen"""'}), "(title='Scitizen')\n", (437, 455), False, 'from fastapi import FastAPI\n'), ((627, 663), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (655, 663), False, 'from sqlmodel import Session, SQLModel, select\n'), ((1112, 1127), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1119, 1127), False, 'from sqlmodel import Session, SQLModel, select\n'), ((1978, 1993), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1985, 1993), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2362, 2377), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2369, 2377), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2410, 2424), 'sqlmodel.select', 'select', (['Device'], {}), '(Device)\n', (2416, 2424), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2959, 2974), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (2966, 2974), False, 'from sqlmodel import Session, SQLModel, select\n'), ((3841, 3856), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3848, 3856), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4234, 4249), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (4241, 4249), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4282, 4297), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (4288, 4297), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4787, 4802), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (4794, 4802), False, 'from sqlmodel import Session, SQLModel, select\n'), ((5632, 5647), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (5639, 5647), False, 'from sqlmodel import Session, SQLModel, select\n'), ((5835, 5852), 'models.TaskWithProject', 'TaskWithProject', ([], {}), '()\n', (5850, 5852), False, 'from models import Device, Project, Task, TaskWithProject\n'), ((6210, 6225), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6217, 6225), False, 'from sqlmodel import Session, SQLModel, select\n'), ((6818, 6833), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6825, 6833), False, 'from sqlmodel import Session, SQLModel, select\n'), ((6420, 6437), 'models.TaskWithProject', 'TaskWithProject', ([], {}), '()\n', (6435, 6437), False, 'from models import Device, Project, Task, TaskWithProject\n'), ((1197, 1211), 'sqlmodel.select', 'select', (['Device'], {}), '(Device)\n', (1203, 1211), False, 'from sqlmodel import Session, SQLModel, select\n'), ((2026, 2040), 'sqlmodel.select', 'select', (['Device'], {}), '(Device)\n', (2032, 2040), False, 'from sqlmodel import Session, SQLModel, select\n'), ((3045, 3060), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (3051, 3060), False, 'from sqlmodel import Session, SQLModel, select\n'), ((3889, 3904), 'sqlmodel.select', 'select', (['Project'], {}), '(Project)\n', (3895, 3904), False, 'from sqlmodel import Session, SQLModel, select\n'), ((4870, 4882), 'sqlmodel.select', 'select', (['Task'], {}), '(Task)\n', (4876, 4882), False, 'from sqlmodel import Session, SQLModel, select\n'), ((6258, 6279), 'sqlmodel.select', 'select', (['Task', 'Project'], {}), '(Task, Project)\n', (6264, 6279), False, 'from sqlmodel import Session, SQLModel, select\n'), ((5680, 5701), 'sqlmodel.select', 'select', (['Task', 'Project'], {}), '(Task, Project)\n', (5686, 5701), False, 'from sqlmodel import Session, SQLModel, select\n')] |
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from typing import List, Optional
from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column
from pydantic import EmailStr
class UserGroup(SQLModel, table=True):
id: int = Field(default=None, primary_key=True)
name: str
users: List["User"] = Relationship(back_populates="group")
class User(SQLModel, table=True):
id: int = Field(default=None, primary_key=True)
username: str = Field(sa_column=Column('username', VARCHAR, unique=True))
email: EmailStr
password: str
first_name: str
last_name: str
public_key: str
private_key: str
profile_pic: Optional[str]
is_staff: bool = Field(default=False)
group_id: Optional[int] = Field(foreign_key="usergroup.id")
group: Optional[UserGroup] = Relationship(back_populates="users")
class RemoteUser(SQLModel, table=True):
id: int = Field(default=None, primary_key=True)
remote_id: str = Field(sa_column=Column('username', VARCHAR, unique=True))
inbox: str
public_key: Optional[str]
def generate_new_pk() -> RSAPrivateKey:
return rsa.generate_private_key(public_exponent=65537, key_size=4096)
def load_pk(pkstr: str) -> RSAPrivateKey:
loaded_pk = serialization.load_pem_private_key(pkstr, password=None)
def create_user(username: str, password: str, email: str, first_name: str, last_name: str, profile_pic: Optional[str]) -> User:
user = User(
username=username,
password=password,
email=email,
first_name=first_name,
last_name=last_name,
profile_pic=profile_pic,
)
pk = generate_new_pk()
pub_key = pk.public_key()
user.public_key = pub_key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
).decode('utf-8')
user.private_key = pk.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.NoEncryption()
).decode('utf-8')
return user | [
"sqlmodel.Relationship",
"sqlmodel.Field",
"sqlmodel.Column"
] | [((374, 411), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (379, 411), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((453, 489), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""group"""'}), "(back_populates='group')\n", (465, 489), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((540, 577), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (545, 577), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((826, 846), 'sqlmodel.Field', 'Field', ([], {'default': '(False)'}), '(default=False)\n', (831, 846), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((878, 911), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""usergroup.id"""'}), "(foreign_key='usergroup.id')\n", (883, 911), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((945, 981), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""users"""'}), "(back_populates='users')\n", (957, 981), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((1038, 1075), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1043, 1075), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((1253, 1315), 'cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key', 'rsa.generate_private_key', ([], {'public_exponent': '(65537)', 'key_size': '(4096)'}), '(public_exponent=65537, key_size=4096)\n', (1277, 1315), False, 'from cryptography.hazmat.primitives.asymmetric import rsa\n'), ((1376, 1432), 'cryptography.hazmat.primitives.serialization.load_pem_private_key', 'serialization.load_pem_private_key', (['pkstr'], {'password': 'None'}), '(pkstr, password=None)\n', (1410, 1432), False, 'from cryptography.hazmat.primitives import serialization\n'), ((614, 654), 'sqlmodel.Column', 'Column', (['"""username"""', 'VARCHAR'], {'unique': '(True)'}), "('username', VARCHAR, unique=True)\n", (620, 654), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((1113, 1153), 'sqlmodel.Column', 'Column', (['"""username"""', 'VARCHAR'], {'unique': '(True)'}), "('username', VARCHAR, unique=True)\n", (1119, 1153), False, 'from sqlmodel import SQLModel, Field, Relationship, VARCHAR, Column\n'), ((2154, 2182), 'cryptography.hazmat.primitives.serialization.NoEncryption', 'serialization.NoEncryption', ([], {}), '()\n', (2180, 2182), False, 'from cryptography.hazmat.primitives import serialization\n')] |
import uuid
from datetime import datetime
from sqlmodel import Field
from api.db.models.base import BaseModel, BaseTable
class IssueCredentialBase(BaseModel):
tenant_id: uuid.UUID = Field(nullable=False)
wallet_id: uuid.UUID = Field(nullable=False)
connection_id: uuid.UUID = Field(nullable=False)
cred_type: str = Field(nullable=False)
cred_protocol: str = Field(nullable=False)
cred_def_id: str = Field(nullable=True, default=None)
credential: str = Field(nullable=False)
issue_role: str = Field(nullable=False)
issue_state: str = Field(nullable=False)
# workflow_id will be null until the tenant kcks it off
workflow_id: uuid.UUID = Field(nullable=True, default=None)
cred_exch_id: uuid.UUID = Field(nullable=True, default=None)
rev_reg_id: str = Field(nullable=True, default=None)
cred_rev_id: str = Field(nullable=True, default=None)
class IssueCredential(IssueCredentialBase, BaseTable, table=True):
# This is the class that represents the table
pass
class IssueCredentialCreate(IssueCredentialBase):
# This is the class that represents interface for creating a tenant
# we must set all the required fields,
# but do not need to set optional (and shouldn't)
pass
class IssueCredentialRead(IssueCredentialBase):
# This is the class that represents interface for reading a tenant
# here we indicate id, created_at and updated_at must be included
id: uuid.UUID
created_at: datetime
updated_at: datetime
class IssueCredentialUpdate(BaseModel):
# This is our update interface
# This does NOT inherit from IssueCredentialBase,
# so no need to worry about accidentally updating id or other fields
id: uuid.UUID
issue_state: str = Field(nullable=False)
workflow_id: uuid.UUID = Field(nullable=True, default=None)
cred_exch_id: uuid.UUID = Field(nullable=True, default=None)
rev_reg_id: str = Field(nullable=True, default=None)
cred_rev_id: str = Field(nullable=True, default=None)
| [
"sqlmodel.Field"
] | [((190, 211), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (195, 211), False, 'from sqlmodel import Field\n'), ((239, 260), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (244, 260), False, 'from sqlmodel import Field\n'), ((292, 313), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (297, 313), False, 'from sqlmodel import Field\n'), ((335, 356), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (340, 356), False, 'from sqlmodel import Field\n'), ((382, 403), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (387, 403), False, 'from sqlmodel import Field\n'), ((427, 461), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (432, 461), False, 'from sqlmodel import Field\n'), ((484, 505), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (489, 505), False, 'from sqlmodel import Field\n'), ((528, 549), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (533, 549), False, 'from sqlmodel import Field\n'), ((573, 594), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (578, 594), False, 'from sqlmodel import Field\n'), ((684, 718), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (689, 718), False, 'from sqlmodel import Field\n'), ((749, 783), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (754, 783), False, 'from sqlmodel import Field\n'), ((806, 840), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (811, 840), False, 'from sqlmodel import Field\n'), ((864, 898), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (869, 898), False, 'from sqlmodel import Field\n'), ((1761, 1782), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)'}), '(nullable=False)\n', (1766, 1782), False, 'from sqlmodel import Field\n'), ((1812, 1846), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (1817, 1846), False, 'from sqlmodel import Field\n'), ((1877, 1911), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (1882, 1911), False, 'from sqlmodel import Field\n'), ((1934, 1968), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (1939, 1968), False, 'from sqlmodel import Field\n'), ((1992, 2026), 'sqlmodel.Field', 'Field', ([], {'nullable': '(True)', 'default': 'None'}), '(nullable=True, default=None)\n', (1997, 2026), False, 'from sqlmodel import Field\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel
class UserBase(SQLModel):
name: str
class User(UserBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
class UserCreate(UserBase):
pass
class UserRead(UserBase):
id: int
| [
"sqlmodel.Field"
] | [((168, 205), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (173, 205), False, 'from sqlmodel import Field, SQLModel\n')] |
import uuid
from logging import getLogger
from typing import Optional
from fastapi import UploadFile
from sqlmodel import select, Session
from .models import User
logger = getLogger("uvicorn.error")
def get_user(username: str, session: Session) -> Optional[User]:
statement = select(User).where(User.username == username)
user = session.exec(statement).first()
if user:
return user
return None
def save_file(file: UploadFile) -> str:
filename = uuid.uuid4()
path = f"static/{filename}"
with open(path, "wb") as f:
content = file.file.read()
f.write(content)
return path
| [
"sqlmodel.select"
] | [((175, 201), 'logging.getLogger', 'getLogger', (['"""uvicorn.error"""'], {}), "('uvicorn.error')\n", (184, 201), False, 'from logging import getLogger\n'), ((480, 492), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (490, 492), False, 'import uuid\n'), ((285, 297), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (291, 297), False, 'from sqlmodel import select, Session\n')] |
from typing import TYPE_CHECKING, Union
from uuid import UUID
from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import BaseORMModel
from joj.horse.models.domain import Domain
from joj.horse.models.domain_role import DomainRole
from joj.horse.models.permission import DefaultRole
from joj.horse.utils.errors import BizError, ErrorCode
if TYPE_CHECKING:
from joj.horse.models.user import User
class DomainUser(BaseORMModel, table=True): # type: ignore[call-arg]
__tablename__ = "domain_users"
__table_args__ = (UniqueConstraint("domain_id", "user_id"),)
role: str
domain_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False
)
)
domain: "Domain" = Relationship(back_populates="users")
user_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("users.id", ondelete="CASCADE"), nullable=False
)
)
user: "User" = Relationship(back_populates="domain_users")
@classmethod
async def add_domain_user(
cls, domain_id: UUID, user_id: UUID, role: Union[str, DefaultRole]
) -> "DomainUser":
role = str(role)
# check domain user
domain_user = await DomainUser.get_or_none(domain_id=domain_id, user_id=user_id)
if domain_user is not None:
raise BizError(ErrorCode.UserAlreadyInDomainBadRequestError)
# check domain role
await DomainRole.ensure_exists(domain_id=domain_id, role=role)
# add member
domain_user = DomainUser(domain_id=domain_id, user_id=user_id, role=role)
return domain_user
@classmethod
async def update_domain_user(
cls, domain_id: UUID, user_id: UUID, role: Union[str, DefaultRole]
) -> "DomainUser":
role = str(role)
# check domain user
domain_user = await DomainUser.get_or_none(domain_id=domain_id, user_id=user_id)
if domain_user is None:
raise BizError(ErrorCode.UserAlreadyInDomainBadRequestError)
# check domain role
await DomainRole.ensure_exists(domain_id=domain_id, role=role)
# update role
domain_user.role = role
return domain_user
| [
"sqlmodel.Relationship"
] | [((883, 919), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""users"""'}), "(back_populates='users')\n", (895, 919), False, 'from sqlmodel import Field, Relationship\n'), ((1086, 1129), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""domain_users"""'}), "(back_populates='domain_users')\n", (1098, 1129), False, 'from sqlmodel import Field, Relationship\n'), ((651, 691), 'sqlalchemy.schema.UniqueConstraint', 'UniqueConstraint', (['"""domain_id"""', '"""user_id"""'], {}), "('domain_id', 'user_id')\n", (667, 691), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((1473, 1527), 'joj.horse.utils.errors.BizError', 'BizError', (['ErrorCode.UserAlreadyInDomainBadRequestError'], {}), '(ErrorCode.UserAlreadyInDomainBadRequestError)\n', (1481, 1527), False, 'from joj.horse.utils.errors import BizError, ErrorCode\n'), ((1570, 1626), 'joj.horse.models.domain_role.DomainRole.ensure_exists', 'DomainRole.ensure_exists', ([], {'domain_id': 'domain_id', 'role': 'role'}), '(domain_id=domain_id, role=role)\n', (1594, 1626), False, 'from joj.horse.models.domain_role import DomainRole\n'), ((2099, 2153), 'joj.horse.utils.errors.BizError', 'BizError', (['ErrorCode.UserAlreadyInDomainBadRequestError'], {}), '(ErrorCode.UserAlreadyInDomainBadRequestError)\n', (2107, 2153), False, 'from joj.horse.utils.errors import BizError, ErrorCode\n'), ((2196, 2252), 'joj.horse.models.domain_role.DomainRole.ensure_exists', 'DomainRole.ensure_exists', ([], {'domain_id': 'domain_id', 'role': 'role'}), '(domain_id=domain_id, role=role)\n', (2220, 2252), False, 'from joj.horse.models.domain_role import DomainRole\n'), ((783, 827), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""domains.id"""'], {'ondelete': '"""CASCADE"""'}), "('domains.id', ondelete='CASCADE')\n", (793, 827), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n'), ((992, 1034), 'sqlalchemy.schema.ForeignKey', 'ForeignKey', (['"""users.id"""'], {'ondelete': '"""CASCADE"""'}), "('users.id', ondelete='CASCADE')\n", (1002, 1034), False, 'from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint\n')] |
import os
from fastapi import *
from psycopg2.errors import UndefinedTable
from sqlmodel import Session, select, text
from sqlalchemy.exc import ProgrammingError
from .models.timelog import TimeLog
from .models.calendar import Calendar
from .utils import (
engine,
create_db,
tags_metadata,
execute_sample_sql,
)
from .api import (
user,
timelog,
forecast,
epic,
epic_area,
client,
rate,
team,
role,
sponsor,
capacity,
demand,
)
import csv
app = FastAPI(title="timeflow app API", openapi_tags=tags_metadata)
app.include_router(timelog.router)
app.include_router(forecast.router)
app.include_router(user.router)
app.include_router(epic.router)
app.include_router(epic_area.router)
app.include_router(client.router)
app.include_router(rate.router)
app.include_router(team.router)
app.include_router(role.router)
app.include_router(sponsor.router)
app.include_router(capacity.router)
app.include_router(demand.router)
@app.on_event("startup")
def on_startup():
with Session(engine) as session:
if os.getenv("TIMEFLOW_DEV") == "true":
try:
statement = select(TimeLog)
results = session.exec(statement)
except ProgrammingError:
create_db()
execute_sample_sql()
elif os.getenv("TIMEFLOW_DEV") == "false":
try:
statement = select(TimeLog)
results = session.exec(statement)
except ProgrammingError:
create_db()
@app.on_event("startup")
def implement_calendar_table():
with Session(engine) as session:
try:
statement = select(Calendar.year_name).where(Calendar.id == 1)
result = session.exec(statement).one()
except Exception as e:
print(e)
values_sql = f"""INSERT INTO app_db.calendar (date, year_number, year_name, quarter_number, quarter_name
, month_number, month_name, week_number, week_name, week_day_number, week_day_name)
VALUES """
with open("backend/calendar.csv") as csvfile:
reader = csv.reader(csvfile, delimiter=",", quotechar="|")
values_list = []
for index, row in enumerate(reader):
if index > 0 and row[0] != "":
_row = [f"'{item}'" for item in row]
row_sql = ", ".join(_row)
values = f"({row_sql}),"
values_sql += values
values_sql += f"({row_sql});"
session.execute(text(values_sql))
session.commit()
| [
"sqlmodel.Session",
"sqlmodel.select",
"sqlmodel.text"
] | [((1035, 1050), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1042, 1050), False, 'from sqlmodel import Session, select, text\n'), ((1620, 1635), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1627, 1635), False, 'from sqlmodel import Session, select, text\n'), ((1074, 1099), 'os.getenv', 'os.getenv', (['"""TIMEFLOW_DEV"""'], {}), "('TIMEFLOW_DEV')\n", (1083, 1099), False, 'import os\n'), ((1156, 1171), 'sqlmodel.select', 'select', (['TimeLog'], {}), '(TimeLog)\n', (1162, 1171), False, 'from sqlmodel import Session, select, text\n'), ((1337, 1362), 'os.getenv', 'os.getenv', (['"""TIMEFLOW_DEV"""'], {}), "('TIMEFLOW_DEV')\n", (1346, 1362), False, 'import os\n'), ((1420, 1435), 'sqlmodel.select', 'select', (['TimeLog'], {}), '(TimeLog)\n', (1426, 1435), False, 'from sqlmodel import Session, select, text\n'), ((1685, 1711), 'sqlmodel.select', 'select', (['Calendar.year_name'], {}), '(Calendar.year_name)\n', (1691, 1711), False, 'from sqlmodel import Session, select, text\n'), ((2182, 2231), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""', 'quotechar': '"""|"""'}), "(csvfile, delimiter=',', quotechar='|')\n", (2192, 2231), False, 'import csv\n'), ((2652, 2668), 'sqlmodel.text', 'text', (['values_sql'], {}), '(values_sql)\n', (2656, 2668), False, 'from sqlmodel import Session, select, text\n')] |
from sqlmodel import create_engine, SQLModel, Session
from .config import settings
engine = create_engine(
settings.db.uri,
echo=settings.db.echo,
connect_args=settings.db.connect_args
)
def init_db():
SQLModel.metadata.create_all(engine)
def drop_db():
SQLModel.metadata.drop_all(engine)
def get_session():
with Session(engine) as session:
yield session | [
"sqlmodel.Session",
"sqlmodel.create_engine",
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.SQLModel.metadata.drop_all"
] | [((95, 192), 'sqlmodel.create_engine', 'create_engine', (['settings.db.uri'], {'echo': 'settings.db.echo', 'connect_args': 'settings.db.connect_args'}), '(settings.db.uri, echo=settings.db.echo, connect_args=settings\n .db.connect_args)\n', (108, 192), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((223, 259), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (251, 259), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((281, 315), 'sqlmodel.SQLModel.metadata.drop_all', 'SQLModel.metadata.drop_all', (['engine'], {}), '(engine)\n', (307, 315), False, 'from sqlmodel import create_engine, SQLModel, Session\n'), ((346, 361), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (353, 361), False, 'from sqlmodel import create_engine, SQLModel, Session\n')] |
from sqlmodel import Session
from aot_quotes.common.db import Quotes, engine
def seed_data(filename):
with Session(engine) as session:
with open(filename, "r") as fp:
for line in fp.readlines():
quote_obj = Quotes(quote=line.strip())
session.add(quote_obj)
session.commit()
if __name__ == "__main__":
seed_data("./seed/data.txt")
| [
"sqlmodel.Session"
] | [((114, 129), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (121, 129), False, 'from sqlmodel import Session\n')] |
from unittest.mock import patch
import pytest
from sqlalchemy.exc import NoResultFound
from sqlmodel import Session, create_engine, delete
from ...conftest import get_testing_print_function
def test_tutorial(clear_sqlmodel):
from docs_src.tutorial.one import tutorial005 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)
with pytest.raises(NoResultFound):
mod.main()
with Session(mod.engine) as session:
# TODO: create delete() function
# TODO: add overloads for .exec() with delete object
session.exec(delete(mod.Hero))
session.add(mod.Hero(name="<NAME>", secret_name="<NAME>", age=24))
session.commit()
calls = []
new_print = get_testing_print_function(calls)
with patch("builtins.print", new=new_print):
mod.select_heroes()
assert calls == [
[
"Hero:",
{
"id": 1,
"name": "<NAME>",
"secret_name": "<NAME>",
"age": 24,
},
]
]
| [
"sqlmodel.delete",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((337, 366), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (350, 366), False, 'from sqlmodel import Session, create_engine, delete\n'), ((376, 404), 'pytest.raises', 'pytest.raises', (['NoResultFound'], {}), '(NoResultFound)\n', (389, 404), False, 'import pytest\n'), ((414, 424), 'docs_src.tutorial.one.tutorial005.main', 'mod.main', ([], {}), '()\n', (422, 424), True, 'from docs_src.tutorial.one import tutorial005 as mod\n'), ((434, 453), 'sqlmodel.Session', 'Session', (['mod.engine'], {}), '(mod.engine)\n', (441, 453), False, 'from sqlmodel import Session, create_engine, delete\n'), ((784, 822), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (789, 822), False, 'from unittest.mock import patch\n'), ((832, 851), 'docs_src.tutorial.one.tutorial005.select_heroes', 'mod.select_heroes', ([], {}), '()\n', (849, 851), True, 'from docs_src.tutorial.one import tutorial005 as mod\n'), ((589, 605), 'sqlmodel.delete', 'delete', (['mod.Hero'], {}), '(mod.Hero)\n', (595, 605), False, 'from sqlmodel import Session, create_engine, delete\n'), ((627, 680), 'docs_src.tutorial.one.tutorial005.Hero', 'mod.Hero', ([], {'name': '"""<NAME>"""', 'secret_name': '"""<NAME>"""', 'age': '(24)'}), "(name='<NAME>', secret_name='<NAME>', age=24)\n", (635, 680), True, 'from docs_src.tutorial.one import tutorial005 as mod\n')] |
from typing import Optional
import uuid as uuid_pkg
from sqlmodel import SQLModel, Field
from datetime import datetime
class BaseUUIDModel(SQLModel):
id: uuid_pkg.UUID = Field(
default_factory=uuid_pkg.uuid4,
primary_key=True,
index=True,
nullable=False,
)
updated_at: Optional[datetime]
created_at: Optional[datetime] | [
"sqlmodel.Field"
] | [((175, 262), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'uuid_pkg.uuid4', 'primary_key': '(True)', 'index': '(True)', 'nullable': '(False)'}), '(default_factory=uuid_pkg.uuid4, primary_key=True, index=True,\n nullable=False)\n', (180, 262), False, 'from sqlmodel import SQLModel, Field\n')] |
"""All Models for database are defined here"""
from typing import Optional, List
from sqlalchemy import Column, ForeignKey, Integer
from sqlmodel import Field, SQLModel, Relationship
class VrfLink(SQLModel, table=True):
"""Used for vrf to device link"""
vrf_id: Optional[int] = Field(default=None, foreign_key="vrf.id", primary_key=True)
device_id: Optional[int] = Field(
default=None, foreign_key="device.id", primary_key=True
)
class Platform(SQLModel, table=True):
"""Used to define platforms"""
id: Optional[int] = Field(default=None, primary_key=True)
platform_name: str = Field(index=True)
class Vrf(SQLModel, table=True):
"""used to define VRFs"""
id: Optional[int] = Field(default=None, primary_key=True)
vrf_name: str = Field(index=True)
devices: List["Device"] = Relationship(back_populates="vrfs", link_model=VrfLink)
class Device(SQLModel, table=True):
"""Used to define a simple device"""
id: Optional[int] = Field(default=None, primary_key=True)
name: str = Field(index=True)
mgmt: str
platform_id: Optional[int] = Field(
sa_column=Column(
Integer, ForeignKey("platform.id", ondelete="SET NULL", onupdate="CASCADE")
)
)
vrfs: List["Vrf"] = Relationship(back_populates="devices", link_model=VrfLink)
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((288, 347), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""vrf.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='vrf.id', primary_key=True)\n", (293, 347), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((379, 441), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""device.id"""', 'primary_key': '(True)'}), "(default=None, foreign_key='device.id', primary_key=True)\n", (384, 441), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((555, 592), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (560, 592), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((618, 635), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (623, 635), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((725, 762), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (730, 762), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((783, 800), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (788, 800), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((831, 886), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""vrfs"""', 'link_model': 'VrfLink'}), "(back_populates='vrfs', link_model=VrfLink)\n", (843, 886), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((990, 1027), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (995, 1027), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((1044, 1061), 'sqlmodel.Field', 'Field', ([], {'index': '(True)'}), '(index=True)\n', (1049, 1061), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((1270, 1328), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""devices"""', 'link_model': 'VrfLink'}), "(back_populates='devices', link_model=VrfLink)\n", (1282, 1328), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((1163, 1229), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""platform.id"""'], {'ondelete': '"""SET NULL"""', 'onupdate': '"""CASCADE"""'}), "('platform.id', ondelete='SET NULL', onupdate='CASCADE')\n", (1173, 1229), False, 'from sqlalchemy import Column, ForeignKey, Integer\n')] |
from typing import TYPE_CHECKING, List, Optional
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from .hero import Hero
class TeamBase(SQLModel):
name: str
headquarters: str
class Team(TeamBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
heroes: List["Hero"] = Relationship(back_populates="team")
class TeamCreate(TeamBase):
pass
class TeamRead(TeamBase):
id: int
class TeamUpdate(SQLModel):
id: Optional[int] = None
name: Optional[str] = None
headquarters: Optional[str] = None
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((285, 322), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (290, 322), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((353, 388), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""team"""'}), "(back_populates='team')\n", (365, 388), False, 'from sqlmodel import Field, Relationship, SQLModel\n')] |
from sqlalchemy.sql.schema import MetaData
from sqlmodel import Field, SQLModel
from datetime import datetime
from uuid import UUID, uuid4
class DbtLog(SQLModel, table=True):
"""
Table: __Dbt_Log
"""
__tablename__ = "__Dbt_Log"
Id: UUID = Field(default_factory=uuid4, primary_key=True)
TaskId: str = Field(max_length=128)
Data: str = Field(index=False)
Timestamp: datetime = Field(index=False, default_factory=datetime.utcnow) | [
"sqlmodel.Field"
] | [((262, 308), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'uuid4', 'primary_key': '(True)'}), '(default_factory=uuid4, primary_key=True)\n', (267, 308), False, 'from sqlmodel import Field, SQLModel\n'), ((327, 348), 'sqlmodel.Field', 'Field', ([], {'max_length': '(128)'}), '(max_length=128)\n', (332, 348), False, 'from sqlmodel import Field, SQLModel\n'), ((365, 383), 'sqlmodel.Field', 'Field', ([], {'index': '(False)'}), '(index=False)\n', (370, 383), False, 'from sqlmodel import Field, SQLModel\n'), ((410, 461), 'sqlmodel.Field', 'Field', ([], {'index': '(False)', 'default_factory': 'datetime.utcnow'}), '(index=False, default_factory=datetime.utcnow)\n', (415, 461), False, 'from sqlmodel import Field, SQLModel\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from itertools import chain
from json import dumps
from pathlib import Path
from typing import List
from uuid import UUID
import typer
from prettytable import ALL, PrettyTable
from sqlalchemy import update
from sqlmodel import Session, select
import dbgen.cli.styles as styles
from dbgen.cli.options import config_option, model_arg_option
from dbgen.cli.utils import test_connection, validate_model_str
from dbgen.configuration import initialize
from dbgen.core.metadata import ModelEntity
model_app = typer.Typer(name='model', no_args_is_help=True)
@model_app.command('list')
def list_models(config_file: Path = config_option, tags: List[str] = typer.Option(None, '-t')):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
tags = tags or []
statement = select( # type: ignore
ModelEntity.id,
ModelEntity.name,
ModelEntity.created_at,
ModelEntity.last_run,
ModelEntity.tags,
) # type: ignore
if tags:
statement = statement.where(ModelEntity.tags.op('&&')(tags)) # type: ignore
columns = ['id', 'name', 'created_at', 'last_run', 'tags']
table = PrettyTable(field_names=columns, align='l', hrules=ALL)
with Session(meta_engine) as session:
result = session.exec(statement)
for model_id, model_name, created_at, last_run, tags in result:
table.add_row((model_id, model_name, created_at, last_run, tags))
styles.theme_typer_print(str(table))
@model_app.command('tag')
def tag(model_id: UUID, tags: List[str], config_file: Path = config_option):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
existing_tags = session.exec(select(ModelEntity.tags).where(ModelEntity.id == model_id)).one_or_none()
if existing_tags is None:
raise typer.BadParameter(f"Invalid model_id, no model with ID {model_id}")
new_tags = set(chain(existing_tags, tags))
session.execute(update(ModelEntity).values(tags=new_tags).where(ModelEntity.id == model_id))
session.commit()
@model_app.command('serialize')
def model_serialize(
model_str: str = model_arg_option,
out_file: Path = typer.Option(
None, '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
model = validate_model_str(model_str)
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
model_row = model._get_model_row()
# Check for existing row and if found grab its created_at
created_at = session.exec(
select(ModelEntity.created_at).where(ModelEntity.id == model.uuid)
).one_or_none()
if created_at is None:
session.merge(model_row)
session.commit()
styles.good_typer_print(f"Loaded model {model.name!r} into the database with ID {model.uuid}")
else:
styles.good_typer_print(f"Model {model.name!r} already existed with ID {model.uuid}")
if out_file:
out_file.write_text(dumps(model_row.graph_json))
styles.good_typer_print(f"Wrote serialized graph to {out_file}")
@model_app.command('export')
def model_export(
model_id: UUID,
out_file: Path = typer.Option(
'model.json', '-o', '--out', help='Path to write the serialized model to in json format'
),
config_file: Path = config_option,
):
# Notify of config file
if config_file:
_, meta_conn = initialize(config_file)
test_connection(meta_conn)
meta_engine = meta_conn.get_engine()
with Session(meta_engine) as session:
# Check for existing row and if found grab its created_at
graph_json = session.exec(
select(ModelEntity.graph_json).where(ModelEntity.id == model_id)
).one_or_none()
if not graph_json:
raise ValueError(f"Invalid model_id: No model found with model_id {model_id}")
out_file.write_text(dumps(graph_json))
styles.good_typer_print(f"Wrote serialized graph to {out_file}")
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((1102, 1149), 'typer.Typer', 'typer.Typer', ([], {'name': '"""model"""', 'no_args_is_help': '(True)'}), "(name='model', no_args_is_help=True)\n", (1113, 1149), False, 'import typer\n'), ((1248, 1272), 'typer.Option', 'typer.Option', (['None', '"""-t"""'], {}), "(None, '-t')\n", (1260, 1272), False, 'import typer\n'), ((1375, 1401), 'dbgen.cli.utils.test_connection', 'test_connection', (['meta_conn'], {}), '(meta_conn)\n', (1390, 1401), False, 'from dbgen.cli.utils import test_connection, validate_model_str\n'), ((1481, 1589), 'sqlmodel.select', 'select', (['ModelEntity.id', 'ModelEntity.name', 'ModelEntity.created_at', 'ModelEntity.last_run', 'ModelEntity.tags'], {}), '(ModelEntity.id, ModelEntity.name, ModelEntity.created_at,\n ModelEntity.last_run, ModelEntity.tags)\n', (1487, 1589), False, 'from sqlmodel import Session, select\n'), ((1838, 1893), 'prettytable.PrettyTable', 'PrettyTable', ([], {'field_names': 'columns', 'align': '"""l"""', 'hrules': 'ALL'}), "(field_names=columns, align='l', hrules=ALL)\n", (1849, 1893), False, 'from prettytable import ALL, PrettyTable\n'), ((2372, 2398), 'dbgen.cli.utils.test_connection', 'test_connection', (['meta_conn'], {}), '(meta_conn)\n', (2387, 2398), False, 'from dbgen.cli.utils import test_connection, validate_model_str\n'), ((3008, 3107), 'typer.Option', 'typer.Option', (['None', '"""-o"""', '"""--out"""'], {'help': '"""Path to write the serialized model to in json format"""'}), "(None, '-o', '--out', help=\n 'Path to write the serialized model to in json format')\n", (3020, 3107), False, 'import typer\n'), ((3172, 3201), 'dbgen.cli.utils.validate_model_str', 'validate_model_str', (['model_str'], {}), '(model_str)\n', (3190, 3201), False, 'from dbgen.cli.utils import test_connection, validate_model_str\n'), ((3302, 3328), 'dbgen.cli.utils.test_connection', 'test_connection', (['meta_conn'], {}), '(meta_conn)\n', (3317, 3328), False, 'from dbgen.cli.utils import test_connection, validate_model_str\n'), ((4212, 4319), 'typer.Option', 'typer.Option', (['"""model.json"""', '"""-o"""', '"""--out"""'], {'help': '"""Path to write the serialized model to in json format"""'}), "('model.json', '-o', '--out', help=\n 'Path to write the serialized model to in json format')\n", (4224, 4319), False, 'import typer\n'), ((4472, 4498), 'dbgen.cli.utils.test_connection', 'test_connection', (['meta_conn'], {}), '(meta_conn)\n', (4487, 4498), False, 'from dbgen.cli.utils import test_connection, validate_model_str\n'), ((4951, 5015), 'dbgen.cli.styles.good_typer_print', 'styles.good_typer_print', (['f"""Wrote serialized graph to {out_file}"""'], {}), "(f'Wrote serialized graph to {out_file}')\n", (4974, 5015), True, 'import dbgen.cli.styles as styles\n'), ((1347, 1370), 'dbgen.configuration.initialize', 'initialize', (['config_file'], {}), '(config_file)\n', (1357, 1370), False, 'from dbgen.configuration import initialize\n'), ((1903, 1923), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (1910, 1923), False, 'from sqlmodel import Session, select\n'), ((2344, 2367), 'dbgen.configuration.initialize', 'initialize', (['config_file'], {}), '(config_file)\n', (2354, 2367), False, 'from dbgen.configuration import initialize\n'), ((2450, 2470), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (2457, 2470), False, 'from sqlmodel import Session, select\n'), ((3274, 3297), 'dbgen.configuration.initialize', 'initialize', (['config_file'], {}), '(config_file)\n', (3284, 3297), False, 'from dbgen.configuration import initialize\n'), ((3379, 3399), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (3386, 3399), False, 'from sqlmodel import Session, select\n'), ((4057, 4121), 'dbgen.cli.styles.good_typer_print', 'styles.good_typer_print', (['f"""Wrote serialized graph to {out_file}"""'], {}), "(f'Wrote serialized graph to {out_file}')\n", (4080, 4121), True, 'import dbgen.cli.styles as styles\n'), ((4444, 4467), 'dbgen.configuration.initialize', 'initialize', (['config_file'], {}), '(config_file)\n', (4454, 4467), False, 'from dbgen.configuration import initialize\n'), ((4550, 4570), 'sqlmodel.Session', 'Session', (['meta_engine'], {}), '(meta_engine)\n', (4557, 4570), False, 'from sqlmodel import Session, select\n'), ((4928, 4945), 'json.dumps', 'dumps', (['graph_json'], {}), '(graph_json)\n', (4933, 4945), False, 'from json import dumps\n'), ((2646, 2714), 'typer.BadParameter', 'typer.BadParameter', (['f"""Invalid model_id, no model with ID {model_id}"""'], {}), "(f'Invalid model_id, no model with ID {model_id}')\n", (2664, 2714), False, 'import typer\n'), ((2738, 2764), 'itertools.chain', 'chain', (['existing_tags', 'tags'], {}), '(existing_tags, tags)\n', (2743, 2764), False, 'from itertools import chain\n'), ((3768, 3867), 'dbgen.cli.styles.good_typer_print', 'styles.good_typer_print', (['f"""Loaded model {model.name!r} into the database with ID {model.uuid}"""'], {}), "(\n f'Loaded model {model.name!r} into the database with ID {model.uuid}')\n", (3791, 3867), True, 'import dbgen.cli.styles as styles\n'), ((3889, 3979), 'dbgen.cli.styles.good_typer_print', 'styles.good_typer_print', (['f"""Model {model.name!r} already existed with ID {model.uuid}"""'], {}), "(\n f'Model {model.name!r} already existed with ID {model.uuid}')\n", (3912, 3979), True, 'import dbgen.cli.styles as styles\n'), ((4020, 4047), 'json.dumps', 'dumps', (['model_row.graph_json'], {}), '(model_row.graph_json)\n', (4025, 4047), False, 'from json import dumps\n'), ((1714, 1739), 'dbgen.core.metadata.ModelEntity.tags.op', 'ModelEntity.tags.op', (['"""&&"""'], {}), "('&&')\n", (1733, 1739), False, 'from dbgen.core.metadata import ModelEntity\n'), ((2520, 2544), 'sqlmodel.select', 'select', (['ModelEntity.tags'], {}), '(ModelEntity.tags)\n', (2526, 2544), False, 'from sqlmodel import Session, select\n'), ((2790, 2809), 'sqlalchemy.update', 'update', (['ModelEntity'], {}), '(ModelEntity)\n', (2796, 2809), False, 'from sqlalchemy import update\n'), ((3568, 3598), 'sqlmodel.select', 'select', (['ModelEntity.created_at'], {}), '(ModelEntity.created_at)\n', (3574, 3598), False, 'from sqlmodel import Session, select\n'), ((4696, 4726), 'sqlmodel.select', 'select', (['ModelEntity.graph_json'], {}), '(ModelEntity.graph_json)\n', (4702, 4726), False, 'from sqlmodel import Session, select\n')] |
from select import select
from app.schemas.common import (
IGetResponseBase,
IPostResponseBase,
IDeleteResponseBase,
)
from app.utils.zeroshot_nlp import analyze_text
from app.schemas.zeroshot_inference import (
ZeroShotInferenceCreate,
ZeroShotInferenceRead,
)
from fastapi_pagination import Page, Params
from sqlmodel.ext.asyncio.session import AsyncSession
from fastapi import APIRouter, Depends, HTTPException, Query
from app.api import deps
from app import crud
from app.models import ZeroShotInference
from app.models import ZeroShotInferenceBase
from app.models.user import User
from sqlmodel import select
router = APIRouter()
@router.get(
"/zero-shot-classification-inferences/",
response_model=IGetResponseBase[Page[ZeroShotInference]],
)
async def get_zero_shot_classification_inferences(
params: Params = Depends(),
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
inferences = await crud.zeroshot_inference.get_multi_paginated(
db_session, params=params
)
return IGetResponseBase[Page[ZeroShotInference]](data=inferences)
@router.get(
"/zero-shot-classification-inferences/order_by_created_at/",
response_model=IGetResponseBase[Page[ZeroShotInference]],
)
async def zero_shot_classification_inferences_order_by_created_at(
params: Params = Depends(),
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
query = select(ZeroShotInference).order_by(ZeroShotInference.created_at)
inferences = await crud.zeroshot_inference.get_multi_paginated(
db_session, query=query, params=params
)
return IGetResponseBase[Page[ZeroShotInferenceRead]](data=inferences)
@router.post(
"/zero-shot-classification-predict/",
response_model=IPostResponseBase[ZeroShotInferenceRead],
)
async def predict(
request: ZeroShotInferenceBase,
db_session: AsyncSession = Depends(deps.get_db),
current_user: User = Depends(deps.get_current_active_user),
):
text = request.text
labels = request.candidate_labels
result = await analyze_text(text, labels)
text = result[0]
candidate_labels = result[1]
res = result[2]
inference = ZeroShotInferenceCreate(
text=text, candidate_labels=candidate_labels, result=res
)
my_inference = await crud.zeroshot_inference.create_inference(
db_session, obj_in=inference, user_id=current_user.id
)
return IPostResponseBase(data=ZeroShotInferenceRead.from_orm(my_inference))
| [
"sqlmodel.select"
] | [((644, 655), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (653, 655), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((852, 861), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (859, 861), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((894, 914), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (901, 914), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((941, 978), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (948, 978), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1393, 1402), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (1400, 1402), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1435, 1455), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (1442, 1455), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((1482, 1519), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (1489, 1519), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((2003, 2023), 'fastapi.Depends', 'Depends', (['deps.get_db'], {}), '(deps.get_db)\n', (2010, 2023), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((2050, 2087), 'fastapi.Depends', 'Depends', (['deps.get_current_active_user'], {}), '(deps.get_current_active_user)\n', (2057, 2087), False, 'from fastapi import APIRouter, Depends, HTTPException, Query\n'), ((2292, 2377), 'app.schemas.zeroshot_inference.ZeroShotInferenceCreate', 'ZeroShotInferenceCreate', ([], {'text': 'text', 'candidate_labels': 'candidate_labels', 'result': 'res'}), '(text=text, candidate_labels=candidate_labels,\n result=res)\n', (2315, 2377), False, 'from app.schemas.zeroshot_inference import ZeroShotInferenceCreate, ZeroShotInferenceRead\n'), ((1006, 1076), 'app.crud.zeroshot_inference.get_multi_paginated', 'crud.zeroshot_inference.get_multi_paginated', (['db_session'], {'params': 'params'}), '(db_session, params=params)\n', (1049, 1076), False, 'from app import crud\n'), ((1624, 1712), 'app.crud.zeroshot_inference.get_multi_paginated', 'crud.zeroshot_inference.get_multi_paginated', (['db_session'], {'query': 'query', 'params': 'params'}), '(db_session, query=query, params\n =params)\n', (1667, 1712), False, 'from app import crud\n'), ((2174, 2200), 'app.utils.zeroshot_nlp.analyze_text', 'analyze_text', (['text', 'labels'], {}), '(text, labels)\n', (2186, 2200), False, 'from app.utils.zeroshot_nlp import analyze_text\n'), ((2414, 2513), 'app.crud.zeroshot_inference.create_inference', 'crud.zeroshot_inference.create_inference', (['db_session'], {'obj_in': 'inference', 'user_id': 'current_user.id'}), '(db_session, obj_in=inference,\n user_id=current_user.id)\n', (2454, 2513), False, 'from app import crud\n'), ((1536, 1561), 'sqlmodel.select', 'select', (['ZeroShotInference'], {}), '(ZeroShotInference)\n', (1542, 1561), False, 'from sqlmodel import select\n'), ((2559, 2603), 'app.schemas.zeroshot_inference.ZeroShotInferenceRead.from_orm', 'ZeroShotInferenceRead.from_orm', (['my_inference'], {}), '(my_inference)\n', (2589, 2603), False, 'from app.schemas.zeroshot_inference import ZeroShotInferenceCreate, ZeroShotInferenceRead\n')] |
from datetime import date
from typing import List, Optional
from api.ecoindex.models.responses import ApiEcoindex
from api.models.enums import Version
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlmodel import select
from db.helper import date_filter
async def get_host_list_db(
session: AsyncSession,
version: Optional[Version] = Version.v1,
q: Optional[str] = None,
date_from: Optional[date] = None,
date_to: Optional[date] = None,
) -> List[str]:
statement = select(ApiEcoindex.host).where(
ApiEcoindex.version == version.get_version_number()
)
if q:
statement = statement.filter(ApiEcoindex.host.like(f"%{q}%"))
statement = date_filter(statement=statement, date_from=date_from, date_to=date_to)
statement = statement.group_by(ApiEcoindex.host).order_by(ApiEcoindex.host)
hosts = await session.execute(statement)
return hosts.scalars().all()
| [
"sqlmodel.select"
] | [((704, 774), 'db.helper.date_filter', 'date_filter', ([], {'statement': 'statement', 'date_from': 'date_from', 'date_to': 'date_to'}), '(statement=statement, date_from=date_from, date_to=date_to)\n', (715, 774), False, 'from db.helper import date_filter\n'), ((508, 532), 'sqlmodel.select', 'select', (['ApiEcoindex.host'], {}), '(ApiEcoindex.host)\n', (514, 532), False, 'from sqlmodel import select\n'), ((654, 685), 'api.ecoindex.models.responses.ApiEcoindex.host.like', 'ApiEcoindex.host.like', (['f"""%{q}%"""'], {}), "(f'%{q}%')\n", (675, 685), False, 'from api.ecoindex.models.responses import ApiEcoindex\n')] |
from datetime import datetime
import dateutil.parser
import json
import requests
from requests.models import to_key_val_list
from sqlmodel import Field, Session, SQLModel, create_engine, select
from fastapi.logger import logger
from database import engine
from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby
def get_daft_search_result():
try:
response = requests.get('http://daft:8000/search_result/')
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_daft_details(url):
try:
print(url)
params = {
'url': url,
'method': 'json_details',
}
response = requests.get(
'http://daft:8000/listing_details/', params=params)
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return response.json()
def get_routes_json(from_lat, from_long, to_lat, to_long):
try:
data = {
"from_point": {"lat": from_lat, "long": from_long},
"to_point": {"lat": to_lat, "long": to_long}
}
response = requests.post(
'http://location:8000/route/', data=json.dumps(data))
response.raise_for_status()
return response.json()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
logger.error(error)
return {}
def get_routes(listing: Listing):
ret_ = []
with Session(engine) as session:
interest_points_sttm = select(InterestPoint).\
where(InterestPoint.is_active == True)
interest_points = session.exec(interest_points_sttm).all()
for interest_point in interest_points:
routes = get_routes_json(
listing.latitude, listing.longitude,
interest_point.latitude, interest_point.longitude)
print('routes')
print(routes)
for route in routes:
ret_.append(Route(
interest_point_id=interest_point.id,
waking_distance=route['waking_distance'],
total_distance=route['total_distance'],
total_time=route['total_time'],
public_transport_count=route['public_transport_count'],
))
print(ret_)
return ret_
def get_places_nearby_json(from_lat, from_long, query):
try:
data = {"lat": from_lat, "long": from_long}
response = requests.post(
'http://location:8000/interest_places_nearby/', data=json.dumps(data))
response.raise_for_status()
# Additional code will only run if the request is successful
except requests.exceptions.HTTPError as error:
print(error)
return response.json()
def get_places_nearby(listing: Listing):
ret_ = []
query = 'Grocery'
places = get_places_nearby_json(
from_lat=listing.latitude, from_long=listing.longitude,
query=query)
for place in places:
ret_.append(PlaceNearby(
name=place['name'],
latitude=place['lat'],
longitude=place['long'],
address=place['address'],
distance=place['distance'],
website=place['website'],
website_domain=place['website_domain'],
chain_name=place['chain_name'],
query=query,
))
return ret_
def save_new_listing(search_result, listing_d):
with Session(engine) as session:
listing = Listing()
# Search Result
listing.source = 'daft'
listing.is_active = True
listing.url = search_result['url']
listing.address = search_result['title']
listing.price = search_result['monthly_price']
listing.latitude = search_result['latitude']
listing.longitude = search_result['longitude']
listing.publish_date = dateutil.parser.isoparse(
search_result['publish_date'])
# Details:
listing.source_id = listing_d['id']
listing.source_code = listing_d['daftShortcode']
listing.title = listing_d['title']
listing.bedrooms = listing_d['numBedrooms']
listing.bathrooms = listing_d['numBathrooms']
listing.description = listing_d['description']
listing.last_updated = listing_d['lastUpdateDate']
listing.images_count = listing_d['totalImages']
listing.views = listing_d['listingViews']
facilities_arr = []
for facility in listing_d['facilities']:
facility_sttm = select(Facility).\
where(Facility.name == facility.title()).\
where(Facility.category == 'facilities')
facility_obj = session.exec(facility_sttm).first()
if(not facility_obj):
facility_obj = Facility(
name=facility.title(),
category='facilities'
)
facilities_arr.append(facility_obj)
for facility in listing_d['propertyOverview']:
facility_sttm = select(Facility).\
where(Facility.name == facility.title()).\
where(Facility.category == 'overview')
facility_obj = session.exec(facility_sttm).first()
if(not facility_obj):
facility_obj = Facility(
name=facility.title(),
category='overview'
)
facilities_arr.append(facility_obj)
listing.facilities = facilities_arr
listing.images = [Image(url=x['url'], url_600=x['url_600']) for x in listing_d['images']]
listing.routes = get_routes(listing)
listing.places_nearby = get_places_nearby(listing)
# Saving it
session.add(listing)
session.commit()
def give_it_a_try(how_many = 25):
ret_ = {}
daft_search_results = get_daft_search_result()
daft_result_list = daft_search_results['result_list']
c = 0
details = []
with Session(engine) as session:
for daft_result in daft_result_list:
statement = select(Listing).\
where(Listing.source == 'daft').\
where(Listing.url == daft_result['url']).\
where(Listing.price == daft_result['monthly_price'])
results = session.exec(statement).first()
if results:
continue
pass # Check telegram sent message
else:
print(daft_result['url'])
details = get_daft_details(daft_result['url'])
save_new_listing(daft_result, details)
c += 1
if c < how_many:
continue
break
return details
| [
"sqlmodel.Session",
"sqlmodel.select"
] | [((413, 460), 'requests.get', 'requests.get', (['"""http://daft:8000/search_result/"""'], {}), "('http://daft:8000/search_result/')\n", (425, 460), False, 'import requests\n'), ((833, 897), 'requests.get', 'requests.get', (['"""http://daft:8000/listing_details/"""'], {'params': 'params'}), "('http://daft:8000/listing_details/', params=params)\n", (845, 897), False, 'import requests\n'), ((1773, 1788), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1780, 1788), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((3788, 3803), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (3795, 3803), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((3834, 3843), 'models.Listing', 'Listing', ([], {}), '()\n', (3841, 3843), False, 'from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby\n'), ((6332, 6347), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (6339, 6347), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((1106, 1125), 'fastapi.logger.logger.error', 'logger.error', (['error'], {}), '(error)\n', (1118, 1125), False, 'from fastapi.logger import logger\n'), ((1676, 1695), 'fastapi.logger.logger.error', 'logger.error', (['error'], {}), '(error)\n', (1688, 1695), False, 'from fastapi.logger import logger\n'), ((3347, 3606), 'models.PlaceNearby', 'PlaceNearby', ([], {'name': "place['name']", 'latitude': "place['lat']", 'longitude': "place['long']", 'address': "place['address']", 'distance': "place['distance']", 'website': "place['website']", 'website_domain': "place['website_domain']", 'chain_name': "place['chain_name']", 'query': 'query'}), "(name=place['name'], latitude=place['lat'], longitude=place[\n 'long'], address=place['address'], distance=place['distance'], website=\n place['website'], website_domain=place['website_domain'], chain_name=\n place['chain_name'], query=query)\n", (3358, 3606), False, 'from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby\n'), ((5885, 5926), 'models.Image', 'Image', ([], {'url': "x['url']", 'url_600': "x['url_600']"}), "(url=x['url'], url_600=x['url_600'])\n", (5890, 5926), False, 'from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby\n'), ((1463, 1479), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1473, 1479), False, 'import json\n'), ((1832, 1853), 'sqlmodel.select', 'select', (['InterestPoint'], {}), '(InterestPoint)\n', (1838, 1853), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((2879, 2895), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2889, 2895), False, 'import json\n'), ((2295, 2522), 'models.Route', 'Route', ([], {'interest_point_id': 'interest_point.id', 'waking_distance': "route['waking_distance']", 'total_distance': "route['total_distance']", 'total_time': "route['total_time']", 'public_transport_count': "route['public_transport_count']"}), "(interest_point_id=interest_point.id, waking_distance=route[\n 'waking_distance'], total_distance=route['total_distance'], total_time=\n route['total_time'], public_transport_count=route['public_transport_count']\n )\n", (2300, 2522), False, 'from models import Listing, Facility, Image, InterestPoint, Route, RouteCreate, PlaceNearby\n'), ((4884, 4900), 'sqlmodel.select', 'select', (['Facility'], {}), '(Facility)\n', (4890, 4900), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((5393, 5409), 'sqlmodel.select', 'select', (['Facility'], {}), '(Facility)\n', (5399, 5409), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n'), ((6430, 6445), 'sqlmodel.select', 'select', (['Listing'], {}), '(Listing)\n', (6436, 6445), False, 'from sqlmodel import Field, Session, SQLModel, create_engine, select\n')] |
from minio import Minio
import os
from typing import Optional
from glob import glob
import pathlib
from sqlmodel import Field, Session, SQLModel, create_engine
def get_images(folder:str="../cls_labeling/public/images"):
return glob(str(pathlib.Path(folder,"**","*.jpg")), recursive=True)
class Image(SQLModel, table=True):
key: Optional[int] = Field(default=None, primary_key=True)
image_name: str
label: str
image_url: str
if __name__ == "__main__":
engine = create_engine("sqlite:///image.db")
client = Minio(
"localhost:9001",
secure=False,
access_key="<KEY>",
secret_key="<KEY>"
)
bucket_found = client.bucket_exists("image")
if not bucket_found:
client.make_bucket("image")
else:
for obj in client.list_objects("image"):
client.remove_object("image", obj.object_name)
client.remove_bucket("image")
client.make_bucket("image")
os.remove("./image.db")
SQLModel.metadata.create_all(engine)
images = []
for i, image in enumerate(get_images()):
print(pathlib.Path(image).stem, image)
image_name = pathlib.Path(image).stem+'.jpg'
client.fput_object(
"image", image_name, image
)
image_url = f"http://localhost:9001/image/{image_name}"
images.append(
Image(key=i, image_name=pathlib.Path(image).stem, label="", image_url=image_url)
)
with Session(engine) as session:
for data in images: session.add(data)
session.commit() | [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.Field",
"sqlmodel.create_engine"
] | [((355, 392), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (360, 392), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((489, 524), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///image.db"""'], {}), "('sqlite:///image.db')\n", (502, 524), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((538, 615), 'minio.Minio', 'Minio', (['"""localhost:9001"""'], {'secure': '(False)', 'access_key': '"""<KEY>"""', 'secret_key': '"""<KEY>"""'}), "('localhost:9001', secure=False, access_key='<KEY>', secret_key='<KEY>')\n", (543, 615), False, 'from minio import Minio\n'), ((994, 1030), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (1022, 1030), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((965, 988), 'os.remove', 'os.remove', (['"""./image.db"""'], {}), "('./image.db')\n", (974, 988), False, 'import os\n'), ((1473, 1488), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1480, 1488), False, 'from sqlmodel import Field, Session, SQLModel, create_engine\n'), ((241, 276), 'pathlib.Path', 'pathlib.Path', (['folder', '"""**"""', '"""*.jpg"""'], {}), "(folder, '**', '*.jpg')\n", (253, 276), False, 'import pathlib\n'), ((1108, 1127), 'pathlib.Path', 'pathlib.Path', (['image'], {}), '(image)\n', (1120, 1127), False, 'import pathlib\n'), ((1162, 1181), 'pathlib.Path', 'pathlib.Path', (['image'], {}), '(image)\n', (1174, 1181), False, 'import pathlib\n'), ((1396, 1415), 'pathlib.Path', 'pathlib.Path', (['image'], {}), '(image)\n', (1408, 1415), False, 'import pathlib\n')] |
"""initial
Revision ID: a57c89b47e7b
Revises:
Create Date: 2021-11-01 04:27:56.134285
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'a57c89b47e7b'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('increment',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_increment_id'), 'increment', ['id'], unique=False)
op.create_table('listings',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_listings_id'), 'listings', ['id'], unique=False)
op.create_table('song',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('artist', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('year', sa.Integer(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_song_artist'), 'song', ['artist'], unique=False)
op.create_index(op.f('ix_song_id'), 'song', ['id'], unique=False)
op.create_index(op.f('ix_song_name'), 'song', ['name'], unique=False)
op.create_index(op.f('ix_song_year'), 'song', ['year'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_song_year'), table_name='song')
op.drop_index(op.f('ix_song_name'), table_name='song')
op.drop_index(op.f('ix_song_id'), table_name='song')
op.drop_index(op.f('ix_song_artist'), table_name='song')
op.drop_table('song')
op.drop_index(op.f('ix_listings_id'), table_name='listings')
op.drop_table('listings')
op.drop_index(op.f('ix_increment_id'), table_name='increment')
op.drop_table('increment')
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((1752, 1773), 'alembic.op.drop_table', 'op.drop_table', (['"""song"""'], {}), "('song')\n", (1765, 1773), False, 'from alembic import op\n'), ((1843, 1868), 'alembic.op.drop_table', 'op.drop_table', (['"""listings"""'], {}), "('listings')\n", (1856, 1868), False, 'from alembic import op\n'), ((1940, 1966), 'alembic.op.drop_table', 'op.drop_table', (['"""increment"""'], {}), "('increment')\n", (1953, 1966), False, 'from alembic import op\n'), ((456, 485), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (479, 485), True, 'import sqlalchemy as sa\n'), ((512, 535), 'alembic.op.f', 'op.f', (['"""ix_increment_id"""'], {}), "('ix_increment_id')\n", (516, 535), False, 'from alembic import op\n'), ((659, 688), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (682, 688), True, 'import sqlalchemy as sa\n'), ((715, 737), 'alembic.op.f', 'op.f', (['"""ix_listings_id"""'], {}), "('ix_listings_id')\n", (719, 737), False, 'from alembic import op\n'), ((1060, 1089), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1083, 1089), True, 'import sqlalchemy as sa\n'), ((1116, 1138), 'alembic.op.f', 'op.f', (['"""ix_song_artist"""'], {}), "('ix_song_artist')\n", (1120, 1138), False, 'from alembic import op\n'), ((1194, 1212), 'alembic.op.f', 'op.f', (['"""ix_song_id"""'], {}), "('ix_song_id')\n", (1198, 1212), False, 'from alembic import op\n'), ((1264, 1284), 'alembic.op.f', 'op.f', (['"""ix_song_name"""'], {}), "('ix_song_name')\n", (1268, 1284), False, 'from alembic import op\n'), ((1338, 1358), 'alembic.op.f', 'op.f', (['"""ix_song_year"""'], {}), "('ix_song_year')\n", (1342, 1358), False, 'from alembic import op\n'), ((1530, 1550), 'alembic.op.f', 'op.f', (['"""ix_song_year"""'], {}), "('ix_song_year')\n", (1534, 1550), False, 'from alembic import op\n'), ((1589, 1609), 'alembic.op.f', 'op.f', (['"""ix_song_name"""'], {}), "('ix_song_name')\n", (1593, 1609), False, 'from alembic import op\n'), ((1648, 1666), 'alembic.op.f', 'op.f', (['"""ix_song_id"""'], {}), "('ix_song_id')\n", (1652, 1666), False, 'from alembic import op\n'), ((1705, 1727), 'alembic.op.f', 'op.f', (['"""ix_song_artist"""'], {}), "('ix_song_artist')\n", (1709, 1727), False, 'from alembic import op\n'), ((1792, 1814), 'alembic.op.f', 'op.f', (['"""ix_listings_id"""'], {}), "('ix_listings_id')\n", (1796, 1814), False, 'from alembic import op\n'), ((1887, 1910), 'alembic.op.f', 'op.f', (['"""ix_increment_id"""'], {}), "('ix_increment_id')\n", (1891, 1910), False, 'from alembic import op\n'), ((421, 433), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (431, 433), True, 'import sqlalchemy as sa\n'), ((624, 636), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (634, 636), True, 'import sqlalchemy as sa\n'), ((823, 857), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (855, 857), False, 'import sqlmodel\n'), ((900, 934), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (932, 934), False, 'import sqlmodel\n'), ((975, 987), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (985, 987), True, 'import sqlalchemy as sa\n'), ((1025, 1037), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1035, 1037), True, 'import sqlalchemy as sa\n')] |
from datetime import datetime, timedelta
import pendulum
import prefect
from prefect import Flow, task
from prefect.run_configs import DockerRun
from prefect.schedules import CronSchedule
from prefect.storage import GitHub
from scrapy.crawler import CrawlerProcess
from sqlmodel import SQLModel, create_engine
from imdb_rating.dependencies.spiders import IMDBSpider
@task
def scrap_movies_from_imdb():
"""
Scrap movies from IMDB and store them into a PostgreSQL database using SQLModel.
Run a scrapy crawler process to launch a spider.
"""
logger = prefect.context.get("logger")
# engine = create_engine('postgresql://postgres:postgres@localhost:5432/imdb')
engine = create_engine("sqlite:///imdb.db")
SQLModel.metadata.create_all(engine)
start = datetime.today() - timedelta(days=90)
end = datetime.today() + timedelta(days=30)
process = CrawlerProcess()
process.crawl(IMDBSpider, start=start, end=end, engine=engine)
process.start()
schedule = CronSchedule("0 0 * * *", start_date=pendulum.now(tz="Europe/Paris"))
storage = GitHub(repo="PeregHer/imdb-rating-predictions", path="imdb_rating/workflow/flow.py")
run_config = DockerRun(image="imdb-scraping:latest")
with Flow(
"imdb_scraping", schedule=schedule, storage=storage, run_config=run_config
) as flow:
scrap_movies_from_imdb()
# flow.register(project_name="imdb-scraping", tags=["imdb-scraping"])
# flow.run() | [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.create_engine"
] | [((1089, 1178), 'prefect.storage.GitHub', 'GitHub', ([], {'repo': '"""PeregHer/imdb-rating-predictions"""', 'path': '"""imdb_rating/workflow/flow.py"""'}), "(repo='PeregHer/imdb-rating-predictions', path=\n 'imdb_rating/workflow/flow.py')\n", (1095, 1178), False, 'from prefect.storage import GitHub\n'), ((1188, 1227), 'prefect.run_configs.DockerRun', 'DockerRun', ([], {'image': '"""imdb-scraping:latest"""'}), "(image='imdb-scraping:latest')\n", (1197, 1227), False, 'from prefect.run_configs import DockerRun\n'), ((573, 602), 'prefect.context.get', 'prefect.context.get', (['"""logger"""'], {}), "('logger')\n", (592, 602), False, 'import prefect\n'), ((700, 734), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///imdb.db"""'], {}), "('sqlite:///imdb.db')\n", (713, 734), False, 'from sqlmodel import SQLModel, create_engine\n'), ((739, 775), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (767, 775), False, 'from sqlmodel import SQLModel, create_engine\n'), ((890, 906), 'scrapy.crawler.CrawlerProcess', 'CrawlerProcess', ([], {}), '()\n', (904, 906), False, 'from scrapy.crawler import CrawlerProcess\n'), ((1235, 1320), 'prefect.Flow', 'Flow', (['"""imdb_scraping"""'], {'schedule': 'schedule', 'storage': 'storage', 'run_config': 'run_config'}), "('imdb_scraping', schedule=schedule, storage=storage, run_config=run_config\n )\n", (1239, 1320), False, 'from prefect import Flow, task\n'), ((789, 805), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (803, 805), False, 'from datetime import datetime, timedelta\n'), ((808, 826), 'datetime.timedelta', 'timedelta', ([], {'days': '(90)'}), '(days=90)\n', (817, 826), False, 'from datetime import datetime, timedelta\n'), ((837, 853), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (851, 853), False, 'from datetime import datetime, timedelta\n'), ((856, 874), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (865, 874), False, 'from datetime import datetime, timedelta\n'), ((1045, 1076), 'pendulum.now', 'pendulum.now', ([], {'tz': '"""Europe/Paris"""'}), "(tz='Europe/Paris')\n", (1057, 1076), False, 'import pendulum\n')] |
"""
Query related functions.
"""
from datetime import datetime, timezone
from typing import List, Tuple
import sqlparse
from sqlalchemy import text
from sqlmodel import Session, create_engine
from datajunction.config import Settings
from datajunction.models.query import (
ColumnMetadata,
Query,
QueryResults,
QueryState,
QueryWithResults,
StatementResults,
)
from datajunction.typing import ColumnType, Description, SQLADialect, Stream, TypeEnum
def get_columns_from_description(
description: Description,
dialect: SQLADialect,
) -> List[ColumnMetadata]:
"""
Extract column metadata from the cursor description.
For now this uses the information from the cursor description, which only allow us to
distinguish between 4 types (see ``TypeEnum``). In the future we should use a type
inferrer to determine the types based on the query.
"""
type_map = {
TypeEnum.STRING: ColumnType.STR,
TypeEnum.BINARY: ColumnType.BYTES,
TypeEnum.NUMBER: ColumnType.FLOAT,
TypeEnum.DATETIME: ColumnType.DATETIME,
}
columns = []
for column in description or []:
name, native_type = column[:2]
for dbapi_type in TypeEnum:
if native_type == getattr(dialect.dbapi, dbapi_type.value, None):
type_ = type_map[dbapi_type]
break
else:
# fallback to string
type_ = ColumnType.STR
columns.append(ColumnMetadata(name=name, type=type_))
return columns
def run_query(query: Query) -> List[Tuple[str, List[ColumnMetadata], Stream]]:
"""
Run a query and return its results.
For each statement we return a tuple with the statement SQL, a description of the
columns (name and type) and a stream of rows (tuples).
"""
engine = create_engine(query.database.URI, **query.database.extra_params)
connection = engine.connect()
output: List[Tuple[str, List[ColumnMetadata], Stream]] = []
statements = sqlparse.parse(query.executed_query)
for statement in statements:
# Druid doesn't like statements that end in a semicolon...
sql = str(statement).strip().rstrip(";")
results = connection.execute(text(sql))
stream = (tuple(row) for row in results)
columns = get_columns_from_description(
results.cursor.description,
engine.dialect,
)
output.append((sql, columns, stream))
return output
def process_query(
session: Session,
settings: Settings,
query: Query,
) -> QueryWithResults:
"""
Process a query.
"""
query.scheduled = datetime.now(timezone.utc)
query.state = QueryState.SCHEDULED
query.executed_query = query.submitted_query
errors = []
query.started = datetime.now(timezone.utc)
try:
root = []
for sql, columns, stream in run_query(query):
rows = list(stream)
root.append(
StatementResults(
sql=sql,
columns=columns,
rows=rows,
row_count=len(rows),
),
)
results = QueryResults(__root__=root)
query.state = QueryState.FINISHED
query.progress = 1.0
except Exception as ex: # pylint: disable=broad-except
results = QueryResults(__root__=[])
query.state = QueryState.FAILED
errors = [str(ex)]
query.finished = datetime.now(timezone.utc)
session.add(query)
session.commit()
session.refresh(query)
settings.results_backend.add(str(query.id), results.json())
return QueryWithResults(results=results, errors=errors, **query.dict())
| [
"sqlmodel.create_engine"
] | [((1834, 1898), 'sqlmodel.create_engine', 'create_engine', (['query.database.URI'], {}), '(query.database.URI, **query.database.extra_params)\n', (1847, 1898), False, 'from sqlmodel import Session, create_engine\n'), ((2015, 2051), 'sqlparse.parse', 'sqlparse.parse', (['query.executed_query'], {}), '(query.executed_query)\n', (2029, 2051), False, 'import sqlparse\n'), ((2657, 2683), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (2669, 2683), False, 'from datetime import datetime, timezone\n'), ((2809, 2835), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (2821, 2835), False, 'from datetime import datetime, timezone\n'), ((3490, 3516), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (3502, 3516), False, 'from datetime import datetime, timezone\n'), ((3197, 3224), 'datajunction.models.query.QueryResults', 'QueryResults', ([], {'__root__': 'root'}), '(__root__=root)\n', (3209, 3224), False, 'from datajunction.models.query import ColumnMetadata, Query, QueryResults, QueryState, QueryWithResults, StatementResults\n'), ((1479, 1516), 'datajunction.models.query.ColumnMetadata', 'ColumnMetadata', ([], {'name': 'name', 'type': 'type_'}), '(name=name, type=type_)\n', (1493, 1516), False, 'from datajunction.models.query import ColumnMetadata, Query, QueryResults, QueryState, QueryWithResults, StatementResults\n'), ((2239, 2248), 'sqlalchemy.text', 'text', (['sql'], {}), '(sql)\n', (2243, 2248), False, 'from sqlalchemy import text\n'), ((3375, 3400), 'datajunction.models.query.QueryResults', 'QueryResults', ([], {'__root__': '[]'}), '(__root__=[])\n', (3387, 3400), False, 'from datajunction.models.query import ColumnMetadata, Query, QueryResults, QueryState, QueryWithResults, StatementResults\n')] |
from datetime import datetime
from typing import List, Optional
from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
class ObserverBase(SQLModel):
phone: str
email: str
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Observer(ObserverBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
measurements: List["Measurement"] = Relationship(
back_populates="observer", sa_relationship_kwargs={"cascade": "all,delete"}
)
class ObserverCreate(ObserverBase):
pass
class ObserverRead(ObserverBase):
id: int
class MeasurementBase(SQLModel):
temperaturescale: str
temperature: int
organizationid: int
siteid: int
date_time: Optional[datetime] = Field(
sa_column=Column(DateTime, default=datetime.utcnow)
)
observer_id: Optional[int] = Field(default=None, foreign_key="observer.id")
class Config:
anystr_strip_whitespace = True
anystr_lower = True
class Measurement(MeasurementBase, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
observer: Optional[Observer] = Relationship(back_populates="measurements")
class MeasurementCreate(MeasurementBase):
pass
class MeasurementRead(MeasurementBase):
id: int
| [
"sqlmodel.Field",
"sqlmodel.Column",
"sqlmodel.Relationship"
] | [((350, 387), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (355, 387), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((428, 521), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""observer"""', 'sa_relationship_kwargs': "{'cascade': 'all,delete'}"}), "(back_populates='observer', sa_relationship_kwargs={'cascade':\n 'all,delete'})\n", (440, 521), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((891, 937), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""observer.id"""'}), "(default=None, foreign_key='observer.id')\n", (896, 937), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((1098, 1135), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (1103, 1135), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((1171, 1214), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""measurements"""'}), "(back_populates='measurements')\n", (1183, 1214), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n'), ((810, 851), 'sqlmodel.Column', 'Column', (['DateTime'], {'default': 'datetime.utcnow'}), '(DateTime, default=datetime.utcnow)\n', (816, 851), False, 'from sqlmodel import Column, DateTime, Field, Relationship, SQLModel\n')] |
from __future__ import annotations
import typing as t
import strawberry
from sqlmodel import Field, Relationship, SQLModel
from .schema_generation import create_array_relationship_resolver, create_query_root
class AddressModel(SQLModel, table=True):
__tablename__ = "addresses"
id: t.Optional[int] = Field(
default=None, primary_key=True, index=True, nullable=False
)
street: str
state: str
country: str
zip: str
users: t.List["UserModel"] = Relationship(back_populates="address")
class UserModel(SQLModel, table=True):
__tablename__ = "users"
id: t.Optional[int] = Field(
default=None, primary_key=True, index=True, nullable=False
)
age: int
password: t.Optional[str]
address_id: t.Optional[int] = Field(default=None, foreign_key="addresses.id")
address: t.Optional[AddressModel] = Relationship(back_populates="users")
@strawberry.experimental.pydantic.type(
UserModel, fields=["id", "age", "password", "address_id", "address"]
)
class User:
pass
@strawberry.experimental.pydantic.type(
AddressModel, fields=["id", "street", "state", "country", "zip"]
)
class Address:
users: t.List[create_array_relationship_resolver(User)] = strawberry.field(
resolver=create_array_relationship_resolver(User)
)
Query = create_query_root([User, Address])
schema = strawberry.Schema(query=Query)
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((906, 1017), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['UserModel'], {'fields': "['id', 'age', 'password', 'address_id', 'address']"}), "(UserModel, fields=['id', 'age',\n 'password', 'address_id', 'address'])\n", (943, 1017), False, 'import strawberry\n'), ((1044, 1151), 'strawberry.experimental.pydantic.type', 'strawberry.experimental.pydantic.type', (['AddressModel'], {'fields': "['id', 'street', 'state', 'country', 'zip']"}), "(AddressModel, fields=['id', 'street',\n 'state', 'country', 'zip'])\n", (1081, 1151), False, 'import strawberry\n'), ((1368, 1398), 'strawberry.Schema', 'strawberry.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (1385, 1398), False, 'import strawberry\n'), ((313, 378), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'index': '(True)', 'nullable': '(False)'}), '(default=None, primary_key=True, index=True, nullable=False)\n', (318, 378), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((487, 525), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""address"""'}), "(back_populates='address')\n", (499, 525), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((621, 686), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'index': '(True)', 'nullable': '(False)'}), '(default=None, primary_key=True, index=True, nullable=False)\n', (626, 686), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((778, 825), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""addresses.id"""'}), "(default=None, foreign_key='addresses.id')\n", (783, 825), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((866, 902), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""users"""'}), "(back_populates='users')\n", (878, 902), False, 'from sqlmodel import Field, Relationship, SQLModel\n')] |
# -*- coding: utf-8 -*-
# @Time : 2022/1/2 17:50
# @Author : WhaleFall
# @Site :
# @File : __init__.py.py
# @Software: PyCharm
# Flask 应用初始化,工厂函数
from flask import Flask
from flask_login import LoginManager
from config import config
from sqlmodel import create_engine,SQLModel
# 实例化一个登录组件
login_manager = LoginManager()
login_manager.login_view = 'auth.login' # 登录的蓝图
login_manager.login_message = "请小可爱先登录!"
def create_app(config_name):
"""
工厂函数,指定一个配置类型
程序入口文件千万不能和 `app` 重名,惨痛教训!!
"""
app = Flask(__name__) # 实例化
app.config.from_object(config[config_name]) # 从配置类读取配置
config[config_name].init_app(app) # 调用静态方法初始化组件
# 注册组件
login_manager.init_app(app) # 登录组件
# 数据库
from app import models
app.config['engine'] = create_engine(config[config_name].SQLALCHEMY_DATABASE_URI, echo=True)
# 注册蓝图
from .main import main
app.register_blueprint(main)
from .dali import dali
app.register_blueprint(dali)
from .admin import admin
app.register_blueprint(admin)
return app
| [
"sqlmodel.create_engine"
] | [((307, 321), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (319, 321), False, 'from flask_login import LoginManager\n'), ((518, 533), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (523, 533), False, 'from flask import Flask\n'), ((770, 839), 'sqlmodel.create_engine', 'create_engine', (['config[config_name].SQLALCHEMY_DATABASE_URI'], {'echo': '(True)'}), '(config[config_name].SQLALCHEMY_DATABASE_URI, echo=True)\n', (783, 839), False, 'from sqlmodel import create_engine, SQLModel\n')] |
from typing import List
from app.schemas.role import IRoleCreate, IRoleUpdate
from app.models.role import Role
from app.models.user import User
from app.crud.base_sqlmodel import CRUDBase
from sqlmodel.ext.asyncio.session import AsyncSession
from datetime import datetime
from sqlmodel import select
from uuid import UUID
class CRUDRole(CRUDBase[Role, IRoleCreate, IRoleUpdate]):
async def get_role_by_name(self, db_session: AsyncSession, *, name: str) -> Role:
role = await db_session.exec(select(Role).where(Role.name == name))
return role.first()
async def add_role_to_user(self, db_session: AsyncSession, *, user: User, role_id: UUID) -> Role:
role = await super().get(db_session, role_id)
role.users.append(user)
db_session.add(role)
await db_session.commit()
await db_session.refresh(role)
return role
role = CRUDRole(Role)
| [
"sqlmodel.select"
] | [((504, 516), 'sqlmodel.select', 'select', (['Role'], {}), '(Role)\n', (510, 516), False, 'from sqlmodel import select\n')] |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends, Query, Response
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import func, select
from icon_governance.db import get_session
from icon_governance.models.delegations import Delegation
router = APIRouter()
@router.get("/governance/delegations/{address}")
async def get_delegations(
response: Response,
address: str,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of delegations."""
query = (
select(Delegation)
.where(Delegation.address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([func.count(Delegation.address)]).where(Delegation.address == address)
result_count = await session.execute(query_count)
total_count = str(result_count.scalars().all()[0])
response.headers["x-total-count"] = total_count
return delegations
@router.get("/governance/votes/{address}")
async def get_delegations(
address: str,
response: Response,
skip: int = Query(0),
limit: int = Query(100, gt=0, lt=101),
session: AsyncSession = Depends(get_session),
) -> List[Delegation]:
"""Return list of votes."""
query = (
select(Delegation)
.where(Delegation.prep_address == address)
.offset(skip)
.limit(limit)
.order_by(Delegation.value.desc())
)
result = await session.execute(query)
delegations = result.scalars().all()
# Check if exists
if len(delegations) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
# Return the count in header
query_count = select([func.count(Delegation.address)]).where(Delegation.prep_address == address)
result_count = await session.execute(query_count)
total_count = str(result_count.scalars().all()[0])
response.headers["x-total-count"] = total_count
return delegations
| [
"sqlmodel.func.count",
"sqlmodel.select"
] | [((340, 351), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (349, 351), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((488, 496), 'fastapi.Query', 'Query', (['(0)'], {}), '(0)\n', (493, 496), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((515, 539), 'fastapi.Query', 'Query', (['(100)'], {'gt': '(0)', 'lt': '(101)'}), '(100, gt=0, lt=101)\n', (520, 539), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((569, 589), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (576, 589), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1479, 1487), 'fastapi.Query', 'Query', (['(0)'], {}), '(0)\n', (1484, 1487), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1506, 1530), 'fastapi.Query', 'Query', (['(100)'], {'gt': '(0)', 'lt': '(101)'}), '(100, gt=0, lt=101)\n', (1511, 1530), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1560, 1580), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (1567, 1580), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((801, 824), 'icon_governance.models.delegations.Delegation.value.desc', 'Delegation.value.desc', ([], {}), '()\n', (822, 824), False, 'from icon_governance.models.delegations import Delegation\n'), ((984, 1033), 'fastapi.Response', 'Response', ([], {'status_code': 'HTTPStatus.NO_CONTENT.value'}), '(status_code=HTTPStatus.NO_CONTENT.value)\n', (992, 1033), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1791, 1814), 'icon_governance.models.delegations.Delegation.value.desc', 'Delegation.value.desc', ([], {}), '()\n', (1812, 1814), False, 'from icon_governance.models.delegations import Delegation\n'), ((1974, 2023), 'fastapi.Response', 'Response', ([], {'status_code': 'HTTPStatus.NO_CONTENT.value'}), '(status_code=HTTPStatus.NO_CONTENT.value)\n', (1982, 2023), False, 'from fastapi import APIRouter, Depends, Query, Response\n'), ((1094, 1124), 'sqlmodel.func.count', 'func.count', (['Delegation.address'], {}), '(Delegation.address)\n', (1104, 1124), False, 'from sqlmodel import func, select\n'), ((2084, 2114), 'sqlmodel.func.count', 'func.count', (['Delegation.address'], {}), '(Delegation.address)\n', (2094, 2114), False, 'from sqlmodel import func, select\n'), ((674, 692), 'sqlmodel.select', 'select', (['Delegation'], {}), '(Delegation)\n', (680, 692), False, 'from sqlmodel import func, select\n'), ((1659, 1677), 'sqlmodel.select', 'select', (['Delegation'], {}), '(Delegation)\n', (1665, 1677), False, 'from sqlmodel import func, select\n')] |
"""add power
Revision ID: 135aec058ce1
Revises: 4400883a1249
Create Date: 2021-12-28 11:38:37.439383
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "135aec058ce1"
down_revision = "4400883a1249"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("preps", sa.Column("power", sqlmodel.sql.sqltypes.AutoString(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("preps", "power")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((613, 645), 'alembic.op.drop_column', 'op.drop_column', (['"""preps"""', '"""power"""'], {}), "('preps', 'power')\n", (627, 645), False, 'from alembic import op\n'), ((437, 471), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (469, 471), False, 'import sqlmodel\n')] |
from __future__ import annotations
from sqlmodel import Session, select
from src.models import User
from src.schemas.user import CreateUser
from src.services.auth import check_password_hash, generate_password_hash
def get_user_by_email(db: Session, email: str) -> User | None:
"""
Finds a user with the given email in the database and returns it.
Returns None if a user with the email does not exists.
"""
stmt = select(User).where(User.email == email)
return db.exec(stmt).first()
def create_user(user_data: CreateUser) -> User:
"""
Creates a user and returns it.
"""
password = generate_password_hash(user_data.password)
created_user = User(name=user_data.name, email=user_data.email, password=password)
return created_user
def get_user_by_email_and_password(
db: Session, email: str, password: str
) -> User | None:
"""
Checks if the given email and password are valid, and returns the user.
Returns None if the email or password is wrong.
"""
user = get_user_by_email(db, email)
if user is None or not check_password_hash(password, user.password):
return None
return user
| [
"sqlmodel.select"
] | [((627, 669), 'src.services.auth.generate_password_hash', 'generate_password_hash', (['user_data.password'], {}), '(user_data.password)\n', (649, 669), False, 'from src.services.auth import check_password_hash, generate_password_hash\n'), ((690, 757), 'src.models.User', 'User', ([], {'name': 'user_data.name', 'email': 'user_data.email', 'password': 'password'}), '(name=user_data.name, email=user_data.email, password=password)\n', (694, 757), False, 'from src.models import User\n'), ((437, 449), 'sqlmodel.select', 'select', (['User'], {}), '(User)\n', (443, 449), False, 'from sqlmodel import Session, select\n'), ((1094, 1138), 'src.services.auth.check_password_hash', 'check_password_hash', (['password', 'user.password'], {}), '(password, user.password)\n', (1113, 1138), False, 'from src.services.auth import check_password_hash, generate_password_hash\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel
class Example(SQLModel, table=True):
"""测试一下"""
id: Optional[int] = Field(default=None, primary_key=True)
message: str
| [
"sqlmodel.Field"
] | [((145, 182), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (150, 182), False, 'from sqlmodel import Field, SQLModel\n')] |
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
from sqlmodel import create_engine
def main():
engine = create_engine('postgresql://michaelstatt@localhost/sqlalchemy')
file_name = Path(__file__).parent / 'test.csv'
test_conn = engine.raw_connection()
try:
with test_conn.cursor() as curs:
with open(file_name) as f:
curs.copy_from(f, 'test', null="None", columns=['name', 'tags'], sep='|')
f.seek(0)
print(f.read())
test_conn.commit()
finally:
test_conn.close()
if __name__ == '__main__':
main()
| [
"sqlmodel.create_engine"
] | [((685, 748), 'sqlmodel.create_engine', 'create_engine', (['"""postgresql://michaelstatt@localhost/sqlalchemy"""'], {}), "('postgresql://michaelstatt@localhost/sqlalchemy')\n", (698, 748), False, 'from sqlmodel import create_engine\n'), ((765, 779), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (769, 779), False, 'from pathlib import Path\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import root_validator
from datetime import datetime
# {
# "user_id": 1,
# "start_time": "2022-01-19T08:30:00.000Z",
# "end_time": "2022-01-19T09:30:00.000Z",
# "client_id": 1,
# "epic_id": 1,
# "count_hours": 0,
# "count_days": 0,
# "month": 0,
# "year": 0
# }
class TimeLog(SQLModel, table=True):
"""Create an SQLModel for timelogs"""
id: Optional[int] = Field(default=None, primary_key=True)
user_id: int = Field(foreign_key="app_db.appuser.id")
start_time: datetime
end_time: datetime
epic_id: int = Field(foreign_key="app_db.epic.id")
count_hours: float
count_days: float
month: int
year: int
epic_area_id: int = Field(foreign_key="app_db.epicarea.id")
created_at: datetime
updated_at: datetime
is_locked: bool = False
__table_args__ = {"schema": "app_db"}
@root_validator(pre=True)
def check_time_delta(cls, values):
assert (
values["start_time"] < values["end_time"]
), "start_time must be smaller then end_time"
return values
# @validator("count_hours", always=True)
def daily_hours(cls, hours_input):
assert hours_input < 12, "user worked over 12 hours"
return hours_input
# @validator("year", always=True)
def valid_year(cls, year_input):
assert year_input in range(
2021, datetime.now().year + 1
), "year value not in range [2021, current year]"
return year_input
| [
"sqlmodel.Field"
] | [((473, 510), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (478, 510), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((530, 568), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.appuser.id"""'}), "(foreign_key='app_db.appuser.id')\n", (535, 568), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((636, 671), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.epic.id"""'}), "(foreign_key='app_db.epic.id')\n", (641, 671), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((770, 809), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""app_db.epicarea.id"""'}), "(foreign_key='app_db.epicarea.id')\n", (775, 809), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((937, 961), 'pydantic.root_validator', 'root_validator', ([], {'pre': '(True)'}), '(pre=True)\n', (951, 961), False, 'from pydantic import root_validator\n'), ((1451, 1465), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1463, 1465), False, 'from datetime import datetime\n')] |
from http import HTTPStatus
from typing import List
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlmodel import select
from starlette.responses import Response
from icon_governance.db import get_session
from icon_governance.models.preps import Prep
router = APIRouter()
@router.get("/preps")
async def get_preps(
session: AsyncSession = Depends(get_session),
) -> List[Prep]:
"""Return list of preps which is limitted to 150 records so no skip."""
result = await session.execute(select(Prep).order_by(Prep.delegated.desc()))
preps = result.scalars().all()
return preps
@router.get("/preps/{address}")
async def get_prep(
address: str,
session: AsyncSession = Depends(get_session),
) -> List[Prep]:
"""Return a single prep."""
result = await session.execute(select(Prep).where(Prep.address == address))
preps = result.scalars().all()
if len(preps) == 0:
return Response(status_code=HTTPStatus.NO_CONTENT.value)
return preps
| [
"sqlmodel.select"
] | [((346, 357), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (355, 357), False, 'from fastapi import APIRouter, Depends\n'), ((431, 451), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (438, 451), False, 'from fastapi import APIRouter, Depends\n'), ((780, 800), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (787, 800), False, 'from fastapi import APIRouter, Depends\n'), ((1006, 1055), 'starlette.responses.Response', 'Response', ([], {'status_code': 'HTTPStatus.NO_CONTENT.value'}), '(status_code=HTTPStatus.NO_CONTENT.value)\n', (1014, 1055), False, 'from starlette.responses import Response\n'), ((603, 624), 'icon_governance.models.preps.Prep.delegated.desc', 'Prep.delegated.desc', ([], {}), '()\n', (622, 624), False, 'from icon_governance.models.preps import Prep\n'), ((581, 593), 'sqlmodel.select', 'select', (['Prep'], {}), '(Prep)\n', (587, 593), False, 'from sqlmodel import select\n'), ((886, 898), 'sqlmodel.select', 'select', (['Prep'], {}), '(Prep)\n', (892, 898), False, 'from sqlmodel import select\n')] |
"""
Rewards are Txs with the claim-iscore method but since this service only listens for
new Txs, this job backfills the value and iscore from the logs service.
"""
import json
from requests import RequestException, get
from sqlmodel import func, select
from icon_governance.config import settings
from icon_governance.log import logger
from icon_governance.models.rewards import Reward
from icon_governance.utils.rpc import convert_hex_int
def get_iscore_value(tx_hash):
"""Get rewards value and Tx from logs service."""
try:
response = get(f"{settings.LOGS_SERVICE_URL}/api/v1/logs?transaction_hash={tx_hash}")
except RequestException as e:
logger.info(f"Exception in iscore - \n{e} - \n{tx_hash}")
return None, None
if response.status_code == 200:
try:
data = json.loads(response.json()[0]["data"])
return convert_hex_int(data[0]) / 1e18, convert_hex_int(data[1]) / 1e18
except Exception as e:
logger.info(f"Exception in iscore - \n{e} - \n{tx_hash}")
return None, None
else:
logger.info(f"Could not find Tx hash from logs service {tx_hash}")
def get_rewards(session):
"""
Cron to get all the values and iscores for rewards txs. Works by getting all the
iscore distributions which are picked up by the transactions processor and insert
them into a DB. The values are then inserted with this cron job by querying for
rewards that have no value.
"""
count = (
session.execute(select([func.count(Reward.address)]).where(Reward.value == None))
.scalars()
.all()
)
logger.info(f"Found {count} number of rewards records.")
chunk_size = 10
for i in range(0, int(count[0] / chunk_size) + 1):
rewards = (
session.execute(select(Reward).where(Reward.value == None).limit(chunk_size))
.scalars()
.all()
)
for r in rewards:
# Get value from logs service
iscore, value = get_iscore_value(tx_hash=r.tx_hash)
if iscore is None:
continue
r.value = value
r.iscore = iscore
session.add(r)
try:
session.commit()
except:
session.rollback()
raise
| [
"sqlmodel.func.count",
"sqlmodel.select"
] | [((1647, 1703), 'icon_governance.log.logger.info', 'logger.info', (['f"""Found {count} number of rewards records."""'], {}), "(f'Found {count} number of rewards records.')\n", (1658, 1703), False, 'from icon_governance.log import logger\n'), ((558, 632), 'requests.get', 'get', (['f"""{settings.LOGS_SERVICE_URL}/api/v1/logs?transaction_hash={tx_hash}"""'], {}), "(f'{settings.LOGS_SERVICE_URL}/api/v1/logs?transaction_hash={tx_hash}')\n", (561, 632), False, 'from requests import RequestException, get\n'), ((1100, 1166), 'icon_governance.log.logger.info', 'logger.info', (['f"""Could not find Tx hash from logs service {tx_hash}"""'], {}), "(f'Could not find Tx hash from logs service {tx_hash}')\n", (1111, 1166), False, 'from icon_governance.log import logger\n'), ((675, 734), 'icon_governance.log.logger.info', 'logger.info', (['f"""Exception in iscore - \n{e} - \n{tx_hash}"""'], {}), '(f"""Exception in iscore - \n{e} - \n{tx_hash}""")\n', (686, 734), False, 'from icon_governance.log import logger\n'), ((994, 1053), 'icon_governance.log.logger.info', 'logger.info', (['f"""Exception in iscore - \n{e} - \n{tx_hash}"""'], {}), '(f"""Exception in iscore - \n{e} - \n{tx_hash}""")\n', (1005, 1053), False, 'from icon_governance.log import logger\n'), ((886, 910), 'icon_governance.utils.rpc.convert_hex_int', 'convert_hex_int', (['data[0]'], {}), '(data[0])\n', (901, 910), False, 'from icon_governance.utils.rpc import convert_hex_int\n'), ((919, 943), 'icon_governance.utils.rpc.convert_hex_int', 'convert_hex_int', (['data[1]'], {}), '(data[1])\n', (934, 943), False, 'from icon_governance.utils.rpc import convert_hex_int\n'), ((1544, 1570), 'sqlmodel.func.count', 'func.count', (['Reward.address'], {}), '(Reward.address)\n', (1554, 1570), False, 'from sqlmodel import func, select\n'), ((1828, 1842), 'sqlmodel.select', 'select', (['Reward'], {}), '(Reward)\n', (1834, 1842), False, 'from sqlmodel import func, select\n')] |
from typing import Optional
from sqlmodel import Session
from db.base import engine
from db.models import Plant
def create_plants():
plant_1 = Plant(name="Hebe")
plant_2 = Plant(name="Astilbe")
plant_3 = Plant(name="Sedum")
plant_4 = Plant(name="Helenium")
plant_5 = Plant(name="Heather")
session = Session(engine)
session.add(plant_1)
session.add(plant_2)
session.add(plant_3)
session.add(plant_4)
session.add(plant_5)
session.commit()
session.close()
def main():
create_plants()
if __name__ == "__main__":
main() | [
"sqlmodel.Session"
] | [((150, 168), 'db.models.Plant', 'Plant', ([], {'name': '"""Hebe"""'}), "(name='Hebe')\n", (155, 168), False, 'from db.models import Plant\n'), ((183, 204), 'db.models.Plant', 'Plant', ([], {'name': '"""Astilbe"""'}), "(name='Astilbe')\n", (188, 204), False, 'from db.models import Plant\n'), ((219, 238), 'db.models.Plant', 'Plant', ([], {'name': '"""Sedum"""'}), "(name='Sedum')\n", (224, 238), False, 'from db.models import Plant\n'), ((253, 275), 'db.models.Plant', 'Plant', ([], {'name': '"""Helenium"""'}), "(name='Helenium')\n", (258, 275), False, 'from db.models import Plant\n'), ((290, 311), 'db.models.Plant', 'Plant', ([], {'name': '"""Heather"""'}), "(name='Heather')\n", (295, 311), False, 'from db.models import Plant\n'), ((327, 342), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (334, 342), False, 'from sqlmodel import Session\n')] |
from datetime import datetime
from sqlmodel import Field, SQLModel, Relationship
from typing import Optional
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import JSON
class TextInferenceBase(SQLModel):
text: str = Field(nullable=False, index=True)
class TextInference(TextInferenceBase, table=True):
id: Optional[int] = Field(default=None, nullable=False, primary_key=True)
result: dict[str, float] = Field(nullable=False, sa_column=Column(JSON))
created_at: Optional[datetime]
updated_at: Optional[datetime]
created_by_id: Optional[int] = Field(default=None, foreign_key="user.id")
created_by: "User" = Relationship(
sa_relationship_kwargs={
"lazy": "selectin",
"primaryjoin": "TextInference.created_by_id == User.id",
}
)
| [
"sqlmodel.Field",
"sqlmodel.Relationship"
] | [((240, 273), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)', 'index': '(True)'}), '(nullable=False, index=True)\n', (245, 273), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((352, 405), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'nullable': '(False)', 'primary_key': '(True)'}), '(default=None, nullable=False, primary_key=True)\n', (357, 405), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((588, 630), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'foreign_key': '"""user.id"""'}), "(default=None, foreign_key='user.id')\n", (593, 630), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((656, 774), 'sqlmodel.Relationship', 'Relationship', ([], {'sa_relationship_kwargs': "{'lazy': 'selectin', 'primaryjoin': 'TextInference.created_by_id == User.id'}"}), "(sa_relationship_kwargs={'lazy': 'selectin', 'primaryjoin':\n 'TextInference.created_by_id == User.id'})\n", (668, 774), False, 'from sqlmodel import Field, SQLModel, Relationship\n'), ((469, 481), 'sqlalchemy.Column', 'Column', (['JSON'], {}), '(JSON)\n', (475, 481), False, 'from sqlalchemy import Column\n')] |
from sqlite3.dbapi2 import Timestamp, adapt
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import validator
from datetime import datetime, date
from fastapi import HTTPException
import re
class User(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
short_name: str
first_name: str
last_name: str
email: str
role_id: int
team_id: Optional[int] = None
start_date: date
created_at: datetime
updated_at: datetime
is_active: bool
@validator("short_name", always=True)
def valid_short_name(cls, sn_input):
assert sn_input.isalpha(), "only alphabet letters allowed in short name"
assert sn_input.islower(), "short name contains small letters only"
return sn_input
@validator("first_name", always=True)
def valid_first_name(cls, first_name):
assert first_name.replace(
" ", ""
).isalpha(), "only alphabet letters allowed in first name"
if first_name[0].isupper() == False:
raise HTTPException(
status_code=400, detail="first name should start with a capital letter"
)
return first_name
@validator("last_name", always=True)
def valid_last_name(cls, ln_input):
assert ln_input.replace(
" ", ""
).isalpha(), "only alphabet letters allowed in last name"
return ln_input
@validator("email", always=True)
def valid_email(cls, email_input):
regex = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
assert re.fullmatch(regex, email_input), "email format incorrect"
return email_input
| [
"sqlmodel.Field"
] | [((287, 324), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (292, 324), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((547, 583), 'pydantic.validator', 'validator', (['"""short_name"""'], {'always': '(True)'}), "('short_name', always=True)\n", (556, 583), False, 'from pydantic import validator\n'), ((812, 848), 'pydantic.validator', 'validator', (['"""first_name"""'], {'always': '(True)'}), "('first_name', always=True)\n", (821, 848), False, 'from pydantic import validator\n'), ((1226, 1261), 'pydantic.validator', 'validator', (['"""last_name"""'], {'always': '(True)'}), "('last_name', always=True)\n", (1235, 1261), False, 'from pydantic import validator\n'), ((1451, 1482), 'pydantic.validator', 'validator', (['"""email"""'], {'always': '(True)'}), "('email', always=True)\n", (1460, 1482), False, 'from pydantic import validator\n'), ((1608, 1640), 're.fullmatch', 're.fullmatch', (['regex', 'email_input'], {}), '(regex, email_input)\n', (1620, 1640), False, 'import re\n'), ((1077, 1168), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""first name should start with a capital letter"""'}), "(status_code=400, detail=\n 'first name should start with a capital letter')\n", (1090, 1168), False, 'from fastapi import HTTPException\n')] |
from enum import Enum
from typing import Dict, Optional, Union
from sqlmodel import Field, SQLModel, create_engine
# https://stackoverflow.com/questions/65209934/pydantic-enum-field-does-not-get-converted-to-string
class EventType(str, Enum):
BUILD_IMAGE = 'build_image'
CREATE_CONTAINER = 'create_container'
class Event(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
event_type: EventType
event_payload: str
event_status: Optional[int] = Field(default=None)
| [
"sqlmodel.Field"
] | [((378, 415), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (383, 415), False, 'from sqlmodel import Field, SQLModel, create_engine\n'), ((499, 518), 'sqlmodel.Field', 'Field', ([], {'default': 'None'}), '(default=None)\n', (504, 518), False, 'from sqlmodel import Field, SQLModel, create_engine\n')] |
from sqlmodel import Session, select
from .models import Person, engine
def create_person(nome: str, idade:int):
person = Person(nome=nome, idade=idade)
with Session(engine) as session:
session.add(person)
session.commit()
session.refresh(person)
return person
def all_person():
query = select(Person)
with Session(engine) as session:
result = session.execute(query).scalars().all()
return result | [
"sqlmodel.Session",
"sqlmodel.select"
] | [((335, 349), 'sqlmodel.select', 'select', (['Person'], {}), '(Person)\n', (341, 349), False, 'from sqlmodel import Session, select\n'), ((167, 182), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (174, 182), False, 'from sqlmodel import Session, select\n'), ((363, 378), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (370, 378), False, 'from sqlmodel import Session, select\n')] |
from decimal import Decimal
from unittest.mock import patch
from sqlmodel import create_engine
from ...conftest import get_testing_print_function
expected_calls = [
[
"Hero 1:",
{
"name": "Deadpond",
"age": None,
"id": 1,
"secret_name": "<NAME>",
"money": Decimal("1.100"),
},
],
[
"Hero 2:",
{
"name": "Rusty-Man",
"age": 48,
"id": 3,
"secret_name": "<NAME>",
"money": Decimal("2.200"),
},
],
["Total money: 3.300"],
]
def test_tutorial(clear_sqlmodel):
from docs_src.advanced.decimal import tutorial001 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)
calls = []
new_print = get_testing_print_function(calls)
with patch("builtins.print", new=new_print):
mod.main()
assert calls == expected_calls
| [
"sqlmodel.create_engine"
] | [((760, 789), 'sqlmodel.create_engine', 'create_engine', (['mod.sqlite_url'], {}), '(mod.sqlite_url)\n', (773, 789), False, 'from sqlmodel import create_engine\n'), ((866, 904), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {'new': 'new_print'}), "('builtins.print', new=new_print)\n", (871, 904), False, 'from unittest.mock import patch\n'), ((914, 924), 'docs_src.advanced.decimal.tutorial001.main', 'mod.main', ([], {}), '()\n', (922, 924), True, 'from docs_src.advanced.decimal import tutorial001 as mod\n'), ((339, 355), 'decimal.Decimal', 'Decimal', (['"""1.100"""'], {}), "('1.100')\n", (346, 355), False, 'from decimal import Decimal\n'), ((545, 561), 'decimal.Decimal', 'Decimal', (['"""2.200"""'], {}), "('2.200')\n", (552, 561), False, 'from decimal import Decimal\n')] |
from create_db import Student
from sqlmodel import Session, create_engine
student_1 = Student(id=1, first_name="Misal", last_name="Gupta", email="<EMAIL>")
student_2 = Student(id=2, first_name="Vivek", last_name="Kumar", email="<EMAIL>")
student_3 = Student(id=3, first_name="Himesh", last_name="Mahto", email="<EMAIL>")
sqlite_url = "sqlite:///school.db"
engine = create_engine(sqlite_url, echo=True)
session = Session(engine)
session.add(student_1)
session.add(student_2)
session.add(student_3)
session.commit()
session.close()
| [
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((87, 156), 'create_db.Student', 'Student', ([], {'id': '(1)', 'first_name': '"""Misal"""', 'last_name': '"""Gupta"""', 'email': '"""<EMAIL>"""'}), "(id=1, first_name='Misal', last_name='Gupta', email='<EMAIL>')\n", (94, 156), False, 'from create_db import Student\n'), ((169, 238), 'create_db.Student', 'Student', ([], {'id': '(2)', 'first_name': '"""Vivek"""', 'last_name': '"""Kumar"""', 'email': '"""<EMAIL>"""'}), "(id=2, first_name='Vivek', last_name='Kumar', email='<EMAIL>')\n", (176, 238), False, 'from create_db import Student\n'), ((251, 321), 'create_db.Student', 'Student', ([], {'id': '(3)', 'first_name': '"""Himesh"""', 'last_name': '"""Mahto"""', 'email': '"""<EMAIL>"""'}), "(id=3, first_name='Himesh', last_name='Mahto', email='<EMAIL>')\n", (258, 321), False, 'from create_db import Student\n'), ((367, 403), 'sqlmodel.create_engine', 'create_engine', (['sqlite_url'], {'echo': '(True)'}), '(sqlite_url, echo=True)\n', (380, 403), False, 'from sqlmodel import Session, create_engine\n'), ((414, 429), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (421, 429), False, 'from sqlmodel import Session, create_engine\n')] |
import re
from datetime import datetime
from enum import Enum
from functools import lru_cache
from inspect import Parameter, signature
from typing import (
TYPE_CHECKING,
Any,
Callable,
Generator,
Generic,
List,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from uuid import UUID
from fastapi import Depends, File, Form, Request, UploadFile, params
from fastapi_utils.api_model import APIModel
from fastapi_utils.camelcase import snake2camel
from makefun import wraps
from pydantic import (
BaseModel as PydanticBaseModel,
ConstrainedInt,
ConstrainedStr,
create_model,
)
from pydantic.datetime_parse import parse_datetime
from pydantic.fields import Undefined
from pydantic.main import ModelMetaclass
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.functions import FunctionElement
from sqlalchemy.sql.schema import Column
from sqlalchemy.types import DateTime
from sqlmodel import Field, SQLModel
from starlette.datastructures import MultiDict
from joj.horse.utils.base import is_uuid
from joj.horse.utils.errors import ErrorCode
if TYPE_CHECKING:
Model = TypeVar("Model", bound="BaseModel")
class BaseModel(APIModel):
""""""
class Config:
validate_all = True
class Operation(Enum):
Create = "Create"
Read = "Read"
Update = "Update"
Delete = "Delete"
class NoneNegativeInt(ConstrainedInt):
ge = 0
# class PositiveInt(ConstrainedInt):
# gt = 0
class PaginationLimit(NoneNegativeInt):
le = 500
class LongStr(ConstrainedStr):
max_length = 256
class NoneEmptyStr(ConstrainedStr):
min_length = 1
class SearchQueryStr(ConstrainedStr):
min_length = 2
class NoneEmptyLongStr(LongStr, NoneEmptyStr):
pass
class UserInputURL(str):
URL_RE = re.compile(r"[\w-]+", flags=re.ASCII)
@classmethod
def __get_validators__(
cls,
) -> Generator[Callable[[Union[str, Any]], str], None, None]:
yield cls.validate
@classmethod
def validate(cls, v: Optional[str]) -> LongStr:
if not v:
return LongStr("")
if is_uuid(v):
raise ValueError("url can not be uuid")
if not UserInputURL.URL_RE.fullmatch(v):
raise ValueError("url can only contains [a-zA-Z0-9_-]")
return LongStr(v)
class LongText(ConstrainedStr):
max_length = 65536
class utcnow(FunctionElement):
type = DateTime()
@compiles(utcnow, "postgresql")
def pg_utcnow(element: Any, compiler: Any, **kwargs: Any) -> str:
return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
@compiles(utcnow, "mssql")
def ms_utcnow(element: Any, compiler: Any, **kwargs: Any) -> str:
return "GETUTCDATE()"
def get_datetime_column(**kwargs: Any) -> Column:
if "index" not in kwargs:
kwargs["index"] = False
if "nullable" not in kwargs:
kwargs["nullable"] = False
return Column(DateTime(timezone=True), **kwargs)
class UTCDatetime(datetime):
"""parse a datetime and convert in into UTC format"""
@classmethod
def __get_validators__(cls) -> Any:
yield cls.validate
@classmethod
def validate(cls, v: Any) -> datetime:
return datetime.fromtimestamp(parse_datetime(v).timestamp())
class BaseORMSchema(SQLModel, BaseModel):
pass
class URLORMSchema(BaseORMSchema):
url: str = Field("", description="(unique) url of the domain")
class URLCreateMixin(BaseModel):
if not TYPE_CHECKING:
url: UserInputURL = Field("", description="(unique) url of the domain")
class DomainMixin(BaseModel):
domain_id: UUID
class IDMixin(BaseModel):
id: UUID
class TimestampMixin(BaseModel):
created_at: Optional[datetime]
updated_at: Optional[datetime]
class EditMetaclass(ModelMetaclass):
async def edit_dependency(cls: Any) -> Any: # pragma: no cover
pass
def __new__(mcs, name: str, bases: Any, class_dict: Any, **kwargs: Any) -> Any:
cls = super().__new__(mcs, name, bases, class_dict, **kwargs)
async def edit_dependency(request: Request, edit: cls) -> cls: # type: ignore
data = await request.json()
for field in cls.__fields__.values():
if field.name not in data and field.alias not in data:
setattr(edit, field.name, Undefined)
return edit
cls.edit_dependency = edit_dependency
return cls
class FormMetaclass(ModelMetaclass):
"""
Adds an form_dependency class method to the original model.
The form_dependency class method can be used with FastAPI endpoints.
"""
async def form_dependency(cls: Any) -> Any: # pragma: no cover
pass
def __new__(mcs, name: str, bases: Any, class_dict: Any, **kwargs: Any) -> Any:
cls = super().__new__(mcs, name, bases, class_dict, **kwargs)
parameters = []
for field in cls.__fields__.values():
if field.type_ == UploadFile:
fastapi_type = File
else:
fastapi_type = Form
parameters.append(
Parameter(
field.name,
Parameter.POSITIONAL_ONLY,
default=(
fastapi_type(field.default)
if not field.required
else fastapi_type(...)
),
annotation=field.outer_type_,
)
)
async def form_dependency(**data: Any) -> cls: # type: ignore
return cls(**data)
sig = signature(form_dependency)
sig = sig.replace(parameters=parameters)
form_dependency.__signature__ = sig # type: ignore
cls.form_dependency = form_dependency
return cls
BT = TypeVar("BT", bound=PydanticBaseModel)
@lru_cache(maxsize=128)
def get_standard_list_response_sub_model(
cls: Type[PydanticBaseModel],
) -> Type[PydanticBaseModel]:
name = cls.__name__
return create_model(
f"{name}List",
count=(int, 0),
results=(List[cls], []), # type: ignore
__base__=BaseModel,
)
@lru_cache(maxsize=None)
def get_standard_response_model(
cls: Type[PydanticBaseModel], is_list: bool = False
) -> Tuple[Type[PydanticBaseModel], Optional[Type[PydanticBaseModel]]]:
name = cls.__name__
sub_model: Optional[Type[PydanticBaseModel]]
if is_list:
model_name = f"{name}ListResp"
sub_model = get_standard_list_response_sub_model(cls)
data_type = (Optional[sub_model], None)
else:
model_name = f"{name}Resp"
sub_model = None
data_type = (Optional[cls], None)
return (
create_model(
model_name,
error_code=(ErrorCode, ...),
error_msg=(Optional[str], None),
data=data_type,
__base__=BaseModel,
),
sub_model,
)
class Empty(BaseModel):
pass
class StandardErrorResponse(BaseModel):
error_code: ErrorCode
error_msg: Optional[str] = None
data: Optional[Any] = None
class StandardResponse(Generic[BT]):
def __class_getitem__(cls, item: Any) -> Type[Any]:
return get_standard_response_model(item)[0]
def __new__(
cls, data: Union[BT, Type[BT], Empty] = Empty()
) -> "StandardResponse[BT]":
response_type, _ = get_standard_response_model(type(data)) # type: ignore
response_data = data
return response_type( # type: ignore
error_code=ErrorCode.Success, error_msg=None, data=response_data
)
class StandardListResponse(Generic[BT]):
def __class_getitem__(cls, item: Any) -> Type[Any]:
return get_standard_response_model(item, True)[0]
def __new__(
cls,
results: Optional[List[BT]] = None,
count: Optional[int] = None,
) -> "StandardListResponse[BT]":
if results is None:
results = []
data_type = len(results) and type(results[0]) or Empty
response_type, sub_model_type = get_standard_response_model(data_type, True)
if count is None:
count = len(results)
response_data: PydanticBaseModel
if sub_model_type is None:
response_data = Empty()
else:
response_data = sub_model_type(count=count, results=results)
return response_type( # type: ignore
error_code=ErrorCode.Success, error_msg=None, data=response_data
)
class LimitOffsetPagination(BaseModel):
count: int
def camelcase_parameters(func: Any) -> Any:
func_sig = signature(func)
parameters = list(func_sig.parameters.values())
start_index = -1
for i, parameter in enumerate(parameters):
if (
parameter.default
and isinstance(parameter.default, (params.Query, params.Path))
and parameter.default.alias is None
):
if start_index < 0:
start_index = i
parameter.default.alias = snake2camel(parameter.name, start_lower=True)
if start_index >= 0:
parameters.insert(
start_index,
Parameter(
"camelcase_parameters_dependency",
kind=Parameter.POSITIONAL_OR_KEYWORD,
default=Depends(camelcase_parameters_dependency),
),
)
new_sig = func_sig.replace(parameters=parameters)
@wraps(func, new_sig=new_sig)
def wrapper(*args: Any, **kwargs: Any) -> Any:
if "camelcase_parameters_dependency" in kwargs:
del kwargs["camelcase_parameters_dependency"]
return func(*args, **kwargs)
return wrapper
def camelcase_parameters_dependency(request: Request) -> None:
query_params = request.query_params
new_params = MultiDict()
for k, v in query_params.multi_items():
if "_" in k:
camel = snake2camel(k, start_lower=True)
new_params.append(camel, v)
else:
new_params.append(k, v)
request._query_params = new_params
| [
"sqlmodel.Field"
] | [((2437, 2467), 'sqlalchemy.ext.compiler.compiles', 'compiles', (['utcnow', '"""postgresql"""'], {}), "(utcnow, 'postgresql')\n", (2445, 2467), False, 'from sqlalchemy.ext.compiler import compiles\n'), ((2585, 2610), 'sqlalchemy.ext.compiler.compiles', 'compiles', (['utcnow', '"""mssql"""'], {}), "(utcnow, 'mssql')\n", (2593, 2610), False, 'from sqlalchemy.ext.compiler import compiles\n'), ((5783, 5821), 'typing.TypeVar', 'TypeVar', (['"""BT"""'], {'bound': 'PydanticBaseModel'}), "('BT', bound=PydanticBaseModel)\n", (5790, 5821), False, 'from typing import TYPE_CHECKING, Any, Callable, Generator, Generic, List, Optional, Tuple, Type, TypeVar, Union\n'), ((5825, 5847), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(128)'}), '(maxsize=128)\n', (5834, 5847), False, 'from functools import lru_cache\n'), ((6136, 6159), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': 'None'}), '(maxsize=None)\n', (6145, 6159), False, 'from functools import lru_cache\n'), ((1136, 1171), 'typing.TypeVar', 'TypeVar', (['"""Model"""'], {'bound': '"""BaseModel"""'}), "('Model', bound='BaseModel')\n", (1143, 1171), False, 'from typing import TYPE_CHECKING, Any, Callable, Generator, Generic, List, Optional, Tuple, Type, TypeVar, Union\n'), ((1795, 1832), 're.compile', 're.compile', (['"""[\\\\w-]+"""'], {'flags': 're.ASCII'}), "('[\\\\w-]+', flags=re.ASCII)\n", (1805, 1832), False, 'import re\n'), ((2423, 2433), 'sqlalchemy.types.DateTime', 'DateTime', ([], {}), '()\n', (2431, 2433), False, 'from sqlalchemy.types import DateTime\n'), ((3347, 3398), 'sqlmodel.Field', 'Field', (['""""""'], {'description': '"""(unique) url of the domain"""'}), "('', description='(unique) url of the domain')\n", (3352, 3398), False, 'from sqlmodel import Field, SQLModel\n'), ((5989, 6081), 'pydantic.create_model', 'create_model', (['f"""{name}List"""'], {'count': '(int, 0)', 'results': '(List[cls], [])', '__base__': 'BaseModel'}), "(f'{name}List', count=(int, 0), results=(List[cls], []),\n __base__=BaseModel)\n", (6001, 6081), False, 'from pydantic import BaseModel as PydanticBaseModel, ConstrainedInt, ConstrainedStr, create_model\n'), ((8599, 8614), 'inspect.signature', 'signature', (['func'], {}), '(func)\n', (8608, 8614), False, 'from inspect import Parameter, signature\n'), ((9417, 9445), 'makefun.wraps', 'wraps', (['func'], {'new_sig': 'new_sig'}), '(func, new_sig=new_sig)\n', (9422, 9445), False, 'from makefun import wraps\n'), ((9790, 9801), 'starlette.datastructures.MultiDict', 'MultiDict', ([], {}), '()\n', (9799, 9801), False, 'from starlette.datastructures import MultiDict\n'), ((2115, 2125), 'joj.horse.utils.base.is_uuid', 'is_uuid', (['v'], {}), '(v)\n', (2122, 2125), False, 'from joj.horse.utils.base import is_uuid\n'), ((2903, 2926), 'sqlalchemy.types.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (2911, 2926), False, 'from sqlalchemy.types import DateTime\n'), ((3488, 3539), 'sqlmodel.Field', 'Field', (['""""""'], {'description': '"""(unique) url of the domain"""'}), "('', description='(unique) url of the domain')\n", (3493, 3539), False, 'from sqlmodel import Field, SQLModel\n'), ((5575, 5601), 'inspect.signature', 'signature', (['form_dependency'], {}), '(form_dependency)\n', (5584, 5601), False, 'from inspect import Parameter, signature\n'), ((6692, 6819), 'pydantic.create_model', 'create_model', (['model_name'], {'error_code': '(ErrorCode, ...)', 'error_msg': '(Optional[str], None)', 'data': 'data_type', '__base__': 'BaseModel'}), '(model_name, error_code=(ErrorCode, ...), error_msg=(Optional[\n str], None), data=data_type, __base__=BaseModel)\n', (6704, 6819), False, 'from pydantic import BaseModel as PydanticBaseModel, ConstrainedInt, ConstrainedStr, create_model\n'), ((9014, 9059), 'fastapi_utils.camelcase.snake2camel', 'snake2camel', (['parameter.name'], {'start_lower': '(True)'}), '(parameter.name, start_lower=True)\n', (9025, 9059), False, 'from fastapi_utils.camelcase import snake2camel\n'), ((9887, 9919), 'fastapi_utils.camelcase.snake2camel', 'snake2camel', (['k'], {'start_lower': '(True)'}), '(k, start_lower=True)\n', (9898, 9919), False, 'from fastapi_utils.camelcase import snake2camel\n'), ((3211, 3228), 'pydantic.datetime_parse.parse_datetime', 'parse_datetime', (['v'], {}), '(v)\n', (3225, 3228), False, 'from pydantic.datetime_parse import parse_datetime\n'), ((9290, 9330), 'fastapi.Depends', 'Depends', (['camelcase_parameters_dependency'], {}), '(camelcase_parameters_dependency)\n', (9297, 9330), False, 'from fastapi import Depends, File, Form, Request, UploadFile, params\n')] |
import importlib
import os
from typing import Dict, List, Optional, Tuple
from types import ModuleType
import typer
from rich import inspect
from rich.prompt import Prompt
from rich.table import Table
from sqlalchemy import Column
from sqlalchemy.future.engine import Engine
from sqlmodel import SQLModel, create_engine
from ._console import console, error_console
def get_db_url(database_url: Optional[str] = None):
"""A helper function to get the database url."""
if not database_url:
database_url = os.getenv("DATABASE_URL")
if not database_url:
msg = "Please ensure that an environment variable is set for `DATABASE_URL` or pass in the url to the database_url option."
error_console.print(msg)
raise typer.Exit(code=1)
return database_url
def get_tables(models_module) -> Dict[str, SQLModel]:
"""Find all of the SQLModel tables."""
tables = {}
for name, obj in models_module.__dict__.items():
if isinstance(obj, type(SQLModel)) and name != "SQLModel":
tables[obj.__tablename__] = obj
return tables
def get_models(models_path: Optional[str] = None):
# Load the models provided by the user.
if not models_path:
models_path = os.getenv("MODELS_PATH")
if not models_path:
msg = "No modules_path specified. You can set a modules_path by either passing in a value to the -m option or by setting an environment variable `export MODELS_PATH='sqlcli_demo/models.py'`"
error_console.print(msg)
raise typer.Exit(code=1)
models_path = os.path.normpath(models_path)
path, filename = os.path.split(models_path)
module_name, ext = os.path.split(filename)
spec = importlib.util.spec_from_file_location(module_name, models_path)
models = importlib.util.module_from_spec(spec)
spec.loader.exec_module(models)
return models
def is_foreign_key(obj, field_name: str) -> bool:
foreign_keys = [i for i in obj.__table__.foreign_keys]
for fk in foreign_keys:
if fk.parent.name == field_name:
return True
return False
def get_foreign_key_column_name(obj: SQLModel, field_name: str) -> str:
foreign_keys = [i for i in obj.__table__.foreign_keys]
for fk in foreign_keys:
if fk.parent.name == field_name:
return fk.column.name
def get_foreign_key_table_name(obj: SQLModel, field_name: str) -> Optional[str]:
foreign_keys = [i for i in obj.__table__.foreign_keys]
for fk in foreign_keys:
if fk.parent.name == field_name:
return fk.column.table.name
return None
def sqlmodel_setup(
models_path: str, database_url: str
) -> Tuple[ModuleType, str, Engine, Dict[str, SQLModel]]:
"""Quickstart for getting required objects"""
models = get_models(models_path)
url = get_db_url(database_url)
engine = create_engine(url)
tables = get_tables(models)
return models, url, engine, tables
def create_rich_table(data: List[SQLModel], **kwargs) -> Table:
"""Convert a list of SQLModel objects into a rich table."""
table = Table(**kwargs)
# Note that column names are accessed via .__table__.columns._all_columns
# because it guarantees the correct order. If you were to use the more
# succinct `for col in data[0].dict().keys()` the order can change.
for col in data[0].__table__.columns._all_columns:
col_name = col.name
table.add_column(col_name)
for row in data:
row_data = []
for col in row.__table__.columns._all_columns:
row_data.append(row.dict()[col.name])
row_data = [str(i) for i in row_data]
table.add_row(*row_data)
return table
def validate_table_name(
table_name: Optional[str], tables: List[SQLModel]
) -> Tuple[SQLModel, str]:
if not table_name:
table_name = Prompt.ask("Please select a table", choices=tables.keys())
try:
obj = tables[table_name]
except KeyError:
error_console.print(
f"The provided table does not exist. Please select from one of:"
)
error_console.print(f"{list(tables.keys())}")
raise typer.Exit(code=1)
return obj, table_name
| [
"sqlmodel.create_engine"
] | [((1602, 1631), 'os.path.normpath', 'os.path.normpath', (['models_path'], {}), '(models_path)\n', (1618, 1631), False, 'import os\n'), ((1653, 1679), 'os.path.split', 'os.path.split', (['models_path'], {}), '(models_path)\n', (1666, 1679), False, 'import os\n'), ((1703, 1726), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (1716, 1726), False, 'import os\n'), ((1739, 1803), 'importlib.util.spec_from_file_location', 'importlib.util.spec_from_file_location', (['module_name', 'models_path'], {}), '(module_name, models_path)\n', (1777, 1803), False, 'import importlib\n'), ((1817, 1854), 'importlib.util.module_from_spec', 'importlib.util.module_from_spec', (['spec'], {}), '(spec)\n', (1848, 1854), False, 'import importlib\n'), ((2889, 2907), 'sqlmodel.create_engine', 'create_engine', (['url'], {}), '(url)\n', (2902, 2907), False, 'from sqlmodel import SQLModel, create_engine\n'), ((3121, 3136), 'rich.table.Table', 'Table', ([], {}), '(**kwargs)\n', (3126, 3136), False, 'from rich.table import Table\n'), ((522, 547), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""'], {}), "('DATABASE_URL')\n", (531, 547), False, 'import os\n'), ((1253, 1277), 'os.getenv', 'os.getenv', (['"""MODELS_PATH"""'], {}), "('MODELS_PATH')\n", (1262, 1277), False, 'import os\n'), ((769, 787), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (779, 787), False, 'import typer\n'), ((1564, 1582), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (1574, 1582), False, 'import typer\n'), ((4184, 4202), 'typer.Exit', 'typer.Exit', ([], {'code': '(1)'}), '(code=1)\n', (4194, 4202), False, 'import typer\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel
from datetime import datetime
class Calendar(SQLModel, table=True):
"""Create an SQLModel for a calendar"""
id: Optional[int] = Field(default=None, primary_key=True)
date: datetime
year_number: int
year_name: str
quarter_number: int
quarter_name: str
month_number: int
month_name: str
week_number: int
week_name: str
week_day_number: int
week_day_name: str
__table_args__ = {"schema": "app_db"}
| [
"sqlmodel.Field"
] | [((204, 241), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (209, 241), False, 'from sqlmodel import Field, SQLModel\n')] |
"""Movie models."""
import datetime
import typing
import pydantic
import sqlmodel
from app.models import mixins
from app.models import validators
if typing.TYPE_CHECKING:
from app.models.patron import Patron, PatronRead
class MovieBase(sqlmodel.SQLModel, mixins.ProposalMixin, mixins.LinksMixin):
"""Base Movie model."""
title_orig: str
title_en: str
title_it: str | None
release_date: datetime.date | None
running_time: pydantic.PositiveInt | None
notes: str | None
_normalize_title = pydantic.validator("title_orig",
"title_en",
"title_it",
allow_reuse=True)(
validators.normalize_title)
class Movie(MovieBase, mixins.TimestampsMixin, mixins.BaseMixin, table=True):
"""Movie database model."""
patron: "Patron" = sqlmodel.Relationship(
back_populates="movies", sa_relationship_kwargs={"lazy": "selectin"})
class MovieCreate(MovieBase):
"""Movie create model."""
class MovieRead(MovieBase):
"""Movie base model."""
id: pydantic.UUID4
class MovieReadWithPatron(MovieRead):
"""Movie read model with related patron."""
patron: "PatronRead" = None
class MovieUpdate(sqlmodel.SQLModel, mixins.LinksMixin):
"""Movie update model."""
# TODO: Set default to None when
# https://github.com/tiangolo/sqlmodel/issues/230 is resolved.
title_orig: str | None = ""
title_en: str | None = ""
title_it: str | None = ""
release_date: datetime.date | None = None
running_time: pydantic.PositiveInt | None = None
notes: str | None = None
_normalize_title = pydantic.validator("title_orig",
"title_en",
"title_it",
allow_reuse=True)(
validators.normalize_title)
| [
"sqlmodel.Relationship"
] | [((941, 1037), 'sqlmodel.Relationship', 'sqlmodel.Relationship', ([], {'back_populates': '"""movies"""', 'sa_relationship_kwargs': "{'lazy': 'selectin'}"}), "(back_populates='movies', sa_relationship_kwargs={\n 'lazy': 'selectin'})\n", (962, 1037), False, 'import sqlmodel\n'), ((529, 603), 'pydantic.validator', 'pydantic.validator', (['"""title_orig"""', '"""title_en"""', '"""title_it"""'], {'allow_reuse': '(True)'}), "('title_orig', 'title_en', 'title_it', allow_reuse=True)\n", (547, 603), False, 'import pydantic\n'), ((1742, 1816), 'pydantic.validator', 'pydantic.validator', (['"""title_orig"""', '"""title_en"""', '"""title_it"""'], {'allow_reuse': '(True)'}), "('title_orig', 'title_en', 'title_it', allow_reuse=True)\n", (1760, 1816), False, 'import pydantic\n')] |
import datetime
from typing import Optional
from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel
class HelpSessionBase(SQLModel):
"""A base model for storing information about users."""
claimant_id: int
channel_id: int
opened_at: datetime.datetime
closed_at: Optional[datetime.datetime]
class HelpSessionTable(HelpSessionBase, table=True):
"""A model for storing information about individual help sessions."""
__tablename__ = "help_sessions"
session_id: int = Field(primary_key=True)
claimant_id: int = Field(
sa_column=Column(
"claimant_id",
BigInteger,
ForeignKey("users.user_id"),
nullable=False
)
)
channel_id: int = Field(
sa_column=Column(
"channel_id",
BigInteger,
index=True,
nullable=False
)
)
opened_at: datetime.datetime = Field(
sa_column=Column(
DateTime(timezone=True),
nullable=False
)
)
closed_at: Optional[datetime.datetime] = Field(
sa_column=Column(
DateTime(timezone=True),
nullable=True
)
)
| [
"sqlmodel.Field",
"sqlmodel.DateTime",
"sqlmodel.Column",
"sqlmodel.ForeignKey"
] | [((526, 549), 'sqlmodel.Field', 'Field', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (531, 549), False, 'from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel\n'), ((788, 848), 'sqlmodel.Column', 'Column', (['"""channel_id"""', 'BigInteger'], {'index': '(True)', 'nullable': '(False)'}), "('channel_id', BigInteger, index=True, nullable=False)\n", (794, 848), False, 'from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel\n'), ((669, 696), 'sqlmodel.ForeignKey', 'ForeignKey', (['"""users.user_id"""'], {}), "('users.user_id')\n", (679, 696), False, 'from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel\n'), ((993, 1016), 'sqlmodel.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (1001, 1016), False, 'from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel\n'), ((1151, 1174), 'sqlmodel.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (1159, 1174), False, 'from sqlmodel import BigInteger, Column, DateTime, Field, ForeignKey, SQLModel\n')] |
from enum import Enum
from typing import TYPE_CHECKING, Optional
from sqlalchemy import Column
from sqlalchemy import Enum as SQLEnum
from sqlalchemy import ForeignKey, Integer
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from .message import Message, MessageList
class MessageTriggerType(Enum):
SIGN_UP = "Sign Up"
APPLICATION_SUBMITTED = "Application - Submitted"
APPLICATION_ACCEPTED = "Application - Accepted"
APPLICATION_REJECTED = "Application - Rejected"
INCOMPLETE_APPLICATION_24H = "Incomplete Application - 24hr"
INCOMPLETE_APPLICATION_7D = "Incomplete Application - 7 day"
class MessageTriggerBase(SQLModel):
trigger: MessageTriggerType = Field(
sa_column=Column(
SQLEnum(MessageTriggerType),
nullable=False,
primary_key=True,
)
)
class MessageTrigger(MessageTriggerBase, table=True):
__tablename__ = "message_triggers"
message_id: Optional[int] = Field(
sa_column=Column(
Integer(),
ForeignKey("messages.id", ondelete="CASCADE"),
nullable=True,
)
)
message: Optional["Message"] = Relationship()
class MessageTriggerRead(MessageTriggerBase):
message: Optional["MessageList"]
class MessageTriggerUpdate(SQLModel):
message_id: Optional[int]
| [
"sqlmodel.Relationship"
] | [((1182, 1196), 'sqlmodel.Relationship', 'Relationship', ([], {}), '()\n', (1194, 1196), False, 'from sqlmodel import Field, Relationship, SQLModel\n'), ((757, 784), 'sqlalchemy.Enum', 'SQLEnum', (['MessageTriggerType'], {}), '(MessageTriggerType)\n', (764, 784), True, 'from sqlalchemy import Enum as SQLEnum\n'), ((1033, 1042), 'sqlalchemy.Integer', 'Integer', ([], {}), '()\n', (1040, 1042), False, 'from sqlalchemy import ForeignKey, Integer\n'), ((1056, 1101), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""messages.id"""'], {'ondelete': '"""CASCADE"""'}), "('messages.id', ondelete='CASCADE')\n", (1066, 1101), False, 'from sqlalchemy import ForeignKey, Integer\n')] |
from typing import Optional
from sqlmodel import Field, SQLModel, Field
from pydantic import validator
from datetime import datetime, date
from fastapi import HTTPException
import re
class AppUser(SQLModel, table=True):
"""Create an SQLModel for users"""
id: Optional[int] = Field(default=None, primary_key=True)
username: str
first_name: str
last_name: str
email: str
role_id: int
team_id: Optional[int] = None
start_date: date
created_at: datetime
updated_at: datetime
is_active: bool
__table_args__ = {"schema": "app_db"}
@validator("first_name", always=True)
def valid_first_name(cls, first_name):
assert first_name.replace(
" ", ""
).isalpha(), "only alphabet letters allowed in first name"
if first_name[0].isupper() == False:
raise HTTPException(
status_code=400, detail="first name should start with a capital letter"
)
return first_name
@validator("last_name", always=True)
def valid_last_name(cls, ln_input):
assert ln_input.replace(
" ", ""
).isalpha(), "only alphabet letters allowed in last name"
return ln_input
@validator("email", always=True)
def valid_email(cls, email_input):
regex = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
assert re.fullmatch(regex, email_input), "email format incorrect"
return email_input
| [
"sqlmodel.Field"
] | [((286, 323), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (291, 323), False, 'from sqlmodel import Field, SQLModel, Field\n'), ((587, 623), 'pydantic.validator', 'validator', (['"""first_name"""'], {'always': '(True)'}), "('first_name', always=True)\n", (596, 623), False, 'from pydantic import validator\n'), ((1001, 1036), 'pydantic.validator', 'validator', (['"""last_name"""'], {'always': '(True)'}), "('last_name', always=True)\n", (1010, 1036), False, 'from pydantic import validator\n'), ((1226, 1257), 'pydantic.validator', 'validator', (['"""email"""'], {'always': '(True)'}), "('email', always=True)\n", (1235, 1257), False, 'from pydantic import validator\n'), ((1383, 1415), 're.fullmatch', 're.fullmatch', (['regex', 'email_input'], {}), '(regex, email_input)\n', (1395, 1415), False, 'import re\n'), ((852, 943), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(400)', 'detail': '"""first name should start with a capital letter"""'}), "(status_code=400, detail=\n 'first name should start with a capital letter')\n", (865, 943), False, 'from fastapi import HTTPException\n')] |
import os
from fastapi import FastAPI
from sqlmodel import create_engine, SQLModel
from .configurations import env
from .models import * # init models package
class AppFactory(object):
def __init__(self):
self._app = None
@staticmethod
def _get_all_router():
from pigeon.blog.services.routers import __all_routers__
return __all_routers__
def _apply_router(self):
if not isinstance(self._app, FastAPI):
raise RuntimeError("self._app isn't initialized.")
routers = AppFactory._get_all_router()
for r in routers:
self._app.include_router(r)
def _ensure_sql(self):
if not isinstance(self._app, FastAPI):
return
@self._app.on_event("startup")
def sql_startup():
engine = get_engine()
SQLModel.metadata.create_all(engine)
@self._app.on_event("shutdown")
def sql_shutdown():
pass
def __call__(self, *args, **kwargs):
self._app = FastAPI(
title="Pigeon Blog",
)
self._apply_router()
self._ensure_sql()
return self._app
| [
"sqlmodel.SQLModel.metadata.create_all"
] | [((1026, 1054), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""Pigeon Blog"""'}), "(title='Pigeon Blog')\n", (1033, 1054), False, 'from fastapi import FastAPI\n'), ((841, 877), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (869, 877), False, 'from sqlmodel import create_engine, SQLModel\n')] |
from sqlmodel import select
from sqlalchemy.sql import expression
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import defer
from typing import Any
from app.db import models, pagination, session_scope
from app.logs import fastapi_logger
def get_user(email: str) -> Any:
""" Get User Data based on email"""
try:
with session_scope() as db:
statement = select(models.User).where(
models.User.email == email).options(defer('password'))
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("get_user")
return None
def get_user_password(email: str) -> Any:
""" Get User Password based on email"""
try:
with session_scope() as db:
statement = select(models.User).where(
models.User.email == email)
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("get_user")
return None
def get_active_user(email: str) -> Any:
""" Get User Data based on email and active status"""
try:
with session_scope() as db:
statement = select(models.User).where(
models.User.email == email).where(
models.User.is_active == expression.true()).options(defer('password'))
results = db.exec(statement)
data = results.one()
return data
except SQLAlchemyError as e:
fastapi_logger.exception("get_user")
return None
| [
"sqlmodel.select"
] | [((363, 378), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (376, 378), False, 'from app.db import models, pagination, session_scope\n'), ((654, 690), 'app.logs.fastapi_logger.exception', 'fastapi_logger.exception', (['"""get_user"""'], {}), "('get_user')\n", (678, 690), False, 'from app.logs import fastapi_logger\n'), ((826, 841), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (839, 841), False, 'from app.db import models, pagination, session_scope\n'), ((1090, 1126), 'app.logs.fastapi_logger.exception', 'fastapi_logger.exception', (['"""get_user"""'], {}), "('get_user')\n", (1114, 1126), False, 'from app.logs import fastapi_logger\n'), ((1274, 1289), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (1287, 1289), False, 'from app.db import models, pagination, session_scope\n'), ((1633, 1669), 'app.logs.fastapi_logger.exception', 'fastapi_logger.exception', (['"""get_user"""'], {}), "('get_user')\n", (1657, 1669), False, 'from app.logs import fastapi_logger\n'), ((491, 508), 'sqlalchemy.orm.defer', 'defer', (['"""password"""'], {}), "('password')\n", (496, 508), False, 'from sqlalchemy.orm import defer\n'), ((1470, 1487), 'sqlalchemy.orm.defer', 'defer', (['"""password"""'], {}), "('password')\n", (1475, 1487), False, 'from sqlalchemy.orm import defer\n'), ((874, 893), 'sqlmodel.select', 'select', (['models.User'], {}), '(models.User)\n', (880, 893), False, 'from sqlmodel import select\n'), ((411, 430), 'sqlmodel.select', 'select', (['models.User'], {}), '(models.User)\n', (417, 430), False, 'from sqlmodel import select\n'), ((1443, 1460), 'sqlalchemy.sql.expression.true', 'expression.true', ([], {}), '()\n', (1458, 1460), False, 'from sqlalchemy.sql import expression\n'), ((1322, 1341), 'sqlmodel.select', 'select', (['models.User'], {}), '(models.User)\n', (1328, 1341), False, 'from sqlmodel import select\n')] |
"""init database
Revision ID: 60e58d3a26fa
Revises:
Create Date: 2021-11-24 18:06:53.935899
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = '60e58d3a26fa'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('street_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('house_number', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('city', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('zip_code', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_address_city'), 'address', ['city'], unique=False)
op.create_index(op.f('ix_address_house_number'), 'address', ['house_number'], unique=False)
op.create_index(op.f('ix_address_id'), 'address', ['id'], unique=False)
op.create_index(op.f('ix_address_street_name'), 'address', ['street_name'], unique=False)
op.create_index(op.f('ix_address_zip_code'), 'address', ['zip_code'], unique=False)
op.create_table('product',
sa.Column('name', sa.String(), nullable=True),
sa.Column('id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(op.f('ix_product_id'), 'product', ['id'], unique=False)
op.create_table('customer',
sa.Column('mobile_number', sa.String(), nullable=True),
sa.Column('email', sa.String(), nullable=True),
sa.Column('first_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('last_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('birth_date', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('gender', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=True),
sa.Column('address_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['address_id'], ['address.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('mobile_number')
)
op.create_index(op.f('ix_customer_address_id'), 'customer', ['address_id'], unique=False)
op.create_index(op.f('ix_customer_birth_date'), 'customer', ['birth_date'], unique=False)
op.create_index(op.f('ix_customer_first_name'), 'customer', ['first_name'], unique=False)
op.create_index(op.f('ix_customer_gender'), 'customer', ['gender'], unique=False)
op.create_index(op.f('ix_customer_id'), 'customer', ['id'], unique=False)
op.create_index(op.f('ix_customer_last_name'), 'customer', ['last_name'], unique=False)
op.create_table('customerproductlink',
sa.Column('customer_id', sa.Integer(), nullable=True),
sa.Column('product_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['customer_id'], ['customer.id'], ),
sa.ForeignKeyConstraint(['product_id'], ['product.id'], ),
sa.PrimaryKeyConstraint('customer_id', 'product_id')
)
op.create_index(op.f('ix_customerproductlink_customer_id'), 'customerproductlink', ['customer_id'], unique=False)
op.create_index(op.f('ix_customerproductlink_product_id'), 'customerproductlink', ['product_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_customerproductlink_product_id'), table_name='customerproductlink')
op.drop_index(op.f('ix_customerproductlink_customer_id'), table_name='customerproductlink')
op.drop_table('customerproductlink')
op.drop_index(op.f('ix_customer_last_name'), table_name='customer')
op.drop_index(op.f('ix_customer_id'), table_name='customer')
op.drop_index(op.f('ix_customer_gender'), table_name='customer')
op.drop_index(op.f('ix_customer_first_name'), table_name='customer')
op.drop_index(op.f('ix_customer_birth_date'), table_name='customer')
op.drop_index(op.f('ix_customer_address_id'), table_name='customer')
op.drop_table('customer')
op.drop_index(op.f('ix_product_id'), table_name='product')
op.drop_table('product')
op.drop_index(op.f('ix_address_zip_code'), table_name='address')
op.drop_index(op.f('ix_address_street_name'), table_name='address')
op.drop_index(op.f('ix_address_id'), table_name='address')
op.drop_index(op.f('ix_address_house_number'), table_name='address')
op.drop_index(op.f('ix_address_city'), table_name='address')
op.drop_table('address')
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((3717, 3753), 'alembic.op.drop_table', 'op.drop_table', (['"""customerproductlink"""'], {}), "('customerproductlink')\n", (3730, 3753), False, 'from alembic import op\n'), ((4183, 4208), 'alembic.op.drop_table', 'op.drop_table', (['"""customer"""'], {}), "('customer')\n", (4196, 4208), False, 'from alembic import op\n'), ((4276, 4300), 'alembic.op.drop_table', 'op.drop_table', (['"""product"""'], {}), "('product')\n", (4289, 4300), False, 'from alembic import op\n'), ((4647, 4671), 'alembic.op.drop_table', 'op.drop_table', (['"""address"""'], {}), "('address')\n", (4660, 4671), False, 'from alembic import op\n'), ((778, 807), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (801, 807), True, 'import sqlalchemy as sa\n'), ((834, 857), 'alembic.op.f', 'op.f', (['"""ix_address_city"""'], {}), "('ix_address_city')\n", (838, 857), False, 'from alembic import op\n'), ((914, 945), 'alembic.op.f', 'op.f', (['"""ix_address_house_number"""'], {}), "('ix_address_house_number')\n", (918, 945), False, 'from alembic import op\n'), ((1010, 1031), 'alembic.op.f', 'op.f', (['"""ix_address_id"""'], {}), "('ix_address_id')\n", (1014, 1031), False, 'from alembic import op\n'), ((1086, 1116), 'alembic.op.f', 'op.f', (['"""ix_address_street_name"""'], {}), "('ix_address_street_name')\n", (1090, 1116), False, 'from alembic import op\n'), ((1180, 1207), 'alembic.op.f', 'op.f', (['"""ix_address_zip_code"""'], {}), "('ix_address_zip_code')\n", (1184, 1207), False, 'from alembic import op\n'), ((1384, 1413), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1407, 1413), True, 'import sqlalchemy as sa\n'), ((1419, 1446), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""'], {}), "('name')\n", (1438, 1446), True, 'import sqlalchemy as sa\n'), ((1473, 1494), 'alembic.op.f', 'op.f', (['"""ix_product_id"""'], {}), "('ix_product_id')\n", (1477, 1494), False, 'from alembic import op\n'), ((2104, 2159), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['address_id']", "['address.id']"], {}), "(['address_id'], ['address.id'])\n", (2127, 2159), True, 'import sqlalchemy as sa\n'), ((2167, 2196), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2190, 2196), True, 'import sqlalchemy as sa\n'), ((2202, 2230), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""email"""'], {}), "('email')\n", (2221, 2230), True, 'import sqlalchemy as sa\n'), ((2236, 2272), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""mobile_number"""'], {}), "('mobile_number')\n", (2255, 2272), True, 'import sqlalchemy as sa\n'), ((2299, 2329), 'alembic.op.f', 'op.f', (['"""ix_customer_address_id"""'], {}), "('ix_customer_address_id')\n", (2303, 2329), False, 'from alembic import op\n'), ((2393, 2423), 'alembic.op.f', 'op.f', (['"""ix_customer_birth_date"""'], {}), "('ix_customer_birth_date')\n", (2397, 2423), False, 'from alembic import op\n'), ((2487, 2517), 'alembic.op.f', 'op.f', (['"""ix_customer_first_name"""'], {}), "('ix_customer_first_name')\n", (2491, 2517), False, 'from alembic import op\n'), ((2581, 2607), 'alembic.op.f', 'op.f', (['"""ix_customer_gender"""'], {}), "('ix_customer_gender')\n", (2585, 2607), False, 'from alembic import op\n'), ((2667, 2689), 'alembic.op.f', 'op.f', (['"""ix_customer_id"""'], {}), "('ix_customer_id')\n", (2671, 2689), False, 'from alembic import op\n'), ((2745, 2774), 'alembic.op.f', 'op.f', (['"""ix_customer_last_name"""'], {}), "('ix_customer_last_name')\n", (2749, 2774), False, 'from alembic import op\n'), ((2981, 3038), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['customer_id']", "['customer.id']"], {}), "(['customer_id'], ['customer.id'])\n", (3004, 3038), True, 'import sqlalchemy as sa\n'), ((3046, 3101), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['product_id']", "['product.id']"], {}), "(['product_id'], ['product.id'])\n", (3069, 3101), True, 'import sqlalchemy as sa\n'), ((3109, 3161), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""customer_id"""', '"""product_id"""'], {}), "('customer_id', 'product_id')\n", (3132, 3161), True, 'import sqlalchemy as sa\n'), ((3188, 3230), 'alembic.op.f', 'op.f', (['"""ix_customerproductlink_customer_id"""'], {}), "('ix_customerproductlink_customer_id')\n", (3192, 3230), False, 'from alembic import op\n'), ((3306, 3347), 'alembic.op.f', 'op.f', (['"""ix_customerproductlink_product_id"""'], {}), "('ix_customerproductlink_product_id')\n", (3310, 3347), False, 'from alembic import op\n'), ((3540, 3581), 'alembic.op.f', 'op.f', (['"""ix_customerproductlink_product_id"""'], {}), "('ix_customerproductlink_product_id')\n", (3544, 3581), False, 'from alembic import op\n'), ((3635, 3677), 'alembic.op.f', 'op.f', (['"""ix_customerproductlink_customer_id"""'], {}), "('ix_customerproductlink_customer_id')\n", (3639, 3677), False, 'from alembic import op\n'), ((3772, 3801), 'alembic.op.f', 'op.f', (['"""ix_customer_last_name"""'], {}), "('ix_customer_last_name')\n", (3776, 3801), False, 'from alembic import op\n'), ((3844, 3866), 'alembic.op.f', 'op.f', (['"""ix_customer_id"""'], {}), "('ix_customer_id')\n", (3848, 3866), False, 'from alembic import op\n'), ((3909, 3935), 'alembic.op.f', 'op.f', (['"""ix_customer_gender"""'], {}), "('ix_customer_gender')\n", (3913, 3935), False, 'from alembic import op\n'), ((3978, 4008), 'alembic.op.f', 'op.f', (['"""ix_customer_first_name"""'], {}), "('ix_customer_first_name')\n", (3982, 4008), False, 'from alembic import op\n'), ((4051, 4081), 'alembic.op.f', 'op.f', (['"""ix_customer_birth_date"""'], {}), "('ix_customer_birth_date')\n", (4055, 4081), False, 'from alembic import op\n'), ((4124, 4154), 'alembic.op.f', 'op.f', (['"""ix_customer_address_id"""'], {}), "('ix_customer_address_id')\n", (4128, 4154), False, 'from alembic import op\n'), ((4227, 4248), 'alembic.op.f', 'op.f', (['"""ix_product_id"""'], {}), "('ix_product_id')\n", (4231, 4248), False, 'from alembic import op\n'), ((4319, 4346), 'alembic.op.f', 'op.f', (['"""ix_address_zip_code"""'], {}), "('ix_address_zip_code')\n", (4323, 4346), False, 'from alembic import op\n'), ((4388, 4418), 'alembic.op.f', 'op.f', (['"""ix_address_street_name"""'], {}), "('ix_address_street_name')\n", (4392, 4418), False, 'from alembic import op\n'), ((4460, 4481), 'alembic.op.f', 'op.f', (['"""ix_address_id"""'], {}), "('ix_address_id')\n", (4464, 4481), False, 'from alembic import op\n'), ((4523, 4554), 'alembic.op.f', 'op.f', (['"""ix_address_house_number"""'], {}), "('ix_address_house_number')\n", (4527, 4554), False, 'from alembic import op\n'), ((4596, 4619), 'alembic.op.f', 'op.f', (['"""ix_address_city"""'], {}), "('ix_address_city')\n", (4600, 4619), False, 'from alembic import op\n'), ((434, 468), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (466, 468), False, 'import sqlmodel\n'), ((517, 551), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (549, 551), False, 'import sqlmodel\n'), ((592, 626), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (624, 626), False, 'import sqlmodel\n'), ((671, 705), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (703, 705), False, 'import sqlmodel\n'), ((744, 756), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (754, 756), True, 'import sqlalchemy as sa\n'), ((1301, 1312), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1310, 1312), True, 'import sqlalchemy as sa\n'), ((1350, 1362), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1360, 1362), True, 'import sqlalchemy as sa\n'), ((1592, 1603), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1601, 1603), True, 'import sqlalchemy as sa\n'), ((1644, 1655), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (1653, 1655), True, 'import sqlalchemy as sa\n'), ((1701, 1735), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1733, 1735), False, 'import sqlmodel\n'), ((1781, 1815), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1813, 1815), False, 'import sqlmodel\n'), ((1862, 1896), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1894, 1896), False, 'import sqlmodel\n'), ((1939, 1973), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (1971, 1973), False, 'import sqlmodel\n'), ((2012, 2024), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2022, 2024), True, 'import sqlalchemy as sa\n'), ((2070, 2082), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2080, 2082), True, 'import sqlalchemy as sa\n'), ((2889, 2901), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2899, 2901), True, 'import sqlalchemy as sa\n'), ((2947, 2959), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2957, 2959), True, 'import sqlalchemy as sa\n')] |
import os
from pathlib import Path
from app import MyApp
from dotenv import load_dotenv
from sqlmodel import Session, SQLModel, create_engine
load_dotenv()
MOVIES_PATH = Path(os.getenv("MOVIES_FILEPATH", None))
dbfile = Path("database.db")
engine = create_engine("sqlite:///database.db", echo=False)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def main():
if not dbfile.exists():
create_db_and_tables()
with Session(engine) as session:
MyApp.run(
title="Media Organizer",
log="textual.log",
path=str(MOVIES_PATH.absolute()),
session=session,
)
if __name__ == "__main__":
main()
| [
"sqlmodel.SQLModel.metadata.create_all",
"sqlmodel.Session",
"sqlmodel.create_engine"
] | [((144, 157), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (155, 157), False, 'from dotenv import load_dotenv\n'), ((224, 243), 'pathlib.Path', 'Path', (['"""database.db"""'], {}), "('database.db')\n", (228, 243), False, 'from pathlib import Path\n'), ((253, 303), 'sqlmodel.create_engine', 'create_engine', (['"""sqlite:///database.db"""'], {'echo': '(False)'}), "('sqlite:///database.db', echo=False)\n", (266, 303), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((178, 212), 'os.getenv', 'os.getenv', (['"""MOVIES_FILEPATH"""', 'None'], {}), "('MOVIES_FILEPATH', None)\n", (187, 212), False, 'import os\n'), ((338, 374), 'sqlmodel.SQLModel.metadata.create_all', 'SQLModel.metadata.create_all', (['engine'], {}), '(engine)\n', (366, 374), False, 'from sqlmodel import Session, SQLModel, create_engine\n'), ((458, 473), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (465, 473), False, 'from sqlmodel import Session, SQLModel, create_engine\n')] |
from typing import Optional
from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean
from datetime import datetime
class BaseModel(SQLModel):
"""
BaseModel class
"""
class Config:
use_enum_values = True
class BaseTableFields(SQLModel):
"""
BaseTableField class
"""
id: Optional[int] = Field(default=None, primary_key=True, nullable=False)
created_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default=func.now(),
nullable=False,
)
)
updated_at: Optional[datetime] = Field(
default=None,
sa_column=Column(
DateTime(timezone=True),
server_default=func.now(),
onupdate=func.now(),
nullable=False,
)
)
is_active: Optional[bool] = Field(
default=None,
sa_column=Column(Boolean, server_default='true', default=True)
)
| [
"sqlmodel.func.now",
"sqlmodel.Field",
"sqlmodel.Column",
"sqlmodel.DateTime"
] | [((344, 397), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)', 'nullable': '(False)'}), '(default=None, primary_key=True, nullable=False)\n', (349, 397), False, 'from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean\n'), ((937, 989), 'sqlmodel.Column', 'Column', (['Boolean'], {'server_default': '"""true"""', 'default': '(True)'}), "(Boolean, server_default='true', default=True)\n", (943, 989), False, 'from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean\n'), ((503, 526), 'sqlmodel.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (511, 526), False, 'from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean\n'), ((716, 739), 'sqlmodel.DateTime', 'DateTime', ([], {'timezone': '(True)'}), '(timezone=True)\n', (724, 739), False, 'from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean\n'), ((555, 565), 'sqlmodel.func.now', 'func.now', ([], {}), '()\n', (563, 565), False, 'from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean\n'), ((768, 778), 'sqlmodel.func.now', 'func.now', ([], {}), '()\n', (776, 778), False, 'from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean\n'), ((801, 811), 'sqlmodel.func.now', 'func.now', ([], {}), '()\n', (809, 811), False, 'from sqlmodel import SQLModel, Field, func, DateTime, Column, Boolean\n')] |
"""Initial Migration3
Revision ID: 849d12c13c8a
Revises:
Create Date: 2021-12-19 17:06:36.345137
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = '849d12<PASSWORD>'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
'user', sa.Column('id', sa.Integer(), nullable=True),
sa.Column('username', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('email', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('password_hashed', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('is_admin', sa.Boolean(), nullable=False), sa.Column('is_disabled', sa.Boolean(), nullable=False),
sa.Column('is_verified', sa.Boolean(), nullable=False), sa.PrimaryKeyConstraint('id', 'username', 'email')
)
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=False)
op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
op.create_index(op.f('ix_user_is_admin'), 'user', ['is_admin'], unique=False)
op.create_index(op.f('ix_user_is_disabled'), 'user', ['is_disabled'], unique=False)
op.create_index(op.f('ix_user_is_verified'), 'user', ['is_verified'], unique=False)
op.create_index(op.f('ix_user_password_hashed'), 'user', ['password_hashed'], unique=False)
op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_user_username'), table_name='user')
op.drop_index(op.f('ix_user_password_hashed'), table_name='user')
op.drop_index(op.f('ix_user_is_verified'), table_name='user')
op.drop_index(op.f('ix_user_is_disabled'), table_name='user')
op.drop_index(op.f('ix_user_is_admin'), table_name='user')
op.drop_index(op.f('ix_user_id'), table_name='user')
op.drop_index(op.f('ix_user_email'), table_name='user')
op.drop_table('user')
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((2106, 2127), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (2119, 2127), False, 'from alembic import op\n'), ((898, 948), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""', '"""username"""', '"""email"""'], {}), "('id', 'username', 'email')\n", (921, 948), True, 'import sqlalchemy as sa\n'), ((975, 996), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (979, 996), False, 'from alembic import op\n'), ((1051, 1069), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (1055, 1069), False, 'from alembic import op\n'), ((1121, 1145), 'alembic.op.f', 'op.f', (['"""ix_user_is_admin"""'], {}), "('ix_user_is_admin')\n", (1125, 1145), False, 'from alembic import op\n'), ((1203, 1230), 'alembic.op.f', 'op.f', (['"""ix_user_is_disabled"""'], {}), "('ix_user_is_disabled')\n", (1207, 1230), False, 'from alembic import op\n'), ((1291, 1318), 'alembic.op.f', 'op.f', (['"""ix_user_is_verified"""'], {}), "('ix_user_is_verified')\n", (1295, 1318), False, 'from alembic import op\n'), ((1379, 1410), 'alembic.op.f', 'op.f', (['"""ix_user_password_hashed"""'], {}), "('ix_user_password_hashed')\n", (1383, 1410), False, 'from alembic import op\n'), ((1475, 1499), 'alembic.op.f', 'op.f', (['"""ix_user_username"""'], {}), "('ix_user_username')\n", (1479, 1499), False, 'from alembic import op\n'), ((1675, 1699), 'alembic.op.f', 'op.f', (['"""ix_user_username"""'], {}), "('ix_user_username')\n", (1679, 1699), False, 'from alembic import op\n'), ((1738, 1769), 'alembic.op.f', 'op.f', (['"""ix_user_password_hashed"""'], {}), "('ix_user_password_hashed')\n", (1742, 1769), False, 'from alembic import op\n'), ((1808, 1835), 'alembic.op.f', 'op.f', (['"""ix_user_is_verified"""'], {}), "('ix_user_is_verified')\n", (1812, 1835), False, 'from alembic import op\n'), ((1874, 1901), 'alembic.op.f', 'op.f', (['"""ix_user_is_disabled"""'], {}), "('ix_user_is_disabled')\n", (1878, 1901), False, 'from alembic import op\n'), ((1940, 1964), 'alembic.op.f', 'op.f', (['"""ix_user_is_admin"""'], {}), "('ix_user_is_admin')\n", (1944, 1964), False, 'from alembic import op\n'), ((2003, 2021), 'alembic.op.f', 'op.f', (['"""ix_user_id"""'], {}), "('ix_user_id')\n", (2007, 2021), False, 'from alembic import op\n'), ((2060, 2081), 'alembic.op.f', 'op.f', (['"""ix_user_email"""'], {}), "('ix_user_email')\n", (2064, 2081), False, 'from alembic import op\n'), ((434, 446), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (444, 446), True, 'import sqlalchemy as sa\n'), ((494, 528), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (526, 528), False, 'import sqlmodel\n'), ((574, 608), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (606, 608), False, 'import sqlmodel\n'), ((664, 698), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (696, 698), False, 'import sqlmodel\n'), ((747, 759), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (757, 759), True, 'import sqlalchemy as sa\n'), ((803, 815), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (813, 815), True, 'import sqlalchemy as sa\n'), ((867, 879), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (877, 879), True, 'import sqlalchemy as sa\n')] |
"""participant id as string
Revision ID: <KEY>
Revises: 11<PASSWORD>3<PASSWORD>
Create Date: 2022-04-04 04:34:56.202331+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "11505f38b<PASSWORD>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(
"applications_participant_id_fkey", "applications", type_="foreignkey"
)
op.alter_column(
"applications",
"participant_id",
type_=sqlmodel.sql.sqltypes.AutoString(),
nullable=False,
)
op.alter_column(
"participants", "id", type_=sqlmodel.sql.sqltypes.AutoString(), nullable=False
)
op.create_foreign_key(
None,
"applications",
"participants",
["participant_id"],
["id"],
ondelete="CASCADE",
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(
"applications_participant_id_fkey", "applications", type_="foreignkey"
)
op.alter_column(
"applications", "participant_id", type_=sa.Integer(), nullable=False
)
op.alter_column("participants", "id", type_=sa.Integer(), nullable=False)
op.create_foreign_key(
None,
"applications",
"participants",
["participant_id"],
["id"],
ondelete="CASCADE",
)
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((420, 514), 'alembic.op.drop_constraint', 'op.drop_constraint', (['"""applications_participant_id_fkey"""', '"""applications"""'], {'type_': '"""foreignkey"""'}), "('applications_participant_id_fkey', 'applications',\n type_='foreignkey')\n", (438, 514), False, 'from alembic import op\n'), ((794, 906), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""participants"""', "['participant_id']", "['id']"], {'ondelete': '"""CASCADE"""'}), "(None, 'applications', 'participants', [\n 'participant_id'], ['id'], ondelete='CASCADE')\n", (815, 906), False, 'from alembic import op\n'), ((1081, 1175), 'alembic.op.drop_constraint', 'op.drop_constraint', (['"""applications_participant_id_fkey"""', '"""applications"""'], {'type_': '"""foreignkey"""'}), "('applications_participant_id_fkey', 'applications',\n type_='foreignkey')\n", (1099, 1175), False, 'from alembic import op\n'), ((1372, 1484), 'alembic.op.create_foreign_key', 'op.create_foreign_key', (['None', '"""applications"""', '"""participants"""', "['participant_id']", "['id']"], {'ondelete': '"""CASCADE"""'}), "(None, 'applications', 'participants', [\n 'participant_id'], ['id'], ondelete='CASCADE')\n", (1393, 1484), False, 'from alembic import op\n'), ((610, 644), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (642, 644), False, 'import sqlmodel\n'), ((733, 767), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (765, 767), False, 'import sqlmodel\n'), ((1255, 1267), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1265, 1267), True, 'import sqlalchemy as sa\n'), ((1338, 1350), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1348, 1350), True, 'import sqlalchemy as sa\n')] |
from datetime import datetime
import logging
from typing import List, Optional
from pydantic import BaseConfig
from sqlmodel import Field, SQLModel, Session
import shortuuid
import random
from faker import Faker
# Line items that would be on a receipt
# Each line item has an id, sku, price, quantity, and transaction_id
class LineItem(SQLModel, table=True):
id: str = Field(default=None, primary_key=True)
sku: str = Field(foreign_key="product.sku")
price: float
quantity: int
transaction_id: int = Field(foreign_key="transaction.id")
# Each transaction has an id, store_id, date, and total
class Transaction(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
store_id: str = Field(foreign_key="store.id")
date: datetime
total: float
# TODO extract real sales data, transform it, and load into db
# for now random data in the correct format will do.
def run(engine):
# Allow arbitary types for Pydantic validation
BaseConfig.arbitrary_types_allowed = True
# Create a fake data generator
fake = Faker()
# Create a session to interact with the database
with Session(engine) as session:
# Get all of the store ids
store_ids = session.exec(f'SELECT id FROM store').fetchall()
logging.debug(store_ids)
# Get all of the products
products = session.exec(f'SELECT * FROM product').fetchall()
logging.debug(p.name for p in products)
# Define a list of transactions and sales
transactions = List(Transaction)
sales = List(LineItem)
# generate 100k random transactions
for i in range(0, 100000):
# lineitems is a temp list to hold the line items for this transaction
lineitems = List(LineItem)
# temp_products is a temp copy of the products list to prevent picking the same product twice in the same transaction
# [:] it to make a copy and not a reference
temp_products = products[:]
# shuffle the temp products list to make it random
random.shuffle(temp_products)
# add a random amount of line items to the transaction (no more than the total aount of products to prevent index out of range)
for j in range(0, random.randint(0, len(products)-1)):
# pick the next product from the temp products list and remove it from the list (pop)
p = temp_products.pop()
# create a new line item with the current transaction id, product p, and a random quantity
lineitems.append(LineItem(transaction_id=i, id=shortuuid.uuid(), price=p.price, quantity=random.randint(1, 10), sku=p.sku))
# add the line items for this transaction to the sales list
sales.extend(lineitems)
# create a new transaction with a random store id (from the list of store ids), date, and total
transactions.append(Transaction(
store_id=random.choice(store_ids)[0],
date=fake.date_time_between(start_date="-1y", end_date="now"),
total=sum(item.price * item.quantity for item in lineitems),
id=i
))
# insert the transactions into the database
session.add_all(transactions)
session.commit()
# insert the sales into the database
session.add_all(sales)
session.commit()
| [
"sqlmodel.Session",
"sqlmodel.Field"
] | [((374, 411), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (379, 411), False, 'from sqlmodel import Field, SQLModel, Session\n'), ((427, 459), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""product.sku"""'}), "(foreign_key='product.sku')\n", (432, 459), False, 'from sqlmodel import Field, SQLModel, Session\n'), ((521, 556), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""transaction.id"""'}), "(foreign_key='transaction.id')\n", (526, 556), False, 'from sqlmodel import Field, SQLModel, Session\n'), ((679, 716), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (684, 716), False, 'from sqlmodel import Field, SQLModel, Session\n'), ((737, 766), 'sqlmodel.Field', 'Field', ([], {'foreign_key': '"""store.id"""'}), "(foreign_key='store.id')\n", (742, 766), False, 'from sqlmodel import Field, SQLModel, Session\n'), ((1082, 1089), 'faker.Faker', 'Faker', ([], {}), '()\n', (1087, 1089), False, 'from faker import Faker\n'), ((1153, 1168), 'sqlmodel.Session', 'Session', (['engine'], {}), '(engine)\n', (1160, 1168), False, 'from sqlmodel import Field, SQLModel, Session\n'), ((1293, 1317), 'logging.debug', 'logging.debug', (['store_ids'], {}), '(store_ids)\n', (1306, 1317), False, 'import logging\n'), ((1429, 1468), 'logging.debug', 'logging.debug', (['(p.name for p in products)'], {}), '(p.name for p in products)\n', (1442, 1468), False, 'import logging\n'), ((1542, 1559), 'typing.List', 'List', (['Transaction'], {}), '(Transaction)\n', (1546, 1559), False, 'from typing import List, Optional\n'), ((1576, 1590), 'typing.List', 'List', (['LineItem'], {}), '(LineItem)\n', (1580, 1590), False, 'from typing import List, Optional\n'), ((1777, 1791), 'typing.List', 'List', (['LineItem'], {}), '(LineItem)\n', (1781, 1791), False, 'from typing import List, Optional\n'), ((2093, 2122), 'random.shuffle', 'random.shuffle', (['temp_products'], {}), '(temp_products)\n', (2107, 2122), False, 'import random\n'), ((2642, 2658), 'shortuuid.uuid', 'shortuuid.uuid', ([], {}), '()\n', (2656, 2658), False, 'import shortuuid\n'), ((2684, 2705), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (2698, 2705), False, 'import random\n'), ((3005, 3029), 'random.choice', 'random.choice', (['store_ids'], {}), '(store_ids)\n', (3018, 3029), False, 'import random\n')] |
"""add messages
Revision ID: d2388da5bbfd
Revises: <PASSWORD>
Create Date: 2022-05-04 21:49:29.234380+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from common.database.tables.types import TimeStamp
# revision identifiers, used by Alembic.
revision = "d2388da5bbfd"
down_revision = "3<PASSWORD>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"messages",
sa.Column(
"created_at",
TimeStamp(),
server_default=sa.func.now(),
nullable=False,
),
sa.Column(
"updated_at",
TimeStamp(),
server_default=sa.func.now(),
nullable=False,
),
sa.Column("sent", sa.Boolean(), nullable=False),
sa.Column("subject", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("content", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"recipients",
sa.Column("message_id", sa.Integer(), nullable=True),
sa.Column(
"group",
sa.Enum(
"EVERYONE",
"APPLICATION_COMPLETE",
"APPLICATION_INCOMPLETE",
"STATUS_ACCEPTED",
"STATUS_DENIED",
"STATUS_PENDING",
name="group",
),
nullable=False,
),
sa.Column("id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(["message_id"], ["messages.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("recipients")
op.drop_table("messages")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((1885, 1912), 'alembic.op.drop_table', 'op.drop_table', (['"""recipients"""'], {}), "('recipients')\n", (1898, 1912), False, 'from alembic import op\n'), ((1917, 1942), 'alembic.op.drop_table', 'op.drop_table', (['"""messages"""'], {}), "('messages')\n", (1930, 1942), False, 'from alembic import op\n'), ((1076, 1105), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1099, 1105), True, 'import sqlalchemy as sa\n'), ((1638, 1714), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['message_id']", "['messages.id']"], {'ondelete': '"""CASCADE"""'}), "(['message_id'], ['messages.id'], ondelete='CASCADE')\n", (1661, 1714), True, 'import sqlalchemy as sa\n'), ((1724, 1753), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1747, 1753), True, 'import sqlalchemy as sa\n'), ((547, 558), 'common.database.tables.types.TimeStamp', 'TimeStamp', ([], {}), '()\n', (556, 558), False, 'from common.database.tables.types import TimeStamp\n'), ((698, 709), 'common.database.tables.types.TimeStamp', 'TimeStamp', ([], {}), '()\n', (707, 709), False, 'from common.database.tables.types import TimeStamp\n'), ((818, 830), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (828, 830), True, 'import sqlalchemy as sa\n'), ((878, 912), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (910, 912), False, 'import sqlmodel\n'), ((960, 994), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (992, 994), False, 'import sqlmodel\n'), ((1037, 1049), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1047, 1049), True, 'import sqlalchemy as sa\n'), ((1188, 1200), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1198, 1200), True, 'import sqlalchemy as sa\n'), ((1270, 1411), 'sqlalchemy.Enum', 'sa.Enum', (['"""EVERYONE"""', '"""APPLICATION_COMPLETE"""', '"""APPLICATION_INCOMPLETE"""', '"""STATUS_ACCEPTED"""', '"""STATUS_DENIED"""', '"""STATUS_PENDING"""'], {'name': '"""group"""'}), "('EVERYONE', 'APPLICATION_COMPLETE', 'APPLICATION_INCOMPLETE',\n 'STATUS_ACCEPTED', 'STATUS_DENIED', 'STATUS_PENDING', name='group')\n", (1277, 1411), True, 'import sqlalchemy as sa\n'), ((1599, 1611), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1609, 1611), True, 'import sqlalchemy as sa\n'), ((587, 600), 'sqlalchemy.func.now', 'sa.func.now', ([], {}), '()\n', (598, 600), True, 'import sqlalchemy as sa\n'), ((738, 751), 'sqlalchemy.func.now', 'sa.func.now', ([], {}), '()\n', (749, 751), True, 'import sqlalchemy as sa\n')] |
"""add events
Revision ID: 02338256c6aa
Revises: 108677b68119
Create Date: 2022-06-01 03:17:51.063172+00:00
"""
import sqlalchemy as sa
import sqlmodel
from alembic import op
from common.database.tables.types import TimeStamp
# revision identifiers, used by Alembic.
revision = "02338256c6aa"
down_revision = "108677b68119"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"events",
sa.Column(
"valid_from",
TimeStamp(timezone=True),
nullable=False,
),
sa.Column(
"valid_until",
TimeStamp(timezone=True),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("code", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("enabled", sa.Boolean(), nullable=False),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("events")
# ### end Alembic commands ###
| [
"sqlmodel.sql.sqltypes.AutoString"
] | [((1176, 1199), 'alembic.op.drop_table', 'op.drop_table', (['"""events"""'], {}), "('events')\n", (1189, 1199), False, 'from alembic import op\n'), ((1015, 1044), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1038, 1044), True, 'import sqlalchemy as sa\n'), ((546, 570), 'common.database.tables.types.TimeStamp', 'TimeStamp', ([], {'timezone': '(True)'}), '(timezone=True)\n', (555, 570), False, 'from common.database.tables.types import TimeStamp\n'), ((669, 693), 'common.database.tables.types.TimeStamp', 'TimeStamp', ([], {'timezone': '(True)'}), '(timezone=True)\n', (678, 693), False, 'from common.database.tables.types import TimeStamp\n'), ((760, 794), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (792, 794), False, 'import sqlmodel\n'), ((839, 873), 'sqlmodel.sql.sqltypes.AutoString', 'sqlmodel.sql.sqltypes.AutoString', ([], {}), '()\n', (871, 873), False, 'import sqlmodel\n'), ((921, 933), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (931, 933), True, 'import sqlalchemy as sa\n'), ((976, 988), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (986, 988), True, 'import sqlalchemy as sa\n')] |
from datetime import datetime
from typing import TYPE_CHECKING, List, Optional
from uuid import UUID, uuid4
from pydantic import EmailStr, constr, validator
from sqlmodel import Column, Field, Relationship, SQLModel
from sqlmodel.sql.sqltypes import GUID
from ...utils.date import now_datetime
if TYPE_CHECKING:
from .order import Order
from .user import User
class BaseClient(SQLModel):
name: str = Field(description="Client name")
email: Optional[EmailStr] = Field(description="Client email", nullable=True)
phone: Optional[constr(regex=r"^\d{2}9\d{8}$")] = Field(description="Client cellphone", nullable=True) # noqa
zip_code: Optional[str] = Field(description="Postal code", nullable=True)
address: Optional[str] = Field(description="Address of Client", nullable=True)
@validator("name")
def validate_name(cls, value: str) -> str:
return value.title()
class CreateClient(BaseClient):
pass
class UpdateClient(BaseClient):
id: UUID = Field(description="Client ID")
class QueryClient(SQLModel):
name: Optional[str] = Field(description="Name of client for query")
class Client(BaseClient, table=True):
__tablename__ = "clients"
id: UUID = Field(default_factory=uuid4, description="Client ID", sa_column=Column("id", GUID(), primary_key=True))
owner_id: UUID = Field(description="User ID that owns the client", foreign_key="users.id")
created_at: datetime = Field(default_factory=now_datetime)
owner: "User" = Relationship()
orders: List["Order"] = Relationship(
back_populates="client",
sa_relationship_kwargs={"cascade": "all,delete", "lazy": "selectin", "passive_deletes": True},
)
| [
"sqlmodel.Field",
"sqlmodel.sql.sqltypes.GUID",
"sqlmodel.Relationship"
] | [((417, 449), 'sqlmodel.Field', 'Field', ([], {'description': '"""Client name"""'}), "(description='Client name')\n", (422, 449), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((482, 530), 'sqlmodel.Field', 'Field', ([], {'description': '"""Client email"""', 'nullable': '(True)'}), "(description='Client email', nullable=True)\n", (487, 530), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((585, 637), 'sqlmodel.Field', 'Field', ([], {'description': '"""Client cellphone"""', 'nullable': '(True)'}), "(description='Client cellphone', nullable=True)\n", (590, 637), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((676, 723), 'sqlmodel.Field', 'Field', ([], {'description': '"""Postal code"""', 'nullable': '(True)'}), "(description='Postal code', nullable=True)\n", (681, 723), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((753, 806), 'sqlmodel.Field', 'Field', ([], {'description': '"""Address of Client"""', 'nullable': '(True)'}), "(description='Address of Client', nullable=True)\n", (758, 806), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((813, 830), 'pydantic.validator', 'validator', (['"""name"""'], {}), "('name')\n", (822, 830), False, 'from pydantic import EmailStr, constr, validator\n'), ((999, 1029), 'sqlmodel.Field', 'Field', ([], {'description': '"""Client ID"""'}), "(description='Client ID')\n", (1004, 1029), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((1087, 1132), 'sqlmodel.Field', 'Field', ([], {'description': '"""Name of client for query"""'}), "(description='Name of client for query')\n", (1092, 1132), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((1344, 1417), 'sqlmodel.Field', 'Field', ([], {'description': '"""User ID that owns the client"""', 'foreign_key': '"""users.id"""'}), "(description='User ID that owns the client', foreign_key='users.id')\n", (1349, 1417), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((1445, 1480), 'sqlmodel.Field', 'Field', ([], {'default_factory': 'now_datetime'}), '(default_factory=now_datetime)\n', (1450, 1480), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((1502, 1516), 'sqlmodel.Relationship', 'Relationship', ([], {}), '()\n', (1514, 1516), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((1545, 1681), 'sqlmodel.Relationship', 'Relationship', ([], {'back_populates': '"""client"""', 'sa_relationship_kwargs': "{'cascade': 'all,delete', 'lazy': 'selectin', 'passive_deletes': True}"}), "(back_populates='client', sa_relationship_kwargs={'cascade':\n 'all,delete', 'lazy': 'selectin', 'passive_deletes': True})\n", (1557, 1681), False, 'from sqlmodel import Column, Field, Relationship, SQLModel\n'), ((551, 582), 'pydantic.constr', 'constr', ([], {'regex': '"""^\\\\d{2}9\\\\d{8}$"""'}), "(regex='^\\\\d{2}9\\\\d{8}$')\n", (557, 582), False, 'from pydantic import EmailStr, constr, validator\n'), ((1296, 1302), 'sqlmodel.sql.sqltypes.GUID', 'GUID', ([], {}), '()\n', (1300, 1302), False, 'from sqlmodel.sql.sqltypes import GUID\n')] |
from typing import Optional, List
from pydantic import BaseModel, validator, ValidationError
from sqlmodel import SQLModel, Field
class User(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
username: str = Field(nullable=False, sa_column_kwargs={"unique": True})
superuser: bool = False
password: str
# Serializers
class ValidatePassword(BaseModel):
@validator('confirm_password', allow_reuse=True, check_fields=False)
def validate_password(cls, v, values, **kwargs):
if v != values['password']:
raise ValueError("Passwords don't match")
return v
class UserIn(ValidatePassword):
username: str
superuser: bool = False
password: str
confirm_password: str
class UserPatch(ValidatePassword):
superuser: Optional[bool]
password: Optional[str]
confirm_password: Optional[str]
class UserOut(BaseModel):
username: str
superuser: bool
UserOutList = List[UserOut]
| [
"sqlmodel.Field"
] | [((190, 227), 'sqlmodel.Field', 'Field', ([], {'default': 'None', 'primary_key': '(True)'}), '(default=None, primary_key=True)\n', (195, 227), False, 'from sqlmodel import SQLModel, Field\n'), ((248, 304), 'sqlmodel.Field', 'Field', ([], {'nullable': '(False)', 'sa_column_kwargs': "{'unique': True}"}), "(nullable=False, sa_column_kwargs={'unique': True})\n", (253, 304), False, 'from sqlmodel import SQLModel, Field\n'), ((408, 475), 'pydantic.validator', 'validator', (['"""confirm_password"""'], {'allow_reuse': '(True)', 'check_fields': '(False)'}), "('confirm_password', allow_reuse=True, check_fields=False)\n", (417, 475), False, 'from pydantic import BaseModel, validator, ValidationError\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.