file_path stringlengths 3 280 | file_language stringclasses 66 values | content stringlengths 1 1.04M | repo_name stringlengths 5 92 | repo_stars int64 0 154k | repo_description stringlengths 0 402 | repo_primary_language stringclasses 108 values | developer_username stringlengths 1 25 | developer_name stringlengths 0 30 | developer_company stringlengths 0 82 |
|---|---|---|---|---|---|---|---|---|---|
authx_extra/__init__.py | Python | """Extra utilities for authx, including session, profiler & caching ✨"""
__version__ = "1.2.0"
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/addons/expiry.py | Python | from typing import Callable, Optional
import pytz
from authx._internal import end_of_day, end_of_week, tz_now, utc
from authx_extra.extra._cache import HTTPCache
class HTTPExpiry:
    """Helpers that compute cache TTLs (in seconds) for HTTPCache entries."""

    @staticmethod
    async def get_ttl(
        ttl_in_seconds: Optional[int] = None,
        # NOTE(review): these two parameter names shadow the imported
        # `end_of_day` / `end_of_week` helpers from authx._internal inside
        # this method; the helpers remain reachable in the other methods.
        end_of_day: bool = True,
        end_of_week: Optional[bool] = None,
        ttl_func: Optional[Callable] = None,
        tz: pytz.timezone = utc,
    ) -> int:
        """Return the seconds till expiry of cache. Defaults to one day.

        Precedence: ttl_func > ttl_in_seconds > end_of_day > end_of_week,
        falling back to a flat 86400 seconds (one day).
        """
        # A timezone configured via HTTPCache.init() wins over the argument.
        tz = HTTPCache.tz or tz
        if ttl_func:
            return await ttl_func()
        if ttl_in_seconds:
            return ttl_in_seconds
        if end_of_day:
            return await HTTPExpiry.expires_end_of_day(tz=tz)
        return await HTTPExpiry.expires_end_of_week(tz=tz) if end_of_week else 86400

    @staticmethod
    async def expires_end_of_week(tz=utc) -> int:
        """Returns the seconds till expiry at the end of the week"""
        now = tz_now()
        local_time = now.astimezone(tz=tz)
        eow = end_of_week(dt=local_time)
        return int((eow - local_time).total_seconds())

    @staticmethod
    async def expires_end_of_day(tz=utc) -> int:
        """Returns the seconds till expiry at the end of the day"""
        now = tz_now()
        local_time = now.astimezone(tz=tz)
        eod = end_of_day(dt=local_time)
        return int((eod - local_time).total_seconds())
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/addons/keys.py | Python | from typing import Any, List, Optional
from authx_extra.extra._cache import HTTPCache
class HTTPKeys:
    """Helpers that turn user-supplied cache keys into namespaced keys."""

    @staticmethod
    async def generate_key(
        key: str,
        config: HTTPCache,
        obj: Optional[Any] = None,
        obj_attr: Optional[str] = None,
    ) -> str:
        """Format *key* with ``getattr(obj, obj_attr)`` when *obj* is given,
        then prefix it with the configured namespace."""
        if obj:
            formatted = key.format(getattr(obj, obj_attr))
        else:
            formatted = key
        return await HTTPKeys.generate_namespaced_key(key=formatted, config=config)

    @staticmethod
    async def generate_keys(
        keys: List[str],
        config: HTTPCache,
        obj: Optional[Any] = None,
        obj_attr: Optional[str] = None,
    ) -> List[str]:
        """Namespace every raw key in *keys*, applying the optional object
        parameter to each one."""
        namespaced: List[str] = []
        for raw in keys:
            namespaced.append(
                await HTTPKeys.generate_key(
                    key=raw, config=config, obj=obj, obj_attr=obj_attr
                )
            )
        return namespaced

    @staticmethod
    async def generate_namespaced_key(key: str, config: HTTPCache) -> str:
        """Prefix *key* with the configured namespace and strip spaces."""
        joined = f"{config.namespace}:{key}"
        return joined.replace(" ", "")
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/cache.py | Python | import json
from functools import wraps
from typing import Any, Callable, List, Optional, Tuple, Union
import redis
from authx._internal import log_error, log_info
from authx_extra.addons.expiry import HTTPExpiry
from authx_extra.addons.keys import HTTPKeys
from authx_extra.extra._cache import HTTPCache
class HTTPCacheBackend:
    """Thin async wrapper around a redis client used by the cache decorators.

    Keys passed in are expected to be fully namespaced already (see
    HTTPKeys); ``namespace`` is kept for reference only.
    """

    def __init__(self, redis: redis.Redis, namespace: Optional[str] = None):
        self.redis = redis
        self.namespace = namespace or HTTPCache.namespace

    async def set(
        self,
        key: str,
        value: str,
        ttl_in_seconds: Optional[int] = None,
        ttl_func: Optional[Callable] = None,
        end_of_day: bool = False,
        end_of_week: bool = False,
    ):
        """Store *value* under *key* with a TTL derived from the expiry options.

        Any previous value is deleted in the same transaction so the TTL
        always restarts from now. Returns the raw pipeline results.
        """
        ttl: int = await HTTPExpiry.get_ttl(
            ttl_in_seconds=ttl_in_seconds,
            end_of_day=end_of_day,
            end_of_week=end_of_week,
            ttl_func=ttl_func,
        )
        # bytes (e.g. a response body) are stored verbatim; everything else
        # is JSON-encoded so `get` can round-trip it with json.loads.
        stringified_value = value if isinstance(value, bytes) else json.dumps(value)
        with self.redis.pipeline(transaction=True) as pipe:
            pipe.multi()
            pipe.delete(key)
            pipe.set(key, stringified_value, ex=ttl)
            result = pipe.execute()
        del_status, set_status = result
        # Fixed: "CacheSet" was previously logged twice — once before the
        # pipeline even executed. Log each outcome exactly once, after the fact.
        if del_status:
            log_info(msg=f"CacheClearedOnSet: {key}")
        if set_status:
            log_info(msg=f"CacheSet: {key}")
        return result

    async def get(self, key: str) -> Tuple[Union[int, None], Union[Any, None]]:
        """Return ``(ttl, value)`` for *key*; ``value`` is None on a miss."""
        with self.redis.pipeline(transaction=True) as pipe:
            pipe.ttl(key).get(key)
            ttl, result = pipe.execute()
        if result:
            original_val = json.loads(result)
            log_info(msg=f"CacheHit: {key}")
        else:
            original_val = None
        return ttl, original_val

    async def invalidate(self, key: str) -> bool:
        """Invalidates the passed key. Returns True when a key was deleted."""
        with self.redis.pipeline(transaction=True) as pipe:
            pipe.multi()
            pipe.delete(key)
            log_info(msg=f"CacheInvalidated: {key}")
            result = pipe.execute()
        # Fixed: the method is annotated ``-> bool`` but used to return the
        # raw pipeline result list; convert the DEL reply to a boolean.
        return bool(result[0])

    async def invalidate_all(self, keys: List) -> List[bool]:
        """Invalidates a collection of keys"""
        with self.redis.pipeline(transaction=True) as pipe:
            pipe.multi()
            for key in keys:
                pipe.delete(key)
                log_info(msg=f"CacheInvalidated: {key}")
            return pipe.execute()
def cache(
    key: str,
    obj: Optional[Any] = None,
    obj_attr: Optional[str] = None,
    ttl_in_seconds: Optional[int] = None,
    expire_end_of_day: bool = True,
    expire_end_of_week: bool = False,
    ttl_func: Optional[Callable] = None,
    namespace: Optional[str] = None,
):
    """Decorator method that sets the return value to cache before returning.

    Args:
        key: Key template; may contain a ``{}`` placeholder filled from
            ``getattr(<obj kwarg>, obj_attr)``.
        obj: Name of the kwarg on the decorated function whose attribute is
            interpolated into *key*.
        obj_attr: Attribute name looked up on that kwarg.
        ttl_in_seconds: Fixed TTL; overrides the end-of-day/week options.
        expire_end_of_day: Expire at local end of day (the default).
        expire_end_of_week: Expire at local end of week.
        ttl_func: Async callable returning a TTL; takes top precedence.
        namespace: Cache namespace; defaults to ``HTTPCache.namespace``.

    Any exception raised by the caching machinery is logged and the wrapped
    function runs uncached, so cache failures never break the endpoint.
    """
    if not namespace:
        namespace = HTTPCache.namespace

    def wrapper(func: Callable):
        @wraps(func)
        async def inner(*args, **kwargs):
            try:
                # extracts the `id` attribute from the `obj_attr` parameter passed to the `@cache` method
                _obj = kwargs.get(f"{obj}")
                _key = await HTTPKeys.generate_key(
                    key=key, config=HTTPCache, obj=_obj, obj_attr=obj_attr
                )
                _cache = HTTPCacheBackend(
                    redis=HTTPCache.redis_client, namespace=namespace
                )
                _request = kwargs.get("request")
                _response = kwargs.get("response")
                # check cache and return if value is present
                ttl, response = await _cache.get(key=_key)
                if response:
                    if _request and _response:
                        # advertise the remaining TTL to HTTP clients
                        _response.headers["Cache-Control"] = f"max-age={ttl}"
                        _response.headers["Cache-Hit"] = "true"
                    return response
                # if not a cache-hit populate current response.
                _computed_response = await func(*args, **kwargs)
                # if http request store the response body data
                _cacheable_response = (
                    _computed_response.body
                    if kwargs.get("request", None)
                    else _computed_response
                )
                await _cache.set(
                    key=_key,
                    value=_cacheable_response,
                    ttl_in_seconds=ttl_in_seconds,
                    ttl_func=ttl_func,
                    end_of_day=expire_end_of_day,
                    end_of_week=expire_end_of_week,
                )
                return _computed_response
            except Exception as e:
                log_error(msg=f"Cache Error: {e}", e=e, method="cache")
                return await func(*args, **kwargs)

        return inner

    return wrapper
def invalidate_cache(
    key: Optional[str] = None,
    keys: Optional[List] = None,
    obj: Optional[Any] = None,
    obj_attr: Optional[str] = None,
    namespace: Optional[str] = None,
):
    """Decorator that invalidates the given cache key(s) before calling the
    wrapped coroutine.

    Args:
        key: A single cache key template to invalidate (wins over *keys*).
        keys: A list of cache key templates to invalidate.
        obj: Name of the kwarg on the wrapped function whose attribute
            *obj_attr* is interpolated into the key template(s).
        obj_attr: Attribute looked up for key formatting.
        namespace: Cache namespace; defaults to ``HTTPCache.namespace``.
    """
    # Fixed: `key` previously had a mutable — and wrongly typed — default of
    # `[]` (`key: str = []`); an omitted key now defaults to None, with the
    # same falsy behaviour for the check below.
    if not namespace:
        namespace = HTTPCache.namespace
    if key:
        keys = [key]

    def wrapper(func: Callable):
        @wraps(func)
        async def inner(*args, **kwargs):
            try:
                # extracts the attribute named by `obj_attr` from the kwarg
                # named by `obj` passed to the wrapped function
                _obj = kwargs.get(f"{obj}")
                _keys = await HTTPKeys.generate_keys(
                    keys=keys, config=HTTPCache, obj=_obj, obj_attr=obj_attr
                )
                _cache = HTTPCacheBackend(
                    redis=HTTPCache.redis_client, namespace=namespace
                )
                await _cache.invalidate_all(keys=_keys)
                return await func(*args, **kwargs)
            except Exception as e:
                log_error(msg=f"Cache Error: {e}", e=e, method="cache")
                return await func(*args, **kwargs)

        return inner

    return wrapper
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/extra/_cache.py | Python | import redis
from authx._internal._logger import log_debug
from authx._internal._utils import utc
from pytz import timezone
class HTTPCache:
    """Global cache configuration, initialised once via :meth:`init`.

    Class-level attributes (set by ``init``):
        redis_url: Connection URL of the backing redis instance.
        namespace: Prefix applied to every cache key.
        tz: Timezone used when computing end-of-day/week expiries.
    """

    redis_url: str
    namespace: str
    tz: timezone

    @classmethod
    def init(
        cls,
        redis_url: str,
        tz: timezone = utc,
        namespace: str = "httpcache",
    ):
        """Configure the cache, open the redis connection and ping it."""
        cls.redis_url = redis_url
        cls.namespace = namespace
        cls.tz = tz
        cls.redis_client = redis.Redis.from_url(redis_url)
        log_debug(msg=f"PING: {cls.redis_client.ping()}", loc=f"{__name__}")
        return cls

    @classmethod
    def __str__(cls):
        # Fixed: the closing ">" of the repr string was missing.
        return f"<HTTPCache redis_url={cls.redis_url}, namespace={cls.namespace} client={cls.redis_client}>"
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/extra/_memory.py | Python | import time
from typing import Any, Optional
class MemoryIO:
    """In-memory implementation of the session-store IO interface.

    Sessions live only for the lifetime of the process.
    """

    # Maps session_id -> {"created_at": <unix ts>, "store": <session dict>}.
    raw_memory_store: dict[str, dict[str, Any]]

    def __init__(self) -> None:
        """Start with an empty session mapping."""
        self.raw_memory_store = {}

    async def has_session_id(self, session_id: str) -> bool:
        """True when *session_id* is known to this store."""
        return session_id in self.raw_memory_store

    async def has_no_session_id(self, session_id: str) -> bool:
        """True when *session_id* is unknown to this store."""
        return session_id not in self.raw_memory_store

    async def create_store(self, session_id: str) -> dict[str, Any]:
        """Create, persist and return a fresh empty store for *session_id*."""
        entry = {
            "created_at": int(time.time()),
            "store": {},
        }
        self.raw_memory_store[session_id] = entry
        await self.save_store(session_id)
        return entry["store"]

    async def get_store(self, session_id: str) -> Optional[dict[str, Any]]:
        """Return the session dict for *session_id*, or None when absent."""
        entry = self.raw_memory_store.get(session_id)
        return entry.get("store") if entry else None

    async def save_store(self, session_id: str) -> None:
        """Persist the store for *session_id* (a no-op for in-memory data)."""
        await self.get_store(session_id)

    async def gc(self) -> None:
        """Trigger cleanup once 100 or more sessions are held."""
        if len(self.raw_memory_store) >= 100:
            await self.cleanup_old_sessions()

    async def cleanup_old_sessions(self) -> None:
        """Drop sessions created more than 12 hours ago."""
        cutoff = int(time.time()) - 3600 * 12
        stale = [
            sid
            for sid, info in self.raw_memory_store.items()
            if info["created_at"] < cutoff
        ]
        for sid in stale:
            del self.raw_memory_store[sid]
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/metrics.py | Python | import functools
import os
import time
import typing
import prometheus_client
from fastapi import FastAPI, Request
from prometheus_client.multiprocess import MultiProcessCollector
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import Response
class MetricsMiddleware(BaseHTTPMiddleware):
    """Metrics middleware collecting prometheus metrics for each request.

    Records a request counter and a request-duration histogram, both
    labelled with (method, path, status).
    """

    def __init__(
        self,
        app: FastAPI,
        prefix: str = "authx_",
        buckets: typing.Tuple[float, ...] = (
            0.002,
            0.05,
            0.1,
            prometheus_client.utils.INF,
        ),
    ) -> None:
        """Initialize a new MetricsMiddleware instance.

        Args:
            app: The ASGI application to wrap.
            prefix: Prefix for the emitted metric names.
            buckets: Histogram bucket upper bounds, in seconds.
        """
        super().__init__(app)
        # Metric factories are lru_cached, so middleware instances with the
        # same prefix share the same collectors.
        self.request_count = request_count(prefix)
        self.request_time = request_time(prefix, buckets)

    async def dispatch(self, request: Request, call_next: typing.Callable):
        """Record request method, path and status when dispatching."""
        method = request.method
        path = request.url.path
        # Default status used when call_next raises before responding.
        status = 500
        begin = time.time()
        try:
            response = await call_next(request)
            status = response.status_code
        finally:
            # track urls w/ params grouped, eg. /items/123 -> /items/{id}
            router = request.scope.get("router")
            endpoint = request.scope.get("endpoint")
            if router and endpoint:
                for route in router.routes:
                    route_app = getattr(route, "app", None)
                    route_endpoint = getattr(route, "endpoint", None)
                    if endpoint in (route_app, route_endpoint):
                        path = route.path
                        break
            end = time.time()
            labels = [method, path, status]
            self.request_count.labels(*labels).inc()
            self.request_time.labels(*labels).observe(end - begin)
        return response
@functools.lru_cache()
def request_count(prefix: str) -> prometheus_client.Counter:
    """Return request count metric for the app prefix (cached/singleton)."""
    metric_name = f"{prefix}requests_total"
    return prometheus_client.Counter(
        metric_name,
        "Total HTTP requests",
        ("method", "path", "status"),
        registry=get_registry(),
    )
@functools.lru_cache()
def request_time(
    prefix: str, buckets: typing.Tuple[float, ...]
) -> prometheus_client.Histogram:
    """Return request time metric for the app prefix (cached/singleton)."""
    metric_name = f"{prefix}request_duration_seconds"
    return prometheus_client.Histogram(
        metric_name,
        "HTTP request duration in seconds",
        ("method", "path", "status"),
        buckets=buckets,
        registry=get_registry(),
    )
@functools.lru_cache()
def get_registry() -> prometheus_client.registry.CollectorRegistry:
    """Get the metrics collector registry (one per process, lru-cached)."""
    registry = prometheus_client.CollectorRegistry()
    # In multi-process deployments (e.g. gunicorn) aggregate worker metrics.
    multiproc_dir = os.environ.get("PROMETHEUS_MULTIPROC_DIR")
    if multiproc_dir is not None:
        MultiProcessCollector(registry)
    return registry
def get_metrics(_: Request) -> Response:
    """Handler exposing the prometheus metrics."""
    payload = prometheus_client.generate_latest(get_registry())
    return Response(payload, media_type=prometheus_client.CONTENT_TYPE_LATEST)
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/oauth2.py | Python | import datetime
import json
import logging
import typing
import urllib.request
import jose.jwt
from authx.exceptions import InvalidToken
from fastapi.requests import HTTPConnection
from fastapi.responses import JSONResponse
from fastapi.websockets import WebSocket
from starlette.types import ASGIApp, Receive, Scope, Send
logger = logging.getLogger(__name__)
def _get_keys(url_or_keys: typing.Union[str, typing.Any]) -> typing.Any:
if not isinstance(url_or_keys, str) or not url_or_keys.startswith("https://"):
return url_or_keys
logger.info("Getting jwk from %s...", url_or_keys)
with urllib.request.urlopen(url_or_keys) as f:
return json.loads(f.read().decode())
def _validate_provider(
provider_name: str, provider: typing.Dict[str, typing.Any]
) -> None:
mandatory_keys = {"issuer", "keys", "audience"}
if not mandatory_keys.issubset(set(provider)):
raise ValueError(
f'Each provider must contain the following keys: {mandatory_keys}. Provider "{provider_name}" is missing {mandatory_keys - set(provider)}.'
)
keys = provider["keys"]
if isinstance(keys, str) and keys.startswith("http://"):
raise ValueError(
f'When "keys" is a url, it must start with "https://". This is not true in the provider "{provider_name}"'
)
class MiddlewareOauth2:
    """ASGI middleware that validates JWT bearer tokens against OIDC providers.

    Tokens are tried against each configured provider in turn; the first
    provider whose keys verify the token wins. Provider JWK sets may be
    given inline or as an https URL and are cached, with an optional
    per-provider refresh interval.
    """

    def __init__(
        self,
        app: ASGIApp,
        providers: typing.Dict[str, typing.Dict[str, typing.Any]],
        public_paths: typing.Optional[typing.Set[str]] = None,
        get_keys: typing.Optional[typing.Callable[[typing.Any], typing.Any]] = None,
        key_refresh_minutes: typing.Optional[
            typing.Union[int, typing.Dict[str, int]]
        ] = None,
    ) -> None:
        self._app = app
        for provider in providers:
            _validate_provider(provider, providers[provider])
        self._providers = providers
        self._get_keys = get_keys or _get_keys
        self._public_paths = public_paths or set()
        # Per-provider key-refresh timeout; None means "never refresh".
        if key_refresh_minutes is None:
            self._timeout = dict.fromkeys(providers)
        elif isinstance(key_refresh_minutes, dict):
            self._timeout = {
                provider: datetime.timedelta(minutes=key_refresh_minutes[provider])
                for provider in providers
            }
        else:
            self._timeout = {
                provider: datetime.timedelta(minutes=key_refresh_minutes)
                for provider in providers
            }
        # cached attribute and respective timeout
        self._last_retrieval: typing.Dict[str, datetime.datetime] = {}
        self._keys: typing.Dict[str, typing.Any] = {}

    def _provider_claims(self, provider: str, token: str) -> typing.Any:
        """Decode and verify *token* with one provider's keys; returns claims."""
        issuer = self._providers[provider]["issuer"]
        audience = self._providers[provider]["audience"]
        logger.debug(
            'Trying to decode token for provider "%s", issuer "%s", audience "%s"...',
            provider,
            issuer,
            audience,
        )
        decoded = jose.jwt.decode(
            token,
            self._provider_keys(provider),
            issuer=issuer,
            audience=audience,
            options={"verify_at_hash": False},
        )
        logger.debug("Token decoded.")
        return decoded

    def claims(self, token: str) -> typing.Tuple[str, typing.Dict[str, str]]:
        """Return ``(provider_name, claims)`` for the first provider that
        validates *token*.

        Raises:
            InvalidToken: carrying a ``provider -> error message`` mapping
                when no provider accepts the token.
        """
        errors: typing.Dict[str, str] = {}
        for provider in self._providers:
            try:
                return provider, self._provider_claims(provider, token)
            except jose.exceptions.ExpiredSignatureError as e:
                # if the token has expired, it is at least from this provider.
                logger.debug("Token has expired.")
                # Fixed: previously `errors = str(e)` replaced the dict with a
                # bare string, so InvalidToken received an inconsistent
                # payload type in the expired case.
                errors[provider] = str(e)
                break
            except jose.exceptions.JWTClaimsError as e:
                logger.debug("Invalid claims")
                errors[provider] = str(e)
            except jose.exceptions.JOSEError as e:  # the catch-all of Jose
                logger.warning(e, exc_info=True)
                errors[provider] = str(e)
        raise InvalidToken(errors)

    @staticmethod
    async def _prepare_error_response(
        message: str, status_code: int, scope: Scope, receive: Receive, send: Send
    ) -> None:
        """Send a JSON error for HTTP scopes; close websockets with 1008."""
        if scope["type"] == "http":
            response = JSONResponse(
                {"message": message},
                status_code=status_code,
            )
            return await response(scope, receive, send)
        else:
            websocket = WebSocket(scope, receive, send)
            return await websocket.close(code=1008)

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        """Authenticate the request, storing claims/provider in the scope."""
        request = HTTPConnection(scope)
        if request.url.path in self._public_paths:
            return await self._app(scope, receive, send)
        # check for authorization header and token on it.
        if "authorization" in request.headers and request.headers[
            "authorization"
        ].startswith("Bearer "):
            token = request.headers["authorization"][len("Bearer ") :]
            try:
                provider, claims = self.claims(token)
                scope["oauth2-claims"] = claims
                scope["oauth2-provider"] = provider
            except InvalidToken as e:
                return await self._prepare_error_response(
                    e.errors, 401, scope, receive, send
                )
        elif "authorization" in request.headers:
            logger.debug('No "Bearer" in authorization header')
            return await self._prepare_error_response(
                'The "authorization" header must start with "Bearer "',
                400,
                scope,
                receive,
                send,
            )
        else:
            logger.debug("No authorization header")
            return await self._prepare_error_response(
                'The request does not contain an "authorization" header',
                400,
                scope,
                receive,
                send,
            )
        return await self._app(scope, receive, send)

    def _should_refresh(self, provider: str) -> bool:
        """Decide whether a provider's cached JWK set must be (re)fetched."""
        if self._keys.get(provider, None) is None:
            # we do not even have the key (first time) => should refresh
            return True
        elif self._timeout[provider] is None:
            # we have a key and no timeout => do not refresh
            return False
        # have the key and have timeout => check if we passed the timeout
        return (
            self._last_retrieval[provider] + self._timeout[provider]
            < datetime.datetime.utcnow()
        )

    def _refresh_keys(self, provider: str) -> None:
        """Fetch and cache the provider's keys, recording retrieval time."""
        self._keys[provider] = self._get_keys(self._providers[provider]["keys"])
        self._last_retrieval[provider] = datetime.datetime.utcnow()

    def _provider_keys(self, provider: str) -> typing.Any:
        """Return the provider's keys, refreshing them when stale."""
        if self._should_refresh(provider):
            self._refresh_keys(provider)
        return self._keys[provider]
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/profiler.py | Python | import codecs
import time
from logging import getLogger
from typing import Optional
from pyinstrument import Profiler, renderers
from starlette.requests import Request
from starlette.routing import Router
from starlette.types import ASGIApp, Message, Receive, Scope, Send
logger = getLogger("profiler")
class ProfilerMiddleware:
    """ASGI middleware that profiles each HTTP request with pyinstrument.

    Per-request results can be printed to stdout; aggregate results can be
    rendered as text/html/json on application shutdown via ``get_result``.
    """

    def __init__(
        self,
        app: ASGIApp,
        *,
        server_app: Optional[Router] = None,
        profiler_interval: float = 0.0001,
        profiler_output_type: str = "text",
        is_print_each_request: bool = True,
        **profiler_kwargs,
    ):
        # server_app is only needed when results should be rendered on
        # shutdown (typically with "html"/"json" output types).
        self.app = app
        self._profiler = Profiler(interval=profiler_interval)
        self._server_app = server_app
        self._output_type = profiler_output_type
        self._print_each_request = is_print_each_request
        self._profiler_kwargs: dict = profiler_kwargs

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        """Profile one ASGI request/response cycle; non-http passes through."""
        # register an event handler for profiler stop
        if self._server_app is not None:
            self._server_app.add_event_handler("shutdown", self.get_result)
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return
        self._profiler.start()
        request = Request(scope, receive=receive)
        method = request.method
        path = request.url.path
        begin = time.perf_counter()
        # Default status code used when the application does not return a valid response
        # or an unhandled exception occurs.
        status_code = 404

        async def wrapped_send(message: Message) -> None:
            # Capture the real status from the ASGI response-start event.
            if message["type"] == "http.response.start":
                nonlocal status_code
                status_code = message["status"]
            await send(message)

        try:
            await self.app(scope, receive, wrapped_send)
        finally:
            if scope["type"] == "http":
                self._profiler.stop()
                end = time.perf_counter()
                if self._print_each_request:
                    print(
                        f"Method: {method}, "
                        f"Path: {path}, "
                        f"Duration: {end - begin}, "
                        f"Status: {status_code}"
                    )
                    print(self._profiler.output_text(**self._profiler_kwargs))

    async def get_result(self):
        """Render the collected profile in the configured output format."""
        if self._output_type == "text":
            print(self._profiler.output_text(**self._profiler_kwargs))
        elif self._output_type == "html":
            html_name = self._profiler_kwargs.get("html_file_name")
            if html_name is None:
                html_name = "authx_profiling_results.html"
            html_code = renderers.HTMLRenderer().render(
                session=self._profiler.last_session
            )
            with codecs.open(html_name, "w", "utf-8") as f:
                f.write(html_code)
        elif self._output_type == "json":
            json_name = self._profiler_kwargs.get("json_file_name")
            if json_name is None:
                json_name = "authx_profiling_results.json"
            json_code = renderers.JSONRenderer().render(
                session=self._profiler.last_session
            )
            with codecs.open(json_name, "w", "utf-8") as f:
                f.write(json_code)
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
authx_extra/session.py | Python | import uuid
from http.cookies import SimpleCookie
from authx._internal import SignatureSerializer
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import Response
from authx_extra.extra._memory import MemoryIO
class SessionIntegration:
    """Thin async accessor around one request's session, exposed on
    ``request.state.<session_object>`` by SessionMiddleware."""

    def __init__(self, store, session_id, session_save):
        # store: the live session dict; session_save: zero-argument callable
        # returning an awaitable that persists the store.
        self.session_store = store
        self.session_id = session_id
        self.session_save = session_save

    async def get_session(self):
        """Get the session store."""
        return self.session_store

    async def clear_session(self):
        """Clear the session store."""
        self.session_store.clear()

    async def get_session_id(self):
        """Get the session ID."""
        return self.session_id

    async def save_session(self):
        """Save the session store."""
        await self.session_save()
class SessionMiddleware(BaseHTTPMiddleware):
    """
    A FastAPI middleware for managing user sessions.

    A signed cookie carries the session id; the session data itself lives
    in the configured store (an in-memory store by default).
    """

    def __init__(
        self,
        app,
        secret_key,
        store=None,
        http_only=True,
        secure=True,
        max_age=0,
        session_cookie="sid",
        session_object="session",
        skip_session_header=None,
        logger=None,
        cookie_path="/",  # Added cookie_path parameter
    ):
        """Configure the middleware.

        Args:
            app: The ASGI application to wrap.
            secret_key: Key used to sign the session cookie.
            store: Session store; a fresh MemoryIO per middleware when omitted.
            http_only: Mark the cookie HttpOnly.
            secure: Mark the cookie Secure.
            max_age: Cookie/signature lifetime in seconds (0 = session cookie).
            session_cookie: Name of the session cookie.
            session_object: Attribute name used on ``request.state``.
            skip_session_header: Header spec(s) that bypass session handling.
            logger: Optional logger exposing ``info``/``debug``.
            cookie_path: Path attribute set on the session cookie.
        """
        super().__init__(app)
        # Fixed: `store=MemoryIO()` as a default argument was evaluated once
        # at import time and shared by every middleware instance; create a
        # fresh store per instance instead.
        if store is None:
            store = MemoryIO()
        self.cookie_path = cookie_path  # Store cookie_path
        self.skip_session_header = skip_session_header
        self.http_only = http_only
        self.max_age = max_age
        self.secure = secure
        self.secret_key = secret_key
        self.session_cookie_name = session_cookie
        self.session_store = store
        self.serializer = SignatureSerializer(self.secret_key, expired_in=self.max_age)
        self.session_object = session_object
        self.logger = logger
        if self.logger is None:

            class ConsoleLogger:
                # No-op logger used when none was supplied. (Parameter
                # renamed from `str`, which shadowed the builtin.)
                def info(self, msg):
                    pass

                def debug(self, msg):
                    pass

            self.logger = ConsoleLogger()
        self.logger.debug(
            f"Session Middleware initialized http_only:{http_only} secure:{secure} "
            f"session_key:'{session_object}' session_cookie_name:{session_cookie} "
            f"store:{store} cookie_path:{cookie_path}"
        )

    def create_session_cookie(self, session_id):
        """
        Create and sign a session cookie.
        Args:
            session_id (str): The session ID.
        Returns:
            SimpleCookie: The signed session cookie.
        """
        session_id_dict_obj = {self.session_cookie_name: session_id}
        signed_session_id = self.serializer.encode(session_id_dict_obj)
        cookie = SimpleCookie()
        cookie[self.session_cookie_name] = signed_session_id
        self.logger.debug(
            f"[session_id:'{session_id}'] Creating new Cookie object... cookie[{self.session_cookie_name}]"
        )
        # Set cookie path
        cookie[self.session_cookie_name]["path"] = self.cookie_path
        self.logger.debug(
            f"[session_id:'{session_id}'] cookie[{self.session_cookie_name}]['path']={self.cookie_path}"
        )
        if self.http_only:
            self.logger.debug(
                f"[session_id:'{session_id}'] cookie[{self.session_cookie_name}]['httponly'] enabled"
            )
            cookie[self.session_cookie_name]["httponly"] = True
        if self.secure:
            self.logger.debug(
                f"[session_id:'{session_id}'] cookie[{self.session_cookie_name}]['secure'] enabled"
            )
            cookie[self.session_cookie_name]["secure"] = True
        if self.max_age > 0:
            self.logger.debug(
                f"[session_id:'{session_id}'] cookie[{self.session_cookie_name}]['maxage']={self.max_age} enabled"
            )
            cookie[self.session_cookie_name]["max-age"] = self.max_age
        return cookie

    def skip_session_header_check(self, request: Request) -> bool:
        """
        Check if session management should be skipped based on the request header.
        Args:
            request (Request): The incoming request.
        Returns:
            bool: True if session management should be skipped, False otherwise.
        """
        skip_header = self.skip_session_header
        if skip_header is None:
            self.logger.debug("Do not use skip_header option.")
            return False
        # A single {header_name, header_value} dict is treated as a list of one.
        if isinstance(skip_header, dict):
            skip_header = [skip_header]
        header_names = []
        for header in skip_header:
            header_name = header.get("header_name")
            header_value = header.get("header_value")
            header_names.append(header_name)
            self.logger.debug(
                f"Use skip_header option. skip_header:'{header_name}':'{header_value}'"
            )
            request_header_value = request.headers.get(header_name)
            self.logger.debug(
                f"Use skip_header option. Checking request header: '{header_name}':'{request_header_value}'"
            )
            # "*" matches any present value; otherwise the value must be equal.
            if (
                header_value == "*" and request_header_value is not None
            ) or request_header_value == header_value:
                self.logger.debug("Use skip_header option. skip_header matched!")
                return True
        self.logger.debug(
            f"Use skip_header option. skip_headers:{header_names} not matched in request headers."
        )
        return False

    async def dispatch(
        self, request: Request, call_next: RequestResponseEndpoint
    ) -> Response:
        """
        Dispatch the request, handling session management.
        Args:
            request (Request): The incoming request.
            call_next (RequestResponseEndpoint): The next request handler.
        Returns:
            Response: The response from the request handler.
        """
        if self.skip_session_header_check(request):
            self.logger.debug("Skip session management.")
            return await call_next(request)
        signed_session_id = request.cookies.get(self.session_cookie_name)
        cookie = None
        if signed_session_id is None:
            self.logger.info("Completely new access with no session cookies")
            cookie = await self.create_new_session_id_and_store(request, cause="new")
        else:
            decoded_dict, err = self.serializer.decode(signed_session_id)
            if decoded_dict is not None:
                self.logger.debug("Cookie signature validation success")
                session_id = decoded_dict.get(self.session_cookie_name)
                session_store = await self.session_store.get_store(session_id)
                if session_store is None:
                    # Valid signature but the store has been cleaned up.
                    self.logger.info(
                        f"[session_id:'{session_id}'] Session cookie available. But no store for this sessionId found. Maybe store had cleaned."
                    )
                    cookie = await self.create_new_session_id_and_store(
                        request, cause="valid_cookie_but_no_store"
                    )
                else:
                    self.logger.info(
                        f"[session_id:'{session_id}'] Session cookie and Store is available! set session_mgr to request.state.{self.session_object}"
                    )
                    setattr(
                        request.state,
                        self.session_object,
                        SessionIntegration(
                            store=session_store,
                            session_id=session_id,
                            session_save=lambda: self.session_store.save_store(
                                session_id
                            ),
                        ),
                    )
                    session_store["__cause__"] = "success"
            else:
                # Bad/expired signature: start a fresh session.
                self.logger.info(
                    f"Session cookies available but verification failed! err:{err}"
                )
                cookie = await self.create_new_session_id_and_store(
                    request, cause=f"renew after {err}"
                )
        response = await call_next(request)
        if cookie is not None:
            cookie_val = cookie.output(header="").strip()
            self.logger.info("Set response header 'Set-Cookie' to signed cookie value")
            response.headers["Set-Cookie"] = cookie_val
        return response

    async def create_new_session_id_and_store(self, request, cause=None):
        """
        Create a new session ID and its corresponding store.
        Args:
            request: The incoming request.
            cause (str): The cause of creating a new session ID.
        Returns:
            SimpleCookie: The signed session cookie.
        """
        session_id = str(uuid.uuid4())
        session_store = await self.session_store.create_store(session_id)
        self.logger.debug(
            f"[session_id:'{session_id}'(NEW)] New session_id and store for session_id created."
        )
        if cause is not None:
            session_store["__cause__"] = cause
        fast_session_obj = SessionIntegration(
            store=session_store,
            session_id=session_id,
            session_save=lambda: self.session_store.save_store(session_id),
        )
        self.logger.info(
            f"[session_id:'{session_id}'(NEW)] Set session_mgr to request.state.{self.session_object} "
        )
        setattr(request.state, self.session_object, fast_session_obj)
        await self.session_store.gc()
        return self.create_session_cookie(session_id)
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/clean.sh | Shell | #!/bin/sh -e
# Remove Python bytecode, coverage data, tool caches and build artifacts
# from the repository tree.
rm -f `find . -type f -name '*.py[co]' `
rm -f `find . -type f -name '*~' `
rm -f `find . -type f -name '.*~' `
rm -f `find . -type f -name .coverage`
rm -f `find . -type f -name ".coverage.*"`
rm -rf `find . -name __pycache__`
rm -rf `find . -type d -name '*.egg-info' `
rm -rf `find . -type d -name 'pip-wheel-metadata' `
rm -rf `find . -type d -name .pytest_cache`
rm -rf `find . -type d -name .ruff_cache`
rm -rf `find . -type d -name .cache`
rm -rf `find . -type d -name .mypy_cache`
rm -rf `find . -type d -name htmlcov`
rm -rf `find . -type d -name "*.egg-info"`
rm -rf `find . -type d -name build`
rm -rf `find . -type d -name dist`
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/docker.sh | Shell | #!/usr/bin/env bash
# Run the test suite against a local Redis container, starting (and later
# stopping) one when it is not already running.
set -e
set -x
echo "ENV=${ENV}"
echo "REDIS_URL=${REDIS_URL}"
export PYTHONPATH=.
# Check if Redis container is already running
if [[ "$(docker inspect -f '{{.State.Running}}' redis 2>/dev/null)" == "true" ]]; then
    echo "Redis container is already running. Running tests again..."
    # Run tests
    pytest --cov=authx_extra --cov-report=html -xv --color=yes --disable-warnings --cov-fail-under=80
else
    # Remove any existing Redis container
    docker rm -f redis || true
    # Start Redis container
    docker run -d -p 6379:6379 --name redis redis
    # Run tests
    pytest --cov=authx_extra --cov-report=html -xv --color=yes --disable-warnings --cov-fail-under=80
    # Shutdown Redis container
    docker stop redis
    docker rm redis
fi
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/format.sh | Shell | #!/usr/bin/env bash
set -e
set -x
# Run every pre-commit hook (formatting & lint) across the whole repository.
pre-commit run --all-files --verbose --show-diff-on-failure
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/lint.sh | Shell | #!/usr/bin/env bash
set -e
set -x
# Static type-check the package with mypy, showing error codes for triage.
mypy --show-error-codes authx_extra
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/requirements.sh | Shell | #!/usr/bin/env bash
uv lock --upgrade
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/extra/test_memory.py | Python | from time import time
import pytest
from authx_extra.extra._memory import MemoryIO
@pytest.fixture
def memory_io():
    """Provide a fresh in-memory session store per test."""
    return MemoryIO()
@pytest.mark.asyncio
async def test_create_store(memory_io):
    """A new store is created empty and registered under the session id."""
    session_id = "123"
    store = await memory_io.create_store(session_id)
    assert store == {}
    assert memory_io.raw_memory_store[session_id]["store"] == {}
@pytest.mark.asyncio
async def test_get_store_existing(memory_io):
    """get_store returns the stored payload for a known session id."""
    sid = "123"
    memory_io.raw_memory_store[sid] = {
        "created_at": int(time()),
        "store": {"key": "value"},
    }
    assert await memory_io.get_store(sid) == {"key": "value"}
@pytest.mark.asyncio
async def test_get_store_nonexistent(memory_io):
    """get_store yields None when the session id was never created."""
    missing = await memory_io.get_store("123")
    assert missing is None
@pytest.mark.asyncio
async def test_save_store(memory_io):
    """save_store leaves the session payload readable afterwards."""
    sid = "123"
    memory_io.raw_memory_store[sid] = {
        "created_at": int(time()),
        "store": {"key": "value"},
    }
    await memory_io.save_store(sid)
    stored = await memory_io.get_store(sid)
    assert stored == {"key": "value"}
@pytest.mark.asyncio
async def test_cleanup_old_sessions(memory_io):
    """Only sessions strictly older than twelve hours are evicted."""
    now = int(time())
    twelve_hours = 3600 * 12
    memory_io.raw_memory_store = {
        "1": {"created_at": now - twelve_hours - 1, "store": {}},
        "2": {"created_at": now - twelve_hours, "store": {}},
        "3": {"created_at": now - twelve_hours + 1, "store": {}},
    }
    await memory_io.cleanup_old_sessions()
    # Session "1" (one second past the threshold) is gone; the boundary
    # session "2" and the newer "3" survive.
    survivors = {
        "2": {"created_at": now - twelve_hours, "store": {}},
        "3": {"created_at": now - twelve_hours + 1, "store": {}},
    }
    assert memory_io.raw_memory_store == survivors
@pytest.mark.asyncio
async def test_has_session_id():
    """has_session_id / has_no_session_id agree after creating a store."""
    memory = MemoryIO()
    await memory.create_store("test-id")
    assert await memory.has_session_id("test-id")
    assert not await memory.has_no_session_id("test-id")
@pytest.mark.asyncio
async def test_get_store():
    """get_store returns the created (empty) store, and None for unknown ids."""
    memory = MemoryIO()
    await memory.create_store("test-id")
    assert await memory.get_store("test-id") == {}
    assert await memory.get_store("nonexistent-id") is None
async def populate_old_sessions(memory_io, count, created_at):
    """Seed *count* sessions (ids "0".."count-1"), all stamped *created_at*.

    This is a helper awaited from tests, not a test itself, so the spurious
    ``@pytest.mark.asyncio`` marker it carried (which pytest-asyncio ignores
    on non-collected functions and some configs warn about) was removed.
    """
    for i in range(count):
        memory_io.raw_memory_store[str(i)] = {
            "created_at": created_at,
            "store": {},
        }
@pytest.mark.asyncio
async def test_gc_cleanup_old_sessions(memory_io):
    # Populate raw_memory_store with 100 sessions created *exactly* 12 hours
    # ago — right on the cleanup threshold boundary, not past it.
    current_time = int(time())
    twelve_hours_ago = current_time - 3600 * 12
    await populate_old_sessions(memory_io, 100, twelve_hours_ago)
    # Add one more session well within 12 hours.
    extra_session_id = "1000"
    memory_io.raw_memory_store[extra_session_id] = {
        "created_at": current_time,
        "store": {},
    }
    # Trigger garbage collection.
    await memory_io.gc()
    # Boundary sessions are kept (eviction is strictly-older-than), so all
    # 100 seeded sessions plus the fresh one survive.
    assert len(memory_io.raw_memory_store) == 101
    assert extra_session_id in memory_io.raw_memory_store
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_cache.py | Python | import os
from datetime import datetime
import redis
from fastapi import FastAPI, Request, Response
from fastapi.responses import JSONResponse
from fastapi.testclient import TestClient
from authx_extra.cache import HTTPCache, cache, invalidate_cache
# Redis DB 2 is used for the cache tests; override via the REDIS_URL env var.
REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/2")
redis_client = redis.from_url(REDIS_URL)
class User:
    # Minimal stand-in for an authenticated user; `id` is interpolated into
    # cache-key templates such as "b.logged_in.{}".
    id: str = "112358"
# Module-level fixtures: one fake user, the app under test, and the HTTP
# cache bound to the "test_namespace" Redis namespace.
user = User()
app = FastAPI()
HTTPCache.init(redis_url=REDIS_URL, namespace="test_namespace")
@app.get("/b/home")
@cache(key="b.home", ttl_in_seconds=180)
async def home(request: Request, response: Response):
    """Cached home page; the timestamp in the payload makes hits visible."""
    payload = {"page": "home", "datetime": str(datetime.utcnow())}
    return JSONResponse(payload)
@app.get("/b/logged-in")
@cache(key="b.logged_in.{}", obj="user", obj_attr="id")
async def logged_in(request: Request, response: Response, user=user):
    """Cached per-user page; the cache key embeds ``user.id``."""
    return JSONResponse(
        {"page": "home", "user": user.id, "datetime": str(datetime.utcnow())}
    )
async def my_ttl_callable():
    """Fixed one-hour TTL, used to exercise the ``ttl_func`` code path."""
    one_hour = 3600
    return one_hour
@app.get("/b/ttl_callable")
@cache(key="b.ttl_callable_expiry", ttl_func=my_ttl_callable)
async def path_with_ttl_callable(request: Request, response: Response):
    """Cached page whose TTL is produced by the async ``my_ttl_callable``."""
    return JSONResponse(
        {"page": "path_with_ttl_callable", "datetime": str(datetime.utcnow())}
    )
@app.post("/b/logged-in")
@invalidate_cache(
    key="b.logged_in.{}", obj="user", obj_attr="id", namespace="test_namespace"
)
async def post_logged_in(request: Request, response: Response, user=user):
    """POST handler that invalidates the per-user "b.logged_in" cache entry."""
    return JSONResponse(
        {"page": "home", "user": user.id, "datetime": str(datetime.utcnow())}
    )
@app.get("/b/profile")
@cache(key="b.profile.{}", obj="user", obj_attr="id")
async def profile(request: Request, response: Response, user=user):
    """Cached per-user profile page.

    Renamed from ``logged_in``: the original definition shadowed the
    /b/logged-in handler of the same name defined earlier in this module
    (flake8 F811). The HTTP route is unchanged.
    """
    return JSONResponse(
        {"page": "profile", "user": user.id, "datetime": str(datetime.utcnow())}
    )
@app.post("/b/invalidate_multiple")
@invalidate_cache(
    keys=["b.logged_in.{}", "b.profile.{}"],
    obj="user",
    obj_attr="id",
    namespace="test_namespace",
)
async def invalidate_multiple(request: Request, response: Response, user=user):
    """POST handler that drops both per-user cache keys in one call."""
    return JSONResponse(
        {"page": "invalidate_multiple", "datetime": str(datetime.utcnow())}
    )
def test_invalidate_multiple():
    """POST /b/invalidate_multiple clears both per-user cache keys at once."""
    client = TestClient(app)
    redis_client.flushdb()
    # Prime both caches; the first helper call's response was previously
    # bound to an unused variable — the call is kept for its side effect.
    extracted_result(client, "/b/logged-in")
    response2 = extracted_result(client, "/b/profile")
    assert response2.headers["Cache-hit"] == "true"
    response3 = client.post(
        "/b/invalidate_multiple",
        headers={
            "Content-Type": "application/json",
            "X-Product-Id": "0fb6a4d4-ae65-4f18-be44-edb9ace6b5bb",
        },
    )
    assert response3.status_code == 200
    # Both namespaced keys must be gone after invalidation.
    assert redis_client.get("test_namespace:b.logged_in.112358") is None
    assert redis_client.get("test_namespace:b.profile.112358") is None
def extracted_result(client, arg):
    """GET *arg* twice and return the second response (expected cache hit)."""
    headers = {
        "Content-Type": "application/json",
        "X-Product-Id": "0fb6a4d4-ae65-4f18-be44-edb9ace6b5bb",
    }
    first = client.get(arg, headers=headers)
    assert first.status_code == 200
    return client.get(arg, headers=headers)
def test_home_cached_response():
    """The second GET /b/home is served from cache (Cache-hit header set)."""
    client = TestClient(app)
    redis_client.flushdb()
    headers = {
        "Content-Type": "application/json",
        "X-Product-Id": "0fb6a4d4-ae65-4f18-be44-edb9ace6b5bb",
    }
    response = client.get("/b/home", headers=headers)
    assert response.status_code == 200
    response = client.get("/b/home", headers=headers)
    assert response.headers["Cache-hit"] == "true"
def test_with_ttl_callable():
    """ttl_func-backed caching stores the entry with a one-hour Redis TTL."""
    import pytest

    client = TestClient(app)
    redis_client.flushdb()
    headers = {
        "Content-Type": "application/json",
        "X-Product-Id": "0fb6a4d4-ae65-4f18-be44-edb9ace6b5bb",
    }
    response = client.get("/b/ttl_callable", headers=headers)
    assert response.status_code == 200
    response = client.get("/b/ttl_callable", headers=headers)
    assert response.headers["Cache-hit"] == "true"
    ttl = redis_client.ttl("test_namespace:b.ttl_callable_expiry")
    assert pytest.approx(ttl, rel=1e-3) == 3600
def test_home_cached_with_current_user():
    """Per-user caching writes a namespaced key containing the user id."""
    client = TestClient(app)
    redis_client.flushdb()
    headers = {
        "Content-Type": "application/json",
        "X-Product-Id": "0fb6a4d4-ae65-4f18-be44-edb9ace6b5bb",
    }
    response = client.get("/b/logged-in", headers=headers)
    assert response.status_code == 200
    response = client.get("/b/logged-in", headers=headers)
    assert response.headers["Cache-hit"] == "true"
    assert response.status_code == 200
    assert redis_client.get("test_namespace:b.logged_in.112358") is not None
def test_cache_invalidation():
    """POSTing to /b/logged-in invalidates the cached GET response."""
    client = TestClient(app)
    redis_client.flushdb()
    headers = {
        "Content-Type": "application/json",
        "X-Product-Id": "0fb6a4d4-ae65-4f18-be44-edb9ace6b5bb",
    }
    response = client.get("/b/logged-in", headers=headers)
    assert response.status_code == 200
    response = client.get("/b/logged-in", headers=headers)
    assert response.headers["Cache-hit"] == "true"
    assert response.status_code == 200
    assert redis_client.get("test_namespace:b.logged_in.112358") is not None
    # Invalidate, then the next GET must be a cache miss (no Cache-hit header).
    client.post("/b/logged-in", headers=headers)
    response = client.get("/b/logged-in", headers=headers)
    assert response.headers.get("Cache-hit", None) is None
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_keys.py | Python | import os
import pytest
import redis
from authx_extra.addons.keys import HTTPKeys
from authx_extra.cache import HTTPCache
# Redis DB 3 is used for the key-generation tests; override via REDIS_URL.
REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/3")
redis_client = redis.Redis.from_url(REDIS_URL)
class TestHTTPKeys:
    """Key-generation helpers must prefix keys with the cache namespace."""

    @pytest.mark.asyncio
    async def test_generate_keys(self):
        """A plain key is namespaced as "<namespace>:<key>"."""
        redis_client.flushdb()
        namespace = "test_namespace"
        HTTPCache.init(redis_url=REDIS_URL, namespace=namespace)
        namespaced_key = await HTTPKeys.generate_key(key="hello", config=HTTPCache)
        assert namespaced_key == f"{namespace}:hello"

    @pytest.mark.asyncio
    async def test_generate_key_with_attr(self):
        """A "{}" placeholder in the key is filled from the object attribute."""
        redis_client.flushdb()

        class User:
            id: str = "112358"

        user = User()
        namespace = "test_namespace"
        HTTPCache.init(redis_url=REDIS_URL, namespace=namespace)
        namespaced_key = await HTTPKeys.generate_key(
            key="hello.{}", config=HTTPCache, obj=user, obj_attr="id"
        )
        assert namespaced_key == f"{namespace}:hello.112358"

    @pytest.mark.asyncio
    async def test_generate_keys_with_attr(self):
        """generate_keys namespaces and fills every key in the list."""
        redis_client.flushdb()

        class User:
            id: str = "112358"

        user = User()
        namespace = "test_namespace"
        HTTPCache.init(redis_url=REDIS_URL, namespace=namespace)
        namespaced_keys = await HTTPKeys.generate_keys(
            keys=["hello.{}", "foo.{}"], config=HTTPCache, obj=user, obj_attr="id"
        )
        # Sort so assertions don't depend on the helper's return order.
        namespaced_keys = sorted(namespaced_keys)
        assert namespaced_keys[1] == f"{namespace}:hello.112358"
        assert namespaced_keys[0] == f"{namespace}:foo.112358"
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_method_cache.py | Python | import os
import pytest
import redis
from authx_extra.cache import HTTPCache, cache
# Redis DB 2 for method-cache tests; the cache namespace is set once here.
REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/2")
redis_client = redis.Redis.from_url(REDIS_URL)
HTTPCache.init(redis_url=REDIS_URL, namespace="test_namespace")
@cache(key="cache.me", ttl_in_seconds=360)
async def cache_me(x: int, invoke_count: int):
    """Cached doubling helper returning ``[x * 2, invoke_count + 1]``.

    On a repeat call the decorator is expected to serve the stored list, so
    the incremented count stays frozen — the tests assert on exactly that.
    """
    invoke_count += 1
    result = x * 2
    return [result, invoke_count]
async def my_ttl_callable():
    """Fixed one-hour TTL used by the ``ttl_func`` cache variant below."""
    one_hour = 3600
    return one_hour
@cache(key="cache.me.ttl_callable", ttl_func=my_ttl_callable)
async def cache_me_with_ttl_callable(x: int, invoke_count: int):
    """Same as ``cache_me``, but the TTL comes from ``my_ttl_callable``."""
    invoke_count += 1
    result = x * 2
    return [result, invoke_count]
@cache(key="cache.me.tz_expire_end_of_day", expire_end_of_day=True)
async def cache_me_with_tz_end_of_day_expiry(x: int, invoke_count: int):
    """Variant cached until end of day.

    NOTE(review): not exercised by any test visible in this file — confirm
    whether a test is missing or this helper is dead code.
    """
    invoke_count += 1
    result = x * 2
    return [result, invoke_count]
class TestMethodCache:
    """Method-level caching via the ``@cache`` decorator."""

    @pytest.mark.asyncio
    async def test_method_caching(self):
        """Two identical calls return the same (cached) result and count."""
        redis_client.flushdb()
        invoke_count = 0
        x = await cache_me(x=22, invoke_count=invoke_count)
        y = await cache_me(x=22, invoke_count=invoke_count)
        assert x[0] == y[0]
        # Equal counts show the second call never executed the body.
        assert x[1] == y[1]

    @pytest.mark.asyncio
    async def test_ttl_callable(self):
        """The ttl_func result (3600s) becomes the Redis TTL of the entry."""
        redis_client.flushdb()
        HTTPCache.init(redis_url=REDIS_URL, namespace="test_namespace")
        await cache_me_with_ttl_callable(x=22, invoke_count=0)
        await cache_me_with_ttl_callable(x=22, invoke_count=0)
        assert (
            pytest.approx(
                redis_client.ttl("test_namespace:cache.me.ttl_callable"), rel=1e-3
            )
            == 3600
        )
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_metrics.py | Python | import pytest
from fastapi import FastAPI, Response
from fastapi.testclient import TestClient
from prometheus_client.parser import text_string_to_metric_families
from authx_extra.metrics import MetricsMiddleware, get_metrics
@pytest.fixture(autouse=True)
def env(monkeypatch):
    """Run tests with 'PROMETHEUS_MULTIPROC_DIR' in the env by default."""
    multiproc_dir = "/tmp"
    monkeypatch.setenv("PROMETHEUS_MULTIPROC_DIR", multiproc_dir)
@pytest.fixture
def client():
    """Return a test client for a simple FastAPI instance with metrics."""
    app = FastAPI()
    app.add_middleware(MetricsMiddleware)
    app.add_route("/metrics", get_metrics)
    # Minimal endpoint so requests have something to hit besides /metrics.
    app.add_route("/foo", lambda _: Response("bar"))
    return TestClient(app)
def test_metrics_keys(client):
    """Exactly the two expected authx_ metric families are exported."""
    metrics = scrape_metrics(client)
    expected = {
        "authx_request_duration_seconds",
        "authx_requests",
    }
    assert set(metrics.keys()) == expected
def test_metrics_values(client):
    """A GET /foo request shows up in the duration histogram's labels."""
    assert client.get("/foo")
    durations = scrape_metrics(client)["authx_request_duration_seconds"]
    assert "method=GET,path=/foo,status=200" in durations
def scrape_metrics(client):
    """GET /metrics and parse it into {family_name: {label_string: value}}."""
    text = client.get("/metrics").text
    metrics = {}
    for family in text_string_to_metric_families(text):
        # Only our middleware's families; skip the stock python_* metrics.
        if not family.name.startswith("authx_"):
            continue
        samples = {}
        for sample in family.samples:
            label = ",".join(f"{k}={v}" for k, v in sorted(sample.labels.items()))
            samples[label] = sample.value
        metrics[family.name] = samples
    return metrics
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_oauth2.py | Python | import datetime
import unittest
import jose.jwt
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.testclient import TestClient
from starlette.websockets import WebSocketDisconnect
from authx_extra.oauth2 import MiddlewareOauth2, _get_keys
def case_1(**kwargs):
    """Build a Starlette app guarded by MiddlewareOauth2 with one "custom"
    provider; return ``(app, signing_key, audience, issuer)``.

    A stub ``get_keys`` returning the symmetric key is injected unless the
    caller supplies one (pass ``get_keys=None`` to use the real fetcher).
    """
    key = "not-secret"
    audience = "audience"
    issuer = "https://example.com/"
    if "get_keys" not in kwargs:

        def _get_keys(path):
            return key

        kwargs["get_keys"] = _get_keys
    app = Starlette()
    app.add_middleware(
        MiddlewareOauth2,
        providers={
            "custom": {
                "keys": "https://example.com/tenant-id/v2.0/jwks",
                "issuer": issuer,
                "audience": audience,
            }
        },
        **kwargs,
    )
    return app, key, audience, issuer
# Expected DEBUG log lines shared by several assertions below.
log_message1 = 'DEBUG:authx_extra.oauth2:Trying to decode token for provider "custom", issuer "https://example.com/", audience "audience"...'
log_message2 = "DEBUG:authx_extra.oauth2:Token has expired."
def case_2(keys):
    """Like ``case_1`` but passes *keys* (e.g. a JWKS dict) directly into the
    provider config instead of a URL plus a ``get_keys`` stub.

    Returns ``(app, audience, issuer)``.
    """
    audience = "audience"
    issuer = "https://example.com/"
    app = Starlette()
    app.add_middleware(
        MiddlewareOauth2,
        providers={
            "custom": {
                "keys": keys,
                "issuer": issuer,
                "audience": audience,
            }
        },
    )
    return app, audience, issuer
def case_3(**kwargs):
    """Like ``case_1`` but records every key fetch so tests can count
    refreshes.

    Returns ``(app, key, audience, issuer, calls_to_get_keys)``; the last
    element is appended to on each ``get_keys`` invocation.
    """
    key = "not-secret"
    calls_to_get_keys = []
    audience = "audience"
    issuer = "https://example.com/"

    def _get_keys(path):
        calls_to_get_keys.append(path)
        return key

    app = Starlette()
    kwargs["get_keys"] = _get_keys
    app.add_middleware(
        MiddlewareOauth2,
        providers={
            "custom": {
                "keys": "https://example.com/tenant-id/v2.0/jwks",
                "issuer": issuer,
                "audience": audience,
            }
        },
        **kwargs,
    )
    return app, key, audience, issuer, calls_to_get_keys
def good_claims(audience: str, issuer: str):
    """Return a JWT claim set that is valid for one hour from now.

    The current time is sampled once so ``exp`` and ``iat`` are exactly
    3600 seconds apart (previously each field called ``utcnow()``
    separately, leaving them microseconds inconsistent).

    NOTE(review): ``datetime.utcnow()`` is deprecated since Python 3.12;
    kept here because switching to an aware datetime would change the
    encoded claim values — confirm before migrating.
    """
    now = datetime.datetime.utcnow()
    return {
        "exp": now + datetime.timedelta(seconds=3600),
        "iat": now,
        "aud": audience,
        "iss": issuer,
    }
class MiddlewareCheck:
    """ASGI middleware that records each request's scope into a shared dict,
    letting tests inspect what the oauth2 middleware added to it."""

    def __init__(self, app, storage):
        self._inner = app
        self._seen = storage

    async def __call__(self, scope, receive, send):
        self._seen["scope"] = scope
        return await self._inner(scope, receive, send)
class TestCase(unittest.TestCase):
    """End-to-end checks of MiddlewareOauth2: header parsing, JWT validation,
    key refreshing, public paths and websocket handling.

    Several tests assert status 404: the Starlette apps have no routes, so a
    request that clears the auth middleware falls through to an empty router.
    """

    def test_no_header(self):
        """Requests without an Authorization header are rejected with 400."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        with self.assertLogs(None, level="DEBUG") as cm:
            response = client.get("/")
        self.assertEqual(
            cm.output,
            [
                "DEBUG:asyncio:Using selector: EpollSelector",
                "DEBUG:authx_extra.oauth2:No authorization header",
                'INFO:httpx:HTTP Request: GET http://testserver/ "HTTP/1.1 400 Bad Request"',
            ],
        )
        self.assertEqual(response.status_code, 400)

    def test_wrong_header(self):
        """A non-Bearer Authorization header is rejected with 400."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        with self.assertLogs(None, level="DEBUG") as cm:
            response = client.get("/", headers={"authorization": "Baa "})
        self.assertEqual(
            cm.output,
            [
                "DEBUG:asyncio:Using selector: EpollSelector",
                'DEBUG:authx_extra.oauth2:No "Bearer" in authorization header',
                'INFO:httpx:HTTP Request: GET http://testserver/ "HTTP/1.1 400 Bad Request"',
            ],
        )
        self.assertEqual(response.status_code, 400)

    def test_all_good(self):
        """A valid token is decoded and the request reaches the router."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), key)
        with self.assertLogs("authx_extra.oauth2", level="DEBUG") as cm:
            response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(
            cm.output, [log_message1, "DEBUG:authx_extra.oauth2:Token decoded."]
        )
        self.assertEqual(response.status_code, 404)

    def test_keys_as_dict(self):
        """Provider keys may be supplied as an inline JWKS-style dict."""
        key = "not-secret"
        keys = {"keys": [key]}
        app, audience, issuer = case_2(keys)
        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), key)
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(response.status_code, 404)

    def test_cors_preflight_request(self):
        """CORS preflight OPTIONS requests pass without an auth header."""
        app, key, audience, issuer = case_1()
        app.add_middleware(
            CORSMiddleware,
            allow_origins=["*"],
            allow_methods=["GET", "POST"],
            allow_headers=["authorization"],
        )
        client = TestClient(app)
        response = client.options(
            "/",
            headers={
                "Access-Control-Request-Method": "GET",
                "Origin": "*",
                "Access-Control-Request-Headers": "Authorization",
            },
        )
        self.assertEqual(response.status_code, 200)

    def test_check_claims(self):
        """Decoded claims and provider name are written into the ASGI scope."""
        app, key, audience, issuer = case_1()
        storage = {}
        app.add_middleware(MiddlewareCheck, storage=storage)
        client = TestClient(app)
        claims = {
            "exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=3600),
            "iat": datetime.datetime.utcnow(),
            "aud": audience,
            "iss": issuer,
            "custom": "a custom claim",
        }
        token = jose.jwt.encode(claims, key)
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(response.status_code, 404)
        self.assertEqual(storage["scope"]["oauth2-claims"], claims)
        self.assertEqual(storage["scope"]["oauth2-provider"], "custom")

    def test_ignore_at_hash(self):
        """Explicitly test that we ignore the ``at_hash`` of the jwt."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        token = jose.jwt.encode(
            good_claims(audience, issuer), key, access_token="test_access_token"
        )
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(response.status_code, 404)

    def test_wrong_key(self):
        """A token signed with the wrong key yields 401 + error payload."""
        app, _, audience, issuer = case_1()
        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), "wrong-key")
        with self.assertLogs("authx_extra.oauth2", level="DEBUG") as cm:
            response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(len(cm.output), 2)
        self.assertEqual(cm.output[0], log_message1)
        self.assertTrue("Signature verification failed" in cm.output[1])
        self.assertEqual(response.status_code, 401)
        self.assertEqual(
            response.json(), {"message": {"custom": "Signature verification failed."}}
        )

    def test_expired(self):
        """An expired token yields 401 with an expiry message."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        token = jose.jwt.encode(
            {
                "exp": datetime.datetime.utcnow() - datetime.timedelta(seconds=1800),
                "iat": datetime.datetime.utcnow() - datetime.timedelta(seconds=3600),
                "aud": audience,
                "iss": issuer,
            },
            key,
        )
        with self.assertLogs("authx_extra.oauth2", level="DEBUG") as cm:
            response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(
            cm.output,
            [
                log_message1,
                log_message2,
            ],
        )
        self.assertEqual(response.status_code, 401)
        self.assertEqual(response.json(), {"message": "Signature has expired."})

    def test_wrong_audience(self):
        """A token for a different audience is rejected with 401."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        token = jose.jwt.encode(
            {
                "exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=3600),
                "iat": datetime.datetime.utcnow(),
                "aud": "wrong-audience",
                "iss": issuer,
            },
            key,
        )
        with self.assertLogs("authx_extra.oauth2", level="DEBUG") as cm:
            response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(
            cm.output,
            [
                log_message1,
                "DEBUG:authx_extra.oauth2:Invalid claims",
            ],
        )
        self.assertEqual(response.status_code, 401)
        self.assertEqual(response.json(), {"message": {"custom": "Invalid audience"}})

    def test_wrong_issuer(self):
        """A token from a different issuer is rejected with 401."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        token = jose.jwt.encode(
            {
                "exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=3600),
                "iat": datetime.datetime.utcnow(),
                "aud": audience,
                "iss": "wrong-issuer",
            },
            key,
        )
        with self.assertLogs("authx_extra.oauth2", level="DEBUG") as cm:
            response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(
            cm.output,
            [
                log_message1,
                "DEBUG:authx_extra.oauth2:Invalid claims",
            ],
        )
        self.assertEqual(response.status_code, 401)
        self.assertEqual(response.json(), {"message": {"custom": "Invalid issuer"}})

    def test_wrong_signature(self):
        """A tampered token (extra trailing char) fails verification."""
        app, key, audience, issuer = case_1()
        client = TestClient(app)
        token = f"{jose.jwt.encode(good_claims(audience, issuer), key)}a"
        with self.assertLogs("authx_extra.oauth2", level="DEBUG") as cm:
            response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(len(cm.output), 2)
        self.assertEqual(cm.output[0], log_message1)
        self.assertTrue("Signature verification failed" in cm.output[1])
        self.assertEqual(response.status_code, 401)
        self.assertEqual(
            response.json(), {"message": {"custom": "Signature verification failed."}}
        )

    def test_public_path(self):
        """Paths listed in public_paths skip authentication entirely."""
        app, key, audience, issuer = case_1(public_paths={"/"})
        client = TestClient(app)
        response = client.get("/")
        self.assertEqual(response.status_code, 404)

    def test_default_get_keys(self):
        # NOTE(review): builds the app with the real key fetcher but asserts
        # nothing — presumably a construction smoke test; confirm intent.
        app, key, audience, issuer = case_1(public_paths={"/"}, get_keys=None)

    def test_wrong_provider(self):
        """A provider config missing "issuer" raises ValueError at init."""
        with self.assertRaises(ValueError) as e:
            MiddlewareOauth2(
                None,
                providers={
                    "custom": {
                        "keys": "https://example.com/tenant-id/v2.0/",
                        "audience": "audience",
                    }
                },
            )
        self.assertIn("\"custom\" is missing {'issuer'}.", str(e.exception))

    def test_get_keys(self):
        # NOTE(review): fetches a live Microsoft JWKS endpoint — this test is
        # network-dependent and will fail offline.
        keys = _get_keys("https://login.microsoftonline.com/common/discovery/v2.0/keys")
        self.assertIn("keys", keys)

    @unittest.skip("This test Fails need a new test case")
    def test_wrong_configuration(self):
        with self.assertRaises(ValueError):
            case_2("http://example.com")  # missing https

    def test_key_timeout_none(self):
        """Without key_refresh_minutes, keys are fetched only once."""
        app, key, audience, issuer, calls_get_keys = case_3()
        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), key)
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(response.status_code, 404)
        self.assertEqual(len(calls_get_keys), 1)

    def test_key_timeout_zero(self):
        """key_refresh_minutes=0 re-fetches keys on every request."""
        app, key, audience, issuer, calls_get_keys = case_3(key_refresh_minutes=0)
        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), key)
        self.callback(client, token, calls_get_keys, 1)
        self.callback(client, token, calls_get_keys, 2)

    def callback(self, client, token, calls_get_keys, additional_calls):
        """Helper: make one request and assert the key-fetch count."""
        result = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(result.status_code, 404)
        self.assertEqual(len(calls_get_keys), additional_calls)
        return result

    def test_key_timeout_two(self):
        """A 2-minute refresh window means no re-fetch within the test run."""
        app, key, audience, issuer, calls_get_keys = case_3(key_refresh_minutes=2)
        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), key)
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(response.status_code, 404)
        self.assertEqual(len(calls_get_keys), 1)

    def test_key_refresh_dict(self):
        """key_refresh_minutes may be a per-provider dict."""
        app, key, audience, issuer, calls_get_keys = case_3(
            key_refresh_minutes={"custom": 0}
        )
        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), key)
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        response = client.get("/", headers={"authorization": f"Bearer {token}"})
        self.assertEqual(response.status_code, 404)
        self.assertEqual(len(calls_get_keys), 2)

    def test_websocket_ok(self):
        """A websocket handshake with a valid token succeeds."""
        app, key, audience, issuer = case_1()

        @app.websocket_route("/ws")
        async def websocket_endpoint(websocket):
            await websocket.accept()
            await websocket.send_text("Hello, world!")
            await websocket.close()

        client = TestClient(app)
        token = jose.jwt.encode(good_claims(audience, issuer), key)
        with client.websocket_connect(
            "/ws", headers={"authorization": f"Bearer {token}"}
        ) as websocket:
            data = websocket.receive_text()
            self.assertEqual(data, "Hello, world!")

    def test_websocket_not_ok(self):
        """A websocket handshake with an invalid token is disconnected."""
        app, key, audience, issuer = case_1()

        @app.websocket_route("/ws")
        async def websocket_endpoint(websocket):
            await websocket.accept()
            await websocket.send_text("Hello, world!")
            await websocket.close()

        client = TestClient(app)
        invalid_key = f"{key}a"
        token = jose.jwt.encode(good_claims(audience, issuer), invalid_key)
        with self.assertRaises(WebSocketDisconnect):
            with client.websocket_connect(
                "/ws", headers={"authorization": f"Bearer {token}"}
            ):
                pass
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_profiler.py | Python | import json
import os
import sys
from io import StringIO
import pytest
from fastapi import FastAPI
from fastapi.responses import JSONResponse
from fastapi.testclient import TestClient
from authx_extra.profiler import ProfilerMiddleware
class ConsoleOutputRedirect:
    """File-like proxy whose underlying stream (``fp``) can be swapped at
    runtime — tests point ``fp`` at a StringIO to capture profiler stdout."""

    def __init__(self, fp):
        self.fp = fp

    def write(self, s):
        self.fp.write(s)

    def writelines(self, lines):
        self.fp.writelines(lines)

    def flush(self):
        self.fp.flush()


# Module-level redirect, initially wrapping the real stdout.
stdout_redirect = ConsoleOutputRedirect(sys.stdout)
@pytest.fixture(name="test_middleware")
def test_middleware():
    """Factory fixture: build a FastAPI app wired with ProfilerMiddleware.

    Keyword args are forwarded to the middleware; for non-"text" output the
    app itself is passed as ``server_app`` (presumably so results can be
    dumped on shutdown — confirm against the middleware implementation).
    """

    def _test_middleware(**profiler_kwargs):
        app = FastAPI()
        if profiler_kwargs.get("profiler_output_type") != "text":
            profiler_kwargs["server_app"] = app
        app.add_middleware(ProfilerMiddleware, **profiler_kwargs)

        @app.get("/test")
        async def normal_request(request):
            return JSONResponse({"retMsg": "Normal Request test Success!"})

        return app

    return _test_middleware
class TestProfilerMiddleware:
    """Exercise ProfilerMiddleware's console, HTML and JSON output modes."""

    @pytest.fixture
    def client(self, test_middleware):
        # Default middleware configuration.
        return TestClient(test_middleware())

    def test_profiler_print_at_console(self, client):
        # Capture stdout via the swappable redirect defined above.
        stdout_redirect.fp = StringIO()
        temp_stdout, sys.stdout = sys.stdout, stdout_redirect
        request_path = "/tests"
        client.get(request_path)
        sys.stdout = temp_stdout
        assert f"Path: {request_path}" in stdout_redirect.fp.getvalue()

    def test_profiler_export_to_html(self, test_middleware):
        full_path = f"{os.getcwd()}/tests/authx_profiling_results.html"
        with TestClient(
            test_middleware(
                profiler_output_type="html",
                is_print_each_request=False,
                html_file_name=full_path,
            )
        ) as client:
            # request
            request_path = "/tests/output"
            client.get(request_path)
        with open(full_path) as f:
            assert "profiler.py" in f.read()

    def test_profiler_export_to_json(self, test_middleware):
        # NOTE(review): writes the JSON report but asserts nothing about it.
        full_path = f"{os.getcwd()}/tests/authx_profiling_results.json"
        with TestClient(
            test_middleware(
                profiler_output_type="json",
                is_print_each_request=False,
                json_file_name=full_path,
            )
        ) as client:
            # request
            request_path = "/tests/output"
            client.get(request_path)

    def test_normal_request(self, client):
        # 422: the endpoint declares an un-annotated `request` parameter,
        # which FastAPI treats as a required query parameter.
        response = client.get("/test")
        assert response.status_code == 422

    def test_profiler_output_text(self, test_middleware):
        stdout_redirect.fp = StringIO()
        temp_stdout, sys.stdout = sys.stdout, stdout_redirect
        with TestClient(
            test_middleware(
                is_print_each_request=True,
                profiler_output_type="text",
            )
        ) as client:
            client.get("/test")
        sys.stdout = temp_stdout
        output_text = stdout_redirect.fp.getvalue()
        assert "Method: GET" in output_text
        assert "Path: /test" in output_text
        assert "Duration: " in output_text

    def test_profiler_output_html(self, test_middleware):
        full_path = f"{os.getcwd()}/tests/authx_profiling_results.html"
        with TestClient(
            test_middleware(
                profiler_output_type="html",
                is_print_each_request=False,
                html_file_name=full_path,
            )
        ) as client:
            client.get("/test")
        with open(full_path) as f:
            html_content = f.read()
        assert "profiler.py" in html_content

    def test_profiler_output_json(self, test_middleware):
        full_path = f"{os.getcwd()}/tests/authx_profiling_results.json"
        with TestClient(
            test_middleware(
                profiler_output_type="json",
                is_print_each_request=False,
                json_file_name=full_path,
            )
        ) as client:
            client.get("/test")
        with open(full_path) as f:
            json_data = json.load(f)
        assert isinstance(json_data, dict)
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_session_middleware.py | Python | import time
from unittest.mock import Mock
import pytest
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import PlainTextResponse, Response
from starlette.routing import Route
from starlette.testclient import TestClient
from authx_extra.extra._memory import MemoryIO
from authx_extra.session import SessionMiddleware
def test_create_session_id_and_store():
    """The first response carries the "sid" session cookie."""

    async def homepage(request):
        return PlainTextResponse("Hello, world!")

    app = Starlette(routes=[Route("/", endpoint=homepage)])
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        http_only=True,
        max_age=3600,
        secure=True,
        session_cookie="sid",
    )
    client = TestClient(app)
    response = client.get("/")
    assert response.status_code == 200
    assert response.text == "Hello, world!"
    assert "sid" in response.cookies
def test_session_counter_increment():
    """The session persists across requests: a counter increments per GET."""

    async def counter_route(request):
        session = await request.state.session.get_session()
        if "test_counter" not in session:
            session["test_counter"] = 0
        session["test_counter"] += 1
        return PlainTextResponse(f"Counter: {session['test_counter']}")

    app = Starlette(routes=[Route("/", endpoint=counter_route)])
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        http_only=True,
        max_age=3600,
        secure=False,
        session_cookie="sid",
    )
    client = TestClient(app)
    for expected in (1, 2, 3):
        response = client.get("/")
        assert response.status_code == 200
        assert f"Counter: {expected}" in response.text
def test_session_cookie_expiry():
    """After max_age elapses the session is discarded and the counter resets."""

    async def counter_route(request):
        session = await request.state.session.get_session()
        if "test_counter" not in session:
            session["test_counter"] = 0
        session["test_counter"] += 1
        return PlainTextResponse(f"Counter: {session['test_counter']}")

    app = Starlette(routes=[Route("/", endpoint=counter_route)])
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        http_only=True,
        max_age=1,
        secure=False,
        session_cookie="sid",
    )
    client = TestClient(app)
    response = client.get("/")
    assert response.status_code == 200
    assert "Counter: 1" in response.text
    # Let the 1-second cookie lapse; the next request must start fresh.
    time.sleep(2)
    response = client.get("/")
    assert response.status_code == 200
    assert "Counter: 1" in response.text
def test_session_cookie_not_persisted_with_secure_option():
    """With secure=True over plain HTTP the cookie is not persisted, so the
    counter never advances past 1.

    Registers the route explicitly instead of via the ``@app.route``
    decorator, which is deprecated (and removed in recent Starlette
    releases) and inconsistent with the sibling tests in this file.
    """

    async def test_route(request):
        session = await request.state.session.get_session()
        if "test_counter" not in session:
            session["test_counter"] = 0
        session["test_counter"] += 1
        return PlainTextResponse(f"Counter: {session['test_counter']}")

    app = Starlette(routes=[Route("/", endpoint=test_route)])
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        http_only=True,
        max_age=3600,
        secure=True,
        session_cookie="sid",
    )
    client = TestClient(app)
    # First request establishes a session server-side...
    response = client.get("/")
    assert response.status_code == 200
    assert "Counter: 1" in response.text
    # ...but the Secure cookie is not replayed over plain HTTP, so the second
    # request starts a fresh session and the counter does not increment.
    response = client.get("/")
    assert response.status_code == 200
    assert "Counter: 1" in response.text
def test_check_httponly_flag_in_cookie():
    """The Set-Cookie header must carry HttpOnly when http_only=True."""

    async def counter_endpoint(request):
        session = await request.state.session.get_session()
        session["test_counter"] = session.get("test_counter", 0) + 1
        return PlainTextResponse(f"Counter: {session['test_counter']}")

    app = Starlette()
    app.add_route("/", counter_endpoint)
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        http_only=True,
        max_age=3600,
        secure=False,
        session_cookie="sid",
    )

    # The very first response sets the cookie; it must be HttpOnly.
    response = TestClient(app).get("/")
    assert "HttpOnly" in response.headers["Set-Cookie"]
def test_check_no_httponly_flag_in_cookie():
    """The Set-Cookie header must NOT carry HttpOnly when http_only=False."""

    async def counter_endpoint(request):
        session = await request.state.session.get_session()
        session["test_counter"] = session.get("test_counter", 0) + 1
        return PlainTextResponse(f"Counter: {session['test_counter']}")

    app = Starlette()
    app.add_route("/", counter_endpoint)
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        http_only=False,
        max_age=3600,
        secure=False,
        session_cookie="sid",
    )

    # The very first response sets the cookie; HttpOnly must be absent.
    response = TestClient(app).get("/")
    assert "HttpOnly" not in response.headers["Set-Cookie"]
@pytest.mark.asyncio
async def test_dispatch_should_skip_session_management_with_skip_header():
    """When the configured skip header is present, dispatch must not attach
    a session to the request state."""
    app = Mock(return_value=Response("OK"))
    middleware = SessionMiddleware(
        app=app,
        secret_key="test",
        skip_session_header={"header_name": "X-ApiTest-Skip", "header_value": "skip"},
    )
    # Raw ASGI headers are lower-cased (name, value) byte pairs.
    headers = [(b"x-apitest-skip", b"skip")]
    request = Request(scope={"type": "http", "headers": headers}, receive=None)

    class MockResponse:
        def __init__(self):
            self.headers = {}

    emulated_response = MockResponse()

    async def call_next(request):
        return emulated_response

    # Leftover debug print removed; the assertion below is the real check.
    await middleware.dispatch(request, call_next)
    assert not hasattr(request.state, "session")
@pytest.mark.asyncio
async def test_dispatch_should_not_skip_session_management_without_skip_header():
    """Without the configured skip header, dispatch must attach a session
    to the request state."""
    app = Mock(return_value=Response("OK"))
    middleware = SessionMiddleware(
        app=app,
        secret_key="test",
        skip_session_header={
            "header_name": "X-FastSession-Skip",
            "header_value": "skip",
        },
    )
    # An unrelated header must not trigger the skip path.
    headers = [(b"ignore", b"ignore")]
    request = Request(scope={"type": "http", "headers": headers}, receive=None)

    class MockResponse:
        def __init__(self):
            self.headers = {}

    emulated_response = MockResponse()

    async def call_next(request):
        return emulated_response

    # Leftover debug print removed; the assertion below is the real check.
    await middleware.dispatch(request, call_next)
    assert hasattr(request.state, "session")
def test_cookie_path_setting():
    """A custom cookie_path must appear as the Set-Cookie Path attribute."""

    async def hello_endpoint(request):
        return PlainTextResponse("Hello, world!")

    custom_path = "/api"
    app = Starlette()
    app.add_route("/", hello_endpoint)
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        cookie_path=custom_path,
        session_cookie="sid",
    )

    response = TestClient(app).get("/")
    assert "sid" in response.cookies
    assert f"Path={custom_path}" in response.headers["Set-Cookie"]
def test_default_cookie_path():
    """When cookie_path is not given, the cookie defaults to Path=/."""

    async def hello_endpoint(request):
        return PlainTextResponse("Hello, world!")

    app = Starlette()
    app.add_route("/", hello_endpoint)
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        session_cookie="sid",
    )

    response = TestClient(app).get("/")
    assert "sid" in response.cookies
    assert "Path=/" in response.headers["Set-Cookie"]
def test_cookie_path_session_persistence():
    """Sessions persist across requests when the cookie path matches the route."""

    async def counter_endpoint(request):
        session = await request.state.session.get_session()
        session["counter"] = session.get("counter", 0) + 1
        return PlainTextResponse(f"Counter: {session['counter']}")

    app = Starlette()
    app.add_route("/api/count", counter_endpoint)
    app.add_middleware(
        SessionMiddleware,
        secret_key="test-secret",
        store=MemoryIO(),
        cookie_path="/api",
        session_cookie="sid",
        secure=False,
    )
    client = TestClient(app)

    first = client.get("/api/count")
    assert first.text == "Counter: 1"
    assert "Path=/api" in first.headers["Set-Cookie"]

    # The cookie path matches the route, so the cookie is sent back and the
    # same session keeps counting.
    second = client.get("/api/count")
    assert second.text == "Counter: 2"
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_skip_session_header_check_dict.py | Python | from unittest.mock import Mock
import pytest
from authx_extra.session import SessionMiddleware
@pytest.fixture
def middleware():
    """SessionMiddleware configured with a single (dict-style) skip header."""
    skip_header = {"header_name": "X-TESTAPI-Skip", "header_value": "skip"}
    return SessionMiddleware(app=None, secret_key="test", skip_session_header=skip_header)
def test_skip_session_header_check_dict_with_skip_header(middleware):
    """Matching name and value -> the skip check reports True."""
    fake_request = Mock(headers={"X-TESTAPI-Skip": "skip"})
    assert middleware.skip_session_header_check(fake_request) is True
def test_skip_session_header_check_dict_without_skip_header(middleware):
    """An unrelated header -> the skip check reports False."""
    fake_request = Mock(headers={"X-Other-Header": "value"})
    assert middleware.skip_session_header_check(fake_request) is False
def test_skip_session_header_check_dict_with_skip_header_and_different_value(
    middleware,
):
    """Matching name but wrong value -> the skip check reports False."""
    fake_request = Mock(headers={"X-TESTAPI-Skip": "other"})
    assert middleware.skip_session_header_check(fake_request) is False
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_skip_session_header_check_list.py | Python | from unittest.mock import Mock
import pytest
from authx_extra.session import SessionMiddleware
@pytest.fixture
def middleware():
    """SessionMiddleware configured with a list of two skip headers."""
    skip_headers = [
        {"header_name": "X-APITEST-Skip", "header_value": "skip"},
        {"header_name": "X-Another-Skip-Header", "header_value": "skip"},
    ]
    return SessionMiddleware(app=None, secret_key="test", skip_session_header=skip_headers)
def test_skip_session_header_check_list_with_multiple_skip_headers(middleware):
    """Either configured header alone is enough to trigger the skip."""
    for header_name in ("X-APITEST-Skip", "X-Another-Skip-Header"):
        request1(header_name, middleware)
def request1(argument, middleware):
    """Assert that a header named *argument* with value 'skip' triggers the skip."""
    fake_request = Mock(headers={argument: "skip"})
    assert middleware.skip_session_header_check(fake_request) is True
def test_skip_session_header_check_list_with_multiple_headers_and_different_values(
    middleware,
):
    """A wrong value on either configured header must not trigger the skip."""
    for header_name in ("X-APITEST-Skip", "X-Another-Skip-Header"):
        request2(header_name, middleware)
def request2(argument, middleware):
    """Assert that a header named *argument* with a non-'skip' value does not skip."""
    fake_request = Mock(headers={argument: "other"})
    assert middleware.skip_session_header_check(fake_request) is False
| yezz123/authx-extra | 7 | Extra utilities for authx, including session, profiler & caching ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
src/api.rs | Rust | use std::collections::HashMap;
/// Error codes of the Docker Registry HTTP API V2.
///
/// Serialized verbatim into the `code` field of [`DockerError`], hence the
/// SCREAMING_SNAKE_CASE variant names (and the `non_camel_case_types` allow).
#[derive(serde::Serialize)]
#[allow(non_camel_case_types)]
pub enum DockerErrorMessageType {
    BLOB_UNKNOWN,
    BLOB_UPLOAD_INVALID,
    BLOB_UPLOAD_UNKNOWN,
    DIGEST_INVALID,
    MANIFEST_BLOB_UNKNOWN,
    MANIFEST_INVALID,
    MANIFEST_UNKNOWN,
    MANIFEST_UNVERIFIED,
    NAME_INVALID,
    NAME_UNKNOWN,
    SIZE_INVALID,
    TAG_INVALID,
    UNAUTHORIZED,
    DENIED,
    UNSUPPORTED,
}
/// A single error entry in a registry error response.
#[derive(serde::Serialize)]
pub struct DockerError {
    // Machine-readable error code.
    pub code: DockerErrorMessageType,
    // Human-readable description of the failure.
    pub message: String,
    // Free-form extra context (may be empty).
    pub detail: HashMap<String, String>,
}
/// Top-level error payload: the registry API wraps all errors in an
/// `errors` array.
#[derive(serde::Serialize)]
pub struct DockerErrorResponse {
    errors: Vec<DockerError>,
}
impl DockerErrorResponse {
pub fn new_simple(code: DockerErrorMessageType, msg: &str) -> Self {
Self {
errors: vec![DockerError {
code,
message: msg.to_string(),
detail: HashMap::new(),
}],
}
}
}
/// Response body of `GET /v2/<name>/tags/list`.
#[derive(serde::Serialize)]
pub struct DockerTagsList {
    // Repository name.
    pub name: String,
    // All tags known for the repository.
    pub tags: Vec<String>,
}
/// Response body of `GET /v2/_catalog`.
#[derive(serde::Serialize)]
pub struct DockerCatalog {
    pub repositories: Vec<String>,
}
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/constants.rs | Rust | /// JWT auth token lifetime
/// JWT auth token lifetime, in seconds.
pub const AUTH_TOKENS_DURATION: u64 = 300;
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/docker.rs | Rust | #[derive(serde::Deserialize, Clone)]
/// Reference to a blob inside a manifest (config or layer entry).
/// Field names mirror the Docker manifest JSON schema, hence the camelCase.
#[allow(non_snake_case)]
pub struct DockerBlobRef {
    pub mediaType: String,
    // Digest in `<alg>:<hex>` form, e.g. `sha256:...`.
    pub digest: String,
    // Blob size; absent in some manifest variants.
    pub size: Option<usize>,
}
/// A single-image manifest (`manifest.v2+json`): one config blob plus its
/// ordered layer blobs.
#[allow(non_snake_case)]
#[derive(serde::Deserialize, Clone)]
pub struct DockerManifest {
    pub schemaVersion: usize,
    pub mediaType: String,
    pub config: DockerBlobRef,
    pub layers: Vec<DockerBlobRef>,
}
/// A manifest list / "fat manifest" (`manifest.list.v2+json`): references
/// to per-platform manifests.
#[allow(non_snake_case)]
#[derive(serde::Deserialize, Clone)]
pub struct DockerManifestList {
    pub schemaVersion: usize,
    pub mediaType: String,
    pub manifests: Vec<DockerBlobRef>,
}
/// Union type used when parsing an uploaded manifest whose concrete kind is
/// not yet known; `config`/`layers` are set for plain manifests, `manifests`
/// for manifest lists. Discriminate via `get_manifest`/`get_manifests_list`.
#[allow(non_snake_case)]
#[derive(serde::Deserialize)]
pub struct DockerManifestOrManifestList {
    pub schemaVersion: usize,
    pub mediaType: String,
    pub config: Option<DockerBlobRef>,
    pub layers: Option<Vec<DockerBlobRef>>,
    pub manifests: Option<Vec<DockerBlobRef>>,
}
impl DockerManifestOrManifestList {
    /// Interpret this payload as a single image manifest, if its media type
    /// and fields match the `manifest.v2+json` schema.
    pub fn get_manifest(&self) -> Option<DockerManifest> {
        if !self
            .mediaType
            .eq("application/vnd.docker.distribution.manifest.v2+json")
        {
            return None;
        }
        match (&self.config, &self.layers) {
            (Some(config), Some(layers)) => Some(DockerManifest {
                schemaVersion: self.schemaVersion,
                mediaType: self.mediaType.to_string(),
                config: config.clone(),
                layers: layers.clone(),
            }),
            _ => None,
        }
    }
    /// Interpret this payload as a manifest list ("fat manifest"), if its
    /// media type and fields match the `manifest.list.v2+json` schema.
    pub fn get_manifests_list(&self) -> Option<DockerManifestList> {
        if !self
            .mediaType
            .eq("application/vnd.docker.distribution.manifest.list.v2+json")
        {
            return None;
        }
        self.manifests.as_ref().map(|manifests| DockerManifestList {
            schemaVersion: self.schemaVersion,
            mediaType: self.mediaType.to_string(),
            manifests: manifests.clone(),
        })
    }
}
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/lib.rs | Rust | pub mod api;
/// Application-wide constants.
pub mod constants;
/// Manifest / manifest-list deserialization types.
pub mod docker;
/// Chunked file-reading `Stream` used to serve blobs.
pub mod read_file_stream;
/// HTTP server, routing and request handlers.
pub mod server;
/// On-disk registry storage layout and garbage collection.
pub mod storage;
/// Small shared helpers (hashing, time, console input).
pub mod utils;
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/main.rs | Rust | use std::io::{Error, ErrorKind};
use std::path::{Path, PathBuf};
use std::process;
use bcrypt::DEFAULT_COST;
use dockerust::server;
use dockerust::server::{Credentials, ServerConfig};
use dockerust::storage::clean_storage;
use dockerust::utils::{rand_str, request_input};
/// Print command-line usage to stderr and terminate with exit code -1.
fn show_usage() {
    let argv: Vec<String> = std::env::args().collect();
    eprintln!("Usage: {} {{init-config|serve|add_user}} [conf_file]", argv[0]);
    process::exit(-1);
}
fn init_config(conf_path: &Path) -> std::io::Result<()> {
if conf_path.exists() {
eprintln!("Configuration file already exists!");
process::exit(-4);
}
let conf = ServerConfig {
storage_path: PathBuf::from(request_input("storage path")?),
listen_address: request_input("listen_address (ex: 127.0.0.1:45654)")?,
access_url: request_input("access_url")?,
app_secret: rand_str(50),
credentials: vec![],
};
std::fs::write(
conf_path,
serde_yaml::to_string(&conf).map_err(|_| Error::new(ErrorKind::Other, "failed to deserialize"))?,
)?;
Ok(())
}
fn add_user(conf_path: &Path) -> std::io::Result<()> {
if !conf_path.exists() {
eprintln!("Configuration file does not exists!");
process::exit(-5);
}
let mut conf: ServerConfig = serde_yaml::from_str(&std::fs::read_to_string(conf_path)?)
.map_err(|_| Error::new(ErrorKind::Other, "failed to deserialize"))?;
conf.credentials.push(Credentials {
user_name: request_input("user name")?,
password_hash: bcrypt::hash(request_input("password")?, DEFAULT_COST)
.map_err(|_| Error::new(ErrorKind::Other, "failed to hash password"))?,
});
std::fs::write(
conf_path,
serde_yaml::to_string(&conf).map_err(|_| Error::new(ErrorKind::Other, "failed to serialize config"))?,
)?;
println!("User added.");
Ok(())
}
/// Entry point: parse the command, load the configuration, clean the
/// storage tree and start the HTTP server.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let args = std::env::args().collect::<Vec<_>>();
    // Exactly one command plus one config path are required.
    if args.len() != 3 {
        show_usage();
    }
    let conf_path: &Path = args[2].as_ref();
    match args[1].as_str() {
        "serve" => { /* Default usage*/ }
        // NOTE(review): init-config and add_user fall through below and
        // start serving afterwards instead of exiting — confirm intended.
        "init-config" => init_config(conf_path)?,
        "add_user" => add_user(conf_path)?,
        _ => show_usage(),
    }
    if !conf_path.exists() {
        eprintln!("Specified configuration file does not exists!");
        process::exit(-2);
    }
    let config: ServerConfig = serde_yaml::from_str(&std::fs::read_to_string(conf_path)?)
        .map_err(|_| Error::new(ErrorKind::Other, "failed to deserialize"))?;
    if !config.storage_path.exists() {
        eprintln!("Specified storage path does not exists!");
        process::exit(-3);
    }
    // Garbage-collect unreferenced data before accepting requests.
    println!("Cleaning storage...");
    clean_storage(&config.storage_path).unwrap();
    println!("Server will start to listen on {}", config.listen_address);
    server::start(config).await
}
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/read_file_stream.rs | Rust | use std::io::Read;
use std::path::Path;
use std::pin::Pin;
use std::task::{Context, Poll};
use actix_web::web::Bytes;
use futures::Stream;
// Maximum number of bytes read per poll.
const CHUNK_SIZE: u64 = 1024 * 1024 * 50; // 50 MB
/// Synchronous, chunked `Stream` over a file's contents, used to serve
/// blobs without loading them entirely into memory.
pub struct ReadFileStream {
    // File size captured at construction time.
    file_size: u64,
    // Bytes emitted so far.
    processed: usize,
    file: std::fs::File,
    // Set after a read error; later polls end the stream immediately.
    error: bool,
}
impl ReadFileStream {
    /// Open `path` for streaming, recording its current size so progress
    /// can be reported via `size_hint`.
    pub fn new(path: &Path) -> std::io::Result<Self> {
        let file_size = path.metadata()?.len();
        let file = std::fs::File::open(path)?;
        Ok(Self {
            file_size,
            processed: 0,
            file,
            error: false,
        })
    }
}
impl Stream for ReadFileStream {
    type Item = actix_web::Result<Bytes>;
    /// Read the next chunk (up to `CHUNK_SIZE` bytes) synchronously.
    ///
    /// Yields `Ready(None)` at end of file, or after the first read error
    /// (the `error` flag makes later polls terminate immediately).
    fn poll_next(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        if self.error {
            return Poll::Ready(None);
        }
        let mut chunk = Vec::with_capacity(CHUNK_SIZE as usize);
        let size = self.file.by_ref().take(CHUNK_SIZE).read_to_end(&mut chunk);
        let size = match size {
            Err(e) => {
                eprintln!("Failed to read from file! {}", e);
                self.error = true;
                return Poll::Ready(Some(Err(actix_web::Error::from(e))));
            }
            Ok(size) => size,
        };
        if size == 0 {
            return Poll::Ready(None);
        }
        self.processed += size;
        Poll::Ready(Some(Ok(Bytes::from(chunk))))
    }
    /// `Stream::size_hint` counts remaining *items* (chunks), not bytes.
    /// Previously the remaining byte count was returned as the lower bound,
    /// overestimating the item count by up to `CHUNK_SIZE`; report the chunk
    /// count instead, with a saturating subtraction so a file that shrank
    /// after construction cannot underflow.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining_bytes = (self.file_size as usize).saturating_sub(self.processed);
        let chunk = CHUNK_SIZE as usize;
        let remaining_chunks = (remaining_bytes + chunk - 1) / chunk;
        (remaining_chunks, Some(remaining_chunks))
    }
}
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/server.rs | Rust | use actix_web::body::SizedStream;
use actix_web::http::Method;
use actix_web::web::Data;
use actix_web::{web, App, HttpRequest, HttpResponse, HttpResponseBuilder, HttpServer};
use base64::{engine::general_purpose as b64decoder, Engine as _};
use futures::StreamExt;
use jsonwebtoken::{encode, Validation};
use regex::Regex;
use std::cmp::min;
use std::collections::HashSet;
use std::error::Error;
use std::fs::OpenOptions;
use std::io::{ErrorKind, Write};
use std::path::PathBuf;
use std::str::FromStr;
use uuid::Uuid;
use crate::api::{DockerCatalog, DockerErrorMessageType, DockerErrorResponse, DockerTagsList};
use crate::constants::AUTH_TOKENS_DURATION;
use crate::docker::DockerManifestOrManifestList;
use crate::read_file_stream::ReadFileStream;
use crate::storage::{clean_storage, get_docker_images_list, BlobReference, DockerImage};
use crate::utils::{create_empty_file, sha256sum, sha256sum_str, time};
/// A single username plus bcrypt password hash pair.
#[derive(Clone, serde::Deserialize, serde::Serialize)]
pub struct Credentials {
    pub user_name: String,
    // bcrypt hash — never the plain password.
    pub password_hash: String,
}
/// Server configuration, loaded from / saved to the YAML config file.
#[derive(Clone, serde::Deserialize, serde::Serialize)]
pub struct ServerConfig {
    // Root directory of the registry storage tree.
    pub storage_path: PathBuf,
    // Address to bind, e.g. `127.0.0.1:45654`.
    pub listen_address: String,
    // Public base URL, used to build Location headers and the auth realm.
    pub access_url: String,
    // Secret used to sign JWT auth tokens.
    pub app_secret: String,
    // Allowed users; an empty list disables authentication entirely.
    pub credentials: Vec<Credentials>,
}
/// Body returned on failed Basic authentication at `/token`.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct InvalidAuthResponse {
    details: &'static str,
}
/// Successful `/token` response; `token` and `access_token` carry the
/// same JWT.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct AuthResponse {
    token: String,
    access_token: String,
    // Token lifetime in seconds.
    expires_in: u64,
}
/// Claims embedded in issued JWTs.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct JWTClaims {
    // Authenticated user name; `None` for anonymous (read-only) tokens.
    user: Option<String>,
    // Absolute expiry timestamp (same unit as `utils::time`, presumably
    // unix seconds) — checked manually in `check_auth`, not via `exp`.
    timeout: u64,
}
impl ServerConfig {
    /// Authentication is enabled whenever at least one credential exists.
    pub fn need_auth(&self) -> bool {
        !self.credentials.is_empty()
    }
    fn get_encoding_secret(&self) -> jsonwebtoken::EncodingKey {
        jsonwebtoken::EncodingKey::from_secret(self.app_secret.as_ref())
    }
    fn get_decoding_secret(&self) -> jsonwebtoken::DecodingKey {
        jsonwebtoken::DecodingKey::from_secret(self.app_secret.as_ref())
    }
    fn jwt_algorithm(&self) -> jsonwebtoken::Algorithm {
        jsonwebtoken::Algorithm::HS512
    }
    /// Validation settings for incoming tokens: expiry is verified manually
    /// in `check_auth`, so built-in `exp` validation is disabled and no
    /// spec claims are required.
    fn get_auth_validation_algorithm(&self) -> jsonwebtoken::Validation {
        let mut validation = Validation::new(self.jwt_algorithm());
        validation.validate_exp = false;
        validation.required_spec_claims = HashSet::new();
        validation
    }
    /// True when `user`/`password` match one of the configured credentials.
    pub fn check_auth(&self, user: &str, password: &str) -> bool {
        self.credentials
            .iter()
            .any(|cred| cred.user_name.eq(user) && bcrypt::verify(password, &cred.password_hash).unwrap_or(false))
    }
}
/// Unwrap a handler result, mapping any error to a logged 500 response.
fn ok_or_internal_error<E>(r: Result<HttpResponse, E>) -> HttpResponse
where
    E: Error,
{
    r.unwrap_or_else(|err| {
        println!("Error! {}", err);
        HttpResponse::InternalServerError().body("500 Internal Server Error")
    })
}
/// Build the 401 response carrying the `WWW-Authenticate: Bearer` challenge
/// that points Docker clients at the `/token` endpoint; `error` adds the
/// optional `error="..."` parameter to the challenge.
fn request_auth(conf: &ServerConfig, error: Option<&'static str>) -> HttpResponse {
    let realm = format!("{}/token", conf.access_url);
    let service = conf.access_url.split("://").last().unwrap_or("dockerust");
    let complement = error.map(|e| format!(",error=\"{}\"", e)).unwrap_or_default();
    let challenge = format!(
        "Bearer realm=\"{}\",service=\"{}\",scope=\"access\"{}",
        realm, service, complement
    );
    HttpResponse::Unauthorized()
        .insert_header(("WWW-Authenticate", challenge))
        .json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::UNAUTHORIZED,
            "please authenticate",
        ))
}
/// Validate the `Authorization: Bearer <jwt>` header of `req`.
///
/// On success returns `None` and stores the authenticated user name in
/// `user` ("anonymous" when the server has no credentials configured, i.e.
/// auth is disabled). On failure returns the 401 challenge response to
/// send back to the client.
fn check_auth(req: &HttpRequest, conf: &ServerConfig, user: &mut Option<String>) -> Option<HttpResponse> {
    // Auth disabled: everyone is treated as a logged-in anonymous user.
    if !conf.need_auth() {
        *user = Some("anonymous".to_string());
        return None;
    }
    // Extract the raw JWT by stripping the "Bearer " prefix.
    let auth_part: String = req
        .headers()
        .get("authorization")
        .map(|s| s.to_str().unwrap_or(""))
        .unwrap_or("")
        .to_string()
        .replace("Bearer ", "");
    if auth_part.is_empty() {
        return Some(request_auth(conf, None));
    }
    let token = jsonwebtoken::decode::<JWTClaims>(
        &auth_part,
        &conf.get_decoding_secret(),
        &conf.get_auth_validation_algorithm(),
    );
    let token = match token {
        Ok(s) => s,
        Err(e) => {
            eprintln!("Failed to decode JWT token: {}", e);
            return Some(request_auth(conf, None));
        }
    };
    // Expiry is checked manually here (validate_exp is disabled in the
    // Validation settings); `timeout` is the absolute expiry timestamp.
    if token.claims.timeout < time() {
        return Some(request_auth(conf, Some("invalid_token")));
    }
    // A token without a `user` claim is valid but anonymous: `user` stays
    // `None`, which write endpoints later reject via
    // `insufficient_authorizations`.
    if let Some(id) = token.claims.user {
        *user = Some(id);
    }
    None
}
/// 401 sent when a valid but anonymous token attempts a write operation.
fn insufficient_authorizations(conf: &ServerConfig) -> HttpResponse {
    request_auth(conf, Some("insufficient_scope"))
}
/// `/token` endpoint: issue a short-lived JWT for the Docker client.
///
/// With a valid `Authorization: Basic` header the token carries the user
/// name; with no header at all an anonymous token is issued. Bad
/// credentials yield a 401 with a `Basic` challenge.
async fn get_auth_token(config: web::Data<ServerConfig>, r: HttpRequest) -> HttpResponse {
    ok_or_internal_error::<std::io::Error>((move || {
        let mut user = None;
        // Strip the "Basic " prefix to get the base64 "user:password" pair.
        let auth_part: String = r
            .headers()
            .get("authorization")
            .map(|s| s.to_str().unwrap_or(""))
            .unwrap_or("")
            .to_string()
            .replace("Basic ", "");
        if !auth_part.is_empty() {
            let decoded = b64decoder::STANDARD.decode(auth_part).unwrap_or_default();
            let decoded = String::from_utf8_lossy(&decoded);
            // splitn(2, ':') keeps any ':' inside the password intact.
            let split: Vec<&str> = decoded.splitn(2, ':').collect();
            let username = split.first().unwrap_or(&"");
            let password = split.get(1).unwrap_or(&"");
            if config.check_auth(username, password) {
                user = Some(username.to_string());
            } else {
                return Ok(HttpResponse::Unauthorized()
                    .insert_header(("www-authenticate", "Basic realm=\"dockerust\""))
                    .json(InvalidAuthResponse {
                        details: "incorrect username or password",
                    }));
            }
        }
        let claim = JWTClaims {
            user,
            timeout: time() + AUTH_TOKENS_DURATION,
        };
        let token = encode(
            &jsonwebtoken::Header::new(config.jwt_algorithm()),
            &claim,
            &config.get_encoding_secret(),
        )
        .map_err(|_| std::io::Error::new(ErrorKind::Other, "failed to encode token"))?;
        // Both response fields carry the same JWT.
        Ok(HttpResponse::Ok().json(AuthResponse {
            access_token: token.to_string(),
            token,
            expires_in: AUTH_TOKENS_DURATION,
        }))
    })())
}
/// Fallback handler for unknown routes.
async fn not_found() -> HttpResponse {
    HttpResponse::NotFound().body("404 Not Found")
}
/// `GET /v2/` API version check: authenticated clients get an empty 200.
async fn base(config: web::Data<ServerConfig>, r: HttpRequest) -> HttpResponse {
    let mut user = None;
    match check_auth(&r, &config, &mut user) {
        Some(denied) => denied,
        None => HttpResponse::Ok().finish(),
    }
}
/// Query parameters for `/v2/_catalog` pagination.
#[derive(serde::Deserialize)]
struct CatalogRequest {
    // Maximum number of repositories to return.
    n: Option<usize>,
    // Last repository of the previous page; results resume after it.
    last: Option<String>,
}
/// `GET /v2/_catalog`: list repositories, with optional pagination via
/// `n` (page size) and `last` (final repository of the previous page).
async fn catalog(req: web::Query<CatalogRequest>, conf: web::Data<ServerConfig>) -> HttpResponse {
    let images = match get_docker_images_list(&conf.storage_path) {
        Ok(images) => images,
        Err(e) => {
            eprintln!("Failed to get the list of images! {:?}", e);
            return HttpResponse::InternalServerError().json("500 Internal Error");
        }
    };
    if images.is_empty() {
        return HttpResponse::Ok().json(DockerCatalog { repositories: vec![] });
    }
    // Resume after `last` when provided; an unknown value restarts from 0.
    let start = match &req.last {
        None => 0,
        Some(s) => images.iter().position(|f| f.eq(s)).map(|f| f + 1).unwrap_or(0),
    };
    let end = start + req.n.unwrap_or(images.len() + 1);
    // Clamp `start` to `images.len()` (not `len() - 1`): when `last` was the
    // final repository, `start == len` and the page must be empty instead of
    // repeating the last entry (previous behavior was a duplicate page).
    let start = min(start, images.len());
    let end = min(images.len(), end);
    HttpResponse::Ok().json(DockerCatalog {
        repositories: images[start..end].to_vec(),
    })
}
/// Build the `/v2/<name>/tags/list` response for `image`, or 404
/// (NAME_UNKNOWN) when the repository does not exist on disk.
fn get_tags_list(image: &DockerImage) -> std::io::Result<HttpResponse> {
    if image.image_path().exists() {
        let tags = image.tags_list()?;
        Ok(HttpResponse::Ok().json(DockerTagsList {
            name: image.image.to_string(),
            tags,
        }))
    } else {
        Ok(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::NAME_UNKNOWN,
            "repository name not known to registry",
        )))
    }
}
/// Stream a stored blob to the client with digest/etag headers, or 404
/// (BLOB_UNKNOWN) when the blob file is missing on disk.
async fn serve_blob(
    blob_ref: &BlobReference,
    image: &DockerImage,
    content_type: &str,
) -> std::io::Result<HttpResponse> {
    let blob_path = blob_ref.data_path(&image.storage_path);
    if !blob_path.exists() {
        return Ok(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::BLOB_UNKNOWN,
            "blob not found",
        )));
    }
    let blob_len = blob_path.metadata()?.len();
    let mut response = HttpResponse::Ok();
    response
        .content_type(content_type)
        .insert_header(("Docker-Content-Digest", blob_ref.to_digest()))
        .insert_header(("Etag", blob_ref.to_digest()));
    // SizedStream provides the Content-Length; ReadFileStream reads the
    // file in chunks so large blobs are never fully held in memory.
    Ok(response.body(SizedStream::new(blob_len, ReadFileStream::new(&blob_path)?)))
}
/// `GET`/`HEAD /v2/<name>/manifests/<reference>`.
///
/// `image_ref` may be a digest (`sha256:...`) or a tag; tags are resolved
/// through their link file, then the manifest blob is served with its own
/// media type (plain manifest or manifest list).
async fn get_manifest(image: &DockerImage, image_ref: &str) -> std::io::Result<HttpResponse> {
    // Requested hash is included in the request
    let blob_ref = if image_ref.starts_with("sha256") {
        BlobReference::from_str(image_ref)?
    }
    // We must find ourselves the blob to load
    else {
        let manifest_path = image.manifest_tag_link_path(image_ref);
        if !manifest_path.exists() {
            return Ok(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
                DockerErrorMessageType::MANIFEST_UNKNOWN,
                "manifest unknown",
            )));
        }
        BlobReference::from_file(&manifest_path)?
    };
    // Refuse digests that exist as blobs but are not registered as
    // manifest revisions of this particular image.
    if !image.manifests_revision_list()?.contains(&blob_ref) {
        return Ok(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::MANIFEST_BLOB_UNKNOWN,
            "manifest blob not attached to manifest",
        )));
    }
    // Load manifest to get its type
    let manifest: DockerManifestOrManifestList =
        serde_json::from_str(&std::fs::read_to_string(blob_ref.data_path(&image.storage_path))?)?;
    serve_blob(&blob_ref, image, &manifest.mediaType).await
}
/// `PUT /v2/<name>/manifests/<reference>`: store the manifest blob
/// (content-addressed by its sha256) and link it to the image, returning
/// the canonical digest in the response headers.
async fn put_manifest(
    image: &DockerImage,
    image_ref: &str,
    mut payload: web::Payload,
    conf: &ServerConfig,
) -> std::io::Result<HttpResponse> {
    // Get manifest data
    let mut bytes = web::BytesMut::new();
    while let Some(item) = payload.next().await {
        bytes.extend_from_slice(
            &item.map_err(|_| std::io::Error::new(ErrorKind::Other, "Failed to read a chunk of data"))?,
        );
    }
    let manifest = String::from_utf8(bytes.as_ref().to_vec())
        .map_err(|_| std::io::Error::new(ErrorKind::Other, "Failed to turn the manifest into a string"))?;
    // The manifest is addressed by the sha256 of its raw bytes.
    let blob_ref = BlobReference::from_sha256sum(sha256sum_str(&manifest)?);
    // Write manifest
    let blob_path = blob_ref.data_path(&conf.storage_path);
    create_empty_file(&blob_path)?;
    std::fs::write(blob_path, manifest)?;
    // Write references to manifest
    let mut list = vec![image.manifest_revision_path(&blob_ref)];
    // Add a tag only if it is not a valid digest
    if !BlobReference::is_valid_reference(image_ref) {
        list.push(image.manifest_tag_link_path(image_ref));
    }
    for manifest_path in list {
        create_empty_file(&manifest_path)?;
        std::fs::write(manifest_path, blob_ref.to_digest())?;
    }
    let location = format!(
        "{}/v2/{}/manifests/{}",
        conf.access_url,
        image.image,
        blob_ref.to_digest()
    );
    Ok(HttpResponse::Created()
        .insert_header(("Docker-Content-Digest", blob_ref.to_digest()))
        .insert_header(("location", location))
        .finish())
}
/// `DELETE /v2/<name>/manifests/<digest>`: unlink a manifest revision,
/// drop the tags pointing at it, then garbage-collect orphaned data.
async fn delete_manifest(image: &DockerImage, digest: &str, conf: &ServerConfig) -> std::io::Result<HttpResponse> {
    let blob = BlobReference::from_str(digest)?;
    if !image.manifests_revision_list()?.contains(&blob) {
        return Ok(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::MANIFEST_BLOB_UNKNOWN,
            "manifest blob not attached to manifest",
        )));
    }
    // Remove tags
    for tag in image.get_tags_attached_to_manifest_blob(&blob)? {
        std::fs::remove_dir_all(image.tags_path().join(tag))?;
    }
    // Remove reference
    std::fs::remove_file(image.manifest_revision_path(&blob))?;
    // Run garbage collector
    clean_storage(&conf.storage_path)?;
    Ok(HttpResponse::Accepted().finish())
}
/// `GET`/`HEAD /v2/<name>/blobs/<digest>`: serve a blob as raw octets.
async fn get_blob(image: &DockerImage, digest: &str) -> std::io::Result<HttpResponse> {
    let blob_ref = BlobReference::from_str(digest)?;
    serve_blob(&blob_ref, image, "application/octet-stream").await
}
/// Blobs cannot be deleted directly: they are removed by the garbage
/// collector (`clean_storage`) once no manifest references them.
async fn delete_blob(_image: &DockerImage, _digest: &str) -> std::io::Result<HttpResponse> {
    Ok(HttpResponse::MethodNotAllowed().json(DockerErrorResponse::new_simple(
        DockerErrorMessageType::UNSUPPORTED,
        "blobs are automatically garbage collected",
    )))
}
/// Build the standard response for blob-upload endpoints: the upload
/// session Location, its UUID, and the inclusive byte `Range` received.
fn blob_upload_response(
    mut res: HttpResponseBuilder,
    image: &DockerImage,
    uuid: &str,
    config: &ServerConfig,
) -> std::io::Result<HttpResponse> {
    let location = format!("{}/v2/{}/blobs/uploads/{}", config.access_url, &image.image, uuid);
    // Range is inclusive, so report the index of the last received byte.
    // NOTE(review): an empty upload reports "0-0", same as a 1-byte one —
    // confirm clients tolerate this.
    let offset = match std::fs::metadata(image.upload_storage_path(uuid))?.len() {
        0 => 0,
        s => s - 1,
    };
    Ok(res
        .insert_header(("Range", format!("0-{}", offset)))
        .insert_header(("Location", location))
        .insert_header(("Docker-Upload-Uuid", uuid))
        .finish())
}
/// `POST /v2/<name>/blobs/uploads/`: open a fresh upload session backed by
/// an empty temporary file named after a new UUID.
async fn start_blob_upload(image: &DockerImage, config: &ServerConfig) -> std::io::Result<HttpResponse> {
    let uuid = Uuid::new_v4().to_string();
    create_empty_file(&image.upload_storage_path(&uuid))?;
    blob_upload_response(HttpResponse::Accepted(), image, &uuid, config)
}
/// `GET` on an upload session: report current progress, or 404
/// (BLOB_UNKNOWN) when the session does not exist.
fn blob_upload_status(image: &DockerImage, uuid: &str, config: &ServerConfig) -> std::io::Result<HttpResponse> {
    if image.upload_storage_path(uuid).exists() {
        blob_upload_response(HttpResponse::NoContent(), image, uuid, config)
    } else {
        Ok(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::BLOB_UNKNOWN,
            "blob unknown",
        )))
    }
}
/// Append the request payload to the upload session file `uuid`.
///
/// Returns `Ok(Some(response))` with an error response to send as-is
/// (unknown session, failed chunk read), or `Ok(None)` on success.
async fn process_blob_upload(
    image: &DockerImage,
    uuid: &str,
    mut payload: web::Payload,
) -> std::io::Result<Option<HttpResponse>> {
    let payload_path = image.upload_storage_path(uuid);
    if !payload_path.exists() {
        return Ok(Some(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::BLOB_UNKNOWN,
            "blob unknown",
        ))));
    }
    // Open file
    let mut file = OpenOptions::new().append(true).open(image.upload_storage_path(uuid))?;
    while let Some(chunk) = payload.next().await {
        match chunk {
            Ok(c) => {
                file.write_all(&c)?;
            }
            Err(e) => {
                eprintln!("Failed to read from blob upload request! {:?}", e);
                return Ok(Some(
                    HttpResponse::InternalServerError().json("500 Internal Server Error"),
                ));
            }
        }
    }
    // Make sure everything hit the file before the caller inspects it.
    file.flush()?;
    drop(file);
    Ok(None)
}
/// `PATCH` on an upload session: append a chunk, then report progress.
async fn blob_upload_patch(
    image: &DockerImage,
    uuid: &str,
    config: &ServerConfig,
    payload: web::Payload,
) -> std::io::Result<HttpResponse> {
    match process_blob_upload(image, uuid, payload).await? {
        Some(error_response) => Ok(error_response),
        None => blob_upload_response(HttpResponse::Accepted(), image, uuid, config),
    }
}
/// `PUT` on an upload session: append the final chunk, verify the whole
/// upload against the client-supplied `digest`, then move the file into
/// content-addressed blob storage.
async fn blob_upload_finish(
    image: &DockerImage,
    uuid: &str,
    config: &ServerConfig,
    payload: web::Payload,
    digest: &str,
) -> std::io::Result<HttpResponse> {
    // Process last chunk
    if let Some(res) = process_blob_upload(image, uuid, payload).await? {
        return Ok(res);
    }
    // Process chunk digest
    let computed_digest = format!("sha256:{}", sha256sum(&image.upload_storage_path(uuid))?);
    if !computed_digest.eq(digest) {
        return Ok(HttpResponse::BadRequest().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::DIGEST_INVALID,
            "invalid digest",
        )));
    }
    // Move blob to its destination
    let dest = BlobReference::from_str(digest)?.data_path(&config.storage_path);
    create_empty_file(&dest)?;
    std::fs::rename(image.upload_storage_path(uuid), &dest)?;
    // Content-Range is inclusive: last byte index is len - 1.
    let end_of_blob_range = std::fs::metadata(&dest)?.len() - 1;
    let location = format!("{}/v2/{}/blobs/{}", config.access_url, &image.image, digest);
    Ok(HttpResponse::Created()
        .insert_header(("Content-Range", format!("0-{}", end_of_blob_range)))
        .insert_header(("Docker-Content-Digest", digest))
        .insert_header(("Location", location))
        .finish())
}
/// `DELETE` on an upload session: discard the partially uploaded blob.
fn cancel_blob_upload(image: &DockerImage, uuid: &str) -> std::io::Result<HttpResponse> {
    let upload_path = image.upload_storage_path(uuid);
    if !upload_path.exists() {
        return Ok(HttpResponse::NotFound().json(DockerErrorResponse::new_simple(
            DockerErrorMessageType::BLOB_UNKNOWN,
            "blob unknown",
        )));
    }
    std::fs::remove_file(upload_path)?;
    Ok(HttpResponse::NoContent()
        .insert_header(("content-length", "0"))
        .finish())
}
/// Query parameters of dispatched requests; `digest` is read by the final
/// blob-upload `PUT`.
#[derive(serde::Deserialize)]
struct RequestQuery {
    digest: Option<String>,
}
/// Route every `/v2/<name>/...` request to its handler.
///
/// Repository names may contain slashes, so the path is parsed from the
/// end: the last one/two/three segments identify the operation and the
/// remainder is the image name. Write operations require an authenticated
/// (non-anonymous) user; reads only require a valid token.
async fn requests_dispatcher(
    r: HttpRequest,
    config: web::Data<ServerConfig>,
    payload: web::Payload,
    query: web::Query<RequestQuery>,
) -> HttpResponse {
    let mut user = None;
    if let Some(e) = check_auth(&r, &config, &mut user) {
        return e;
    }
    // skip(2) drops the empty segment before the leading '/' and "v2".
    let parts = r.uri().path().split('/').skip(2).collect::<Vec<_>>();
    if parts.len() < 3 {
        return not_found().await;
    }
    // Get tags list `/v2/<name>/tags/list`
    if r.uri().path().ends_with("/tags/list") {
        let image = DockerImage::new(&config.storage_path, &parts[..parts.len() - 2].join("/"));
        return ok_or_internal_error(get_tags_list(&image));
    }
    // Manifest manipulation `/v2/<name>/manifests/<reference>`
    else if parts[parts.len() - 2].eq("manifests") {
        let image = DockerImage::new(&config.storage_path, &parts[..parts.len() - 2].join("/"));
        let image_ref = parts.last().unwrap();
        // Get manifest
        match *r.method() {
            Method::GET => return ok_or_internal_error(get_manifest(&image, image_ref).await),
            Method::HEAD => return ok_or_internal_error(get_manifest(&image, image_ref).await),
            Method::PUT => {
                if user.is_none() {
                    return insufficient_authorizations(&config);
                }
                return ok_or_internal_error(put_manifest(&image, image_ref, payload, &config).await);
            }
            Method::DELETE => {
                if user.is_none() {
                    return insufficient_authorizations(&config);
                }
                return ok_or_internal_error(delete_manifest(&image, image_ref, &config).await);
            }
            _ => {}
        }
    }
    // Blobs manipulation `/v2/<name>/blobs/<digest>`
    else if parts[parts.len() - 2].eq("blobs") {
        let image = DockerImage::new(&config.storage_path, &parts[..parts.len() - 2].join("/"));
        let digest = parts.last().unwrap();
        match *r.method() {
            Method::GET => return ok_or_internal_error(get_blob(&image, digest).await),
            Method::HEAD => return ok_or_internal_error(get_blob(&image, digest).await),
            Method::DELETE => {
                if user.is_none() {
                    return insufficient_authorizations(&config);
                }
                return ok_or_internal_error(delete_blob(&image, digest).await);
            }
            _ => {}
        }
    }
    // Request blobs upload
    else if r.uri().path().ends_with("/blobs/uploads/") {
        if user.is_none() {
            return insufficient_authorizations(&config);
        }
        return ok_or_internal_error(
            start_blob_upload(
                &DockerImage::new(&config.storage_path, &parts[..parts.len() - 3].join("/")),
                &config,
            )
            .await,
        );
    }
    // Manage blobs upload `/v2/<name>/blobs/uploads/<uuid>`
    else if parts[parts.len() - 3] == "blobs" && parts[parts.len() - 2] == "uploads" {
        if user.is_none() {
            return insufficient_authorizations(&config);
        }
        let image = DockerImage::new(&config.storage_path, &parts[..parts.len() - 3].join("/"));
        let uuid = parts.last().unwrap_or(&"");
        // Reject anything that is not a plain UUID-shaped token before it
        // is used to build a file path.
        if !Regex::new(r"^[0-9a-zA-Z\-]+$").unwrap().is_match(uuid) {
            return HttpResponse::BadRequest().json("Invalid UUID !");
        }
        match *r.method() {
            Method::GET => return ok_or_internal_error(blob_upload_status(&image, uuid, &config)),
            Method::PATCH => return ok_or_internal_error(blob_upload_patch(&image, uuid, &config, payload).await),
            Method::PUT => {
                return ok_or_internal_error(
                    blob_upload_finish(
                        &image,
                        uuid,
                        &config,
                        payload,
                        query.digest.as_ref().unwrap_or(&String::new()),
                    )
                    .await,
                )
            }
            Method::DELETE => return ok_or_internal_error(cancel_blob_upload(&image, uuid)),
            _ => {}
        }
    }
    not_found().await
}
/// Build and run the registry HTTP server, blocking until it shuts down.
pub async fn start(config: ServerConfig) -> std::io::Result<()> {
    // `HttpServer::new` takes an app factory; move a clone of the config into
    // it so every worker thread gets its own copy of the shared state.
    let bind_addr = config.listen_address.to_string();
    let server = HttpServer::new(move || {
        App::new()
            .app_data(Data::new(config.clone()))
            .route("/token", web::to(get_auth_token))
            .route("/v2/", web::get().to(base))
            .route("/v2/_catalog", web::get().to(catalog))
            .route("/v2/{tail:.*}", web::to(requests_dispatcher))
            .route("{tail:.*}", web::to(not_found))
    })
    .bind(bind_addr)?;
    server.run().await
}
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/storage.rs | Rust | use std::io::ErrorKind;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use crate::docker::{DockerBlobRef, DockerManifest, DockerManifestOrManifestList};
const BASE_PATH: &str = "docker/registry/v2/";
/// Content-addressed reference to a stored blob: the digest string
/// `sha256:abcd…` split into its two components.
#[derive(Debug, Eq, PartialEq)]
pub struct BlobReference {
    // Digest algorithm (the rest of this file only ever scans "sha256").
    alg: String,
    // Hex-encoded hash of the blob content.
    hash: String,
}
impl BlobReference {
    /// Convert a manifest's `DockerBlobRef` (its digest string) into a `BlobReference`.
    pub fn from_docker_blob_ref(r: &DockerBlobRef) -> std::io::Result<Self> {
        Self::from_str(&r.digest)
    }

    /// Wrap an already-computed sha256 hex digest (no validation is performed).
    pub fn from_sha256sum(hash: String) -> Self {
        Self {
            alg: "sha256".to_string(),
            hash,
        }
    }

    /// Check whether `r` parses as a `<alg>:<hash>` digest.
    pub fn is_valid_reference(r: &str) -> bool {
        Self::from_str(r).is_ok()
    }

    /// Parse a reference from a `link` file containing a digest string.
    pub fn from_file(path: &Path) -> std::io::Result<Self> {
        Self::from_str(&std::fs::read_to_string(path)?)
    }

    /// Render back to the canonical `<alg>:<hash>` digest form.
    pub fn to_digest(&self) -> String {
        format!("{}:{}", self.alg, self.hash)
    }

    /// Path of the blob's content on disk:
    /// `<storage>/docker/registry/v2/blobs/<alg>/<hash[..2]>/<hash>/data`.
    pub fn data_path(&self, storage_path: &Path) -> PathBuf {
        storage_path
            .join(BASE_PATH)
            .join("blobs")
            .join(&self.alg)
            .join(&self.hash[..2])
            .join(&self.hash)
            .join("data")
    }

    /// True when this is the well-known sha256 digest of zero bytes (the
    /// empty blob), which the garbage collector deliberately never deletes.
    pub fn is_empty_ref(&self) -> bool {
        self.alg == "sha256" && self.hash == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    }
}
impl FromStr for BlobReference {
    type Err = std::io::Error;

    /// Parse a digest of the form `<alg>:<hash>`.
    fn from_str(content: &str) -> Result<Self, Self::Err> {
        let mut pieces = content.splitn(2, ':');
        let (alg, hash) = match (pieces.next(), pieces.next()) {
            (Some(alg), Some(hash)) => (alg, hash),
            _ => return Err(std::io::Error::new(ErrorKind::Other, "Expected 2 entries!")),
        };
        // `data_path` shards blobs on the first two hash characters, so a
        // usable hash must be longer than that.
        if hash.len() <= 2 {
            return Err(std::io::Error::new(ErrorKind::Other, "Blob hash is too small!"));
        }
        Ok(Self {
            alg: alg.to_string(),
            hash: hash.to_string(),
        })
    }
}
/// Handle on a single image repository inside the registry storage tree.
#[derive(Debug)]
pub struct DockerImage {
    // Root of the registry storage directory.
    pub storage_path: PathBuf,
    // Repository name, possibly nested (e.g. "library/alpine").
    pub image: String,
}
impl DockerImage {
    /// Create a handle for repository `image` inside the registry rooted at `storage`.
    pub fn new(storage: &Path, image: &str) -> Self {
        Self {
            storage_path: storage.to_path_buf(),
            image: image.to_string(),
        }
    }

    /// Root directory of this repository:
    /// `<storage>/docker/registry/v2/repositories/<image>`.
    pub fn image_path(&self) -> PathBuf {
        self.storage_path.join(BASE_PATH).join("repositories").join(&self.image)
    }

    /// Directory holding one subdirectory per tag.
    pub fn tags_path(&self) -> PathBuf {
        self.image_path().join("_manifests/tags")
    }

    /// Directory holding one link per known manifest revision.
    pub fn revisions_path(&self) -> PathBuf {
        self.image_path().join("_manifests/revisions")
    }

    /// List the tags of this repository. Tag directories whose `link`
    /// file is missing are silently skipped.
    pub fn tags_list(&self) -> std::io::Result<Vec<String>> {
        let mut list = vec![];
        if !self.tags_path().exists() {
            return Ok(vec![]);
        }
        for entry in std::fs::read_dir(self.tags_path())? {
            let entry = entry?;
            if entry.metadata()?.is_dir() {
                let manifest_tag = entry.file_name().to_string_lossy().to_string();
                // We check the link actually exists before adding it to the list
                if self.manifest_tag_link_path(&manifest_tag).exists() {
                    list.push(manifest_tag);
                }
            }
        }
        Ok(list)
    }

    /// Return every tag whose current manifest link points at blob `b`.
    pub fn get_tags_attached_to_manifest_blob(&self, b: &BlobReference) -> std::io::Result<Vec<String>> {
        let mut list = vec![];
        for tag in self.tags_list()? {
            let blob = BlobReference::from_file(&self.manifest_tag_link_path(&tag))?;
            if &blob == b {
                list.push(tag);
            }
        }
        Ok(list)
    }

    /// List every manifest revision (sha256 only) recorded for this repository.
    pub fn manifests_revision_list(&self) -> std::io::Result<Vec<BlobReference>> {
        let list_path = self.revisions_path().join("sha256");
        if !list_path.exists() {
            return Ok(vec![]);
        }
        let mut list = vec![];
        for entry in std::fs::read_dir(list_path)? {
            let entry = entry?;
            if entry.metadata()?.is_dir() {
                let link_file = entry.path().join("link");
                // Revisions without a link file are ignored, mirroring tags_list.
                if link_file.exists() {
                    list.push(BlobReference::from_file(&link_file)?);
                }
            }
        }
        Ok(list)
    }

    /// Path of the `link` file holding the digest currently tagged `manifest_ref`.
    pub fn manifest_tag_link_path(&self, manifest_ref: &str) -> PathBuf {
        self.tags_path().join(manifest_ref).join("current/link")
    }

    /// Path of the `link` file recording manifest revision `blob`.
    pub fn manifest_revision_path(&self, blob: &BlobReference) -> PathBuf {
        self.revisions_path().join(&blob.alg).join(&blob.hash).join("link")
    }

    /// Directory used to stage an in-progress blob upload identified by `uuid`.
    pub fn upload_storage_path(&self, uuid: &str) -> PathBuf {
        self.image_path().join("_uploads").join(uuid)
    }
}
/// Recursively collect repository names under `path`, relative to `start`.
///
/// A directory is a repository when it contains a `_manifests` subdirectory.
/// Repository names may be nested (`a` and `a/b` can both be repositories),
/// so a directory that is itself a repository is still scanned for nested
/// ones instead of returning early — the previous early `return` dropped
/// already-collected siblings, and because `read_dir` order is unspecified
/// the result was non-deterministic.
pub fn recurse_images_scan(path: &Path, start: &Path) -> std::io::Result<Vec<String>> {
    if !path.exists() || !path.is_dir() {
        return Ok(vec![]);
    }
    let mut list = vec![];
    for entry in std::fs::read_dir(path)? {
        let entry = entry?;
        if !entry.file_type()?.is_dir() {
            continue;
        }
        if entry.file_name().eq("_manifests") {
            // `path` is a repository: record its name relative to `start`.
            let image_path = path.to_string_lossy().to_string();
            let start_path = start.to_string_lossy().to_string();
            // Guard against `path == start` (a `_manifests` directly under the
            // repositories root), which would otherwise panic on the slice.
            if image_path.len() > start_path.len() {
                list.push(image_path[start_path.len() + 1..].to_string());
            }
        } else {
            list.append(&mut recurse_images_scan(&entry.path(), start)?);
        }
    }
    Ok(list)
}
/// Return every repository name known to the registry, sorted alphabetically.
pub fn get_docker_images_list(storage: &Path) -> std::io::Result<Vec<String>> {
    let repositories_root = storage.join(BASE_PATH).join("repositories");
    let mut images = recurse_images_scan(&repositories_root, &repositories_root)?;
    images.sort();
    Ok(images)
}
/// Get the entire list of blob references.
///
/// Blobs live under `blobs/sha256/<first two hash chars>/<full hash>/`,
/// so the scan walks exactly two directory levels.
pub fn get_blob_list(storage: &Path) -> std::io::Result<Vec<BlobReference>> {
    let sha256_root = storage.join(BASE_PATH).join("blobs/sha256");
    let mut refs = vec![];
    if !sha256_root.exists() {
        return Ok(refs);
    }
    // First level: two-character shard directories.
    for shard in std::fs::read_dir(sha256_root)? {
        let shard = shard?;
        if !shard.metadata()?.is_dir() {
            continue;
        }
        // Second level: one directory per full hash.
        for blob_dir in std::fs::read_dir(shard.path())? {
            let blob_dir = blob_dir?;
            if !blob_dir.metadata()?.is_dir() {
                continue;
            }
            refs.push(BlobReference::from_sha256sum(
                blob_dir.file_name().to_string_lossy().to_string(),
            ));
        }
    }
    Ok(refs)
}
/// Return `Ok(false)` when `blob_ref` backs either the image config or any
/// layer of `manifest`, `Ok(true)` when the manifest does not reference it.
fn is_blob_useless_in_docker_manifest(blob_ref: &BlobReference, manifest: &DockerManifest) -> std::io::Result<bool> {
    // The config blob and the layer blobs are the only references a plain
    // manifest can hold; check them in one pass.
    for docker_ref in std::iter::once(&manifest.config).chain(manifest.layers.iter()) {
        if &BlobReference::from_docker_blob_ref(docker_ref)? == blob_ref {
            return Ok(false);
        }
    }
    Ok(true)
}
/// Check recursively manifest distribution files
///
/// Returns `Ok(false)` as soon as `blob_ref` is found to be referenced,
/// directly or through nested manifest lists, starting from the manifest
/// identified by `upper_manifest_ref`.
fn is_blob_useless_in_distribution_file(
    blob_ref: &BlobReference,
    upper_manifest_ref: &BlobReference,
    storage: &Path,
) -> std::io::Result<bool> {
    let manifest_path = upper_manifest_ref.data_path(storage);
    // A manifest that is not on disk cannot reference anything.
    if !manifest_path.exists() {
        return Ok(true);
    }
    let manifest: DockerManifestOrManifestList = serde_json::from_str(&std::fs::read_to_string(manifest_path)?)?;
    // In case of manifest file
    if let Some(manifest) = manifest.get_manifest() {
        if !is_blob_useless_in_docker_manifest(blob_ref, &manifest)? {
            return Ok(false);
        }
    }
    // In case of distribution files => recurse scan
    else if let Some(manifests_list) = manifest.get_manifests_list() {
        for manifest_ref in &manifests_list.manifests {
            let manifest_ref = BlobReference::from_docker_blob_ref(manifest_ref)?;
            if &manifest_ref == blob_ref {
                return Ok(false);
            }
            // Skip direct self-references to avoid trivial infinite recursion.
            // NOTE(review): a reference cycle spanning more than one level
            // would still recurse forever — confirm such manifests cannot occur.
            if &manifest_ref == upper_manifest_ref {
                continue;
            }
            if !is_blob_useless_in_distribution_file(blob_ref, &manifest_ref, storage)? {
                return Ok(false);
            }
        }
    } else {
        eprintln!("Unknown manifest type! {}", manifest.mediaType);
    }
    Ok(true)
}
/// Check if a blob is useless or not
///
/// A blob is "useless" (collectable) when no repository references it:
/// neither directly as a manifest revision or tag, nor transitively through
/// the config/layers/sub-manifests of any reachable manifest.
pub fn is_blob_useless(blob_ref: &BlobReference, storage: &Path) -> std::io::Result<bool> {
    // Scan all images
    for image in get_docker_images_list(storage)? {
        let image = DockerImage::new(storage, &image);
        let mut manifest_blobs = image.manifests_revision_list()?;
        // Process each image tags
        for tag in image.tags_list()? {
            let manifest_ref = BlobReference::from_file(&image.manifest_tag_link_path(&tag))?;
            // Skip links pointing at the shared empty blob.
            if !manifest_ref.is_empty_ref() {
                manifest_blobs.push(manifest_ref);
            }
        }
        for manifest_ref in manifest_blobs {
            // Used directly as a manifest blob?
            if &manifest_ref == blob_ref {
                return Ok(false);
            }
            // Used somewhere inside the manifest (config, layer, sub-manifest)?
            if !is_blob_useless_in_distribution_file(blob_ref, &manifest_ref, storage)? {
                return Ok(false);
            }
        }
    }
    Ok(true)
}
/// Remove empty directories
///
/// Depth-first: children are cleaned before `path` itself is considered, so
/// a directory whose only content was empty subdirectories is removed in the
/// same pass (the previous version counted soon-to-be-removed subdirectories
/// as "found files" and left such directories behind). The root call passes
/// `can_remove = false` to protect the storage root itself.
fn remove_empty_dirs(path: &Path, can_remove: bool) -> std::io::Result<()> {
    for entry in std::fs::read_dir(path)? {
        let entry = entry?;
        if entry.metadata()?.is_dir() {
            remove_empty_dirs(&entry.path(), true)?;
        }
    }
    // Re-read after the recursion: subdirectories may just have been removed,
    // leaving `path` empty even though it had entries above.
    if can_remove && std::fs::read_dir(path)?.next().is_none() {
        std::fs::remove_dir(path)?;
    }
    Ok(())
}
/// Run the garbage collector
///
/// Deletes every blob no longer referenced by any manifest or tag, then
/// prunes directories left empty. NOTE(review): the sweep runs three times —
/// presumably because one pass's deletions can orphan data discovered by the
/// next, and because directory pruning may need several passes; confirm
/// three iterations is always enough.
pub fn clean_storage(storage: &Path) -> std::io::Result<()> {
    for _ in 0..3 {
        for blob in get_blob_list(storage)? {
            // Empty blob: shared by many images, never collected.
            if blob.is_empty_ref() {
                continue;
            }
            if !is_blob_useless(&blob, storage)? {
                continue;
            }
            println!("Deleting useless blob {}", blob.to_digest());
            // Remove the whole `<hash>/` directory, not just its `data` file.
            std::fs::remove_dir_all(blob.data_path(storage).parent().unwrap())?;
        }
        remove_empty_dirs(storage, false)?;
    }
    Ok(())
}
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
src/utils.rs | Rust | //! Utilities
use std::io::{stdin, stdout, ErrorKind, Write};
use std::path::Path;
use std::process::Command;
use std::time::{SystemTime, UNIX_EPOCH};
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
/// Create an empty file and all its parent directories.
///
/// An existing file at `path` is truncated to zero bytes.
pub fn create_empty_file(path: &Path) -> std::io::Result<()> {
    // Make sure the directory that will contain the file exists first.
    let dir = path.parent().unwrap_or(path);
    std::fs::create_dir_all(dir)?;
    // Writing zero bytes creates (or truncates) the file itself.
    std::fs::write(path, b"")?;
    Ok(())
}
/// Get the sha256 hash of a file
///
/// NOTE(review): this shells out to the external `sha256sum` binary, so it
/// only works where that tool is on PATH (i.e. not on stock Windows) —
/// confirm this constraint is acceptable, or switch to an in-process hash.
pub fn sha256sum(path: &Path) -> std::io::Result<String> {
    let output = Command::new("sha256sum").arg(path.as_os_str()).output()?;
    if !output.status.success() {
        return Err(std::io::Error::new(ErrorKind::Other, "Failed to compute sum!"));
    }
    // `sha256sum` prints "<hash>  <filename>"; keep only the hash part.
    let hash = String::from_utf8_lossy(&output.stdout)
        .split(' ')
        .next()
        .unwrap_or("")
        .to_string();
    Ok(hash)
}
/// Get the sha256 hash of a string
///
/// Implemented by writing the string to a temporary file and reusing
/// [`sha256sum`]; the temp file is cleaned up when `temp` goes out of scope
/// (`mktemp::Temp` removes it on drop).
pub fn sha256sum_str(str: &str) -> std::io::Result<String> {
    let temp = mktemp::Temp::new_file()?;
    std::fs::write(&temp, str)?;
    sha256sum(&temp)
}
/// Prompt the user on stdout and read one line of input from stdin.
///
/// The trailing newline (and a Windows-style carriage return, if present)
/// is stripped from the returned string.
pub fn request_input(field: &str) -> std::io::Result<String> {
    print!("Please input {}: ", field);
    // `print!` does not flush, so force the prompt out before blocking on stdin.
    stdout().flush()?;
    let mut line = String::new();
    stdin().read_line(&mut line)?;
    let trimmed = line.strip_suffix('\n').unwrap_or(&line);
    let trimmed = trimmed.strip_suffix('\r').unwrap_or(trimmed);
    Ok(trimmed.to_string())
}
/// Generate a random string of a given size
///
/// Characters are drawn uniformly from `[0-9a-zA-Z]`.
///
/// ```
/// use dockerust::utils::rand_str;
///
/// let size = 10;
/// let rand = rand_str(size);
/// assert_eq!(size, rand.len());
/// ```
pub fn rand_str(len: usize) -> String {
    thread_rng()
        .sample_iter(&Alphanumeric)
        .take(len)
        .map(char::from)
        .collect()
}
/// Get the current time since epoch
///
/// Returns whole seconds elapsed since the Unix epoch. Panics only if the
/// system clock is set before 1970.
///
/// ```
/// use dockerust::utils::time;
///
/// let time = time();
/// ```
pub fn time() -> u64 {
    let since_epoch = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
    since_epoch.as_secs()
}
| yezz123/dockerust | 9 | 🦀 Functional Docker registry server in pure Rust. | Rust | yezz123 | Yasser Tahiri | Yezz LLC. |
docs/css/custom.css | CSS | .termynal-comment {
color: #4a968f;
font-style: italic;
display: block;
}
/* Container that opts a rendered code block into the Termynal animation
   (see docs/js/custom.js). */
.termy {
  /* For right to left languages */
  direction: ltr;
}

/* Preserve whitespace inside the rendered terminal. */
.termy [data-termynal] {
  white-space: pre-wrap;
}

/* Right to left languages */
code {
  direction: ltr;
  display: inline-block;
}
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
docs/css/termynal.css | CSS | /**
* termynal.js
*
* @author Ines Montani <ines@ines.io>
* @version 0.0.1
* @license MIT
*/
/* Color palette shared by all widget rules below. */
:root {
  --color-bg: #252a33;
  --color-text: #eee;
  --color-text-subtle: #a2a2a2;
}

/* The terminal window itself. */
[data-termynal] {
  width: 750px;
  max-width: 100%;
  background: var(--color-bg);
  color: var(--color-text);
  /* font-size: 18px; */
  font-size: 15px;
  /* font-family: 'Fira Mono', Consolas, Menlo, Monaco, 'Courier New', Courier, monospace; */
  font-family: "Roboto Mono", "Fira Mono", Consolas, Menlo, Monaco,
    "Courier New", Courier, monospace;
  border-radius: 4px;
  padding: 75px 45px 35px;
  position: relative;
  -webkit-box-sizing: border-box;
  box-sizing: border-box;
}

/* macOS-style window buttons in the top-left corner. */
[data-termynal]:before {
  content: "";
  position: absolute;
  top: 15px;
  left: 15px;
  display: inline-block;
  width: 15px;
  height: 15px;
  border-radius: 50%;
  /* A little hack to display the window buttons in one pseudo element. */
  background: #d9515d;
  -webkit-box-shadow: 25px 0 0 #f4c025, 50px 0 0 #3ec930;
  box-shadow: 25px 0 0 #f4c025, 50px 0 0 #3ec930;
}

/* Window title-bar label. */
[data-termynal]:after {
  content: "bash";
  position: absolute;
  color: var(--color-text-subtle);
  top: 5px;
  left: 0;
  width: 100%;
  text-align: center;
}

/* "fast →" / "restart ↻" controls injected by termynal.js. */
a[data-terminal-control] {
  text-align: right;
  display: block;
  color: #aebbff;
}

[data-ty] {
  display: block;
  line-height: 2;
}

[data-ty]:before {
  /* Set up defaults and ensure empty lines are displayed. */
  content: "";
  display: inline-block;
  vertical-align: middle;
}

[data-ty="input"]:before,
[data-ty-prompt]:before {
  margin-right: 0.75em;
  color: var(--color-text-subtle);
}

/* Default shell prompt for typed input lines. */
[data-ty="input"]:before {
  content: "$";
}

/* Custom prompt text supplied via the data-ty-prompt attribute. */
[data-ty][data-ty-prompt]:before {
  content: attr(data-ty-prompt);
}

/* Blinking cursor shown while a line is being "typed". */
[data-ty-cursor]:after {
  content: attr(data-ty-cursor);
  font-family: monospace;
  margin-left: 0.5em;
  -webkit-animation: blink 1s infinite;
  animation: blink 1s infinite;
}

/* Cursor animation */
@-webkit-keyframes blink {
  50% {
    opacity: 0;
  }
}

@keyframes blink {
  50% {
    opacity: 0;
  }
}
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
docs/js/custom.js | JavaScript | function setupTermynal() {
document.querySelectorAll(".use-termynal").forEach((node) => {
node.style.display = "block";
new Termynal(node, {
lineDelay: 500,
});
});
const progressLiteralStart = "---> 100%";
const promptLiteralStart = "$ ";
const customPromptLiteralStart = "# ";
const termynalActivateClass = "termy";
let termynals = [];
function createTermynals() {
document
.querySelectorAll(`.${termynalActivateClass} .highlight`)
.forEach((node) => {
const text = node.textContent;
const lines = text.split("\n");
const useLines = [];
let buffer = [];
function saveBuffer() {
if (buffer.length) {
let isBlankSpace = true;
buffer.forEach((line) => {
if (line) {
isBlankSpace = false;
}
});
dataValue = {};
if (isBlankSpace) {
dataValue["delay"] = 0;
}
if (buffer[buffer.length - 1] === "") {
// A last single <br> won't have effect
// so put an additional one
buffer.push("");
}
const bufferValue = buffer.join("<br>");
dataValue["value"] = bufferValue;
useLines.push(dataValue);
buffer = [];
}
}
for (let line of lines) {
if (line === progressLiteralStart) {
saveBuffer();
useLines.push({
type: "progress",
});
} else if (line.startsWith(promptLiteralStart)) {
saveBuffer();
const value = line.replace(promptLiteralStart, "").trimEnd();
useLines.push({
type: "input",
value: value,
});
} else if (line.startsWith("// ")) {
saveBuffer();
const value = "💬 " + line.replace("// ", "").trimEnd();
useLines.push({
value: value,
class: "termynal-comment",
delay: 0,
});
} else if (line.startsWith(customPromptLiteralStart)) {
saveBuffer();
const promptStart = line.indexOf(promptLiteralStart);
if (promptStart === -1) {
console.error("Custom prompt found but no end delimiter", line);
}
const prompt = line
.slice(0, promptStart)
.replace(customPromptLiteralStart, "");
let value = line.slice(promptStart + promptLiteralStart.length);
useLines.push({
type: "input",
value: value,
prompt: prompt,
});
} else {
buffer.push(line);
}
}
saveBuffer();
const div = document.createElement("div");
node.replaceWith(div);
const termynal = new Termynal(div, {
lineData: useLines,
noInit: true,
lineDelay: 500,
});
termynals.push(termynal);
});
}
function loadVisibleTermynals() {
termynals = termynals.filter((termynal) => {
if (termynal.container.getBoundingClientRect().top - innerHeight <= 0) {
termynal.init();
return false;
}
return true;
});
}
window.addEventListener("scroll", loadVisibleTermynals);
createTermynals();
loadVisibleTermynals();
}
async function main() {
setupTermynal();
}
main();
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
docs/js/termynal.js | JavaScript | /**
* termynal.js
* A lightweight, modern and extensible animated terminal window, using
* async/await.
*
* @author Ines Montani <ines@ines.io>
* @version 0.0.1
* @license MIT
*/
"use strict";
/** Generate a terminal widget. */
class Termynal {
/**
* Construct the widget's settings.
* @param {(string|Node)=} container - Query selector or container element.
* @param {Object=} options - Custom settings.
* @param {string} options.prefix - Prefix to use for data attributes.
* @param {number} options.startDelay - Delay before animation, in ms.
* @param {number} options.typeDelay - Delay between each typed character, in ms.
* @param {number} options.lineDelay - Delay between each line, in ms.
* @param {number} options.progressLength - Number of characters displayed as progress bar.
* @param {string} options.progressChar – Character to use for progress bar, defaults to █.
* @param {number} options.progressPercent - Max percent of progress.
* @param {string} options.cursor – Character to use for cursor, defaults to ▋.
* @param {Object[]} lineData - Dynamically loaded line data objects.
* @param {boolean} options.noInit - Don't initialise the animation.
*/
constructor(container = "#termynal", options = {}) {
this.container =
typeof container === "string"
? document.querySelector(container)
: container;
this.pfx = `data-${options.prefix || "ty"}`;
this.originalStartDelay = this.startDelay =
options.startDelay ||
parseFloat(this.container.getAttribute(`${this.pfx}-startDelay`)) ||
600;
this.originalTypeDelay = this.typeDelay =
options.typeDelay ||
parseFloat(this.container.getAttribute(`${this.pfx}-typeDelay`)) ||
90;
this.originalLineDelay = this.lineDelay =
options.lineDelay ||
parseFloat(this.container.getAttribute(`${this.pfx}-lineDelay`)) ||
1500;
this.progressLength =
options.progressLength ||
parseFloat(this.container.getAttribute(`${this.pfx}-progressLength`)) ||
40;
this.progressChar =
options.progressChar ||
this.container.getAttribute(`${this.pfx}-progressChar`) ||
"█";
this.progressPercent =
options.progressPercent ||
parseFloat(this.container.getAttribute(`${this.pfx}-progressPercent`)) ||
100;
this.cursor =
options.cursor ||
this.container.getAttribute(`${this.pfx}-cursor`) ||
"▋";
this.lineData = this.lineDataToElements(options.lineData || []);
this.loadLines();
if (!options.noInit) this.init();
}
loadLines() {
// Load all the lines and create the container so that the size is fixed
// Otherwise it would be changing and the user viewport would be constantly
// moving as she/he scrolls
const finish = this.generateFinish();
finish.style.visibility = "hidden";
this.container.appendChild(finish);
// Appends dynamically loaded lines to existing line elements.
this.lines = [...this.container.querySelectorAll(`[${this.pfx}]`)].concat(
this.lineData
);
for (let line of this.lines) {
line.style.visibility = "hidden";
this.container.appendChild(line);
}
const restart = this.generateRestart();
restart.style.visibility = "hidden";
this.container.appendChild(restart);
this.container.setAttribute("data-termynal", "");
}
/**
* Initialise the widget, get lines, clear container and start animation.
*/
init() {
/**
* Calculates width and height of Termynal container.
* If container is empty and lines are dynamically loaded, defaults to browser `auto` or CSS.
*/
const containerStyle = getComputedStyle(this.container);
this.container.style.width =
containerStyle.width !== "0px" ? containerStyle.width : undefined;
this.container.style.minHeight =
containerStyle.height !== "0px" ? containerStyle.height : undefined;
this.container.setAttribute("data-termynal", "");
this.container.innerHTML = "";
for (let line of this.lines) {
line.style.visibility = "visible";
}
this.start();
}
  /**
   * Start the animation and render the lines depending on their data attributes.
   */
  async start() {
    this.addFinish();
    await this._wait(this.startDelay);
    for (let line of this.lines) {
      const type = line.getAttribute(this.pfx);
      const delay = line.getAttribute(`${this.pfx}-delay`) || this.lineDelay;
      if (type == "input") {
        line.setAttribute(`${this.pfx}-cursor`, this.cursor);
        await this.type(line);
        await this._wait(delay);
      } else if (type == "progress") {
        await this.progress(line);
        await this._wait(delay);
      } else {
        this.container.appendChild(line);
        await this._wait(delay);
      }
      line.removeAttribute(`${this.pfx}-cursor`);
    }
    this.addRestart();
    this.finishElement.style.visibility = "hidden";
    // Restore the original delays in case the "fast →" control zeroed them.
    this.lineDelay = this.originalLineDelay;
    this.typeDelay = this.originalTypeDelay;
    this.startDelay = this.originalStartDelay;
  }
generateRestart() {
const restart = document.createElement("a");
restart.onclick = (e) => {
e.preventDefault();
this.container.innerHTML = "";
this.init();
};
restart.href = "#";
restart.setAttribute("data-terminal-control", "");
restart.innerHTML = "restart ↻";
return restart;
}
generateFinish() {
const finish = document.createElement("a");
finish.onclick = (e) => {
e.preventDefault();
this.lineDelay = 0;
this.typeDelay = 0;
this.startDelay = 0;
};
finish.href = "#";
finish.setAttribute("data-terminal-control", "");
finish.innerHTML = "fast →";
this.finishElement = finish;
return finish;
}
addRestart() {
const restart = this.generateRestart();
this.container.appendChild(restart);
}
addFinish() {
const finish = this.generateFinish();
this.container.appendChild(finish);
}
/**
* Animate a typed line.
* @param {Node} line - The line element to render.
*/
async type(line) {
const chars = [...line.textContent];
line.textContent = "";
this.container.appendChild(line);
for (let char of chars) {
const delay =
line.getAttribute(`${this.pfx}-typeDelay`) || this.typeDelay;
await this._wait(delay);
line.textContent += char;
}
}
/**
* Animate a progress bar.
* @param {Node} line - The line element to render.
*/
async progress(line) {
const progressLength =
line.getAttribute(`${this.pfx}-progressLength`) || this.progressLength;
const progressChar =
line.getAttribute(`${this.pfx}-progressChar`) || this.progressChar;
const chars = progressChar.repeat(progressLength);
const progressPercent =
line.getAttribute(`${this.pfx}-progressPercent`) || this.progressPercent;
line.textContent = "";
this.container.appendChild(line);
for (let i = 1; i < chars.length + 1; i++) {
await this._wait(this.typeDelay);
const percent = Math.round((i / chars.length) * 100);
line.textContent = `${chars.slice(0, i)} ${percent}%`;
if (percent > progressPercent) {
break;
}
}
}
/**
* Helper function for animation delays, called with `await`.
* @param {number} time - Timeout, in ms.
*/
_wait(time) {
return new Promise((resolve) => setTimeout(resolve, time));
}
/**
* Converts line data objects into line elements.
*
* @param {Object[]} lineData - Dynamically loaded lines.
* @param {Object} line - Line data object.
* @returns {Element[]} - Array of line elements.
*/
lineDataToElements(lineData) {
return lineData.map((line) => {
let div = document.createElement("div");
div.innerHTML = `<span ${this._attributes(line)}>${
line.value || ""
}</span>`;
return div.firstElementChild;
});
}
/**
* Helper function for generating attributes string.
*
* @param {Object} line - Line data object.
* @returns {string} - String of attributes.
*/
_attributes(line) {
let attrs = "";
for (let prop in line) {
// Custom add class
if (prop === "class") {
attrs += ` class=${line[prop]} `;
continue;
}
if (prop === "type") {
attrs += `${this.pfx}="${line[prop]}" `;
} else if (prop !== "value") {
attrs += `${this.pfx}-${prop}="${line[prop]}" `;
}
}
return attrs;
}
}
/**
* HTML API: If current script has container(s) specified, initialise Termynal.
*/
if (document.currentScript.hasAttribute("data-termynal-container")) {
const containers = document.currentScript.getAttribute(
"data-termynal-container"
);
containers.split("|").forEach((container) => new Termynal(container));
}
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/__init__.py | Python | """asynchronous ORM that uses pydantic models to represent database tables ✨"""
__version__ = "1.7.0"
from ormdantic.orm import Ormdantic
__all__ = ["Ormdantic"]
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/generator/__init__.py | Python | from ormdantic.generator._crud import OrmCrud as CRUD
from ormdantic.generator._lazy import generate as Generator
from ormdantic.generator._table import OrmTableGenerator as Table
__all__ = ["Table", "CRUD", "Generator"]
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/generator/_crud.py | Python | """Handle table interactions for a model."""
from typing import Any, Generic
from pypika import Order
from pypika.queries import QueryBuilder
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker
from ormdantic.generator._field import OrmField
from ormdantic.generator._query import OrmQuery
from ormdantic.generator._serializer import OrmSerializer
from ormdantic.models import Map, OrmTable, Result
from ormdantic.types import ModelType
class OrmCrud(Generic[ModelType]):
    """Provides DB CRUD methods and table information for a model.

    SQL is generated with pypika from the table metadata, executed as raw
    text through an async SQLAlchemy engine, and result rows are
    deserialized back into pydantic model instances.
    """

    def __init__(
        self,
        table_data: OrmTable,  # type: ignore
        table_map: Map,
        engine: AsyncEngine,
    ) -> None:
        """Initialize OrmCrud.

        :param table_data: Metadata of the table this instance manages.
        :param table_map: Map of all tables, used to resolve relations.
        :param engine: Async engine the generated SQL is executed on.
        """
        self._engine = engine
        self._table_map = table_map
        self._table_data = table_data
        self.tablename = table_data.tablename
        self.columns = table_data.columns

    async def find_one(self, pk: Any, depth: int = 0) -> ModelType | None:
        """Find a model instance by primary key.

        :param pk: Primary key value of the record to fetch.
        :param depth: Number of relation levels to populate via joins.
        :return: The matching model instance, or ``None`` if not found.
        """
        result = await self._execute_query(
            OrmField(self._table_data, self._table_map).get_find_one_query(pk, depth)
        )
        return OrmSerializer[ModelType | None](
            table_data=self._table_data,
            table_map=self._table_map,
            result_set=result,
            is_array=False,
            depth=depth,
        ).deserialize()

    async def find_many(
        self,
        where: dict[str, Any] | None = None,
        order_by: list[str] | None = None,
        order: Order = Order.asc,
        limit: int = 0,
        offset: int = 0,
        depth: int = 0,
    ) -> Result[ModelType]:
        """Find many model instances.

        :param where: Mapping of column name to required value (AND-combined).
        :param order_by: Columns to order by.
        :param order: Ascending or descending ordering.
        :param limit: Maximum number of records (0 means no limit clause).
        :param offset: Number of records to skip (0 means no offset clause).
        :param depth: Number of relation levels to populate via joins.
        :return: A ``Result`` wrapping the matching model instances.
        """
        result = await self._execute_query(
            OrmField(self._table_data, self._table_map).get_find_many_query(
                where, order_by, order, limit, offset, depth
            )
        )
        deserialized_data = OrmSerializer[ModelType | None](
            table_data=self._table_data,
            table_map=self._table_map,
            result_set=result,
            is_array=True,
            depth=depth,
        ).deserialize()
        return Result(
            offset=offset,
            limit=limit,
            # ``deserialize`` may return None for an empty result set.
            data=deserialized_data or [],
        )

    async def insert(self, model_instance: ModelType) -> ModelType:
        """Insert a model instance and return it unchanged."""
        await self._execute_query(
            OrmQuery(model_instance, self._table_map).get_insert_query()
        )
        return model_instance

    async def update(self, model_instance: ModelType) -> ModelType:
        """Update a record and return the given instance unchanged."""
        await self._execute_query(
            OrmQuery(model_instance, self._table_map).get_update_queries()
        )
        return model_instance

    async def upsert(self, model_instance: ModelType) -> ModelType:
        """Insert a record if it does not exist, else update it."""
        await self._execute_query(
            OrmQuery(model_instance, self._table_map).get_upsert_query()
        )
        return model_instance

    async def delete(self, pk: Any) -> bool:
        """Delete a model instance by primary key.

        Always returns ``True``; no check is made that a row existed.
        """
        await self._execute_query(
            OrmField(self._table_data, self._table_map).get_delete_query(pk)
        )
        return True

    async def count(self, where: dict[str, Any] | None = None, depth: int = 0) -> int:
        """Count records.

        NOTE(review): with ``depth > 0`` the generated query joins related
        tables, so rows may be counted once per joined relation — confirm
        this is the intended semantics.
        """
        result = await self._execute_query(
            OrmField(self._table_data, self._table_map).get_count_query(where, depth)
        )
        return result.scalar()

    async def _execute_query(self, query: QueryBuilder) -> Any:
        """Execute a query and return the raw SQLAlchemy result.

        NOTE(review): a fresh session factory is built and the engine's
        connection pool is disposed on *every* call, which defeats pooling
        for long-running applications — confirm this is deliberate.
        """
        async_session = async_sessionmaker(
            self._engine, expire_on_commit=False, class_=AsyncSession
        )
        async with async_session() as session:
            async with session.begin():
                result = await session.execute(text(str(query)))
                # ``session.begin()`` also commits on successful exit; this
                # explicit commit appears redundant — confirm before removing.
                await session.commit()
        await self._engine.dispose()
        return result
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/generator/_field.py | Python | """Module for building queries from field data."""
from typing import Any
from pypika import Field, Order
from pypika.functions import Count
from pypika.queries import Query, QueryBuilder, Table
from ormdantic.handler import py_type_to_sql
from ormdantic.models import Map, OrmTable
class OrmField:
"""Build SQL queries from field information."""
def __init__(self, table_data: OrmTable, table_map: Map) -> None: # type: ignore
"""Build CRUD queries from tablename and field info.
:param table_data: Meta data of target table for SQL script.
:param table_map: Map of tablenames and models.
"""
self._table_data = table_data
self._table_map = table_map
self._table = Table(table_data.tablename)
self._query = Query.from_(self._table)
def get_find_one_query(self, pk: Any, depth: int = 1) -> QueryBuilder:
"""Get query to find one model."""
query, columns = self._build_joins(
Query.from_(self._table),
self._table_data,
depth,
self._columns(depth),
)
query = query.where(
self._table.field(self._table_data.pk)
== py_type_to_sql(self._table_map, pk)
).select(*columns)
return query
def get_find_many_query(
self,
where: dict[str, Any] | None,
order_by: list[str] | None,
order: Order,
limit: int,
offset: int,
depth: int,
) -> QueryBuilder:
"""Get find query for many records.
:param where: Dictionary of column name to desired value.
:param order_by: Columns to order by.
:param order: Order results by ascending or descending.
:param limit: Number of records to return.
:param offset: Number of records to offset by.
:param depth: Depth of relations to populate.
:return: A list of models representing table records.
"""
where = where or {}
order_by = order_by or []
query, columns = self._build_joins(
Query.from_(self._table),
self._table_data,
depth,
self._columns(depth),
)
for field, value in where.items():
query = query.where(self._table.field(field) == value)
query = query.orderby(*order_by, order=order).select(*columns)
if limit:
query = query.limit(limit)
if offset:
query = query.offset(offset)
return query
def get_delete_query(self, pk: Any) -> QueryBuilder:
"""Get a `delete` query.
:param pk: Primary key of the record to delete.
:return: Query to delete a record.
"""
return self._query.where(self._table.field(self._table_data.pk) == pk).delete()
def get_count_query(
self,
where: dict[str, Any] | None,
depth: int,
) -> QueryBuilder:
"""Get a `count` query.
:param where: Dictionary of column name to desired value.
:param depth: Depth of relations to populate.
:return: Query to count records.
"""
where = where or {}
query, columns = self._build_joins(
Query.from_(self._table),
self._table_data,
depth,
self._columns(depth),
)
for field, value in where.items():
query = query.where(self._table.field(field) == value)
return query.select(Count("*"))
def _build_joins(
    self,
    query: QueryBuilder,
    table_data: OrmTable,  # type: ignore
    depth: int,
    columns: list[Field],
    table_tree: str | None = None,
) -> tuple[QueryBuilder, list[Field]]:
    """Recursively LEFT JOIN related tables up to ``depth`` levels deep.

    Joined tables are aliased with a slash-delimited path (e.g.
    ``"user/address"``) and columns with ``"path\\column"`` so the
    serializer can rebuild the nested model tree from flat result rows.

    :param query: Query built so far.
    :param table_data: Table whose relationships are joined at this level.
    :param depth: Remaining relation depth to descend.
    :param columns: Accumulated aliased columns to select (mutated).
    :param table_tree: Alias path of the current table; ``None`` at root.
    :return: The query with joins applied and the complete column list.
    """
    # Depth exhausted: stop descending.
    if depth <= 0:
        return query, columns
    # No relationships on this table: nothing to join.
    if not (
        relationships := self._table_map.name_to_data[
            table_data.tablename
        ].relationships
    ):
        return query, columns
    depth -= 1
    table_tree = table_tree or table_data.tablename
    pypika_table: Table = Table(table_data.tablename)
    # Alias non-root occurrences so the same table can appear more than
    # once in the join tree without colliding.
    if table_data.tablename != table_tree:
        pypika_table = pypika_table.as_(table_tree)
    # For each related table, add join to query.
    for field_name, relation in relationships.items():
        relation_name = f"{table_tree}/{field_name}"
        rel_table = Table(relation.foreign_table).as_(relation_name)
        if relation.back_references is not None:
            # One-to-many: join on the child's back-reference column.
            query = query.left_join(rel_table).on(
                pypika_table.field(table_data.pk)
                == rel_table.field(relation.back_references)
            )
        else:
            # Many-to-one: join this table's FK field to the foreign PK.
            query = query.left_join(rel_table).on(
                pypika_table.field(field_name)
                == rel_table.field(
                    self._table_map.name_to_data[relation.foreign_table].pk
                )
            )
        # Add columns of rel table to this query.
        rel_table_data = self._table_map.name_to_data[relation.foreign_table]
        columns.extend(
            [
                rel_table.field(c).as_(f"{relation_name}\\{c}")
                for c in self._table_map.name_to_data[
                    relation.foreign_table
                ].columns
                # Relation columns are expanded by deeper joins instead,
                # unless depth is exhausted.
                if depth <= 0 or c not in rel_table_data.relationships
            ]
        )
        # Add joins of relations of this table to query.
        query, new_cols = self._build_joins(
            query,
            self._table_map.name_to_data[relation.foreign_table],
            depth,
            columns,
            relation_name,
        )
        columns.extend([c for c in new_cols if c not in columns])
    return query, columns
def _columns(self, depth: int) -> list[Field]:
    """Aliased columns of the root table; relation columns are skipped
    when ``depth`` > 0 because joins will expand them instead."""
    root = Table(self._table_data.tablename)
    prefix = self._table_data.tablename
    fields = []
    for column_name in self._table_data.columns:
        if depth > 0 and column_name in self._table_data.relationships:
            continue
        fields.append(root.field(column_name).as_(f"{prefix}\\{column_name}"))
    return fields
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/generator/_lazy.py | Python | import contextlib
import datetime
import random
import types
import typing
from enum import Enum
from typing import Any, Type
from uuid import UUID, uuid4
import pydantic
from pydantic import BaseModel
from pydantic.fields import ModelField
from ormdantic.handler import (
GetTargetLength,
RandomDatetimeValue,
RandomDateValue,
RandomNumberValue,
RandomStrValue,
RandomTimedeltaValue,
RandomTimeValue,
)
from ormdantic.types import ModelType, default_max_length
def generate(
    model_type: Type[ModelType],
    use_default_values: bool = True,
    optionals_use_none: bool = False,
    **kwargs: Any,
) -> ModelType:
    """Build an instance of ``model_type`` populated with random values.

    :param model_type: Pydantic model class to instantiate.
    :param use_default_values: Keep fields at their declared default or
        default factory instead of randomizing them.
    :param optionals_use_none: Set optional fields to ``None`` instead of
        generating a random value for them.
    :param kwargs: Explicit field values; these always override both
        defaults and randomly generated values.
    :return: A new instance of ``model_type``.
    """
    for name, field in model_type.__fields__.items():
        # Explicitly supplied values take precedence.
        if name in kwargs:
            continue
        # Honor declared defaults when requested.
        has_default = field.default is not None or field.default_factory is not None
        if has_default and use_default_values:
            continue
        kwargs[name] = _get_value(
            field.annotation, field, use_default_values, optionals_use_none
        )
    return model_type(**kwargs)
def _get_value(
    type_: Type,  # type: ignore
    model_field: ModelField,
    use_default_values: bool,
    optionals_use_none: bool,
) -> Any:
    """Generate a random value for the annotation ``type_``.

    Dispatches structurally on dicts, lists and unions, then on concrete
    types (strings, numbers, bools, nested models, enums, UUIDs, temporal
    types). The ordering of the checks matters — e.g. ``bool`` is a
    subclass of ``int`` and ``datetime`` of ``date`` — so do not reorder.

    :param type_: Annotation to generate a value for.
    :param model_field: Field carrying constraint info (lengths, bounds).
    :param use_default_values: Forwarded when recursing into nested models.
    :param optionals_use_none: Return ``None`` for optional fields.
    :return: A randomly generated value compatible with ``type_``.
    """
    # Resolve string/forward-reference annotations first.
    if isinstance(type_, typing.ForwardRef):
        type_ = pydantic.typing.evaluate_forwardref(type_, None, None)
    origin = typing.get_origin(type_)
    if origin is dict:
        k_type, v_type = typing.get_args(type_)
        # Random-size dict of random key/value pairs.
        return {
            _get_value(
                k_type, model_field, use_default_values, optionals_use_none
            ): _get_value(v_type, model_field, use_default_values, optionals_use_none)
            for _ in range(random.randint(1, default_max_length))
        }
    # issubclass raises TypeError for non-class annotations; treat that
    # as "not a constrained list" and fall through.
    with contextlib.suppress(TypeError):
        if origin is list or issubclass(type_, pydantic.types.ConstrainedList):
            return _get_list_values(
                type_, model_field, use_default_values, optionals_use_none
            )
    if origin and issubclass(origin, types.UnionType):
        # Pick one non-None member of the union at random.
        type_choices = [
            it for it in typing.get_args(type_) if not issubclass(it, types.NoneType)
        ]
        chosen_union_type = random.choice(type_choices)
        return _get_value(
            chosen_union_type, model_field, use_default_values, optionals_use_none
        )
    if model_field.allow_none and optionals_use_none:
        return None
    if type_ == str or issubclass(type_, pydantic.types.ConstrainedStr):
        return RandomStrValue(model_field)
    if type_ in [int, float] or isinstance(type_, pydantic.types.ConstrainedNumberMeta):
        return RandomNumberValue(model_field)
    if type_ == bool:
        return random.random() > 0.5
    if issubclass(type_, types.NoneType):
        return None
    if issubclass(type_, BaseModel):
        # Recurse into nested pydantic models.
        return generate(type_, use_default_values, optionals_use_none)
    if issubclass(type_, Enum):
        return random.choice(list(type_))
    if type_ == UUID:
        return uuid4()
    if type_ == datetime.date:
        return RandomDateValue()
    if type_ == datetime.time:
        return RandomTimeValue()
    if type_ == datetime.timedelta:
        return RandomTimedeltaValue()
    # Fall back to the type's no-arg constructor for anything unrecognized.
    return RandomDatetimeValue() if type_ == datetime.datetime else type_()
def _get_list_values(
    type_: Type | pydantic.types.ConstrainedList,  # type: ignore
    model_field: ModelField,
    use_default_values: bool = True,
    optionals_use_none: bool = False,
) -> list[Any]:
    """Generate a random-length list of values for a list annotation,
    honoring min/max item counts and ``unique_items``."""
    target_length = GetTargetLength(
        model_field.field_info.min_items, model_field.field_info.max_items
    )
    if issubclass(type_, pydantic.types.ConstrainedList):  # type: ignore
        element_types = typing.get_args(type_.item_type) or [
            type_.item_type
        ]  # pragma: no cover
    else:
        element_types = typing.get_args(type_)
    values: list = []  # type: ignore
    while len(values) < target_length:
        for element_type in element_types:
            candidate = _get_value(
                element_type, model_field, use_default_values, optionals_use_none
            )
            # Respect unique_items by discarding duplicates and retrying.
            if model_field.field_info.unique_items and candidate in values:
                continue  # pragma: no cover
            values.append(candidate)
    return values
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/generator/_query.py | Python | from pypika import PostgreSQLQuery, Query, Table
from pypika.dialects import PostgreSQLQueryBuilder
from pypika.queries import QueryBuilder
from ormdantic.handler import py_type_to_sql
from ormdantic.models import Map
from ormdantic.types import ModelType
class OrmQuery:
    """Build SQL queries for model CRUD operations."""

    def __init__(
        self,
        model: ModelType,
        table_map: Map,
        processed_models: list[ModelType] | None = None,
        query: Query | PostgreSQLQuery | None = None,
    ) -> None:
        """Store the model instance and resolve its table metadata.

        :param model: Model instance the queries operate on.
        :param table_map: Map of tablenames/models to table data.
        :param processed_models: Models already handled higher in the tree.
        :param query: Optional query class/builder to start from.
        """
        self._model = model
        # PostgreSQLQuery works for SQLite and PostgreSQL.
        self._query: QueryBuilder | PostgreSQLQueryBuilder | Query | PostgreSQLQuery = (
            query or PostgreSQLQuery
        )
        self._table_map = table_map
        self._processed_models = processed_models or []
        self._table_data = self._table_map.model_to_data[type(self._model)]
        self._table = Table(self._table_data.tablename)

    def get_insert_query(self) -> QueryBuilder | PostgreSQLQueryBuilder:
        """Get queries to insert model tree."""
        return self._get_inserts_or_upserts(is_upsert=False)

    def get_upsert_query(self) -> QueryBuilder | PostgreSQLQueryBuilder:
        """Get queries to upsert model tree."""
        return self._get_inserts_or_upserts(is_upsert=True)

    def get_update_queries(self) -> QueryBuilder | PostgreSQLQueryBuilder:
        """Build an UPDATE of every column, matched on the primary key."""
        self._query = self._query.update(self._table)
        for column, value in self._get_columns_and_values().items():
            self._query = self._query.set(column, value)
        self._query = self._query.where(
            self._table.field(self._table_data.pk)
            == self._model.__dict__[self._table_data.pk]
        )
        return self._query

    def get_patch_queries(  # type: ignore
        self,
    ) -> list[QueryBuilder | PostgreSQLQueryBuilder]:
        """Get queries to patch model tree.

        NOTE(review): body is empty, so this returns ``None`` despite the
        ``list`` annotation — confirm whether this is intentionally
        unimplemented.
        """

    def _get_inserts_or_upserts(
        self, is_upsert: bool
    ) -> QueryBuilder | PostgreSQLQueryBuilder:
        """Build an INSERT, optionally with ON CONFLICT DO UPDATE.

        :param is_upsert: When True and the builder supports it
            (PostgreSQL dialect), add an upsert clause keyed on the PK.
        """
        col_to_value = self._get_columns_and_values()
        self._query = (
            self._query.into(self._table)
            .columns(*self._table_data.columns)
            .insert(*col_to_value.values())
        )
        if is_upsert and isinstance(self._query, PostgreSQLQueryBuilder):
            self._query = self._query.on_conflict(self._table_data.pk)
            for column, value in col_to_value.items():
                self._query = self._query.do_update(self._table.field(column), value)
        return self._query

    def _get_columns_and_values(self):  # type: ignore
        """Map each table column to its SQL-compatible model value."""
        return {
            column: py_type_to_sql(self._table_map, self._model.__dict__[column])
            for column in self._table_data.columns
        }
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/generator/_serializer.py | Python | import json
from types import NoneType
from typing import Any, Generic, get_args
from pydantic import BaseModel, Field
from sqlalchemy.engine import CursorResult
from ormdantic.models import Map, OrmTable
from ormdantic.types import ModelType, SerializedType
class ResultSchema(BaseModel):
    """Model to describe the schema of a model result.

    Mirrors the alias tree the query builder creates, so flat result
    columns can be reassembled into nested models.
    """

    # Table data for this node; None past the requested depth.
    table_data: OrmTable | None = None  # type: ignore
    # Whether this node deserializes to a list of models.
    is_array: bool
    # Child schema nodes keyed by relationship field name.
    references: dict[str, "ResultSchema"] = Field(default_factory=lambda: {})
class OrmSerializer(Generic[SerializedType]):
    """Generate Python models from a table map and result set."""

    def __init__(
        self,
        table_data: OrmTable,  # type: ignore
        table_map: Map,
        # TODO: Missing type parameters for generic type "CursorResult".
        result_set: CursorResult[Any],
        is_array: bool,
        depth: int,
    ) -> None:
        """Generate Python models from a table map and result set.

        :param table_data: Table data for the returned model type.
        :param table_map: Map of tablenames and models.
        :param result_set: SQL Alchemy cursor result.
        :param is_array: Deserialize as a model or a list of models?
        :param depth: Model tree depth.
        """
        self._table_data = table_data
        self._table_map = table_map
        self._result_set = result_set
        self._is_array = is_array
        self._depth = depth
        # Schema tree mirroring the query's join aliases; walked in
        # lockstep with each column's alias path during deserialization.
        self._result_schema = ResultSchema(
            is_array=is_array,
            references={
                table_data.tablename: self._get_result_schema(
                    table_data, depth, is_array
                )
            },
        )
        # Column labels have the form "root/relation\\column".
        self._columns = [it[0] for it in self._result_set.cursor.description]
        self._return_dict: dict[str, Any] = {}

    def deserialize(self) -> SerializedType:
        """Deserialize the result set into Python models."""
        for row in self._result_set:
            # Maps alias path -> pk value seen in this row, so repeated
            # join rows collapse onto the same nested object.
            row_schema = {}
            for column_idx, column_tree in enumerate(self._columns):
                # `node` is the currently acted on level of depth in return.
                node = self._return_dict
                # `schema` describes acted on level of depth.
                schema = self._result_schema
                column_tree, column = column_tree.split("\\")
                current_tree = ""
                for branch in column_tree.split("/"):
                    current_tree += f"/{branch}"
                    # Update schema position.
                    schema = schema.references[branch]
                    # Update last pk if this column is a pk.
                    if (
                        column == schema.table_data.pk  # type: ignore
                        and current_tree == f"/{column_tree}"
                    ):
                        row_schema[current_tree] = row[column_idx]
                    # If this branch in schema is absent from result set.
                    if row_schema[current_tree] is None:
                        break
                    # Initialize this object if it is None.
                    if node.get(branch) is None:
                        node[branch] = {}
                    # Array relations are keyed by pk while building so
                    # duplicates from join fan-out are merged.
                    if (
                        schema.is_array
                        and node[branch].get(row_schema[current_tree]) is None
                    ):
                        node[branch][row_schema[current_tree]] = {}
                    # Set node to this level.
                    if schema.is_array:
                        node = node[branch][row_schema[current_tree]]
                    else:
                        node = node[branch]
                # If we did not break.
                else:
                    # Set value.
                    if column:
                        node[column] = row[column_idx]
        if not self._return_dict:
            return None  # type: ignore
        if self._result_schema.is_array:
            return [
                self._table_data.model(**record)
                for record in self._prep_result(self._return_dict, self._result_schema)[
                    self._table_data.tablename
                ]  # type: ignore
            ]
        return self._table_data.model(
            **self._prep_result(self._return_dict, self._result_schema)[
                self._table_data.tablename
            ]
        )

    def _prep_result(
        self, node: dict[Any, Any], schema: ResultSchema
    ) -> dict[str, Any]:
        """Recursively convert SQL values back to Python and flatten the
        pk-keyed dicts of array relations into plain lists."""
        for key, val in node.items():
            if td := schema.table_data:
                node[key] = self._sql_type_to_py(td.model, key, val)
            if key in schema.references:
                ref_schema = schema.references[key]
                if ref_schema.is_array:
                    node[key] = [
                        self._prep_result(v, ref_schema) for v in node[key].values()
                    ]
                else:
                    node[key] = self._prep_result(node[key], ref_schema)
        return node

    def _get_result_schema(
        self,
        table_data: OrmTable,  # type: ignore
        depth: int,
        is_array: bool,
    ) -> ResultSchema | None:
        """Build the schema tree for ``table_data`` down to ``depth``
        levels; returns None past the depth limit."""
        if depth < 0:
            return None
        return ResultSchema(
            table_data=table_data,
            is_array=is_array,
            references={
                column: schema
                for column, rel in table_data.relationships.items()
                if (
                    schema := self._get_result_schema(
                        table_data=self._table_map.name_to_data[rel.foreign_table],
                        depth=depth - 1,
                        # Back-referenced relations deserialize as lists.
                        is_array=rel.back_references is not None,
                    )
                )
                is not None
            },
        )

    @staticmethod
    def _sql_type_to_py(model_type: type[ModelType], column: str, value: Any) -> Any:
        """Coerce a raw SQL ``value`` to the Python type declared for
        ``column`` on ``model_type`` (JSON-decoding dicts/lists/models)."""
        if model_type.__fields__[column].type_ == dict:
            return {} if value is None else json.loads(value)
        if model_type.__fields__[column].type_ == list:
            return [] if value is None else json.loads(value)
        if value is None:
            return None
        if get_args(model_type.__fields__[column].type_):
            # Union field: try each member type until one converts.
            for arg in get_args(model_type.__fields__[column].type_):
                if arg is NoneType:
                    continue
                try:
                    return arg(value)
                except (AttributeError, TypeError):
                    continue
        try:
            if issubclass(model_type.__fields__[column].type_, BaseModel):
                return json.loads(value)
        except TypeError:
            return value
        return value
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/generator/_table.py | Python | """Module providing OrmTableGenerator."""
import uuid
from datetime import date, datetime
from types import UnionType
from typing import Any, get_args, get_origin
from pydantic import BaseModel
from pydantic.fields import ModelField
from sqlalchemy import (
JSON,
Boolean,
Column,
Date,
DateTime,
Float,
ForeignKey,
Integer,
MetaData,
String,
Table,
UniqueConstraint,
)
from sqlalchemy.dialects import postgresql
from sqlalchemy.ext.asyncio import AsyncEngine
from ormdantic.handler import TableName_From_Model, TypeConversionError
from ormdantic.models import Map, OrmTable
class OrmTableGenerator:
    """Generate and create SQLAlchemy tables from registered models."""

    def __init__(
        self,
        engine: AsyncEngine,
        metadata: MetaData,
        table_map: Map,
    ) -> None:
        """Initialize OrmTableGenerator.

        :param engine: Async engine used to create the schema.
        :param metadata: SQLAlchemy metadata the tables are attached to.
        :param table_map: Map of tablenames/models to table data.
        """
        self._engine = engine
        self._metadata = metadata
        self._table_map = table_map
        self._tables: list[str] = []

    async def init(self) -> None:
        """Generate SQL Alchemy tables and create them in the database."""
        for tablename, table_data in self._table_map.name_to_data.items():
            unique_constraints = (
                UniqueConstraint(*cols, name=f"{'_'.join(cols)}_constraint")
                for cols in table_data.unique_constraints
            )
            self._tables.append(tablename)
            Table(
                tablename,
                self._metadata,
                *self._get_columns(table_data),
                *unique_constraints,
            )
        async with self._engine.begin() as conn:
            await conn.run_sync(self._metadata.create_all)

    def _get_columns(
        self,
        table_data: OrmTable,  # type: ignore
    ) -> tuple[Column[Any], ...]:
        """Build the SQLAlchemy columns for one table's model fields."""
        columns = []
        for field_name, field in table_data.model.__fields__.items():
            kwargs = {
                "primary_key": field_name == table_data.pk,
                "index": field_name in table_data.indexed,
                "unique": field_name in table_data.unique,
                "nullable": not field.required,
            }
            # Back-referenced fields live on the foreign table; skip them.
            if field_name in table_data.back_references:
                continue
            column = self._get_column(field_name, field, **kwargs)
            if column is not None:
                columns.append(column)
        return tuple(columns)

    def _get_column(
        self, field_name: str, field: ModelField, **kwargs: Any
    ) -> Column[Any] | None:
        """Map a single pydantic field to a SQLAlchemy column.

        Check order matters: ``bool`` before ``int`` and ``datetime``
        before ``date``, because each is a subclass of the latter.
        """
        outer_origin = get_origin(field.outer_type_)
        origin = get_origin(field.type_)
        if outer_origin and outer_origin == list:
            return self._get_column_from_type_args(
                field_name, field, **kwargs
            )  # pragma: no cover
        if origin:
            if origin == UnionType:
                return self._get_column_from_type_args(field_name, field, **kwargs)
            else:
                raise TypeConversionError(field.type_)  # pragma: no cover
        if get_origin(field.outer_type_) == dict:
            return Column(field_name, JSON, **kwargs)
        if field.type_ is uuid.UUID:
            # BUG FIX: SQLAlchemy reports the dialect name as
            # "postgresql" (also for async drivers such as asyncpg),
            # never "postgres" — the old `== "postgres"` check always
            # fell through to String(36) even on PostgreSQL.
            col_type = (
                postgresql.UUID if self._engine.name == "postgresql" else String(36)
            )
            return Column(field_name, col_type, **kwargs)  # type: ignore
        if issubclass(field.type_, BaseModel):
            return Column(field_name, JSON, **kwargs)
        if issubclass(field.type_, str):
            return Column(field_name, String(field.field_info.max_length), **kwargs)
        if issubclass(field.type_, float):
            return Column(field_name, Float, **kwargs)
        if issubclass(field.type_, int):
            # bool is a subclass of int -> nested check
            if issubclass(field.type_, bool):
                return Column(field_name, Boolean, **kwargs)
            return Column(field_name, Integer, **kwargs)
        if issubclass(field.type_, date):
            # datetime is a subclass of date -> nested check
            if issubclass(field.type_, datetime):
                return Column(field_name, DateTime, **kwargs)
            return Column(field_name, Date, **kwargs)
        # Catchall for dict/list or any other.
        return Column(field_name, JSON, **kwargs)

    def _get_column_from_type_args(
        self, field_name: str, field: ModelField, **kwargs: Any
    ) -> Column[Any] | None:
        """Build a ForeignKey column from a ``Model | pk_type`` union."""
        for arg in get_args(field.type_):
            if arg in [it.model for it in self._table_map.name_to_data.values()]:
                foreign_table = TableName_From_Model(arg, self._table_map)
                foreign_data = self._table_map.name_to_data[foreign_table]
                return Column(
                    field_name,
                    ForeignKey(f"{foreign_table}.{foreign_data.pk}"),
                    **kwargs,
                )
        return None  # pragma: no cover
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/handler/__init__.py | Python | from ormdantic.handler.errors import (
ConfigurationError,
MismatchingBackReferenceError,
MustUnionForeignKeyError,
TypeConversionError,
UndefinedBackReferenceError,
)
from ormdantic.handler.helper import (
Model_Instance,
TableName_From_Model,
py_type_to_sql,
)
from ormdantic.handler.random import _get_target_length as GetTargetLength
from ormdantic.handler.random import _random_date_value as RandomDateValue
from ormdantic.handler.random import _random_datetime_value as RandomDatetimeValue
from ormdantic.handler.random import _random_number_value as RandomNumberValue
from ormdantic.handler.random import _random_str_value as RandomStrValue
from ormdantic.handler.random import _random_time_value as RandomTimeValue
from ormdantic.handler.random import _random_timedelta_value as RandomTimedeltaValue
from ormdantic.handler.snake import snake as snake_case
__all__ = [
"TableName_From_Model",
"ConfigurationError",
"UndefinedBackReferenceError",
"MismatchingBackReferenceError",
"MustUnionForeignKeyError",
"TypeConversionError",
"snake_case",
"Model_Instance",
"py_type_to_sql",
"RandomStrValue",
"RandomNumberValue",
"RandomDatetimeValue",
"RandomDateValue",
"RandomTimedeltaValue",
"RandomTimeValue",
"GetTargetLength",
]
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/handler/errors.py | Python | from typing import Type
import sqlalchemy
class ConfigurationError(Exception):
    """Base error raised for mal-configured database models or schemas."""

    def __init__(self, msg: str):
        """Store the human-readable description of the problem."""
        super().__init__(msg)
class UndefinedBackReferenceError(ConfigurationError):
    """Raised when a back reference is missing from a table.

    :param table_a: Table declaring the many-relation field.
    :param table_b: Foreign table being referenced.
    :param field: Name of the offending field on ``table_a``.
    """

    def __init__(self, table_a: str, table_b: str, field: str) -> None:
        super().__init__(
            # BUG FIX: the message previously read `to table {table_b}"`,
            # missing the opening quote around the table name.
            f'Many relation defined on "{table_a}.{field}" to table "{table_b}" must'
            f' be defined with a back reference on "{table_a}".'
        )
class MismatchingBackReferenceError(ConfigurationError):
    """Raised when a back reference is typed incorrectly."""

    def __init__(
        self, table_a: str, table_b: str, field: str, back_reference: str
    ) -> None:
        """Compose the mismatch description and delegate to the base."""
        message = (
            f'Many relation defined on "{table_a}.{field}" to'
            f' "{table_b}.{back_reference}" must use the same model type'
            f" back-referenced."
        )
        super().__init__(message)
class MustUnionForeignKeyError(ConfigurationError):
    """Raised when a relation field doesn't allow for just foreign key."""

    # NOTE(review): callers pass `pk_type` as the type's __name__ string
    # despite the Type annotation — confirm before tightening the hint.
    def __init__(
        self,
        table_a: str,
        table_b: str,
        field: str,
        model_b: Type,  # type: ignore
        pk_type: Type,  # type: ignore
    ) -> None:
        """Compose the guidance message and delegate to the base class."""
        message = (
            f'Relation defined on "{table_a}.{field}" to "{table_b}" must be a union'
            f' type of "Model | model_pk_type" e.g. "{model_b.__name__} | {pk_type}"'
        )
        super().__init__(message)
class TypeConversionError(ConfigurationError):
    """Raised when a Python type fails to convert to SQL."""

    # NOTE: the parameter shadows the builtin `type`; kept as-is for
    # keyword-argument compatibility with existing callers.
    def __init__(self, type: Type) -> None:  # type: ignore
        """Report the unconvertible type and the SQLAlchemy version."""
        super().__init__(
            f"Type {type} is not supported by SQLAlchemy {sqlalchemy.__version__}."
        )
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/handler/helper.py | Python | """Utility functions used throughout the project."""
import json
from typing import Any, Type
from uuid import UUID
from pydantic import BaseModel
from ormdantic.models.models import Map
from ormdantic.types import ModelType
def TableName_From_Model(model: Type[ModelType], table_map: Map) -> str:
"""Get a tablename from the model and schema."""
return [
tablename
for tablename, data in table_map.name_to_data.items()
if data.model == model
][0]
def Model_Instance(model: BaseModel, table_map: Map) -> str:
"""Get a tablename from a model instance."""
return [k for k, v in table_map.name_to_data.items() if isinstance(model, v.model)][
0
]
def py_type_to_sql(table_map: Map, value: Any) -> Any:
"""Get value as SQL compatible type."""
if isinstance(value, UUID):
return str(value)
if isinstance(value, (dict, list)):
return json.dumps(value)
if isinstance(value, BaseModel) and type(value) in table_map.model_to_data:
tablename = Model_Instance(value, table_map)
return py_type_to_sql(
table_map, value.__dict__[table_map.name_to_data[tablename].pk]
)
return value.json() if isinstance(value, BaseModel) else value
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/handler/random.py | Python | import datetime
import math
import random
import string
from pydantic.fields import ModelField
from ormdantic.types import AnyNumber, default_max_length
def _random_str_value(model_field: ModelField) -> str:
    """Get a random alphanumeric string honoring length constraints."""
    length = _get_target_length(
        model_field.field_info.min_length, model_field.field_info.max_length
    )
    alphabet = string.ascii_letters + string.digits
    return _random_str(alphabet, length)
def _random_number_value(model_field: ModelField) -> AnyNumber:
"""Get a random number."""
default_max_difference = 256
iter_size = model_field.field_info.multiple_of or 1
# Determine lower bound.
lower = 0
if ge := model_field.field_info.ge:
while lower < ge:
lower += iter_size
if gt := model_field.field_info.gt:
while lower <= gt:
lower += iter_size
# Determine upper bound.
upper = lower + iter_size * default_max_difference
if le := model_field.field_info.le:
while upper > le:
upper -= iter_size
if lt := model_field.field_info.lt:
while upper >= lt:
upper -= iter_size
# Ensure lower bound is not greater than upper bound.
if (
not model_field.field_info.ge
and not model_field.field_info.gt
and lower > upper
):
lower = upper - iter_size * default_max_difference
# Ensure upper bound is not less than lower bound.
if not model_field.field_info.multiple_of:
return random.randint(lower, upper)
max_iter_distance = abs(math.floor((upper - lower) / iter_size))
return lower + iter_size * random.randint(1, max_iter_distance)
def _random_datetime_value() -> datetime.datetime:
    """Get a random datetime (random date plus a random time-of-day)."""
    day = _random_date_value()
    midnight = datetime.datetime.fromordinal(day.toordinal())
    return midnight + _random_timedelta_value()
def _random_date_value() -> datetime.date:
"""Get a random date."""
return datetime.date(
year=random.randint(1, 9999),
month=random.randint(1, 12),
day=random.randint(1, 28),
)
def _random_time_value() -> datetime.time:
"""Get a random time."""
return datetime.time(
hour=random.randint(0, 23),
minute=random.randint(0, 59),
second=random.randint(0, 59),
)
def _random_timedelta_value() -> datetime.timedelta:
"""Get a random timedelta."""
return datetime.timedelta(
hours=random.randint(0, 23),
minutes=random.randint(0, 59),
seconds=random.randint(0, 59),
)
def _random_str(choices: str, target_length: int) -> str:
"""Get a random string."""
return "".join(random.choice(choices) for _ in range(target_length))
def _get_target_length(min_length: int | None, max_length: int | None) -> int:
"""Get a random target length."""
if not min_length:
if max_length is not None:
min_length = random.randint(0, max_length - 1)
else:
min_length = random.randint(0, default_max_length)
max_length = max_length or random.randint(1, default_max_length) + min_length
return random.choice(range(min_length, max_length))
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/handler/snake.py | Python | import re
from typing import Union
def snake(string: str) -> str:
    """Return a version of the string in ``snake_case`` format."""
    lowered = (word.lower() for word in get_words(string))
    return "_".join(lowered)
def get_words(string: str) -> list[str]:
    """Get a list of the words in a string in the order they appear."""
    # Seed with runs of word characters, dropping separators/underscores.
    words = [piece for piece in re.split(r"\b|_", string) if re.match(r"[\d\w]", piece)]
    # Split on lower then upper: "oneTwo" -> ["one", "Two"]
    words = _split_words_on_regex(words, re.compile(r"(?<=[a-z])(?=[A-Z])"))
    # Split on upper then upper + lower: "JSONWord" -> ["JSON", "Word"]
    words = _split_words_on_regex(words, re.compile(r"(?<=[A-Z])(?=[A-Z][a-z])"))
    # Split on number + letter: "TO1Cat23dog" -> ["TO1", "Cat23", "dog"]
    words = _split_words_on_regex(words, re.compile(r"(?<=\d)(?=[A-Za-z])"))
    return words
def _split_words_on_regex(
words: list[str],
regex: Union[re.Pattern, str], # type: ignore
) -> list[str]:
"""Split a list of words on a regex, returning the split words."""
words = words.copy()
for i, word in enumerate(words):
split_words = re.split(regex, word)
if len(split_words) > 1:
words.pop(i)
for j, sw in enumerate(split_words):
words.insert(i + j, sw)
return words
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/models/__init__.py | Python | from ormdantic.models.models import Map, OrmTable, Relationship, Result
__all__ = [
"Relationship",
"OrmTable",
"Map",
"Result",
]
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/models/models.py | Python | from typing import Generic, Type
from pydantic import BaseModel, Field
from pydantic.generics import GenericModel
from ormdantic.types import ModelType
class Result(GenericModel, Generic[ModelType]):
    """Search result object.

    Paginated wrapper for the records returned by find-many queries.
    """

    # Offset the page of results starts at.
    offset: int
    # Maximum number of records requested for this page.
    limit: int
    # Records returned for this page.
    data: list[ModelType]
class Relationship(BaseModel):
    """Describes a relationship from one table to another."""

    # Name of the table being referenced.
    foreign_table: str
    # Field on the foreign table pointing back here; set only for
    # back-referenced (one-to-many) relations.
    back_references: str | None = None
class OrmTable(GenericModel, Generic[ModelType]):
    """
    Class to store table information,
    including relationships,
    back references for many-to-many relationships.
    """

    # Pydantic model this table stores.
    model: Type[ModelType]
    # Name of the database table.
    tablename: str
    # Name of the primary-key column.
    pk: str
    # Columns that get database indexes.
    indexed: list[str]
    # Columns with single-column UNIQUE constraints.
    unique: list[str]
    # Multi-column UNIQUE constraints.
    unique_constraints: list[list[str]]
    # All stored column names (back-referenced fields excluded).
    columns: list[str]
    # Relation metadata keyed by field name; populated during init().
    relationships: dict[str, Relationship]
    # Field name -> back-referencing field on the foreign table.
    back_references: dict[str, str]
class Map(BaseModel):
    """Map tablename to table data and model to table data."""

    # Lookup from tablename to its table metadata.
    name_to_data: dict[str, OrmTable] = Field(  # type: ignore
        default_factory=lambda: {}
    )
    # Lookup from model class to the same table metadata.
    model_to_data: dict[ModelType, OrmTable] = Field(  # type: ignore
        default_factory=lambda: {}
    )
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/orm.py | Python | """Module providing a way to create ORM models and schemas"""
from types import UnionType
from typing import Callable, ForwardRef, Type, get_args, get_origin
from pydantic.fields import ModelField
from sqlalchemy import MetaData
from sqlalchemy.ext.asyncio import create_async_engine
from ormdantic.generator import CRUD, Table
from ormdantic.handler import (
MismatchingBackReferenceError,
MustUnionForeignKeyError,
UndefinedBackReferenceError,
snake_case,
)
from ormdantic.models import Map, OrmTable, Relationship
from ormdantic.types import ModelType
class Ormdantic:
"""
Ormdantic provides a way to create ORM models and schemas.
"""
def __init__(self, connection: str) -> None:
    """Register models as ORM models and create schemas.

    :param connection: Connection URL passed to SQLAlchemy's
        ``create_async_engine`` (e.g. ``sqlite+aiosqlite:///db.sqlite3``).
    """
    # Metadata is created lazily in `init()`.
    self._metadata: MetaData | None = None
    # One CRUD generator per registered model; filled in `init()`.
    self._crud_generators: dict[Type, CRUD] = {}  # type: ignore
    self._engine = create_async_engine(connection)
    self._table_map: Map = Map()
def __getitem__(self, item: Type[ModelType]) -> CRUD[ModelType]:
    """Get the CRUD interface for the given pydantic model.

    :raises KeyError: If the model is not registered or `init()` has
        not been awaited yet.
    """
    return self._crud_generators[item]
def table(
    self,
    tablename: str | None = None,
    *,
    pk: str,
    indexed: list[str] | None = None,
    unique: list[str] | None = None,
    unique_constraints: list[list[str]] | None = None,
    back_references: dict[str, str] | None = None,
) -> Callable[[Type[ModelType]], Type[ModelType]]:
    """Register a model as a database table.

    Used as a decorator factory on a pydantic model class, e.g.
    ``@db.table(pk="id")``.

    :param tablename: Explicit table name; defaults to the snake_case
        of the decorated class name.
    :param pk: Name of the primary-key field (required, keyword-only).
    :param indexed: Fields to index.
    :param unique: Fields with single-column UNIQUE constraints.
    :param unique_constraints: Multi-column UNIQUE constraints.
    :param back_references: Field name -> back-referencing field for
        one-to-many relations.
    :return: Decorator that registers the class and returns it unchanged.
    """

    def _wrapper(cls: Type[ModelType]) -> Type[ModelType]:
        """Decorator function."""
        tablename_ = tablename or snake_case(cls.__name__)
        cls_back_references = back_references or {}
        table_metadata = OrmTable[ModelType](
            model=cls,
            tablename=tablename_,
            pk=pk,
            indexed=indexed or [],
            unique=unique or [],
            unique_constraints=unique_constraints or [],
            # Back-referenced fields are not stored as columns.
            columns=[
                field
                for field in cls.__fields__
                if field not in cls_back_references
            ],
            # Relationships are resolved later, in `init()`.
            relationships={},
            back_references=cls_back_references,
        )
        # Register under both the model class and the tablename.
        self._table_map.model_to_data[cls] = table_metadata
        self._table_map.name_to_data[tablename_] = table_metadata
        return cls

    return _wrapper
async def init(self) -> None:
"""Initialize ORM models."""
# Populate relation information.
for table_data in self._table_map.name_to_data.values():
rels = self.get(table_data)
table_data.relationships = rels
# Now that relation information is populated generate tables.
self._metadata = MetaData()
for table_data in self._table_map.name_to_data.values():
self._crud_generators[table_data.model] = CRUD(
table_data,
self._table_map,
self._engine,
)
await Table(self._engine, self._metadata, self._table_map).init()
async with self._engine.begin() as conn:
await conn.run_sync(self._metadata.create_all)
def get(self, table_data: OrmTable[ModelType]) -> dict[str, Relationship]:
"""Get relationships for a given table."""
relationships = {}
for field_name, field in table_data.model.__fields__.items():
related_table = self._get_related_table(field)
if related_table is None:
continue
if back_reference := table_data.back_references.get(field_name):
relationships[field_name] = self._get_many_relationship(
field_name, back_reference, table_data, related_table
)
continue
if get_origin(field.outer_type_) == list or field.type_ == ForwardRef(
f"{related_table.model.__name__}"
):
raise UndefinedBackReferenceError(
table_data.tablename, related_table.tablename, field_name
)
args = get_args(field.type_)
correct_type = (
related_table.model.__fields__[related_table.pk].type_ in args
)
origin = get_origin(field.type_)
if not args or origin != UnionType or not correct_type:
raise MustUnionForeignKeyError(
table_data.tablename,
related_table.tablename,
field_name,
related_table.model,
related_table.model.__fields__[related_table.pk].type_.__name__,
)
relationships[field_name] = Relationship(
foreign_table=related_table.tablename
)
return relationships
def _get_related_table(self, field: ModelField) -> OrmTable | None: # type: ignore
"""Get related table for a given field."""
related_table: OrmTable | None = None # type: ignore
# Try to get foreign model from union.
if args := get_args(field.type_):
for arg in args:
try:
related_table = self._table_map.model_to_data.get(arg)
except TypeError:
break
if related_table is not None:
break
# Try to get foreign table from type.
return related_table or self._table_map.model_to_data.get(field.type_)
@staticmethod
def _get_many_relationship(
field_name: str,
back_reference: str,
table_data: OrmTable, # type: ignore
related_table: OrmTable, # type: ignore
) -> Relationship:
"""Get many-to-many relationship."""
back_referenced_field = related_table.model.__fields__.get(back_reference)
# TODO: Check if back-reference is present but mismatched in type.
if (
table_data.model not in get_args(back_referenced_field.type_)
and table_data.model != back_referenced_field.type_
):
raise MismatchingBackReferenceError(
table_data.tablename,
related_table.tablename,
field_name,
back_reference,
)
# Is the back referenced field also a list?
return Relationship(
foreign_table=related_table.tablename, back_references=back_reference
)
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/types/__init__.py | Python | """Provides ModelType TypeVar used throughout lib."""
from ormdantic.types.base import (
    AnyNumber,
    ModelType,
    SerializedType,
    default_max_length,
)

# Explicit public API of `ormdantic.types`.
__all__ = ["ModelType", "SerializedType", "AnyNumber", "default_max_length"]
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
ormdantic/types/base.py | Python | from numbers import Number
from typing import TypeAlias, TypeVar

from pydantic import BaseModel

# ModelType is a TypeVar bound to BaseModel, so it can only be used
# with subclasses of BaseModel.
ModelType = TypeVar("ModelType", bound=BaseModel)

# SerializedType is an unconstrained TypeVar standing in for a model's
# serialized representation (any type is accepted).
SerializedType = TypeVar("SerializedType")

# AnyNumber is a TypeAlias accepting `numbers.Number` subclasses as well
# as plain floats.
AnyNumber: TypeAlias = Number | float

# This is the default maximum length for strings generated by the generator.
default_max_length = 5
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/clean.sh | Shell | #!/bin/sh -e
# Remove Python bytecode, editor backups, coverage data, caches, and build
# artifacts from the working tree.
#
# `find … -delete` / `find … -prune -exec rm -rf {} +` are used instead of
# `rm $(find …)` so paths containing whitespace are handled safely, and the
# duplicate '*.egg-info' entry of the old script is dropped.
find . -type f -name '*.py[co]' -delete
find . -type f -name '*~' -delete
find . -type f -name '.*~' -delete
find . -type f -name .coverage -delete
find . -type f -name '.coverage.*' -delete
find . -name __pycache__ -prune -exec rm -rf {} +
find . -type d -name '*.egg-info' -prune -exec rm -rf {} +
find . -type d -name 'pip-wheel-metadata' -prune -exec rm -rf {} +
find . -type d -name .pytest_cache -prune -exec rm -rf {} +
find . -type d -name .ruff_cache -prune -exec rm -rf {} +
find . -type d -name .cache -prune -exec rm -rf {} +
find . -type d -name .mypy_cache -prune -exec rm -rf {} +
find . -type d -name htmlcov -prune -exec rm -rf {} +
find . -type d -name build -prune -exec rm -rf {} +
find . -type d -name dist -prune -exec rm -rf {} +
find . -name db.sqlite3 -exec rm -rf {} +
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/docs_build.sh | Shell | #!/usr/bin/env bash
set -e
set -x

# Build the docs into the `build/` directory with MkDocs.
mkdocs build -d build
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/docs_serve.sh | Shell | #!/usr/bin/env bash
set -e
set -x

# Serve the docs locally, rebuilding and reloading the browser on change.
mkdocs serve --livereload
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/format.sh | Shell | #!/usr/bin/env bash
set -e
set -x

# Run all configured pre-commit hooks (formatters/linters) across the repo,
# printing a diff whenever a hook rewrites files.
pre-commit run --all-files --verbose --show-diff-on-failure
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/integration.sh | Shell | #!/usr/bin/env bash
set -e
set -x

echo "ENV=${ENV}"

# Make the repository root importable so `ormdantic` resolves without install.
export PYTHONPATH=.
# Run the end-to-end demo against the DATABASE_URL environment variable.
python3 tests/integration/demo.py
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/lint.sh | Shell | #!/usr/bin/env bash
set -e
set -x

# Static type checking of the package, with error codes for suppressions.
mypy --show-error-codes ormdantic
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/requirements.sh | Shell | #!/usr/bin/env bash
# Regenerate every pinned requirements lock file with uv, then reinstall.
refresh-lockfiles() {
    echo "Updating requirements/*.txt files using uv"
    # Delete stale lock files first; all.txt only aggregates the others.
    find requirements/ -name '*.txt' ! -name 'all.txt' -type f -delete
    uv pip compile requirements/linting.in -o requirements/linting.txt
    uv pip compile requirements/testing.in -o requirements/testing.txt
    uv pip compile requirements/extra.in -o requirements/extra.txt
    uv pip compile requirements/docs.in -o requirements/docs.txt
    uv pip compile pyproject.toml -o requirements/pyproject.txt
    uv pip install -r requirements/all.txt
}

refresh-lockfiles
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/test.sh | Shell | #!/usr/bin/env bash
set -e
set -x

echo "ENV=${ENV}"

# Make the repository root importable so tests can import `ormdantic`.
export PYTHONPATH=.
# Run the suite with coverage, emitting an XML report for CI upload.
pytest --cov=ormdantic --cov-report=xml
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/integration/demo.py | Python | import asyncio
from functools import wraps
from uuid import UUID, uuid4
from decouple import config
from pydantic import BaseModel, Field
from ormdantic import Ormdantic
# Read the async database URL from the environment / .env file and build the
# ORM registry that the table decorators below attach to.
connection = config("DATABASE_URL")
db = Ormdantic(connection)
@db.table(pk="id", indexed=["name"])
class Flavor(BaseModel):
"""A coffee flavor."""
id: UUID = Field(default_factory=uuid4)
name: str = Field(max_length=63)
@db.table(pk="id")
class Coffee(BaseModel):
"""Drink it in the morning."""
id: UUID = Field(default_factory=uuid4)
sweetener: str | None = Field(max_length=63)
sweetener_count: int | None = None
flavor: Flavor | UUID
def sleep_after(func):
    """Decorator for async methods: after the wrapped coroutine finishes,
    pause for ``self.sleep_duration`` seconds before returning its result."""

    @wraps(func)
    async def delayed(self, *args, **kwargs):
        outcome = await func(self, *args, **kwargs)
        await asyncio.sleep(self.sleep_duration)
        return outcome

    return delayed
class CoffeeDemo:
    """Scripted tour of Ormdantic CRUD operations.

    Every step is decorated with `sleep_after`, so the demo pauses for
    `sleep_duration` seconds between operations to make the output easy
    to follow.
    """

    def __init__(self, sleep_duration=5):
        # Shared module-level registry; the tables above were declared on it.
        self.db = db
        # Seconds to pause after each decorated step.
        self.sleep_duration = sleep_duration

    @sleep_after
    async def init_db(self):
        """Initialize the database."""
        async with self.db._engine.begin() as conn:
            await self.db.init()
            # Recreate the schema from scratch on every run.
            await conn.run_sync(self.db._metadata.drop_all)  # type: ignore
            await conn.run_sync(self.db._metadata.create_all)  # type: ignore

    @sleep_after
    async def insert_flavor(self, name):
        """Insert a new flavor."""
        flavor = Flavor(name=name)
        await self.db[Flavor].insert(flavor)
        return flavor

    @sleep_after
    async def insert_coffee(self, sweetener, flavor):
        """Insert a new coffee."""
        coffee = Coffee(sweetener=sweetener, flavor=flavor)
        await self.db[Coffee].insert(coffee)
        return coffee

    @sleep_after
    async def count_flavors(self):
        """Count all flavors."""
        count = await self.db[Flavor].count()
        print(f"Total flavors: {count}")

    @sleep_after
    async def count_coffees_with_condition(self, sweetener, depth=1):
        """Count coffees with a specific sweetener."""
        count = await self.db[Coffee].count(where={"sweetener": sweetener}, depth=depth)
        print(f"Coffees with sweetener {sweetener}: {count}")

    @sleep_after
    async def find_flavor(self, flavor_id):
        """Find a flavor by ID."""
        flavor = await self.db[Flavor].find_one(flavor_id)
        print(f"Found flavor: {flavor}")

    @sleep_after
    async def find_coffee(self, coffee_id, depth=1):
        """Find a coffee by ID, resolving relations `depth` levels deep."""
        coffee = await self.db[Coffee].find_one(coffee_id, depth=depth)
        print(f"Found coffee: {coffee}")

    @sleep_after
    async def find_all_flavors(self):
        """Find all flavors."""
        flavors = await self.db[Flavor].find_many()
        print(f"All flavors: {flavors}")

    @sleep_after
    async def find_flavors_paginated(self, name, limit=2, offset=2):
        """Find flavors with pagination."""
        flavors = await self.db[Flavor].find_many(
            where={"name": name}, order_by=["id", "name"], limit=limit, offset=offset
        )
        print(f"Paginated flavors: {flavors}")

    @sleep_after
    async def update_flavor(self, flavor, new_name):
        """Update a flavor's name."""
        flavor.name = new_name
        updated_flavor = await self.db[Flavor].update(flavor)
        print(f"Updated flavor: {updated_flavor}")

    @sleep_after
    async def upsert_flavor(self, flavor, new_name):
        """Upsert a flavor."""
        flavor.name = new_name
        upserted_flavor = await self.db[Flavor].upsert(flavor)
        print(f"Upserted flavor: {upserted_flavor}")

    @sleep_after
    async def delete_flavor(self, flavor_name):
        """Delete a flavor by name."""
        # NOTE(review): this passes the flavor *name* where the test suite
        # passes the primary key to `delete` — confirm against CRUD.delete.
        await self.db[Flavor].delete(flavor_name)
        print(f"Deleted flavor: {flavor_name}")

    async def run_demo(self):
        """Run the full demo."""
        await self.init_db()
        mocha = await self.insert_flavor("mocha")
        await self.insert_coffee(1, mocha)
        caramel = await self.insert_flavor("caramel")
        await self.insert_coffee(2, caramel)
        latte = await self.insert_flavor("latte")
        await self.insert_coffee(3, latte)
        mocha_chai = await self.insert_flavor("mocha-chai")
        await self.insert_coffee(6, mocha_chai)
        hot_chocolate = await self.insert_flavor("hot chocolate")
        coffee = await self.insert_coffee(None, hot_chocolate)
        await self.count_flavors()
        await self.count_coffees_with_condition("6")
        await self.find_flavor(mocha.id)
        await self.find_coffee(coffee.id)
        await self.find_all_flavors()
        await self.find_flavors_paginated("mocha")
        await self.update_flavor(mocha, "caramel")
        await self.upsert_flavor(mocha, "vanilla")
        await self.delete_flavor(mocha.name)
if __name__ == "__main__":
    # Entry point: run the full demo with a 5-second pause between steps.
    demo = CoffeeDemo(sleep_duration=5)
    asyncio.run(demo.run_demo())
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_errors.py | Python | from __future__ import annotations
import asyncio
import unittest
from typing import Callable
from uuid import UUID, uuid4
import pytest
import sqlalchemy
from decouple import config
from pydantic import BaseModel, Field
from sqlalchemy import MetaData
from ormdantic import Ormdantic
from ormdantic.handler import (
MismatchingBackReferenceError,
MustUnionForeignKeyError,
TypeConversionError,
UndefinedBackReferenceError,
)
URL = config("DATABASE_URL")
connection = URL
# One registry per failure scenario: each db below registers models whose
# relation declarations are deliberately invalid, so `init()` must raise.
db_1 = Ormdantic(connection)
db_2 = Ormdantic(connection)
db_3 = Ormdantic(connection)
db_4 = Ormdantic(connection)
db_5 = Ormdantic(connection)
@db_1.table(pk="id")
class UndefinedBackreference(BaseModel):
"""Missing explicit back-reference to raise exception."""
id: UUID = Field(default_factory=uuid4)
self_ref: list[UndefinedBackreference | UUID] | None
@db_2.table(pk="id", back_references={"other": "other"})
class MismatchedBackreferenceA(BaseModel):
"""Type of back-reference for "other" is not this model."""
id: UUID = Field(default_factory=uuid4)
other: list[MismatchedBackreferenceB] | None
@db_2.table(pk="id", back_references={"other": "other"})
class MismatchedBackreferenceB(BaseModel):
"""Type of back-reference for "other" is this model."""
id: UUID = Field(default_factory=uuid4)
other: list[MismatchedBackreferenceB] | None
@db_3.table(pk="id")
class Table_1(BaseModel):
"""A table."""
id: UUID = Field(default_factory=uuid4)
@db_3.table(pk="id")
class Table_2(BaseModel):
"""Another table."""
id: UUID = Field(default_factory=uuid4)
table: Table_1
@db_4.table(pk="id")
class Table_3(BaseModel):
"""Another table."""
id: UUID = Field(default_factory=uuid4)
@db_4.table(pk="id")
class Table_4(BaseModel):
"""Another table."""
id: UUID = Field(default_factory=uuid4)
table: Table_3 | int
@db_5.table(pk="id")
class Table_5(BaseModel):
"""Another table."""
id: UUID = Field(default_factory=uuid4)
table: Callable[[], int]
# Resolve the self/forward references used in the models above.
MismatchedBackreferenceA.update_forward_refs()
MismatchedBackreferenceB.update_forward_refs()
UndefinedBackreference.update_forward_refs()
class ormdanticErrorTesting(unittest.IsolatedAsyncioTestCase):
    """Each invalid relation declaration must raise its dedicated error on init()."""

    def setUp(self) -> None:
        """Setup clean sqlite database."""

        async def _init(db: Ormdantic) -> None:
            # Fresh MetaData: drop whatever a previous run may have created.
            metadata = MetaData()
            async with db._engine.begin() as conn:
                await conn.run_sync(metadata.drop_all)

        asyncio.run(_init(db_1))
        asyncio.run(_init(db_2))
        asyncio.run(_init(db_3))
        asyncio.run(_init(db_4))
        asyncio.run(_init(db_5))

    @staticmethod
    async def test_undefined_back_reference() -> None:
        # List-typed self-reference without a back_references entry.
        with pytest.raises(UndefinedBackReferenceError) as e:
            await db_1.init()
        assert e.value.args[0] == (
            'Many relation defined on "undefined_backreference.self_ref" to table undefined_backreference" must be defined with a back reference on "undefined_backreference".'
        )

    @staticmethod
    async def test_mismatched_back_reference() -> None:
        # Back-reference exists but is typed with the wrong model.
        with pytest.raises(MismatchingBackReferenceError) as e:
            await db_2.init()
        assert (
            e.value.args[0]
            == 'Many relation defined on "mismatched_backreference_a.other" to'
            ' "mismatched_backreference_b.other" must use the same model type'
            " back-referenced."
        )

    @staticmethod
    async def test_missing_foreign_key_union() -> None:
        # Foreign key declared as a bare model type instead of `Model | pk`.
        with pytest.raises(MustUnionForeignKeyError) as e:
            await db_3.init()
        assert (
            e.value.args[0]
            == 'Relation defined on "table_2.table" to "table_1" must be a union type of "Model |'
            ' model_pk_type" e.g. "Table_1 | UUID"'
        )

    @staticmethod
    async def test_missing_wrong_pk_type() -> None:
        # Union present, but with the wrong primary-key type.
        with pytest.raises(MustUnionForeignKeyError) as e:
            await db_4.init()
        assert (
            e.value.args[0]
            == 'Relation defined on "table_4.table" to "table_3" must be a union type of "Model |'
            ' model_pk_type" e.g. "Table_3 | UUID"'
        )

    @staticmethod
    async def test_conversion_type_error() -> None:
        # Field type with no SQLAlchemy column equivalent.
        with pytest.raises(TypeConversionError) as e:
            await db_5.init()
        assert (
            e.value.args[0]
            == f"Type typing.Callable[[], int] is not supported by SQLAlchemy {sqlalchemy.__version__}."
        )
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_generator.py | Python | from __future__ import annotations
import datetime
import types
from collections import OrderedDict
from enum import Enum, auto
from uuid import UUID, uuid4
from pydantic import BaseModel, Field
from ormdantic.generator import Generator
# Closed set of flavors used by the Coffee fixture below.
class Flavor(Enum):
    """Coffee flavors."""

    MOCHA = auto()
    VANILLA = auto()
# Nested model embedded in Coffee to exercise sub-model generation.
class Brand(BaseModel):
    """A coffee brand."""

    brand_name: str
class Coffee(BaseModel):
    """Test model covering every field kind the Generator must handle."""

    id: UUID = Field(default_factory=uuid4)
    description: str | None
    cream: bool
    sweetener: int
    flavor: Flavor
    brand: Brand
    volume: float = 3.14
    # Container and union types.
    bagels: list[str]
    list_of_lists: list[list[str]]
    dictionary: dict[str, list[int]]
    union: str | int | list[str]
    # Numeric constraints: multiple_of combined with each bound kind.
    multiple_of_float: float = Field(multiple_of=3.14)
    multiple_of_int_ge: int = Field(multiple_of=7, ge=1000)
    multiple_of_int_gt: int = Field(multiple_of=7, gt=-1000)
    multiple_of_int_le: int = Field(multiple_of=7, le=1000)
    multiple_of_int_lt: int = Field(multiple_of=7, lt=-1000)
    range_int_multiple_of: int = Field(lt=200, gt=101, multiple_of=11)
    range_int: int = Field(le=200, ge=101)
    always_none: types.NoneType = None  # type: ignore
    # String length constraints.
    str_constraint_min: str = Field(min_length=101)
    str_constraint_max: str = Field(max_length=200)
    str_constraint_minmax: str = Field(min_length=101, max_length=200)
    # Temporal types.
    date_field: datetime.date
    time_field: datetime.time
    timedelta_field: datetime.timedelta
    datetime_field: datetime.datetime
    not_specifically_supported_type: OrderedDict  # type: ignore
    forward_ref_field: ForwardRefClass | None
class ForwardRefClass(BaseModel):
    """Target of the forward reference in `Coffee.forward_ref_field`."""

    str_field: str
# Resolve the `ForwardRefClass` forward reference declared above.
Coffee.update_forward_refs()
def test_validate() -> None:
    """Generating the model must not raise validation errors."""
    Generator(Coffee)
def test_generator() -> None:
    """Optional fields are populated (non-None) by default."""
    assert Generator(Coffee).description is not None
def test_none() -> None:
    """With optionals_use_none=True, optional fields come back as None."""
    model = Generator(Coffee, optionals_use_none=True)
    assert model.description is None
def test_use_defaults() -> None:
    """use_default_values=False regenerates defaults; explicit kwargs win."""
    id_ = uuid4()
    assert Generator(Coffee, use_default_values=False).id != id_
    assert Generator(Coffee, id=id_).id == id_
def test_use_kwargs() -> None:
    """Explicit keyword arguments are used verbatim."""
    brand = Brand(brand_name=str(uuid4()))
    assert Generator(Coffee, brand=brand).brand == brand
def test_multiple_of_int_ge() -> None:
    """Test multiple_of combined with an inclusive lower bound (ge)."""
    model = Generator(Coffee)
    assert model.multiple_of_int_ge % 7 == 0
    assert model.multiple_of_int_ge >= 1000
def test_range_int_multiple_of() -> None:
    """Test an exclusive range (gt/lt) combined with multiple_of."""
    model = Generator(Coffee)
    assert model.range_int_multiple_of < 200
    assert model.range_int_multiple_of > 101
    assert model.range_int_multiple_of % 11 == 0
def test_range_int() -> None:
    """Test an inclusive range (ge/le) without multiple_of."""
    model = Generator(Coffee)
    assert model.range_int <= 200
    assert model.range_int >= 101
def test_str_constraint_min() -> None:
    """Test str constraint min_length is respected."""
    model = Generator(Coffee)
    assert len(model.str_constraint_min) >= 101
def test_str_constraint_max() -> None:
    """Test str constraint max_length is respected."""
    model = Generator(Coffee)
    assert len(model.str_constraint_max) <= 200
def test_str_constraint_minmax() -> None:
    """Test combined min_length and max_length string constraints."""
    model = Generator(Coffee)
    assert len(model.str_constraint_minmax) >= 101
    assert len(model.str_constraint_minmax) <= 200
def test_not_specifically_supported_type() -> None:
    """A type without special handling is still instantiated correctly."""
    model = Generator(Coffee)
    assert isinstance(model.not_specifically_supported_type, OrderedDict)
def test_always_none() -> None:
    """A NoneType-annotated field is always generated as None."""
    model = Generator(Coffee)
    assert model.always_none is None
def test_multiple_of_int_gt() -> None:
    """Test multiple_of combined with an exclusive lower bound (gt)."""
    model = Generator(Coffee)
    assert model.multiple_of_int_gt % 7 == 0
    assert model.multiple_of_int_gt > -1000
def test_multiple_of_int_le() -> None:
    """Test multiple_of combined with an inclusive upper bound (le)."""
    model = Generator(Coffee)
    assert model.multiple_of_int_le % 7 == 0
    assert model.multiple_of_int_le <= 1000
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_orm.py | Python | from __future__ import annotations
import asyncio
import unittest
from datetime import date, datetime
from typing import Any
from uuid import UUID, uuid4
from decouple import config
from pydantic import BaseModel, Field
from pypika import Order
from ormdantic import Ormdantic
# Read the async DB URL once; all tables below register against `database`.
URL = config("DATABASE_URL")
connection = URL
database = Ormdantic(connection)
class Money(BaseModel):
    """Two floating point numbers (stored as a nested/serialized value)."""

    currency: float = 1.0
    val: float = 1.0
@database.table(
    "flavors",
    pk="id",
    indexed=["strength"],
    unique_constraints=[["name", "strength"]],
)
class Flavor(BaseModel):
    """A coffee flavor."""

    id: UUID = Field(default_factory=uuid4)
    name: str = Field(..., max_length=63)
    strength: int | None = None
    # Optional foreign key: full model, bare pk, or absent.
    coffee: Coffee | UUID | None = None
    created_at: date = Field(default_factory=date.today)
    updated_at: date = Field(default_factory=date.today)
    expire: datetime = Field(default_factory=datetime.now)
    exist: bool = False
@database.table(pk="id")
class Coffee(BaseModel):
"""Drink it in the morning."""
id: UUID = Field(default_factory=uuid4)
primary_flavor: Flavor | UUID
secondary_flavor: Flavor | UUID | None
sweetener: str
cream: float
place: dict # type: ignore
ice: list # type: ignore
size: Money
attributes: dict[str, Any] | None = None
exist: bool = False
@database.table(pk="id")
class Table(BaseModel):
"""Drink it in the morning."""
id: UUID = Field(default_factory=uuid4)
# Resolve Flavor's forward reference to Coffee (declared later in the file).
Flavor.update_forward_refs()
class ormdanticTesting(unittest.IsolatedAsyncioTestCase):
    """End-to-end CRUD tests against the database configured in DATABASE_URL."""

    def setUp(self) -> None:
        """Setup clean sqlite database."""

        async def _init() -> None:
            async with database._engine.begin() as conn:
                await database.init()
                # Recreate the schema so every test starts from empty tables.
                await conn.run_sync(database._metadata.drop_all)  # type: ignore
                await conn.run_sync(database._metadata.create_all)  # type: ignore

        asyncio.run(_init())

    async def test_find_nothing(self) -> None:
        # Unknown primary keys resolve to None, at any depth.
        self.assertEqual(None, (await database[Flavor].find_one(uuid4())))
        self.assertEqual(None, (await database[Coffee].find_one(uuid4(), depth=3)))

    async def test_no_relation_insert_and_fine_one(self) -> None:
        # Insert record.
        record = Table()
        find = await database[Table].insert(record)
        # Find new record and compare.
        self.assertDictEqual(
            find.dict(),
            (await database[Table].find_one(find.id, 1)).dict(),  # type: ignore
        )

    async def test_insert_and_find_one(self) -> None:
        # Insert record.
        flavor = Flavor(name="mocha")
        mocha = await database[Flavor].insert(flavor)
        # Find new record and compare.
        self.assertDictEqual(
            mocha.dict(),
            (await database[Flavor].find_one(mocha.id)).dict(),  # type: ignore
        )

    async def test_insert_and_find_one_date(self) -> None:
        # Test Date and Time fields
        flavor = Flavor(name="mocha", created_at=date(2021, 1, 1))
        mocha = await database[Flavor].insert(flavor)
        # Find new record and compare.
        self.assertDictEqual(
            mocha.dict(),
            (await database[Flavor].find_one(mocha.id)).dict(),  # type: ignore
        )

    async def test_insert_and_find_one_bool(self) -> None:
        # Insert record.
        flavor = Flavor(name="mocha", exist=True)
        mocha = await database[Flavor].insert(flavor)
        # Find new record and compare.
        self.assertDictEqual(
            mocha.dict(),
            (await database[Flavor].find_one(mocha.id)).dict(),  # type: ignore
        )

    async def test_count(self) -> None:
        # Insert 3 records.
        await database[Flavor].insert(Flavor(name="mocha"))
        await database[Flavor].insert(Flavor(name="mocha"))
        await database[Flavor].insert(Flavor(name="caramel"))
        # Count records.
        self.assertEqual(1, await database[Flavor].count(where={"name": "caramel"}))
        self.assertEqual(3, await database[Flavor].count())

    async def test_find_many(self) -> None:
        # Insert 3 records.
        mocha1 = await database[Flavor].insert(Flavor(name="mocha"))
        mocha2 = await database[Flavor].insert(Flavor(name="mocha"))
        caramel = await database[Flavor].insert(Flavor(name="caramel"))
        # Find two records with filter.
        mochas = await database[Flavor].find_many(where={"name": "mocha"})
        self.assertListEqual([mocha1, mocha2], mochas.data)
        flavors = await database[Flavor].find_many()
        self.assertListEqual([mocha1, mocha2, caramel], flavors.data)

    async def test_find_many_order(self) -> None:
        # Insert 3 records.
        mocha1 = await database[Flavor].insert(Flavor(name="mocha", strength=3))
        mocha2 = await database[Flavor].insert(Flavor(name="mocha", strength=2))
        caramel = await database[Flavor].insert(Flavor(name="caramel"))
        # Descending sort on (name, strength).
        flavors = await database[Flavor].find_many(
            order_by=["name", "strength"], order=Order.desc
        )
        self.assertListEqual([mocha1, mocha2, caramel], flavors.data)

    async def test_find_many_pagination(self) -> None:
        # Insert 4 records.
        mocha1 = await database[Flavor].insert(Flavor(name="mocha"))
        mocha2 = await database[Flavor].insert(Flavor(name="mocha"))
        vanilla = await database[Flavor].insert(Flavor(name="vanilla"))
        caramel = await database[Flavor].insert(Flavor(name="caramel"))
        flavors_page_1 = await database[Flavor].find_many(limit=2)
        self.assertListEqual([mocha1, mocha2], flavors_page_1.data)
        flavors_page_2 = await database[Flavor].find_many(limit=2, offset=2)
        self.assertListEqual([vanilla, caramel], flavors_page_2.data)

    async def test_update(self) -> None:
        # Insert record.
        flavor = await database[Flavor].insert(Flavor(name="mocha"))
        # Update record.
        flavor.name = "caramel"
        await database[Flavor].update(flavor)
        # Find the updated record.
        self.assertEqual(
            flavor.name,
            (await database[Flavor].find_one(flavor.id)).name,  # type: ignore
        )

    async def test_update_datetime(self) -> None:
        # Insert record.
        flavor = await database[Flavor].insert(
            Flavor(name="mocha", expire=datetime(2021, 1, 1, 1, 1, 1))
        )
        # Update record.
        flavor.expire = datetime(2021, 1, 1, 1, 1, 2)
        await database[Flavor].update(flavor)
        # Find the updated record.
        self.assertEqual(
            flavor.expire,
            (await database[Flavor].find_one(flavor.id)).expire,  # type: ignore
        )

    async def test_upsert(self) -> None:
        # Upsert record as insert.
        flavor = await database[Flavor].upsert(Flavor(name="vanilla"))
        await database[Flavor].upsert(flavor)
        # Find all "vanilla" record.
        flavors = await database[Flavor].find_many(where={"id": flavor.id})
        self.assertEqual(1, len(flavors.data))
        # Upsert as update.
        flavor.name = "caramel"
        await database[Flavor].upsert(flavor)
        # Find one record.
        flavors = await database[Flavor].find_many(where={"id": flavor.id})
        self.assertEqual(1, len(flavors.data))
        self.assertDictEqual(flavor.dict(), flavors.data[0].dict())

    async def test_delete(self) -> None:
        # Insert record.
        caramel = Flavor(name="caramel")
        await database[Flavor].insert(caramel)
        # Delete record.
        await database[Flavor].delete(caramel.id)
        # Find one record.
        self.assertIsNone(await database[Flavor].find_one(caramel.id))

    async def test_insert_and_find_orm(self) -> None:
        mocha = Flavor(name="mocha")
        vanilla = Flavor(name="vanilla")
        await database[Flavor].insert(mocha)
        await database[Flavor].insert(vanilla)
        coffee = Coffee(
            primary_flavor=mocha,
            secondary_flavor=vanilla,
            sweetener="none",
            cream=0,
            place={"sum": 1},
            ice=["cubes"],
            size=Money(),
        )
        await database[Coffee].insert(coffee)
        # Find record and compare.
        coffee_dict = coffee.dict()
        find_coffee = await database[Coffee].find_one(coffee.id, depth=1)
        self.assertDictEqual(coffee_dict, find_coffee.dict())  # type: ignore
        # At depth 0 relations come back as bare primary keys, not models.
        coffee_dict["primary_flavor"] = coffee_dict["primary_flavor"]["id"]
        coffee_dict["secondary_flavor"] = coffee_dict["secondary_flavor"]["id"]
        self.assertDictEqual(
            coffee_dict,
            (await database[Coffee].find_one(coffee.id)).dict(),  # type: ignore
        )
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_otm_relations.py | Python | from __future__ import annotations
import asyncio
import unittest
from uuid import UUID, uuid4
from decouple import config
from pydantic import BaseModel, Field
from ormdantic import Ormdantic
# Async DB URL from the environment; both models register against `database`.
URL = config("DATABASE_URL")
connection = URL
database = Ormdantic(connection)
@database.table(pk="id", back_references={"many_a": "one_a", "many_b": "one_b"})
class One(BaseModel):
"""One will have many "Many"."""
id: UUID = Field(default_factory=uuid4)
many_a: list[Many] = Field(default_factory=lambda: [])
many_b: list[Many] | None = None
@database.table(pk="id")
class Many(BaseModel):
"""Has a "One" parent and "Many" siblings."""
id: UUID = Field(default_factory=uuid4)
one_a: One | UUID
one_b: One | UUID | None = None
# Resolve the mutual forward references between One and Many.
One.update_forward_refs()
Many.update_forward_refs()
class ormdanticOneToManyRelationTesting(unittest.IsolatedAsyncioTestCase):
    """Round-trip test for one-to-many relations resolved via back-references."""

    def setUp(self) -> None:
        """Setup clean sqlite database."""

        async def _init() -> None:
            async with database._engine.begin() as conn:
                await database.init()
                # Recreate the schema so the test starts from empty tables.
                await conn.run_sync(database._metadata.drop_all)  # type: ignore
                await conn.run_sync(database._metadata.create_all)  # type: ignore

        asyncio.run(_init())

    async def test_one_to_many_insert_and_get(self) -> None:
        one_a = One()
        one_b = One()
        await database[One].insert(one_a)
        await database[One].insert(one_b)
        # many_a children belong to one_a only; many_b to both parents.
        many_a = [Many(one_a=one_a), Many(one_a=one_a)]
        many_b = [
            Many(one_a=one_a, one_b=one_b),
            Many(one_a=one_a, one_b=one_b),
            Many(one_a=one_a, one_b=one_b),
        ]
        for many in many_a + many_b:
            await database[Many].insert(many)
        find_one_a = await database[One].find_one(one_a.id, depth=2)
        # Sort both sides by id before comparison; DB ordering is unspecified.
        many_a_plus_b = many_a + many_b
        many_a_plus_b.sort(key=lambda x: x.id)
        find_one_a.many_a.sort(key=lambda x: x.id)  # type: ignore
        self.assertListEqual(many_a_plus_b, find_one_a.many_a)  # type: ignore
        self.assertIsNone(find_one_a.many_b)  # type: ignore
        find_one_b = await database[One].find_one(one_b.id, depth=2)
        many_b.sort(key=lambda x: x.id)
        find_one_b.many_b.sort(key=lambda x: x.id)  # type: ignore
        self.assertListEqual(many_b, find_one_b.many_b)  # type: ignore
        self.assertListEqual([], find_one_b.many_a)  # type: ignore
        # Depth 3 resolves child -> parent -> children again.
        many_a_idx_zero = await database[Many].find_one(many_a[0].id, depth=3)
        many_a_idx_zero.one_a.many_a.sort(key=lambda x: x.id)  # type: ignore
        self.assertDictEqual(
            find_one_a.dict(),
            many_a_idx_zero.one_a.dict(),  # type: ignore
        )
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_snake.py | Python | from unittest import TestCase
from ormdantic.handler.snake import get_words
class SnakeTest(TestCase):
    """Unit tests for `get_words` splitting snake_case identifiers."""

    def __init__(self, *args) -> None:  # type: ignore
        # Shared fixture: a four-word snake_case sample.
        self.snake_sample = "hello_yezz_data_happy"
        super().__init__(*args)

    def test_get_words_from_snake(self) -> None:
        self.assertEqual(
            ["hello", "yezz", "data", "happy"], get_words(self.snake_sample)
        )

    def test_get_words_from_snake_with_uppercase(self) -> None:
        # Case is preserved, not normalized.
        self.assertEqual(
            ["HELLO", "YEZZ", "DATA", "HAPPY"], get_words(self.snake_sample.upper())
        )

    def test_get_words_from_snake_with_uppercase_and_underscore(self) -> None:
        # A trailing underscore must not produce an empty trailing word.
        self.assertEqual(
            ["HELLO", "YEZZ", "DATA", "HAPPY"],
            get_words(f"{self.snake_sample.upper()}_"),
        )

    def test_get_words_from_snake_with_underscore(self) -> None:
        self.assertEqual(
            ["hello", "yezz", "data", "happy"], get_words(f"{self.snake_sample}_")
        )
| yezz123/ormdantic | 150 | Asynchronous ORM that uses pydantic models to represent database tables ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pagidantic/__init__.py | Python | """
Pagidantic is a Python package for pagination using Pydantic.
It's easy to use, lightweight, and easy to integrate with existing projects.
It helps in creating efficient pagination solution with minimal code, while maintaining type checking and data validation with Pydantic.
"""
__version__ = "2.0.0"
from pagidantic.dict import PagidanticDict
from pagidantic.factory import PagidanticFactory
from pagidantic.page import Page
from pagidantic.pagidantic import pagidantic
from pagidantic.paginator import Paginator
from pagidantic.set import PagidanticSet
# Explicit public API of the package.
__all__ = [
    "PagidanticFactory",
    "Paginator",
    "pagidantic",
    "PagidanticDict",
    "PagidanticSet",
    "Page",
]
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pagidantic/dict.py | Python | from typing import Any, Dict, List
from pagidantic.paginator import Paginator
class PagidanticDict(Paginator):
    """Paginator specialised for ``dict`` inputs.

    The dictionary is first converted into a list of single-entry dicts and
    then handed to the base :class:`Paginator`.

    :param object_list: The dictionary to be paginated.
    :type object_list: dict
    :param page_limit: Number of items per page.
    :type page_limit: int, optional
    :param start_page: The page to start pagination from.
    :type start_page: int, optional
    """

    def __init__(
        self, object_list: Dict[Any, Any], page_limit: int = 10, start_page: int = 0
    ):
        self.object_list: Dict[Any, Any] = object_list
        normalised = self._dict_to_list()
        super().__init__(
            object_list=normalised,
            page_limit=page_limit,
            start_page=start_page,
        )

    def _dict_to_list(self) -> List[Dict[Any, Any]]:
        """Split the stored dict into a list of one-item dicts."""
        if not isinstance(self.object_list, dict):
            raise TypeError(f"Expected dict object, not {type(self.object_list)}")
        return [dict([pair]) for pair in self.object_list.items()]
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pagidantic/factory.py | Python | from typing import Type
from pydantic.dataclasses import dataclass
from pagidantic.dict import PagidanticDict
from pagidantic.paginator import Paginator
from pagidantic.set import PagidanticSet
@dataclass
class PagidanticFactory:
    """A factory class for creating paginator instances based on the type of objects to be paginated."""

    objects_type: str

    def get_paginator(self) -> Type[Paginator]:
        """
        Returns paginator class/subclass.

        :return: The appropriate paginator class/subclass based on the type of objects to be paginated.
        :rtype: Type[Paginator]
        :raise TypeError: If object_list is of an unsupported type.
        """
        # Dispatch table replacing the if/elif chain; list and tuple share
        # the generic Paginator.
        dispatch = {
            "list": Paginator,
            "tuple": Paginator,
            "dict": PagidanticDict,
            "set": PagidanticSet,
        }
        paginator_cls = dispatch.get(self.objects_type)
        if paginator_cls is None:
            raise TypeError(
                f"Unsupported type {self.objects_type} for object_list param."
            )
        return paginator_cls
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pagidantic/page.py | Python | from typing import Sequence
class Page:
    """One slice of a paginated sequence.

    Holds the objects belonging to a single page, the page number, and a
    reference back to the owning paginator.
    """

    def __init__(
        self, object_list: Sequence[object], page_number: int, paginator: object
    ):
        """Store the page contents, its number and the owning paginator.

        :param object_list: A list of objects to be paginated.
        :type object_list: Sequence
        :param page_number: The current page number.
        :type page_number: int
        :param paginator: The paginator instance.
        :type paginator: object
        """
        self.object_list = object_list
        self.page_number = page_number
        self.paginator = paginator

    def has_next(self) -> bool:
        """Return ``True`` when at least one page follows this one.

        :rtype: bool
        """
        # paginator.total_pages is expected to be the last page index.
        last_page = self.paginator.total_pages  # type: ignore
        return self.page_number < last_page

    def has_previous(self) -> bool:
        """Return ``True`` when this is not the first (zeroth) page.

        :rtype: bool
        """
        return 0 < self.page_number

    @property
    def count(self) -> int:
        """Number of objects held by this page.

        :rtype: int
        """
        return len(self.object_list)
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pagidantic/pagidantic.py | Python | from typing import Any, List, Set, Tuple, Type, Union
from pagidantic.factory import PagidanticFactory
from pagidantic.paginator import Paginator
def pagidantic(
    object_list: Union[
        list[Any],
        tuple[Any, ...],
        dict[Any, Any],
        set[Any],
    ],
    page_limit: int = 10,
    start_page: int = 0,
) -> Paginator:
    """
    Create a paginator instance matching the runtime type of *object_list*.

    The previous annotation (``list[List[Any]]`` etc.) wrongly described
    nested containers; the function accepts a flat list/tuple/dict/set.

    :param object_list: list, tuple, dict or set of objects to be paginated.
    :type object_list: Union[list, tuple, dict, set]
    :param page_limit: Number of items per page.
    :type page_limit: int, optional
    :param start_page: The page to start pagination from.
    :type start_page: int, optional
    :return: paginator instance
    :rtype: Paginator
    :raises TypeError: if *object_list* is of an unsupported type.
    """
    factory = PagidanticFactory(objects_type=type(object_list).__name__)
    paginator_cls: Type[Paginator] = factory.get_paginator()
    return paginator_cls(
        object_list=object_list,
        page_limit=page_limit,
        start_page=start_page,
    )
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pagidantic/paginator.py | Python | import math
from functools import cached_property
from typing import Any, Dict, Generator, List, Sequence, Set, Tuple, Union
from pydantic.dataclasses import dataclass
from pagidantic.page import Page
@dataclass
class Paginator:
    """
    Objects paginator. Should be initialised using paginate function.
    Arguments:
    :object_list: list of sequential objects that will be split across pages.
    :page_limit: number of objects per page.
    :start_page: number of page from which paginator will start.
    """
    object_list: Union[
        List[Any],
        Tuple[Any],
        Dict[Any, Any],
        Set[Any],
    ]
    page_limit: int = 10
    start_page: int = 0
    def __post_init__(self) -> None:
        """Executed after initial validation. Set initial page."""
        # Eagerly build the starting page so `self.page` is always valid.
        self.page: Page = self._page(
            object_list=self.get_objects(self.start_page),
            page_number=self.start_page,
            paginator=self,
        )
    def __iter__(self) -> Generator["Page", None, None]:
        """Iterate over paginator pages. Every iteration updates paginator page object."""
        # NOTE: iterating mutates self.page via get_next(); the paginator is
        # left positioned on the last page afterwards.
        for _ in self.page_range:
            yield self.page
            self.get_next()
    def get_objects(self, page_number: int) -> Sequence[Any]:
        """Retrieve page list of data.

        :raises TypeError: if page_number is not an int. bool passes this
            check because bool is a subclass of int.
        """
        if not isinstance(page_number, int):
            raise TypeError(f"{page_number} expected to be int.")
        n = self.page_limit * page_number
        # Out-of-range pages simply yield an empty slice.
        return self.object_list[n : n + self.page_limit]  # type: ignore
    @property
    def response(self) -> Dict[str, Any]:
        """Retrieve response result property."""
        data = {
            "data": self.page.object_list,
            "page_number": self.page.page_number,
            "has_next": self.has_next,
            "has_previous": self.has_previous,
        }
        return self._create_response(**data)
    @property
    def has_next(self) -> bool:
        """Page's "has next" method."""
        return self.page.has_next()
    @property
    def has_previous(self) -> bool:
        """Page's "has previous" method."""
        return self.page.has_previous()
    def get_next(self) -> None:
        """Get next page. Overrides paginator's page attribute"""
        if self.has_next:
            # The current page's number is incremented first, then a fresh
            # Page object replaces it; order matters here.
            self.page.page_number += 1
            next_page = self._page(
                object_list=self.get_objects(self.page.page_number),
                page_number=self.page.page_number,
                paginator=self,
            )
            self.page = next_page
    def get_previous(self) -> None:
        """Get previous page. Overrides paginator's page attribute."""
        if self.has_previous:
            # Mirror of get_next(): decrement before rebuilding the page.
            self.page.page_number -= 1
            previous_page = self._page(
                object_list=self.get_objects(self.page.page_number),
                page_number=self.page.page_number,
                paginator=self,
            )
            self.page = previous_page
    @staticmethod
    def _page(*args: Any, **kwargs: Any) -> Page:
        """Returns Page object."""
        return Page(*args, **kwargs)
    def get_page_response(self, page_number: int = 0) -> Dict[str, Any]:
        """
        Get response of requested page number.
        number=0 equals first page.

        Does not move the paginator: a throwaway Page is built for the
        requested number and serialised.

        :raises TypeError: if page_number is not an int.
        """
        if not isinstance(page_number, int):
            raise TypeError(f"{page_number} expected to be int.")
        page = self._page(
            object_list=self.get_objects(page_number),
            page_number=page_number,
            paginator=self,
        )
        data = {
            "data": page.object_list,
            "page_number": page.page_number,
            "has_next": page.has_next(),
            "has_previous": page.has_previous(),
        }
        return self._create_response(**data)
    @cached_property
    def total(self) -> int:
        """Return the total number of objects, across all pages.

        Cached on first access; later mutation of object_list will not be
        reflected here.
        """
        return len(self.object_list)
    @property
    def total_pages(self) -> int:
        """Number of total pages. Lack of additional pages means total is 0.

        NOTE: this is the 0-based index of the last page, not a page count
        (e.g. 50 items / limit 10 -> 4).
        """
        return 0 if self.total == 0 else math.ceil(self.total / self.page_limit) - 1
    @property
    def page_range(self) -> range:
        """Return a range of pages.

        The range is shortened by start_page so iteration stops at the last
        page rather than running past it.
        """
        return range(self.total_pages + 1 - self.start_page)
    def _create_response(self, **kwargs: Any) -> Dict[str, Any]:
        """Creates json response object."""
        return {
            "total_pages": self.total_pages,
            "data": kwargs["data"],
            "page_number": kwargs["page_number"],
            "has_next": kwargs["has_next"],
            "has_previous": kwargs["has_previous"],
        }
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pagidantic/set.py | Python | from typing import Any, List
from pagidantic.paginator import Paginator
class PagidanticSet(Paginator):
    """Paginator specialised for ``set`` inputs.

    The set is materialised as a list before being handed to the base
    :class:`Paginator`.

    :param object_list: The set to be paginated.
    :type object_list: set
    :param page_limit: Number of items per page.
    :type page_limit: int, optional
    :param start_page: The page to start pagination from.
    :type start_page: int, optional
    """

    def __init__(
        self, object_list: List[Any], page_limit: int = 10, start_page: int = 0
    ):
        self.object_list = object_list
        materialised = self._set_to_list()
        super().__init__(
            object_list=materialised,
            page_limit=page_limit,
            start_page=start_page,
        )

    def _set_to_list(self) -> List[Any]:
        """Materialise the stored set as a list."""
        if not isinstance(self.object_list, set):
            raise TypeError(f"Expected set object, not {type(self.object_list)}")
        return [*self.object_list]
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/clean.sh | Shell | #!/bin/sh -e
# Delete Python bytecode, editor backups, coverage data, tool caches and
# build artefacts. `find ... -exec rm {} +` replaces the former
# `rm $(find ...)` backtick form, which word-split and broke on paths
# containing whitespace; `-prune` stops find descending into directories
# it is about to delete (which would otherwise make it exit non-zero
# under `sh -e`).
find . -type f -name '*.py[co]' -exec rm -f {} +
find . -type f -name '*~' -exec rm -f {} +
find . -type f -name '.*~' -exec rm -f {} +
find . -type f -name .coverage -exec rm -f {} +
find . -type f -name '.coverage.*' -exec rm -f {} +
find . -name __pycache__ -prune -exec rm -rf {} +
# '*.egg-info' appeared twice in the original; one pass suffices.
find . -type d -name '*.egg-info' -prune -exec rm -rf {} +
find . -type d -name 'pip-wheel-metadata' -prune -exec rm -rf {} +
find . -type d -name .pytest_cache -prune -exec rm -rf {} +
find . -type d -name .ruff_cache -prune -exec rm -rf {} +
find . -type d -name .cache -prune -exec rm -rf {} +
find . -type d -name .mypy_cache -prune -exec rm -rf {} +
find . -type d -name htmlcov -prune -exec rm -rf {} +
find . -type d -name build -prune -exec rm -rf {} +
find . -type d -name dist -prune -exec rm -rf {} +
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/example.sh | Shell | #!/usr/bin/env bash
# Abort on first error and echo each command for CI log visibility.
set -e
set -x
echo "ENV=${ENV}"
# Make the package importable from the repo root without installing it.
export PYTHONPATH=.
python tests/example/example.py
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/format.sh | Shell | #!/usr/bin/env bash
# Abort on first error and echo each command.
set -e
set -x
# Run every configured pre-commit hook over the whole tree, showing the
# diff when a hook rewrites files.
pre-commit run --all-files --verbose --show-diff-on-failure
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/lint.sh | Shell | #!/usr/bin/env bash
# Abort on first error and echo each command.
set -e
set -x
# Static type check of the package; error codes help silence specific rules.
mypy --show-error-codes pagidantic
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/test.sh | Shell | #!/usr/bin/env bash
# Abort on first error and echo each command.
set -e
set -x
echo "ENV=${ENV}"
# Make the package importable from the repo root without installing it.
export PYTHONPATH=.
# Fail the run when total coverage drops below 80%.
pytest --cov=pagidantic --cov-report=term-missing --cov-fail-under=80
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/test_html.sh | Shell | #!/usr/bin/env bash
# Abort on first error and echo each command.
set -e
set -x
echo "ENV=${ENV}"
# Make the package importable from the repo root without installing it.
export PYTHONPATH=.
# Generate an HTML coverage report (htmlcov/) covering package and tests.
pytest --cov=pagidantic --cov=tests --cov-report=html
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/conftest.py | Python | import pytest
from pagidantic import PagidanticDict, PagidanticSet, Paginator, pagidantic
def dict_to_list(dict_object: dict) -> list[dict]:
    """Transform dict to list of dicts."""
    return [{key: value} for key, value in dict_object.items()]


@pytest.fixture()
def list_of_int() -> list[int]:
    """Generate list of integers 1..50."""
    return [*range(1, 51)]


@pytest.fixture()
def list_of_dict() -> list[dict]:
    """Generate a list of twenty single-entry dictionaries."""
    source = {str(number): number for number in range(1, 21)}
    return dict_to_list(source)


@pytest.fixture()
def dict_data() -> dict:
    """Generate a single ten-entry dictionary."""
    return {str(number): number for number in range(1, 11)}


@pytest.fixture()
def paginator(list_of_int) -> Paginator:
    """Return paginator object with list of integers as input."""
    return pagidantic(object_list=list_of_int)


@pytest.fixture()
def second_paginator(list_of_dict) -> Paginator:
    """Return paginator object with list of dictionary as input, start page set to 1, limit 3."""
    return pagidantic(list_of_dict, page_limit=3, start_page=1)


@pytest.fixture()
def paginator_dict_proxy(dict_data) -> PagidanticDict:
    """Return PaginatorDictProxy object with dictionary as input."""
    return PagidanticDict(dict_data, page_limit=5)


@pytest.fixture()
def paginator_set_proxy() -> PagidanticSet:
    """Return PaginatorSetProxy object with set as input, limit set to 3."""
    return PagidanticSet({1, 2, 3, 4, 5, 6, 7, 8}, page_limit=3)
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/example/example.py | Python | import json
from pagidantic import pagidantic
# Retrieve data from generated.json and convert it to a Python object.
with open("tests/example/generated.json") as f:
    object_list = json.load(f)
pagination = pagidantic(object_list, page_limit=2, start_page=0)
# Get the currently returned page.
def get_current_page():
    return pagination.response
print(get_current_page())
# Advance to the next page (get_next mutates the paginator and returns None).
def get_next_page():
    return pagination.get_next()
print(get_next_page())
# Step back to the previous page (also returns None).
def get_previous_page():
    return pagination.get_previous()
print(get_previous_page())
# Get a page response by number without moving the paginator.
def get_page_by_number():
    return pagination.get_page_response(page_number=0)
print(get_page_by_number())
# Get total_pages (the 0-based index of the last page).
def get_total_pages():
    return pagination.total_pages
print(get_total_pages())
# Count total objects.
def count_total_objects():
    return pagination.total
print(count_total_objects())
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_pagidantic_factory.py | Python | from pagidantic import PagidanticDict, PagidanticFactory, PagidanticSet, Paginator
def get_paginator(object_list):
    """Resolve the paginator class for *object_list* through the factory."""
    factory = PagidanticFactory(type(object_list).__name__)
    return factory.get_paginator()


class TestPaginatorFactory:
    """Tests for PaginatorFactory."""

    def test_factory_result(self):
        """
        Test if expected paginator class/subclass is returned based on object_list input type.
        """
        expectations = [
            ([1, 2], Paginator),
            ((1, 2, 3), Paginator),
            ({"a": 1, "b": 3}, PagidanticDict),
            ({1, "b", True}, PagidanticSet),
        ]
        for sample, expected_cls in expectations:
            assert get_paginator(sample) == expected_cls
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_pagidantic_paginator.py | Python | from pagidantic import Paginator
class TestListOfDictPaginator:
    """Tests for paginator object with list of dict as input."""

    def test_init(self, second_paginator: Paginator, list_of_dict: list[dict]):
        """Fixture arguments are stored unchanged on the paginator."""
        assert second_paginator.object_list == list_of_dict
        assert second_paginator.page_limit == 3
        assert second_paginator.start_page == 1

    def test_response(self, second_paginator: Paginator):
        """Start page 1 with limit 3 serves items 4-6 of twenty."""
        expected = dict(
            total_pages=6,
            data=[{"4": 4}, {"5": 5}, {"6": 6}],
            page_number=1,
            has_next=True,
            has_previous=True,
        )
        assert second_paginator.response == expected
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_pagidantic_paginator_default.py | Python | import pytest
from pydantic import ValidationError
from pagidantic import Paginator, pagidantic
class TestDefaultPaginator:
    """Tests for Paginator object."""
    def test_pagidantic_init(self):
        """Test pagidantic function initial values."""
        assert pagidantic(object_list=[], page_limit=5, start_page=0)
        assert pagidantic(object_list={1, "a"})
        assert pagidantic(object_list=(1, "a"))
        assert pagidantic(object_list={"a": 1})
        # NOTE(review): bool/str values below are accepted, presumably because
        # pydantic coerces them to int (bool subclasses int) — confirm intent.
        assert pagidantic(object_list=[], page_limit=True, start_page=0)
        assert pagidantic(object_list=[], page_limit=True, start_page=False)
        assert pagidantic(object_list=[], page_limit=False, start_page="0")
        # Unsupported object_list types raise TypeError from the factory.
        with pytest.raises(TypeError):
            assert pagidantic(object_list=None, page_limit=5, start_page=0)
        with pytest.raises(TypeError):
            assert pagidantic(object_list="", page_limit=5, start_page=0)
        with pytest.raises(TypeError):
            assert pagidantic(object_list=0, page_limit=5, start_page=0)
        with pytest.raises(TypeError):
            assert pagidantic(object_list=True, page_limit=5, start_page=0)
        # Values pydantic cannot coerce to int fail validation instead.
        with pytest.raises(ValidationError):
            assert pagidantic(object_list=[], page_limit="str", start_page=0)
        with pytest.raises(ValidationError):
            assert pagidantic(object_list=[], page_limit=3, start_page="str")
        with pytest.raises(ValidationError):
            assert pagidantic(object_list=[], page_limit=None, start_page=0)
        with pytest.raises(ValidationError):
            assert pagidantic(object_list=[], page_limit=None, start_page=None)
        with pytest.raises(ValidationError):
            assert pagidantic(object_list=[], page_limit=3, start_page=None)
    def test_init(self, paginator: Paginator, list_of_int: list[int]):
        """Validate init assigment."""
        assert paginator.object_list == list_of_int
        assert paginator.page_limit == 10
        assert paginator.start_page == 0
    def test_init_valid_object_list(self):
        """Validate object list."""
        paginator_list = pagidantic([1, 2, 3])
        assert paginator_list.object_list == [1, 2, 3]
        paginator_tuple = pagidantic((1, 2, 3))
        assert paginator_tuple.object_list == [1, 2, 3]
        # Dict inputs are normalised into a list of single-entry dicts.
        paginator_dict = pagidantic({"a": 1, "b": 2})
        assert paginator_dict.object_list == [
            {
                "a": 1,
            },
            {"b": 2},
        ]
        # NOTE(review): the ordered comparison of a list built from a set
        # relies on CPython's iteration order for small ints — confirm before
        # adding other element types here.
        paginator_set = pagidantic({1, 2, 3})
        assert paginator_set.object_list == [1, 2, 3]
    def test_response(self, paginator: Paginator):
        """Validate current object data."""
        expected = {
            "total_pages": 4,
            "data": list(range(1, 11)),
            "page_number": 0,
            "has_next": True,
            "has_previous": False,
        }
        assert paginator.response == expected
    def test_get_next(self, paginator: Paginator):
        """Validate if response data is changed after calling next page data."""
        paginator.get_next()
        expected = {
            "total_pages": 4,
            "data": list(range(11, 21)),
            "page_number": 1,
            "has_next": True,
            "has_previous": True,
        }
        assert paginator.response == expected
    def test_get_previous(self):
        """Validate if response data is changed after calling next previous data."""
        p = pagidantic(object_list=[1, 2, 3, 4, 5], page_limit=2, start_page=1)
        p.get_previous()
        expected = {
            "total_pages": 2,
            "data": list(range(1, 3)),
            "page_number": 0,
            "has_next": True,
            "has_previous": False,
        }
        assert p.response == expected
    def test_get_current_data(self, paginator: Paginator):
        """
        Validate if page\'s object list is changed after multiple next/previous page calls.
        """
        paginator.get_next()
        paginator.get_next()
        assert paginator.page.object_list == list(range(21, 31))
        paginator.get_previous()
        assert paginator.page.object_list == list(range(11, 21))
        paginator.get_previous()
        assert paginator.page.object_list == list(range(1, 11))
        # Further get_previous calls are no-ops: has_previous is False at page 0.
        paginator.get_previous()
        paginator.get_previous()
        paginator.get_previous()
        paginator.get_previous()
        assert paginator.page.object_list == list(range(1, 11))
    def test_get_previous_from_first_page(self, paginator: Paginator):
        """
        Check if response data is unchanged after previous page call while start_page is set to 0.
        """
        paginator.get_previous()
        expected = {
            "total_pages": 4,
            "data": list(range(1, 11)),
            "page_number": 0,
            "has_next": True,
            "has_previous": False,
        }
        assert paginator.response == expected
    def test_get_next_previous(self, paginator: Paginator):
        """Check if response data is changed correctly after multiple next/previous page calls."""
        paginator.get_next()
        paginator.get_next()
        paginator.get_next()
        expected = {
            "total_pages": 4,
            "data": list(range(31, 41)),
            "page_number": 3,
            "has_next": True,
            "has_previous": True,
        }
        assert paginator.response == expected
        paginator.get_previous()
        expected = {
            "total_pages": 4,
            "data": list(range(21, 31)),
            "page_number": 2,
            "has_next": True,
            "has_previous": True,
        }
        assert paginator.response == expected
    def test_get_page_response(self, paginator: Paginator):
        """Validate if requested page data is correct."""
        response = paginator.get_page_response(4)
        expected = {
            "total_pages": 4,
            "data": list(range(41, 51)),
            "page_number": 4,
            "has_next": False,
            "has_previous": True,
        }
        assert response == expected
    def test_get_page_response_page_number(self, paginator: Paginator):
        """Validate page_number param in get_page_response method."""
        assert paginator.get_page_response(1)
        # bool passes the isinstance(page_number, int) check (bool subclasses int).
        assert paginator.get_page_response(True)
        assert paginator.get_page_response(False)
        with pytest.raises(TypeError):
            assert paginator.get_page_response("test")
        with pytest.raises(TypeError):
            assert paginator.get_page_response("1")
    def test_total(self, paginator: Paginator):
        """Validate total property."""
        assert paginator.total == 50
    def test_total_pages(self, paginator: Paginator):
        """Validate total_pages property."""
        # total_pages is the 0-based index of the last page, not a page count.
        assert paginator.total_pages == 4
    def test_page_range(self, paginator: Paginator):
        """Validate page_range property."""
        assert paginator.page_range == range(5)
    def test_get_objects(self, paginator: Paginator):
        """Validate get_objects method with given page number will return expected object_list."""
        assert paginator.get_objects(1) == list(range(11, 21))
    def test_get_objects_page_number(self, paginator: Paginator):
        """Validate page_number param in get_objects method."""
        assert paginator.get_objects(1)
        assert paginator.get_objects(True)
        assert paginator.get_objects(False)
        with pytest.raises(TypeError):
            assert paginator.get_objects("test")
        with pytest.raises(TypeError):
            assert paginator.get_objects("1")
    def test_has_next(self, paginator: Paginator):
        """Check if paginator have next page."""
        assert paginator.has_next
    def test_not_has_next(self):
        """Check if paginator does not have next page."""
        p = pagidantic(object_list=[1, 2, 3], page_limit=5)
        assert not p.has_next
    def test_has_previous(self):
        """Check if paginator have previous page."""
        p = pagidantic(object_list=[1, 2, 3, 4, 5], page_limit=2, start_page=1)
        assert p.has_previous
    def test_not_has_previous(self, paginator: Paginator):
        """Check if paginator does not have previous page."""
        assert not paginator.has_previous
    def test_iter(self, paginator: Paginator):
        """Check __iter__ method."""
        # Iteration mutates the paginator's current page (see Paginator.__iter__).
        for current_page in paginator:
            expected = {
                "total_pages": 4,
                "data": current_page.object_list,
                "page_number": current_page.page_number,
                "has_next": current_page.has_next(),
                "has_previous": current_page.has_previous(),
            }
            assert paginator.response == expected
    def test_page(self, paginator: Paginator):
        """Check if Page creates expected data."""
        page = paginator._page(
            object_list=list(range(1, 11)), page_number=0, paginator=paginator
        )
        assert page.object_list == list(range(1, 11))
        assert page.has_next()
        assert not page.has_previous()
        assert page.count == 10
    def test_page_count(self):
        """Validate Page 'count' with different start_page value while iterating over paginator."""
        object_list = [1, 2, 3, 4, 5]
        limit = 2
        paginator = pagidantic(object_list=object_list, page_limit=limit)
        for index, page in enumerate(paginator):
            if index == 2:  # last page in this example
                assert page.count == 1
            else:
                assert page.count == 2
        paginator = pagidantic(object_list=object_list, page_limit=limit, start_page=1)
        for index, page in enumerate(paginator):
            if index == 1:  # last page in this example
                assert page.count == 1
            else:
                assert page.count == 2
        paginator = pagidantic(object_list=object_list, page_limit=limit, start_page=2)
        for page in paginator:
            assert page.count == 1
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_pagidantic_proxy.py | Python | import pytest
from conftest import dict_to_list
from pagidantic import PagidanticDict, PagidanticSet
class TestPaginatorDictProxy:
    """Tests for Test PagidanticDict"""

    def test_init(self, paginator_dict_proxy: PagidanticDict, dict_data: dict):
        """Validate init assigment."""
        assert paginator_dict_proxy.object_list == dict_to_list(dict_data)
        assert paginator_dict_proxy.page_limit == 5
        assert paginator_dict_proxy.start_page == 0

    def test_invalid_object_list(self):
        """Validate object_list param."""
        # Every non-dict input must be rejected with TypeError.
        for invalid in ([1, 2, 3], {1, 2, 3}, (1, 2, 3)):
            with pytest.raises(TypeError):
                PagidanticDict(invalid)
class TestPaginatorSetProxy:
    """Tests for Test PagidanticSet"""

    def test_init(self, paginator_set_proxy: PagidanticSet):
        """Validate init assigment."""
        assert paginator_set_proxy.object_list == [1, 2, 3, 4, 5, 6, 7, 8]
        assert paginator_set_proxy.page_limit == 3
        assert paginator_set_proxy.start_page == 0

    def test_invalid_object_list(self):
        """Validate object_list param."""
        # Every non-set input must be rejected with TypeError.
        for invalid in ([1, 2, 3], {"a": 1}, (1, 2, 3)):
            with pytest.raises(TypeError):
                PagidanticSet(invalid)
| yezz123/pagidantic | 16 | Pagination using Pydantic. Easy to use, lightweight, and easy to integrate with existing projects 💡 | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pgqb/__init__.py | Python | """A simple SQL query builder for PostgreSQL."""
from __future__ import annotations
__version__ = "0.1.0"
__all__ = (
"As",
"BIGINT",
"BIGSERIAL",
"BIT",
"BOOLEAN",
"BOX",
"BYTEA",
"CHAR",
"CIDR",
"CIRCLE",
"Column",
"Column",
"DATE",
"DOUBLE",
"Delete",
"Expression",
"From",
"INET",
"INTEGER",
"INTERVAL",
"InsertInto",
"JSON",
"JSONB",
"Join",
"LINE",
"LSEG",
"LeftJoin",
"LogicGate",
"MACADDR",
"MACADDR8",
"MONEY",
"NUMERIC",
"On",
"OrderBy",
"PATH",
"PGEnum",
"PG_LSN",
"PG_SNAPSHOT",
"POINT",
"POLYGON",
"QueryBuilder",
"QueryBuilder",
"REAL",
"RightJoin",
"SERIAL",
"SMALLINT",
"SMALLSERIAL",
"SQLType",
"Select",
"Set",
"TEXT",
"TEXT",
"TIME",
"TIMESTAMP",
"TSQUERY",
"TSVECTOR",
"Table",
"Table",
"UUID",
"UUID",
"Update",
"VARBIT",
"VARCHAR",
"Values",
"Where",
"XML",
"delete_from",
"insert_into",
"select",
"update",
)
from typing import Type
from pgqb.builder import (
As,
BooleanOperator,
Column,
Delete,
Expression,
From,
InsertInto,
Join,
LeftJoin,
LogicGate,
On,
OrderBy,
QueryBuilder,
RightJoin,
Select,
Set,
Table,
Update,
Values,
Where,
)
from pgqb.types import (
BIGINT,
BIGSERIAL,
BIT,
BOOLEAN,
BOX,
BYTEA,
CHAR,
CIDR,
CIRCLE,
DATE,
DOUBLE,
INET,
INTEGER,
INTERVAL,
JSON,
JSONB,
LINE,
LSEG,
MACADDR,
MACADDR8,
MONEY,
NUMERIC,
PATH,
PG_LSN,
PG_SNAPSHOT,
POINT,
POLYGON,
REAL,
SERIAL,
SMALLINT,
SMALLSERIAL,
TEXT,
TIME,
TIMESTAMP,
TSQUERY,
TSVECTOR,
UUID,
VARBIT,
VARCHAR,
XML,
PGEnum,
SQLType,
)
# Module-level convenience constructors for the builder classes. Note that
# only select/update/insert_into/delete_from appear in __all__; the
# join/on/and_/or_ helpers below are importable by name but are not
# re-exported via `from pgqb import *`.
def insert_into(table: Type[Table]) -> InsertInto:
    """Build an insert query."""
    return InsertInto(table)
def delete_from(table: Type[Table]) -> Delete:
    """Build a delete query."""
    return Delete(table)
def select(*args: Column | Type[Table] | As) -> Select:
    """Build a select query."""
    return Select(*args)
def update(table: Type[Table]) -> Update:
    """Build an update query."""
    return Update(table)
def join(table: Type[Table]) -> Join:
    """Build a join query."""
    return Join(table)
def left_join(table: Type[Table]) -> Join:
    """Build a left join query."""
    return LeftJoin(table)
def right_join(table: Type[Table]) -> Join:
    """Build a right join query."""
    return RightJoin(table)
def on(evaluation: Expression, *evaluations: Expression | LogicGate) -> On:
    """Build an "on" query for a join."""
    return On(evaluation, *evaluations)
def and_(evaluation: Expression, *evaluations: Expression | LogicGate) -> LogicGate:
    """Build an "and" expression for a part of a query."""
    return LogicGate(BooleanOperator.AND, evaluation, *evaluations)
def and_not(evaluation: Expression, *evaluations: Expression | LogicGate) -> LogicGate:
    """Build an "and not" expression for a part of a query."""
    return LogicGate(BooleanOperator.AND_NOT, evaluation, *evaluations)
def or_(evaluation: Expression, *evaluations: Expression | LogicGate) -> LogicGate:
    """Build an "or" expression for a part of a query."""
    return LogicGate(BooleanOperator.OR, evaluation, *evaluations)
def or_not(evaluation: Expression, *evaluations: Expression | LogicGate) -> LogicGate:
    """Build an "or not" expression for a part of a query."""
    return LogicGate(BooleanOperator.OR_NOT, evaluation, *evaluations)
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pgqb/_snake.py | Python | import re
from typing import Union
def to_snake(string: str) -> str:
    """
    Return a version of the string in `snake_case` format.

    Args:
        string: The string to convert to snake_case.

    Returns:
        The string in snake_case format.
    """
    return "_".join(word.lower() for word in get_words(string))


def get_words(string: str) -> list[str]:
    """
    Get a list of the words in a string in the order they appear.

    Args:
        string: The string to get the words from.

    Returns:
        A list of the words in the string.
    """
    # Seed the list by splitting on word boundaries and underscores, keeping
    # only fragments that contain word characters.
    words = [it for it in re.split(r"\b|_", string) if re.match(r"\w", it)]
    # camelCase boundary: "fooBar" -> "foo", "Bar".
    words = _split_words_on_regex(words, re.compile(r"(?<=[a-z])(?=[A-Z])"))
    # Acronym boundary: "HTTPServer" -> "HTTP", "Server".
    words = _split_words_on_regex(words, re.compile(r"(?<=[A-Z])(?=[A-Z][a-z])"))
    # Digit/letter boundary: split after digits that precede a letter.
    words = _split_words_on_regex(words, re.compile(r"(?<=\d)(?=[A-Za-z])"))
    return words


def _split_words_on_regex(words: list[str], regex: Union[re.Pattern, str]) -> list[str]:  # type: ignore
    """
    Split every word on a regex and return the flattened result.

    The previous implementation popped and re-inserted entries in *words*
    while iterating it with ``enumerate`` — a modify-while-iterating pattern
    that only worked because these split regexes are idempotent. A flat
    comprehension produces identical output without mutating the list.

    Args:
        words (list[str]): The list of words to split.
        regex (Union[Pattern, str]): The regex to split on.

    Returns:
        list[str]: The list of words with the split words inserted.
    """
    return [part for word in words for part in re.split(regex, word)]
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pgqb/builder.py | Python | """Query building functions for pgqb.
This module provides a set of classes and functions for building SQL queries
in a Pythonic way. It includes support for various SQL operations such as
SELECT, INSERT, UPDATE, DELETE, JOIN, WHERE, ORDER BY, LIMIT, and OFFSET.
"""
from __future__ import annotations
import abc
import enum
import sys
import typing
from abc import ABC
from typing import Any, Type
from pgqb._snake import to_snake as snake
from pgqb.types import PGEnum, SQLType
if sys.version_info >= (3, 11): # pragma: no cover
from typing import Self # pragma: no cover
else:
from typing_extensions import Self # pragma: no cover
class QueryBuilder(abc.ABC):
    """Base class for all query builders.
    This abstract class defines the interface for all query builders in the system.
    All specific query builder classes should inherit from this base class.
    """
    @abc.abstractmethod
    def prepare(self) -> tuple[str, list[Any]]:
        """Get all params and the SQL string.
        Returns:
            A tuple containing the SQL string and a list of parameters.
        """
        # Abstract: the docstring is the method's only body. Concrete
        # builders return a (sql, params) pair — presumably consumed by a
        # DB driver's parameterized execute call; confirm with callers.
class _OperatorMixin(QueryBuilder, abc.ABC):  # noqa: PLW1641
    """Mixin providing SQL comparison and arithmetic operator overloads.

    Each method builds an :class:`Expression` combining ``self``, a SQL
    operator, and the right-hand operand.  ``None``, ``True`` and ``False``
    are compared with ``IS`` / ``IS NOT`` rather than ``=`` / ``!=``.
    """

    def __gt__(self, other: Any) -> Expression:
        """Build a ``>`` comparison against *other*."""
        return Expression(self, ">", other)

    def __ge__(self, other: Any) -> Expression:
        """Build a ``>=`` comparison against *other*."""
        return Expression(self, ">=", other)

    def __lt__(self, other: Any) -> Expression:
        """Build a ``<`` comparison against *other*."""
        return Expression(self, "<", other)

    def __le__(self, other: Any) -> Expression:
        """Build a ``<=`` comparison against *other*."""
        return Expression(self, "<=", other)

    def __eq__(self, other: object) -> Expression:  # type: ignore
        """Build an equality test; NULL and booleans use ``IS``."""
        operator = "IS" if other is None or isinstance(other, bool) else "="
        return Expression(self, operator, other)

    def __ne__(self, other: object) -> Expression:  # type: ignore
        """Build an inequality test; NULL and booleans use ``IS NOT``."""
        operator = "IS NOT" if other is None or isinstance(other, bool) else "!="
        return Expression(self, operator, other)

    def __add__(self, other: Any) -> Expression:
        """Build a ``+`` expression with *other*."""
        return Expression(self, "+", other)

    def __sub__(self, other: Any) -> Expression:
        """Build a ``-`` expression with *other*."""
        return Expression(self, "-", other)

    def __mul__(self, other: Any) -> Expression:
        """Build a ``*`` expression with *other*."""
        return Expression(self, "*", other)

    def __truediv__(self, other: Any) -> Expression:
        """Build a ``/`` expression with *other*."""
        return Expression(self, "/", other)

    def __mod__(self, other: Any) -> Expression:
        """Build a ``%`` expression with *other*."""
        return Expression(self, "%", other)

    def is_(self, other: Any) -> Expression:
        """Explicit ``IS`` comparison (falls back to ``=`` for other values)."""
        operator = "IS" if other is None or isinstance(other, bool) else "="
        return Expression(self, operator, other)

    def is_not(self, other: Any) -> Expression:
        """Explicit ``IS NOT`` comparison (falls back to ``!=``)."""
        operator = "IS NOT" if other is None or isinstance(other, bool) else "!="
        return Expression(self, operator, other)
class As(QueryBuilder):
    """SQL ``AS`` clause aliasing a column or expression."""

    def __init__(self, sub_query: Column | Expression, alias: str) -> None:
        """Store the aliased target and its alias name.

        Args:
            sub_query: The Column or Expression being aliased.
            alias: The name to expose the target under.
        """
        self._sub_query = sub_query
        self._alias = alias

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``<target> AS <alias>`` and collect parameters.

        Returns:
            The SQL fragment for the AS clause and its bound parameters.
        """
        # Columns render directly and never carry parameters.
        if isinstance(self._sub_query, Column):
            return f"{self._sub_query} AS {self._alias}", []
        inner_sql, inner_params = self._sub_query.prepare()
        return f"{inner_sql} AS {self._alias}", inner_params
class Column(_OperatorMixin):
    """Represents a column in a SQL table.

    Encapsulates the column's SQL type, constraints and ordering state.
    ``name`` and ``table`` are filled in by the ``Table`` metaclass when the
    owning table class is created.
    """

    def __init__(  # noqa: PLR0913
        self,
        sql_type: SQLType | Type[PGEnum] | None = None,
        *,
        check: str | None = None,
        default: Any | None = None,
        foreign_key: Column | None = None,
        index: bool = False,
        null: bool = False,
        primary: bool = False,
        unique: bool = False,
    ) -> None:
        """Initialize a Column instance.

        Args:
            sql_type: The SQL type for this column.
            check: The CHECK constraint body for this column.
            default: The DEFAULT value; rendered verbatim into the DDL, so
                SQL expressions may be passed as strings.
            foreign_key: The referenced column; this column inherits its type.
            index: Whether to create an index for this column.
            null: Whether this column is nullable.
            primary: Whether this column is part of the primary key.
            unique: Whether this column is UNIQUE.
        """
        self.name = ""
        self.table = ""
        # Sort direction consumed by the ORDER BY renderer.
        self._asc = True
        self._check = check
        self._default = default
        self._foreign_key = foreign_key
        self._index = index
        self._null = null
        self._primary = primary
        self._unique = unique
        self._sql_type = sql_type

    def __str__(self) -> str:
        """Return the qualified identifier.

        Returns:
            A string in the format "table.column".
        """
        return f"{self.table}.{self.name}"

    def __hash__(self) -> int:
        """Hash on the qualified identifier so columns can key dicts/sets.

        Returns:
            An integer hash of the string representation.
        """
        return hash(str(self))

    def _create(self) -> str:
        """Get column create SQL.

        Returns:
            A DDL fragment such as ``"id" UUID NOT NULL``.
        """
        null = " NOT NULL" if not self._null and not self._primary else ""
        unique = " UNIQUE" if self._unique else ""
        check = f" CHECK ({self._check})" if self._check else ""
        # Render the default verbatim; only an empty string needs explicit
        # quoting.  (Previously ANY falsy default -- e.g. 0 or False -- was
        # wrongly rendered as DEFAULT ''.)
        if self._default is None:
            default = ""
        elif isinstance(self._default, str) and not self._default:
            default = " DEFAULT ''"
        else:
            default = f" DEFAULT {self._default}"
        if self._foreign_key is not None:
            # A foreign-key column must match the referenced column's type.
            sql_type = self._foreign_key._sql_type
        elif hasattr(self._sql_type, "pg_enum_name"):
            # Custom PGEnum types render their generated enum type name.
            sql_type = self._sql_type.pg_enum_name()  # type: ignore
        else:
            sql_type = self._sql_type
        return f'"{self.name}" {sql_type}{default}{null}{unique}{check}'

    def prepare(self) -> tuple[str, list[Any]]:
        """Get column as SQL.

        Returns:
            The qualified column name and an empty parameter list.
        """
        return str(self), []

    def as_(self, alias: str) -> As:
        """Create an alias for this column.

        Args:
            alias: The alias name for the column.

        Returns:
            An As instance representing the aliased column.
        """
        return As(self, alias)

    def asc(self) -> Column:
        """Return a copy of this column sorted in ascending order."""
        return self.asc_or_desc(True)

    def desc(self) -> Column:
        """Return a copy of this column sorted in descending order."""
        return self.asc_or_desc(False)

    def asc_or_desc(self, asc: bool) -> Column:
        """Return a lightweight copy carrying ordering information.

        The copy holds only name, table and direction, which is all the
        ORDER BY renderer reads.

        Args:
            asc: True for ascending order, False for descending order.

        Returns:
            A new Column instance with the specified sort order.
        """
        col = Column()
        col.name = self.name
        col.table = self.table
        col._asc = asc
        return col
class Table(type):
    """Metaclass that wires table name and Column attributes onto subclasses.

    When a subclass is defined, its name is snake-cased into
    ``__table_name__`` and every Column attribute is registered in
    ``__table_columns__`` with its ``name``/``table`` fields populated.
    """

    __table_name__ = ""
    __table_columns__: dict[str, Column] = {}

    def __init_subclass__(cls, **kwargs: Any) -> None:
        """Record the quoted table name and collect Column attributes.

        Args:
            **kwargs: Additional keyword arguments forwarded to ``type``.
        """
        super().__init_subclass__(**kwargs)
        quoted_name = f'"{snake(cls.__name__)}"'
        collected: dict[str, Column] = {}
        for attr_name, attr_value in cls.__dict__.items():  # type: ignore
            if isinstance(attr_value, Column):
                column = getattr(cls, attr_name)
                column.name = attr_name
                column.table = quoted_name
                collected[attr_name] = column
        cls.__table_columns__ = collected
        cls.__table_name__ = quoted_name

    @classmethod
    def create_table(cls) -> str:
        """Build the ``CREATE TABLE IF NOT EXISTS`` statement for this table.

        Returns:
            SQL creating the table, its keys, and any secondary indexes.
        """
        column_defs: list[str] = []
        fk_map: dict[str, list[tuple[str, str]]] = {}
        index_stmts: list[str] = []
        pk_cols: list[str] = []
        for column in cls.__table_columns__.values():
            column_defs.append(column._create())
            fk = column._foreign_key
            if fk:
                # Group foreign keys by referenced table so multi-column
                # references render as a single constraint.
                fk_map.setdefault(fk.table, []).append((column.name, fk.name))
            if column._index:
                index_stmts.append(
                    f"CREATE INDEX ON {cls.__table_name__} ({column.name});"
                )
            if column._primary:
                pk_cols.append(column.name)
        col_str = ",\n ".join(column_defs)
        if col_str:
            col_str = f" {col_str}"
        fk_parts: list[str] = []
        for ref_table, pairs in fk_map.items():
            local_cols = ", ".join(pair[0] for pair in pairs)
            remote_cols = ", ".join(pair[1] for pair in pairs)
            fk_parts.append(
                f",\n FOREIGN KEY ({local_cols}) REFERENCES {ref_table} ({remote_cols})"
            )
        fk_str = "".join(fk_parts)
        idx_str = "\n".join(index_stmts)
        if idx_str:
            idx_str = "\n" + idx_str
        pk_str = ", ".join(pk_cols)
        if pk_str:
            pk_str = f",\n PRIMARY KEY ({pk_str})"
        ddl = (
            f"CREATE TABLE IF NOT EXISTS {cls.__table_name__}"
            f" (\n{col_str}{pk_str}{fk_str}\n)"
        )
        return f"{ddl};{idx_str}"
class _LimitMixin(QueryBuilder, abc.ABC):
    """Adds a ``limit(...)`` builder step to a query node."""

    def limit(self, limit: int) -> Limit:
        """Append a LIMIT clause to this query.

        Args:
            limit: The maximum number of rows to return.

        Returns:
            The wrapping Limit node.
        """
        return Limit(self, limit)
class _OffsetMixin(QueryBuilder, abc.ABC):
    """Adds an ``offset(...)`` builder step to a query node."""

    def offset(self, offset: int) -> Offset:
        """Append an OFFSET clause to this query.

        Args:
            offset: The number of rows to skip.

        Returns:
            The wrapping Offset node.
        """
        return Offset(self, offset)
class _PaginateMixin(_OffsetMixin, _LimitMixin, ABC):
    """Combines LIMIT and OFFSET steps for pagination."""
class _OrderByMixin(QueryBuilder, abc.ABC):
    """Adds an ``order_by(...)`` builder step to a query node."""

    def order_by(self, *columns: Column) -> OrderBy:
        """Append an ORDER BY clause to this query.

        Args:
            *columns: The columns to order by.

        Returns:
            The wrapping OrderBy node.
        """
        return OrderBy(self, *columns)
class _WhereMixin(QueryBuilder, abc.ABC):
    """Adds a ``where(...)`` builder step to a query node."""

    def where(
        self, evaluation: Expression, *evaluations: Expression | LogicGate
    ) -> Where:
        """Append a WHERE clause to this query.

        Args:
            evaluation: The primary condition.
            *evaluations: Additional conditions/logic gates appended after it.

        Returns:
            The wrapping Where node.
        """
        return Where(self, evaluation, *evaluations)
class BooleanOperator(enum.Enum):
    """Enumeration of boolean operators used in SQL queries.

    Each member's ``value`` is the literal SQL keyword text; LogicGate
    interpolates it directly when rendering compound conditions.
    """

    AND = "AND"
    AND_NOT = "AND NOT"
    OR = "OR"
    OR_NOT = "OR NOT"
    IS = "IS"
    IS_NOT = "IS NOT"
    IN = "IN"
    NOT_IN = "NOT IN"
class LogicGate(QueryBuilder):
    """Combines expressions under one boolean operator (AND, OR, ...)."""

    def __init__(
        self,
        boolean_operator: BooleanOperator,
        evaluation: Expression,
        *evaluations: Expression | LogicGate,
    ) -> None:
        """Store the operator and its operand expressions.

        Args:
            boolean_operator: The boolean operator to use.
            evaluation: The primary expression to evaluate.
            *evaluations: Additional expressions combined with the primary one.
        """
        self._evaluations: list[Expression | LogicGate] = [evaluation, *evaluations]
        self._boolean_operator = boolean_operator

    def prepare(self) -> tuple[str, list[Any]]:
        """Render `` <OP> <conditions>`` and collect parameters.

        Returns:
            The SQL fragment (parenthesized when multiple conditions are
            combined) and its bound parameters.
        """
        inner_sql, params = _prepare_expressions(*self._evaluations)
        rendered = inner_sql if len(self._evaluations) == 1 else f"({inner_sql})"
        return f" {self._boolean_operator.value} {rendered}", params
class Expression(_OperatorMixin):
    """A SQL comparison/arithmetic fragment with bound parameters."""

    def __init__(
        self,
        left_operand: Column | Expression | _OperatorMixin,
        operator: str,
        right_operand: Any,
    ) -> None:
        """Build ``<left> <operator> <right>``, binding values as needed.

        Args:
            left_operand: The left side of the expression.
            operator: The SQL operator between the operands.
            right_operand: The right side; columns and expressions render
                inline, NULL/TRUE/FALSE render as keywords, anything else
                becomes a ``?`` placeholder parameter.
        """
        self._params: list[Any] = []
        self._left_operand = left_operand
        if isinstance(right_operand, Column):
            rhs = str(right_operand)
        elif isinstance(right_operand, Expression):
            rhs, nested_params = right_operand.prepare()
            self._params.extend(nested_params)
        elif right_operand is None:
            rhs = "NULL"
        elif right_operand is True:
            rhs = "TRUE"
        elif right_operand is False:
            rhs = "FALSE"
        elif isinstance(right_operand, enum.Enum):
            # Enums bind their underlying value, not the member itself.
            rhs = "?"
            self._params.append(right_operand.value)
        else:
            rhs = "?"
            self._params.append(right_operand)
        self._operator = operator
        self._other_str = rhs

    def as_(self, alias: str) -> As:
        """Create an alias for this expression.

        Args:
            alias: The alias name for the expression.

        Returns:
            An As instance representing the aliased expression.
        """
        return As(self, alias)

    def prepare(self) -> tuple[str, list[Any]]:
        """Render the expression and collect parameters.

        Returns:
            The SQL fragment and its bound parameters (left side first).
        """
        left_sql, left_params = self._left_operand.prepare()
        return f"{left_sql} {self._operator} {self._other_str}", left_params + self._params
class Join(QueryBuilder):
    """A JOIN clause; subclasses override the keyword for LEFT/RIGHT joins."""

    def __init__(self, table: Type[Table]) -> None:
        """Remember the table being joined.

        Args:
            table: The table to join with.
        """
        self._table = table
        self._on: On | None = None
        self._keyword: str = "JOIN"

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``<KEYWORD> <table> ON ...`` with parameters.

        Returns:
            The SQL fragment for this JOIN and its bound parameters.

        Raises:
            ValueError: If no ON condition has been set for the join.
        """
        if self._on is None:
            msg = "No condition set for join, need to call `join.on(...`"
            raise ValueError(msg)
        on_sql, on_params = self._on.prepare()
        return f"{self._keyword} {self._table.__table_name__} {on_sql}", on_params

    def on(self, *expressions: Expression | LogicGate) -> Self:
        """Set the ON condition for the JOIN.

        Args:
            *expressions: The conditions for the ON clause.

        Returns:
            This Join, allowing method chaining.
        """
        self._on = On(*expressions)
        return self
class LeftJoin(Join):
    """A LEFT JOIN clause."""

    def __init__(self, table: Type[Table]) -> None:
        """Join *table* with the ``LEFT JOIN`` keyword.

        Args:
            table: The table to left join with.
        """
        super().__init__(table)
        self._keyword = "LEFT JOIN"
class RightJoin(Join):
    """A RIGHT JOIN clause."""

    def __init__(self, table: Type[Table]) -> None:
        """Join *table* with the ``RIGHT JOIN`` keyword.

        Args:
            table: The table to right join with.
        """
        super().__init__(table)
        self._keyword = "RIGHT JOIN"
class Limit(_OffsetMixin):
    """A LIMIT clause wrapped around another query node."""

    def __init__(self, subquery: QueryBuilder, limit: int) -> None:
        """Wrap *subquery* with a row limit.

        Args:
            subquery: The query to apply the LIMIT to.
            limit: The maximum number of rows to return.
        """
        self.subquery = subquery
        self.limit = limit

    def prepare(self) -> tuple[str, list[Any]]:
        """Render the wrapped query followed by ``LIMIT n``.

        Returns:
            The SQL fragment with the LIMIT clause and its parameters.
        """
        inner_sql, params = self.subquery.prepare()
        return f"{inner_sql} LIMIT {self.limit}", params
class Offset(QueryBuilder):
    """An OFFSET clause wrapped around another query node."""

    def __init__(self, subquery: QueryBuilder, offset: int) -> None:
        """Wrap *subquery* with a row offset.

        Args:
            subquery: The query to apply the OFFSET to.
            offset: The number of rows to skip.
        """
        self.subquery = subquery
        self.offset = offset

    def prepare(self) -> tuple[str, list[Any]]:
        """Render the wrapped query followed by ``OFFSET n``.

        Returns:
            The SQL fragment with the OFFSET clause and its parameters.
        """
        inner_sql, params = self.subquery.prepare()
        return f"{inner_sql} OFFSET {self.offset}", params
class On(_PaginateMixin):
    """The ON clause of a JOIN."""

    def __init__(self, *expressions: Expression | LogicGate) -> None:
        """Store the join conditions.

        Args:
            *expressions: The conditions for the ON clause.
        """
        self._expressions = expressions

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``ON <conditions>`` with collected parameters.

        Returns:
            The SQL fragment for the ON clause and its parameters.
        """
        conditions_sql, params = _prepare_expressions(*self._expressions)
        return f"ON {conditions_sql}", params
class From(_WhereMixin, _PaginateMixin):
    """The FROM clause of a SELECT, including any JOINs."""

    def __init__(self, select: Select, table: Type[Table], *joins: Join) -> None:
        """Attach the source table and joins to a SELECT.

        Args:
            select: The SELECT clause of the query.
            table: The main table to select from.
            *joins: Any JOIN clauses to include.
        """
        self._select = select
        self._table = table
        self._joins = joins

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``<select> FROM <table> [joins...]`` with parameters.

        Returns:
            The combined SQL fragment and its parameters (SELECT parameters
            first, then join parameters in order).
        """
        select_sql, params = self._select.prepare()
        join_fragments: list[str] = []
        join_params: list[Any] = []
        for join in self._joins:
            join_sql, jp = join.prepare()
            join_fragments.append(f" {join_sql}")
            join_params.extend(jp)
        return (
            f"{select_sql} FROM {self._table.__table_name__}{''.join(join_fragments)}",
            params + join_params,
        )
class OrderBy(_PaginateMixin):
    """An ORDER BY clause wrapped around another query node."""

    def __init__(
        self, subquery: Select | From | On | _OrderByMixin, *columns: Column
    ) -> None:
        """Wrap *subquery* with an ordering over *columns*.

        Args:
            subquery: The query to apply the ORDER BY to.
            *columns: The columns to order by (direction taken from each).
        """
        self._subquery = subquery
        self.columns = list(columns)

    def prepare(self) -> tuple[str, list[Any]]:
        """Render the wrapped query followed by ``ORDER BY ...``.

        Returns:
            The SQL fragment with the ORDER BY clause and its parameters.
        """
        inner_sql, params = self._subquery.prepare()
        directions = ", ".join(
            f"{col} {'ASC' if col._asc else 'DESC'}" for col in self.columns
        )
        return f"{inner_sql} ORDER BY {directions}", params
class Values(QueryBuilder):
    """The VALUES clause of an INSERT statement."""

    def __init__(self, subquery: InsertInto, values: dict[Column | str, Any]) -> None:
        """Attach column/value pairs to an INSERT.

        Args:
            subquery: The INSERT INTO clause.
            values: Mapping from columns (or attribute-name strings) to values.
        """
        self._subquery = subquery
        self.values = values

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``... (cols) VALUES (?, ...)`` with bound values.

        Returns:
            The SQL fragment and the values in column order.
        """
        insert_sql, _ = self._subquery.prepare()
        placeholders = ", ".join(["?"] * len(self.values))
        quoted_names: list[str] = []
        for key in self.values:
            if isinstance(key, Column):
                quoted_names.append(f'"{key.name}"')
            else:
                # String keys are attribute names on the target table class.
                resolved = self._subquery._table.__table_columns__[key].name
                quoted_names.append(f'"{resolved}"')
        columns = ", ".join(quoted_names)
        return f"{insert_sql} ({columns}) VALUES ({placeholders})", list(
            self.values.values()
        )
class Where(_OrderByMixin, _PaginateMixin):
    """A WHERE clause wrapped around another query node."""

    def __init__(
        self,
        subquery: Select | From | _WhereMixin,
        *expressions: Expression | LogicGate,
    ) -> None:
        """Wrap *subquery* with the given conditions.

        The conditions are rendered eagerly here; only the wrapped query is
        re-rendered on each ``prepare()``.

        Args:
            subquery: The query to apply the WHERE clause to.
            *expressions: The conditions for the WHERE clause.
        """
        self._subquery = subquery
        self._sql, self._eval_params = _prepare_expressions(*expressions)

    def prepare(self) -> tuple[str, list[Any]]:
        """Render the wrapped query followed by ``WHERE ...``.

        Returns:
            The SQL fragment and its parameters (query parameters first,
            then condition parameters).
        """
        base_sql, base_params = self._subquery.prepare()
        return f"{base_sql} WHERE {self._sql}", base_params + self._eval_params
class Set(_WhereMixin):
    """The SET clause of an UPDATE statement."""

    def __init__(self, subquery: Update, values: dict[Column | str, Any]) -> None:
        """Attach column/value assignments to an UPDATE.

        Args:
            subquery: The UPDATE clause.
            values: Mapping from columns (or attribute-name strings) to their
                new values; QueryBuilder values render as subqueries.
        """
        self._subquery = subquery
        self.values = values

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``... SET "col" = ?, ...`` with bound values.

        Returns:
            The SQL fragment and its parameters.
        """
        update_sql, params = self._subquery.prepare()
        assignments: list[str] = []
        for target, value in self.values.items():
            if isinstance(value, QueryBuilder):
                # Nested builders become parenthesized subqueries.
                sub_sql, sub_params = value.prepare()
                params.extend(sub_params)
                rendered = f"({sub_sql})"
            else:
                params.append(value)
                rendered = "?"
            if isinstance(target, Column):
                name = target.name
            else:
                # String keys are attribute names on the target table class.
                name = self._subquery._table.__table_columns__[target].name
            assignments.append(f'"{name}" = {rendered}')
        return update_sql + " SET " + ", ".join(assignments), params
class InsertInto(QueryBuilder):
    """An INSERT INTO statement."""

    def __init__(self, table: Type[Table]) -> None:
        """Remember the target table.

        Args:
            table: The table to insert into.
        """
        self._table = table

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``INSERT INTO <table>`` with no parameters.

        Returns:
            The SQL fragment and an empty parameter list.
        """
        return f"INSERT INTO {self._table.__table_name__}", []

    def values(self, values: dict[Column | str, Any] | None = None) -> Values:
        """Append a VALUES clause to the INSERT.

        Args:
            values: Mapping of columns to values; an empty dict when None.

        Returns:
            A Values instance representing the VALUES clause.
        """
        return Values(self, values or {})
class Delete(_WhereMixin):
    """A DELETE statement."""

    def __init__(self, table: Type[Table]) -> None:
        """Remember the table rows will be deleted from.

        Args:
            table: The table to delete from.
        """
        self._table = table

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``DELETE FROM <table>`` with no parameters.

        Returns:
            The SQL fragment and an empty parameter list.
        """
        return f"DELETE FROM {self._table.__table_name__}", []  # noqa: S608
class Select(QueryBuilder):
    """Represents a SELECT statement in SQL."""

    def __init__(self, *args: Column | Type[Table] | As) -> None:
        """Collect the select-list items.

        Args:
            *args: Columns, aliased items (``As``) or whole ``Table``
                subclasses, which expand to all of their columns.

        Raises:
            ValueError: If an argument is not a Column, Table, or As instance.
        """
        self._columns: list[str] = []
        self._params: list[Any] = []
        for arg in args:
            if isinstance(arg, Column):
                self._columns.append(str(arg))
            elif isinstance(arg, As):
                sql, params = arg.prepare()
                self._params.extend(params)
                self._columns.append(sql)
            # Guard with isinstance(arg, type) so that non-class arguments
            # raise the documented ValueError below instead of a TypeError
            # from issubclass().
            elif isinstance(arg, type) and issubclass(arg, Table):
                self._columns.extend(map(str, arg.__table_columns__.values()))
            else:
                raise ValueError(f"Unsupported argument type: {type(arg)}")

    def prepare(self) -> tuple[str, list[Any]]:
        """Render the SELECT list.

        Returns:
            The SQL string for the SELECT clause and any alias parameters.
        """
        select = f"SELECT {', '.join(self._columns)}"
        return select, self._params

    def from_(self, table: Type[Table], *args: Join) -> From:
        """Append a FROM clause to the SELECT statement.

        Args:
            table: The main table to select from.
            *args: Any JOIN clauses to include.

        Returns:
            A From instance representing the FROM clause.
        """
        return From(self, table, *args)
class Update(QueryBuilder):
    """An UPDATE statement."""

    def __init__(self, table: Type[Table]) -> None:
        """Remember the table to update.

        Args:
            table: The table to update.
        """
        self._table = table

    def set(self, values: dict[Column | str, Any]) -> Set:
        """Append a SET clause to the UPDATE.

        Args:
            values: Mapping of columns to their new values.

        Returns:
            A Set instance representing the SET clause.
        """
        return Set(self, values)

    def prepare(self) -> tuple[str, list[Any]]:
        """Render ``UPDATE <table>`` with no parameters.

        Returns:
            The SQL fragment and an empty parameter list.
        """
        return f"UPDATE {self._table.__table_name__}", []
def _prepare_expressions(*expressions: Expression | LogicGate) -> tuple[str, list[Any]]:
"""Prepare multiple expressions for use in a SQL query.
This function is used internally to combine multiple expressions or logic gates
into a single SQL string with associated parameters.
Args:
*expressions: The expressions or logic gates to prepare.
Returns:
A tuple containing the combined SQL string and a list of all parameters.
"""
sql, params = "", []
for evaluation in expressions:
s, p = evaluation.prepare()
sql += s
params += p
return sql, params
if typing.TYPE_CHECKING:
    """Type hint for expressions."""
    # For static type-checkers only: alias Expression to bool so comparisons
    # like `Table.col == 1` type-check as boolean conditions.  Never executed
    # at runtime (TYPE_CHECKING is False).
    Expression = bool # type: ignore # pragma: no cover
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
pgqb/types.py | Python | """PostgreSQL types for pgqb query builder library.
This module contains classes for PostgreSQL data types. The classes are
used to specify the type of a column in a table. The classes are used to
generate the SQL for creating a table.
From the docs: https://www.postgresql.org/docs/current/datatype.html
"""
from __future__ import annotations
import enum
from pgqb import _snake as snake
class PGEnum(enum.Enum):
    """Base class for PostgreSQL ``CREATE TYPE ... AS ENUM`` types."""

    @classmethod
    def pg_enum_name(cls) -> str:
        """Get the SQL name for this custom enum.

        Returns:
            str: The upper snake-case SQL type name derived from the class
            name.
        """
        return snake.to_snake(cls.__name__).upper()

    @classmethod
    def pg_enum_get_create(cls) -> str:
        """Get create enum SQL.

        Returns:
            str: The ``CREATE TYPE`` statement declaring this enum with all
            member values.
        """
        members = ", ".join(f"'{member.value}'" for member in cls)
        return f"CREATE TYPE {cls.pg_enum_name()} AS ENUM ({members});"
class SQLType:
    """Base class for PostgreSQL column types.

    ``str(instance)`` yields the SQL spelling of the type.  The default is
    the class name itself; parameterized types override ``__str__``.
    """

    def __str__(self) -> str:
        """Render the SQL type name (the class name by default)."""
        return self.__class__.__name__


class BIGINT(SQLType):
    """8-byte signed integer."""


class BIGSERIAL(SQLType):
    """Auto-incrementing 8-byte integer."""


class BIT(SQLType):
    """Fixed-length bit string."""


class VARBIT(SQLType):
    """Variable-length bit string."""


class BOOLEAN(SQLType):
    """Logical Boolean (true/false)."""


class BOX(SQLType):
    """Rectangular box on a plane."""


class BYTEA(SQLType):
    """Binary data ("byte array")."""


class CHAR(SQLType):
    """Fixed-length character string, e.g. ``CHAR(10)``."""

    def __init__(self, fixed_length: int | None = None) -> None:
        """Store the optional fixed length.

        Args:
            fixed_length: Number of characters; omitted when not given.
        """
        self._fixed_length = fixed_length

    def __str__(self) -> str:
        """Render ``CHAR`` or ``CHAR(n)``."""
        if self._fixed_length:
            return f"CHAR({self._fixed_length})"
        return "CHAR"


class VARCHAR(SQLType):
    """Variable-length character string with an optional limit."""

    def __init__(self, variable_length: int | None = None) -> None:
        """Store the optional maximum length.

        Args:
            variable_length: Maximum number of characters; unlimited when
                not given.
        """
        self._variable_length = variable_length

    def __str__(self) -> str:
        """Render ``VARCHAR`` or ``VARCHAR(n)``."""
        return f"VARCHAR({self._variable_length})" if self._variable_length else "VARCHAR"


class CIDR(SQLType):
    """IPv4 or IPv6 network address."""


class CIRCLE(SQLType):
    """Circle on a plane."""


class DATE(SQLType):
    """Calendar date (year, month, day)."""


class DOUBLE(SQLType):
    """8-byte floating-point number (``DOUBLE PRECISION``)."""

    def __str__(self) -> str:
        """Render the two-word SQL spelling."""
        return "DOUBLE PRECISION"


class INET(SQLType):
    """IPv4 or IPv6 host address."""


class INTEGER(SQLType):
    """4-byte signed integer."""


class INTERVAL(SQLType):
    """Time span, optionally restricted to fields and a precision."""

    def __init__(self, fields: str | None = None, precision: int | None = None) -> None:
        """Store the optional field restriction and precision.

        Args:
            fields: Field restriction such as ``"YEAR TO MONTH"``.
            precision: Fractional-seconds precision.
        """
        self._fields = fields
        self._precision = precision

    def __str__(self) -> str:
        """Render ``INTERVAL[ fields][(p)]``."""
        rendered = "INTERVAL"
        if self._fields:
            rendered += f" {self._fields}"
        if self._precision:
            rendered += f"({self._precision})"
        return rendered


class JSON(SQLType):
    """Textual JSON data."""


class JSONB(SQLType):
    """Binary JSON data, decomposed."""


class LINE(SQLType):
    """Infinite line on a plane."""


class LSEG(SQLType):
    """Line segment on a plane."""


class MACADDR(SQLType):
    """MAC (Media Access Control) address."""


class MACADDR8(SQLType):
    """MAC (Media Access Control) address (EUI-64 format)."""


class MONEY(SQLType):
    """Currency amount."""


class NUMERIC(SQLType):
    """Exact numeric with selectable precision and scale."""

    def __init__(self, precision: int | None = None, scale: int | None = None) -> None:
        """Store the optional precision and scale.

        Args:
            precision: Total number of significant digits.
            scale: Digits after the decimal point (requires precision).
        """
        self._precision = precision
        self._scale = scale

    def __str__(self) -> str:
        """Render ``NUMERIC``, ``NUMERIC(p)`` or ``NUMERIC(p, s)``.

        Raises:
            ValueError: If a scale is given without a precision.
        """
        if self._precision and self._scale:
            return f"NUMERIC({self._precision}, {self._scale})"
        if self._precision:
            return f"NUMERIC({self._precision})"
        if self._scale:
            msg = "Precision must be set if scale is"
            raise ValueError(msg)
        return "NUMERIC"


class PATH(SQLType):
    """Geometric path on a plane."""


# noinspection PyPep8Naming
class PG_LSN(SQLType):  # noqa: N801
    """PostgreSQL Log Sequence Number."""


# noinspection PyPep8Naming
class PG_SNAPSHOT(SQLType):  # noqa: N801
    """User-level transaction ID snapshot."""


class POINT(SQLType):
    """Geometric point on a plane."""


class POLYGON(SQLType):
    """Closed geometric path on a plane."""


class REAL(SQLType):
    """4-byte floating-point number."""


class SMALLINT(SQLType):
    """2-byte signed integer."""


class SMALLSERIAL(SQLType):
    """Auto-incrementing 2-byte integer."""


class SERIAL(SQLType):
    """Auto-incrementing 4-byte integer."""


class TEXT(SQLType):
    """Variable-length character string without a limit."""


class TIME(SQLType):
    """Time of day, optionally with precision and time zone."""

    def __init__(
        self, precision: int | None = None, *, with_time_zone: bool = False
    ) -> None:
        """Store the precision and time-zone flag.

        Args:
            precision: Fractional-seconds precision.
            with_time_zone: Append ``WITH TIME ZONE`` when True.
        """
        self._precision = precision
        self._with_time_zone = with_time_zone

    def __str__(self) -> str:
        """Render ``TIME[(p)][ WITH TIME ZONE]``."""
        rendered = "TIME"
        if self._precision:
            rendered += f"({self._precision})"
        if self._with_time_zone:
            rendered += " WITH TIME ZONE"
        return rendered


class TIMESTAMP(SQLType):
    """Date and time, optionally with precision and time zone."""

    def __init__(
        self, precision: int | None = None, *, with_time_zone: bool = False
    ) -> None:
        """Store the precision and time-zone flag.

        Args:
            precision: Fractional-seconds precision.
            with_time_zone: Append ``WITH TIME ZONE`` when True.
        """
        self._precision = precision
        self._with_time_zone = with_time_zone

    def __str__(self) -> str:
        """Render ``TIMESTAMP[(p)][ WITH TIME ZONE]``."""
        rendered = "TIMESTAMP"
        if self._precision:
            rendered += f"({self._precision})"
        if self._with_time_zone:
            rendered += " WITH TIME ZONE"
        return rendered


class TSQUERY(SQLType):
    """Text search query."""


class TSVECTOR(SQLType):
    """Text search document."""


class UUID(SQLType):
    """Universally unique identifier."""


class XML(SQLType):
    """XML data."""
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/clean.sh | Shell | #!/usr/bin/env bash
rm -f `find . -type f -name '*.py[co]' `
rm -f `find . -type f -name '*~' `
rm -f `find . -type f -name '.*~' `
rm -f `find . -type f -name .coverage`
rm -f `find . -type f -name coverage.xml`
rm -f `find . -type f -name ".coverage.*"`
rm -rf `find . -name __pycache__`
rm -rf `find . -type d -name '*.egg-info' `
rm -rf `find . -type d -name 'pip-wheel-metadata' `
rm -rf `find . -type d -name .pytest_cache`
rm -rf `find . -type d -name .cache`
rm -rf `find . -type d -name .mypy_cache`
rm -rf `find . -type d -name htmlcov`
rm -rf `find . -type d -name "*.egg-info"`
rm -rf `find . -type d -name build`
rm -rf `find . -type d -name dist`
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/format.sh | Shell | #!/usr/bin/env bash
set -e
set -x
pre-commit run --all-files --verbose --show-diff-on-failure
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/mypy.sh | Shell | #!/usr/bin/env bash
set -e
set -x
mypy --show-error-codes pgqb
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
scripts/tests.sh | Shell | #!/usr/bin/env bash
set -e
set -x
echo "ENV=${ENV}"
export PYTHONPATH=.
pytest --cov=pgqb --cov-report=xml
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_builder.py | Python | import enum
import uuid
from typing import Any
import pytest
from pgqb import (
Column,
Table,
and_,
and_not,
delete_from,
insert_into,
join,
left_join,
or_,
or_not,
right_join,
select,
update,
)
class MyEnum(enum.Enum):
"""Test enum."""
OPTION = "option"
class User(Table):
id = Column()
first = Column()
last = Column()
class Task(Table):
id = Column()
user_id = Column()
value = Column()
def test_select() -> None:
query, params = (
select(
User,
)
.from_(
User,
join(Task).on(Task.user_id == User.id and Task.user_id == User.id),
left_join(Task).on(Task.value == 1),
right_join(Task).on(Task.value >= 2),
join(Task).on(Task.value <= MyEnum.OPTION),
)
.where(
Task.value > "a string",
)
.order_by(
Task.value.asc(),
Task.value.desc(),
)
).prepare()
assert params == [1, 2, "option", "a string"]
expected = " ".join(
[
'SELECT "user".id, "user".first, "user".last',
'FROM "user" JOIN "task" ON "task".user_id = "user".id',
'LEFT JOIN "task" ON "task".value = ?',
'RIGHT JOIN "task" ON "task".value >= ?',
'JOIN "task" ON "task".value <= ?',
'WHERE "task".value > ?',
'ORDER BY "task".value ASC,',
'"task".value DESC',
]
)
assert query == expected
def test_select_columns() -> None:
sql, params = (
select(
User.id,
User.first.as_("name"),
Task.id,
)
.from_(
User,
left_join(Task).on(Task.user_id == User.id),
)
.limit(20)
.offset(20)
.prepare()
)
assert params == []
assert sql == " ".join(
[
'SELECT "user".id, "user".first AS name, "task".id',
'FROM "user" LEFT JOIN "task" ON "task".user_id = "user".id',
"LIMIT 20 OFFSET 20",
]
)
def test_insert() -> None:
id = uuid.uuid4()
query, params = (insert_into(User).values({User.id: id})).prepare()
assert params == [id]
assert query == 'INSERT INTO "user" ("id") VALUES (?)'
def test_insert_str_columns() -> None:
id = uuid.uuid4()
query, params = (insert_into(User).values({"id": id})).prepare()
assert params == [id]
assert query == 'INSERT INTO "user" ("id") VALUES (?)'
def test_expressions() -> None:
query = (
select(User)
.from_(User)
.where(
User.id > 1,
and_(User.id < User.id),
and_not(User.id >= User.id),
or_(User.id <= User.id),
or_not(
User.id != User.id,
and_(User.id != User.id),
),
)
)
sql, params = query.prepare()
assert params == [1]
assert sql == " ".join(
[
'SELECT "user".id, "user".first, "user".last',
'FROM "user" WHERE "user".id > ?',
'AND "user".id < "user".id',
'AND NOT "user".id >= "user".id',
'OR "user".id <= "user".id',
'OR NOT ("user".id != "user".id AND "user".id != "user".id)',
]
)
# noinspection PyComparisonWithNone
def test_operators() -> None:
false: Any = False
query = (
select(
User,
(User.id == 3).as_("mocha"),
)
.from_(User)
.where(
User.id > 1,
and_(User.last.is_(None), or_(User.first.is_not(True))),
and_(User.last.is_(1), or_(User.first.is_not(1))),
and_((User.last == false), or_(User.first != false)),
)
)
sql, params = query.prepare()
assert params == [3, 1, 1, 1]
assert sql == " ".join(
[
'SELECT "user".id, "user".first, "user".last, "user".id = ? AS mocha',
'FROM "user"',
'WHERE "user".id > ?',
'AND ("user".last IS NULL OR "user".first IS NOT TRUE)',
'AND ("user".last = ? OR "user".first != ?)',
'AND ("user".last IS FALSE OR "user".first IS NOT FALSE)',
]
)
@pytest.mark.xfail(
reason="AssertionError: assert [1, 2, 12, 5, 6, 7] == [1, 2, 3, 4, 5, 6, ...]"
)
def test_operators_chained() -> None:
query = (
select(User)
.from_(User)
.where(
User.id + 1 > Task.id - 2,
and_(User.id > 12, or_(User.id * 5 % 6 > 7)),
)
)
sql, params = query.prepare()
assert params == [1, 2, 3, 4, 5, 6, 7]
assert sql == " ".join(
[
'SELECT "user".id, "user".first, "user".last',
'FROM "user"',
'WHERE "user".id + ? > "task".id - ? AND ("user".id / ? > ?',
'OR "user".id * ? % ? > ?)',
]
)
def test_update() -> None:
sql, params = (
update(User)
.set(
{
User.first: "Potato",
User.last: "Wedge",
User.id: select(Task.id).from_(Task).where(Task.id == 1),
}
)
.where(User.id == 2)
).prepare()
assert params == ["Potato", "Wedge", 1, 2]
assert sql == " ".join(
[
'UPDATE "user"',
'SET "first" = ?, "last" = ?,',
'"id" = (SELECT "task".id FROM "task" WHERE "task".id = ?)',
'WHERE "user".id = ?',
]
)
def test_update_str_columns() -> None:
sql, params = (
update(User)
.set(
{
"first": "Potato",
"last": "Wedge",
"id": select(Task.id).from_(Task).where(Task.id == 1),
}
)
.where(User.id == 2)
).prepare()
assert params == ["Potato", "Wedge", 1, 2]
assert sql == " ".join(
[
'UPDATE "user"',
'SET "first" = ?, "last" = ?,',
'"id" = (SELECT "task".id FROM "task" WHERE "task".id = ?)',
'WHERE "user".id = ?',
]
)
def test_delete() -> None:
sql, params = delete_from(User).where(User.first == "Potato").prepare()
assert params == ["Potato"]
assert sql == 'DELETE FROM "user" WHERE "user".first = ?'
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_snake.py | Python | import pytest
from pgqb import _snake as snake
@pytest.mark.parametrize(
"input_str, expected_output",
[
("PotatoHumanAlien", ["Potato", "Human", "Alien"]),
("Potato.Human.Alien", ["Potato", "Human", "Alien"]),
("Potato-Human-Alien", ["Potato", "Human", "Alien"]),
("Potato/Human/Alien", ["Potato", "Human", "Alien"]),
("Potato_Human_Alien", ["Potato", "Human", "Alien"]),
("Potato Human Alien", ["Potato", "Human", "Alien"]),
("Honey", ["Honey"]),
("DING", ["DING"]),
("", []),
(
"orange beer-PotatoAlien_food.yummy/honey",
["orange", "beer", "Potato", "Alien", "food", "yummy", "honey"],
),
("HumanNAMEDJason", ["Human", "NAMED", "Jason"]),
],
)
def test_get_words(input_str, expected_output):
assert snake.get_words(input_str) == expected_output
@pytest.mark.parametrize(
"input_str, expected_output",
[
("PotatoHumanAlien", "potato_human_alien"),
("Potato.Human.Alien", "potato_human_alien"),
("Potato-Human-Alien", "potato_human_alien"),
("Potato/Human/Alien", "potato_human_alien"),
("Potato_Human_Alien", "potato_human_alien"),
("Potato Human Alien", "potato_human_alien"),
("Honey", "honey"),
("DING", "ding"),
("", ""),
(
"orange beer-PotatoAlien_food.yummy/honey",
"orange_beer_potato_alien_food_yummy_honey",
),
("HumanNAMEDJason", "human_named_jason"),
],
)
def test_to_snake(input_str, expected_output):
assert snake.to_snake(input_str) == expected_output
# Parametrized tests for happy path scenarios
@pytest.mark.parametrize(
"words, regex, expected",
[
(["hello", "world"], r"\s", ["hello", "world"]),
(["hello-world", "python"], r"-", ["hello", "world", "python"]),
(["hello,world", "python,3.8"], r",", ["hello", "world", "python", "3.8"]),
(["hello|world|again"], r"\|", ["hello", "world", "again"]),
],
ids=lambda id: id,
)
def test_split_words_on_regex_happy_path(words, regex, expected):
result = snake._split_words_on_regex(words, regex)
assert result == expected, f"Expected {expected} but got {result}"
# Parametrized tests for edge cases
@pytest.mark.parametrize(
"words, regex, expected",
[
([], r"\s", []),
([""], r"\s", [""]),
],
ids=lambda id: id,
)
def test_split_words_on_regex_edge_cases(words, regex, expected):
result = snake._split_words_on_regex(words, regex)
assert result == expected, f"Expected {expected} but got {result}"
# Parametrized tests for error cases
@pytest.mark.parametrize(
"words, regex, expected_exception",
[
(["hello", "world"], None, TypeError),
(["hello", "world"], 123, TypeError),
],
ids=lambda id: id,
)
def test_split_words_on_regex_error_cases(words, regex, expected_exception):
with pytest.raises(expected_exception):
snake._split_words_on_regex(words, regex)
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
tests/test_types.py | Python | import inspect
import pytest
from pgqb import (
BIGINT,
BIGSERIAL,
BIT,
BOOLEAN,
BOX,
BYTEA,
CHAR,
CIDR,
CIRCLE,
DATE,
DOUBLE,
INET,
INTEGER,
INTERVAL,
JSON,
JSONB,
LINE,
LSEG,
MACADDR,
MACADDR8,
MONEY,
NUMERIC,
PATH,
PG_LSN,
PG_SNAPSHOT,
POINT,
POLYGON,
REAL,
SERIAL,
SMALLINT,
SMALLSERIAL,
TEXT,
TIME,
TIMESTAMP,
TSQUERY,
TSVECTOR,
UUID,
VARBIT,
VARCHAR,
XML,
Column,
PGEnum,
Table,
)
class User(Table):
id = Column(UUID(), primary=True)
bigint = Column(BIGINT(), primary=True)
bigserial = Column(BIGSERIAL())
bit = Column(BIT())
varbit = Column(VARBIT())
boolean = Column(BOOLEAN())
box = Column(BOX())
bytea = Column(BYTEA())
char = Column(CHAR())
varchar = Column(VARCHAR())
cidr = Column(CIDR())
circle = Column(CIRCLE())
date = Column(DATE())
double = Column(DOUBLE())
inet = Column(INET())
integer = Column(INTEGER())
interval = Column(INTERVAL())
json = Column(JSON())
jsonb = Column(JSONB())
line = Column(LINE())
lseg = Column(LSEG())
macaddr = Column(MACADDR())
macaddr8 = Column(MACADDR8())
money = Column(MONEY())
numeric = Column(NUMERIC())
path = Column(PATH())
pg_lsn = Column(PG_LSN())
pg_snapshot = Column(PG_SNAPSHOT())
point = Column(POINT())
polygon = Column(POLYGON())
real = Column(REAL())
smallint = Column(SMALLINT())
smallserial = Column(SMALLSERIAL())
serial = Column(SERIAL())
text = Column(TEXT())
time = Column(TIME())
timestamp = Column(TIMESTAMP())
tsquery = Column(TSQUERY())
tsvector = Column(TSVECTOR())
uuid = Column(UUID())
xml = Column(XML())
def test_create_table() -> None:
sql = User.create_table()
assert sql == inspect.cleandoc(
"""
CREATE TABLE IF NOT EXISTS "user" (
"id" UUID,
"bigint" BIGINT,
"bigserial" BIGSERIAL NOT NULL,
"bit" BIT NOT NULL,
"varbit" VARBIT NOT NULL,
"boolean" BOOLEAN NOT NULL,
"box" BOX NOT NULL,
"bytea" BYTEA NOT NULL,
"char" CHAR NOT NULL,
"varchar" VARCHAR NOT NULL,
"cidr" CIDR NOT NULL,
"circle" CIRCLE NOT NULL,
"date" DATE NOT NULL,
"double" DOUBLE PRECISION NOT NULL,
"inet" INET NOT NULL,
"integer" INTEGER NOT NULL,
"interval" INTERVAL NOT NULL,
"json" JSON NOT NULL,
"jsonb" JSONB NOT NULL,
"line" LINE NOT NULL,
"lseg" LSEG NOT NULL,
"macaddr" MACADDR NOT NULL,
"macaddr8" MACADDR8 NOT NULL,
"money" MONEY NOT NULL,
"numeric" NUMERIC NOT NULL,
"path" PATH NOT NULL,
"pg_lsn" PG_LSN NOT NULL,
"pg_snapshot" PG_SNAPSHOT NOT NULL,
"point" POINT NOT NULL,
"polygon" POLYGON NOT NULL,
"real" REAL NOT NULL,
"smallint" SMALLINT NOT NULL,
"smallserial" SMALLSERIAL NOT NULL,
"serial" SERIAL NOT NULL,
"text" TEXT NOT NULL,
"time" TIME NOT NULL,
"timestamp" TIMESTAMP NOT NULL,
"tsquery" TSQUERY NOT NULL,
"tsvector" TSVECTOR NOT NULL,
"uuid" UUID NOT NULL,
"xml" XML NOT NULL,
PRIMARY KEY (id, bigint)
);
"""
)
class TypeOptionsTable(Table):
char = Column(CHAR(1), null=True)
varchar = Column(VARCHAR(1), default="", null=True)
interval = Column(INTERVAL(fields="DAY TO SECOND", precision=1), null=True)
numeric = Column(NUMERIC(10, 2), null=True)
numeric_two = Column(NUMERIC(10), null=True)
time = Column(TIME(1, with_time_zone=True), null=True)
timestamp = Column(TIMESTAMP(1, with_time_zone=True), null=True)
def test_type_options() -> None:
sql = TypeOptionsTable.create_table()
assert sql == inspect.cleandoc(
"""
CREATE TABLE IF NOT EXISTS "type_options_table" (
"char" CHAR(1),
"varchar" VARCHAR(1) DEFAULT '',
"interval" INTERVAL DAY TO SECOND(1),
"numeric" NUMERIC(10, 2),
"numeric_two" NUMERIC(10),
"time" TIME(1) WITH TIME ZONE,
"timestamp" TIMESTAMP(1) WITH TIME ZONE
);
"""
)
with pytest.raises(ValueError):
Column(NUMERIC(scale=1))._create()
class ColumnOptionsTable(Table):
integer = Column(
INTEGER(),
check="integer > 0",
default=1,
index=True,
null=True,
unique=True,
)
fk = Column(foreign_key=User.id)
fk2 = Column(foreign_key=User.bigint)
def test_column_options() -> None:
sql = ColumnOptionsTable.create_table()
assert sql == inspect.cleandoc(
"""
CREATE TABLE IF NOT EXISTS "column_options_table" (
"integer" INTEGER DEFAULT 1 UNIQUE CHECK (integer > 0),
"fk" UUID NOT NULL,
"fk2" BIGINT NOT NULL,
FOREIGN KEY (fk, fk2) REFERENCES "user" (id, bigint)
);
CREATE INDEX ON "column_options_table" (integer);
"""
)
def test_enum() -> None:
class MyE(PGEnum):
A = "apple"
B = "bee"
assert MyE.pg_enum_get_create() == "CREATE TYPE MY_E AS ENUM ('apple', 'bee');"
class UsesEnum(Table):
col = Column(MyE, null=True)
assert UsesEnum.create_table() == inspect.cleandoc(
"""
CREATE TABLE IF NOT EXISTS "uses_enum" (
"col" MY_E
);
"""
)
| yezz123/pgqb | 7 | Typed Python PostgreSQL query builder ✨ | Python | yezz123 | Yasser Tahiri | Yezz LLC. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.