diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..26c0a716b368cd93a064460091744e45aa24d252 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +__pycache__/ +venv/ +.env +.idea/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..719d07b4ce290cf036875658ff744180abcbfb49 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,11 @@ +FROM python:3.10 + +WORKDIR /app + +COPY . . + +RUN pip install --no-cache-dir -r requirements.txt + +EXPOSE 7860 + +CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "7860"] diff --git a/README.md b/README.md index ed117ebf5afef6d327b4681c366ce2e15337c4ed..2e300b7c5977c3c520b9c64d13c9e6e426de1ccf 100644 --- a/README.md +++ b/README.md @@ -9,3 +9,5 @@ short_description: Yuvabe App Backend --- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference + +# YB's Wellness App FastAPI Backend diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000000000000000000000000000000000000..1b03b052481851c87ba0172887547b01006ca0d5 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,147 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. 
for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. 
+# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH +# hooks = ruff +# ruff.type = exec +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. 
+[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000000000000000000000000000000000000..98e4f9c44effe479ed38c66ba922e7bcc672916f --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000000000000000000000000000000000000..b9bbb7a289109a771a12d40bc4a5596fcb397424 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config, pool +from sqlmodel import SQLModel + +from alembic import context +from src.core import * +from src.core.config import settings + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config +config.set_main_option(name="sqlalchemy.url", value=settings.DATABASE_URL) +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = SQLModel.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. 
+ + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000000000000000000000000000000000000..6fb73b84bb7e4d52c88e4b053e17b0c50ba0a3f8 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,29 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/584a5111e60f_initial_migration.py b/alembic/versions/584a5111e60f_initial_migration.py new file mode 100644 index 0000000000000000000000000000000000000000..c75268195260b4911ca0541f91076693c53eeb1a --- /dev/null +++ b/alembic/versions/584a5111e60f_initial_migration.py @@ -0,0 +1,33 @@ +"""initial migration + +Revision ID: 584a5111e60f +Revises: +Create Date: 2025-11-10 23:50:23.367946 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision: str = '584a5111e60f' +down_revision: Union[str, Sequence[str], None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + pass + # ### end Alembic commands ### diff --git a/alembic/versions/b33e3b5b7af9_added_roles.py b/alembic/versions/b33e3b5b7af9_added_roles.py new file mode 100644 index 0000000000000000000000000000000000000000..bc4be572de4c439e48a1f5fccc24bad91e74af60 --- /dev/null +++ b/alembic/versions/b33e3b5b7af9_added_roles.py @@ -0,0 +1,33 @@ +"""Added roles + +Revision ID: b33e3b5b7af9 +Revises: e8066533b622 +Create Date: 2025-11-16 21:10:02.038255 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision: str = 'b33e3b5b7af9' +down_revision: Union[str, Sequence[str], None] = 'e8066533b622' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/alembic/versions/dd61202db14f_add_knowledgebase_chunk.py b/alembic/versions/dd61202db14f_add_knowledgebase_chunk.py new file mode 100644 index 0000000000000000000000000000000000000000..b9d36109dd55d9ff784e83e3bf3dfc9ee2a1b1e0 --- /dev/null +++ b/alembic/versions/dd61202db14f_add_knowledgebase_chunk.py @@ -0,0 +1,33 @@ +"""add: knowledgebase,chunk + +Revision ID: dd61202db14f +Revises: b33e3b5b7af9 +Create Date: 2025-11-17 23:28:11.537932 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. 
+revision: str = 'dd61202db14f' +down_revision: Union[str, Sequence[str], None] = 'b33e3b5b7af9' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/alembic/versions/e8066533b622_delete_user_verification_cols.py b/alembic/versions/e8066533b622_delete_user_verification_cols.py new file mode 100644 index 0000000000000000000000000000000000000000..dac6a5c018bff5e6aa3b641732ebaa6325498956 --- /dev/null +++ b/alembic/versions/e8066533b622_delete_user_verification_cols.py @@ -0,0 +1,49 @@ +"""delete:user/verification cols + +Revision ID: e8066533b622 +Revises: 584a5111e60f +Create Date: 2025-11-11 10:47:38.171691 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = "e8066533b622" +down_revision: Union[str, Sequence[str], None] = "584a5111e60f" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("users", "verification_token") + op.drop_column("users", "verification_expires_at") + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column( + "users", + sa.Column( + "verification_expires_at", + postgresql.TIMESTAMP(), + autoincrement=False, + nullable=True, + ), + ) + op.add_column( + "users", + sa.Column( + "verification_token", sa.VARCHAR(), autoincrement=False, nullable=True + ), + ) + # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..ae86b328f8c0306533e9947955efc6fc0ccac7e6 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,62 @@ +alembic==1.17.1 +annotated-doc==0.0.3 +annotated-types==0.7.0 +anyio==4.11.0 +asyncpg==0.30.0 +bcrypt==3.2.2 +certifi==2025.11.12 +cffi==2.0.0 +charset-normalizer==3.4.4 +click==8.3.0 +coloredlogs==15.0.1 +cryptography==46.0.3 +dnspython==2.8.0 +ecdsa==0.19.1 +email-validator==2.3.0 +fastapi==0.121.0 +filelock==3.20.0 +flatbuffers==25.9.23 +fsspec==2025.10.0 +greenlet==3.2.4 +h11==0.16.0 +hf-xet==1.2.0 +huggingface-hub==0.36.0 +humanfriendly==10.0 +idna==3.11 +Mako==1.3.10 +MarkupSafe==3.0.3 +mpmath==1.3.0 +numpy==2.3.5 +onnxruntime==1.23.2 +packaging==25.0 +passlib==1.7.4 +pgvector==0.4.1 +protobuf==6.33.1 +psycopg2-binary==2.9.11 +pyasn1==0.6.1 +pycparser==2.23 +pydantic==2.12.4 +pydantic-settings==2.12.0 +pydantic_core==2.41.5 +PyPDF2==3.0.1 +python-dotenv==1.2.1 +python-jose==3.5.0 +python-multipart==0.0.20 +PyYAML==6.0.3 +regex==2025.11.3 +requests==2.32.5 +rsa==4.9.1 +safetensors==0.6.2 +six==1.17.0 +sniffio==1.3.1 +SQLAlchemy==2.0.44 +sqlmodel==0.0.27 +starlette==0.49.3 +sympy==1.14.0 +tokenizers==0.22.1 +tqdm==4.67.1 +transformers==4.57.1 +typing-inspection==0.4.2 +typing_extensions==4.15.0 +urllib3==2.5.0 +uvicorn==0.38.0 diff --git a/src/auth/__init__.py b/src/auth/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/auth/config.py b/src/auth/config.py new file mode 100644 index 0000000000000000000000000000000000000000..f9da10fd75bd50fe3b9cb362157d3d00ff9e92ad 
--- /dev/null +++ b/src/auth/config.py @@ -0,0 +1,17 @@ +import os +from pydantic import BaseSettings +from dotenv import load_dotenv + + +class HomeSettings(BaseSettings): +    FEATURE_ENABLED: bool = True + + +home_settings = HomeSettings() + + +load_dotenv() +SECRET_KEY = os.getenv("SECRET_KEY") + +ALGORITHM = "HS256" +ACCESS_TOKEN_EXPIRE_MINUTES = 60 diff --git a/src/auth/constants.py b/src/auth/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..81abed5c8ca7c4cf25575214fcef34b33e492dee --- /dev/null +++ b/src/auth/constants.py @@ -0,0 +1,2 @@ +WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen" +EXIT_MESSAGE = "Thank you, have a wonderful day" \ No newline at end of file diff --git a/src/auth/dependencies.py b/src/auth/dependencies.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/auth/exceptions.py b/src/auth/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/auth/feed_db_script.py b/src/auth/feed_db_script.py new file mode 100644 index 0000000000000000000000000000000000000000..2c684ce0f47aa72ba0678a118808256ee359adea --- /dev/null +++ b/src/auth/feed_db_script.py @@ -0,0 +1,132 @@ +from src.auth.utils import hash_password +from datetime import date +from sqlmodel import Session + +from src.core.database import engine +from src.core.models import Users, Teams, Roles, UserTeamsRole + + +# ------------------------ +# 1. 
Seed Users +# ------------------------ +def seed_users(session: Session): + users = [ + Users( + email_id="ragul@yuvabe.com", + password=hash_password("Yuvabe"), + user_name="ragul", + dob=date(2001, 5, 21), + address="Chennai", + profile_picture="ragul.png", + ), + Users( + email_id="shri@yuvabe.com", + password=hash_password("Yuvabe"), + user_name="Shri", + dob=date(1999, 3, 14), + address="Chennai", + profile_picture="shri.png", + ), + Users( + email_id="hryuva@yuvabe.com", + password=hash_password("Yuvabe"), + user_name="Sathish", + dob=date(1998, 7, 10), + address="Chennai", + profile_picture="Sathish.png", + ), + Users( + email_id="hr2@yuvabe.com", + password=hash_password("Yuvabe"), + user_name="Deepika", + dob=date(1997, 2, 5), + address="Chennai", + profile_picture="deepika.png", + ), + ] + + session.add_all(users) + session.commit() + print("Users added.") + return users + + +# ------------------------ +# 2. Seed Teams +# ------------------------ +def seed_teams(session: Session): + teams = [ + Teams(name="Tech Team"), + Teams(name="HR Team"), + ] + session.add_all(teams) + session.commit() + print("Teams added.") + return teams + + +# ------------------------ +# 3. Seed Roles +# ------------------------ +def seed_roles(session: Session): + roles = [ + Roles(name="Developer"), + Roles(name="Team Lead"), + Roles(name="HR Manager"), + ] + session.add_all(roles) + session.commit() + print("Roles added.") + return roles + + +# ------------------------ +# 4. 
Map Users → Teams → Roles +# ------------------------ +def seed_user_teams_roles(session: Session, users, teams, roles): +    mappings = [ +        # Ragul → Tech Team → Developer +        UserTeamsRole( +            user_id=users[0].id, # Ragul +            team_id=teams[0].id, # Tech Team +            role_id=roles[0].id, # Developer +        ), +        # Shri → Tech Team → Team Lead +        UserTeamsRole( +            user_id=users[1].id, # Shri +            team_id=teams[0].id, # Tech Team +            role_id=roles[1].id, # Team Lead +        ), +        # HR Sathish +        UserTeamsRole( +            user_id=users[2].id, # Sathish +            team_id=teams[1].id, # HR Team +            role_id=roles[2].id, # HR Manager +        ), +        # HR Deepika +        UserTeamsRole( +            user_id=users[3].id, # Deepika +            team_id=teams[1].id, # HR Team +            role_id=roles[2].id, # HR Manager +        ), +    ] + +    session.add_all(mappings) +    session.commit() +    print("User-Team-Role mappings added.") + + +# ------------------------ +# 5. Master Runner +# ------------------------ +def run_all_seeds(): +    with Session(engine) as session: +        users = seed_users(session) +        teams = seed_teams(session) +        roles = seed_roles(session) +        seed_user_teams_roles(session, users, teams, roles) +        print("All data seeded successfully!") + + +if __name__ == "__main__": +    run_all_seeds() diff --git a/src/auth/models.py b/src/auth/models.py new file mode 100644 index 0000000000000000000000000000000000000000..d7564fed9110f55b871ced45b7673973e41bf43f --- /dev/null +++ b/src/auth/models.py @@ -0,0 +1,2 @@ +import uuid +import sqlmodel diff --git a/src/auth/router.py b/src/auth/router.py new file mode 100644 index 0000000000000000000000000000000000000000..b0ec5bc9312b6b8f2b8e61833ba7755580d60c9f --- /dev/null +++ b/src/auth/router.py @@ -0,0 +1,125 @@ +import uuid +from src.core.database import get_async_session +from fastapi import APIRouter, Depends, HTTPException, status +from jose import jwt, JWTError +from fastapi import APIRouter, Depends, HTTPException +from sqlmodel import Session +from sqlmodel.ext.asyncio.session import AsyncSession +from src.auth.service import ( 
create_user, + verify_email, + login_user, +) +from src.auth.utils import get_current_user +from src.core.models import Users +from src.core.config import settings +from fastapi.responses import RedirectResponse +from .schemas import SignUpRequest, LoginRequest, BaseResponse, SendVerificationRequest +from fastapi.security import OAuth2PasswordRequestForm +from src.auth.utils import create_access_token +from jose import jwt, JWTError + + +router = APIRouter(prefix="/auth", tags=["Auth"]) + + +@router.post("/signup", response_model=BaseResponse) +async def signup( + payload: SignUpRequest, session: AsyncSession = Depends(get_async_session) +): + try: + response = await create_user( + session, payload.name, payload.email, payload.password + ) + return {"code": 200, "data": response} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + + +# @router.post("/send-verification", response_model=BaseResponse) +# async def send_verification( +# payload: SendVerificationRequest, session: AsyncSession = Depends(get_async_session) +# ): +# if not payload.email: +# raise HTTPException(status_code=400, detail="Email is required") + +# response = await send_verification_link(session, payload.email) +# return {"code": 200, "data": response} + + +# @router.get("/verify-email", response_model=BaseResponse) +# async def verify_email_route( +# token: str, session: AsyncSession = Depends(get_async_session) +# ): +# response = await verify_email(session, token) +# access_token = response["access_token"] +# redirect_url = f"yuvabe://verified?token={access_token}" + +# return RedirectResponse(url=redirect_url) + + +@router.post("/login", response_model=BaseResponse) +async def login( + payload: LoginRequest, session: AsyncSession = Depends(get_async_session) +): + response = await login_user(session, payload.email, payload.password) + return {"code": 200, "data": response} + + +@router.post("/refresh", response_model=BaseResponse) +async def 
refresh_token(request: dict): + """Generate new access token using refresh token""" + refresh_token = request.get("refresh_token") + if not refresh_token: + raise HTTPException(status_code=400, detail="Refresh token is required") + + try: + payload = jwt.decode( + refresh_token, settings.SECRET_KEY, algorithms=[settings.JWT_ALGORITHM] + ) + if payload.get("type") != "refresh": + raise HTTPException(status_code=400, detail="Invalid refresh token") + + user_data = { + "sub": payload["sub"], + "name": payload.get("name"), + "email": payload.get("email"), + } + new_access_token = create_access_token(data=user_data) + return {"code": 200, "data": {"access_token": new_access_token}} + + except JWTError: + raise HTTPException(status_code=401, detail="Invalid or expired refresh token") + + +@router.get("/home", response_model=BaseResponse) +async def get_home( + user_id: str = Depends(get_current_user), + session: AsyncSession = Depends(get_async_session), +): + """ + Protected home endpoint. Requires a valid access token (Bearer). 
+ """ + user = await session.get(Users, uuid.UUID(user_id)) + if not user: + raise HTTPException(status_code=404, detail="User not found") + + # Example payload — replace with your real app data + return { + "code": 200, + "data": { + "message": f"Welcome to Home, {user.user_name}!", + "user": { + "id": str(user.id), + "name": user.user_name, + "email": user.email_id, + "is_verified": user.is_verified, + "dob": user.dob.isoformat() if user.dob else None, + "profile_picture": user.profile_picture + }, + "home_data": { + "announcements": ["Welcome!", "New protocol released"], + "timestamp": user.created_at.isoformat() if user.created_at else None, + }, + }, + } diff --git a/src/auth/schemas.py b/src/auth/schemas.py new file mode 100644 index 0000000000000000000000000000000000000000..1bccecb2206e12d66e6b03c2ebb10de3646e8306 --- /dev/null +++ b/src/auth/schemas.py @@ -0,0 +1,38 @@ +from pydantic import BaseModel ,EmailStr +from typing import Optional, Union, Dict + + +class SignUpRequest(BaseModel): + name: str + email: str + password: str + + +class VerifyOtpRequest(BaseModel): + email: str + otp: str + + +class LoginRequest(BaseModel): + email: str + password: str + +class SendVerificationRequest(BaseModel): + email: EmailStr + + +class UserResponse(BaseModel): + id: str + name: str + email: str + + +class LoginResponseData(BaseModel): + access_token: str + token_type: str + user: UserResponse + + +class BaseResponse(BaseModel): + code: int + data: Optional[Union[Dict, str, None]] = None diff --git a/src/auth/service.py b/src/auth/service.py new file mode 100644 index 0000000000000000000000000000000000000000..fe558dfc28d41d2ca1832c28613bfe1957d1639e --- /dev/null +++ b/src/auth/service.py @@ -0,0 +1,156 @@ +import uuid +from src.auth.utils import ( + # send_otp_email, + verify_password, + create_refresh_token, + verify_verification_token, + create_access_token, + hash_password, + create_verification_token, +) +from src.core.models import Users +from sqlmodel import 
Session, select +from fastapi import HTTPException +from sqlmodel.ext.asyncio.session import AsyncSession + + +async def create_user(session: AsyncSession, name: str, email: str, password: str): +    """Create user without sending email""" + +    if not email.lower().endswith("@yuvabe.com"): +        raise HTTPException(status_code=400, detail="Enter your Yuvabe email ID") + +    user = await session.exec(select(Users).where(Users.email_id == email)) +    existing_user = user.first() +    if existing_user: +        raise ValueError("User already exists") + +    new_user = Users( +        user_name=name, +        email_id=email, +        password=hash_password(password), +        is_verified=True, +    ) + +    session.add(new_user) +    await session.commit() +    await session.refresh(new_user) + +    access_token = create_access_token( +        data={ +            "sub": str(new_user.id), +            "name": new_user.user_name, +            "email": new_user.email_id, +        } +    ) + +    refresh_token = create_refresh_token( +        data={ +            "sub": str(new_user.id), +            "name": new_user.user_name, +            "email": new_user.email_id, +        } +    ) + +    return { +        "message": "User created successfully", +        "user_id": str(new_user.id), +        "access_token": access_token, +        "refresh_token": refresh_token, +    } + + +# async def send_verification_link(session: Session, email: str): +#     """Send verification email for an existing user.""" +#     result = await session.exec(select(Users).where(Users.email_id == email)) +#     user = result.first() + +#     if not user: +#         raise HTTPException(status_code=404, detail="User not found") + +#     if user.is_verified: +#         raise HTTPException(status_code=400, detail="User is already verified") + +#     # Create a token using existing user ID (opaque token) +#     token = create_verification_token(str(user.id)) + +#     try: +#         send_verification_email(email, token) +#     except Exception as e: +#         raise HTTPException( +#             status_code=500, detail=f"Failed to send verification email: {str(e)}" +#         ) + +#     return { +#         "message": "Verification link sent successfully", +#         "user_id": str(user.id), +#         "email": 
user.email_id, +#     } + + +async def verify_email(session: Session, token: str): +    try: +        user_id = await verify_verification_token(token) +    except ValueError as e: +        raise HTTPException(status_code=400, detail=str(e)) + +    user = await session.get(Users, uuid.UUID(user_id)) +    if not user: +        raise HTTPException(status_code=404, detail="User not found") + +    if not user.is_verified: +        user.is_verified = True +        await session.commit() + +    access_token = create_access_token( +        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id} +    ) + +    refresh_token = create_refresh_token( +        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id} +    ) + +    return { +        "message": "Email verified successfully!", +        "access_token": access_token, +        "refresh_token": refresh_token, +        "token_type": "bearer", +    } + + +async def login_user(session: Session, email: str, password: str): + +    if not email.lower().endswith("@yuvabe.com"): +        raise HTTPException(status_code=400, detail="Enter your Yuvabe email ID") + +    users = await session.exec(select(Users).where(Users.email_id == email)) +    user = users.first() + +    if not user: +        raise HTTPException(status_code=400, detail="Invalid email or password") + +    if not verify_password(password, user.password): +        raise HTTPException(status_code=400, detail="Invalid email or password") + +    if not user.is_verified: +        raise HTTPException(status_code=400, detail="Verify email to login") + +    access_token = create_access_token( +        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id} +    ) + +    refresh_token = create_refresh_token( +        data={"sub": str(user.id), "name": user.user_name, "email": user.email_id} +    ) + +    return { +        "access_token": access_token, +        "refresh_token": refresh_token, +        "token_type": "bearer", +        "user": { +            "id": str(user.id), +            "name": user.user_name, +            "email": user.email_id, +            "is_verified": user.is_verified, +        }, +    } diff --git a/src/auth/utils.py new file mode 
100644 index 0000000000000000000000000000000000000000..f0f2ef670124329757be7c81229ae109099f9609 --- /dev/null +++ b/src/auth/utils.py @@ -0,0 +1,206 @@ +import json +import smtplib +import os +import uuid +from email.mime.text import MIMEText +import logging +import traceback +from passlib.context import CryptContext +from src.core.database import get_async_session +from sqlmodel.ext.asyncio.session import AsyncSession +from jose import jwt, JWTError +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from datetime import datetime, timedelta +from cryptography.fernet import Fernet, InvalidToken +from fastapi import Depends, HTTPException, status +from src.core.models import Users +from src.core.config import settings + + +SECRET_KEY = settings.SECRET_KEY +ALGORITHM = settings.JWT_ALGORITHM +ACCESS_TOKEN_EXPIRE_MINUTES = settings.JWT_EXPIRE +logger = logging.getLogger(__name__) + +SMTP_SERVER = settings.EMAIL_SERVER +SMTP_PORT = settings.EMAIL_PORT +SMTP_EMAIL = settings.EMAIL_USERNAME +SMTP_PASSWORD = settings.EMAIL_PASSWORD + +FERNET_KEY = settings.FERNET_KEY +VERIFICATION_BASE_URL = settings.VERIFICATION_BASE_URL + + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + + +def hash_password(password: str) -> str: + """Encrypt plain password into hashed password""" + return pwd_context.hash(password) + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + """Compare plain password with stored hash""" + return pwd_context.verify(plain_password, hashed_password) + + +def create_access_token(data: dict): + """Create JWT token with expiry""" + to_encode = data.copy() + expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) + to_encode.update({"exp": expire}) + encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) + return encoded_jwt + + +# def send_verification_email(to_email: str, token: str): +# """Send verification email using smtplib with detailed debug logs.""" +# 
subject = f"Verify your {settings.APP_NAME} Account" +# verification_link = f"{VERIFICATION_BASE_URL}/auth/verify-email?token={token}" + +# body = f""" +# Hi, + +# Please verify your {settings.APP_NAME} account by clicking the link below: +# {verification_link} + +# This link will expire in 24 hours. + +# Regards, +# {settings.APP_NAME} Team +# """ + +# msg = MIMEText(body) +# msg["Subject"] = subject +# msg["From"] = SMTP_EMAIL +# msg["To"] = to_email + +# logger.info("🟢 Starting send_verification_email()") +# logger.info(f"📨 To: {to_email}") +# logger.info(f"📤 SMTP Server: {SMTP_SERVER}:{SMTP_PORT}") + +# try: +# logger.info("🔌 Connecting to SMTP server...") +# with smtplib.SMTP(SMTP_SERVER, SMTP_PORT, timeout=30) as server: +# logger.info("✅ Connected successfully.") + +# logger.info("🔒 Starting TLS...") +# server.starttls() +# logger.info("✅ TLS secured.") + +# logger.info("🔑 Logging in to SMTP server...") +# server.login(SMTP_EMAIL, SMTP_PASSWORD) +# logger.info("✅ Logged in successfully.") + +# # Send email +# logger.info("📧 Sending email message...") +# server.send_message(msg) +# logger.info(f"✅ Email successfully sent to {to_email}") + +# except smtplib.SMTPAuthenticationError as e: +# logger.error("❌ Authentication failed — check email or app password.") +# logger.error(f"Error details: {e}") +# logger.error(traceback.format_exc()) +# raise +# except smtplib.SMTPConnectError as e: +# logger.error("❌ Could not connect to SMTP server.") +# logger.error(f"Error details: {e}") +# logger.error(traceback.format_exc()) +# raise +# except smtplib.SMTPRecipientsRefused as e: +# logger.error("❌ Recipient address refused.") +# logger.error(f"Error details: {e}") +# logger.error(traceback.format_exc()) +# raise +# except smtplib.SMTPException as e: +# logger.error("❌ General SMTP error occurred.") +# logger.error(f"Error details: {e}") +# logger.error(traceback.format_exc()) +# raise +# except Exception as e: +# logger.error("❌ Unknown error occurred while sending 
fernet = Fernet(FERNET_KEY.encode())


def create_verification_token(user_id: str, expires_in_hours: int = 24) -> str:
    """Create an encrypted, self-expiring email-verification token.

    Bug fix: expiry is computed with timezone-aware UTC.  The old code used
    ``datetime.utcnow().timestamp()``, which interprets the naive value in
    the *local* timezone, skewing token expiry by the server's UTC offset.
    """
    from datetime import timezone  # local: module only imports datetime/timedelta

    expires_at = datetime.now(timezone.utc) + timedelta(hours=expires_in_hours)
    payload = {"sub": user_id, "exp": expires_at.timestamp()}
    return fernet.encrypt(json.dumps(payload).encode()).decode()


async def verify_verification_token(token: str) -> str:
    """Decrypt *token* and return the embedded user id.

    Raises:
        ValueError: token is not a valid Fernet token, or has expired.
    """
    from datetime import timezone

    try:
        decrypted = fernet.decrypt(token.encode())
    except InvalidToken:
        raise ValueError("Invalid verification link")

    data = json.loads(decrypted.decode())

    # Compare epoch seconds directly — no naive/aware datetime mixing.
    if datetime.now(timezone.utc).timestamp() > data["exp"]:
        raise ValueError("Verification link expired")

    return data["sub"]


bearer_scheme = HTTPBearer()


def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
):
    """FastAPI dependency: decode the bearer JWT and return its ``sub`` claim.

    Raises:
        HTTPException 401: token missing the user id, invalid, or expired.
    """
    try:
        payload = jwt.decode(
            credentials.credentials, SECRET_KEY, algorithms=[ALGORITHM]
        )
    except JWTError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
        )

    user_id: str = payload.get("sub")
    if user_id is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token: missing user id",
        )
    return user_id
def create_refresh_token(data: dict, expires_days: int = 7):
    """Create a long-lived JWT refresh token carrying a ``type: refresh`` claim.

    Uses timezone-aware UTC for the ``exp`` claim; ``datetime.utcnow()`` is
    deprecated (Python 3.12+) and naive values invite local/UTC mix-ups.

    Args:
        data: claims to embed (caller supplies ``sub``/``name``/``email``).
        expires_days: token lifetime in days (default 7).
    """
    from datetime import timezone  # local: module only imports datetime/timedelta

    to_encode = data.copy()
    to_encode.update(
        {
            "exp": datetime.now(timezone.utc) + timedelta(days=expires_days),
            "type": "refresh",
        }
    )
    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
# NOTE: the ONNX inference path is currently disabled; embed_text returns raw
# token ids, NOT embedding vectors.  To restore real embeddings, obtain
# model.onnx_data from
# https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/tree/main/onnx
# (or load via AutoModel.from_pretrained("onnx-community/embeddinggemma-300m-ONNX")).

import os
from typing import List

import numpy as np
from transformers import AutoTokenizer

BASE_DIR = os.path.dirname(__file__)

TOKENIZER_DIR = "onnx-community/embeddinggemma-300m-ONNX"


class EmbeddingModel:
    """Thin wrapper around the HF tokenizer; ONNX session code is disabled."""

    def __init__(self):
        self.tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR)

    async def embed_text(self, text: str, max_length: int = 512) -> List[float]:
        """Tokenize *text* and return the flat list of token ids.

        WARNING: these are token ids, not 768-dim embedding vectors — storing
        them in a Vector(768) column and doing semantic search will not be
        meaningful until the ONNX session is re-enabled.
        """
        encoded = self.tokenizer(
            text,
            return_tensors="np",
            truncation=True,
            padding="longest",
            max_length=max_length,
        )

        input_ids = encoded["input_ids"].astype(np.int64)
        # Kept for parity with the disabled ONNX path, which needed the mask.
        attention_mask = encoded.get(
            "attention_mask", np.ones_like(input_ids)
        ).astype(np.int64)

        return input_ids.flatten().tolist()


def cleanup(self):
    """Release the ONNX runtime session on *self*, if one exists.

    Bug fix: the original unconditionally read ``self.session``, an attribute
    that is never set while the ONNX code is commented out, so any call
    raised AttributeError.  Now guarded with ``getattr``.
    """
    if getattr(self, "session", None) is not None:
        self.session = None
        print("ONNX runtime session closed.")


embedding_model = EmbeddingModel()


async def test_tokenizer():
    """Ad-hoc manual check of the tokenizer path (prints token ids)."""
    text = "What does the company telll about moonlighting"
    tokens = await embedding_model.embed_text(text)
    print("Tokenized text:", tokens)
a/src/chatbot/router.py b/src/chatbot/router.py new file mode 100644 index 0000000000000000000000000000000000000000..e74221f83ff221282026a9da258d9df727b6723f --- /dev/null +++ b/src/chatbot/router.py @@ -0,0 +1,111 @@ +import os +import shutil +import tempfile +from typing import Optional + +from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile +from sqlalchemy import text +from sqlmodel.ext.asyncio.session import AsyncSession + +from src.core.database import get_async_session + +from .embedding import embedding_model +from .schemas import ( + SemanticSearchRequest, + SemanticSearchResult, + TokenizeRequest, + TokenizeResponse, + UploadKBResponse, +) +from .service import process_pdf_and_store + +router = APIRouter(prefix="/chatbot", tags=["chatbot"]) + + +# before hitting this endpoint make sure the model.data & model.onnx_data is available on the asset/onnx folder +@router.post("/upload-pdf", response_model=UploadKBResponse) +async def upload_pdf( + file: UploadFile = File(...), + name: str = Form(...), + description: Optional[str] = Form(None), + session: AsyncSession = Depends(get_async_session), +): + if not file.filename.endswith(".pdf"): + raise HTTPException( + status_code=400, detail="Only PDF files are supported for now." 
@router.post("/tokenize", response_model=TokenizeResponse)
async def tokenize_text(payload: TokenizeRequest):
    """Tokenize raw text and return HF ``input_ids`` / ``attention_mask``."""
    try:
        encoded = embedding_model.tokenizer(
            payload.text,
            return_tensors="np",
            truncation=True,
            padding="longest",
            max_length=512,
        )

        return TokenizeResponse(
            input_ids=encoded["input_ids"][0].tolist(),
            attention_mask=encoded["attention_mask"][0].tolist(),
        )

    except Exception as e:
        # Surface tokenizer failures as a 500 rather than an unhandled error.
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/semantic-search", response_model=list[SemanticSearchResult])
async def semantic_search(
    payload: SemanticSearchRequest, session: AsyncSession = Depends(get_async_session)
):
    """Nearest-neighbour search over knowledge_chunk by pgvector distance.

    Bug fix: SQLModel's ``exec()`` accepts bind parameters keyword-only, so
    the original positional dict raised TypeError at runtime.  Textual SQL is
    now run through ``execute()``, the SQLAlchemy-level API for ``text()``
    clauses, with the parameter dict passed explicitly.
    """
    if len(payload.embedding) == 0:
        raise HTTPException(status_code=400, detail="Embedding cannot be empty.")

    top_k = payload.top_k or 3

    sql = text(
        """
        SELECT id, kb_id, chunk_text, embedding <=> :query_vec AS score
        FROM knowledge_chunk
        ORDER BY embedding <=> :query_vec
        LIMIT :top_k
        """
    )

    # NOTE(review): pgvector may require the parameter to be cast to `vector`
    # (e.g. `CAST(:query_vec AS vector)`) when bound as a plain Python list —
    # confirm against the asyncpg driver in use.
    result = await session.execute(
        sql, {"query_vec": payload.embedding, "top_k": top_k}
    )
    rows = result.fetchall()

    return [
        SemanticSearchResult(
            chunk_id=str(r.id),
            kb_id=str(r.kb_id),
            text=r.chunk_text,
            score=float(r.score),
        )
        for r in rows
    ]
+import uuid +from typing import List, Optional + +from pydantic import BaseModel + + +class UploadKBResponse(BaseModel): + kb_id: uuid.UUID + name: str + chunks_stored: int + + +class UploadKBRequest(BaseModel): + name: str + description: Optional[str] = None + + +class TokenizeRequest(BaseModel): + text: str + + +class TokenizeResponse(BaseModel): + input_ids: List[int] + attention_mask: List[int] + + +class SemanticSearchRequest(BaseModel): + embedding: List[float] + top_k: Optional[int] = 3 + + +class SemanticSearchResult(BaseModel): + chunk_id: str + kb_id: str + text: str + score: float diff --git a/src/chatbot/service.py b/src/chatbot/service.py new file mode 100644 index 0000000000000000000000000000000000000000..511bc7cb40f508a3dfeecb568831788c4d55bcae --- /dev/null +++ b/src/chatbot/service.py @@ -0,0 +1,45 @@ +import os + +from sqlmodel.ext.asyncio.session import AsyncSession + +from .embedding import embedding_model +from .models import KnowledgeBase, KnowledgeChunk +from .utils import ( + chunk_sentences_with_overlap, + extract_text_from_pdf_fileobj, + split_into_sentences, +) + +DEFAULT_MAX_WORDS = int(os.getenv("CHUNK_MAX_WORDS", "200")) +DEFAULT_OVERLAP = int(os.getenv("CHUNK_OVERLAP_WORDS", "40")) + + +async def process_pdf_and_store( + fileobj, kb_name: str, kb_description: str | None, session: AsyncSession +): + raw_text = extract_text_from_pdf_fileobj(fileobj) + + sentences = split_into_sentences(raw_text) + + chunks = chunk_sentences_with_overlap( + sentences, max_words=DEFAULT_MAX_WORDS, overlap_words=DEFAULT_OVERLAP + ) + + kb = KnowledgeBase(name=kb_name, description=kb_description) + session.add(kb) + await session.commit() + await session.refresh(kb) + + chunk_objs = [] + for idx, chunk_text in enumerate(chunks): + emb = await embedding_model.embed_text(chunk_text) + + chunk = KnowledgeChunk( + kb_id=kb.id, chunk_index=idx, chunk_text=chunk_text, embedding=emb + ) + session.add(chunk) + chunk_objs.append(chunk) + + await session.commit() + 
import re
from typing import List


def clean_text(text: str) -> str:
    """Normalise whitespace and punctuation noise in PDF-extracted text."""
    text = re.sub(r'\s+', ' ', text)              # collapse all whitespace runs
    text = re.sub(r'\s+([,.!?;:])', r'\1', text)  # drop space before punctuation
    text = re.sub(r'[_\-]{2,}', ' ', text)        # underscore/dash rules -> space
    text = re.sub(r'\.{2,}', '.', text)           # ellipses / dot leaders -> '.'
    text = re.sub(r'\s{2,}', ' ', text)           # re-collapse spaces added above
    return text.strip()


def extract_text_from_pdf_fileobj(fileobj) -> str:
    """Extract and clean the text of every page of an open PDF file object.

    PyPDF2 is imported lazily so the rest of this module (pure-Python text
    utilities) stays importable without the dependency.  NOTE(review): PyPDF2
    is deprecated upstream in favour of `pypdf` — consider migrating.
    """
    import PyPDF2

    reader = PyPDF2.PdfReader(fileobj)
    all_text = []
    for page in reader.pages:
        page_text = page.extract_text()
        if page_text:  # extract_text() may return None/"" for image-only pages
            all_text.append(page_text)
    return clean_text(" ".join(all_text))


def split_into_sentences(text: str) -> List[str]:
    """Split *text* on whitespace that follows sentence-ending punctuation."""
    sentences = re.split(r'(?<=[.!?])\s+', text)
    return [s.strip() for s in sentences if s.strip()]


def chunk_sentences_with_overlap(
    sentences: List[str], max_words: int = 200, overlap_words: int = 40
) -> List[str]:
    """Greedily pack sentences into ~*max_words* chunks with a word overlap.

    When a chunk fills up it is flushed and the last *overlap_words* words are
    carried into the next chunk, so context spans chunk boundaries.  A single
    sentence longer than *max_words* still goes into one chunk (sentences are
    never split mid-way).
    """
    chunks: List[str] = []
    current: List[str] = []
    current_len = 0

    for sentence in sentences:
        wc = len(sentence.split())

        if current and current_len + wc > max_words:
            chunks.append(" ".join(current))

            if overlap_words > 0:
                carried = " ".join(" ".join(current).split()[-overlap_words:])
                current = [carried] if carried else []
                current_len = len(carried.split())
            else:
                current = []
                current_len = 0

        current.append(sentence)
        current_len += wc

    if current:
        chunks.append(" ".join(current))

    return chunks
auth_models +from src.chatbot import models as chatbot_models +from src.core import models as core_models +from src.feed import models as feed_models +from src.home import models as home_models +from src.profile import models as profile_models diff --git a/src/core/config.py b/src/core/config.py new file mode 100644 index 0000000000000000000000000000000000000000..e4a4d1e876a70a5d076bd5f698df181b83710b42 --- /dev/null +++ b/src/core/config.py @@ -0,0 +1,51 @@ +from pydantic import PostgresDsn, computed_field +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class SMTPConfig(BaseSettings): + server: str + port: int + username: str + password: str + + +class Settings(BaseSettings): + + JWT_ALGORITHM: str + JWT_EXPIRE: int + SECRET_KEY: str + + POSTGRES_USER: str + POSTGRES_PASSWORD: str + POSTGRES_HOST: str + POSTGRES_DB: str + + APP_NAME: str + ENV: str + DEBUG: bool + PORT: int + + EMAIL_SERVER: str + EMAIL_PORT: int + EMAIL_USERNAME: str + EMAIL_PASSWORD: str + + FERNET_KEY: str + VERIFICATION_BASE_URL: str + + @computed_field + @property + def DATABASE_URL(self) -> PostgresDsn: + """Sync DB URL""" + return f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}/{self.POSTGRES_DB}" + + @computed_field + @property + def ASYNC_DATABASE_URL(self) -> PostgresDsn: + """Async DB URL""" + return f"postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}/{self.POSTGRES_DB}" + + model_config = SettingsConfigDict(env_file=".env", case_sensitive=False) + + +settings = Settings() diff --git a/src/core/database.py b/src/core/database.py new file mode 100644 index 0000000000000000000000000000000000000000..e27452699319f2887b1a8764479763536c1c5c14 --- /dev/null +++ b/src/core/database.py @@ -0,0 +1,38 @@ +from typing import AsyncGenerator + +from dotenv import load_dotenv +from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine +from sqlmodel import SQLModel, create_engine +from 
sqlmodel.ext.asyncio.session import AsyncSession + +from src.core import * +from src.core.config import settings + +load_dotenv() + +engine = create_engine( + settings.DATABASE_URL, echo=True +) # to false on prod just to chcek for now + +async_engine = create_async_engine( + url=settings.ASYNC_DATABASE_URL, future=True, connect_args={"ssl": True} +) + +async_session = async_sessionmaker( + class_=AsyncSession, bind=async_engine, expire_on_commit=False +) + + +def init_db(): + SQLModel.metadata.create_all(engine) + + +async def get_async_session() -> AsyncGenerator[AsyncSession, None]: + async with async_session() as session: + yield session + + +if __name__ == "__main__": + print("Table creating") + init_db() + print("Table Created successfully!") diff --git a/src/core/exceptions.py b/src/core/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/core/models.py b/src/core/models.py new file mode 100644 index 0000000000000000000000000000000000000000..c24f05bedb6b553d4b6dca230866ab03a12a1a14 --- /dev/null +++ b/src/core/models.py @@ -0,0 +1,74 @@ +import uuid +from datetime import date, datetime +from enum import Enum +from typing import List, Optional + +from sqlalchemy import CheckConstraint, UniqueConstraint +from sqlmodel import Field, Relationship, SQLModel + + +class AssetStatus(str, Enum): + ACTIVE = "Active" + UNAVAILABLE = "Unavailable" + ON_REQUEST = "On Request" + IN_SERVICE = "In Service" + + +class Users(SQLModel, table=True): + __tablename__ = "users" + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + email_id: str = Field(unique=True, nullable=False) + password: str = Field(nullable=False) + user_name: str = Field(nullable=False) + is_verified: bool = Field( + default=False, sa_column_kwargs={"server_default": "false"} + ) + dob: Optional[date] = None + address: Optional[str] = None + profile_picture: Optional[str] = None + created_at: datetime 
= Field(default_factory=datetime.now) + asset: List["Assets"] = Relationship(back_populates="user") + + +class Teams(SQLModel, table=True): + __tablename__ = "teams" + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + name: str = Field(unique=True, nullable=False) + + +class Roles(SQLModel, table=True): + __tablename__ = "roles" + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + name: str = Field(unique=True, nullable=False) + + +class UserTeamsRole(SQLModel, table=True): + __tablename__ = "user_teams_role" + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False) + team_id: uuid.UUID = Field(foreign_key="teams.id", nullable=False) + role_id: uuid.UUID = Field(foreign_key="roles.id", nullable=False) + + +class Assets(SQLModel, table=True): + __tablename__ = "assets" + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False) + name: str = Field(nullable=False) + type: str = Field(nullable=False) + status: AssetStatus = Field(default=AssetStatus.UNAVAILABLE) + user: "Users" = Relationship(back_populates="asset") + + +class EmotionLogs(SQLModel, table=True): + __tablename__ = "emotion_logs" + __table_args__ = ( + UniqueConstraint("user_id", "log_date"), + CheckConstraint("morning_emotion BETWEEN 1 AND 7 or morning_emotion IS NULL"), + CheckConstraint("evening_emotion BETWEEN 1 AND 7 or evening_emotion IS NULL"), + ) + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False) + morning_emotion: Optional[int] = Field(default=None, ge=1, le=7) + evening_emotion: Optional[int] = Field(default=None, ge=1, le=7) + log_date: date = Field(default_factory=date.today) diff --git a/src/core/pagination.py b/src/core/pagination.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/core/schemas.py b/src/core/schemas.py new file mode 100644 index 0000000000000000000000000000000000000000..a057505b88d99c2afe9ae2ac244459b6f0835cc8 --- /dev/null +++ b/src/core/schemas.py @@ -0,0 +1,10 @@ +from typing import Generic, TypeVar + +from pydantic import BaseModel + +T = TypeVar("T") + + +class BaseResponse(BaseModel, Generic[T]): + status_code: int + data: T diff --git a/src/core/temp_feed_db.py b/src/core/temp_feed_db.py new file mode 100644 index 0000000000000000000000000000000000000000..8cb3acfa18fcf2c4840fd33a75f62d7e3dbf4ff7 --- /dev/null +++ b/src/core/temp_feed_db.py @@ -0,0 +1,144 @@ +from datetime import date + +from sqlmodel import Session + +from src.core.database import engine +from src.core.models import Assets, EmotionLogs, Roles, Teams, Users, UserTeamsRole +from src.feed.models import Comments, Likes, Posts + + +def seed_users(session: Session): + users = [ + Users( + email_id="tilak@example.com", + password="hashed_pass1", + user_name="Tilak", + dob=date(2001, 5, 21), + address="Chennai", + profile_picture="tilak.png", + ), + Users( + email_id="arun@example.com", + password="hashed_pass2", + user_name="Arun", + dob=date(2000, 8, 15), + address="Bangalore", + profile_picture="arun.png", + ), + ] + session.add_all(users) + session.commit() + print("Users added.") + return users + + +def seed_teams(session: Session): + teams = [ + Teams(name="Development"), + Teams(name="Marketing"), + Teams(name="Design"), + ] + session.add_all(teams) + session.commit() + print("Teams added.") + return teams + + +def seed_roles(session: Session): + roles = [ + Roles(name="Admin"), + Roles(name="Member"), + Roles(name="Lead"), + ] + session.add_all(roles) + session.commit() + print("Roles added.") + return roles + + +def seed_user_teams_roles(session: Session, users, teams, roles): + mappings = [ + UserTeamsRole(user_id=users[0].id, team_id=teams[0].id, 
def seed_assets(session: Session, users):
    """Insert one demo asset per demo user and return them."""
    assets = [
        Assets(user_id=users[0].id, name="MacBook Pro", type="Laptop"),
        Assets(user_id=users[1].id, name="Dell Monitor", type="Monitor"),
    ]
    session.add_all(assets)
    session.commit()
    print("Assets added.")
    return assets


def seed_emotion_logs(session: Session, users):
    """Insert demo emotion logs.

    Bug fix: the original inserted emotion values of 8, which violate both the
    EmotionLogs model validation (ge=1, le=7) and the table CHECK constraint
    ("morning_emotion BETWEEN 1 AND 7"), so the seed would fail.  All values
    are now within 1-7.
    """
    logs = [
        EmotionLogs(user_id=users[0].id, morning_emotion=7, evening_emotion=6),
        EmotionLogs(user_id=users[1].id, morning_emotion=7, evening_emotion=7),
    ]
    session.add_all(logs)
    session.commit()
    print("Emotion logs added.")


def seed_posts(session: Session, users):
    """Insert two demo posts and return them for use by likes/comments."""
    posts = [
        Posts(
            user_id=users[0].id,
            caption="New sprint kickoff!",
            image="sprint.png",
        ),
        Posts(
            user_id=users[1].id,
            caption="Design updates rolling out soon!",
            image="design.png",
        ),
    ]
    session.add_all(posts)
    session.commit()
    print("Posts added.")
    return posts


def seed_likes(session: Session, users, posts):
    """Insert cross-likes (each demo user likes the other's post)."""
    likes = [
        Likes(user_id=users[0].id, post_id=posts[1].id),
        Likes(user_id=users[1].id, post_id=posts[0].id),
    ]
    session.add_all(likes)
    session.commit()
    print("Likes added.")


def seed_comments(session: Session, users, posts):
    """Insert one demo comment from each user on the other's post."""
    comments = [
        Comments(user_id=users[0].id, post_id=posts[1].id, comment="Looks great!"),
        Comments(user_id=users[1].id, post_id=posts[0].id, comment="Can’t wait!"),
    ]
    session.add_all(comments)
    session.commit()
    print("Comments added.")
seed_comments(session, users, posts) + print("All data seeded successfully!") + + +if __name__ == "__main__": + run_all_seeds() diff --git a/src/feed/__init__.py b/src/feed/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/feed/config.py b/src/feed/config.py new file mode 100644 index 0000000000000000000000000000000000000000..45c6926deaffb56f16e914168daf0d2a6db7ff35 --- /dev/null +++ b/src/feed/config.py @@ -0,0 +1,6 @@ +from pydantic import BaseSettings + +class HomeSettings(BaseSettings): + FEATURE_ENABLED: bool = True + +home_settings = HomeSettings() \ No newline at end of file diff --git a/src/feed/constants.py b/src/feed/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..81abed5c8ca7c4cf25575214fcef34b33e492dee --- /dev/null +++ b/src/feed/constants.py @@ -0,0 +1,2 @@ +WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen" +EXIT_MESSAGE = "Thank You have a wonderful day" \ No newline at end of file diff --git a/src/feed/dependencies.py b/src/feed/dependencies.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/feed/exceptions.py b/src/feed/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/feed/models.py b/src/feed/models.py new file mode 100644 index 0000000000000000000000000000000000000000..4798a6bdd3fed1334492a7789ccd6a3ed2a461f3 --- /dev/null +++ b/src/feed/models.py @@ -0,0 +1,49 @@ +import uuid +from datetime import datetime +from enum import Enum +from typing import Optional + +from sqlalchemy import UniqueConstraint +from sqlmodel import Field, SQLModel + + +class PostType(str, Enum): + BIRTHDAY = "Birthday" + NOTICE = "Notice" + BANNER = "Banner" + JOB_REQUEST = "Job Request" + + +class PostCategory(str, Enum): + TEAM = "Team" + GLOBAL = "Global" + + +class 
Posts(SQLModel, table=True): + __tablename__ = "posts" + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False) + type: PostType = Field(default=PostType.NOTICE) + category: PostCategory = Field(default=PostCategory.GLOBAL) + caption: Optional[str] = None + image: Optional[str] = None + created_at: datetime = Field(default_factory=datetime.now, nullable=False) + edited_at: datetime = Field(default_factory=datetime.now) + + +class Comments(SQLModel, table=True): + __tablename__ = "comments" + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + post_id: uuid.UUID = Field(foreign_key="posts.id", nullable=False) + user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False) + comment: str = Field(nullable=False) + created_at: datetime = Field(default_factory=datetime.now, nullable=False) + + +class Likes(SQLModel, table=True): + __tablename__ = "likes" + __table_args__ = (UniqueConstraint("user_id", "post_id"),) + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + post_id: uuid.UUID = Field(foreign_key="posts.id", nullable=False) + user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False) + liked_at: datetime = Field(default_factory=datetime.now, nullable=False) diff --git a/src/feed/router.py b/src/feed/router.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/feed/schemas.py b/src/feed/schemas.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/feed/service.py b/src/feed/service.py new file mode 100644 index 0000000000000000000000000000000000000000..615072994b47208d80caa8d3bb8d9c5030ee1154 --- /dev/null +++ b/src/feed/service.py @@ -0,0 +1,2 @@ +from typing import List +from uuid import UUID diff --git a/src/feed/utils.py b/src/feed/utils.py new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/home/__init__.py b/src/home/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/home/config.py b/src/home/config.py new file mode 100644 index 0000000000000000000000000000000000000000..45c6926deaffb56f16e914168daf0d2a6db7ff35 --- /dev/null +++ b/src/home/config.py @@ -0,0 +1,6 @@ +from pydantic import BaseSettings + +class HomeSettings(BaseSettings): + FEATURE_ENABLED: bool = True + +home_settings = HomeSettings() \ No newline at end of file diff --git a/src/home/constants.py b/src/home/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..81abed5c8ca7c4cf25575214fcef34b33e492dee --- /dev/null +++ b/src/home/constants.py @@ -0,0 +1,2 @@ +WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen" +EXIT_MESSAGE = "Thank You have a wonderful day" \ No newline at end of file diff --git a/src/home/dependencies.py b/src/home/dependencies.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/home/exceptions.py b/src/home/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/home/models.py b/src/home/models.py new file mode 100644 index 0000000000000000000000000000000000000000..d7564fed9110f55b871ced45b7673973e41bf43f --- /dev/null +++ b/src/home/models.py @@ -0,0 +1,2 @@ +import uuid +import sqlmodel diff --git a/src/home/router.py b/src/home/router.py new file mode 100644 index 0000000000000000000000000000000000000000..daa500ebeb0fee248d47dd51f2d8b2237ceebb51 --- /dev/null +++ b/src/home/router.py @@ -0,0 +1,44 @@ +from typing import List + +from fastapi import APIRouter, Depends, HTTPException +from sqlmodel.ext.asyncio.session import AsyncSession + +from src.core.database import get_async_session +from 
# ---- src/home/router.py ----
from typing import List

from fastapi import APIRouter, Depends, HTTPException
from sqlmodel.ext.asyncio.session import AsyncSession

from src.core.database import get_async_session
from src.core.schemas import BaseResponse
from src.auth.utils import get_current_user
from .schemas import EmotionLogCreate, EmotionLogResponse, HomeResponseData
from .service import add_or_update_emotion, get_emotions, get_home_data

router = APIRouter(tags=["Home"])


@router.get("/{user_id}", response_model=BaseResponse[HomeResponseData])
async def fetch_home_data(
    user_id: str = Depends(get_current_user),
    session: AsyncSession = Depends(get_async_session),
):
    """Return the Home-screen payload for the authenticated user.

    Maps the service's "user not found" ValueError to a 404.
    """
    try:
        data = await get_home_data(user_id, session)
        return {"status_code": 200, "data": data}
    except ValueError as e:
        raise HTTPException(status_code=404, detail=str(e))


@router.post("/emotion", response_model=BaseResponse[EmotionLogResponse])
async def create_or_update_emotion(
    data: EmotionLogCreate, session: AsyncSession = Depends(get_async_session)
):
    """Create the emotion log for ``data.log_date`` or update the existing one."""
    try:
        record = await add_or_update_emotion(data, session)
    except ValueError as e:
        # FIX: the service raises ValueError for an unknown user; previously
        # this bubbled up as an unhandled 500. Mirror fetch_home_data's 404.
        raise HTTPException(status_code=404, detail=str(e))
    return {
        "status_code": 200,
        "data": record,
    }


@router.get("/emotion/{user_id}", response_model=BaseResponse[List[EmotionLogResponse]])
async def get_user_emotions(
    user_id: str = Depends(get_current_user),
    session: AsyncSession = Depends(get_async_session),
):
    """Return all emotion logs for the authenticated user, newest first."""
    data = await get_emotions(user_id, session)
    return {"status_code": 200, "data": data}


# ---- src/home/schemas.py ----
# pylint: disable=no-name-in-module
# pylint: disable=no-self-argument
from datetime import date
from typing import List, Optional, Union

from pydantic import BaseModel


class EmotionLogCreate(BaseModel):
    """Inbound payload logging a morning and/or evening emotion for one day."""

    user_id: str
    morning_emotion: Optional[int] = None
    evening_emotion: Optional[int] = None
    log_date: date


class EmotionLogResponse(BaseModel):
    """One day's emotion log as returned to clients."""

    log_date: date
    morning_emotion: Optional[int]
    evening_emotion: Optional[int]
# ---- src/home/service.py ----
from datetime import date, timedelta

from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession

from src.core.models import EmotionLogs, Users

from .schemas import EmotionLogCreate, EmotionLogResponse, HomeResponseData

# Static copy shown on the Home screen next to the emotion history.
PHILOSOPHY_TEXT = "Your mind is your greatest asset — train it daily."


async def get_home_data(user_id: str, session: AsyncSession) -> HomeResponseData:
    """Build the Home payload: identity, philosophy text, and the last 7 days
    of emotion logs ordered oldest-first.

    Raises:
        ValueError: if no user with ``user_id`` exists.
    """
    result = await session.exec(select(Users).where(Users.id == user_id))
    user = result.first()
    if not user:
        raise ValueError("User not found")

    # NOTE(review): date.today() is server-local; confirm timezone expectations.
    seven_days_ago = date.today() - timedelta(days=7)
    result = await session.exec(
        select(EmotionLogs)
        .where(EmotionLogs.user_id == user_id)
        .where(EmotionLogs.log_date >= seven_days_ago)
        .order_by(EmotionLogs.log_date)
    )
    emotion_logs = result.all()

    emotion_responses = [
        EmotionLogResponse(
            log_date=log.log_date,
            morning_emotion=log.morning_emotion,
            evening_emotion=log.evening_emotion,
        )
        for log in emotion_logs
    ]

    return HomeResponseData(
        user_id=str(user.id),
        user_name=user.user_name,
        philosophy_text=PHILOSOPHY_TEXT,
        recent_emotions=emotion_responses,
    )


async def add_or_update_emotion(
    data: EmotionLogCreate, session: AsyncSession
) -> EmotionLogResponse:
    """Upsert the emotion log for (user_id, log_date).

    Only fields provided (non-None) overwrite an existing row, so a morning
    entry is preserved when the evening entry arrives later.

    Raises:
        ValueError: if the user does not exist.
    """
    user_exists = await session.exec(select(Users).where(Users.id == data.user_id))
    if not user_exists.first():
        raise ValueError("User not found. Cannot add emotion log.")

    result = await session.exec(
        select(EmotionLogs)
        .where(EmotionLogs.user_id == data.user_id)
        .where(EmotionLogs.log_date == data.log_date)
    )
    existing_log = result.first()

    if existing_log:
        if data.morning_emotion is not None:
            existing_log.morning_emotion = data.morning_emotion
        if data.evening_emotion is not None:
            existing_log.evening_emotion = data.evening_emotion
        record = existing_log
    else:
        record = EmotionLogs(
            user_id=data.user_id,
            morning_emotion=data.morning_emotion,
            evening_emotion=data.evening_emotion,
            log_date=data.log_date,
        )
        session.add(record)

    await session.commit()
    await session.refresh(record)

    return EmotionLogResponse(
        log_date=record.log_date,
        morning_emotion=record.morning_emotion,
        evening_emotion=record.evening_emotion,
    )


async def get_emotions(user_id: str, session: AsyncSession):
    """Return every emotion log for ``user_id``, newest first."""
    result = await session.exec(
        select(EmotionLogs)
        .where(EmotionLogs.user_id == user_id)
        .order_by(EmotionLogs.log_date.desc())
    )
    logs = result.all()

    return [
        EmotionLogResponse(
            log_date=log.log_date,
            morning_emotion=log.morning_emotion,
            evening_emotion=log.evening_emotion,
        )
        for log in logs
    ]


# ---- src/home/test.py ----
# Ad-hoc manual check of the external quotes API.
import requests

url = "https://api.quotable.io/quotes?limit=3"

# SECURITY FIX: was requests.get(url, verify=False), which disables TLS
# certificate validation and permits man-in-the-middle attacks. Also add a
# timeout so the script cannot hang forever.
response = requests.get(url, timeout=10)

if response.status_code == 200:
    data = response.json()
    print(data)
else:
    print(f"Failed to retrieve data: {response.status_code}")
# ---- src/main.py ----
from fastapi import FastAPI

from src.auth.router import router as auth_router
from src.chatbot.router import router as chatbot
from src.core.database import init_db
from src.home.router import router as home_router
from src.profile.router import router as profile

app = FastAPI(title="Yuvabe App API")

app.include_router(home_router, prefix="/home", tags=["Home"])

# init_db()  # NOTE(review): disabled — presumably schema is managed by Alembic; confirm.

app.include_router(auth_router)

app.include_router(profile)

# app.include_router(assets)

# app.include_router(leave)

app.include_router(chatbot)


@app.get("/")
def root():
    """Liveness probe for the deployment."""
    return {"message": "API is running fine!"}


# ---- src/profile/config.py ----
from pydantic import BaseSettings


class ProfileSettings(BaseSettings):
    """Environment-driven feature flags for the Profile module.

    This file was copy-pasted from src/home/config.py and still carried the
    ``HomeSettings`` name; renamed, with backward-compatible aliases below.
    """

    FEATURE_ENABLED: bool = True


# Backward-compatible aliases so existing importers keep working.
HomeSettings = ProfileSettings
home_settings = ProfileSettings()
profile_settings = home_settings


# ---- src/profile/constants.py ----
# NOTE(review): says "Home Screen" inside the Profile module — likely
# copy-paste from src/home/constants.py; confirm intended copy.
WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen"
# Fixed grammar of the farewell message (was "Thank You have a wonderful day").
EXIT_MESSAGE = "Thank you, have a wonderful day"
# ---- src/profile/router.py ----
# FIX: the original had the same imports repeated up to three times and pulled
# AsyncSession from both sqlalchemy and sqlmodel; worse, `list_user_assets`
# (used by get_assets) and `send_email` (used by send_leave_email) were never
# imported at all, so both endpoints raised NameError at request time.
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession

from src.auth.schemas import BaseResponse
from src.auth.utils import get_current_user
from src.core.database import get_async_session
from src.core.models import Roles, Teams, Users, UserTeamsRole
from src.profile.service import list_user_assets, update_user_profile

from .schemas import UpdateProfileRequest
from .utils import send_email

router = APIRouter(prefix="/profile", tags=["Profile"])


@router.get("/", response_model=BaseResponse)
async def get_assets(
    user_id: str = Depends(get_current_user),
    session: AsyncSession = Depends(get_async_session),
):
    """List the assets assigned to the authenticated user."""
    assets = await list_user_assets(session, user_id)

    data = {
        "assets": [
            {
                "id": a.id,
                "name": a.name,
                "type": a.type,
                "status": a.status,
            }
            for a in assets
        ]
    }

    return {"code": 200, "data": data}


@router.put("/update-profile", response_model=BaseResponse)
async def update_profile(
    payload: UpdateProfileRequest,
    user_id: str = Depends(get_current_user),
    session: AsyncSession = Depends(get_async_session),
):
    """Apply a partial profile update (name/email/dob/address/password)."""
    result = await update_user_profile(session, user_id, payload)
    return {"code": 200, "data": result}


@router.get("/contacts", response_model=BaseResponse)
async def get_leave_contacts(
    current_user=Depends(get_current_user),
    session: AsyncSession = Depends(get_async_session),
):
    """Resolve the leave-request recipients: the user's Team Lead(s) as "to"
    and every HR Team member as "cc".
    """
    # get_current_user returns a STRING user_id
    user_id = current_user

    if not user_id:
        raise HTTPException(status_code=400, detail="Invalid user token")

    # 1) Get user's team
    stmt = select(UserTeamsRole).where(UserTeamsRole.user_id == user_id)
    ut = (await session.exec(stmt)).first()

    if not ut:
        raise HTTPException(status_code=404, detail="User-Team mapping not found")

    # 2) Get Team Lead role
    lead_role = (
        await session.exec(select(Roles).where(Roles.name == "Team Lead"))
    ).first()

    if not lead_role:
        raise HTTPException(status_code=500, detail="Team Lead role not found")

    # 3) Find Team Lead user(s) in the same team
    lead_user = (
        await session.exec(
            select(Users)
            .join(UserTeamsRole)
            .where(UserTeamsRole.team_id == ut.team_id)
            .where(UserTeamsRole.role_id == lead_role.id)
        )
    ).all()

    if not lead_user:
        raise HTTPException(status_code=404, detail="Team lead not found")

    to_email = ", ".join([u.email_id for u in lead_user])

    # 4) HR CC emails
    hr_team = (await session.exec(select(Teams).where(Teams.name == "HR Team"))).first()

    cc = []
    if hr_team:
        hr_users = (
            await session.exec(
                select(Users)
                .join(UserTeamsRole)
                .where(UserTeamsRole.team_id == hr_team.id)
            )
        ).all()

        cc = [str(row.email_id) for row in hr_users]

    return BaseResponse(code=200, message="success", data={"to": to_email, "cc": cc})


@router.post("/send", response_model=BaseResponse)
async def send_leave_email(
    payload: dict,
    background: BackgroundTasks,
    current_user=Depends(get_current_user),
):
    """Queue a leave-request e-mail for background delivery and return fast."""
    from_email = payload.get("from_email")
    to_email = payload.get("to")
    cc = payload.get("cc", [])
    subject = payload.get("subject")
    body = payload.get("body")

    if not subject or not body:
        raise HTTPException(status_code=400, detail="Subject and body required")

    # send in background so API returns fast
    background.add_task(send_email, to_email, subject, body, cc, from_email)

    return BaseResponse(code=200, message="Leave request sent", data=None)


# ---- src/profile/schemas.py ----
from pydantic import BaseModel, EmailStr
from typing import Optional
import uuid
from enum import Enum


class AssetStatus(str, Enum):
    """Lifecycle states of a user asset."""

    ACTIVE = "Active"
    UNAVAILABLE = "Unavailable"
    ON_REQUEST = "On Request"
    IN_SERVICE = "In Service"


class AssetCreateRequest(BaseModel):
    name: str
    type: str
    status: Optional[AssetStatus] = AssetStatus.UNAVAILABLE


class AssetUpdateRequest(BaseModel):
    """All fields optional — only supplied fields are updated."""

    name: Optional[str] = None
    type: Optional[str] = None
    status: Optional[AssetStatus] = None


class AssetResponse(BaseModel):
    id: uuid.UUID
    user_id: uuid.UUID
    name: str
    type: str
    status: AssetStatus


class BaseResponse(BaseModel):
    # NOTE(review): this local envelope has no `message` field, while the
    # router uses src.auth.schemas.BaseResponse (which accepts one) — confirm
    # which envelope is canonical.
    code: int
    data: dict


class UpdateProfileRequest(BaseModel):
    """Partial profile update; password change requires current_password."""

    name: Optional[str] = None
    email: Optional[EmailStr] = None
    dob: Optional[str] = None  # expected format: DD.MM.YYYY
    address: Optional[str] = None

    current_password: Optional[str] = None
    new_password: Optional[str] = None
# ---- src/profile/service.py ----
# FIX: the original import block was duplicated and contained `from ast import
# List` (wrong module — typing.List was intended, and it was shadowed anyway);
# consolidated to one clean block.
import uuid
from datetime import datetime
from typing import List

from fastapi import HTTPException
from passlib.context import CryptContext
from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession

from src.core.models import Assets, Users

# bcrypt hashing context used for password verification/changes.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


async def update_user_profile(session, user_id: str, data):
    """Apply a partial profile update and return a summary dict.

    Only truthy fields on ``data`` are applied. A password change requires the
    correct current password.

    Raises:
        HTTPException: 404 unknown user; 400 bad DOB format, missing or
            incorrect current password.
    """
    user = await session.get(Users, uuid.UUID(user_id))

    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    # --- Update Name ---
    if data.name:
        user.user_name = data.name

    # --- Update Email ---
    # NOTE(review): e-mail changes are applied without re-verification and
    # is_verified is not reset — confirm this is intended.
    if data.email:
        user.email_id = data.email

    # --- Update DOB ---
    if data.dob:
        try:
            # Convert DD.MM.YYYY → Python date
            parsed_date = datetime.strptime(data.dob, "%d.%m.%Y").date()
            user.dob = parsed_date
        except ValueError:
            # FIX: was a bare `except:`, which also swallowed SystemExit /
            # KeyboardInterrupt; strptime raises ValueError on bad input.
            raise HTTPException(
                status_code=400, detail="DOB must be in DD.MM.YYYY format"
            )

    # --- Update Address ---
    if data.address:
        user.address = data.address

    # --- Change Password ---
    if data.new_password:
        if not data.current_password:
            raise HTTPException(status_code=400, detail="Current password required")

        # Verify old password
        if not pwd_context.verify(data.current_password, user.password):
            raise HTTPException(status_code=400, detail="Incorrect current password")

        # Set new password
        user.password = pwd_context.hash(data.new_password)

    # Commit changes
    await session.commit()
    await session.refresh(user)

    return {
        "message": "Profile updated successfully",
        "user": {
            "id": str(user.id),
            "name": user.user_name,
            "email": user.email_id,
            "dob": user.dob.isoformat() if user.dob else None,
            "address": user.address,
            "is_verified": user.is_verified,
        },
    }


async def list_user_assets(session: AsyncSession, user_id: str) -> List[Assets]:
    """Return every asset row assigned to ``user_id``."""
    q = await session.exec(
        select(Assets).where(Assets.user_id == uuid.UUID(user_id))
    )
    return q.all()


# ---- src/profile/utils.py ----
import smtplib
from email.message import EmailMessage

from src.core.config import settings

# Kept for callers that may read these; send_email itself uses settings.*.
SMTP_HOST = settings.EMAIL_SERVER
SMTP_PORT = settings.EMAIL_PORT
SMTP_USER = settings.EMAIL_USERNAME
SMTP_PASS = settings.EMAIL_PASSWORD
FROM_DEFAULT = settings.EMAIL_USERNAME


def send_email(
    to_email: str, subject: str, body: str, cc: list[str] = None, from_email: str = None
):
    """Send a plain-text e-mail via the configured SMTP account.

    Gmail cannot send as another user, so 'From' is always the SMTP account
    and the requesting user's address goes into 'Reply-To'.

    Raises:
        RuntimeError: wrapping any SMTP failure (chained to the cause).
    """

    cc = cc or []

    msg = EmailMessage()
    msg["Subject"] = subject

    # Always send FROM your SMTP account
    msg["From"] = settings.EMAIL_USERNAME

    # Show this as reply address
    if from_email:
        msg["Reply-To"] = from_email

    msg["To"] = to_email

    if cc:
        msg["Cc"] = ", ".join(cc)

    msg.set_content(body)

    try:
        with smtplib.SMTP(settings.EMAIL_SERVER, settings.EMAIL_PORT) as server:
            server.starttls()
            server.login(settings.EMAIL_USERNAME, settings.EMAIL_PASSWORD)

            server.send_message(msg)

    except Exception as e:
        # FIX: was `raise Exception(...)` with no cause — RuntimeError is a
        # subclass of Exception (callers unaffected) and `from e` preserves
        # the original traceback for debugging.
        raise RuntimeError(f"Email sending failed: {e}") from e