Shri committed on
Commit
6a97566
·
1 Parent(s): c0310c2

hf: for deployment

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitignore +4 -0
  2. Dockerfile +11 -0
  3. README.md +2 -0
  4. alembic.ini +147 -0
  5. alembic/README +1 -0
  6. alembic/env.py +78 -0
  7. alembic/script.py.mako +29 -0
  8. alembic/versions/584a5111e60f_initial_migration.py +33 -0
  9. alembic/versions/b33e3b5b7af9_added_roles.py +33 -0
  10. alembic/versions/dd61202db14f_add_knowledgebase_chunk.py +33 -0
  11. alembic/versions/e8066533b622_delete_user_verification_cols.py +49 -0
  12. requirements.txt +62 -0
  13. src/auth/__init__.py +0 -0
  14. src/auth/config.py +17 -0
  15. src/auth/constants.py +2 -0
  16. src/auth/dependencies.py +0 -0
  17. src/auth/exceptions.py +0 -0
  18. src/auth/feed_db_script.py +132 -0
  19. src/auth/models.py +2 -0
  20. src/auth/router.py +125 -0
  21. src/auth/schemas.py +38 -0
  22. src/auth/service.py +156 -0
  23. src/auth/utils.py +206 -0
  24. src/chatbot/__init__.py +0 -0
  25. src/chatbot/config.py +6 -0
  26. src/chatbot/constants.py +2 -0
  27. src/chatbot/dependencies.py +0 -0
  28. src/chatbot/embedding.py +100 -0
  29. src/chatbot/exceptions.py +0 -0
  30. src/chatbot/models.py +28 -0
  31. src/chatbot/router.py +111 -0
  32. src/chatbot/schemas.py +36 -0
  33. src/chatbot/service.py +45 -0
  34. src/chatbot/utils.py +57 -0
  35. src/core/__init__.py +6 -0
  36. src/core/config.py +51 -0
  37. src/core/database.py +38 -0
  38. src/core/exceptions.py +0 -0
  39. src/core/models.py +74 -0
  40. src/core/pagination.py +0 -0
  41. src/core/schemas.py +10 -0
  42. src/core/temp_feed_db.py +144 -0
  43. src/feed/__init__.py +0 -0
  44. src/feed/config.py +6 -0
  45. src/feed/constants.py +2 -0
  46. src/feed/dependencies.py +0 -0
  47. src/feed/exceptions.py +0 -0
  48. src/feed/models.py +49 -0
  49. src/feed/router.py +0 -0
  50. src/feed/schemas.py +0 -0
.gitignore ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ __pycache__/
2
+ venv/
3
+ .env
4
+ .idea/
Dockerfile ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.10

WORKDIR /app

# Install dependencies first: this layer is cached and only rebuilt when
# requirements.txt changes, instead of on every source edit.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application source after the dependency layer to maximize cache reuse.
COPY . .

# Hugging Face Spaces routes traffic to port 7860.
EXPOSE 7860

CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "7860"]
README.md CHANGED
@@ -9,3 +9,5 @@ short_description: Yuvabe App Backend
9
  ---
10
 
11
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
9
  ---
10
 
11
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
12
+
13
+ # YB's Wellness App FastAPI Backend
alembic.ini ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts.
5
+ # this is typically a path given in POSIX (e.g. forward slashes)
6
+ # format, relative to the token %(here)s which refers to the location of this
7
+ # ini file
8
+ script_location = %(here)s/alembic
9
+
10
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
11
+ # Uncomment the line below if you want the files to be prepended with date and time
12
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
13
+ # for all available tokens
14
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
15
+
16
+ # sys.path path, will be prepended to sys.path if present.
17
+ # defaults to the current working directory. for multiple paths, the path separator
18
+ # is defined by "path_separator" below.
19
+ prepend_sys_path = .
20
+
21
+
22
+ # timezone to use when rendering the date within the migration file
23
+ # as well as the filename.
24
+ # If specified, requires the tzdata library which can be installed by adding
25
+ # `alembic[tz]` to the pip requirements.
26
+ # string value is passed to ZoneInfo()
27
+ # leave blank for localtime
28
+ # timezone =
29
+
30
+ # max length of characters to apply to the "slug" field
31
+ # truncate_slug_length = 40
32
+
33
+ # set to 'true' to run the environment during
34
+ # the 'revision' command, regardless of autogenerate
35
+ # revision_environment = false
36
+
37
+ # set to 'true' to allow .pyc and .pyo files without
38
+ # a source .py file to be detected as revisions in the
39
+ # versions/ directory
40
+ # sourceless = false
41
+
42
+ # version location specification; This defaults
43
+ # to <script_location>/versions. When using multiple version
44
+ # directories, initial revisions must be specified with --version-path.
45
+ # The path separator used here should be the separator specified by "path_separator"
46
+ # below.
47
+ # version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
48
+
49
+ # path_separator; This indicates what character is used to split lists of file
50
+ # paths, including version_locations and prepend_sys_path within configparser
51
+ # files such as alembic.ini.
52
+ # The default rendered in new alembic.ini files is "os", which uses os.pathsep
53
+ # to provide os-dependent path splitting.
54
+ #
55
+ # Note that in order to support legacy alembic.ini files, this default does NOT
56
+ # take place if path_separator is not present in alembic.ini. If this
57
+ # option is omitted entirely, fallback logic is as follows:
58
+ #
59
+ # 1. Parsing of the version_locations option falls back to using the legacy
60
+ # "version_path_separator" key, which if absent then falls back to the legacy
61
+ # behavior of splitting on spaces and/or commas.
62
+ # 2. Parsing of the prepend_sys_path option falls back to the legacy
63
+ # behavior of splitting on spaces, commas, or colons.
64
+ #
65
+ # Valid values for path_separator are:
66
+ #
67
+ # path_separator = :
68
+ # path_separator = ;
69
+ # path_separator = space
70
+ # path_separator = newline
71
+ #
72
+ # Use os.pathsep. Default configuration used for new projects.
73
+ path_separator = os
74
+
75
+ # set to 'true' to search source files recursively
76
+ # in each "version_locations" directory
77
+ # new in Alembic version 1.10
78
+ # recursive_version_locations = false
79
+
80
+ # the output encoding used when revision files
81
+ # are written from script.py.mako
82
+ # output_encoding = utf-8
83
+
84
+ # database URL. This is consumed by the user-maintained env.py script only.
85
+ # other means of configuring database URLs may be customized within the env.py
86
+ # file.
87
+ sqlalchemy.url = driver://user:pass@localhost/dbname
88
+
89
+
90
+ [post_write_hooks]
91
+ # post_write_hooks defines scripts or Python functions that are run
92
+ # on newly generated revision scripts. See the documentation for further
93
+ # detail and examples
94
+
95
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
96
+ # hooks = black
97
+ # black.type = console_scripts
98
+ # black.entrypoint = black
99
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
100
+
101
+ # lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
102
+ # hooks = ruff
103
+ # ruff.type = module
104
+ # ruff.module = ruff
105
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
106
+
107
+ # Alternatively, use the exec runner to execute a binary found on your PATH
108
+ # hooks = ruff
109
+ # ruff.type = exec
110
+ # ruff.executable = ruff
111
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
112
+
113
+ # Logging configuration. This is also consumed by the user-maintained
114
+ # env.py script only.
115
+ [loggers]
116
+ keys = root,sqlalchemy,alembic
117
+
118
+ [handlers]
119
+ keys = console
120
+
121
+ [formatters]
122
+ keys = generic
123
+
124
+ [logger_root]
125
+ level = WARNING
126
+ handlers = console
127
+ qualname =
128
+
129
+ [logger_sqlalchemy]
130
+ level = WARNING
131
+ handlers =
132
+ qualname = sqlalchemy.engine
133
+
134
+ [logger_alembic]
135
+ level = INFO
136
+ handlers =
137
+ qualname = alembic
138
+
139
+ [handler_console]
140
+ class = StreamHandler
141
+ args = (sys.stderr,)
142
+ level = NOTSET
143
+ formatter = generic
144
+
145
+ [formatter_generic]
146
+ format = %(levelname)-5.5s [%(name)s] %(message)s
147
+ datefmt = %H:%M:%S
alembic/README ADDED
@@ -0,0 +1 @@
 
 
1
+ Generic single-database configuration.
alembic/env.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool
from sqlmodel import SQLModel

from alembic import context
# Wildcard import is deliberate: it registers every model class on
# SQLModel.metadata so autogenerate can see the full schema.
from src.core import *
from src.core.config import settings

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Override the placeholder URL from alembic.ini with the application's
# configured database URL (must happen before any engine is built below).
config.set_main_option(name="sqlalchemy.url", value=settings.DATABASE_URL)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # Uses the sqlalchemy.url injected above via set_main_option.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
alembic/script.py.mako ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+ ${imports if imports else ""}
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = ${repr(up_revision)}
17
+ down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
18
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
19
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ ${upgrades if upgrades else "pass"}
25
+
26
+
27
+ def downgrade() -> None:
28
+ """Downgrade schema."""
29
+ ${downgrades if downgrades else "pass"}
alembic/versions/584a5111e60f_initial_migration.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """initial migration
2
+
3
+ Revision ID: 584a5111e60f
4
+ Revises:
5
+ Create Date: 2025-11-10 23:50:23.367946
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = '584a5111e60f'
17
+ down_revision: Union[str, Sequence[str], None] = None
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ pass
26
+ # ### end Alembic commands ###
27
+
28
+
29
+ def downgrade() -> None:
30
+ """Downgrade schema."""
31
+ # ### commands auto generated by Alembic - please adjust! ###
32
+ pass
33
+ # ### end Alembic commands ###
alembic/versions/b33e3b5b7af9_added_roles.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Added roles
2
+
3
+ Revision ID: b33e3b5b7af9
4
+ Revises: e8066533b622
5
+ Create Date: 2025-11-16 21:10:02.038255
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = 'b33e3b5b7af9'
17
+ down_revision: Union[str, Sequence[str], None] = 'e8066533b622'
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ pass
26
+ # ### end Alembic commands ###
27
+
28
+
29
+ def downgrade() -> None:
30
+ """Downgrade schema."""
31
+ # ### commands auto generated by Alembic - please adjust! ###
32
+ pass
33
+ # ### end Alembic commands ###
alembic/versions/dd61202db14f_add_knowledgebase_chunk.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """add: knowledgebase,chunk
2
+
3
+ Revision ID: dd61202db14f
4
+ Revises: b33e3b5b7af9
5
+ Create Date: 2025-11-17 23:28:11.537932
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = 'dd61202db14f'
17
+ down_revision: Union[str, Sequence[str], None] = 'b33e3b5b7af9'
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ pass
26
+ # ### end Alembic commands ###
27
+
28
+
29
+ def downgrade() -> None:
30
+ """Downgrade schema."""
31
+ # ### commands auto generated by Alembic - please adjust! ###
32
+ pass
33
+ # ### end Alembic commands ###
alembic/versions/e8066533b622_delete_user_verification_cols.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""delete:user/verification cols

Revision ID: e8066533b622
Revises: 584a5111e60f
Create Date: 2025-11-11 10:47:38.171691

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = "e8066533b622"
down_revision: Union[str, Sequence[str], None] = "584a5111e60f"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema: drop the email-verification columns from `users`."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): dropping these columns permanently discards any stored
    # verification tokens/expiries; downgrade restores the columns but not the data.
    op.drop_column("users", "verification_token")
    op.drop_column("users", "verification_expires_at")
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema: re-create the verification columns (values are lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        "users",
        sa.Column(
            "verification_expires_at",
            postgresql.TIMESTAMP(),
            autoincrement=False,
            nullable=True,
        ),
    )
    op.add_column(
        "users",
        sa.Column(
            "verification_token", sa.VARCHAR(), autoincrement=False, nullable=True
        ),
    )
    # ### end Alembic commands ###
requirements.txt ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ alembic==1.17.1
2
+ annotated-doc==0.0.3
3
+ annotated-types==0.7.0
4
+ anyio==4.11.0
5
+ asyncpg==0.30.0
6
+ bcrypt==3.2.2
7
+ certifi==2025.11.12
8
+ cffi==2.0.0
9
+ charset-normalizer==3.4.4
10
+ click==8.3.0
11
+ coloredlogs==15.0.1
12
+ cryptography==46.0.3
13
+ dnspython==2.8.0
14
+ ecdsa==0.19.1
15
+ email-validator==2.3.0
16
+ fastapi==0.121.0
17
+ filelock==3.20.0
18
+ flatbuffers==25.9.23
19
+ fsspec==2025.10.0
20
+ greenlet==3.2.4
21
+ h11==0.16.0
22
+ hf-xet==1.2.0
23
+ huggingface-hub==0.36.0
24
+ humanfriendly==10.0
25
+ idna==3.11
26
+ Mako==1.3.10
27
+ MarkupSafe==3.0.3
28
+ mpmath==1.3.0
29
+ numpy==2.3.5
30
+ onnxruntime==1.23.2
31
+ packaging==25.0
32
+ passlib==1.7.4
33
+ pgvector==0.4.1
34
+ protobuf==6.33.1
35
+ psycopg2-binary==2.9.11
36
+ pyasn1==0.6.1
37
+ pycparser==2.23
38
+ pydantic==2.12.4
39
+ pydantic-settings==2.12.0
40
+ pydantic_core==2.41.5
41
+ PyPDF2==3.0.1
42
+ python-dotenv==1.2.1
43
+ python-jose==3.5.0
44
+ python-multipart==0.0.20
45
+ PyYAML==6.0.3
46
+ regex==2025.11.3
47
+ requests==2.32.5
48
+ rsa==4.9.1
49
+ safetensors==0.6.2
50
+ six==1.17.0
51
+ sniffio==1.3.1
52
+ SQLAlchemy==2.0.44
53
+ sqlmodel==0.0.27
54
+ starlette==0.49.3
55
+ sympy==1.14.0
56
+ tokenizers==0.22.1
57
+ tqdm==4.67.1
58
+ transformers==4.57.1
59
+ typing-inspection==0.4.2
60
+ typing_extensions==4.15.0
61
+ urllib3==2.5.0
62
+ uvicorn==0.38.0
src/auth/__init__.py ADDED
File without changes
src/auth/config.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

from dotenv import load_dotenv
# BUGFIX: BaseSettings was removed from pydantic in v2 (the pinned
# pydantic==2.12.4 raises on `from pydantic import BaseSettings`);
# it now lives in the pydantic-settings package, which is already
# pinned in requirements.txt.
from pydantic_settings import BaseSettings


class HomeSettings(BaseSettings):
    """Feature flags for the home screen (overridable via environment)."""

    FEATURE_ENABLED: bool = True


home_settings = HomeSettings()


load_dotenv()
# Secret used to sign JWTs; must be provided via the environment / .env file.
SECRET_KEY = os.getenv("SECRET_KEY")

ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 60
src/auth/constants.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
# User-facing messages for the home-screen endpoints.
WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen"
# BUGFIX: grammar in the user-facing farewell message ("Thank You have ...").
EXIT_MESSAGE = "Thank you, have a wonderful day"
src/auth/dependencies.py ADDED
File without changes
src/auth/exceptions.py ADDED
File without changes
src/auth/feed_db_script.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from datetime import date

from sqlmodel import Session

from src.auth.utils import hash_password
from src.core.database import engine
from src.core.models import Users, Teams, Roles, UserTeamsRole

# Every seed account shares this default password (dev/demo data only).
_DEFAULT_PASSWORD = "Yuvabe"


# ------------------------
# 1. Seed Users
# ------------------------
def seed_users(session: Session):
    """Insert the demo users and return them with DB-assigned ids."""
    users = [
        Users(
            email_id="ragul@yuvabe.com",
            password=hash_password(_DEFAULT_PASSWORD),
            user_name="ragul",
            dob=date(2001, 5, 21),
            address="Chennai",
            profile_picture="ragul.png",
        ),
        Users(
            email_id="shri@yuvabe.com",
            password=hash_password(_DEFAULT_PASSWORD),
            user_name="Shri",
            dob=date(1999, 3, 14),
            address="Chennai",
            profile_picture="shri.png",
        ),
        Users(
            email_id="hryuva@yuvabe.com",
            password=hash_password(_DEFAULT_PASSWORD),
            user_name="Sathish",
            dob=date(1998, 7, 10),
            address="Chennai",
            profile_picture="Sathish.png",
        ),
        Users(
            email_id="hr2@yuvabe.com",
            password=hash_password(_DEFAULT_PASSWORD),
            user_name="Deepika",
            dob=date(1997, 2, 5),
            address="Chennai",
            profile_picture="deepika.png",
        ),
    ]

    session.add_all(users)
    session.commit()
    print("Users added.")
    return users


# ------------------------
# 2. Seed Teams
# ------------------------
def seed_teams(session: Session):
    """Insert the two demo teams and return them."""
    teams = [
        Teams(name="Tech Team"),
        Teams(name="HR Team"),
    ]
    session.add_all(teams)
    session.commit()
    print("Teams added.")
    return teams


# ------------------------
# 3. Seed Roles
# ------------------------
def seed_roles(session: Session):
    """Insert the demo roles and return them."""
    roles = [
        Roles(name="Developer"),
        Roles(name="Team Lead"),
        Roles(name="HR Manager"),
    ]
    session.add_all(roles)
    session.commit()
    print("Roles added.")
    return roles


# ------------------------
# 4. Map Users → Teams → Roles
# ------------------------
def seed_user_teams_roles(session: Session, users, teams, roles):
    """Link each seeded user to a team and role.

    NOTE: the original comments named "Hari"/"Keerthana", which did not match
    the actual seeded users — corrected here to the real names by index.
    """
    mappings = [
        # ragul → Tech Team → Developer
        UserTeamsRole(
            user_id=users[0].id,
            team_id=teams[0].id,
            role_id=roles[0].id,
        ),
        # Shri → Tech Team → Team Lead
        UserTeamsRole(
            user_id=users[1].id,
            team_id=teams[0].id,
            role_id=roles[1].id,
        ),
        # Sathish → HR Team → HR Manager
        UserTeamsRole(
            user_id=users[2].id,
            team_id=teams[1].id,
            role_id=roles[2].id,
        ),
        # Deepika → HR Team → HR Manager
        UserTeamsRole(
            user_id=users[3].id,
            team_id=teams[1].id,
            role_id=roles[2].id,
        ),
    ]

    session.add_all(mappings)
    session.commit()
    print("User-Team-Role mappings added.")


# ------------------------
# 5. Master Runner
# ------------------------
def run_all_seeds():
    """Seed users, teams, roles and their mappings in one session."""
    with Session(engine) as session:
        users = seed_users(session)
        teams = seed_teams(session)
        roles = seed_roles(session)
        seed_user_teams_roles(session, users, teams, roles)
        print("All data seeded successfully!")


if __name__ == "__main__":
    run_all_seeds()
src/auth/models.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ import uuid
2
+ import sqlmodel
src/auth/router.py ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import uuid

# BUGFIX: fastapi (APIRouter/Depends/HTTPException) and jose (jwt/JWTError)
# were each imported twice; deduplicated and grouped per convention.
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.responses import RedirectResponse
from fastapi.security import OAuth2PasswordRequestForm
from jose import jwt, JWTError
from sqlmodel import Session
from sqlmodel.ext.asyncio.session import AsyncSession

from src.auth.service import (
    create_user,
    verify_email,
    login_user,
)
from src.auth.utils import create_access_token, get_current_user
from src.core.config import settings
from src.core.database import get_async_session
from src.core.models import Users
from .schemas import SignUpRequest, LoginRequest, BaseResponse, SendVerificationRequest

# NOTE: the commented-out /send-verification and /verify-email endpoints were
# removed as dead code; they remain in version control history if needed.

router = APIRouter(prefix="/auth", tags=["Auth"])


@router.post("/signup", response_model=BaseResponse)
async def signup(
    payload: SignUpRequest, session: AsyncSession = Depends(get_async_session)
):
    """Register a new user; returns tokens on success, 400 on duplicate email."""
    try:
        response = await create_user(
            session, payload.name, payload.email, payload.password
        )
        return {"code": 200, "data": response}
    except ValueError as e:
        # create_user signals "user already exists" with ValueError.
        raise HTTPException(status_code=400, detail=str(e))


@router.post("/login", response_model=BaseResponse)
async def login(
    payload: LoginRequest, session: AsyncSession = Depends(get_async_session)
):
    """Authenticate with email/password; returns access + refresh tokens."""
    response = await login_user(session, payload.email, payload.password)
    return {"code": 200, "data": response}


@router.post("/refresh", response_model=BaseResponse)
async def refresh_token(request: dict):
    """Generate new access token using refresh token"""
    refresh_token = request.get("refresh_token")
    if not refresh_token:
        raise HTTPException(status_code=400, detail="Refresh token is required")

    try:
        payload = jwt.decode(
            refresh_token, settings.SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]
        )
        # Only tokens minted with type == "refresh" may produce new access tokens.
        if payload.get("type") != "refresh":
            raise HTTPException(status_code=400, detail="Invalid refresh token")

        user_data = {
            "sub": payload["sub"],
            "name": payload.get("name"),
            "email": payload.get("email"),
        }
        new_access_token = create_access_token(data=user_data)
        return {"code": 200, "data": {"access_token": new_access_token}}

    except JWTError:
        raise HTTPException(status_code=401, detail="Invalid or expired refresh token")


@router.get("/home", response_model=BaseResponse)
async def get_home(
    user_id: str = Depends(get_current_user),
    session: AsyncSession = Depends(get_async_session),
):
    """
    Protected home endpoint. Requires a valid access token (Bearer).
    """
    user = await session.get(Users, uuid.UUID(user_id))
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    # Example payload — replace with your real app data
    return {
        "code": 200,
        "data": {
            "message": f"Welcome to Home, {user.user_name}!",
            "user": {
                "id": str(user.id),
                "name": user.user_name,
                "email": user.email_id,
                "is_verified": user.is_verified,
                "dob": user.dob.isoformat() if user.dob else None,
                "profile_picture": user.profile_picture,
            },
            "home_data": {
                "announcements": ["Welcome!", "New protocol released"],
                "timestamp": user.created_at.isoformat() if user.created_at else None,
            },
        },
    }
src/auth/schemas.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from pydantic import BaseModel, EmailStr
from typing import Optional, Union, Dict


class SignUpRequest(BaseModel):
    """Payload for POST /auth/signup."""

    name: str
    email: str
    password: str


class VerifyOtpRequest(BaseModel):
    """Payload for OTP-based email verification."""

    email: str
    otp: str


class LoginRequest(BaseModel):
    """Payload for POST /auth/login."""

    email: str
    password: str


class SendVerificationRequest(BaseModel):
    """Payload for requesting a verification email; validates address format."""

    email: EmailStr


class UserResponse(BaseModel):
    """Public view of a user returned to clients."""

    id: str
    name: str
    email: str


class LoginResponseData(BaseModel):
    """Token bundle plus user summary returned on successful login."""

    access_token: str
    token_type: str
    user: UserResponse


class BaseResponse(BaseModel):
    """Uniform response envelope: status code plus optional payload."""

    code: int
    # Optional[...] already admits None, so the inner None was redundant.
    data: Optional[Union[Dict, str]] = None
src/auth/service.py ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import uuid

from fastapi import HTTPException
from sqlmodel import Session, select
from sqlmodel.ext.asyncio.session import AsyncSession

from src.auth.utils import (
    # send_otp_email,
    verify_password,
    create_refresh_token,
    verify_verification_token,
    create_access_token,
    hash_password,
    create_verification_token,
)
from src.core.models import Users

# NOTE: the commented-out send_verification_link helper was removed as dead
# code; it remains in version control history if the flow is revived.


def _token_claims(user: Users) -> dict:
    """Claims embedded in both access and refresh tokens for *user*."""
    return {"sub": str(user.id), "name": user.user_name, "email": user.email_id}


async def create_user(session: AsyncSession, name: str, email: str, password: str):
    """Create a pre-verified user and return fresh access/refresh tokens.

    Raises HTTPException(400) for non-company addresses; raises ValueError
    when the email is already registered (the router maps that to a 400).
    """
    if not email.lower().endswith("@yuvabe.com"):
        # BUGFIX: message said "Enter you're Yuvabe email ID".
        raise HTTPException(status_code=400, detail="Enter your Yuvabe email ID")

    result = await session.exec(select(Users).where(Users.email_id == email))
    if result.first():
        raise ValueError("User already exists")

    new_user = Users(
        user_name=name,
        email_id=email,
        password=hash_password(password),
        is_verified=True,  # no email-verification step in the current flow
    )

    session.add(new_user)
    await session.commit()
    await session.refresh(new_user)  # populate DB-assigned id

    return {
        "message": "User created successfully",
        "user_id": str(new_user.id),
        "access_token": create_access_token(data=_token_claims(new_user)),
        "refresh_token": create_refresh_token(data=_token_claims(new_user)),
    }


async def verify_email(session: Session, token: str):
    """Mark the token's user as verified and return login tokens."""
    try:
        user_id = await verify_verification_token(token)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))

    user = await session.get(Users, uuid.UUID(user_id))
    if not user:
        raise HTTPException(status_code=404, detail="User not found")

    # Idempotent: re-verifying an already-verified user changes nothing.
    if not user.is_verified:
        user.is_verified = True
        await session.commit()

    return {
        "message": "Email verified successfully!",
        "access_token": create_access_token(data=_token_claims(user)),
        "refresh_token": create_refresh_token(data=_token_claims(user)),
        "token_type": "bearer",
    }


async def login_user(session: Session, email: str, password: str):
    """Validate credentials and return tokens plus a user summary."""
    if not email.lower().endswith("@yuvabe.com"):
        # BUGFIX: message said "Enter you're Yuvabe email ID".
        raise HTTPException(status_code=400, detail="Enter your Yuvabe email ID")

    result = await session.exec(select(Users).where(Users.email_id == email))
    user = result.first()

    if not user:
        # Same message for unknown email and wrong password so the response
        # does not leak which accounts exist.
        raise HTTPException(status_code=400, detail="Invalid email or password")

    if not verify_password(password, user.password):
        raise HTTPException(status_code=400, detail="Invalid email or password")

    if not user.is_verified:
        raise HTTPException(status_code=400, detail="Verify email to login")

    return {
        "access_token": create_access_token(data=_token_claims(user)),
        "refresh_token": create_refresh_token(data=_token_claims(user)),
        "token_type": "bearer",
        "user": {
            "id": str(user.id),
            "name": user.user_name,
            "email": user.email_id,
            "is_verified": user.is_verified,
        },
    }
src/auth/utils.py ADDED
@@ -0,0 +1,206 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import smtplib
3
+ import os
4
+ import uuid
5
+ from email.mime.text import MIMEText
6
+ import logging
7
+ import traceback
8
+ from passlib.context import CryptContext
9
+ from src.core.database import get_async_session
10
+ from sqlmodel.ext.asyncio.session import AsyncSession
11
+ from jose import jwt, JWTError
12
+ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
13
+ from datetime import datetime, timedelta
14
+ from cryptography.fernet import Fernet, InvalidToken
15
+ from fastapi import Depends, HTTPException, status
16
+ from src.core.models import Users
17
+ from src.core.config import settings
18
+
19
+
20
# JWT signing configuration (sourced from the environment via Settings).
SECRET_KEY = settings.SECRET_KEY
ALGORITHM = settings.JWT_ALGORITHM
ACCESS_TOKEN_EXPIRE_MINUTES = settings.JWT_EXPIRE
logger = logging.getLogger(__name__)

# SMTP credentials for the (currently commented-out) verification-email sender.
SMTP_SERVER = settings.EMAIL_SERVER
SMTP_PORT = settings.EMAIL_PORT
SMTP_EMAIL = settings.EMAIL_USERNAME
SMTP_PASSWORD = settings.EMAIL_PASSWORD

# Symmetric key used to encrypt email-verification tokens (Fernet).
FERNET_KEY = settings.FERNET_KEY
VERIFICATION_BASE_URL = settings.VERIFICATION_BASE_URL


# Passlib context: bcrypt hashing with automatic deprecation handling.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
35
+
36
+
37
def hash_password(password: str) -> str:
    """Hash a plaintext password with the module's bcrypt passlib context."""
    hashed = pwd_context.hash(password)
    return hashed
40
+
41
+
42
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Return True when *plain_password* matches the stored bcrypt hash."""
    matches = pwd_context.verify(plain_password, hashed_password)
    return matches
45
+
46
+
47
def create_access_token(data: dict) -> str:
    """Encode *data* as a signed JWT with a short (minutes-scale) expiry.

    The caller's dict is copied, so the input is never mutated.
    """
    to_encode = data.copy()
    # NOTE(review): datetime.utcnow() is naive and deprecated in 3.12+;
    # consider datetime.now(timezone.utc) once `timezone` is imported here.
    expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode.update({"exp": expire})
    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
    return encoded_jwt
54
+
55
+
56
+ # def send_verification_email(to_email: str, token: str):
57
+ # """Send verification email using smtplib with detailed debug logs."""
58
+ # subject = f"Verify your {settings.APP_NAME} Account"
59
+ # verification_link = f"{VERIFICATION_BASE_URL}/auth/verify-email?token={token}"
60
+
61
+ # body = f"""
62
+ # Hi,
63
+
64
+ # Please verify your {settings.APP_NAME} account by clicking the link below:
65
+ # {verification_link}
66
+
67
+ # This link will expire in 24 hours.
68
+
69
+ # Regards,
70
+ # {settings.APP_NAME} Team
71
+ # """
72
+
73
+ # msg = MIMEText(body)
74
+ # msg["Subject"] = subject
75
+ # msg["From"] = SMTP_EMAIL
76
+ # msg["To"] = to_email
77
+
78
+ # logger.info("🟢 Starting send_verification_email()")
79
+ # logger.info(f"📨 To: {to_email}")
80
+ # logger.info(f"📤 SMTP Server: {SMTP_SERVER}:{SMTP_PORT}")
81
+
82
+ # try:
83
+ # logger.info("🔌 Connecting to SMTP server...")
84
+ # with smtplib.SMTP(SMTP_SERVER, SMTP_PORT, timeout=30) as server:
85
+ # logger.info("✅ Connected successfully.")
86
+
87
+ # logger.info("🔒 Starting TLS...")
88
+ # server.starttls()
89
+ # logger.info("✅ TLS secured.")
90
+
91
+ # logger.info("🔑 Logging in to SMTP server...")
92
+ # server.login(SMTP_EMAIL, SMTP_PASSWORD)
93
+ # logger.info("✅ Logged in successfully.")
94
+
95
+ # # Send email
96
+ # logger.info("📧 Sending email message...")
97
+ # server.send_message(msg)
98
+ # logger.info(f"✅ Email successfully sent to {to_email}")
99
+
100
+ # except smtplib.SMTPAuthenticationError as e:
101
+ # logger.error("❌ Authentication failed — check email or app password.")
102
+ # logger.error(f"Error details: {e}")
103
+ # logger.error(traceback.format_exc())
104
+ # raise
105
+ # except smtplib.SMTPConnectError as e:
106
+ # logger.error("❌ Could not connect to SMTP server.")
107
+ # logger.error(f"Error details: {e}")
108
+ # logger.error(traceback.format_exc())
109
+ # raise
110
+ # except smtplib.SMTPRecipientsRefused as e:
111
+ # logger.error("❌ Recipient address refused.")
112
+ # logger.error(f"Error details: {e}")
113
+ # logger.error(traceback.format_exc())
114
+ # raise
115
+ # except smtplib.SMTPException as e:
116
+ # logger.error("❌ General SMTP error occurred.")
117
+ # logger.error(f"Error details: {e}")
118
+ # logger.error(traceback.format_exc())
119
+ # raise
120
+ # except Exception as e:
121
+ # logger.error("❌ Unknown error occurred while sending verification email.")
122
+ # logger.error(f"Error details: {e}")
123
+ # logger.error(traceback.format_exc())
124
+ # raise
125
+
126
+
127
# Fernet instance built once at import; raises at startup if FERNET_KEY is
# not a valid 32-byte urlsafe-base64 key.
fernet = Fernet(FERNET_KEY.encode())


def create_verification_token(user_id: str, expires_in_hours: int = 24) -> str:
    """Create an encrypted, self-expiring email-verification token.

    The payload carries the user id ("sub") and an absolute expiry
    timestamp ("exp"); verify_verification_token() enforces the expiry.
    """
    payload = {
        "sub": user_id,
        "exp": (datetime.utcnow() + timedelta(hours=expires_in_hours)).timestamp(),
    }
    token = fernet.encrypt(json.dumps(payload).encode()).decode()
    return token
138
+
139
+
140
async def verify_verification_token(token: str) -> str:
    """Decrypt a verification token and return the user id it carries.

    Raises:
        ValueError: if the token is tampered with, malformed, or expired.
        (The verify-email endpoint maps ValueError to HTTP 400.)
    """
    try:
        decrypted = fernet.decrypt(token.encode())
        data = json.loads(decrypted.decode())

        # Naive timestamps: utcnow().timestamp() on creation and
        # fromtimestamp() here round-trip consistently, so the comparison
        # against utcnow() is valid.
        exp = datetime.fromtimestamp(data["exp"])
        if datetime.utcnow() > exp:
            raise ValueError("Verification link expired")

        return data["sub"]

    except InvalidToken:
        raise ValueError("Invalid verification link")
    except (KeyError, json.JSONDecodeError, UnicodeDecodeError):
        # Decrypted payload is not the JSON shape we produce -- previously
        # these leaked as a 500; treat them as an invalid link instead.
        raise ValueError("Invalid verification link")
154
+
155
+
156
# Extracts "Authorization: Bearer <token>" credentials for the dependencies below.
bearer_scheme = HTTPBearer()


def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
) -> str:
    """Decode the bearer JWT and return its subject (the user id).

    Raises 401 when the token lacks a subject, or when jose raises
    JWTError (malformed, expired, or wrongly-signed token).
    """
    token = credentials.credentials

    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        user_id: str = payload.get("sub")

        if user_id is None:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid token: missing user id",
            )
        return user_id

    except JWTError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
        )
181
+
182
+
183
async def get_current_active_user(
    session: AsyncSession = Depends(get_async_session),
    user_id: str = Depends(get_current_user),
) -> Users:
    """Resolve the authenticated user's DB row and enforce verification.

    Raises 401 for a token subject that is not a valid UUID, 404 when no
    matching user exists, and 403 when the account is unverified.
    """
    try:
        user_uuid = uuid.UUID(user_id)
    except ValueError:
        # Previously this ValueError escaped as a 500; a malformed subject
        # means the token was not minted by us, so answer 401.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token: malformed user id",
        )

    user = await session.get(Users, user_uuid)
    if not user:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="User not found"
        )
    if not user.is_verified:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN, detail="User not verified"
        )
    return user
198
+
199
+
200
def create_refresh_token(data: dict, expires_days: int = 7):
    """Build a long-lived JWT used to mint new access tokens.

    The payload is tagged with ``type: refresh`` so it can be told apart
    from short-lived access tokens during validation.
    """
    claims = dict(data)
    claims["exp"] = datetime.utcnow() + timedelta(days=expires_days)
    claims["type"] = "refresh"
    return jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)
src/chatbot/__init__.py ADDED
File without changes
src/chatbot/config.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
# pydantic v2 moved BaseSettings into the pydantic-settings package (see
# src/core/config.py, which already imports it from there); importing it
# from pydantic raises PydanticImportError under v2.
from pydantic_settings import BaseSettings


class HomeSettings(BaseSettings):
    """Feature toggles for the chatbot module, loaded from the environment."""

    # NOTE(review): class name retained from the home-module template.
    FEATURE_ENABLED: bool = True


home_settings = HomeSettings()
src/chatbot/constants.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
# User-facing copy for the chatbot module (wording shared with home/feed).
WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen"
# Fixed grammar/punctuation of the farewell message.
EXIT_MESSAGE = "Thank you, have a wonderful day"
src/chatbot/dependencies.py ADDED
File without changes
src/chatbot/embedding.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # to run this file you need model.onnx_data on the assets/onnx folder or you can obtain it from here.: https://huggingface.co/onnx-community/embeddinggemma-300m-ONNX/tree/main/onnx
2
+ # model can also be loaded directly from autoModel.pretrained by using the same link "onnx-community/embeddinggemma-300m-ONNX"
3
+
4
+ import asyncio
5
+ import os
6
+ from typing import List
7
+
8
+ import numpy as np
9
+
10
+ # import onnxruntime as ort
11
+ from transformers import AutoTokenizer
12
+
13
+ BASE_DIR = os.path.dirname(__file__)
14
+
15
+ # TOKENIZER_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "assets", "tokenizer"))
16
+ TOKENIZER_DIR = "onnx-community/embeddinggemma-300m-ONNX"
17
+
18
+ # MODEL_DIR = os.path.abspath(
19
+ # os.path.join(BASE_DIR, "..", "assets", "onnx", "model.onnx")
20
+ # )
21
+
22
+
23
class EmbeddingModel:
    """Tokenizer-backed stand-in for the embeddinggemma ONNX model.

    The ONNX inference path is disabled for this deployment (the
    ``model.onnx_data`` weights are not shipped -- see the note at the top
    of this module), so ``embed_text`` returns raw token ids rather than
    semantic embedding vectors.
    """

    def __init__(self):
        # Only the tokenizer is loaded; model weights are not required.
        self.tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR)
        # No ONNX session in this build.  Previously this attribute was never
        # initialized, so cleanup() raised AttributeError; it is now always set.
        self.session = None

    async def embed_text(self, text: str, max_length: int = 512) -> List[float]:
        """Tokenize *text* and return the flat list of token ids.

        WARNING: these are vocabulary indices, NOT embedding vectors -- the
        ONNX forward pass is currently disabled.  Downstream code that
        expects a fixed-size embedding (e.g. the Vector(768) column in
        KnowledgeChunk) should be reviewed before relying on this output.
        """
        encoded = self.tokenizer(
            text,
            return_tensors="np",
            truncation=True,
            padding="longest",
            max_length=max_length,
        )
        input_ids = encoded["input_ids"].astype(np.int64)
        return input_ids.flatten().tolist()

    def cleanup(self):
        """Release the ONNX session if one was ever created (safe no-op here)."""
        if self.session is not None:
            self.session = None
            print("ONNX runtime session closed.")
+ print("ONNX runtime session closed.")
92
+
93
+
94
# Module-level singleton: importing this module loads the tokenizer eagerly.
embedding_model = EmbeddingModel()


async def test_tokenizer():
    """Manual smoke check (run via asyncio), not an automated test."""
    # Sample query kept verbatim (including the "telll" typo).
    text = "What does the company telll about moonlighting"
    tokens = await embedding_model.embed_text(text)
    print("Tokenized text:", tokens)
src/chatbot/exceptions.py ADDED
File without changes
src/chatbot/models.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from datetime import datetime
3
+ from typing import List
4
+
5
+ from pgvector.sqlalchemy import Vector
6
+ from sqlalchemy import Column
7
+ from sqlmodel import Field, Relationship, SQLModel
8
+
9
+
10
class KnowledgeBase(SQLModel, table=True):
    """A named document collection; chunks reference it via kb_id."""

    __tablename__ = "knowledge_base"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    name: str = Field(nullable=False)
    description: str | None = None
    created_at: datetime = Field(default_factory=datetime.now)
    knowledge_chunk: List["KnowledgeChunk"] = Relationship(
        back_populates="knowledge_base"
    )


class KnowledgeChunk(SQLModel, table=True):
    """One text chunk of a knowledge base plus its pgvector embedding.

    NOTE(review): the column is a fixed Vector(768), but the current
    EmbeddingModel.embed_text returns a variable-length token-id list --
    confirm what is actually inserted here.
    """

    __tablename__ = "knowledge_chunk"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    kb_id: uuid.UUID = Field(foreign_key="knowledge_base.id", nullable=False)
    # 0-based position of the chunk within the source document.
    chunk_index: int
    chunk_text: str
    embedding: List[float] = Field(sa_column=Column(Vector(768)))
    knowledge_base: "KnowledgeBase" = Relationship(back_populates="knowledge_chunk")
src/chatbot/router.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import shutil
3
+ import tempfile
4
+ from typing import Optional
5
+
6
+ from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile
7
+ from sqlalchemy import text
8
+ from sqlmodel.ext.asyncio.session import AsyncSession
9
+
10
+ from src.core.database import get_async_session
11
+
12
+ from .embedding import embedding_model
13
+ from .schemas import (
14
+ SemanticSearchRequest,
15
+ SemanticSearchResult,
16
+ TokenizeRequest,
17
+ TokenizeResponse,
18
+ UploadKBResponse,
19
+ )
20
+ from .service import process_pdf_and_store
21
+
22
+ router = APIRouter(prefix="/chatbot", tags=["chatbot"])
23
+
24
+
25
# before hitting this endpoint make sure the model.data & model.onnx_data is available on the asset/onnx folder
@router.post("/upload-pdf", response_model=UploadKBResponse)
async def upload_pdf(
    file: UploadFile = File(...),
    name: str = Form(...),
    description: Optional[str] = Form(None),
    session: AsyncSession = Depends(get_async_session),
):
    """Ingest a PDF: extract text, chunk, embed, and persist a knowledge base.

    Returns the new KB id plus the number of chunks stored.  Raises 400 for
    anything that is not a .pdf upload.
    """
    # Case-insensitive extension check; file.filename may be None for
    # malformed multipart bodies, so guard before calling .endswith().
    if not file.filename or not file.filename.lower().endswith(".pdf"):
        raise HTTPException(
            status_code=400, detail="Only PDF files are supported for now."
        )

    tmp_dir = tempfile.mkdtemp()
    tmp_path = os.path.join(tmp_dir, file.filename)
    try:
        # Spool the upload to disk so PyPDF2 gets a real seekable file.
        with open(tmp_path, "wb") as out_f:
            shutil.copyfileobj(file.file, out_f)

        with open(tmp_path, "rb") as fobj:
            result = await process_pdf_and_store(fobj, name, description, session)

        return UploadKBResponse(
            kb_id=result["kb_id"],
            name=result["name"],
            chunks_stored=result["chunks_stored"],
        )
    finally:
        # Best-effort cleanup of the whole temp dir; rmtree also handles
        # partial writes, unlike the previous os.remove + os.rmdir pair.
        shutil.rmtree(tmp_dir, ignore_errors=True)
58
+
59
+
60
@router.post("/tokenize", response_model=TokenizeResponse)
async def tokenize_text(payload: TokenizeRequest):
    """Tokenize raw text with the embedding model's tokenizer."""
    try:
        batch = embedding_model.tokenizer(
            payload.text,
            return_tensors="np",
            truncation=True,
            padding="longest",
            max_length=512,
        )
        ids = batch["input_ids"][0].tolist()
        mask = batch["attention_mask"][0].tolist()
        return TokenizeResponse(input_ids=ids, attention_mask=mask)
    except Exception as e:
        # Surface tokenizer failures as a 500 with the error text.
        raise HTTPException(status_code=500, detail=str(e))
78
+
79
+
80
@router.post("/semantic-search", response_model=list[SemanticSearchResult])
async def semantic_search(
    payload: SemanticSearchRequest, session: AsyncSession = Depends(get_async_session)
):
    """Nearest-neighbour search over knowledge_chunk using pgvector.

    ``<=>`` is pgvector's cosine-distance operator, so lower scores mean
    closer matches; results are the top_k nearest chunks.
    """
    if len(payload.embedding) == 0:
        raise HTTPException(status_code=400, detail="Embedding cannot be empty.")

    q_vector = payload.embedding
    top_k = payload.top_k or 3

    # NOTE(review): the raw Python list is bound as :query_vec -- pgvector
    # typically needs a vector literal or CAST(:query_vec AS vector);
    # confirm this executes against the target driver.
    sql = text(
        """
        SELECT id, kb_id, chunk_text, embedding <=> :query_vec AS score
        FROM knowledge_chunk
        ORDER BY embedding <=> :query_vec
        LIMIT :top_k
        """
    )

    # NOTE(review): params are passed positionally to session.exec(); verify
    # sqlmodel's AsyncSession.exec accepts them this way (execute(sql, params)
    # is the SQLAlchemy-documented form).
    rows = await session.exec(sql, {"query_vec": q_vector, "top_k": top_k})
    rows = rows.fetchall()

    return [
        SemanticSearchResult(
            chunk_id=str(r.id),
            kb_id=str(r.kb_id),
            text=r.chunk_text,
            score=float(r.score),
        )
        for r in rows
    ]
src/chatbot/schemas.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from typing import List, Optional
3
+
4
+ from pydantic import BaseModel
5
+
6
+
7
class UploadKBResponse(BaseModel):
    """Response for /chatbot/upload-pdf: summary of the stored knowledge base."""

    kb_id: uuid.UUID
    name: str
    chunks_stored: int


class UploadKBRequest(BaseModel):
    """Metadata accompanying a KB upload (the router currently uses form fields)."""

    name: str
    description: Optional[str] = None


class TokenizeRequest(BaseModel):
    """Raw text to tokenize."""

    text: str


class TokenizeResponse(BaseModel):
    """Token ids and attention mask for a single input sequence."""

    input_ids: List[int]
    attention_mask: List[int]


class SemanticSearchRequest(BaseModel):
    """Query embedding plus optional result count (defaults to 3)."""

    embedding: List[float]
    top_k: Optional[int] = 3


class SemanticSearchResult(BaseModel):
    """One matched chunk; score is pgvector cosine distance (lower = closer)."""

    chunk_id: str
    kb_id: str
    text: str
    score: float
src/chatbot/service.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+
3
+ from sqlmodel.ext.asyncio.session import AsyncSession
4
+
5
+ from .embedding import embedding_model
6
+ from .models import KnowledgeBase, KnowledgeChunk
7
+ from .utils import (
8
+ chunk_sentences_with_overlap,
9
+ extract_text_from_pdf_fileobj,
10
+ split_into_sentences,
11
+ )
12
+
13
# Chunking knobs, overridable via environment variables.
DEFAULT_MAX_WORDS = int(os.getenv("CHUNK_MAX_WORDS", "200"))
DEFAULT_OVERLAP = int(os.getenv("CHUNK_OVERLAP_WORDS", "40"))


async def process_pdf_and_store(
    fileobj, kb_name: str, kb_description: str | None, session: AsyncSession
):
    """Extract a PDF's text, chunk it, embed each chunk, and persist everything.

    Creates one KnowledgeBase row (committed first so kb.id is assigned),
    then one KnowledgeChunk per chunk.  Returns a dict with kb_id, name,
    and chunks_stored.

    NOTE(review): embed_text currently returns variable-length token ids,
    while KnowledgeChunk.embedding is a fixed Vector(768) column -- confirm
    inserts succeed before relying on this path.
    """
    raw_text = extract_text_from_pdf_fileobj(fileobj)

    sentences = split_into_sentences(raw_text)

    chunks = chunk_sentences_with_overlap(
        sentences, max_words=DEFAULT_MAX_WORDS, overlap_words=DEFAULT_OVERLAP
    )

    kb = KnowledgeBase(name=kb_name, description=kb_description)
    session.add(kb)
    await session.commit()
    await session.refresh(kb)

    chunk_objs = []
    for idx, chunk_text in enumerate(chunks):
        # Embeddings are computed sequentially; all chunks commit together below.
        emb = await embedding_model.embed_text(chunk_text)

        chunk = KnowledgeChunk(
            kb_id=kb.id, chunk_index=idx, chunk_text=chunk_text, embedding=emb
        )
        session.add(chunk)
        chunk_objs.append(chunk)

    await session.commit()

    return {"kb_id": kb.id, "name": kb_name, "chunks_stored": len(chunk_objs)}
src/chatbot/utils.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ from typing import List
3
+ import PyPDF2
4
+
5
+
6
def clean_text(text: str) -> str:
    """Normalize whitespace and common PDF-extraction artifacts in *text*."""
    substitutions = (
        (r'\s+', ' '),               # collapse whitespace runs
        (r'\s+([,.!?;:])', r'\1'),   # drop space before punctuation
        (r'[_\-]{2,}', ' '),         # form-field underscores / rule lines
        (r'\.{2,}', '.'),            # ellipses / dot leaders -> single period
        (r'\s{2,}', ' '),            # re-collapse doubles introduced above
    )
    cleaned = text
    for pattern, replacement in substitutions:
        cleaned = re.sub(pattern, replacement, cleaned)
    return cleaned.strip()
13
+
14
+
15
def extract_text_from_pdf_fileobj(fileobj) -> str:
    """Extract and normalize the text of every page in a PDF file object."""
    pages = PyPDF2.PdfReader(fileobj).pages
    # extract_text() may return None/"" for image-only pages; skip those.
    page_texts = [t for page in pages if (t := page.extract_text())]
    return clean_text(" ".join(page_texts))
23
+
24
+
25
def split_into_sentences(text: str) -> List[str]:
    """Split *text* on sentence-ending punctuation followed by whitespace."""
    parts = re.split(r'(?<=[.!?])\s+', text)
    return [part.strip() for part in parts if part.strip()]
29
+
30
+
31
def chunk_sentences_with_overlap(sentences: List[str], max_words: int = 200, overlap_words: int = 40) -> List[str]:
    """Greedily pack sentences into word-bounded chunks with a word overlap.

    Whole sentences are accumulated until adding the next one would push the
    chunk past ``max_words``; the chunk is then flushed and, when
    ``overlap_words`` > 0, its trailing words seed the next chunk so context
    carries across boundaries.  A single sentence longer than ``max_words``
    still becomes its own chunk.
    """
    chunks: List[str] = []
    buffer: List[str] = []
    buffered_words = 0

    for sentence in sentences:
        sentence_words = len(sentence.split())

        if buffer and buffered_words + sentence_words > max_words:
            flushed = " ".join(buffer)
            chunks.append(flushed)

            if overlap_words > 0:
                carry = " ".join(flushed.split()[-overlap_words:])
                buffer = [carry] if carry else []
                buffered_words = len(carry.split())
            else:
                buffer, buffered_words = [], 0

        buffer.append(sentence)
        buffered_words += sentence_words

    if buffer:
        chunks.append(" ".join(buffer))

    return chunks
src/core/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from src.auth import models as auth_models
2
+ from src.chatbot import models as chatbot_models
3
+ from src.core import models as core_models
4
+ from src.feed import models as feed_models
5
+ from src.home import models as home_models
6
+ from src.profile import models as profile_models
src/core/config.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import PostgresDsn, computed_field
2
+ from pydantic_settings import BaseSettings, SettingsConfigDict
3
+
4
+
5
class SMTPConfig(BaseSettings):
    # NOTE(review): not referenced by Settings below -- the EMAIL_* fields on
    # Settings are what the app reads; confirm whether this class is still used.
    server: str
    port: int
    username: str
    password: str


class Settings(BaseSettings):
    """Application settings loaded from the environment / .env file."""

    JWT_ALGORITHM: str
    JWT_EXPIRE: int  # access-token lifetime in minutes (see src.auth.utils)
    SECRET_KEY: str

    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    POSTGRES_HOST: str
    POSTGRES_DB: str

    APP_NAME: str
    ENV: str
    DEBUG: bool
    PORT: int

    EMAIL_SERVER: str
    EMAIL_PORT: int
    EMAIL_USERNAME: str
    EMAIL_PASSWORD: str

    FERNET_KEY: str
    VERIFICATION_BASE_URL: str

    @computed_field
    @property
    def DATABASE_URL(self) -> PostgresDsn:
        """Sync DB URL (psycopg default driver)."""
        # NOTE(review): returns a plain str despite the PostgresDsn annotation.
        return f"postgresql://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}/{self.POSTGRES_DB}"

    @computed_field
    @property
    def ASYNC_DATABASE_URL(self) -> PostgresDsn:
        """Async DB URL (asyncpg driver)."""
        return f"postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@{self.POSTGRES_HOST}/{self.POSTGRES_DB}"

    model_config = SettingsConfigDict(env_file=".env", case_sensitive=False)


settings = Settings()
src/core/database.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import AsyncGenerator
2
+
3
+ from dotenv import load_dotenv
4
+ from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
5
+ from sqlmodel import SQLModel, create_engine
6
+ from sqlmodel.ext.asyncio.session import AsyncSession
7
+
8
+ from src.core import *
9
+ from src.core.config import settings
10
+
11
+ load_dotenv()
12
+
13
+ engine = create_engine(
14
+ settings.DATABASE_URL, echo=True
15
+ ) # to false on prod just to chcek for now
16
+
17
+ async_engine = create_async_engine(
18
+ url=settings.ASYNC_DATABASE_URL, future=True, connect_args={"ssl": True}
19
+ )
20
+
21
+ async_session = async_sessionmaker(
22
+ class_=AsyncSession, bind=async_engine, expire_on_commit=False
23
+ )
24
+
25
+
26
+ def init_db():
27
+ SQLModel.metadata.create_all(engine)
28
+
29
+
30
+ async def get_async_session() -> AsyncGenerator[AsyncSession, None]:
31
+ async with async_session() as session:
32
+ yield session
33
+
34
+
35
+ if __name__ == "__main__":
36
+ print("Table creating")
37
+ init_db()
38
+ print("Table Created successfully!")
src/core/exceptions.py ADDED
File without changes
src/core/models.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from datetime import date, datetime
3
+ from enum import Enum
4
+ from typing import List, Optional
5
+
6
+ from sqlalchemy import CheckConstraint, UniqueConstraint
7
+ from sqlmodel import Field, Relationship, SQLModel
8
+
9
+
10
class AssetStatus(str, Enum):
    """Lifecycle states an asset can be in."""

    ACTIVE = "Active"
    UNAVAILABLE = "Unavailable"
    ON_REQUEST = "On Request"
    IN_SERVICE = "In Service"


class Users(SQLModel, table=True):
    """Account record; password stores a bcrypt hash (see src.auth.utils)."""

    __tablename__ = "users"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    email_id: str = Field(unique=True, nullable=False)
    password: str = Field(nullable=False)  # bcrypt hash, never plaintext
    user_name: str = Field(nullable=False)
    # server_default keeps raw-SQL inserts consistent with the ORM default.
    is_verified: bool = Field(
        default=False, sa_column_kwargs={"server_default": "false"}
    )
    dob: Optional[date] = None
    address: Optional[str] = None
    profile_picture: Optional[str] = None
    created_at: datetime = Field(default_factory=datetime.now)
    asset: List["Assets"] = Relationship(back_populates="user")


class Teams(SQLModel, table=True):
    """A team users can belong to via UserTeamsRole."""

    __tablename__ = "teams"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    name: str = Field(unique=True, nullable=False)


class Roles(SQLModel, table=True):
    """A role a user can hold within a team."""

    __tablename__ = "roles"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    name: str = Field(unique=True, nullable=False)


class UserTeamsRole(SQLModel, table=True):
    """Association table: (user, team, role) membership triples."""

    __tablename__ = "user_teams_role"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
    team_id: uuid.UUID = Field(foreign_key="teams.id", nullable=False)
    role_id: uuid.UUID = Field(foreign_key="roles.id", nullable=False)


class Assets(SQLModel, table=True):
    """A physical asset (laptop, monitor, ...) assigned to a user."""

    __tablename__ = "assets"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
    name: str = Field(nullable=False)
    type: str = Field(nullable=False)
    status: AssetStatus = Field(default=AssetStatus.UNAVAILABLE)
    user: "Users" = Relationship(back_populates="asset")


class EmotionLogs(SQLModel, table=True):
    """At most one emotion log per user per day; emotions are bounded 1-7
    both at the model level (ge/le) and by the DB CHECK constraints."""

    __tablename__ = "emotion_logs"
    __table_args__ = (
        UniqueConstraint("user_id", "log_date"),
        CheckConstraint("morning_emotion BETWEEN 1 AND 7 or morning_emotion IS NULL"),
        CheckConstraint("evening_emotion BETWEEN 1 AND 7 or evening_emotion IS NULL"),
    )
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
    morning_emotion: Optional[int] = Field(default=None, ge=1, le=7)
    evening_emotion: Optional[int] = Field(default=None, ge=1, le=7)
    log_date: date = Field(default_factory=date.today)
src/core/pagination.py ADDED
File without changes
src/core/schemas.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Generic, TypeVar
2
+
3
+ from pydantic import BaseModel
4
+
5
+ T = TypeVar("T")
6
+
7
+
8
class BaseResponse(BaseModel, Generic[T]):
    """Generic envelope pairing an HTTP status code with a typed payload."""

    status_code: int
    data: T
src/core/temp_feed_db.py ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import date
2
+
3
+ from sqlmodel import Session
4
+
5
+ from src.core.database import engine
6
+ from src.core.models import Assets, EmotionLogs, Roles, Teams, Users, UserTeamsRole
7
+ from src.feed.models import Comments, Likes, Posts
8
+
9
+
10
def seed_users(session: Session):
    """Insert two demo users and return them (ids populated after commit).

    NOTE(review): passwords are literal placeholders ("hashed_pass1"), not
    real bcrypt hashes -- these accounts cannot pass verify_password().
    """
    users = [
        Users(
            email_id="tilak@example.com",
            password="hashed_pass1",
            user_name="Tilak",
            dob=date(2001, 5, 21),
            address="Chennai",
            profile_picture="tilak.png",
        ),
        Users(
            email_id="arun@example.com",
            password="hashed_pass2",
            user_name="Arun",
            dob=date(2000, 8, 15),
            address="Bangalore",
            profile_picture="arun.png",
        ),
    ]
    session.add_all(users)
    session.commit()
    print("Users added.")
    return users
33
+
34
+
35
def seed_teams(session: Session):
    """Insert the three demo teams and return them."""
    teams = [
        Teams(name="Development"),
        Teams(name="Marketing"),
        Teams(name="Design"),
    ]
    session.add_all(teams)
    session.commit()
    print("Teams added.")
    return teams


def seed_roles(session: Session):
    """Insert the three demo roles and return them."""
    roles = [
        Roles(name="Admin"),
        Roles(name="Member"),
        Roles(name="Lead"),
    ]
    session.add_all(roles)
    session.commit()
    print("Roles added.")
    return roles


def seed_user_teams_roles(session: Session, users, teams, roles):
    """Link the two seeded users to a team and role each."""
    mappings = [
        UserTeamsRole(user_id=users[0].id, team_id=teams[0].id, role_id=roles[0].id),
        UserTeamsRole(user_id=users[1].id, team_id=teams[1].id, role_id=roles[1].id),
    ]
    session.add_all(mappings)
    session.commit()
    print("User-Team-Role mappings added.")


def seed_assets(session: Session, users):
    """Assign one demo asset to each seeded user; returns the assets."""
    assets = [
        Assets(user_id=users[0].id, name="MacBook Pro", type="Laptop"),
        Assets(user_id=users[1].id, name="Dell Monitor", type="Monitor"),
    ]
    session.add_all(assets)
    session.commit()
    print("Assets added.")
    return assets
78
+
79
+
80
def seed_emotion_logs(session: Session, users):
    """Seed one emotion log per user.

    EmotionLogs bounds both emotions to 1..7 (model ge/le plus the DB CHECK
    constraints); the previous seed used 8, which violates that range and
    fails on insert, so all values are kept within 1-7 here.
    """
    logs = [
        EmotionLogs(user_id=users[0].id, morning_emotion=7, evening_emotion=6),
        EmotionLogs(user_id=users[1].id, morning_emotion=6, evening_emotion=7),
    ]
    session.add_all(logs)
    session.commit()
    print("Emotion logs added.")
88
+
89
+
90
def seed_posts(session: Session, users):
    """Create one demo post per user and return them."""
    posts = [
        Posts(
            user_id=users[0].id,
            caption="New sprint kickoff!",
            image="sprint.png",
        ),
        Posts(
            user_id=users[1].id,
            caption="Design updates rolling out soon!",
            image="design.png",
        ),
    ]
    session.add_all(posts)
    session.commit()
    print("Posts added.")
    return posts


def seed_likes(session: Session, users, posts):
    """Each user likes the other user's post (unique per user/post)."""
    likes = [
        Likes(user_id=users[0].id, post_id=posts[1].id),
        Likes(user_id=users[1].id, post_id=posts[0].id),
    ]
    session.add_all(likes)
    session.commit()
    print("Likes added.")


def seed_comments(session: Session, users, posts):
    """Each user comments on the other user's post."""
    comments = [
        Comments(user_id=users[0].id, post_id=posts[1].id, comment="Looks great!"),
        Comments(user_id=users[1].id, post_id=posts[0].id, comment="Can’t wait!"),
    ]
    session.add_all(comments)
    session.commit()
    print("Comments added.")
127
+
128
+
129
def run_all_seeds():
    """Run every seeder in dependency order inside one sync session.

    Order matters: users/teams/roles must exist before the mapping,
    asset, emotion-log, post, like, and comment rows that reference them.
    """
    with Session(engine) as session:
        users = seed_users(session)
        teams = seed_teams(session)
        roles = seed_roles(session)
        seed_user_teams_roles(session, users, teams, roles)
        seed_assets(session, users)
        seed_emotion_logs(session, users)
        posts = seed_posts(session, users)
        seed_likes(session, users, posts)
        seed_comments(session, users, posts)
    print("All data seeded successfully!")


if __name__ == "__main__":
    run_all_seeds()
src/feed/__init__.py ADDED
File without changes
src/feed/config.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
# pydantic v2 moved BaseSettings into the pydantic-settings package (see
# src/core/config.py, which already imports it from there); importing it
# from pydantic raises PydanticImportError under v2.
from pydantic_settings import BaseSettings


class HomeSettings(BaseSettings):
    """Feature toggles for the feed module, loaded from the environment."""

    # NOTE(review): class name retained from the home-module template.
    FEATURE_ENABLED: bool = True


home_settings = HomeSettings()
src/feed/constants.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
# User-facing copy for the feed module (wording shared with home/chatbot).
WELCOME_MESSAGE = "Welcome to Yuvabe's Home Screen"
# Fixed grammar/punctuation of the farewell message.
EXIT_MESSAGE = "Thank you, have a wonderful day"
src/feed/dependencies.py ADDED
File without changes
src/feed/exceptions.py ADDED
File without changes
src/feed/models.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from datetime import datetime
3
+ from enum import Enum
4
+ from typing import Optional
5
+
6
+ from sqlalchemy import UniqueConstraint
7
+ from sqlmodel import Field, SQLModel
8
+
9
+
10
class PostType(str, Enum):
    """Kind of feed post; str-valued so members serialize cleanly to the DB/API."""

    BIRTHDAY = "Birthday"
    NOTICE = "Notice"
    BANNER = "Banner"
    JOB_REQUEST = "Job Request"
15
+
16
+
17
class PostCategory(str, Enum):
    """Visibility scope of a post: team-local or global."""

    TEAM = "Team"
    GLOBAL = "Global"
20
+
21
+
22
class Posts(SQLModel, table=True):
    """ORM table model for a feed post authored by a user."""

    __tablename__ = "posts"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    # Author of the post; FK to users.id (users model defined elsewhere).
    user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
    type: PostType = Field(default=PostType.NOTICE)
    category: PostCategory = Field(default=PostCategory.GLOBAL)
    caption: Optional[str] = None
    # Presumably an image filename/URL, not raw bytes (seed data uses "*.png") — confirm.
    image: Optional[str] = None
    # NOTE(review): datetime.now is naive local time — consider tz-aware UTC.
    created_at: datetime = Field(default_factory=datetime.now, nullable=False)
    # NOTE(review): only set at creation; nothing here updates it on edit — confirm
    # that the edit endpoint refreshes it.
    edited_at: datetime = Field(default_factory=datetime.now)
32
+
33
+
34
class Comments(SQLModel, table=True):
    """ORM table model for a user's comment on a post."""

    __tablename__ = "comments"
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    # Post being commented on; FK to posts.id.
    post_id: uuid.UUID = Field(foreign_key="posts.id", nullable=False)
    # Comment author; FK to users.id.
    user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
    comment: str = Field(nullable=False)
    # NOTE(review): naive local timestamp (datetime.now) — consider tz-aware UTC.
    created_at: datetime = Field(default_factory=datetime.now, nullable=False)
41
+
42
+
43
class Likes(SQLModel, table=True):
    """ORM table model for a user liking a post."""

    __tablename__ = "likes"
    # A user may like a given post at most once, enforced at the DB level.
    __table_args__ = (UniqueConstraint("user_id", "post_id"),)
    id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
    # Liked post; FK to posts.id.
    post_id: uuid.UUID = Field(foreign_key="posts.id", nullable=False)
    # Liking user; FK to users.id.
    user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
    # NOTE(review): naive local timestamp (datetime.now) — consider tz-aware UTC.
    liked_at: datetime = Field(default_factory=datetime.now, nullable=False)
src/feed/router.py ADDED
File without changes
src/feed/schemas.py ADDED
File without changes