deepakpant commited on
Commit
a5883c9
·
0 Parent(s):

Initial commit

Browse files
.devcontainer/devcontainer.json ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "resume-maker-ai-agent",
3
+ // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
4
+ "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
5
+ "features": {
6
+ "ghcr.io/devcontainers/features/python:1": {
7
+ "version": "3.12"
8
+ },
9
+ "ghcr.io/devcontainers/features/git:1": {},
10
+ "ghcr.io/devcontainers-contrib/features/poetry:2": {},
11
+ "ghcr.io/devcontainers/features/docker-in-docker:2.12.0": {
12
+ "version": "latest",
13
+ "moby": true
14
+ }
15
+ },
16
+ // Use 'postCreateCommand' to run commands after the container is created.
17
+ "postCreateCommand": "./.devcontainer/postCreateCommand.sh",
18
+ "forwardPorts": [7860, 8000],
19
+ // Configure tool-specific properties.
20
+ "customizations": {
21
+ "vscode": {
22
+ "extensions": [
23
+ "ms-python.python",
24
+ "editorconfig.editorconfig",
25
+ "ms-azuretools.vscode-docker", // Docker
26
+ "ms-python.isort", // isort
27
+ "visualstudioexptteam.vscodeintellicode", // IntelliCode
28
+ "codeium.codeium", // Codeium AI
29
+ "ms-vscode.makefile-tools", // Makefile tool
30
+ "ms-python.python", // Python
31
+ "ms-python.black-formatter", // Black
32
+ "ms-python.debugpy", // Debugger for Python
33
+ "redhat.vscode-yaml" // YAML
34
+ ],
35
+ "settings": {
36
+ "python.testing.pytestArgs": ["tests"],
37
+ "python.testing.unittestEnabled": false,
38
+ "python.testing.pytestEnabled": true,
39
+ "python.defaultInterpreterPath": "/workspaces/resume-maker-ai-agent/.venv/bin/python",
40
+ "python.testing.pytestPath": "/workspaces/resume-maker-ai-agent/.venv/bin/pytest",
41
+ "python.languageServer": "Pylance",
42
+ "editor.formatOnSave": true,
43
+ "python.analysis.typeCheckingMode": "basic",
44
+ "python.linting.enabled": true,
45
+ "python.linting.pylintEnabled": true
46
+ }
47
+ }
48
+ } //,
49
+ // "hostRequirements": {
50
+ // "cpus": 2,
51
+ // "memory": "4gb",
52
+ // "storage": "10gb"
53
+ // }
54
+ }
.devcontainer/postCreateCommand.sh ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #! /usr/bin/env bash
2
+
3
+ # Install fish terminal
4
+ sudo apt update -y
5
+ sudo apt-get install fish -y
6
+
7
+ # Repo Initialization
8
+ make init-repo
9
+ git config --global --add safe.directory /workspaces/resume-maker-ai-agent
10
+
11
+ # Install Dependencies
12
+ make reset-env
.dockerignore ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python cache files
2
+ **/__pycache__/
3
+ *.pyc
4
+ *.pyo
5
+ *.pyd
6
+
7
+ # Poetry-related files
8
+ .poetry/
9
+
10
+ # Virtual environments
11
+ **/.venv/
12
+ **/venv/
13
+
14
+ # Node.js dependencies
15
+ **/node_modules/
16
+
17
+ # Mypy cache
18
+ **/.mypy_cache/
19
+
20
+ # Pytest cache
21
+ **/.pytest_cache/
22
+
23
+ # Ruff cache
24
+ **/.ruff_cache/
25
+
26
+ # Coverage files
27
+ **/.coverage
28
+ **/coverage.xml
29
+
30
+ # Development tools and IDE-specific files
31
+ *.idea/
32
+ *.vscode/
33
+ **/.vscode/
34
+ .devcontainer/
35
+ .github/
36
+
37
+ # Exclude notebooks
38
+ notebooks/
39
+
40
+ # Exclude test-related files and directories
41
+ tests/
42
+
43
+ # Build and distribution artifacts
44
+ **/build/
45
+ **/dist/
46
+ *.egg-info/
47
+ .eggs/
48
+
49
+ # Logs and temporary files
50
+ *.log
51
+ *.tmp
52
+ *.swp
53
+ *.swo
54
+
55
+ # Site-related files (if applicable)
56
+ **/site/
57
+
58
+ # Sensitive or configuration files
59
+ .env
60
+ *.pem
61
+ *.key
62
+
63
+ # Markdown and configuration files
64
+ mkdocs.yml
65
+ tox.ini
66
+ .pre-commit-config.yaml
67
+ codecov.yaml
68
+ .gitignore
69
+
70
+ # Docker-related files (optional if Docker Compose is used locally but not in the image)
71
+ docker-compose.yml
.editorconfig ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ root = true
2
+
3
+ [*]
4
+ indent_style = space
5
+ indent_size = 4
6
+ trim_trailing_whitespace = true
7
+ insert_final_newline = true
8
+ charset = utf-8
9
+ end_of_line = lf
10
+ max_line_length = 120
11
+
12
+ [*.bat]
13
+ indent_style = tab
14
+ end_of_line = crlf
15
+
16
+ [LICENSE]
17
+ insert_final_newline = false
18
+
19
+ [Makefile]
20
+ indent_style = tab
21
+
22
+ [*.json]
23
+ indent_style = space
24
+ indent_size = 4
.github/actions/setup-poetry-env/action.yml ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "setup-poetry-env"
2
+ description: "Composite action to setup the Python and poetry environment."
3
+
4
+ inputs:
5
+ python-version:
6
+ required: false
7
+ description: "The python version to use"
8
+ default: "3.12"
9
+
10
+ runs:
11
+ using: "composite"
12
+ steps:
13
+ - name: Set up python
14
+ uses: actions/setup-python@v5
15
+ with:
16
+ python-version: ${{ inputs.python-version }}
17
+
18
+ - name: Install Poetry
19
+ env:
20
+ POETRY_VERSION: "1.7.1"
21
+ run: curl -sSL https://install.python-poetry.org | python - -y
22
+ shell: bash
23
+
24
+ - name: Add Poetry to Path
25
+ run: echo "$HOME/.local/bin" >> $GITHUB_PATH
26
+ shell: bash
27
+
28
+ - name: Configure Poetry virtual environment in project
29
+ run: poetry config virtualenvs.in-project true
30
+ shell: bash
31
+
32
+ - name: Load cached venv
33
+ id: cached-poetry-dependencies
34
+ uses: actions/cache@v4
35
+ with:
36
+ path: .venv
37
+ key: venv-${{ runner.os }}-${{ inputs.python-version }}-${{ hashFiles('poetry.lock') }}
38
+
39
+ - name: Install dependencies
40
+ if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
41
+ run: poetry install --no-interaction
42
+ shell: bash
.github/workflows/deploy-docs.yml ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Deploy Documentation
2
+
3
+ on:
4
+ release:
5
+ types: [published]
6
+ branches: [main]
7
+ repository_dispatch:
8
+ types: [package-release]
9
+
10
+ jobs:
11
+ deploy-docs:
12
+ runs-on: ubuntu-latest
13
+
14
+ steps:
15
+ - name: Check out
16
+ uses: actions/checkout@v4
17
+
18
+ - name: Set up the environment
19
+ uses: ./.github/actions/setup-poetry-env
20
+
21
+ - name: Deploy documentation
22
+ run: poetry run mkdocs gh-deploy --force
.github/workflows/hugging_face-deploy.yml ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Deploy to Hugging Face Spaces
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main # or master, depending on your default branch
7
+
8
+ jobs:
9
+ deploy:
10
+ runs-on: ubuntu-latest
11
+ steps:
12
+ - name: Checkout repository
13
+ uses: actions/checkout@v4
14
+ with:
15
+ fetch-depth: 0
16
+
17
+ - name: Configure Git
18
+ run: |
19
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
20
+ git config --global user.name "github-actions[bot]"
21
+
22
+ - name: Push to Hugging Face Space
23
+ env:
24
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
25
+ run: |
26
+ git remote add space https://USER:$HF_TOKEN@huggingface.co/spaces/deepakpant/resume-maker-ai-agent
27
+ git push --force space main
.github/workflows/test-check-build.yml ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Test, check and build pipeline
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - main
7
+ pull_request:
8
+ types: [opened, synchronize, reopened, ready_for_review]
9
+
10
+ jobs:
11
+ quality:
12
+ runs-on: ubuntu-latest
13
+ steps:
14
+ - name: Check out
15
+ uses: actions/checkout@v4
16
+
17
+ - uses: actions/cache@v4
18
+ with:
19
+ path: ~/.cache/pre-commit
20
+ key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
21
+
22
+ - name: Set up the environment
23
+ uses: ./.github/actions/setup-poetry-env
24
+
25
+ - name: Run checks
26
+ run: make lint
27
+
28
+ build:
29
+ runs-on: ubuntu-latest
30
+ needs: [quality]
31
+ strategy:
32
+ matrix:
33
+ python-version: ["3.12"]
34
+ fail-fast: false
35
+ defaults:
36
+ run:
37
+ shell: bash
38
+ steps:
39
+ - name: Check out
40
+ uses: actions/checkout@v4
41
+
42
+ - name: Set up the environment
43
+ uses: ./.github/actions/setup-poetry-env
44
+ with:
45
+ python-version: ${{ matrix.python-version }}
46
+
47
+ - name: Build
48
+ run: make bake
49
+
50
+ check-docs:
51
+ runs-on: ubuntu-latest
52
+ steps:
53
+ - name: Check out
54
+ uses: actions/checkout@v4
55
+
56
+ - name: Set up the environment
57
+ uses: ./.github/actions/setup-poetry-env
58
+
59
+ - name: Check if documentation can be built
60
+ run: poetry run mkdocs build -s
.github/workflows/version-bump-and-release.yml ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Version Bump and Create Release
2
+
3
+ on:
4
+ workflow_dispatch:
5
+ inputs:
6
+ bump_type:
7
+ description: "Type of version bump"
8
+ required: true
9
+ type: choice
10
+ options:
11
+ - patch
12
+ - minor
13
+ - major
14
+
15
+ permissions:
16
+ contents: write
17
+
18
+ jobs:
19
+ bump-version:
20
+ runs-on: ubuntu-latest
21
+ steps:
22
+ - name: Checkout code
23
+ uses: actions/checkout@v4
24
+ with:
25
+ fetch-depth: 0
26
+ token: ${{ secrets.GITHUB_TOKEN }}
27
+
28
+ - name: Set up the environment
29
+ uses: ./.github/actions/setup-poetry-env
30
+ with:
31
+ python-version: "3.12"
32
+
33
+ - name: Configure Git
34
+ run: |
35
+ git config --global user.name 'GitHub Actions'
36
+ git config --global user.email 'actions@github.com'
37
+
38
+ - name: Bump Version
39
+ id: bump
40
+ run: |
41
+ poetry run bump-my-version bump ${{ github.event.inputs.bump_type }}
42
+ echo "NEW_TAG=$(git describe --tags --abbrev=0)" >> $GITHUB_OUTPUT
43
+
44
+ - name: Push changes
45
+ run: |
46
+ git push
47
+ git push --tags
48
+
49
+ - name: Create Release
50
+ uses: softprops/action-gh-release@v2
51
+ with:
52
+ generate_release_notes: true
53
+ tag_name: ${{ steps.bump.outputs.NEW_TAG }}
54
+
55
+ - name: Trigger Package Publish
56
+ uses: peter-evans/repository-dispatch@v3
57
+ with:
58
+ token: ${{ secrets.GITHUB_TOKEN }}
59
+ event-type: package-release
60
+ client-payload: '{"version": "${{ steps.bump.outputs.NEW_TAG }}"}'
.gitignore ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ docs/source
2
+
3
+ # From https://raw.githubusercontent.com/github/gitignore/main/Python.gitignore
4
+
5
+ # Byte-compiled / optimized / DLL files
6
+ __pycache__/
7
+ *.py[cod]
8
+ *$py.class
9
+
10
+ # C extensions
11
+ *.so
12
+
13
+ # Distribution / packaging
14
+ .Python
15
+ build/
16
+ develop-eggs/
17
+ dist/
18
+ downloads/
19
+ eggs/
20
+ .eggs/
21
+ lib/
22
+ lib64/
23
+ parts/
24
+ sdist/
25
+ var/
26
+ wheels/
27
+ share/python-wheels/
28
+ *.egg-info/
29
+ .installed.cfg
30
+ *.egg
31
+ MANIFEST
32
+
33
+ # PyInstaller
34
+ # Usually these files are written by a python script from a template
35
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
36
+ *.manifest
37
+ *.spec
38
+
39
+ # Installer logs
40
+ pip-log.txt
41
+ pip-delete-this-directory.txt
42
+
43
+ # Unit test / coverage reports
44
+ htmlcov/
45
+ .tox/
46
+ .nox/
47
+ .coverage
48
+ .coverage.*
49
+ .cache
50
+ nosetests.xml
51
+ coverage.xml
52
+ *.cover
53
+ *.py,cover
54
+ .hypothesis/
55
+ .pytest_cache/
56
+ cover/
57
+
58
+ # Translations
59
+ *.mo
60
+ *.pot
61
+
62
+ # Django stuff:
63
+ *.log
64
+ local_settings.py
65
+ db.sqlite3
66
+ db.sqlite3-journal
67
+
68
+ # Flask stuff:
69
+ instance/
70
+ .webassets-cache
71
+
72
+ # Scrapy stuff:
73
+ .scrapy
74
+
75
+ # Sphinx documentation
76
+ docs/_build/
77
+
78
+ # PyBuilder
79
+ .pybuilder/
80
+ target/
81
+
82
+ # Jupyter Notebook
83
+ .ipynb_checkpoints
84
+
85
+ # IPython
86
+ profile_default/
87
+ ipython_config.py
88
+
89
+ # pyenv
90
+ # For a library or package, you might want to ignore these files since the code is
91
+ # intended to run in multiple environments; otherwise, check them in:
92
+ # .python-version
93
+
94
+ # pipenv
95
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
96
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
97
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
98
+ # install all needed dependencies.
99
+ #Pipfile.lock
100
+
101
+ # poetry
102
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
103
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
104
+ # commonly ignored for libraries.
105
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
106
+ #poetry.lock
107
+
108
+ # pdm
109
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
110
+ #pdm.lock
111
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
112
+ # in version control.
113
+ # https://pdm.fming.dev/#use-with-ide
114
+ .pdm.toml
115
+
116
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
117
+ __pypackages__/
118
+
119
+ # Celery stuff
120
+ celerybeat-schedule
121
+ celerybeat.pid
122
+
123
+ # SageMath parsed files
124
+ *.sage.py
125
+
126
+ # Environments
127
+ .env
128
+ .venv
129
+ env/
130
+ venv/
131
+ ENV/
132
+ env.bak/
133
+ venv.bak/
134
+
135
+ # Spyder project settings
136
+ .spyderproject
137
+ .spyproject
138
+
139
+ # Rope project settings
140
+ .ropeproject
141
+
142
+ # mkdocs documentation
143
+ /site
144
+
145
+ # mypy
146
+ .mypy_cache/
147
+ .dmypy.json
148
+ dmypy.json
149
+
150
+ # Pyre type checker
151
+ .pyre/
152
+
153
+ # pytype static type analyzer
154
+ .pytype/
155
+
156
+ # Cython debug symbols
157
+ cython_debug/
158
+
159
+ # Vscode config files
160
+ # .vscode/
161
+
162
+ # PyCharm
163
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
164
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
165
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
166
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
167
+ #.idea/
168
+
169
+ # artifact/
170
+ dvc-service-account-key.json
171
+
172
+
173
+ # MacOS
174
+ .DS_Store
.pre-commit-config.yaml ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ repos:
2
+ - repo: https://github.com/pre-commit/pre-commit-hooks
3
+ rev: "v4.4.0"
4
+ hooks:
5
+ - id: check-case-conflict
6
+ - id: check-merge-conflict
7
+ - id: end-of-file-fixer
8
+ - id: trailing-whitespace
9
+ - id: check-yaml
10
+ - id: check-toml
11
+
12
+ - repo: https://github.com/astral-sh/ruff-pre-commit
13
+ rev: "v0.5.2"
14
+ hooks:
15
+ - id: ruff
16
+ args: [--exit-non-zero-on-fix, --config=pyproject.toml]
17
+ - id: ruff-format
18
+ args: [--config=pyproject.toml]
19
+
20
+ - repo: https://github.com/pre-commit/mirrors-prettier
21
+ rev: "v3.0.3"
22
+ hooks:
23
+ - id: prettier
24
+
25
+ # - repo: https://github.com/psf/black
26
+ # rev: 23.9.0
27
+ # hooks:
28
+ # - id: black
29
+
30
+ # - repo: https://github.com/timothycrosley/isort
31
+ # rev: 5.12.0
32
+ # hooks:
33
+ # - id: isort
34
+ # args: [--settings-path=pyproject.toml]
35
+
36
+ - repo: https://github.com/PyCQA/pydocstyle
37
+ rev: 6.3.0
38
+ hooks:
39
+ - id: pydocstyle
40
+ args: [--config=pyproject.toml]
.vscode/launch.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "version": "0.2.0",
3
+ "configurations": [
4
+ {
5
+ "name": "Python Debugger: resume_maker_ai_agent",
6
+ "type": "debugpy",
7
+ "request": "launch",
8
+ "program": "${workspaceFolder}/resume_maker_ai_agent/app.py",
9
+ "args": ["run"],
10
+ "console": "integratedTerminal",
11
+ "justMyCode": true,
12
+ "jinja": true
13
+ }
14
+ ]
15
+ }
.vscode/settings.json ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "files.exclude": {
3
+ "**/.mypy_cache": true,
4
+ "**/__pycache__": true,
5
+ "**/.pytest_cache": true,
6
+ "**/.ruff_cache": true,
7
+ "**/.venv": true,
8
+ "**/venv": true,
9
+ "**/node_modules": true,
10
+ "**/site": true,
11
+ "**/.coverage": true,
12
+ "**/coverage.xml": true,
13
+ "**/build": true,
14
+ "**/dist": true
15
+ }
16
+ }
Dockerfile ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Builder Stage
2
+ FROM python:3.12-slim AS builder
3
+
4
+ RUN useradd -m -u 1000 user
5
+
6
+ # Set environment variables for Poetry
7
+ ENV POETRY_VERSION=1.6.1 \
8
+ POETRY_HOME="/opt/poetry" \
9
+ PATH="/opt/poetry/bin:$PATH"
10
+
11
+ # Install Poetry and necessary tools
12
+ RUN apt-get update && apt-get install -y --no-install-recommends curl \
13
+ && curl -sSL https://install.python-poetry.org | python3 - \
14
+ && apt-get remove -y curl && apt-get clean && rm -rf /var/lib/apt/lists/*
15
+
16
+ # Set working directory
17
+ WORKDIR /app
18
+
19
+ # Copy only the dependency files first to leverage Docker caching
20
+ COPY --chown=user pyproject.toml poetry.lock /app/
21
+
22
+ # Install dependencies (only for building the wheel)
23
+ RUN poetry config virtualenvs.create false \
24
+ && poetry install --no-root --only main
25
+
26
+ # Copy the rest of the application code
27
+ COPY --chown=user . /app
28
+
29
+ # Build the wheel file
30
+ RUN poetry build -f wheel
31
+
32
+ # Runtime Stage
33
+ FROM python:3.12-slim AS runtime
34
+
35
+ RUN useradd -m -u 1000 user
36
+
37
+ # Set environment variables
38
+ ENV PYTHONUNBUFFERED=1
39
+
40
+ # Install runtime dependencies for Chromium and Selenium
41
+ RUN apt-get update && apt-get install -y \
42
+ libgconf-2-4 \
43
+ libnss3 \
44
+ && rm -rf /var/lib/apt/lists/*
45
+
46
+
47
+ # Set working directory
48
+ WORKDIR /app
49
+
50
+ # Copy the built wheel file from the builder stage
51
+ COPY --chown=user --from=builder /app/dist/*.whl /app/
52
+
53
+
54
+ # Install the wheel file
55
+ RUN pip install --no-cache-dir /app/*.whl
56
+
57
+ # Expose application port
58
+ EXPOSE 7860
59
+
60
+ # Command to run the application
61
+ CMD ["resume_maker_ai_agent"]
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2025, Deepak Pant
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
Makefile ADDED
@@ -0,0 +1,266 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # =============================
2
+ # Variable Documentation
3
+ # =============================
4
+
5
+ # PYPI_TOKEN: Authentication token for PyPI publishing
6
+ # Usage: make publish PYPI_TOKEN=your_pypi_token
7
+
8
+ # PACKAGE_NAME: Name of the Python package for dependency tree
9
+ # Usage: make print-dependency-tree PACKAGE_NAME=your_package_name
10
+
11
+
12
+ # =============================
13
+ # Project Configuration
14
+ # =============================
15
+ PROJECT_NAME = resume-maker-ai-agent
16
+ GITHUB_USERNAME = DeepakPant93
17
+ GITHUB_REPO = $(PROJECT_NAME)
18
+ PROJECT_SLUG = resume_maker_ai_agent
19
+ CLOUD_REGION = eastus
20
+ TAG = latest
21
+ IMAGE_NAME = deepak93p/$(PROJECT_SLUG)
22
+ RESOURCE_GROUP = $(PROJECT_NAME)-rg
23
+ APP_NAME = $(PROJECT_NAME)-app
24
+ APP_ENV_NAME = $(APP_NAME)-env
25
+ BUMP_TYPE = patch
26
+
27
+ # =============================
28
+ # Help (Default Target)
29
+ # =============================
30
+ .PHONY: help
31
+ help: ## Display this help message
32
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-25s\033[0m %s\n", $$1, $$2}'
33
+
34
+ .DEFAULT_GOAL := help
35
+
36
+ # =============================
37
+ # Installation and Setup
38
+ # =============================
39
+ .PHONY: bake-env
40
+ bake-env: clean-env ## Install the poetry environment and set up pre-commit hooks
41
+ @echo "🚀 Creating virtual environment using pyenv and poetry"
42
+ @poetry install
43
+ @poetry run pre-commit install || true
44
+ @max_retries=3; count=0; \
45
+ while ! make lint; do \
46
+ count=$$((count + 1)); \
47
+ if [ $$count -ge $$max_retries ]; then \
48
+ echo "Max retries reached. Exiting."; \
49
+ exit 1; \
50
+ fi; \
51
+ echo "Retrying make lint ($$count/$$max_retries)..."; \
52
+ done
53
+ @poetry shell
54
+
55
+ .PHONY: clean-env
56
+ clean-env: ## Remove the poetry environment
57
+ @echo "🚀 Removing virtual environment"
58
+ @rm -rf .venv
59
+
60
+ .PHONY: reset-env
61
+ reset-env: clean-env bake-env ## Install the poetry environment and set up pre-commit hooks
62
+
63
+ .PHONY: init-repo
64
+ init-repo: ## Initialize git repository
65
+ @echo "🚀 Initializing git repository"
66
+ @git init
67
+ @echo "🚀 Creating initial commit"
68
+ @git add .
69
+ @git commit -m "Initial commit"
70
+ @echo "🚀 Adding remote repository"
71
+ @git branch -M main
72
+ @git remote add origin git@github.com:$(GITHUB_USERNAME)/$(GITHUB_REPO).git
73
+ @echo "🚀 Pushing initial commit"
74
+ @git push -u origin main
75
+
76
+ .PHONY: setup-cloud-env
77
+ setup-cloud-env: ## Create resource group, container app environment, and service principal
78
+ @echo "🚀 Creating resource group: $(RESOURCE_GROUP)"
79
+ @az group create --name $(RESOURCE_GROUP) --location $(CLOUD_REGION)
80
+
81
+ @echo "🚀 Creating container app environment: $(APP_ENV_NAME)"
82
+ @az containerapp env create --name $(APP_ENV_NAME) --resource-group $(RESOURCE_GROUP) --location $(CLOUD_REGION)
83
+
84
+ @echo "🚀 Fetching subscription ID"
85
+ @subscription_id=$$(az account show --query "id" -o tsv) && \
86
+ echo "Subscription ID: $$subscription_id" && \
87
+ echo "🚀 Creating service principal for: $(APP_NAME)" && \
88
+ az ad sp create-for-rbac --name "$(APP_NAME)-service-principal" --role contributor --scopes /subscriptions/$$subscription_id --sdk-auth
89
+
90
+ @echo "🚀 Creating container app: $(APP_NAME)"
91
+ @az containerapp create --name $(APP_NAME) --resource-group $(RESOURCE_GROUP) --environment $(APP_ENV_NAME) --image 'nginx:latest' --target-port 80 --ingress 'external' --query "properties.configuration.ingress.fqdn"
92
+
93
+ .PHONY: clean-cloud-env
94
+ clean-cloud-env: ## Delete resource group, container app environment, and service principal
95
+ @echo "🚀 Deleting service principal for: $(APP_NAME)-service-principal"
96
+ @sp_object_id=$$(az ad sp list --display-name "$(APP_NAME)-service-principal" --query "[0].id" -o tsv) && \
97
+ if [ -n "$$sp_object_id" ]; then \
98
+ az ad sp delete --id $$sp_object_id; \
99
+ echo "Service principal deleted"; \
100
+ else \
101
+ echo "Service principal not found, skipping deletion"; \
102
+ fi
103
+
104
+ @echo "🚀 Deleting container app: $(APP_NAME)"
105
+ @az containerapp delete --name $(APP_NAME) --resource-group $(RESOURCE_GROUP) --yes --no-wait || echo "Container app not found, skipping deletion"
106
+
107
+ @echo "🚀 Deleting container app environment: $(APP_ENV_NAME)"
108
+ @az containerapp env delete --name $(APP_ENV_NAME) --resource-group $(RESOURCE_GROUP) --yes --no-wait || echo "Container app environment not found, skipping deletion"
109
+
110
+ @echo "🚀 Deleting resource group: $(RESOURCE_GROUP)"
111
+ @az group delete --name $(RESOURCE_GROUP) --yes --no-wait || echo "Resource group not found, skipping deletion"
112
+
113
+ .PHONY: install-prerequisites
114
+ install-prerequisites: ## Install system prerequisites
115
+ @echo "Updating package lists..."
116
+ @sudo apt-get update
117
+ @echo "Installing system prerequisites..."
118
+ @sudo apt-get install -y \
119
+ chromium \
120
+ chromium-driver \
121
+ gnupg \
122
+ libgconf-2-4 \
123
+ libnss3 \
124
+ unzip \
125
+ wget \
126
+ sqlite3
127
+
128
+
129
+ # =============================
130
+ # Code Quality and Testing
131
+ # =============================
132
+ .PHONY: lint
133
+ lint: ## Run code quality tools
134
+ @echo "🚀 Checking Poetry lock file consistency with 'pyproject.toml'"
135
+ @poetry check --lock
136
+ @echo "🚀 Linting code with pre-commit"
137
+ @poetry run pre-commit run -a
138
+ @echo "🚀 Static type checking with mypy"
139
+ # @echo "🚀 Sorting imports with isort"
140
+ # @poetry run isort resume_maker_ai_agent/
141
+ # @echo "🚀 Linting code with Ruff"
142
+ # @poetry run ruff format resume_maker_ai_agent/
143
+ @poetry run mypy
144
+ @echo "🚀 Checking for obsolete dependencies with deptry"
145
+ @poetry run deptry .
146
+ @echo "🚀 Checking for security vulnerabilities with bandit"
147
+ @poetry run bandit -c pyproject.toml -r resume_maker_ai_agent/ -ll
148
+
149
+
150
+ .PHONY: test
151
+ test: ## Run tests with pytest
152
+ @echo "🚀 Running tests with pytest"
153
+ @poetry run pytest --cov --cov-config=pyproject.toml --cov-report=term-missing
154
+
155
+
156
+ # =============================
157
+ # Build and Release
158
+ # =============================
159
+ .PHONY: bake
160
+ bake: clean-bake ## Build wheel file using poetry
161
+ @echo "🚀 Creating wheel file"
162
+ @poetry build
163
+
164
+ .PHONY: clean-bake
165
+ clean-bake: ## Clean build artifacts
166
+ @rm -rf dist
167
+
168
+ .PHONY: bump
169
+ bump: ## Bump project version
170
+ @echo "🚀 Bumping version"
171
+ @poetry run bump-my-version bump $(BUMP_TYPE)
172
+
173
+ .PHONY: publish
174
+ publish: ## Publish a release to PyPI
175
+ @echo "🚀 Publishing: Dry run"
176
+ @poetry config pypi-token.pypi $(PYPI_TOKEN)
177
+ @poetry publish --dry-run
178
+ @echo "🚀 Publishing"
179
+ @poetry publish
180
+
181
+ .PHONY: bake-and-publish
182
+ bake-and-publish: bake publish ## Build and publish to PyPI
183
+
184
+ .PHONY: update
185
+ update: ## Update project dependencies
186
+ @echo "🚀 Updating project dependencies"
187
+ @poetry update
188
+ @poetry run pre-commit install --overwrite
189
+ @echo "Dependencies updated successfully"
190
+
191
+ # =============================
192
+ # Run and Documentation
193
+ # =============================
194
+ .PHONY: run
195
+ run: ## Run the project's main application
196
+ @echo "🚀 Running the project"
197
+ @poetry run streamlit run $(PROJECT_SLUG)/app.py
198
+
199
+ .PHONY: docs-test
200
+ docs-test: ## Test if documentation can be built without warnings or errors
201
+ @poetry run mkdocs build -s
202
+
203
+ .PHONY: docs
204
+ docs: ## Build and serve the documentation
205
+ @poetry run mkdocs serve
206
+
207
+ # =============================
208
+ # Docker
209
+ # =============================
210
+ .PHONY: bake-container
211
+ bake-container: ## Build Docker image
212
+ @echo "🚀 Building Docker image"
213
+ docker build -t $(IMAGE_NAME):$(TAG) -f Dockerfile .
214
+
215
+ .PHONY: container-push
216
+ container-push: ## Push Docker image to Docker Hub
217
+ @echo "🚀 Pushing Docker image to Docker Hub"
218
+ docker push $(IMAGE_NAME):$(TAG)
219
+
220
+ .PHONY: bake-container-and-push
221
+ bake-container-and-push: bake-container container-push ## Build and push Docker image to Docker Hub
222
+
223
+ .PHONY: clean-container
224
+ clean-container: ## Clean up Docker resources related to the app
225
+ @echo "🚀 Deleting Docker image for app: $(IMAGE_NAME)"
226
+ @docker images $(IMAGE_NAME) --format "{{.Repository}}:{{.Tag}}" | xargs -r docker rmi -f || echo "No image to delete"
227
+
228
+ @echo "🚀 Deleting unused Docker volumes"
229
+ @docker volume ls -qf dangling=true | xargs -r docker volume rm || echo "No unused volumes to delete"
230
+
231
+ @echo "🚀 Deleting unused Docker networks"
232
+ @docker network ls -q --filter "dangling=true" | xargs -r docker network rm || echo "No unused networks to delete"
233
+
234
+ @echo "🚀 Cleaning up stopped containers"
235
+ @docker ps -aq --filter "status=exited" | xargs -r docker rm || echo "No stopped containers to clean up"
236
+
237
+
238
+ # =============================
239
+ # Debug
240
+ # =============================
241
+
242
+ .PHONY: print-dependency-tree
243
+ print-dependency-tree: ## Print dependency tree
244
+ @echo "Printing dependency tree..."
245
+ @poetry run pipdeptree -p $(PACKAGE_NAME)
246
+
247
+
248
+ # =============================
249
+ # Cleanup
250
+ # =============================
251
+ .PHONY: teardown
252
+ teardown: clean-bake clean-container ## Clean up temporary files and directories and destroy the virtual environment, Docker image from your local machine
253
+ @echo "🚀 Cleaning up temporary files and directories"
254
+ @rm -rf .pytest_cache || true
255
+ @rm -rf dist || true
256
+ @rm -rf build || true
257
+ @rm -rf htmlcov || true
258
+ @rm -rf .venv || true
259
+ @rm -rf .mypy_cache || true
260
+ @rm -rf site || true
261
+ @find . -type d -name "__pycache__" -exec rm -rf {} + || true
262
+ @rm -rf .ruff_cache || true
263
+ @echo "🚀 Clean up completed."
264
+
265
+ .PHONY: teardown-all
266
+ teardown-all: teardown clean-cloud-env ## Clean up temporary files and directories and destroy the virtual environment, Docker image, and Cloud resources
README.md ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Resume Maker AI
3
+ emoji: 📄
4
+ colorFrom: blue
5
+ colorTo: purple
6
+ sdk: docker
7
+ app_port: 7860
8
+ short_description: AI service for making resumes.
9
+ ---
10
+
11
+ # resume-maker-ai-agent
12
+
13
+ This app helps you create professional resumes using AI.
14
+
15
+ - **Github repository**: <https://github.com/DeepakPant93/resume-maker-ai-agent/>
16
+ - **Documentation** <https://DeepakPant93.github.io/resume-maker-ai-agent/>
17
+
18
+ ## Getting started with your project
19
+
20
+ First, create a repository on GitHub with the same name as this project, and then run the following commands:
21
+
22
+ ## Installation
23
+
24
+ 1. Initialize the repository if it's your first time:
25
+
26
+ ```bash
27
+ cd resume-maker-ai-agent
28
+ make init-repo
29
+ ```
30
+
31
+ 2. Install dependencies using Poetry:
32
+
33
+ ```bash
34
+ make bake-env
35
+ ```
36
+
37
+ 3. Run the Streamlit app:
38
+
39
+ ```bash
40
+ make run
41
+ ```
42
+
43
+ You are now ready to start development on your project!
44
+ The CI/CD pipeline will be triggered when you open a pull request, merge to main, or when you create a new release.
codecov.yaml ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ coverage:
2
+ range: 70..100
3
+ round: down
4
+ precision: 1
5
+ status:
6
+ project:
7
+ default:
8
+ target: 90%
9
+ threshold: 0.5%
docker-compose.yml ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ version: "3.9"
2
+
3
+ services:
4
+ resume_maker_ai_agent:
5
+ build:
6
+ context: .
7
+ dockerfile: Dockerfile
8
+ container_name: resume_maker_ai_agent
9
+ ports:
10
+ - "7860:7860"
11
+ volumes:
12
+ - .:/app
13
+ environment:
14
+ - PYTHONUNBUFFERED=1
15
+ command: >
16
+ resume_maker_ai_agent
docs/assets/favicon.ico ADDED
docs/assets/logo.png ADDED
docs/index.md ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # resume-maker-ai-agent
2
+
3
+ [![Release](https://img.shields.io/github/v/release/DeepakPant93/resume-maker-ai-agent)](https://img.shields.io/github/v/release/DeepakPant93/resume-maker-ai-agent)
4
+ [![Build status](https://img.shields.io/github/actions/workflow/status/DeepakPant93/resume-maker-ai-agent/test-check-build.yml?branch=main)](https://github.com/DeepakPant93/resume-maker-ai-agent/actions/workflows/test-check-build.yml?query=branch%3Amain)
5
+ [![Commit activity](https://img.shields.io/github/commit-activity/m/DeepakPant93/resume-maker-ai-agent)](https://img.shields.io/github/commit-activity/m/DeepakPant93/resume-maker-ai-agent)
6
+ [![License](https://img.shields.io/github/license/DeepakPant93/resume-maker-ai-agent)](https://img.shields.io/github/license/DeepakPant93/resume-maker-ai-agent)
7
+
8
+ This app will download JioSaavn music.
9
+ This repository contains a sample Data Science application built with FastAPI, designed to streamline model training and prediction processes via RESTful APIs. The application leverages **Poetry** for dependency management, ensuring a robust and scalable development environment.
10
+
11
+ ---
12
+
13
+ ## Features
14
+
15
+ ### FastAPI Endpoints:
16
+
17
+ - `/upload-docs`: API endpoint to upload documents for creating embeddings.
18
+ - `/ask`: API endpoint for querying the system and receiving context-aware answers.
19
+
20
+ ### Poetry for Dependency Management:
21
+
22
+ - Simplifies package installation and management.
23
+ - Ensures compatibility and reproducibility of the project environment.
24
+
25
+ ### Scalable Architecture:
26
+
27
+ - Modular design with clear separation of concerns.
28
+ - Easy integration of new features or pipelines.
29
+
30
+ ---
31
+
32
+ ## Prerequisites
33
+
34
+ - Python >= 3.12
35
+ - Poetry installed (`pip install poetry`)
36
+
37
+ ---
38
+
39
+ ## Installation
40
+
41
+ 1. Clone the repository:
42
+
43
+ ```bash
44
+ git clone https://github.com/DeepakPant93/resume-maker-ai-agent.git
45
+ cd resume-maker-ai-agent
46
+ ```
47
+
48
+ 1. Initialize the repository if it's your first time:
49
+
50
+ ```bash
51
+ cd resume-maker-ai-agent
52
+ make init-repo
53
+ ```
54
+
55
+ 1. Install dependencies using Poetry:
56
+
57
+ ```bash
58
+ make bake-env
59
+ ```
60
+
61
+ 1. Run the FastAPI server:
62
+
63
+ ```bash
64
+ make run
65
+ ```
66
+
67
+ ---
68
+
69
+ ## Project Structure
70
+
71
+ ```plaintext
72
+ ──resume-maker-ai-agent/
73
+ ├── api # API route definitions
74
+ ├── config # Configuration files and settings
75
+ ├── constants # Static constants and enumerations
76
+ ├── core # Core logic for the application
77
+ ├── entity # Definitions of data models and schemas
78
+ ├── exception # Custom exception classes for error handling
79
+ ├── logger # Logging setup for the application
80
+ ├── models # Request and response models
81
+ ├── services # Business logic and service layer
82
+ ├── utils # Utility functions (e.g., file handling, data encoding)
83
+ └── main.py # Entry point for the FastAPI application
84
+ ```
85
+
86
+ ---
87
+
88
+ Enjoy building with this RAG FastAPI application! 🚀
docs/modules.md ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ ::: resume_maker_ai_agent
2
+
3
+ ## Models
4
+
5
+ ::: resume_maker_ai_agent.models
6
+
7
+ ## Services
8
+
9
+ ::: resume_maker_ai_agent.services
mkdocs.yml ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ site_name: resume-maker-ai-agent
2
+ repo_url: https://github.com/DeepakPant93/resume-maker-ai-agent
3
+ site_url: https://DeepakPant93.github.io/resume-maker-ai-agent
4
+ site_description: This app will download Jio-Savan music.
5
+ site_author: Deepak pant
6
+ edit_uri: edit/main/docs/
7
+ repo_name: DeepakPant93/resume-maker-ai-agent
8
+ copyright: Maintained by <a href="https://github.com/DeepakPant93/">Deepak pant</a>.
9
+
10
+ nav:
11
+ - Home: index.md
12
+ - Modules: modules.md
13
+
14
+ plugins:
15
+ - search
16
+ - tags
17
+ - mkdocstrings:
18
+ handlers:
19
+ python:
20
+ setup_commands:
21
+ show_source: false
22
+ show_root_heading: true
23
+ show_root_full_path: false
24
+ heading_level: 3
25
+ enable_inventory: true
26
+ # - import sys
27
+ # - sys.path.append('../')
28
+ # - git-revision-date-localized: # Shows last updated date
29
+ # enable_creation_date: true
30
+ - minify: # Minifies HTML and JS
31
+ minify_html: true
32
+ minify_js: true
33
+
34
+ theme:
35
+ name: material
36
+ logo: assets/logo.png # Optional custom logo
37
+ favicon: assets/favicon.ico # Optional favicon
38
+ features:
39
+ - navigation.tabs # Top-level sections as tabs
40
+ - navigation.sections # Sections are expanded
41
+ - navigation.top # Back to top button
42
+ - search.suggest # Search suggestions
43
+ - search.highlight # Highlight search results
44
+ - content.tabs.link # Link code tabs
45
+ - content.code.annotate # Code block annotations
46
+ - content.copy.code # Copy code button
47
+ palette:
48
+ - media: "(prefers-color-scheme: light)"
49
+ scheme: default
50
+ primary: indigo
51
+ accent: deep orange
52
+ toggle:
53
+ icon: material/brightness-7
54
+ name: Switch to dark mode
55
+ - media: "(prefers-color-scheme: dark)"
56
+ scheme: slate
57
+ primary: black
58
+ accent: deep orange
59
+ toggle:
60
+ icon: material/brightness-4
61
+ name: Switch to light mode
62
+ icon:
63
+ repo: fontawesome/brands/github
64
+
65
+ extra:
66
+ social:
67
+ - icon: fontawesome/brands/github
68
+ link: https://github.com/DeepakPant93/resume-maker-ai-agent
69
+ - icon: fontawesome/brands/python
70
+ link: https://pypi.org/project/resume-maker-ai-agent
71
+ # analytics: # Optional Google Analytics
72
+ # provider: google
73
+ # property: G-XXXXXXXXXX # Replace with your tracking ID
74
+
75
+ markdown_extensions:
76
+ - toc:
77
+ permalink: true
78
+ - pymdownx.arithmatex:
79
+ generic: true
80
+ - pymdownx.highlight: # Advanced code highlighting
81
+ anchor_linenums: true
82
+ line_spans: __span
83
+ pygments_lang_class: true
84
+ - pymdownx.inlinehilite
85
+ - pymdownx.snippets
86
+ - pymdownx.superfences
87
+ - pymdownx.tabbed
88
+ - attr_list
89
+ - md_in_html
notebooks/trails.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
poetry.lock ADDED
The diff for this file is too large to render. See raw diff
 
poetry.toml ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ [virtualenvs]
2
+ in-project = true
pyproject.toml ADDED
@@ -0,0 +1,195 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [tool.poetry]
2
+ name = "resume_maker_ai_agent"
3
+ version = "0.0.1"
4
+ description = "This app will download Jio-Savan music."
5
+ authors = ["Deepak pant <deepak.93p@gmail.com>"]
6
+ repository = "https://github.com/DeepakPant93/resume-maker-ai-agent"
7
+ documentation = "https://DeepakPant93.github.io/resume-maker-ai-agent/"
8
+ readme = "README.md"
9
+ packages = [
10
+ {include = "resume_maker_ai_agent"}
11
+ ]
12
+
13
+ [tool.poetry.scripts]
14
+ resume_maker_ai_agent = "resume_maker_ai_agent.__main__:main"
15
+
16
+ [tool.poetry.dependencies]
17
+ python = ">=3.11,<3.13"
18
+ crewai = {version = ">=0.86.0,<1.0.0", extras = ["tools"]}
19
+ streamlit = "^1.41.1"
20
+ selenium = "^4.27.1"
21
+ # chromedriver_autoinstaller = "^0.6.4"
22
+ # webdriver-manager = "*"
23
+ bs4 = "*"
24
+
25
+ [tool.poetry.group.dev.dependencies]
26
+ deptry = "^0.16.2" # For dependency management
27
+ mypy = "^1.5.1" # Static type checking
28
+ pre-commit = "^3.4.0" # Pre-commit hooks
29
+ tox = "^4.11.1" # Testing in multiple environments
30
+ ipykernel = "^6.25.0" # Jupyter kernel
31
+ black = "^23.9.0" # Code formatter
32
+ build = "^1.0.0" # Build management
33
+ bump-my-version = "^0.28.2" # Bump versions automatically
34
+ codespell = "^2.2.5" # Spell checking in code
35
+ wheel = "^0.41.0" # Build wheels
36
+ twine = "^4.0.0" # Publish packages
37
+ bandit = "^1.8.0" # Security check
38
+ pylint = "^3.0.0" # Powerful linter
39
+ pydocstyle = "^6.3.0" # Enforce PEP 257 docstring conventions
40
+ # isort = "^5.12.0" # Sort imports
41
+ # ruff = "^0.8.6" # Linting tool
42
+
43
+ [tool.poetry.group.docs.dependencies]
44
+ mkdocs = "^1.5.0" # Documentation site generator
45
+ sphinx = "^7.2.0" # Documentation tool
46
+ mkdocs-git-revision-date-plugin = "^0.3.2" # Show revision dates
47
+ mkdocs-git-revision-date-localized-plugin = "^1.3.0" # Localized dates
48
+ mkdocs-jupyter = ">=0.25.1" # For Jupyter Notebook integration
49
+ mkdocs-pdf-export-plugin = "^0.5.10" # PDF export
50
+ mkdocs-material = ">=9.1.3" # MkDocs Material theme
51
+ mkdocstrings-crystal = "^0.3.0" # Mkdocstrings for Crystal
52
+ pygments = "^2.16.0" # Syntax highlighting
53
+ pymdown-extensions = "^10.0" # Markdown extensions
54
+ nbconvert = "^7.7.0" # Convert notebooks to other formats
55
+ nbformat = "^5.9.0" # Notebook format support
56
+ livereload = "^2.6.3" # Live reload for MkDocs
57
+ watchdog = "^3.0.0" # File monitoring
58
+ mkdocstrings = {extras = ["python"], version = "^0.27.0"} # Auto-generate documentation from docstrings
59
+ mkdocs-minify-plugin = "^0.8.0" # Minify HTML
60
+
61
+ [build-system]
62
+ requires = ["poetry-core>=1.0.0"]
63
+ build-backend = "poetry.core.masonry.api"
64
+
65
+
66
+ [tool.mypy]
67
+ files = ["resume_maker_ai_agent"]
68
+ disallow_untyped_defs = true
69
+ disallow_any_unimported = true
70
+ no_implicit_optional = true
71
+ check_untyped_defs = true
72
+ warn_return_any = true
73
+ warn_unused_ignores = true
74
+ show_error_codes = true
75
+ pretty = true
76
+ show_traceback = true
77
+
78
+
79
+ [[tool.mypy.overrides]]
80
+ module = [
81
+ "joblib.*",
82
+ "yaml.*",
83
+ "ensure.*",
84
+ "fastapi_health.*",
85
+ "crewai.*",
86
+ "crewai_tools.*",
87
+ "bs4.*",
88
+ "resume_maker_ai_agent.crew"
89
+ ]
90
+ ignore_missing_imports = true
91
+ ignore_errors = true
92
+
93
+
94
+ [tool.pytest.ini_options]
95
+ testpaths = ["tests"]
96
+ norecursedirs = "legacy_tests"
97
+ python_files = ["test_*.py"]
98
+ python_classes = ["Test*"]
99
+ python_functions = ["test_*"]
100
+ filterwarnings = [
101
+ "ignore:.*general_plain_validator_function.*:DeprecationWarning",
102
+ "ignore:.*with_info_plain_validator_function.*:DeprecationWarning"
103
+ ]
104
+
105
+
106
+ [tool.ruff]
107
+ target-version = "py39"
108
+ line-length = 120
109
+ fix = true
110
+ select = [
111
+ # flake8-2020
112
+ "YTT",
113
+ # flake8-bandit
114
+ "S",
115
+ # flake8-bugbear
116
+ "B",
117
+ # flake8-builtins
118
+ "A",
119
+ # flake8-comprehensions
120
+ "C4",
121
+ # flake8-debugger
122
+ "T10",
123
+ # flake8-simplify
124
+ "SIM",
125
+ # isort
126
+ "I",
127
+ # mccabe
128
+ "C90",
129
+ # pycodestyle
130
+ "E", "W",
131
+ # pyflakes
132
+ "F",
133
+ # pygrep-hooks
134
+ "PGH",
135
+ # pyupgrade
136
+ "UP",
137
+ # ruff
138
+ "RUF",
139
+ # tryceratops
140
+ "TRY",
141
+ ]
142
+ ignore = [
143
+ # LineTooLong
144
+ "E501",
145
+ # DoNotAssignLambda
146
+ "E731",
147
+ # Possible binding to all interfaces - Require for Docker container
148
+ "S104"
149
+ ]
150
+
151
+ [tool.ruff.format]
152
+ preview = true
153
+
154
+ [tool.coverage.report]
155
+ skip_empty = true
156
+
157
+
158
+ [tool.coverage.run]
159
+ branch = true
160
+ source = ["resume_maker_ai_agent"]
161
+ # parallel = true
162
+ # concurrency = ["thread"]
163
+ omit = [
164
+ "**/__init__.py", # Exclude all init files
165
+ "resume_maker_ai_agent/crew.py", # Exclude crew.py file
166
+ "resume_maker_ai_agent/config/*", # Exclude all files in config folder
167
+ "resume_maker_ai_agent/models/*", # Exclude all files in model folder
168
+ ]
169
+
170
+ [tool.ruff.per-file-ignores]
171
+ "tests/*" = ["S101"]
172
+
173
+ [tool.bumpversion]
174
+ current_version = "0.0.1"
175
+ commit = true
176
+ tag = true
177
+
178
+ [[tool.bumpversion.files]]
179
+ glob = "pyproject.toml"
180
+ search = 'version = "{current_version}"'
181
+ replace = 'version = "{new_version}"'
182
+
183
+ [tool.deptry]
184
+ exclude = ["research","artifacts", "notebooks", "tests", "docs", ".venv", "venv", "__pycache__", ".ruff_cache", ".pytest_cache", ".mypy_cache", ".coverage", ".git", "build", "dist", ".github", "site", "config"]
185
+ ignore = ["DEP003"]
186
+
187
+ [tool.pydocstyle]
188
+ select = ["D101", "D102"]
189
+
190
+ [tool.isort]
191
+ profile = "black"
192
+ known_third_party = ["requests", "bs4", "pydantic", "crewai_tools"]
193
+ default_section = "THIRDPARTY"
194
+ force_sort_within_sections = true
195
+ line_length = 120
resume_maker_ai_agent/__init__.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+
3
+ This is the initialization file for the RAG (Retrieval-Augmented Generation) application,
4
+ designed to provide context-aware responses by combining document embeddings with large
5
+ language model (LLM) capabilities. The application is modular, scalable, and maintains
6
+ a clear separation of concerns across its components.
7
+
8
+ Modules:
9
+ api: Exposes endpoints for document upload and querying the system.
10
+ config: Manages application settings and environment variables.
11
+ core: Implements embedding generation, LLM integration, and workflow orchestration.
12
+ models: Defines schemas for API request validation and response structuring.
13
+ service: Provides document management and vector database interaction services.
14
+ exception: Contains custom exceptions for handling application-specific errors.
15
+ utils: Offers utility functions for common operations and data manipulation.
16
+ logger: Implements centralized logging with customizable levels.
17
+ constants: Stores application-wide constants for consistency and maintainability.
18
+
19
+ Features:
20
+ - **Retrieval-Augmented Generation**: Combines document embeddings with LLMs to deliver accurate, context-aware answers.
21
+ - **Modular Design**: Ensures scalability, maintainability, and ease of testing.
22
+ - **Error Handling and Logging**: Enhances debugging and monitoring with structured logs and custom exceptions.
23
+ - **Seamless Integration**: Connects document management, vector database, and LLM workflows efficiently.
24
+ - **User-Friendly API**: Simplifies user interaction with the application's core functionalities.
25
+
26
+ This package serves as the backbone of the RAG application, ensuring a seamless pipeline
27
+ from document ingestion to intelligent query resolution.
28
+ """
resume_maker_ai_agent/__main__.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # __main__.py
2
+ import sys
3
+ from pathlib import Path
4
+
5
+ import streamlit.web.cli as stcli
6
+
7
+
8
def main() -> None:
    """Launch the Streamlit UI for this package.

    Builds the path to the sibling ``app.py``, rewrites ``sys.argv`` so the
    Streamlit CLI believes it was invoked as ``streamlit run app.py
    --server.port 7860``, and hands control to the CLI. Exits the process
    with whatever status code Streamlit returns.
    """
    app_path = Path(__file__).parent / "app.py"
    # Port 7860 matches the container port exposed in docker-compose/devcontainer.
    sys.argv = ["streamlit", "run", str(app_path), "--server.port", "7860"]
    sys.exit(stcli.main())


if __name__ == "__main__":
    main()
resume_maker_ai_agent/app.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import warnings

import streamlit as st

from resume_maker_ai_agent.services.app_service import search_music

# pysbd (pulled in transitively by crewai) emits SyntaxWarnings on import.
warnings.filterwarnings("ignore", category=SyntaxWarning, module="pysbd")


# Set page config
st.set_page_config(page_title="Music Search", page_icon="🎵", layout="wide")

# App title
st.title("🎵 Music Search Results")

search_query = st.sidebar.text_input("Enter song name or artist")

if search_query:
    # Show loading spinner while the crew searches and the pages are scraped.
    with st.spinner("Searching for music..."):
        music_data = search_music(search_query)

    if not music_data:
        # FIX: the original warned on None/empty but still fell through to
        # `for item in music_data`, which raises TypeError when music_data
        # is None. Guard the rendering loop behind an else-branch instead.
        st.warning("No music found. Please try again.")
    else:
        for item in music_data:
            try:
                song_id = item["song_info"]["song_url"].split("/")[-1]  # Get unique ID from URL (currently unused)
                song_title = item["song_info"]["title"].split(" - ")[0]
                musicians = item["song_info"]["musician"]
                artists = ", ".join(musicians[:2])  # show at most two artists
                release_date = item["song_info"]["release_date"]

                # Display song information in a row
                with st.container():
                    # Create columns (col3 is allocated but intentionally left empty)
                    col1, col2, col3, col4 = st.columns([1, 2, 2, 2])

                    # Column 1: Image
                    with col1:
                        st.image(item["album_image_url"], width=100)

                    # Column 2: Title and Artists
                    with col2:
                        st.markdown(f"**{song_title}**")
                        st.markdown(f"*{artists} | {release_date}*")

                    # Column 4: Audio Player
                    with col4:
                        st.audio(item["song_info"]["downloadable_url"])
            except Exception as e:
                # Best-effort rendering: skip malformed entries rather than
                # breaking the whole results page.
                print(f"An error occurred: {e!s}")
                continue
resume_maker_ai_agent/config/agents.yaml ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ music_researcher:
2
+ role: >
3
+ Senior Music Researcher at {website}
4
+ goal: >
5
+ Discover the latest music from {website} on {topic}
6
+ backstory: >
7
+ You're a seasoned music researcher with a passion for discovering new
8
+ music from {website}. Known for your ability to find the most relevant
9
+ information and present it in a clear and concise manner.
resume_maker_ai_agent/config/tasks.yaml ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ music_research_task:
2
+ description: >
3
+ Find the latest music from the internet for {website} on {topic}. Make sure you find the most
4
+ relevant information and present it in a clear and concise manner.
5
+ expected_output: >
6
+ A list of at least 5 of the latest music links in a JSON with (title, link) format from the internet on {topic}. Search for the music on the internet and get the most relevant {website} link.
7
+ agent: music_researcher
resume_maker_ai_agent/crew.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from crewai import Agent, Crew, Process, Task
2
+ from crewai.project import CrewBase, agent, crew, task
3
+
4
+ from resume_maker_ai_agent.models.response_models import MusicDetails
5
+ from resume_maker_ai_agent.tools.custom_tool import search_tool
6
+
7
+
8
@CrewBase
class JioSavanMusicDownloaderAgent:
    """JioSavanMusicDownloaderAgent crew.

    Declarative crewAI crew: the ``@agent``/``@task``/``@crew`` decorators
    register the methods below, and the ``@CrewBase`` class decorator collects
    them into ``self.agents`` / ``self.tasks`` automatically.
    """

    # Paths (relative to this module) of the YAML files describing the agent
    # and task prompts.
    # NOTE(review): these are indexed like dicts below, so @CrewBase presumably
    # replaces the path strings with the parsed YAML mappings — confirm against
    # the crewai version in use.
    agents_config = "config/agents.yaml"
    tasks_config = "config/tasks.yaml"

    @agent
    def music_researcher(self) -> Agent:
        """
        Creates a music researcher agent.

        This agent is responsible for searching for the specified music on JioSaavn
        and returning the results in a structured format.

        :return: An instance of the Agent class
        """
        return Agent(config=self.agents_config["music_researcher"], verbose=True)

    @task
    def music_research_task(self) -> Task:
        """
        Creates the music research task.

        This task is responsible for searching for the specified music on JioSaavn
        and returning the results in a structured format
        (``MusicDetails``: a list of title/link pairs).

        :return: An instance of the Task class
        """

        return Task(
            config=self.tasks_config["music_research_task"],
            tools=[search_tool],  # Serper.dev web search (see tools/custom_tool.py)
            output_json=MusicDetails,  # force structured JSON output
        )

    @crew
    def crew(self) -> Crew:
        """Creates the JioSavanMusicDownloaderAgent crew (single sequential task)."""

        return Crew(
            agents=self.agents,  # Automatically created by the @agent decorator
            tasks=self.tasks,  # Automatically created by the @task decorator
            process=Process.sequential,
            verbose=False,
        )
resume_maker_ai_agent/models/__init__.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+
3
+ This module defines the data models used for API request and response handling.
4
+
5
+ Components:
6
+ - Request Models: Defines the structure and validation rules for incoming API requests.
7
+ - Response Models: Specifies the format and schema for outgoing API responses.
8
+
9
+ Purpose:
10
+ - Ensures consistent data validation and serialization across the application, adhering to defined schemas.
11
+ """
resume_maker_ai_agent/models/response_models.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel
2
+
3
+
4
class MusicLink(BaseModel):
    """A single music search hit: a display title and the song's page URL."""

    title: str  # Human-readable song/album title
    link: str  # URL of the song page (later scraped for full details)
10
+
11
+
12
class MusicDetails(BaseModel):
    """Structured output of the music research task: the collected links."""

    links: list[MusicLink]  # (title, link) pairs, per the task's expected_output
resume_maker_ai_agent/services/__init__.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+
3
+ This module provides the core services for managing documents and vector database operations.
4
+
5
+ Components:
6
+ - Document Service: Handles document-related operations such as storage, retrieval, and preprocessing.
7
+ - Vector DB Service: Manages interactions with the vector database, including storing and querying embeddings.
8
+
9
+ Purpose:
10
+ - Implements the business logic and service layer to support the application's RAG functionality.
11
+ """
resume_maker_ai_agent/services/app_service.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import warnings
2
+
3
+ from resume_maker_ai_agent.crew import JioSavanMusicDownloaderAgent
4
+ from resume_maker_ai_agent.services.web_scarapper_service import scrape_pages
5
+
6
+ warnings.filterwarnings("ignore", category=SyntaxWarning, module="pysbd")
7
+
8
+
9
def search_music(query: str) -> list[dict]:
    """Search for music matching *query* and return scraped song details.

    Runs the crew to find candidate links, then scrapes each page for
    metadata. Best-effort: any failure is logged and an empty list is
    returned instead of raising.
    """
    details: list[dict] = []
    try:
        # Step 1: let the crew search the internet for matching links.
        print(f"Searching for music: {query}")
        hits = search_internet(query)
        print(f"Found {len(hits)} results")

        # Step 2: scrape each hit for full song metadata.
        print("Getting music details")
        details = get_music_details(hits)
        print(f"Got details for {len(details)} songs")
        print(f"Music details: {details}")
        print("Done")
    except Exception as e:
        print(f"An error occurred: {e!s}")

    return details
27
+
28
+
29
def search_internet(query: str) -> list[dict]:
    """Run the crew to find JioSaavn links for *query*; return [] on odd output."""
    crew_inputs = {"website": "https://www.jiosaavn.com", "topic": query}
    outcome = JioSavanMusicDownloaderAgent().crew().kickoff(inputs=crew_inputs)
    found = outcome.to_dict().get("links", [])
    # Defensive: the LLM output may not match the expected schema.
    if isinstance(found, list):
        return found
    return []
35
+
36
+
37
def get_music_details(songs: list[dict]) -> list[dict]:
    """Scrape each song's page (by its "link" key) and collect the details."""
    return scrape_pages([song["link"] for song in songs])
resume_maker_ai_agent/services/web_scarapper_service.py ADDED
@@ -0,0 +1,226 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import time
2
+ from datetime import datetime
3
+ from typing import Any
4
+ from urllib.parse import urlparse
5
+
6
+ import requests
7
+ from bs4 import BeautifulSoup
8
+ from selenium import webdriver
9
+ from selenium.common.exceptions import TimeoutException
10
+ from selenium.webdriver.chrome.service import Service
11
+ from selenium.webdriver.common.by import By
12
+ from selenium.webdriver.support import expected_conditions as EC
13
+ from selenium.webdriver.support.ui import WebDriverWait
14
+
15
+
16
def _setup_driver() -> webdriver.Chrome:
    """Create a headless Chrome WebDriver configured for container use.

    Tries the system chromedriver at ``/usr/bin/chromedriver`` first; if that
    fails, falls back to Selenium's own driver discovery.

    :return: a ready-to-use ``webdriver.Chrome`` instance (caller must quit it)
    """
    # Setup Chrome WebDriver (or other driver)
    options = webdriver.ChromeOptions()

    # Essential container arguments
    options.add_argument("--headless=new")
    options.add_argument("--disable-gpu")
    options.add_argument("--no-sandbox")
    options.add_argument("--disable-dev-shm-usage")

    # JavaScript-specific configurations
    options.add_argument("--enable-javascript")
    options.add_argument("--disable-web-security")
    options.add_argument("--allow-running-insecure-content")

    # Performance optimizations
    options.add_argument("--window-size=1920,1080")
    options.add_argument("--disable-extensions")
    options.add_argument("--disable-setuid-sandbox")

    # Memory management
    options.add_argument("--disable-dev-tools")
    options.add_argument("--no-zygote")
    options.add_argument("--single-process")

    # Handle Chrome Driver installation
    try:
        # For container environments, use the preinstalled chromedriver binary.
        print("Setting up Chrome WebDriver")
        chrome_service = Service("/usr/bin/chromedriver")
        driver = webdriver.Chrome(service=chrome_service, options=options)
        # FIX: the original created a second webdriver.Chrome(options=options)
        # right here, leaking the Service-configured driver's Chrome process
        # and silently discarding the /usr/bin/chromedriver configuration.
    except Exception as e:
        # Fallback: let Selenium locate a driver on its own.
        print(f"An error occurred: {e!s}")
        print("Falling back to direct path")

        driver = webdriver.Chrome(options=options)
    return driver
55
+
56
+
57
+ def _get_downloadable_audio_link(url: str) -> str:
58
+ if not url:
59
+ return ""
60
+
61
+ # Extract the album ID and file ID from the URL
62
+ parsed_url = urlparse(url)
63
+ path_parts = parsed_url.path.split("/")
64
+ album_id = path_parts[-2]
65
+ file_id = path_parts[-1].split(".")[0]
66
+
67
+ # Construct the downloadable audio link
68
+ return f"https://aac.saavncdn.com/{album_id}/{file_id}.mp4"
69
+
70
+
71
+ def _extract_musician_name(url: str) -> str:
72
+ return url.split("/")[-2].replace("-songs", "").replace("-", " ").title()
73
+
74
+
75
def scrape_dynamic_page(url: str, wait_time: int = 5) -> dict[str, Any]:
    """
    Scrape a webpage including content loaded by JavaScript.

    Loads the page in headless Chrome, clicks the primary action button to
    trigger the player markup, then parses the resulting HTML with
    BeautifulSoup, extracting album/song metadata from the page's meta tags.

    Parameters:
    url (str): The URL to scrape
    wait_time (int): Maximum time to wait for dynamic content to load

    Returns:
    dict: Dictionary containing various elements from the page; {} on any error
    """
    driver = _setup_driver()

    try:
        # Load the page
        driver.get(url)

        # Wait for the button to be present
        button = WebDriverWait(driver, wait_time).until(
            EC.presence_of_element_located((By.CSS_SELECTOR, 'a.c-btn.c-btn--primary[data-btn-icon="q"]'))
        )

        # Check visibility and enablement
        is_displayed = button.is_displayed()
        is_enabled = button.is_enabled()
        print(f"Button displayed: {is_displayed}, Button enabled: {is_enabled}")

        if is_displayed and is_enabled:
            # Click via JS after scrolling into view (avoids interception by
            # overlays that a native .click() could hit).
            driver.execute_script("arguments[0].scrollIntoView(true);", button)
            driver.execute_script("arguments[0].click();", button)
        else:
            print("Button is not interactable!")

        # Wait a moment for any JavaScript updates
        # NOTE(review): fixed 5s sleep, independent of wait_time — confirm intent.
        time.sleep(5)

        # Get the updated HTML
        html_content = driver.page_source
        soup = BeautifulSoup(html_content, "html.parser")

        # Extract elements. Each meta lookup is guarded so a missing tag
        # yields "" rather than a TypeError.
        details = {
            "album_title": soup.title.text if soup.title else "",
            "description": soup.find("meta", {"name": "description"})["content"]
            if soup.find("meta", {"name": "description"})
            else "",
            "album_description": soup.find("meta", {"property": "og:description"})["content"]
            if soup.find("meta", {"property": "og:description"})
            else "",
            "album_url": soup.find("meta", {"property": "music:album"})["content"]
            if soup.find("meta", {"property": "music:album"})
            else "",
            "album_image_url": soup.find("meta", {"property": "twitter:image"})["content"]
            if soup.find("meta", {"property": "twitter:image"})
            else "",
            "song_info": {
                "name": soup.title.text if soup.title else "",
                "title": soup.find("meta", {"property": "twitter:title"})["content"]
                if soup.find("meta", {"property": "twitter:title"})
                else "",
                "musician": [
                    _extract_musician_name(musician["content"])
                    for musician in soup.find_all("meta", {"property": "music:musician"})
                ],
                "release_date": datetime.strptime(
                    soup.find("meta", {"property": "music:release_date"})["content"],
                    "%Y-%m-%d",
                ).strftime("%B %d, %Y")
                if soup.find("meta", {"property": "music:release_date"})
                else "",
                "song_url": soup.find("meta", {"property": "twitter:url"})["content"]
                if soup.find("meta", {"property": "twitter:url"})
                else "",
                "description": soup.find("meta", {"property": "twitter:description"})["content"]
                if soup.find("meta", {"property": "twitter:description"})
                else "",
                # NOTE(review): soup.find("audio") is NOT guarded — a page with
                # no <audio> tag raises AttributeError here, which the broad
                # except below converts into an empty result. Confirm intended.
                "downloadable_url": _get_downloadable_audio_link(
                    soup.find("audio").find("source")["src"] if soup.find("audio").find("source") else ""
                ),
                "song_lyrics_url": "https://www.jiosaavn.com" + soup.find("a", title="Song Lyrics")["href"]
                if soup.find("a", title="Song Lyrics")
                else "",
            },
        }
    except TimeoutException:
        print(f"Timeout waiting for page to load: {url}")
        return {}
    except Exception as e:
        print(f"An error occurred: {e!s}")
        return {}
    else:
        return details
    finally:
        # Always release the browser process, on success and failure alike.
        driver.quit()
170
+
171
+
172
def scrape_pages(urls: list[str]) -> list[dict]:
    """
    Scrape multiple webpages and return a list of elements.

    Pages that fail to scrape (empty dicts) are filtered out.

    Parameters:
    urls (list of str): List of URLs to scrape

    Returns:
    list of dict: List of dictionaries containing various elements from each page
    """
    return [page for page in (scrape_dynamic_page(url) for url in urls) if page]
189
+
190
+
191
def download_file(url: str) -> None:
    """
    Download a file from a URL and save it under the downloads/ directory.

    The local file name is derived from the URL path via _get_filename_name.
    Assumes the "downloads" directory already exists — TODO confirm/create.

    Parameters:
    url (str): URL of the file to be downloaded

    Returns:
    None
    """
    try:
        response = requests.get(url, stream=True, timeout=10)

        # Check if the request was successful
        if response.status_code == 200:
            # FIX: the original computed the name and then clobbered it with a
            # placeholder-less f-string, so every download wrote the same file
            # and the success message never showed the real name.
            filename = f"downloads/{_get_filename_name(url)}"

            with open(filename, "wb") as file:
                # Write the content of the response to the file in chunks
                for chunk in response.iter_content(chunk_size=8192):
                    file.write(chunk)
            print(f"File downloaded successfully as '{filename}'")
        else:
            print(f"Failed to download file. HTTP Status Code: {response.status_code}")
    except requests.exceptions.Timeout:
        print(f"Request to {url} timed out.")
    except requests.exceptions.RequestException as e:
        print(f"Request to {url} failed: {e}")
+
222
+
223
+ def _get_filename_name(url: str) -> str:
224
+ parsed_url = urlparse(url)
225
+ path_parts = parsed_url.path.split("/")
226
+ return path_parts[2]
resume_maker_ai_agent/tools/__init__.py ADDED
File without changes
resume_maker_ai_agent/tools/custom_tool.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ from crewai_tools import FileWriterTool, ScrapeWebsiteTool, SerperDevTool
2
+
3
# Scraper pinned to the JioSaavn home page.
# NOTE(review): defined but not referenced by the crew in this commit — confirm
# before removing.
jio_savan_scapper_tool = ScrapeWebsiteTool(website_url="https://www.jiosaavn.com")
# Generic file-writer tool (also unused by the current crew setup — verify).
file_writer_tool = FileWriterTool()
# Serper.dev web search: the only tool wired into the music research task.
search_tool = SerperDevTool(
    country="in",  # Set to 'in' for India
    locale="en",  # Set locale to English
    n_results=5,  # You can adjust the number of results as needed
)
tox.ini ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [tox]
2
+ skipsdist = true
3
+ envlist = py311, py312
4
+
5
+ [gh-actions]
6
+ python =
7
+ 3.11: py311
8
+ 3.12: py312
9
+
10
+ [testenv]
11
+ passenv = PYTHON_VERSION
12
+ allowlist_externals = poetry
13
+ commands =
14
+ poetry install -v
15
+ pytest --doctest-modules tests --cov --cov-config=pyproject.toml --cov-report=xml
16
+ mypy