Vjay15 commited on
Commit
877b38e
·
verified ·
1 Parent(s): 618d791

Added Files to HF space

Browse files

Contains all the required files

Files changed (10) hide show
  1. .gitignore +207 -0
  2. .python-version +1 -0
  3. Dockerfile +20 -0
  4. LICENSE +21 -0
  5. README.md +70 -12
  6. docker-compose.yml +11 -0
  7. llm_utils.py +135 -0
  8. main.py +149 -0
  9. pyproject.toml +14 -0
  10. uv.lock +0 -0
.gitignore ADDED
@@ -0,0 +1,207 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[codz]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py.cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # UV
98
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ #uv.lock
102
+
103
+ # poetry
104
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
106
+ # commonly ignored for libraries.
107
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108
+ #poetry.lock
109
+ #poetry.toml
110
+
111
+ # pdm
112
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
113
+ # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
114
+ # https://pdm-project.org/en/latest/usage/project/#working-with-version-control
115
+ #pdm.lock
116
+ #pdm.toml
117
+ .pdm-python
118
+ .pdm-build/
119
+
120
+ # pixi
121
+ # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
122
+ #pixi.lock
123
+ # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
124
+ # in the .venv directory. It is recommended not to include this directory in version control.
125
+ .pixi
126
+
127
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
128
+ __pypackages__/
129
+
130
+ # Celery stuff
131
+ celerybeat-schedule
132
+ celerybeat.pid
133
+
134
+ # SageMath parsed files
135
+ *.sage.py
136
+
137
+ # Environments
138
+ .env
139
+ .envrc
140
+ .venv
141
+ env/
142
+ venv/
143
+ ENV/
144
+ env.bak/
145
+ venv.bak/
146
+
147
+ # Spyder project settings
148
+ .spyderproject
149
+ .spyproject
150
+
151
+ # Rope project settings
152
+ .ropeproject
153
+
154
+ # mkdocs documentation
155
+ /site
156
+
157
+ # mypy
158
+ .mypy_cache/
159
+ .dmypy.json
160
+ dmypy.json
161
+
162
+ # Pyre type checker
163
+ .pyre/
164
+
165
+ # pytype static type analyzer
166
+ .pytype/
167
+
168
+ # Cython debug symbols
169
+ cython_debug/
170
+
171
+ # PyCharm
172
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
173
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
174
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
175
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
176
+ #.idea/
177
+
178
+ # Abstra
179
+ # Abstra is an AI-powered process automation framework.
180
+ # Ignore directories containing user credentials, local state, and settings.
181
+ # Learn more at https://abstra.io/docs
182
+ .abstra/
183
+
184
+ # Visual Studio Code
185
+ # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
186
+ # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
187
+ # and can be added to the global gitignore or merged into this file. However, if you prefer,
188
+ # you could uncomment the following to ignore the entire vscode folder
189
+ # .vscode/
190
+
191
+ # Ruff stuff:
192
+ .ruff_cache/
193
+
194
+ # PyPI configuration file
195
+ .pypirc
196
+
197
+ # Cursor
198
+ # Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
199
+ # exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
200
+ # refer to https://docs.cursor.com/context/ignore-files
201
+ .cursorignore
202
+ .cursorindexingignore
203
+
204
+ # Marimo
205
+ marimo/_static/
206
+ marimo/_lsp/
207
+ __marimo__/
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.13
Dockerfile ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Use Python base image
2
+ FROM python:3.12-slim-trixie
3
+
4
+ # Copy UV directly from official image
5
+ COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
6
+
7
+ # Set working directory
8
+ WORKDIR /app
9
+
10
+ # Copy only dependency files first
11
+ COPY pyproject.toml ./
12
+
13
+ # Install dependencies using UV sync
14
+ RUN /bin/uv sync
15
+
16
+ # Now copy application files
17
+ COPY main.py llm_utils.py ./
18
+
19
+ # Run the application using UV
20
+ CMD ["/bin/uv", "run", "main.py"]
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2025 22f3000730
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md CHANGED
@@ -1,12 +1,70 @@
1
- ---
2
- title: CodeGen
3
- emoji: 🦀
4
- colorFrom: pink
5
- colorTo: red
6
- sdk: docker
7
- pinned: false
8
- license: mit
9
- short_description: This is the space created for TDS project 1
10
- ---
11
-
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # CodeGen
2
+
3
+ A FastAPI service that generates single-page web applications using AI. This service takes natural language descriptions and automatically generates web applications, creates GitHub repositories, and handles deployments.
4
+
5
+ ## Features
6
+
7
+ - AI-powered web application generation
8
+ - Automatic GitHub repository creation and management
9
+ - GitHub Pages hosting setup
10
+ - Built-in evaluation submission handling
11
+ - Support for multi-round updates
12
+ - Secure API with secret key authentication
13
+
14
+ ## Requirements
15
+
16
+ - Python 3.10 or higher
17
+ - GitHub account with API access
18
+ - AI API key (OpenRouter)
19
+
20
+ ## Setup
21
+
22
+ 1. Clone the repository:
23
+ ```bash
24
+ git clone <repo url or ssh>
25
+ cd codegen
26
+ ```
27
+
28
+ 2. Create a virtual environment:
29
+ ```bash
30
+ python -m venv .venv
31
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
32
+ ```
33
+
34
+ 3. Create a `.env` file with your credentials:
35
+ ```
36
+ SECRET_KEY=your_secret
37
+ GITHUB_TOKEN=your_github_token
38
+ AI_API_KEY=your_ai_key
39
+ ```
40
+
41
+ 4. Install dependencies:
42
+ ```bash
43
+ uv sync
44
+ ```
45
+
46
+ 5. Install development dependencies:
47
+ ```bash
48
+ uv sync -G dev
49
+ ```
50
+
51
+ ## Usage
52
+
53
+ Start the development server:
54
+ ```bash
55
+ uvicorn main:app --reload
56
+ ```
57
+
58
+ The server will listen on `http://localhost:8000` for incoming task requests.
59
+
60
+ ### API Endpoints
61
+
62
+ - `GET /`: Health check endpoint
63
+ - `POST /task1`: Main endpoint for task processing
64
+ - Requires authentication via `secret` field
65
+ - Handles both initial creation (round 1) and updates (round > 1)
66
+ - Returns repository and GitHub Pages URLs
67
+
68
+ ## License
69
+
70
+ MIT License - See LICENSE file for details
docker-compose.yml ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
# Compose service for running the CodeGen FastAPI app locally.
services:
  web:
    build: .
    ports:
      - "8000:8000"  # uvicorn in main.py listens on 8000
    environment:
      # Forwarded from the host environment (or a local .env file).
      - SECRET_KEY=${SECRET_KEY}
      - GITHUB_TOKEN=${GITHUB_TOKEN}
      - AI_API_KEY=${AI_API_KEY}
    volumes:
      # NOTE(review): bind-mounting the project over /app shadows the image's
      # /app contents (including the uv-created .venv) — confirm intended.
      - .:/app
llm_utils.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # llm_utils.py
2
+ import os
3
+ import requests
4
+ import re
5
+ import json
6
+ from typing import Optional, List, Dict, Any
7
+ from github import Github
8
+
9
def generate_app_files(brief: str, checks: List[str], attachments: Optional[List[Dict[str, str]]] = None, round: int = 1, task: Optional[str] = None) -> Dict[str, Any]:
    """Ask the LLM (via the aipipe OpenRouter proxy) to generate the app files.

    Args:
        brief: Natural-language description of the app to build.
        checks: Test conditions the generated app must satisfy.
        attachments: Optional attachments (e.g. data URIs) forwarded verbatim.
        round: 1 for initial creation; >1 fetches existing repo files as context.
        task: Repository name (under the token's user) used to fetch existing files.

    Returns:
        Dict with "index" (complete HTML) and "README" (markdown), plus an
        optional "assets" mapping of extra filename -> string content.

    Raises:
        RuntimeError: if AI_API_KEY is unset or the API request fails.
    """
    api_key = os.getenv("AI_API_KEY")
    github_token = os.getenv("GITHUB_TOKEN")
    if not api_key:
        raise RuntimeError("AI_API_KEY environment variable is required")

    # Get existing code for rounds > 1 so the model updates instead of rewriting.
    existing_files: Dict[str, str] = {}
    if round > 1 and github_token and task:
        try:
            g = Github(github_token)
            user = g.get_user()
            # Just use the task name as repo name, under the authenticated user.
            repo = g.get_repo(f"{user.login}/{task}")
            for filename in ["index.html", "README.md"]:
                try:
                    file_content = repo.get_contents(filename)
                    existing_files[filename] = file_content.decoded_content.decode('utf-8')
                except Exception as e:
                    # Fix: report WHICH file failed (the old message printed the
                    # literal "(unknown)" instead of the filename).
                    print(f"Failed to fetch {filename}: {e}")
        except Exception as e:
            print(f"Failed to access repository: {str(e)}")

    # System message: base generation instructions, extended in update mode.
    system_msg = (
        'Create a single-page web application that implements the requirements.\n\n'
        'Output format - JSON object with:\n'
        '- "index": Complete HTML file with implementation\n'
        '- "README": Documentation markdown file\n\n'
        'Technical requirements:\n'
        '1. Process data client-side\n'
        '2. Use CDN libraries when needed\n'
        '3. Base64 handling:\n'
        '   - Keep ${...} strings in data URIs as-is\n'
        '   - Do not try to decode template literals\n'
        '   - Example: data:text/csv;base64,${someBase64} should be used directly\n'
        '4. Match exact IDs from brief\n'
        '5. Handle all test conditions\n\n'
        'Best practices:\n'
        '- Process encoded data at runtime\n'
        '- Keep template literals intact\n'
        '- Include error handling\n'
        '- Verify all test checks pass'
    )

    if round > 1 and existing_files:
        system_msg += (
            '\n\nUpdate mode:\n'
            '- Use existing files as base\n'
            '- Preserve working features\n'
            '- Add new requirements\n'
            '- Maintain code structure\n'
            '- Use the attachments if provided\n'
        )

    # Prepare headers for the direct request to the OpenRouter proxy.
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }

    # User message carries the full task context as a JSON document.
    user_payload = {
        "brief": brief,
        "checks": checks,
        "attachments": attachments,
        "round": round,
        "note": "Important: Keep ${...} template literals intact in data URIs"
    }
    if existing_files:
        user_payload["existing_files"] = existing_files

    payload = {
        "model": "qwen/qwen3-coder",
        "messages": [
            {"role": "system", "content": system_msg},
            {"role": "user", "content": json.dumps(user_payload, ensure_ascii=False, indent=2)}
        ]
    }

    resp = requests.post(
        "https://aipipe.org/openrouter/v1/chat/completions",
        headers=headers,
        json=payload,
        timeout=300,  # fix: previously no timeout — a stuck request hung forever
    )

    if not resp.ok:
        raise RuntimeError(f"API request failed: {resp.status_code} {resp.text}")

    # Extract content from OpenRouter response format.
    try:
        content = resp.json()["choices"][0]["message"]["content"]
    except Exception:
        content = resp.text

    # Parse the JSON; if the model wrapped it in markdown fences or prose,
    # fall back to the first-to-last-brace span.
    # Fix: stdlib `re` supports neither an `R` attribute nor the recursive
    # (?R) construct, so the original `hasattr(re, 'R')` branch was dead code
    # (and would have raised re.error if it ever ran) — the greedy fallback
    # was always what executed.
    json_obj = None
    try:
        json_obj = json.loads(content)
    except Exception:
        m = re.search(r"\{.*\}", content, re.S)
        if m:
            try:
                json_obj = json.loads(m.group(0))
            except Exception:
                json_obj = None

    # If parsing succeeded and has required keys, normalize and return it.
    if isinstance(json_obj, dict) and "index" in json_obj and "README" in json_obj:
        result = {"index": json_obj["index"], "README": json_obj["README"]}
        assets = json_obj.get("assets")
        if isinstance(assets, dict):
            # Ensure asset keys and values are strings (None becomes "").
            sanitized_assets: Dict[str, str] = {}
            for k, v in assets.items():
                if isinstance(k, str) and (isinstance(v, str) or v is None):
                    sanitized_assets[k] = v or ""
            if sanitized_assets:
                result["assets"] = sanitized_assets
        return result

    # Fallback: return the assistant output as the index.html and a basic README.
    return {
        "index": content,
        "README": f"# {brief}\n\n{content}",
    }
main.py ADDED
@@ -0,0 +1,149 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # main.py
2
+ from fastapi import FastAPI, HTTPException, BackgroundTasks
3
+ from pydantic import BaseModel
4
+ import requests,time, os
5
+ from dotenv import load_dotenv
6
+ from github import Github # from PyGithub
7
+ from llm_utils import generate_app_files # your LLM logic
8
+ from typing import Optional, List, Dict
9
+
10
+ app = FastAPI()
11
+ load_dotenv() # take environment variables from .env
12
+
13
+ SHARED_SECRET = os.getenv("SECRET_KEY")
14
+ GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
15
+
16
class TaskRequest(BaseModel):
    """Payload the evaluator POSTs to /task1.

    `task` doubles as the GitHub repository name; `secret` must match the
    service's SECRET_KEY; `round` 1 creates the repo, later rounds update it.
    """
    email: str
    task: str
    brief: str
    checks: List[str]
    round: int
    nonce: str
    secret: str
    evaluation_url: str
    # Optional attachments (e.g. data URIs) forwarded to the LLM.
    attachments: Optional[List[Dict[str, str]]] = None
26
+
27
@app.get("/")
def root():
    """Health-check endpoint: reports that the service is up."""
    health = {"status": "ok"}
    return health
30
+
31
def process_task_in_background(req: TaskRequest):
    """Generate the app, push it to GitHub, and notify the evaluation URL.

    Runs as a Starlette background task. Fix: declared as a plain (sync)
    function on purpose — every call here is blocking (LLM HTTP request,
    PyGithub, time.sleep(60)), and BackgroundTasks runs sync callables in a
    threadpool. The original `async def` version ran these blocking calls on
    the event loop, stalling the whole server for 60+ seconds per task.

    NOTE: exceptions raised here are only logged by Starlette — the HTTP
    response to the client was already sent by /task1.
    """
    # 1. Generate files via the LLM, passing prior-round context when round > 1.
    app_files = generate_app_files(
        brief=req.brief,
        checks=req.checks,
        attachments=req.attachments,
        round=req.round,
        task=req.task
    )

    # Validate LLM output structure before touching GitHub.
    if not isinstance(app_files, dict) or "index" not in app_files or "README" not in app_files:
        raise HTTPException(status_code=500, detail="LLM did not return expected file structure")

    # 2. Initialize GitHub client; the task name is the repository name.
    g = Github(GITHUB_TOKEN)
    user = g.get_user()
    repo_name = f"{req.task}"

    if req.round == 1:
        # Round 1: create a fresh public repo.
        try:
            repo = user.create_repo(repo_name, private=False, auto_init=False, license_template="mit")
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to create repo: {e}")
    else:
        # Round >1: reuse the repo created in round 1.
        try:
            repo = g.get_repo(f"{user.login}/{repo_name}")
        except Exception as e:
            raise HTTPException(status_code=404, detail=f"Repository not found: {e}")

    # 3. Commit generated files plus any extra assets the LLM produced.
    files_to_commit = {
        "index.html": app_files["index"],
        "README.md": app_files["README"],
    }
    if isinstance(app_files.get("assets"), dict):
        files_to_commit.update(app_files["assets"])

    for path, content in files_to_commit.items():
        try:
            if req.round == 1:
                # Create new file for round 1.
                repo.create_file(path, f"add {path}", content)
            else:
                try:
                    # Update in place when the file already exists...
                    file = repo.get_contents(path)
                    repo.update_file(path, f"update {path} for round {req.round}", content, file.sha)
                except Exception:
                    # ...otherwise create it.
                    repo.create_file(path, f"add {path} for round {req.round}", content)
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to handle file {path}: {e}")

    # 4. Enable GitHub Pages only for round 1 (best effort — failures ignored).
    if req.round == 1:
        try:
            requests.post(
                f"https://api.github.com/repos/{user.login}/{repo_name}/pages",
                headers={
                    "Authorization": f"token {GITHUB_TOKEN}",
                    "Accept": "application/vnd.github.v3+json",
                },
                json={"source": {"branch": "main", "path": "/"}},
                timeout=30,  # fix: previously no timeout — could hang indefinitely
            )
        except Exception:
            pass

    # 5. Latest commit SHA for the evaluation payload.
    commit_sha = repo.get_commits()[0].sha

    payload = {
        "email": req.email,
        "task": req.task,
        "round": req.round,
        "nonce": req.nonce,
        "repo_url": repo.html_url,
        "commit_sha": commit_sha,
        "pages_url": f"https://{user.login}.github.io/{repo_name}/",
    }

    # Give GitHub Pages time to build before the evaluator fetches the site.
    # (Safe to block here: we run on a threadpool thread, not the event loop.)
    time.sleep(60)

    # 6. POST to evaluation URL with exponential backoff — up to 5 attempts.
    delay = 1
    headers = {"Content-Type": "application/json"}
    for _ in range(5):
        try:
            r = requests.post(req.evaluation_url, json=payload, headers=headers, timeout=10)
            if r.status_code == 200:
                break
        except Exception:
            pass
        time.sleep(delay)
        delay *= 2
134
+
135
@app.post("/task1")
async def handle_task(req: TaskRequest, background_tasks: BackgroundTasks):
    """Accept a task request, verify the shared secret, and queue processing.

    Returns an acknowledgment immediately; the heavy work (LLM generation,
    GitHub commits, evaluation callback) runs in a background task.
    """
    import hmac  # local import keeps the module's import block untouched

    # 1. Secret verification. Fix: `!=` short-circuits on the first differing
    # character, leaking timing information about the secret; compare_digest
    # is constant-time. An unset SECRET_KEY still rejects every request,
    # matching the original behavior (None could never equal a str secret).
    if SHARED_SECRET is None or not hmac.compare_digest(req.secret, SHARED_SECRET):
        raise HTTPException(status_code=403, detail="Invalid secret")

    # 2. Queue the long-running processing and acknowledge right away.
    background_tasks.add_task(process_task_in_background, req)

    return {"status": "accepted", "message": "Request received and processing"}
146
+
147
if __name__ == "__main__":
    # Dev entry point: serve on all interfaces, port 8000, with auto-reload.
    import uvicorn
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
pyproject.toml ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "codegen"
3
+ version = "0.1.0"
4
+ description = "Add your description here"
5
+ readme = "README.md"
6
+ requires-python = ">=3.10"
7
+ dependencies = [
8
+ "dotenv>=0.9.9",
9
+ "fastapi>=0.119.0",
10
+ "openai>=2.3.0",
11
+ "pygithub>=2.8.1",
12
+ "requests>=2.32.5",
13
+ "uvicorn>=0.37.0",
14
+ ]
uv.lock ADDED
The diff for this file is too large to render. See raw diff