23f3003322 committed on
Commit
af43a76
·
1 Parent(s): b7e5593

round 1 complete

Browse files
Files changed (5) hide show
  1. app/deployer.py +137 -60
  2. app/llm.py +52 -8
  3. app/models.py +2 -2
  4. app/rounds.py +2 -2
  5. app/utils.py +23 -0
app/deployer.py CHANGED
@@ -5,6 +5,7 @@ import base64
5
  from dotenv import load_dotenv
6
  from typing import List,Optional
7
  from .models import FileContext
 
8
  load_dotenv()
9
 
10
 
@@ -33,84 +34,144 @@ async def create_github_repo(repo_name: str) -> dict:
33
  logger.error(f"Failed to create GitHub repo: {e}")
34
  raise RuntimeError(f"GitHub repo creation failed: {e}") from e
35
 
36
- async def get_latest_commit_sha(repo: str,):
37
  async with httpx.AsyncClient() as client:
38
  try:
39
-
40
  ref_url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/git/ref/heads/main"
41
  ref_resp = await client.get(ref_url, headers=headers)
 
 
42
  ref_resp.raise_for_status()
43
  ref_data = ref_resp.json()
44
  commit_sha = ref_data["object"]["sha"]
45
  return commit_sha
46
-
47
  except httpx.HTTPStatusError as http_err:
48
- if http_err.response.status_code == 409:
49
- logger.warning(f"Repository {OWNER}/{repo} is empty, no commits found.")
50
  return None
51
- logger.error(f"GitHub API request failed: {http_err.response.status_code} {http_err.response.text}")
52
- raise
53
- except httpx.RequestError as req_err:
54
- logger.error(f"Network error while calling GitHub API: {req_err}")
55
- raise
56
- except Exception as exc:
57
- logger.error(f"Unexpected error during GitHub commit: {exc}", exc_info=True)
58
- raise
 
 
 
 
 
 
59
 
60
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
61
 
62
  async def push_files_to_github_repo(
63
  repo: str,
64
  files: List[FileContext],
65
- commit_message: str = "Add generated files",
66
- ) -> Optional[str]:
67
  """
68
- Pushes files individually to the GitHub repo using the 'create or update file' API endpoint.
69
- Returns the SHA of the last commit made.
70
  """
 
 
 
 
 
 
 
 
71
 
72
- last_commit_sha = None
73
 
74
  async with httpx.AsyncClient() as client:
75
- for file in files:
76
- try:
77
- url = f"https://api.github.com/repos/{OWNER}/{repo}/contents/{file.file_name}"
78
-
79
- # Check if the file exists to get its sha (required for updates)
80
- get_resp = await client.get(url, headers=headers, params={"ref": "main"})
81
- if get_resp.status_code == 200:
82
- sha = get_resp.json()["sha"]
83
- elif get_resp.status_code == 404:
84
- sha = None # File doesn't exist, create new
85
- else:
86
- get_resp.raise_for_status() # Raise for other HTTP errors
87
-
88
- payload = {
89
- "message": commit_message,
90
- "content": base64.b64encode(file.file_content.encode("utf-8")).decode("utf-8"),
91
- "branch": "main",
92
- }
93
- if sha:
94
- payload["sha"] = sha
95
-
96
- resp = await client.put(url, headers=headers, json=payload)
97
- resp.raise_for_status()
98
-
99
- last_commit_sha = resp.json()["commit"]["sha"]
100
- logger.info(f"Committed {file.file_name} (sha: {last_commit_sha})")
101
-
102
- except httpx.HTTPStatusError as http_err:
103
- logger.error(f"GitHub API HTTP error for file {file.file_name}: {http_err.response.status_code} {http_err.response.text}")
104
- raise
105
- except httpx.RequestError as req_err:
106
- logger.error(f"Network error while pushing file {file.file_name}: {req_err}")
107
- raise
108
- except Exception as exc:
109
- logger.error(f"Unexpected error for file {file.file_name}: {exc}", exc_info=True)
110
- raise
111
-
112
- return last_commit_sha
113
 
 
 
 
114
 
115
  async def enable_github_pages(repo: str):
116
  url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/pages"
@@ -123,13 +184,29 @@ async def enable_github_pages(repo: str):
123
 
124
  async with httpx.AsyncClient() as client:
125
  try:
126
- response = await client.post(url, json=payload, headers=headers)
127
  response.raise_for_status()
 
128
  logger.info(f"GitHub Pages enabled for {OWNER}/{repo}@main/")
129
- return response.json()
 
 
 
130
  except httpx.HTTPStatusError as exc:
131
- logger.error(f"Failed to enable GitHub Pages: {exc.response.status_code} - {exc.response.text}")
132
- raise
 
 
 
 
 
 
 
 
 
 
 
 
133
  except Exception as e:
134
  logger.error(f"Unexpected error enabling GitHub Pages: {str(e)}")
135
  raise
 
5
  from dotenv import load_dotenv
6
  from typing import List,Optional
7
  from .models import FileContext
8
+ from .utils import retry_request
9
  load_dotenv()
10
 
11
 
 
34
  logger.error(f"Failed to create GitHub repo: {e}")
35
  raise RuntimeError(f"GitHub repo creation failed: {e}") from e
36
 
37
async def get_latest_commit_sha(repo: str):
    """Return the SHA of the latest commit on ``main``, or ``None``.

    ``None`` means the branch does not exist yet — GitHub answers 404
    (ref missing) or 409 (repository exists but has no commits) for the
    ref lookup on an empty repository.

    Raises:
        httpx.HTTPStatusError: for any other non-2xx response.
    """
    ref_url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/git/ref/heads/main"
    async with httpx.AsyncClient() as client:
        ref_resp = await client.get(ref_url, headers=headers)
    # 404 = branch/ref not found; 409 = repo empty --> no commits yet.
    # Handling both here makes the previous duplicate except-branch
    # (which was unreachable after this early return) unnecessary.
    if ref_resp.status_code in (404, 409):
        return None
    ref_resp.raise_for_status()
    return ref_resp.json()["object"]["sha"]
53
+
54
async def create_initial_commit(repo: str):
    """Seed an empty GitHub repo with a README.md so ``main`` exists.

    Uses the contents API (PUT), which creates both the file and the
    initial commit in one call. Returns the parsed JSON response.
    """
    readme = "# Initial Commit\n\nThis is the initial commit."
    payload = {
        "message": "Initial commit with README.md",
        # Contents API requires base64-encoded file content.
        "content": base64.b64encode(readme.encode("utf-8")).decode("utf-8"),
        "branch": "main",
    }
    url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/contents/README.md"
    async with httpx.AsyncClient() as client:
        resp = await retry_request(client.put, url, headers=headers, json=payload)
    resp.raise_for_status()
    return resp.json()
72
+
73
async def get_tree_sha(repo: str, commit_sha: str) -> str:
    """Return the SHA of the git tree attached to ``commit_sha``."""
    url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/git/commits/{commit_sha}"
    async with httpx.AsyncClient() as client:
        resp = await retry_request(client.get, url, headers=headers)
    resp.raise_for_status()
    return resp.json()["tree"]["sha"]
79
+
80
async def create_blob(repo: str, file: FileContext) -> str:
    """Upload one file's content as a git blob; return the blob SHA."""
    url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/git/blobs"
    payload = {"content": file.file_content, "encoding": "utf-8"}
    async with httpx.AsyncClient() as client:
        resp = await retry_request(client.post, url, headers=headers, json=payload)
        resp.raise_for_status()
        return resp.json()["sha"]
90
+
91
async def create_tree(repo: str, tree_sha: Optional[str], blob_entries: List[dict]) -> str:
    """Create a git tree from ``blob_entries``; return the new tree SHA.

    When ``tree_sha`` is given, the new tree is layered on top of it
    (``base_tree``) so existing files in the repo are preserved.
    """
    url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/git/trees"
    payload = {"tree": blob_entries}
    if tree_sha:
        payload["base_tree"] = tree_sha
    async with httpx.AsyncClient() as client:
        resp = await retry_request(client.post, url, headers=headers, json=payload)
        resp.raise_for_status()
        return resp.json()["sha"]
100
+
101
async def create_commit(repo: str, commit_message: str, new_tree_sha: str, commit_sha: Optional[str]) -> str:
    """Create a commit object pointing at ``new_tree_sha``; return its SHA.

    ``commit_sha`` (when given) becomes the parent; omitted for a root
    commit in an empty repository.
    """
    payload = {"message": commit_message, "tree": new_tree_sha}
    if commit_sha:
        payload["parents"] = [commit_sha]
    url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/git/commits"
    async with httpx.AsyncClient() as client:
        resp = await retry_request(client.post, url, headers=headers, json=payload)
        resp.raise_for_status()
        return resp.json()["sha"]
114
+
115
async def update_ref(repo: str, new_commit_sha: str, commit_sha_exists: bool):
    """Point ``refs/heads/main`` at ``new_commit_sha``.

    When the ref already exists it is PATCHed; otherwise it is created
    via POST to the collection endpoint ``.../git/refs``.

    BUG FIX: the previous create path built the URL with
    ``update_ref_url.replace("/git/refs/heads", "/git/refs")`` which
    produced ``.../git/refs/main`` — GitHub's create-ref endpoint is
    ``POST /repos/{owner}/{repo}/git/refs`` with the ref name in the
    JSON body, so the create call targeted the wrong URL.
    """
    refs_base = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/git/refs"
    async with httpx.AsyncClient() as client:
        if commit_sha_exists:
            resp = await retry_request(
                client.patch,
                f"{refs_base}/heads/main",
                headers=headers,
                json={"sha": new_commit_sha},
            )
        else:
            # Create the ref since it doesn't exist yet.
            resp = await retry_request(
                client.post,
                refs_base,
                headers=headers,
                json={"ref": "refs/heads/main", "sha": new_commit_sha},
            )
    resp.raise_for_status()
129
 
130
async def push_files_to_github_repo(
    repo: str,
    files: List[FileContext],
    commit_message: str = "Add generated project files",
):
    """
    Push ``files`` to ``repo``@main as a single commit using the Git
    data API: blobs -> tree -> commit -> ref update.

    Returns the SHA of the newly created commit.

    Fixes vs previous version:
    - removed an ``httpx.AsyncClient`` context that was opened but never
      used (every helper opens its own client);
    - removed the dead ``commit_sha_exists``/``tree_sha = None`` branch:
      by this point ``commit_sha`` is guaranteed non-None (we raise when
      the initial commit cannot be created), so the ref always exists.
    """
    commit_sha = await get_latest_commit_sha(repo)
    if commit_sha is None:
        # Empty repository: seed it so `main` and a base commit exist.
        logger.info(f"No commits found in {repo}. Creating initial commit.")
        await create_initial_commit(repo)
        commit_sha = await get_latest_commit_sha(repo)
        if commit_sha is None:
            raise RuntimeError("Failed to create initial commit")

    try:
        tree_sha = await get_tree_sha(repo, commit_sha)

        # One blob per file; entries feed the new tree.
        blob_entries = []
        for file in files:
            blob_sha = await create_blob(repo, file)
            blob_entries.append({
                "path": file.file_name,
                "mode": "100644",  # regular, non-executable file
                "type": "blob",
                "sha": blob_sha,
            })

        new_tree_sha = await create_tree(repo, tree_sha, blob_entries)
        new_commit_sha = await create_commit(repo, commit_message, new_tree_sha, commit_sha)
        # Ref is guaranteed to exist here (see docstring), hence True.
        await update_ref(repo, new_commit_sha, True)

        logger.info(f"Committed files to {OWNER}/{repo}@main with commit SHA {new_commit_sha}")
        return new_commit_sha

    except Exception as e:
        logger.error(f"Error pushing files to GitHub repo: {e}", exc_info=True)
        raise
175
 
176
  async def enable_github_pages(repo: str):
177
  url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/pages"
 
184
 
185
  async with httpx.AsyncClient() as client:
186
  try:
187
+ response = await retry_request(client.post, url, json=payload, headers=headers)
188
  response.raise_for_status()
189
+ json_data = response.json()
190
  logger.info(f"GitHub Pages enabled for {OWNER}/{repo}@main/")
191
+ return {
192
+ "repo_url": f"https://github.com/{OWNER}/{repo}",
193
+ "pages_url": json_data.get("html_url")
194
+ }
195
  except httpx.HTTPStatusError as exc:
196
+ if exc.response.status_code == 409:
197
+ # Pages already enabled, fetch existing pages info instead of error
198
+ info_url = f"{GITHUB_API_URL}repos/{OWNER}/{repo}/pages"
199
+ info_resp = await client.get(info_url, headers=headers)
200
+ info_resp.raise_for_status()
201
+ data = info_resp.json()
202
+ logger.info(f"GitHub Pages already enabled for {OWNER}/{repo}@main/")
203
+ return {
204
+ "repo_url": f"https://github.com/{OWNER}/{repo}",
205
+ "pages_url": data.get("html_url")
206
+ }
207
+ else:
208
+ logger.error(f"Failed to enable GitHub Pages: {exc.response.status_code} - {exc.response.text}")
209
+ raise
210
  except Exception as e:
211
  logger.error(f"Unexpected error enabling GitHub Pages: {str(e)}")
212
  raise
app/llm.py CHANGED
@@ -10,12 +10,30 @@ logging.basicConfig(level=logging.INFO)
10
 
11
 
12
  async def genereate_code_with_llm(request: TaskRequest) -> List[FileContext]:
 
 
 
 
 
 
 
 
13
  prompt = f"""
14
  Generate a complete static website project that is deployable on GitHub Pages.
15
 
16
  Brief: {request.brief}
17
 
 
 
 
 
 
 
18
  Requirements:
 
 
 
 
19
  - Provide all files necessary for deployment including at least an index.html.
20
  - Write a thorough README.md that includes:
21
  - Project summary
@@ -23,6 +41,13 @@ Requirements:
23
  - Usage guide
24
  - Explanation of the main code/files
25
  - License information (use MIT)
 
 
 
 
 
 
 
26
  - The project must follow industry standards for static GitHub Pages hosting.
27
  - Return the project as a list of files with filenames and file contents.
28
  - All code should be modern and ready to deploy without modification.
@@ -35,29 +60,48 @@ Return only a JSON array of objects where each object has:
35
  system_prompt = """
36
  You are a highly experienced senior developer specializing in creating GitHub Pages-ready static websites.
37
 
38
- Your task is to generate a production-ready, modern, and industry-standard static website project based on the user’s brief.
 
 
39
 
40
  Specifically, ensure you do the following:
41
 
42
- 1. Create all necessary files to fully deploy a static website on GitHub Pages, including but not limited to:
 
 
 
 
 
 
43
  - index.html (the homepage)
44
  - any required CSS, JS, or asset files
45
  - configuration files (e.g., CNAME, if needed)
46
- 2. Write a complete, professional README.md file containing:
47
  - A clear project summary describing what the site does
48
  - Setup instructions to deploy the project on GitHub Pages step-by-step
49
  - Usage instructions, explaining how to use the website
50
  - Explanation of the key code files and their purpose
51
  - License information, applying the MIT license in standard format
52
- 3. Ensure all source code and resources are clean, properly structured, and follow modern best practices
53
- 4. Format your output as a list of files, with each containing a file_name and file_content field
54
- 5. Do not include text explanations in the output; only return the code files and README as specified
55
- 6. Output format:
 
 
 
 
 
 
 
 
 
 
 
56
  Return only a JSON array of objects where each object has:
57
  - "file_name": string
58
  - "file_content": string
59
 
60
- The user will provide a brief describing the site functionality. Use that to guide your file generation.
61
 
62
  Focus on quality, clarity, and correctness to deliver a ready-to-use GitHub Pages static website project.
63
  """
 
10
 
11
 
12
  async def genereate_code_with_llm(request: TaskRequest) -> List[FileContext]:
13
+
14
+ # Prepare brief and attachment context
15
+ attachments_text = ""
16
+ if request.attachments and len(request.attachments) > 0:
17
+ attachments_text = "\nAttachments:\n" + "\n".join([f"- {att.name}: {att.url[:80]}..." for att in request.attachments])
18
+ else:
19
+ attachments_text = "\n(No attachments were provided with the request)"
20
+
21
  prompt = f"""
22
  Generate a complete static website project that is deployable on GitHub Pages.
23
 
24
  Brief: {request.brief}
25
 
26
+ attachments (if any): {attachments_text}
27
+
28
+ Checks to be satisfied:
29
+ {f"{request.checks}" if getattr(request, "checks", None) else "(No explicit checks provided)"}
30
+
31
+
32
  Requirements:
33
+ - Use the attachments provided (if any). For example, if a CSV file, image, or data file is attached,
34
+ the generated site should correctly reference and use it in the codebase.
35
+ - Use data URIs as the source for attachments when embedding or referencing them (e.g., <img src="data:image/png;base64,..."> or fetch inline encoded data directly from JS).
36
+ - If attachments are present, they should be used within the project logically matching the task description.
37
  - Provide all files necessary for deployment including at least an index.html.
38
  - Write a thorough README.md that includes:
39
  - Project summary
 
41
  - Usage guide
42
  - Explanation of the main code/files
43
  - License information (use MIT)
44
+ - Carefully read the provided "checks" section. Each listed check represents a requirement that must be fulfilled by the project files and behavior.
45
+ - Checks can include both human-readable and programmatic JavaScript expressions.
46
+ - Implement all behaviors required so that JavaScript-based checks evaluate as true when tested.
47
+ - For checks beginning with `js:`:
48
+ - Ensure your HTML, CSS, and JavaScript code produces behavior consistent with the JS expressions listed.
49
+ - Implement any specified DOM updates, calculations, links, or asynchronous logic needed.
50
+ - Produce the project in such a way that every listed check passes successfully when evaluated automatically by test scripts or human reviewers.
51
  - The project must follow industry standards for static GitHub Pages hosting.
52
  - Return the project as a list of files with filenames and file contents.
53
  - All code should be modern and ready to deploy without modification.
 
60
  system_prompt = """
61
  You are a highly experienced senior developer specializing in creating GitHub Pages-ready static websites.
62
 
63
+ Your goal is to produce a production-ready project based on the provided task brief and optional attachments.
64
+ The user may include one or more attachments, such as images, CSV data files, or other static assets encoded as Data URIs.
65
+
66
 
67
  Specifically, ensure you do the following:
68
 
69
+ 1. Process the task brief carefully to understand the required site behavior and structure.
70
+ 2. If attachments are provided:
71
+ - Treat them as first-class project assets.
72
+ - Use `data:` URLs where appropriate (for images, CSV, JSON, etc.).
73
+ - If a CSV or data file is attached, the project should load and use it accordingly in the site logic.
74
+ - If images are attached, include them visually or reference them as part of the static content.
75
+ 3. Create all necessary files to fully deploy a static website on GitHub Pages, including but not limited to:
76
  - index.html (the homepage)
77
  - any required CSS, JS, or asset files
78
  - configuration files (e.g., CNAME, if needed)
79
+ 4. Write a complete, professional README.md file containing:
80
  - A clear project summary describing what the site does
81
  - Setup instructions to deploy the project on GitHub Pages step-by-step
82
  - Usage instructions, explaining how to use the website
83
  - Explanation of the key code files and their purpose
84
  - License information, applying the MIT license in standard format
85
+ 5. Ensure all source code and resources are clean, properly structured, and follow modern best practices
86
+ 6. Ensure the code Strictly fulfills every check listed in "checks" section . "checks" = explicit criteria the generated site will be tested against (either human-readable or
87
+ programmatic checks such as JavaScript expressions beginning with `js:`).
88
+ - Behavior and compliance:
89
+ - Implement code so that all **JavaScript-based checks (`js:`)** evaluate to `true`
90
+ when the resulting page runs in a browser.
91
+ - Satisfy any conditions mentioned in the `checks` — including DOM structure,
92
+ CSS links (like `<link href*='bootstrap'>`), and computed dynamic values.
93
+ - If a check includes formulas, like
94
+ `Math.abs(parseFloat(document.querySelector('#total-sales').textContent) - ${result}) < 0.01`,
95
+ ensure your JavaScript logic dynamically computes matching values at runtime.
96
+ - Include any required scripts or libraries (e.g., Bootstrap 5 from jsDelivr).
97
+ 7. Format your output as a list of files, with each containing a file_name and file_content field
98
+ 8. Do not include text explanations in the output; only return the code files and README as specified
99
+ 9. Output format:
100
  Return only a JSON array of objects where each object has:
101
  - "file_name": string
102
  - "file_content": string
103
 
104
+ The user will provide a brief describing the site functionality along with attachments (optional). Use these to guide your file generation.
105
 
106
  Focus on quality, clarity, and correctness to deliver a ready-to-use GitHub Pages static website project.
107
  """
app/models.py CHANGED
@@ -1,4 +1,4 @@
1
- from typing import List, Optional
2
  from pydantic import BaseModel
3
  class Attachment(BaseModel):
4
  name: str
@@ -11,7 +11,7 @@ class TaskRequest(BaseModel):
11
  round: int
12
  nonce: str
13
  brief: str
14
- checks: List[str]
15
  evaluation_url: str
16
  attachments: Optional[List[Attachment]] = None
17
 
 
1
+ from typing import List, Optional,Any
2
  from pydantic import BaseModel
3
  class Attachment(BaseModel):
4
  name: str
 
11
  round: int
12
  nonce: str
13
  brief: str
14
+ checks: Any
15
  evaluation_url: str
16
  attachments: Optional[List[Attachment]] = None
17
 
app/rounds.py CHANGED
@@ -16,8 +16,8 @@ async def round1(request: TaskRequest):
16
  commit_sha = await push_files_to_github_repo(repo=request.task,files=files)
17
  # commit_sha = await push_files_to_github_repo(repo=request.task)
18
 
19
- pages_response = await enable_github_pages(repo=request.task)
20
- pages_url = pages_response.get("html_url")
21
 
22
  # Prepare payload
23
  payload = {
 
16
  commit_sha = await push_files_to_github_repo(repo=request.task,files=files)
17
  # commit_sha = await push_files_to_github_repo(repo=request.task)
18
 
19
+ pages_reponse = await enable_github_pages(repo=request.task)
20
+ pages_url = pages_reponse.get("pages_url")
21
 
22
  # Prepare payload
23
  payload = {
app/utils.py CHANGED
@@ -1,7 +1,30 @@
1
  import os
 
 
 
2
  API_SECRET = os.getenv("API_SECRET", "shamil")
3
 
 
 
 
4
  def validate_secret(secret: str) -> bool:
5
  # Constant-time comparison to prevent timing attacks
6
  from hmac import compare_digest
7
  return compare_digest(secret, API_SECRET)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
+ import httpx
3
+ import logging
4
+ import asyncio
5
  API_SECRET = os.getenv("API_SECRET", "shamil")
6
 
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
def validate_secret(secret: str) -> bool:
    """Return True if ``secret`` matches the module-level API_SECRET."""
    # Constant-time comparison to prevent timing attacks
    from hmac import compare_digest
    return compare_digest(secret, API_SECRET)
14
+
15
+
16
async def retry_request(func, *args, retries=3, delay=3, **kwargs):
    """Await ``func(*args, **kwargs)``, retrying transient HTTP failures.

    Retries on ``httpx.HTTPStatusError`` / ``httpx.RequestError`` up to
    ``retries`` attempts, sleeping ``delay`` seconds between attempts,
    and re-raises the last error when attempts are exhausted. Any other
    exception type propagates immediately.

    Fix: dropped the ``last_exc`` bookkeeping and the trailing
    ``if last_exc: raise last_exc`` — that code was unreachable because
    the loop always either returns or re-raises on the final attempt.

    NOTE(review): ``func`` may be a non-idempotent call (PUT/POST);
    retries can repeat side effects on transient failures — confirm
    callers tolerate that.
    """
    for attempt in range(retries):
        try:
            return await func(*args, **kwargs)
        except (httpx.HTTPStatusError, httpx.RequestError) as exc:
            logger.warning(f"Request failed on attempt {attempt + 1}/{retries}: {exc}")
            if attempt == retries - 1:
                logger.error("Max retries reached.")
                raise
            await asyncio.sleep(delay)