Fix for api endpoint
#2
by
Hammad6271
- opened
- app.py +3 -1
- pyproject.toml +1 -1
- requirements.txt +1 -1
app.py
CHANGED
|
@@ -10,6 +10,7 @@ import black # Add black import
|
|
| 10 |
# Initialize the inference client
|
| 11 |
client = InferenceClient(
|
| 12 |
api_key=os.getenv("HF_TOKEN"), # Make sure to set this environment variable
|
|
|
|
| 13 |
)
|
| 14 |
|
| 15 |
# Load questions from Hugging Face dataset
|
|
@@ -72,8 +73,9 @@ def check_code(
|
|
| 72 |
3. Does it follow the requirements of the challenge?
|
| 73 |
4. Does it meet the assessment criteria?
|
| 74 |
|
| 75 |
-
Respond with ONLY "CORRECT" or "INCORRECT" followed by a brief explanation.
|
| 76 |
"""
|
|
|
|
| 77 |
|
| 78 |
messages = [{"role": "user", "content": prompt}]
|
| 79 |
|
|
|
|
| 10 |
# Initialize the inference client
|
| 11 |
client = InferenceClient(
|
| 12 |
api_key=os.getenv("HF_TOKEN"), # Make sure to set this environment variable
|
| 13 |
+
base_url="https://router.huggingface.co",  # Updated base URL for the new inference API
|
| 14 |
)
|
| 15 |
|
| 16 |
# Load questions from Hugging Face dataset
|
|
|
|
| 73 |
3. Does it follow the requirements of the challenge?
|
| 74 |
4. Does it meet the assessment criteria?
|
| 75 |
|
| 76 |
+
Respond with ONLY "CORRECT" or "INCORRECT" followed by a brief explanation. Do not mention the "Student"; instead, refer to it as "the solution" in your explanation.
|
| 77 |
"""
|
| 78 |
+
# Updated the prompt to refer to "the solution" instead of "Student" in the explanation, keeping the tone neutral.
|
| 79 |
|
| 80 |
messages = [{"role": "user", "content": prompt}]
|
| 81 |
|
pyproject.toml
CHANGED
|
@@ -8,6 +8,6 @@ dependencies = [
|
|
| 8 |
"black>=25.1.0",
|
| 9 |
"datasets>=3.2.0",
|
| 10 |
"gradio[oauth]==5.15.0",
|
| 11 |
-
"huggingface-hub>=0.
|
| 12 |
"ipykernel>=6.29.5",
|
| 13 |
]
|
|
|
|
| 8 |
"black>=25.1.0",
|
| 9 |
"datasets>=3.2.0",
|
| 10 |
"gradio[oauth]==5.15.0",
|
| 11 |
+
"huggingface-hub>=0.31.0",
|
| 12 |
"ipykernel>=6.29.5",
|
| 13 |
]
|
requirements.txt
CHANGED
|
@@ -34,7 +34,7 @@ gradio-client==1.7.0
|
|
| 34 |
h11==0.14.0
|
| 35 |
httpcore==1.0.7
|
| 36 |
httpx==0.28.1
|
| 37 |
-
huggingface-hub
|
| 38 |
idna==3.10
|
| 39 |
ipykernel==6.29.5
|
| 40 |
ipython==8.32.0
|
|
|
|
| 34 |
h11==0.14.0
|
| 35 |
httpcore==1.0.7
|
| 36 |
httpx==0.28.1
|
| 37 |
+
huggingface-hub>=0.31.0
|
| 38 |
idna==3.10
|
| 39 |
ipykernel==6.29.5
|
| 40 |
ipython==8.32.0
|