Spaces:
Sleeping
Sleeping
refactor: moving llm judge inside server dir
Browse files
- inference.py +1 -1
- llm_judge.py → server/llm_judge.py +0 -0
inference.py
CHANGED
|
@@ -33,7 +33,7 @@ load_dotenv()
|
|
| 33 |
from openai import OpenAI
|
| 34 |
|
| 35 |
from client import WhyDidItFailEnv
|
| 36 |
- from llm_judge import judge as llm_judge
|
| 37 |
from models import WhyDidItFailAction
|
| 38 |
from server.scenarios import SCENARIOS
|
| 39 |
|
|
|
|
| 33 |
from openai import OpenAI
|
| 34 |
|
| 35 |
from client import WhyDidItFailEnv
|
| 36 |
+ from server.llm_judge import judge as llm_judge
|
| 37 |
from models import WhyDidItFailAction
|
| 38 |
from server.scenarios import SCENARIOS
|
| 39 |
|
llm_judge.py → server/llm_judge.py
RENAMED
|
File without changes
|