Update README.md
Browse files
README.md
CHANGED
|
@@ -1,10 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
# EPIC Router Family
|
| 2 |
|
| 3 |
This repository hosts the public checkpoints for the EPIC router models. Each
|
| 4 |
checkpoint learns to pick the best reasoning configuration (method, aggregator,
|
| 5 |
-
sample count, etc.) given a natural-language math question.
|
| 6 |
-
Training and evaluation code lives in the EPIC GitHub project; these weights are
|
| 7 |
-
ready-to-use drop-in artifacts for that codebase.
|
| 8 |
|
| 9 |
|
| 10 |
## Available Versions
|
|
@@ -26,10 +28,12 @@ Each checkpoint contains:
|
|
| 26 |
|
| 27 |
## Quickstart (Python)
|
| 28 |
|
| 29 |
-
Install
|
| 30 |
|
| 31 |
```bash
|
| 32 |
-
|
|
|
|
|
|
|
| 33 |
```
|
| 34 |
|
| 35 |
Load a checkpoint and route a question:
|
|
@@ -37,8 +41,8 @@ Load a checkpoint and route a question:
|
|
| 37 |
```python
|
| 38 |
from huggingface_hub import hf_hub_download
|
| 39 |
import torch
|
| 40 |
-
from router.models import RouterScoringModel, MiniLMQuestionEncoder, QuestionProjector
|
| 41 |
-
from data_schemas.reasoning import ReasoningConfig
|
| 42 |
|
| 43 |
REPO_ID = "baonn/epic"
|
| 44 |
VERSION = "1.0" # or 0.5 / 0.75 / 0.25
|
|
@@ -70,11 +74,11 @@ reasoning_configs = [
|
|
| 70 |
ReasoningConfig.deserialize(payload) for payload in checkpoint["methods"]
|
| 71 |
]
|
| 72 |
|
| 73 |
-
|
| 74 |
with torch.no_grad():
|
| 75 |
-
logits = model(
|
| 76 |
-
|
| 77 |
|
| 78 |
-
|
| 79 |
-
print("Recommended config:",
|
| 80 |
```
|
|
|
|
| 1 |
+
---
|
| 2 |
+
datasets:
|
| 3 |
+
- HuggingFaceH4/MATH-500
|
| 4 |
+
---
|
| 5 |
# EPIC Router Family
|
| 6 |
|
| 7 |
This repository hosts the public checkpoints for the EPIC router models. Each
|
| 8 |
checkpoint learns to pick the best reasoning configuration (method, aggregator,
|
| 9 |
+
sample count, etc.) given a natural-language math question.
|
|
|
|
|
|
|
| 10 |
|
| 11 |
|
| 12 |
## Available Versions
|
|
|
|
| 28 |
|
| 29 |
## Quickstart (Python)
|
| 30 |
|
| 31 |
+
Install the package locally:
|
| 32 |
|
| 33 |
```bash
|
| 34 |
+
git clone https://github.com/nguyenngocbaocmt02/epic.git
|
| 35 |
+
cd epic
|
| 36 |
+
pip install -e .
|
| 37 |
```
|
| 38 |
|
| 39 |
Load a checkpoint and route a question:
|
|
|
|
| 41 |
```python
|
| 42 |
from huggingface_hub import hf_hub_download
|
| 43 |
import torch
|
| 44 |
+
from epic.router.models import RouterScoringModel, MiniLMQuestionEncoder, QuestionProjector
|
| 45 |
+
from epic.data_schemas.reasoning import ReasoningConfig
|
| 46 |
|
| 47 |
REPO_ID = "baonn/epic"
|
| 48 |
VERSION = "1.0" # or 0.5 / 0.75 / 0.25
|
|
|
|
| 74 |
ReasoningConfig.deserialize(payload) for payload in checkpoint["methods"]
|
| 75 |
]
|
| 76 |
|
| 77 |
+
questions = ["x + 20 = 30 then x = ?", "How many positive divisors does 3600 have?"]
|
| 78 |
with torch.no_grad():
|
| 79 |
+
logits = model(questions)
|
| 80 |
+
method_indices = torch.argmax(logits, dim=1).tolist()
|
| 81 |
|
| 82 |
+
print("Recommended config for question 1:", reasoning_configs[method_indices[0]].serialize(include_samples=True))
|
| 83 |
+
print("Recommended config for question 2:", reasoning_configs[method_indices[1]].serialize(include_samples=True))
|
| 84 |
```
|