Spaces:
Runtime error
Runtime error
Paul Hager
committed on
Commit
·
47f11bf
1
Parent(s):
6ec6db3
debug config
Browse files
- app.py +7 -7
- src/about.py +0 -30
- src/envs.py +0 -3
- src/leaderboard/read_evals.py +1 -0
app.py
CHANGED
|
@@ -78,13 +78,13 @@ def init_leaderboard(dataframe):
|
|
| 78 |
filter_columns=[
|
| 79 |
# ColumnFilter(AutoEvalColumn.model_type.name, type="checkboxgroup", label="Model types"),
|
| 80 |
# ColumnFilter(AutoEvalColumn.precision.name, type="checkboxgroup", label="Precision"),
|
| 81 |
-
ColumnFilter(
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
),
|
| 88 |
ColumnFilter(AutoEvalColumn.still_on_hub.name, type="boolean", label="Deleted/incomplete", default=True),
|
| 89 |
],
|
| 90 |
bool_checkboxgroup_label="Hide models",
|
|
|
|
| 78 |
filter_columns=[
|
| 79 |
# ColumnFilter(AutoEvalColumn.model_type.name, type="checkboxgroup", label="Model types"),
|
| 80 |
# ColumnFilter(AutoEvalColumn.precision.name, type="checkboxgroup", label="Precision"),
|
| 81 |
+
# ColumnFilter(
|
| 82 |
+
# AutoEvalColumn.params.name,
|
| 83 |
+
# type="slider",
|
| 84 |
+
# min=0.01,
|
| 85 |
+
# max=150,
|
| 86 |
+
# label="Select the number of parameters (B)",
|
| 87 |
+
# ),
|
| 88 |
ColumnFilter(AutoEvalColumn.still_on_hub.name, type="boolean", label="Deleted/incomplete", default=True),
|
| 89 |
],
|
| 90 |
bool_checkboxgroup_label="Hide models",
|
src/about.py
CHANGED
|
@@ -59,36 +59,6 @@ python run_full_info.py pathology=diverticulitis model=<YOUR_MODEL_NAME>
|
|
| 59 |
|
| 60 |
"""
|
| 61 |
|
| 62 |
-
# EVALUATION_QUEUE_TEXT = """
|
| 63 |
-
# ## Some good practices before submitting a model
|
| 64 |
-
|
| 65 |
-
# ### 1) Make sure you can load your model and tokenizer using AutoClasses:
|
| 66 |
-
# ```python
|
| 67 |
-
# from transformers import AutoConfig, AutoModel, AutoTokenizer
|
| 68 |
-
# config = AutoConfig.from_pretrained("your model name", revision=revision)
|
| 69 |
-
# model = AutoModel.from_pretrained("your model name", revision=revision)
|
| 70 |
-
# tokenizer = AutoTokenizer.from_pretrained("your model name", revision=revision)
|
| 71 |
-
# ```
|
| 72 |
-
# If this step fails, follow the error messages to debug your model before submitting it. It's likely your model has been improperly uploaded.
|
| 73 |
-
|
| 74 |
-
# Note: make sure your model is public!
|
| 75 |
-
# Note: if your model needs `use_remote_code=True`, we do not support this option yet but we are working on adding it, stay posted!
|
| 76 |
-
|
| 77 |
-
# ### 2) Convert your model weights to [safetensors](https://huggingface.co/docs/safetensors/index)
|
| 78 |
-
# It's a new format for storing weights which is safer and faster to load and use. It will also allow us to add the number of parameters of your model to the `Extended Viewer`!
|
| 79 |
-
|
| 80 |
-
# ### 3) Make sure your model has an open license!
|
| 81 |
-
# This is a leaderboard for Open LLMs, and we'd love for as many people as possible to know they can use your model 🤗
|
| 82 |
-
|
| 83 |
-
# ### 4) Fill up your model card
|
| 84 |
-
# When we add extra information about models to the leaderboard, it will be automatically taken from the model card
|
| 85 |
-
|
| 86 |
-
# ## In case of model failure
|
| 87 |
-
# If your model is displayed in the `FAILED` category, its execution stopped.
|
| 88 |
-
# Make sure you have followed the above steps first.
|
| 89 |
-
# If everything is done, check you can launch the EleutherAIHarness on your model locally, using the above command without modifications (you can add `--limit` to limit the number of examples per task).
|
| 90 |
-
# """
|
| 91 |
-
|
| 92 |
CITATION_BUTTON_LABEL = "Copy the following snippet to cite these results"
|
| 93 |
CITATION_BUTTON_TEXT = r"""
|
| 94 |
@article{hager_evaluation_2024,
|
|
|
|
| 59 |
|
| 60 |
"""
|
| 61 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
CITATION_BUTTON_LABEL = "Copy the following snippet to cite these results"
|
| 63 |
CITATION_BUTTON_TEXT = r"""
|
| 64 |
@article{hager_evaluation_2024,
|
src/envs.py
CHANGED
|
@@ -12,7 +12,6 @@ OWNER = (
|
|
| 12 |
# ----------------------------------
|
| 13 |
|
| 14 |
REPO_ID = f"{OWNER}/leaderboard"
|
| 15 |
-
# QUEUE_REPO = f"{OWNER}/requests"
|
| 16 |
RESULTS_REPO_CDM = f"{OWNER}/results-CDM"
|
| 17 |
RESULTS_REPO_CDM_FI = f"{OWNER}/results-CDM-FI"
|
| 18 |
|
|
@@ -20,10 +19,8 @@ RESULTS_REPO_CDM_FI = f"{OWNER}/results-CDM-FI"
|
|
| 20 |
CACHE_PATH = os.getenv("HF_HOME", ".")
|
| 21 |
|
| 22 |
# Local caches
|
| 23 |
-
# EVAL_REQUESTS_PATH = os.path.join(CACHE_PATH, "eval-queue")
|
| 24 |
EVAL_RESULTS_PATH_CDM = os.path.join(CACHE_PATH, "eval-results-CDM")
|
| 25 |
EVAL_RESULTS_PATH_CDM_FI = os.path.join(CACHE_PATH, "eval-results-CDM-FI")
|
| 26 |
-
# EVAL_REQUESTS_PATH_BACKEND = os.path.join(CACHE_PATH, "eval-queue-bk")
|
| 27 |
EVAL_RESULTS_PATH_BACKEND = os.path.join(CACHE_PATH, "eval-results-bk")
|
| 28 |
|
| 29 |
API = HfApi(token=TOKEN)
|
|
|
|
| 12 |
# ----------------------------------
|
| 13 |
|
| 14 |
REPO_ID = f"{OWNER}/leaderboard"
|
|
|
|
| 15 |
RESULTS_REPO_CDM = f"{OWNER}/results-CDM"
|
| 16 |
RESULTS_REPO_CDM_FI = f"{OWNER}/results-CDM-FI"
|
| 17 |
|
|
|
|
| 19 |
CACHE_PATH = os.getenv("HF_HOME", ".")
|
| 20 |
|
| 21 |
# Local caches
|
|
|
|
| 22 |
EVAL_RESULTS_PATH_CDM = os.path.join(CACHE_PATH, "eval-results-CDM")
|
| 23 |
EVAL_RESULTS_PATH_CDM_FI = os.path.join(CACHE_PATH, "eval-results-CDM-FI")
|
|
|
|
| 24 |
EVAL_RESULTS_PATH_BACKEND = os.path.join(CACHE_PATH, "eval-results-bk")
|
| 25 |
|
| 26 |
API = HfApi(token=TOKEN)
|
src/leaderboard/read_evals.py
CHANGED
|
@@ -41,6 +41,7 @@ class EvalResult:
|
|
| 41 |
data = json.load(fp)
|
| 42 |
|
| 43 |
config = data.get("config")
|
|
|
|
| 44 |
|
| 45 |
# Precision
|
| 46 |
precision = Precision.from_str(config.get("model_dtype"))
|
|
|
|
| 41 |
data = json.load(fp)
|
| 42 |
|
| 43 |
config = data.get("config")
|
| 44 |
+
print(config)
|
| 45 |
|
| 46 |
# Precision
|
| 47 |
precision = Precision.from_str(config.get("model_dtype"))
|