Spaces:
Runtime error
Runtime error
Update nerBio.py
Browse files
nerBio.py
CHANGED
|
@@ -1,10 +1,10 @@
|
|
| 1 |
import os
|
| 2 |
|
| 3 |
-
os.environ["CUDA_VISIBLE_DEVICES"] = "1,6" #GPUs to use
|
| 4 |
-
|
| 5 |
-
os.environ["HF_HUB_CACHE"] = "/eos/jeodpp/home/users/consose/cache/huggingface/hub"
|
| 6 |
-
os.environ["HUGGINGFACE_HUB_CACHE"] = "/eos/jeodpp/home/users/consose/cache/huggingface/hub"
|
| 7 |
-
os.environ["HF_HOME"] = "/eos/jeodpp/home/users/consose/cache/huggingface/hub"
|
| 8 |
|
| 9 |
from transformers import file_utils
|
| 10 |
|
|
@@ -21,10 +21,10 @@ from collections import Counter
|
|
| 21 |
from gliner import GLiNER, GLiNERConfig, data_processing
|
| 22 |
|
| 23 |
#os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "max_split_size_mb:512"
|
| 24 |
-
os.environ['PYTORCH_CUDA_ALLOC_CONF'] = 'expandable_segments:True'
|
| 25 |
|
| 26 |
import torch
|
| 27 |
-
torch.cuda.empty_cache() # Clear cache of torch
|
| 28 |
|
| 29 |
import logging
|
| 30 |
|
|
@@ -67,10 +67,6 @@ import numpy as np
|
|
| 67 |
|
| 68 |
from retrieverRAG_testing import RAG_retrieval_Base, RAG_retrieval_Z_scores, RAG_retrieval_Percentile, RAG_retrieval_TopK
|
| 69 |
|
| 70 |
-
from joblib import Memory
|
| 71 |
-
|
| 72 |
-
cachedir = 'cached'
|
| 73 |
-
mem = Memory(cachedir, verbose=False)
|
| 74 |
|
| 75 |
# this is to completely delete the cache:
|
| 76 |
# mem.clear(warn=False)
|
|
|
|
| 1 |
import os
|
| 2 |
|
| 3 |
+
#os.environ["CUDA_VISIBLE_DEVICES"] = "1,6" #GPUs to use
|
| 4 |
+
#
|
| 5 |
+
#os.environ["HF_HUB_CACHE"] = "/eos/jeodpp/home/users/consose/cache/huggingface/hub"
|
| 6 |
+
#os.environ["HUGGINGFACE_HUB_CACHE"] = "/eos/jeodpp/home/users/consose/cache/huggingface/hub"
|
| 7 |
+
#os.environ["HF_HOME"] = "/eos/jeodpp/home/users/consose/cache/huggingface/hub"
|
| 8 |
|
| 9 |
from transformers import file_utils
|
| 10 |
|
|
|
|
| 21 |
from gliner import GLiNER, GLiNERConfig, data_processing
|
| 22 |
|
| 23 |
#os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "max_split_size_mb:512"
|
| 24 |
+
#os.environ['PYTORCH_CUDA_ALLOC_CONF'] = 'expandable_segments:True'
|
| 25 |
|
| 26 |
import torch
|
| 27 |
+
#torch.cuda.empty_cache() # Clear cache of torch
|
| 28 |
|
| 29 |
import logging
|
| 30 |
|
|
|
|
| 67 |
|
| 68 |
from retrieverRAG_testing import RAG_retrieval_Base, RAG_retrieval_Z_scores, RAG_retrieval_Percentile, RAG_retrieval_TopK
|
| 69 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 70 |
|
| 71 |
# this is to completely delete the cache:
|
| 72 |
# mem.clear(warn=False)
|