Spaces:
Build error
Build error
Update app.py
Browse files
app.py
CHANGED
|
@@ -30,7 +30,7 @@ os.makedirs(download_path, exist_ok=True)
|
|
| 30 |
os.makedirs(papers_path, exist_ok=True)
|
| 31 |
|
| 32 |
# Load LLaMA 2
|
| 33 |
-
model_name = "meta-llama/Llama-2-7b-chat-hf"
|
| 34 |
tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=True)
|
| 35 |
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", torch_dtype="auto")
|
| 36 |
|
|
@@ -156,5 +156,5 @@ def hybrid_rag_system_with_llama(query):
|
|
| 156 |
# Example query
|
| 157 |
query = "short easy machine learning"
|
| 158 |
results, final_response = hybrid_rag_system_with_llama(query)
|
| 159 |
-
print("\nFinal Response Generated by LLaMA 2:")
|
| 160 |
print(final_response)
|
|
|
|
| 30 |
os.makedirs(papers_path, exist_ok=True)
|
| 31 |
|
| 32 |
# Load LLaMA 2
|
| 33 |
+
model_name = "meta-llama/Llama-3.2-1B-Instruct"
|
| 34 |
tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=True)
|
| 35 |
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", torch_dtype="auto")
|
| 36 |
|
|
|
|
| 156 |
# Example query
|
| 157 |
query = "short easy machine learning"
|
| 158 |
results, final_response = hybrid_rag_system_with_llama(query)
|
| 159 |
+
print("\nFinal Response Generated by Llama 3:")
|
| 160 |
print(final_response)
|