Spaces:
Paused
Paused
update
Browse files
app.py
CHANGED
|
@@ -13,6 +13,22 @@ import requests
|
|
| 13 |
import seaborn as sns
|
| 14 |
from tqdm import tqdm
|
| 15 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
|
| 17 |
def download_arxiv_source(paper_id):
|
| 18 |
url = f"https://arxiv.org/e-print/{paper_id}"
|
|
@@ -74,10 +90,8 @@ class ContextualQA:
|
|
| 74 |
self.context = text
|
| 75 |
|
| 76 |
def ask_question(self, question):
|
| 77 |
-
leading_prompt = "Consider the
|
| 78 |
-
trailing_prompt =
|
| 79 |
-
"Now answer the following question, use Markdown to format your answer."
|
| 80 |
-
)
|
| 81 |
prompt = f"{anthropic.HUMAN_PROMPT} {leading_prompt}\n\n{self.context}\n\n{trailing_prompt}\n\n{anthropic.HUMAN_PROMPT} {question} {anthropic.AI_PROMPT}"
|
| 82 |
response = self.client.completion_stream(
|
| 83 |
prompt=prompt,
|
|
@@ -107,16 +121,24 @@ class ContextualQA:
|
|
| 107 |
|
| 108 |
|
| 109 |
def load_context(paper_id):
|
| 110 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 111 |
client = anthropic.Client(api_key=os.environ["ANTHROPIC_API_KEY"])
|
| 112 |
model = ContextualQA(client, model="claude-v1.3-100k")
|
| 113 |
model.load_text(latex_source)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 114 |
return (
|
| 115 |
model,
|
| 116 |
[
|
| 117 |
(
|
| 118 |
f"Load the paper with id {paper_id}.",
|
| 119 |
-
"
|
| 120 |
)
|
| 121 |
],
|
| 122 |
)
|
|
@@ -131,7 +153,11 @@ def answer_fn(model, question, chat_history):
|
|
| 131 |
client = anthropic.Client(api_key=os.environ["ANTHROPIC_API_KEY"])
|
| 132 |
model.client = client
|
| 133 |
|
| 134 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 135 |
|
| 136 |
chat_history.append((question, response[0]["completion"]))
|
| 137 |
return model, chat_history, ""
|
|
|
|
| 13 |
import seaborn as sns
|
| 14 |
from tqdm import tqdm
|
| 15 |
|
| 16 |
+
import arxiv
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def get_paper_info(paper_id):
    """Fetch the title and abstract of an arXiv paper by its identifier.

    Parameters
    ----------
    paper_id : str
        An arXiv identifier, e.g. ``"2303.08774"``.

    Returns
    -------
    tuple
        ``(title, summary)`` when the paper is found, otherwise
        ``(None, None)``.
    """
    # Query the arXiv API for exactly this identifier.
    search = arxiv.Search(id_list=[paper_id])

    # results() is a lazy iterator over matches; grab the first hit, if any.
    paper = next(search.results(), None)

    # Guard clause: nothing matched the given ID.
    if paper is None:
        return None, None

    return paper.title, paper.summary
|
| 31 |
+
|
| 32 |
|
| 33 |
def download_arxiv_source(paper_id):
|
| 34 |
url = f"https://arxiv.org/e-print/{paper_id}"
|
|
|
|
| 90 |
self.context = text
|
| 91 |
|
| 92 |
def ask_question(self, question):
|
| 93 |
+
leading_prompt = "Consider the following paper:"
|
| 94 |
+
trailing_prompt = "Now, answer the following question using Markdown syntax:"
|
|
|
|
|
|
|
| 95 |
prompt = f"{anthropic.HUMAN_PROMPT} {leading_prompt}\n\n{self.context}\n\n{trailing_prompt}\n\n{anthropic.HUMAN_PROMPT} {question} {anthropic.AI_PROMPT}"
|
| 96 |
response = self.client.completion_stream(
|
| 97 |
prompt=prompt,
|
|
|
|
| 121 |
|
| 122 |
|
| 123 |
def load_context(paper_id):
    """Download an arXiv paper's source and build a ContextualQA model for it.

    Parameters
    ----------
    paper_id : str
        The arXiv identifier of the paper to load.

    Returns
    -------
    tuple
        ``(model, chat_history)`` where ``model`` is a ``ContextualQA``
        instance (or ``None`` when the download failed) and ``chat_history``
        is the initial list of ``(user message, assistant message)`` pairs
        for the chat widget.
    """
    try:
        latex_source = download_arxiv_source(paper_id)
    except Exception as e:
        # Surface the failure in the chat widget instead of crashing the UI.
        return None, [(f"Error loading paper with id {paper_id}.", str(e))]

    client = anthropic.Client(api_key=os.environ["ANTHROPIC_API_KEY"])
    model = ContextualQA(client, model="claude-v1.3-100k")
    model.load_text(latex_source)

    # Metadata lookup is best-effort: get_paper_info can return (None, None)
    # or raise on a network error, and neither should block Q&A once the
    # paper source itself has been loaded.
    try:
        title, abstract = get_paper_info(paper_id)
    except Exception:
        title, abstract = None, None

    if title is None:
        # Avoid rendering a literal "**Title**: None" to the user.
        greeting = "\nPaper loaded, You can now ask questions."
    else:
        greeting = f"\n**Title**: {title}\n\n**Abstract**: {abstract}\n\nPaper loaded, You can now ask questions."

    return (
        model,
        [
            (
                f"Load the paper with id {paper_id}.",
                greeting,
            )
        ],
    )
|
|
|
|
| 153 |
client = anthropic.Client(api_key=os.environ["ANTHROPIC_API_KEY"])
|
| 154 |
model.client = client
|
| 155 |
|
| 156 |
+
try:
|
| 157 |
+
response = model.ask_question(question)
|
| 158 |
+
except Exception as e:
|
| 159 |
+
chat_history.append(("Error Asking Question", str(e)))
|
| 160 |
+
return model, chat_history, ""
|
| 161 |
|
| 162 |
chat_history.append((question, response[0]["completion"]))
|
| 163 |
return model, chat_history, ""
|