Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -219,71 +219,6 @@ def crewai_process_phi2(research_topic):
|
|
| 219 |
|
| 220 |
|
| 221 |
|
| 222 |
-
# Credentials ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

# Vectara API credentials, read once at import time.
# os.environ[...] (not .get) is deliberate: a missing variable raises
# KeyError immediately rather than failing later on the first query.
corpus_id = os.environ['VECTARA_CORPUS_ID']
customer_id = os.environ['VECTARA_CUSTOMER_ID']
api_key = os.environ['VECTARA_API_KEY']
|
| 227 |
-
|
| 228 |
-
|
| 229 |
-
# Get Data +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++


def get_post_headers() -> dict:
    """Build the HTTP headers attached to every Vectara POST request."""
    headers = {}
    headers["x-api-key"] = api_key
    headers["customer-id"] = customer_id
    headers["Content-Type"] = "application/json"
    return headers
|
| 239 |
-
|
| 240 |
-
def query_vectara(query: str, filter_str="", lambda_val=0.0) -> str:
    """Query the Vectara corpus and return its generated summary.

    Args:
        query: Natural-language query text.
        filter_str: Optional Vectara metadata-filter expression; applied
            to the corpus key only when non-empty.
        lambda_val: Lexical-interpolation weight (0.0 = purely neural
            retrieval).

    Returns:
        The summary text with inline citation markers (e.g. ``[1,2]``)
        stripped, or ``""`` when the request fails or the response
        carries no summary (the error is reported via ``st.error``).
    """
    corpus_key = {
        "customerId": customer_id,
        "corpusId": corpus_id,
        "lexicalInterpolationConfig": {"lambda": lambda_val},
    }
    if filter_str:
        corpus_key["metadataFilter"] = filter_str

    data = {
        "query": [
            {
                "query": query,
                "start": 0,
                "numResults": 10,
                # Include 2 sentences of context on each side of a hit.
                "contextConfig": {
                    "sentencesBefore": 2,
                    "sentencesAfter": 2
                },
                "corpusKey": [corpus_key],
                "summary": [
                    {
                        "responseLang": "eng",
                        "maxSummarizedResults": 5,
                        "summarizerPromptName": "vectara-summary-ext-v1.2.0"
                    },
                ]
            }
        ]
    }

    response = requests.post(
        "https://api.vectara.io/v1/query",
        headers=get_post_headers(),
        data=json.dumps(data),
        timeout=130,
    )

    if response.status_code != 200:
        st.error(f"Query failed (code {response.status_code}, reason {response.reason}, details {response.text})")
        return ""

    result = response.json()

    # A 200 response can still carry an empty responseSet or no summary;
    # guard the indexing chain instead of letting KeyError/IndexError
    # crash the app, mirroring the non-200 error path above.
    try:
        answer = result["responseSet"][0]["summary"][0]["text"]
    except (KeyError, IndexError, TypeError):
        st.error("Query succeeded but the response contained no summary.")
        return ""

    # Strip inline citation markers such as "[1]" or "[2,3]" from the summary.
    return re.sub(r'\[\d+(,\d+){0,5}\]', '', answer)
|
| 286 |
-
|
| 287 |
|
| 288 |
|
| 289 |
# Initialize the HHEM model +++++++++++++++++++++++++++++++++++++++++++++++
|
|
|
|
| 219 |
|
| 220 |
|
| 221 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 222 |
|
| 223 |
|
| 224 |
# Initialize the HHEM model +++++++++++++++++++++++++++++++++++++++++++++++
|