Spaces:
darsoarafa
/
Runtime error

darsoarafa committed on
Commit
f3a8bd6
·
verified ·
1 Parent(s): f133435

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -13
app.py CHANGED
@@ -2,8 +2,8 @@ from sentence_transformers import SentenceTransformer
2
  from PyPDF2 import PdfReader
3
  import tiktoken
4
  import groq
5
- import asyncio
6
- from groq import AsyncGroq
7
  import faiss
8
  import numpy as np
9
  import gradio as gr
@@ -84,20 +84,16 @@ def load_embeddings(embeddings_path="models/embeddings.pkl"):
84
  index = pickle.load(f)
85
 
86
  # == Integrasi LLaMA via Groq API ==
87
- #client = groq.Client(api_key=GROQ_API_KEY)
88
- client = AsyncGroq(api_key=GROQ_API_KEY)
89
 
90
- async def query_llama(prompt):
91
- response = await client.chat.completions.create(
92
  model="llama3-8b-8192",
93
  messages=[{"role": "user", "content": prompt}],
94
  max_tokens=512
95
  )
96
- for chunk in response:
97
- print(chunk.choices[0].delta.content)
98
- yield chunk.choices[0].delta.content
99
- print("****************")
100
- #return response.choices[0].message.content.strip()
101
 
102
  # == Main Workflow ==
103
  if __name__ == '__main__':
@@ -112,7 +108,6 @@ if __name__ == '__main__':
112
  context = "\n".join(retrieved_chunks)
113
 
114
  prompt = f"Gunakan informasi berikut untuk menjawab:\n{context}\n\nPertanyaan: Apa isi dokumen ini?"
115
- print(prompt)
116
  answer = query_llama(prompt)
117
  print(answer)
118
 
@@ -122,7 +117,7 @@ def chatbot_interface(user_query):
122
  context = "\n".join(retrieved_chunks)
123
 
124
  prompt = f"Gunakan informasi berikut untuk menjawab:\n{context}\n\nPertanyaan: {user_query}"
125
- answer = asyncio.run(query_llama(prompt))
126
 
127
  return answer
128
 
 
2
  from PyPDF2 import PdfReader
3
  import tiktoken
4
  import groq
5
+ #import asyncio
6
+ #from groq import AsyncGroq
7
  import faiss
8
  import numpy as np
9
  import gradio as gr
 
84
  index = pickle.load(f)
85
 
86
  # == Integrasi LLaMA via Groq API ==
87
+ client = groq.Client(api_key=GROQ_API_KEY)
88
+ #client = AsyncGroq(api_key=GROQ_API_KEY)
89
 
90
+ def query_llama(prompt):
91
+ response = client.chat.completions.create(
92
  model="llama3-8b-8192",
93
  messages=[{"role": "user", "content": prompt}],
94
  max_tokens=512
95
  )
96
+ return response.choices[0].message.content.strip()
 
 
 
 
97
 
98
  # == Main Workflow ==
99
  if __name__ == '__main__':
 
108
  context = "\n".join(retrieved_chunks)
109
 
110
  prompt = f"Gunakan informasi berikut untuk menjawab:\n{context}\n\nPertanyaan: Apa isi dokumen ini?"
 
111
  answer = query_llama(prompt)
112
  print(answer)
113
 
 
117
  context = "\n".join(retrieved_chunks)
118
 
119
  prompt = f"Gunakan informasi berikut untuk menjawab:\n{context}\n\nPertanyaan: {user_query}"
120
+ answer = query_llama(prompt)
121
 
122
  return answer
123