Commit "cleaned chunks" — diff view of app.py (CHANGED), hunk @@ -2,9 +2,21 @@ (context line: import gradio as gr)
|
|
| 2 |
from huggingface_hub import InferenceClient

# Read the whole knowledge file once at startup; the chat handler
# defined later in the file builds its context from this text.
with open("knowledge.txt", "r", encoding="utf-8") as fh:
    knowledge_base = fh.read()

# Echo the raw contents so the Space's startup logs show what was loaded.
print(knowledge_base)
|
| 6 |
|
| 7 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
|
| 9 |
def respond(message,history):
|
| 10 |
messages = [{"role": "system" , "content" : "You're a supportive and helpful feminist"}]
|
|
|
|
| 2 |
from huggingface_hub import InferenceClient

# Load the entire knowledge base into memory up front; downstream code
# cleans and chunks it before it is used by the chat handler.
with open("knowledge.txt", "r", encoding="utf-8") as fh:
    knowledge_base = fh.read()

# Print the raw file so the deployment logs confirm it was read correctly.
print(knowledge_base)
|
| 7 |
|
| 8 |
+
# Normalize the knowledge base into a list of non-empty, trimmed lines
# ("chunks") suitable for use as retrieval context.
cleaned_text = knowledge_base.strip()
chunks = cleaned_text.split("\n")

# Trim each line and keep only those with real content. The walrus form
# replaces the original append loop (PERF401) and also avoids calling
# .strip() twice per line.
cleaned_chunks = [stripped for chunk in chunks if (stripped := chunk.strip())]
print(cleaned_chunks)

# Hosted inference endpoint used by the chat handler defined below.
client = InferenceClient("google/gemma-3-27b-it")
|
| 20 |
|
| 21 |
def respond(message,history):
|
| 22 |
messages = [{"role": "system" , "content" : "You're a supportive and helpful feminist"}]
|