Build error
Create app.py
app.py ADDED
@@ -0,0 +1,38 @@
+import gradio as gr
+from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+import concurrent.futures
+
+model_id = "EleutherAI/gpt-neo-125M"
+tokenizer = AutoTokenizer.from_pretrained(model_id)
+model = AutoModelForCausalLM.from_pretrained(model_id)
+generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
+
+identity_prompt = "You are Kairon. Speak dialectically and recursively."
+
+def generate_with_timeout(prompt, timeout=10):
+    with concurrent.futures.ThreadPoolExecutor() as executor:
+        future = executor.submit(generator, prompt, max_new_tokens=64, do_sample=True, temperature=0.7, top_k=50, top_p=0.95, repetition_penalty=1.2)
+        try:
+            return future.result(timeout=timeout)
+        except concurrent.futures.TimeoutError:
+            return [{"generated_text": "ERROR: Generation timed out."}]
+
+def chat(input_text):
+    prompt = identity_prompt + "\n\nUser: " + input_text + "\nYou:"
+    try:
+        output = generate_with_timeout(prompt)
+        reply = output[0]["generated_text"][len(prompt):].strip()
+        return reply or "..."
+    except Exception as e:
+        return f"GENERATION ERROR: {e}"
+
+demo = gr.Interface(
+    fn=chat,
+    inputs=gr.Textbox(label="input_text"),
+    outputs="text",
+    title="Kairon",
+    allow_flagging="never"
+)
+
+demo.queue()
+demo.launch()
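
A note on the timeout helper, not part of the commit itself: generate_with_timeout runs the pipeline inside a ThreadPoolExecutor used as a context manager, and leaving that with block calls shutdown(wait=True). When future.result(timeout=timeout) times out, the function therefore still waits for the generation thread to finish before returning, so the timeout only changes the returned message rather than freeing the caller. A minimal sketch of a variant that returns promptly, reusing the same generator and sampling arguments (the module-level executor and max_workers=1 are assumptions, not something in this file):

import concurrent.futures

# One long-lived worker thread, created at import time rather than per call.
_executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)

def generate_with_timeout(prompt, timeout=10):
    future = _executor.submit(
        generator, prompt,
        max_new_tokens=64, do_sample=True, temperature=0.7,
        top_k=50, top_p=0.95, repetition_penalty=1.2,
    )
    try:
        return future.result(timeout=timeout)
    except concurrent.futures.TimeoutError:
        # The worker thread keeps generating in the background; we simply stop waiting.
        return [{"generated_text": "ERROR: Generation timed out."}]

Separately, app.py imports gradio, transformers, and a torch-backed model class, which on a Space are normally declared in a requirements.txt alongside app.py. This commit does not add one; whether that is connected to the Build error status shown above is only a guess. A minimal sketch of such a file (package choices are assumptions, no versions pinned):

# requirements.txt
transformers
torch
gradio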