mjaxs commited on
Commit
99eb32c
·
verified ·
1 Parent(s): fffdb67

Upload app.py

Browse files
Files changed (1) hide show
  1. app .py +23 -0
app .py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# Model selection: a public, non-gated checkpoint so the Space runs without
# an HF auth token. NOTE(review): Falcon-RW-1B is ~1B params — loading it at
# import time is slow on CPU-only Spaces; confirm the hardware tier can hold it.
model_id = "tiiuae/falcon-rw-1b"

# Download (or load from cache) the tokenizer and model weights.
# Both calls hit the network on first run — this happens at module import.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Wrap model+tokenizer in a text-generation pipeline; `generator` is the
# module-level handle used by chat_with_cael() below.
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
12
+
13
def chat_with_cael(prompt):
    """Generate a text continuation of *prompt* with the module-level pipeline.

    Args:
        prompt: User-entered text from the Gradio textbox.

    Returns:
        The generated text (the pipeline's ``generated_text`` field, which
        includes the original prompt followed by the model's continuation).
    """
    # Guard: an empty prompt gives the model nothing to condition on.
    if not prompt or not prompt.strip():
        return "Please type something so Cael can respond."
    # Fix: the original used max_length=200, which caps prompt + generation
    # combined — a long prompt would leave little or no room for a reply.
    # max_new_tokens bounds only the newly generated tokens.
    result = generator(prompt, max_new_tokens=200, do_sample=True, top_k=50)
    return result[0]["generated_text"]
16
+
17
# Wire the generation function into a minimal Gradio UI and serve it.
prompt_box = gr.Textbox(lines=2, placeholder="Talk to Cael...")

iface = gr.Interface(
    fn=chat_with_cael,
    inputs=prompt_box,
    outputs="text",
    title="Cael: Your AI Companion",
    description="This is a simple early version of Cael using Falcon-RW-1B. Fully free and open!",
)

# Start the web server (blocks until the app is stopped).
iface.launch()