amkyawdev committed on
Commit
44970d9
·
verified ·
1 Parent(s): 049bf09

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -1,8 +1,8 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
- # Use HF Inference API directly (no local model needed)
5
- client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")
6
 
7
  def generate(prompt, temperature=0.8, max_tokens=256):
8
  try:
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
+ # Use a model that supports text-generation
5
+ client = InferenceClient("meta-llama/Llama-3.2-1B-Instruct")
6
 
7
  def generate(prompt, temperature=0.8, max_tokens=256):
8
  try: