willco-afk commited on
Commit
51427c6
·
1 Parent(s): 1b73caf

Updated app.py to use Hugging Face InferenceClient correctly

Browse files
Files changed (1) hide show
  1. app.py +7 -9
app.py CHANGED
@@ -1,19 +1,17 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
- # Use the correct Hugging Face InferenceClient initialization
5
- client = InferenceClient("willco-afk/languages") # No need for repo_id here
6
 
7
  # Define the prediction function
8
  def predict(input_text):
9
- try:
10
- result = client(input_text) # Call the model
11
- return result # Assuming the result is directly usable
12
- except Exception as e:
13
- return f"Error: {e}"
14
 
15
- # Create the Gradio interface
16
  iface = gr.Interface(fn=predict, inputs="text", outputs="text")
17
 
18
- # Launch the interface
19
  iface.launch()
 
import gradio as gr
from huggingface_hub import InferenceClient

# InferenceClient takes the model id via the `model` parameter —
# there is no `repo_id` keyword on InferenceClient; passing one
# raises TypeError at import time.
client = InferenceClient(model="willco-afk/languages")


def predict(input_text):
    """Run Hub inference on *input_text* and return a displayable string.

    Parameters:
        input_text: raw text typed into the Gradio textbox.

    Returns:
        The stringified inference result, or an "Error: ..." message if
        the remote call fails (so the UI shows the failure instead of
        crashing the callback).
    """
    try:
        # InferenceClient has no generic `predict` method; calls go through
        # task-specific helpers. NOTE(review): assumes the model is a
        # text-classification (language-ID) model — confirm its task tag
        # on the Hub and switch helpers if needed.
        result = client.text_classification(input_text)
        # The helper returns a list of label/score elements; stringify so
        # the "text" output component can render it.
        return str(result)
    except Exception as e:
        # Surface inference errors (network, auth, cold model) in the UI.
        return f"Error: {e}"


# Wire the prediction function into a simple text-in / text-out UI.
iface = gr.Interface(fn=predict, inputs="text", outputs="text")

# Launch the interface.
iface.launch()