AiCoderv2 committed on
Commit
c742cde
·
verified ·
1 Parent(s): 541cd2b

Update Gradio app with multiple files

Browse files
Files changed (3) hide show
  1. README.md +9 -2
  2. app.py +5 -3
  3. requirements.txt +2 -1
README.md CHANGED
@@ -7,8 +7,6 @@ sdk: gradio
7
  sdk_version: 4.44.0
8
  app_file: app.py
9
  pinned: false
10
- tags:
11
- - anycoder
12
  ---
13
 
14
  # AI Chatbot with Hugging Face Model
@@ -20,6 +18,15 @@ This is a simple chatbot application built with Gradio and powered by a Hugging
20
  - Conversational AI using Microsoft's DialoGPT model
21
  - Gradio interface for easy interaction
22
  - Maintains conversation history
 
 
 
 
 
 
 
 
 
23
 
24
  ## Usage
25
 
 
7
  sdk_version: 4.44.0
8
  app_file: app.py
9
  pinned: false
 
 
10
  ---
11
 
12
  # AI Chatbot with Hugging Face Model
 
18
  - Conversational AI using Microsoft's DialoGPT model
19
  - Gradio interface for easy interaction
20
  - Maintains conversation history
21
+ - Supports Hugging Face token for accessing private models or increased rate limits
22
+
23
+ ## Setup
24
+
25
+ To use a Hugging Face token (optional but recommended for better performance):
26
+
27
+ 1. Create a Hugging Face account at https://huggingface.co
28
+ 2. Generate a token at https://huggingface.co/settings/tokens
29
+ 3. In your Space settings, add the token as an environment variable named `HF_TOKEN`
30
 
31
  ## Usage
32
 
app.py CHANGED
@@ -2,10 +2,12 @@ import gradio as gr
2
  from transformers import pipeline, Conversation
3
  import os
4
 
5
- # Load the conversational model
6
  # Using DialoGPT-small for a simple chatbot
7
  # You can change this to any other conversational model like 'microsoft/DialoGPT-medium', 'facebook/blenderbot-400M-distill', or 'facebook/blenderbot-3B'
8
- chatbot_model = pipeline("conversational", model="microsoft/DialoGPT-small")
 
 
9
 
10
  def chat(message, history):
11
  # Convert history to Conversation format
@@ -31,7 +33,7 @@ def chat(message, history):
31
  demo = gr.ChatInterface(
32
  fn=chat,
33
  title="AI Chatbot",
34
- description="Chat with an AI powered by Hugging Face transformers.",
35
  theme=gr.themes.Soft()
36
  )
37
 
 
2
  from transformers import pipeline, Conversation
3
  import os
4
 
5
+ # Load the conversational model with HF token support
6
  # Using DialoGPT-small for a simple chatbot
7
  # You can change this to any other conversational model like 'microsoft/DialoGPT-medium', 'facebook/blenderbot-400M-distill', or 'facebook/blenderbot-3B'
8
+ # The app will use the HF_TOKEN environment variable if set
9
+ token = os.getenv('HF_TOKEN')
10
+ chatbot_model = pipeline("conversational", model="microsoft/DialoGPT-small", token=token)
11
 
12
  def chat(message, history):
13
  # Convert history to Conversation format
 
33
  demo = gr.ChatInterface(
34
  fn=chat,
35
  title="AI Chatbot",
36
+ description="Chat with an AI powered by Hugging Face transformers. <a href='https://huggingface.co/spaces/akhaliq/anycoder' target='_blank'>Built with anycoder</a>",
37
  theme=gr.themes.Soft()
38
  )
39
 
requirements.txt CHANGED
@@ -2,4 +2,5 @@ gradio
2
  transformers
3
  torch
4
  tokenizers
5
- accelerate
 
 
2
  transformers
3
  torch
4
  tokenizers
5
+ accelerate
6
+ huggingface_hub