ashok2216 committed on
Commit
0cc0935
·
verified ·
1 Parent(s): cb7075f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -1,11 +1,12 @@
1
  import os
2
  import streamlit as st
3
- from huggingface_hub import InferenceApi, login
 
4
 
5
  # Streamlit app configuration
6
  st.set_page_config(page_title="Medical Chatbot")
7
- st.title("Medical Chatbot")
8
- st.subheader("I'm Your Medical Assistance", divider='rainbow')
9
  # Get the Hugging Face token from environment variables
10
  hf_token = os.getenv("HF_TOKEN")
11
  if hf_token is None:
@@ -32,7 +33,7 @@ st.sidebar.markdown(model_info[selected_model]['description'])
32
  st.sidebar.image(model_info[selected_model]['logo'])
33
 
34
  # Inference API Initialization
35
- client = InferenceApi(repo_id=model_links[selected_model], token=hf_token)
36
 
37
  # Sidebar settings
38
  max_tokens = st.sidebar.slider("Max new tokens", 1, 2048, 512)
 
1
  import os
2
  import streamlit as st
3
+ from huggingface_hub import InferenceApi, login, InferenceClient
4
+
5
 
6
  # Streamlit app configuration
7
  st.set_page_config(page_title="Medical Chatbot")
8
+ st.title("Medical Chatbot 🤖")
9
+ st.subheader("", divider='rainbow')
10
  # Get the Hugging Face token from environment variables
11
  hf_token = os.getenv("HF_TOKEN")
12
  if hf_token is None:
 
33
  st.sidebar.image(model_info[selected_model]['logo'])
34
 
35
  # Inference API Initialization
36
+ client = InferenceClient('HuggingFaceH4/zephyr-7b-beta')
37
 
38
  # Sidebar settings
39
  max_tokens = st.sidebar.slider("Max new tokens", 1, 2048, 512)