ashutoshzade commited on
Commit
8771845
·
verified ·
1 Parent(s): 7bc109d

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +110 -0
app.py ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ '''
2
+ Fintelligence (C) 2024
3
+ Intelligent Finance
4
+ Released in Apache 2.0 license
5
+
6
+ '''
7
+ import streamlit as st
8
+ import os
9
+ from openai import OpenAI
10
+ import json
11
+
12
# OpenAI-compatible client pointed at the Hugging Face Inference API.
# Authentication is taken from the API_KEY environment variable
# (None if unset — requests will then fail with an auth error).
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1",
    api_key=os.environ.get("API_KEY"),
)

# Chat model served through the Inference API.
MODEL = "HuggingFaceH4/zephyr-7b-beta"
20
+
21
def get_current_weather(location, unit="celsius"):
    """Return a mock current-weather report for *location*.

    This is a stub used to demonstrate function calling: the temperature
    and forecast are hard-coded, only *location* and *unit* are echoed back.
    """
    return {
        "location": location,
        "temperature": "22",
        "unit": unit,
        "forecast": ["sunny", "windy"],
    }
31
+
32
# --- Streamlit chat UI ------------------------------------------------------
st.title("Chatbot with Hugging Face and Zephyr-7B-Beta")

# Initialize chat history (session_state persists across Streamlit reruns).
if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the conversation so far on every rerun.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Accept user input.
if prompt := st.chat_input("What is up?"):
    # Record and display the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Build the message list for the API call from the stored history.
    messages = [
        {"role": msg["role"], "content": msg["content"]}
        for msg in st.session_state.messages
    ]

    try:
        response = client.chat.completions.create(
            model=MODEL,
            messages=messages,
            functions=[
                {
                    "name": "get_current_weather",
                    "description": "Get the current weather in a given location",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "location": {"type": "string"},
                            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                        },
                        "required": ["location"],
                    },
                }
            ],
            function_call="auto",
        )

        choice_message = response.choices[0].message
        assistant_response = choice_message.content
        function_call = choice_message.function_call

        if function_call:
            function_name = function_call.name
            # May raise json.JSONDecodeError on malformed model output;
            # caught by the except below and shown as an error.
            function_args = json.loads(function_call.arguments)

            if function_name == "get_current_weather":
                function_response = get_current_weather(**function_args)

                # Function-calling protocol: the assistant message that
                # requested the call must precede the function result,
                # otherwise the follow-up request has an invalid transcript.
                messages.append(
                    {
                        "role": "assistant",
                        "content": assistant_response or "",
                        "function_call": {
                            "name": function_name,
                            "arguments": function_call.arguments,
                        },
                    }
                )
                messages.append(
                    {
                        "role": "function",
                        "name": function_name,
                        "content": json.dumps(function_response),
                    }
                )
                # Second round-trip: let the model phrase the function result.
                final_response = client.chat.completions.create(
                    model=MODEL,
                    messages=messages,
                )
                assistant_response = final_response.choices[0].message.content

        # The model may return only a function_call (content is None), or an
        # unknown function name may leave no text — never render/store None.
        if assistant_response is None:
            assistant_response = ""

        # Display the assistant response and add it to the chat history.
        with st.chat_message("assistant"):
            st.markdown(assistant_response)
        st.session_state.messages.append(
            {"role": "assistant", "content": assistant_response}
        )

    except Exception as e:
        # Surface API/parsing failures in the UI instead of crashing the app.
        st.error(f"An error occurred: {str(e)}")

# Note: Token usage information is not available with the Hugging Face API
st.sidebar.write("Note: Token usage information is not available with the Hugging Face API.")