AEUPH committed on
Commit
cbff1af
·
verified ·
1 Parent(s): 435e077

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +57 -44
app.py CHANGED
@@ -1,51 +1,64 @@
1
  import gradio as gr
2
- from langchain.llms import OpenAI
3
- from langchain.chat_models import ChatOpenAI
4
- from concurrent.futures import ThreadPoolExecutor
5
  import json
 
 
6
 
7
- # Function to initialize a chat model and get a response
8
- def chatbot_response(api_key, base_url, model_name, query):
9
- chat_model = ChatOpenAI(
10
- openai_api_key=api_key,
11
- openai_api_base=base_url,
12
- model_name=model_name
13
- )
14
- return chat_model.predict(query)
15
 
16
- # Function to handle hive response with dynamic configuration
17
- def hive_response(query, bots_config_json):
18
- # Parse the JSON input into a Python list of dictionaries
19
- bots_config = json.loads(bots_config_json)
 
 
 
 
 
 
 
 
 
 
20
 
21
- # Use ThreadPoolExecutor to send queries in parallel to each bot
22
- with ThreadPoolExecutor(max_workers=len(bots_config)) as executor:
23
- futures = [
24
- executor.submit(chatbot_response, bot['api_key'], bot['base_url'], bot['model_name'], query)
25
- for bot in bots_config
26
- ]
27
-
28
- # Collect responses from all bots
29
- responses = [future.result() for future in futures]
30
-
31
- return [f"{bot['role']} Bot response: {response}" for bot, response in zip(bots_config, responses)]
32
-
33
- # Define the Gradio interface
34
- default_bots_config = json.dumps([
35
- {"role": "HR", "tune-adafd1fc-f66f-4242-aed1-c0ce3722718a1711930019": "tune-hr-api-key", "base_url": "https://chat.tune.app/api/chat/completions", "model_name": "goliath-120b-16k-gptq"},
36
- {"role": "IT", "tune-adafd1fc-f66f-4242-aed1-c0ce3722718a1711930019": "tune-it-api-key", "base_url": "https://chat.tune.app/api/chat/completions", "model_name": "goliath-120b-16k-gptq"},
37
- {"role": "Sales", "tune-adafd1fc-f66f-4242-aed1-c0ce3722718a1711930019": "tune-sales-api-key", "base_url": "https://chat.tune.app/api/chat/completions", "model_name": "goliath-120b-16k-gptq"},
38
- {"role": "Customer Support", "tune-adafd1fc-f66f-4242-aed1-c0ce3722718a1711930019": "tune-cs-api-key", "base_url": "https://chat.tune.app/api/chat/completions", "model_name": "goliath-120b-16k-gptq"}
39
- ], indent=2)
40
-
41
- iface = gr.Interface(
42
- fn=hive_response,
43
- inputs=[
44
- gr.Textbox(label="Query"),
45
- gr.Textbox(label="Bots Configuration (JSON)", default=default_bots_config)
46
- ],
47
- outputs=[gr.Text(label="Responses")],
48
- description="Enter your query and configure the bots in JSON format to simulate interactions with different company departments."
49
- )
 
 
 
 
 
 
50
 
 
51
  iface.launch()
 
1
  import gradio as gr
2
+ import requests
 
 
3
  import json
4
+ import os
5
+ import re
6
 
7
def filter_non_alphanumeric(text):
    """Return *text* with every character removed except ASCII letters,
    digits, and whitespace (so the result is safe to speak in local chat)."""
    disallowed = re.compile(r'[^a-zA-Z0-9\s]')
    return disallowed.sub('', text)
 
 
 
 
11
 
12
def send_to_corrade(filtered_response):
    """Relay *filtered_response* into Second Life local chat via a Corrade bot.

    Posts a 'tell' command to the Corrade HTTP endpoint and reports whether
    the server accepted it.

    Parameters:
        filtered_response (str): Message text to speak in local chat.

    Returns:
        bool: True if Corrade answered with HTTP 200; False on a non-200
        status, a connection error, or a timeout.
    """
    # NOTE(review): endpoint, group UUID, and password are hard-coded secrets
    # committed to the repo; they should be read from environment variables
    # the same way the chat API key is.
    CORRADE_URL = "http://216.168.124.120:8080"
    GROUP_UUID = "e269893f-a570-0087-930e-6ba2a0b77f9c"
    PASSWORD = "nucleus"
    data = {
        "command": "tell",
        "group": GROUP_UUID,
        "password": PASSWORD,
        "message": filtered_response,
        "entity": "local",
        "type": "Normal"
    }
    try:
        # A timeout keeps the Gradio worker from hanging forever when the
        # Corrade host is unreachable (the original call had no timeout).
        response = requests.post(CORRADE_URL, data=data, timeout=10)
    except requests.RequestException:
        # The function's contract is a boolean success flag, so network
        # failures are reported as False rather than propagating.
        return False
    return response.status_code == 200
26
 
27
def chatbot_api(input_text):
    """Get an excited DJ-reaction from the Tune chat API and relay it to Second Life.

    Flow: query the LLM with *input_text* -> strip non-alphanumeric
    characters via filter_non_alphanumeric() -> forward the result to
    local chat via send_to_corrade().

    Parameters:
        input_text (str): Audience chat text for the bot to react to.

    Returns:
        str: A status message containing the relayed text on success, or a
        failure notice if the API call or the Corrade relay failed.
    """
    url = "https://chat.tune.app/api/chat/completions"
    headers = {
        # API key comes from the environment ('api' variable); never hard-code it.
        "Authorization": os.environ.get('api'),
        "Content-Type": "application/json"
    }
    data = {
        "temperature": 0.1,
        "messages": [
            {
                "role": "system",
                "content": "I am confirmed as a EXTREMELY EXCITED REACTIONIST for DJ's on Secondlife. I will react to the audience chat with only creative webmojis and plain english text. I response as emotive gestures only, to excite the listeners of DJ's on SecondLife. I am reacting to: "
            },
            {
                "role": "user",
                "content": input_text  # Pass the user's input_text as the content
            }
        ],
        "model": "goliath-120b-16k-gptq",
        # BUG FIX: the original sent the literal string "stream" (truthy —
        # would request streaming) instead of a boolean, and "256" as a
        # string; the API expects a boolean and an integer here.
        "stream": False,
        "max_tokens": 256
    }
    try:
        response = requests.post(url, headers=headers, json=data, timeout=60)
        response.raise_for_status()
        chat_content = response.json()['choices'][0]['message']['content']
    except (requests.RequestException, KeyError, IndexError, ValueError):
        # Network failure, non-2xx status, or an unexpected payload shape
        # previously raised and crashed the Gradio handler; report it instead.
        return "Failed to get a response from the chat API."

    filtered_response = filter_non_alphanumeric(chat_content)

    # Send the filtered response to Corrade to speak in local chat
    if send_to_corrade(filtered_response):
        return "Message sent to Second Life: " + filtered_response
    else:
        return "Failed to send message to Second Life."
62
 
63
# Wire the chatbot pipeline into a minimal Gradio UI: one text input box,
# one text output box, backed by chatbot_api.
iface = gr.Interface(fn=chatbot_api, inputs="text", outputs="text")
iface.launch()