binqiangliu committed on
Commit
9859223
·
1 Parent(s): 3d184b5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -26
app.py CHANGED
@@ -1,17 +1,12 @@
1
- import uvicorn
2
- import gunicorn
3
- from gunicorn.app.wsgiapp import run
4
  from flask import Flask, request, jsonify
5
- #import streamlit as st
6
  from langchain import PromptTemplate, LLMChain
7
  from langchain.memory import StreamlitChatMessageHistory
8
- #from streamlit_chat import message
9
  import numpy as np
10
  from langchain.chains import LLMChain
11
  from langchain.prompts import PromptTemplate
12
  from langchain.memory import ConversationBufferMemory
13
  from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
14
- #from streamlit.components.v1 import html
15
  from langchain import HuggingFaceHub
16
  import os
17
  from dotenv import load_dotenv
@@ -20,18 +15,6 @@ from pathlib import Path
20
  from huggingface_hub import InferenceClient
21
  from langchain import HuggingFaceHub
22
  import requests
23
- #from time import sleep
24
- import uuid
25
- import sys
26
- #from streamlit_extras.colored_header import colored_header
27
- #from streamlit_extras.add_vertical_space import add_vertical_space
28
-
29
- #st.set_page_config(page_title="AI Chatbot 100% Free", layout="wide")
30
- #st.write('完全开源免费的AI智能聊天助手 | Absolute Free & Opensouce AI Chatbot')
31
-
32
- #css_file = "main.css"
33
- #with open(css_file) as f:
34
- # st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
35
 
36
  # 初始化Chatbot
37
  HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')
@@ -60,8 +43,6 @@ assistant:
60
 
61
  llm_chain = LLMChain(llm=llm, prompt=PromptTemplate.from_template(prompt_template))
62
 
63
- #temp_user_query = st.chat_input("Enter your question here.")
64
-
65
  # 定义API端点
66
  app = Flask(__name__)
67
  @app.route('/api/chat', methods=['POST'])
@@ -77,9 +58,5 @@ def chat():
77
  #st.write(initial_response)
78
  return jsonify({'response': initial_response})
79
 
80
- if __name__ == '__main__':
81
- #app.run(host='0.0.0.0', port=port)
82
- #app.run(host='0.0.0.0')
83
- #uvicorn.run(app, host='0.0.0.0')
84
- #gunicorn.run(app, host='0.0.0.0')
85
- run(app, host='0.0.0.0')
 
1
+ import uvicorn
 
 
2
  from flask import Flask, request, jsonify
 
3
  from langchain import PromptTemplate, LLMChain
4
  from langchain.memory import StreamlitChatMessageHistory
 
5
  import numpy as np
6
  from langchain.chains import LLMChain
7
  from langchain.prompts import PromptTemplate
8
  from langchain.memory import ConversationBufferMemory
9
  from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
 
10
  from langchain import HuggingFaceHub
11
  import os
12
  from dotenv import load_dotenv
 
15
  from huggingface_hub import InferenceClient
16
  from langchain import HuggingFaceHub
17
  import requests
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
  # 初始化Chatbot
20
  HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')
 
43
 
44
  llm_chain = LLMChain(llm=llm, prompt=PromptTemplate.from_template(prompt_template))
45
 
 
 
46
  # 定义API端点
47
  app = Flask(__name__)
48
  @app.route('/api/chat', methods=['POST'])
 
58
  #st.write(initial_response)
59
  return jsonify({'response': initial_response})
60
 
61
if __name__ == "__main__":
    # Flask apps speak WSGI, but uvicorn serves ASGI by default; without
    # interface="wsgi" uvicorn would try to call the Flask app as an ASGI
    # callable and fail on the first request. interface="wsgi" makes uvicorn
    # wrap the WSGI app appropriately.
    # Bind to all interfaces on port 8888 (required for containerized hosting).
    uvicorn.run(app, host='0.0.0.0', port=8888, interface="wsgi")