Spaces:
Sleeping
Sleeping
Commit
·
0c3d039
1
Parent(s):
e026a88
Upload 12 files
Browse files
app.py
CHANGED
|
@@ -47,11 +47,8 @@ from langchain.chains.question_answering import load_qa_chain
|
|
| 47 |
# os.environ["NEWS_API_KEY"] = ""
|
| 48 |
# os.environ["TMDB_BEARER_TOKEN"] = ""
|
| 49 |
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
news_api_key = "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"
|
| 53 |
-
# tmdb_bearer_token = os.environ["TMDB_BEARER_TOKEN"]
|
| 54 |
-
tmdb_bearer_token = "ef6345567bb53731af1fd359c5ed5ec9"
|
| 55 |
|
| 56 |
|
| 57 |
TOOLS_LIST = ['serpapi', 'wolfram-alpha', 'pal-math', 'pal-colored-objects', 'news-api'] #'google-search','news-api','tmdb-api','open-meteo-api'
|
|
@@ -646,7 +643,9 @@ with gr.Blocks(css=".gradio-container {background-color: lightgray}") as block:
|
|
| 646 |
</b></center></p>""")
|
| 647 |
|
| 648 |
openai_api_key_textbox = gr.Textbox(placeholder="sk-... 시작하는 OpenAI API key 붙여넣기",
|
| 649 |
-
show_label=False, lines=1, type='password'
|
|
|
|
|
|
|
| 650 |
|
| 651 |
with gr.Row():
|
| 652 |
with gr.Column(scale=1, min_width=TALKING_HEAD_WIDTH, visible=True):
|
|
@@ -670,7 +669,7 @@ with gr.Blocks(css=".gradio-container {background-color: lightgray}") as block:
|
|
| 670 |
chatbot = gr.Chatbot()
|
| 671 |
|
| 672 |
with gr.Row():
|
| 673 |
-
message = gr.Textbox(label="
|
| 674 |
placeholder="지금 떠오르는 생각을 한번 말해 보세요",
|
| 675 |
lines=1)
|
| 676 |
submit = gr.Button(value="Send", variant="secondary").style(full_width=False)
|
|
@@ -936,14 +935,23 @@ with gr.Blocks(css=".gradio-container {background-color: lightgray}") as block:
|
|
| 936 |
|
| 937 |
openai_api_key_textbox.change(None,
|
| 938 |
inputs=[openai_api_key_textbox],
|
|
|
|
| 939 |
outputs=None, _js="(api_key) => localStorage.setItem('open_api_key', api_key)")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 940 |
|
| 941 |
openai_api_key_textbox.change(set_openai_api_key,
|
| 942 |
inputs=[openai_api_key_textbox],
|
| 943 |
outputs=[chain_state, express_chain_state, llm_state, embeddings_state,
|
| 944 |
qa_chain_state, memory_state])
|
| 945 |
|
| 946 |
-
|
|
|
|
|
|
|
| 947 |
|
| 948 |
|
| 949 |
block.launch(debug=True)
|
|
|
|
| 47 |
# os.environ["NEWS_API_KEY"] = ""
|
| 48 |
# os.environ["TMDB_BEARER_TOKEN"] = ""
|
| 49 |
|
| 50 |
+
news_api_key = os.environ["NEWS_API_KEY"]
|
| 51 |
+
tmdb_bearer_token = os.environ["TMDB_BEARER_TOKEN"]
|
|
|
|
|
|
|
|
|
|
| 52 |
|
| 53 |
|
| 54 |
TOOLS_LIST = ['serpapi', 'wolfram-alpha', 'pal-math', 'pal-colored-objects', 'news-api'] #'google-search','news-api','tmdb-api','open-meteo-api'
|
|
|
|
| 643 |
</b></center></p>""")
|
| 644 |
|
| 645 |
openai_api_key_textbox = gr.Textbox(placeholder="sk-... 시작하는 OpenAI API key 붙여넣기",
|
| 646 |
+
show_label=False, lines=1, type='password')
|
| 647 |
+
# openai_api_key_textbox = gr.Textbox(placeholder="Paste your OpenAI API key (sk-...) and hit Enter",
|
| 648 |
+
# show_label=False, lines=1, type='password')
|
| 649 |
|
| 650 |
with gr.Row():
|
| 651 |
with gr.Column(scale=1, min_width=TALKING_HEAD_WIDTH, visible=True):
|
|
|
|
| 669 |
chatbot = gr.Chatbot()
|
| 670 |
|
| 671 |
with gr.Row():
|
| 672 |
+
message = gr.Textbox(label="무엇을 도와드릴까요?",
|
| 673 |
placeholder="지금 떠오르는 생각을 한번 말해 보세요",
|
| 674 |
lines=1)
|
| 675 |
submit = gr.Button(value="Send", variant="secondary").style(full_width=False)
|
|
|
|
| 935 |
|
| 936 |
openai_api_key_textbox.change(None,
|
| 937 |
inputs=[openai_api_key_textbox],
|
| 938 |
+
# outputs=None, _js="() => localStorage.setItem('open_api_key', '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')")
|
| 939 |
outputs=None, _js="(api_key) => localStorage.setItem('open_api_key', api_key)")
|
| 940 |
+
|
| 941 |
+
# openai_api_key_textbox.change(set_openai_api_key,
|
| 942 |
+
# inputs=[openai_api_key_textbox],
|
| 943 |
+
# outputs=[chain_state, express_chain_state, llm_state, embeddings_state,
|
| 944 |
+
# qa_chain_state, memory_state])
|
| 945 |
+
|
| 946 |
|
| 947 |
openai_api_key_textbox.change(set_openai_api_key,
|
| 948 |
inputs=[openai_api_key_textbox],
|
| 949 |
outputs=[chain_state, express_chain_state, llm_state, embeddings_state,
|
| 950 |
qa_chain_state, memory_state])
|
| 951 |
|
| 952 |
+
|
| 953 |
+
|
| 954 |
+
# block.load(None, inputs=None, outputs=openai_api_key_textbox, _js="()=> localStorage.getItem('open_api_key')")
|
| 955 |
|
| 956 |
|
| 957 |
block.launch(debug=True)
|