{
"cells": [
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Running on local URL: http://127.0.0.1:7863\n",
"Running on public URL: https://e43e3020a427152c81.gradio.live\n",
"\n",
"This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"https://e43e3020a427152c81.gradio.live\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import os\n",
"\n",
"import gradio as gr\n",
"from openai import OpenAI\n",
"\n",
"# Read the API key from the environment; never hardcode credentials.\n",
"API_KEY = os.getenv(\"PPLX_API_KEY\")\n",
"client = OpenAI(api_key=API_KEY, base_url=\"https://api.perplexity.ai\")\n",
"\n",
"def predict(message, history):\n",
"    \"\"\"Stream a chat completion from Perplexity's pplx-70b-online model.\n",
"\n",
"    message: the latest user message (str).\n",
"    history: list of (user, assistant) message pairs from gradio's ChatInterface.\n",
"    Yields the progressively accumulated assistant reply for live display.\n",
"    \"\"\"\n",
"    # Convert gradio's (user, assistant) tuple history into OpenAI's message list.\n",
"    history_openai_format = []\n",
"    for human, assistant in history:\n",
"        history_openai_format.append({\"role\": \"user\", \"content\": human})\n",
"        history_openai_format.append({\"role\": \"assistant\", \"content\": assistant})\n",
"    history_openai_format.append({\"role\": \"user\", \"content\": message})\n",
"\n",
"    # Use Perplexity AI's 70B online model with a streaming response.\n",
"    response = client.chat.completions.create(\n",
"        model='pplx-70b-online',\n",
"        messages=history_openai_format,\n",
"        temperature=1.0,\n",
"        stream=True\n",
"    )\n",
"\n",
"    partial_message = \"\"\n",
"    for chunk in response:\n",
"        if chunk.choices[0].delta.content is not None:\n",
"            partial_message += chunk.choices[0].delta.content\n",
"            yield partial_message\n",
"\n",
"def change_textbox(choice):\n",
"    # Update the output textbox based on the selected essay length.\n",
"    if choice == \"short\":\n",
"        return gr.update(lines=2, visible=True, value=\"Short story: \")\n",
"    elif choice == \"long\":\n",
"        return gr.update(lines=8, visible=True, value=\"Long story...\")\n",
"    else:\n",
"        return gr.update(visible=False)\n",
"\n",
"with gr.Blocks(fill_height=True) as demo:\n",
"    radio = gr.Radio(\n",
"        [\"short\", \"long\", \"none\"], label=\"Essay Length to Write?\"\n",
"    )\n",
"    text = gr.Textbox(lines=2, interactive=True)\n",
"    radio.change(fn=change_textbox, inputs=radio, outputs=text)\n",
"    chat_interface = gr.ChatInterface(predict)\n",
"\n",
"demo.launch(share=True)\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "base",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}