| """ |
| 以下所有配置也都支持利用环境变量覆写,环境变量配置格式见docker-compose.yml。 |
| 读取优先级:环境变量 > config_private.py > config.py |
| --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- |
| All the following configurations also support using environment variables to override, |
| and the environment variable configuration format can be seen in docker-compose.yml. |
| Configuration reading priority: environment variable > config_private.py > config.py |
| """ |
|
|
# API key for the LLM provider. Multiple keys may be joined with commas
# (common convention in this project — TODO confirm against the key parser).
# NOTE(review): API_KEY was assigned twice in the original file with the same
# placeholder value; the second assignment silently overrode the first, so the
# duplicate has been removed.
API_KEY = "此处填API密钥"
|
# Route traffic through a local proxy? Set to True BEFORE filling in `proxies`.
# If deploying directly on an overseas server, leave this as False.
USE_PROXY = False

if USE_PROXY:
    # Proxy format: "[protocol]://[address]:[port]"
    #   [protocol] usually socks5h or http; e.g. v2**y and ss* default to
    #              socks5h locally, while cl**h defaults to http.
    #   [address]  localhost or 127.0.0.1 when the proxy runs on this machine.
    #   [port]     found in your proxy software's settings page.
    # Setup guide: https://github.com/binary-husky/gpt_academic/issues/1
    proxies = {
        "http": "socks5h://localhost:11284",
        "https": "socks5h://localhost:11284",
    }
else:
    # No proxy configured.
    proxies = None
|
|
# Optional redirection map for OpenAI-style API endpoints, e.g.
# {"https://api.openai.com/v1/chat/completions": "http://localhost/v1/chat/completions"}.
# Empty dict means no redirection.
API_URL_REDIRECT = {}

# Number of parallel workers used by multi-threaded plugins — presumably
# bounded by the provider's rate limit; confirm against plugin code.
DEFAULT_WORKER_NUM = 3

# Active colour theme and the set of themes selectable from the UI.
THEME = "Chuanhu-Small-and-Beautiful"
AVAIL_THEMES = ["Default", "Chuanhu-Small-and-Beautiful", "High-Contrast", "Gstaff/Xkcd", "NoCrypt/Miku"]

# System prompt prepended to conversations by default.
INIT_SYS_PROMPT = "Serve me as a writing and programming assistant."

# Height of the chat window in pixels.
CHATBOT_HEIGHT = 1115

# Syntax-highlight code in replies?
CODE_HIGHLIGHT = True

# Window layout; "LEFT-RIGHT" = two-column layout (other accepted values
# are defined by the UI code — verify there).
LAYOUT = "LEFT-RIGHT"

# Start the UI in dark mode?
DARK_MODE = False

# Timeout for a single model request, in seconds.
TIMEOUT_SECONDS = 30

# Web UI port; -1 presumably means auto-select a free port — confirm
# against the launcher.
WEB_PORT = -1

# Number of retries after a failed request.
MAX_RETRY = 2
|
|
# Selected model and the models offered in the UI dropdown.
# LLM_MODEL must be an element of AVAIL_LLM_MODELS.
# NOTE(review): both LLM_MODEL and AVAIL_LLM_MODELS were assigned TWICE in the
# original file; the first pair (LLM_MODEL = "gpt-3.5-turbo" with a list that
# included "spark", "api2d-gpt-4", "api2d-gpt-3.5-turbo") was dead code,
# silently overwritten by the second pair below. Only the effective values are
# kept — if the dropped models are wanted, re-add them to AVAIL_LLM_MODELS.
LLM_MODEL = "gpt-3.5-turbo-16k"
AVAIL_LLM_MODELS = ["gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-preview",
                    "gpt-3.5-turbo-1106", "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
                    "gpt-4", "gpt-4-32k", "azure-gpt-4", "glm-4", "glm-3-turbo",
                    "gemini-pro", "chatglm3", "claude-2"]

# Plugin categories selected by default in the UI.
DEFAULT_FN_GROUPS = ['对话', '编程', '学术', '智能体']
|
|
|
|
# Models queried simultaneously by the multi-model dialogue feature,
# joined with "&".
MULTI_QUERY_LLM_MODELS = "gpt-3.5-turbo&chatglm3"

# Which local Qwen checkpoint to load (used by the local Qwen backend).
QWEN_LOCAL_MODEL_SELECTION = "Qwen/Qwen-1_8B-Chat-Int8"

# API key for Alibaba DashScope (online Qwen models).
DASHSCOPE_API_KEY = ""

# Baidu Qianfan credentials and model selection ("qianfan" backend).
BAIDU_CLOUD_API_KEY = ""
BAIDU_CLOUD_SECRET_KEY = ""
BAIDU_CLOUD_QIANFAN_MODEL = "ERNIE-Bot"

# Path to a P-Tuning checkpoint for a fine-tuned ChatGLM model
# (empty string = use the stock model).
CHATGLM_PTUNING_CHECKPOINT = ""

# Device and quantization for locally hosted models.
# "cpu" here; presumably "cuda" is also accepted — confirm in the model loaders.
LOCAL_MODEL_DEVICE = "cpu"
LOCAL_MODEL_QUANT = "FP16"
|
|
# Maximum number of concurrent connections the server accepts.
CONCURRENT_COUNT = 100

# Clear the input box automatically after each submission?
AUTO_CLEAR_TXT = False

# Show the live2d decorative character on the page.
ADD_WAIFU = True

# Login credentials as (username, password) tuples; an empty list
# disables authentication.
AUTHENTICATION = []

# URL path prefix the app is served under; rarely changed from "/".
CUSTOM_PATH = "/"

# SSL key/certificate file paths for HTTPS; empty strings = plain HTTP.
SSL_KEYFILE = ""
SSL_CERTFILE = ""

# OpenAI organization id (rarely needed).
API_ORG = ""
|
|
|
|
# Credentials for the Slack-based Claude bridge ("stack-claude" backend).
SLACK_CLAUDE_BOT_ID = ""
SLACK_CLAUDE_USER_TOKEN = ""

# Azure OpenAI: endpoint, key and deployment name for a single deployment.
AZURE_ENDPOINT = "https://你亲手写的api名称.openai.azure.com/"
AZURE_API_KEY = "填入azure openai api的密钥"
AZURE_ENGINE = "填入你亲手写的部署名"

# Multiple Azure deployments with dynamic switching; when non-empty this
# takes priority over the single-deployment settings above.
AZURE_CFG_ARRAY = {}

# NewBing settings — the NewBing endpoint is no longer stable and is
# not recommended.
NEWBING_STYLE = "creative"
NEWBING_COOKIES = """
put your new bing cookies here
"""

# Voice features via Alibaba Cloud speech services.
ENABLE_AUDIO = False
ALIYUN_TOKEN = ""
ALIYUN_APPKEY = ""
ALIYUN_ACCESSKEY = ""
ALIYUN_SECRET = ""
|
|
|
|
# iFlytek Spark ("spark" models) credentials.
XFYUN_APPID = "00000000"
XFYUN_API_SECRET = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
XFYUN_API_KEY = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"

# Zhipu AI (GLM) credentials. ZHIPUAI_MODEL appears unused by the model
# list above — presumably legacy; confirm before removing.
ZHIPUAI_API_KEY = ""
ZHIPUAI_MODEL = ""

# Anthropic Claude API key.
ANTHROPIC_API_KEY = ""

# Mathpix credentials (PDF formula/OCR recognition).
MATHPIX_APPID = ""
MATHPIX_APPKEY = ""

# Optional pattern for accepting non-standard API key formats
# (empty string = default validation).
CUSTOM_API_KEY_PATTERN = ""

# Google Gemini API key.
GEMINI_API_KEY = ""

# Hugging Face access token.
HUGGINGFACE_ACCESS_TOKEN = ""
|
|
|
|
# Pool of GROBID service URLs used for high-quality PDF parsing.
GROBID_URLS = [
    "https://qingxu98-grobid.hf.space", "https://qingxu98-grobid2.hf.space", "https://qingxu98-grobid3.hf.space",
    "https://qingxu98-grobid4.hf.space", "https://qingxu98-grobid5.hf.space", "https://qingxu98-grobid6.hf.space",
    "https://qingxu98-grobid7.hf.space", "https://qingxu98-grobid8.hf.space",
]

# Allow rewriting this configuration from the UI via natural language.
# This capability is inherently risky; keep disabled unless needed.
ALLOW_RESET_CONFIG = False

# Run AutoGen-executed code inside Docker?
AUTOGEN_USE_DOCKER = False

# Directory for user-uploaded files.
PATH_PRIVATE_UPLOAD = "private_upload"

# Directory for log output.
PATH_LOGGING = "gpt_log"

# Auxiliary tasks that are routed through the proxy (in addition to
# LLM traffic when USE_PROXY is enabled).
WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid",
                     "Warmup_Modules", "Nougat_Download", "AutoGen"]

# Reject requests carrying an obviously invalid API key?
BLOCK_INVALID_APIKEY = False

# Hot-reload plugins without restarting (useful for plugin development).
PLUGIN_HOT_RELOAD = False

# Number of user-customizable quick-action buttons in the UI.
NUM_CUSTOM_BASIC_BTN = 4
|
|
| """ |
| 在线大模型配置关联关系示意图 |
| │ |
| ├── "gpt-3.5-turbo" 等openai模型 |
| │ ├── API_KEY |
| │ ├── CUSTOM_API_KEY_PATTERN(不常用) |
| │ ├── API_ORG(不常用) |
| │ └── API_URL_REDIRECT(不常用) |
| │ |
| ├── "azure-gpt-3.5" 等azure模型(单个azure模型,不需要动态切换) |
| │ ├── API_KEY |
| │ ├── AZURE_ENDPOINT |
| │ ├── AZURE_API_KEY |
| │ ├── AZURE_ENGINE |
| │ └── API_URL_REDIRECT |
| │ |
| ├── "azure-gpt-3.5" 等azure模型(多个azure模型,需要动态切换,高优先级) |
| │ └── AZURE_CFG_ARRAY |
| │ |
| ├── "spark" 星火认知大模型 spark & sparkv2 |
| │ ├── XFYUN_APPID |
| │ ├── XFYUN_API_SECRET |
| │ └── XFYUN_API_KEY |
| │ |
| ├── "claude-1-100k" 等claude模型 |
| │ └── ANTHROPIC_API_KEY |
| │ |
| ├── "stack-claude" |
| │ ├── SLACK_CLAUDE_BOT_ID |
| │ └── SLACK_CLAUDE_USER_TOKEN |
| │ |
| ├── "qianfan" 百度千帆大模型库 |
| │ ├── BAIDU_CLOUD_QIANFAN_MODEL |
| │ ├── BAIDU_CLOUD_API_KEY |
| │ └── BAIDU_CLOUD_SECRET_KEY |
| │ |
| ├── "glm-4", "glm-3-turbo", "zhipuai" 智谱AI大模型 |
| │ └── ZHIPUAI_API_KEY |
| │ |
| ├── "qwen-turbo" 等通义千问大模型 |
| │ └── DASHSCOPE_API_KEY |
| │ |
| ├── "Gemini" |
| │ └── GEMINI_API_KEY |
| │ |
| └── "newbing" Newbing接口不再稳定,不推荐使用 |
| ├── NEWBING_STYLE |
| └── NEWBING_COOKIES |
| |
| |
| 本地大模型示意图 |
| │ |
| ├── "chatglm3" |
| ├── "chatglm" |
| ├── "chatglm_onnx" |
| ├── "chatglmft" |
| ├── "internlm" |
| ├── "moss" |
| ├── "jittorllms_pangualpha" |
| ├── "jittorllms_llama" |
| ├── "deepseekcoder" |
| ├── "qwen-local" |
| ├── RWKV的支持见Wiki |
| └── "llama2" |
| |
| |
| 用户图形界面布局依赖关系示意图 |
| │ |
| ├── CHATBOT_HEIGHT 对话窗的高度 |
| ├── CODE_HIGHLIGHT 代码高亮 |
| ├── LAYOUT 窗口布局 |
| ├── DARK_MODE 暗色模式 / 亮色模式 |
| ├── DEFAULT_FN_GROUPS 插件分类默认选项 |
| ├── THEME 色彩主题 |
| ├── AUTO_CLEAR_TXT 是否在提交时自动清空输入框 |
| ├── ADD_WAIFU 加一个live2d装饰 |
| └── ALLOW_RESET_CONFIG 是否允许通过自然语言描述修改本页的配置,该功能具有一定的危险性 |
| |
| |
| 插件在线服务配置依赖关系示意图 |
| │ |
| ├── 语音功能 |
| │ ├── ENABLE_AUDIO |
| │ ├── ALIYUN_TOKEN |
| │ ├── ALIYUN_APPKEY |
| │ ├── ALIYUN_ACCESSKEY |
| │ └── ALIYUN_SECRET |
| │ |
| └── PDF文档精准解析 |
| ├── GROBID_URLS |
| ├── MATHPIX_APPID |
| └── MATHPIX_APPKEY |
| |
| |
| """ |
|
|