notpok committed on
Commit
c4630dd
·
verified ·
1 Parent(s): ba282ea

Upload 20 files

Browse files
Files changed (6) hide show
  1. .env.example +1 -0
  2. config/config.go +4 -0
  3. config/model.go +34 -10
  4. core/api.go +38 -30
  5. router/router.go +2 -2
  6. service/handle.go +2 -2
.env.example CHANGED
@@ -8,3 +8,4 @@ NO_ROLE_PREFIX=false
8
  SEARCH_RESULT_COMPATIBLE=false
9
  PROMPT_FOR_FILE=You must immerse yourself in the role of assistant in txt file, cannot respond as a user, cannot reply to this message, cannot mention this message, and ignore this message in your response.
10
  IGNORE_SEARCH_RESULT=false
 
 
8
  SEARCH_RESULT_COMPATIBLE=false
9
  PROMPT_FOR_FILE=You must immerse yourself in the role of assistant in txt file, cannot respond as a user, cannot reply to this message, cannot mention this message, and ignore this message in your response.
10
  IGNORE_SEARCH_RESULT=false
11
+ IS_MAX_SUBSCRIBE=false
config/config.go CHANGED
@@ -36,6 +36,7 @@ type Config struct {
36
  RwMutex sync.RWMutex
37
  IgnoreSerchResult bool
38
  IgnoreModelMonitoring bool
 
39
  }
40
 
41
  // 解析 SESSION 格式的环境变量
@@ -109,6 +110,8 @@ func LoadConfig() *Config {
109
  IgnoreModelMonitoring: os.Getenv("IGNORE_MODEL_MONITORING") == "true",
110
  // 读写锁
111
  RwMutex: sync.RWMutex{},
 
 
112
  }
113
 
114
  // 如果地址为空,使用默认值
@@ -153,4 +156,5 @@ func init() {
153
  logger.Info(fmt.Sprintf("PromptForFile: %s", ConfigInstance.PromptForFile))
154
  logger.Info(fmt.Sprintf("IgnoreSerchResult: %t", ConfigInstance.IgnoreSerchResult))
155
  logger.Info(fmt.Sprintf("IgnoreModelMonitoring: %t", ConfigInstance.IgnoreModelMonitoring))
 
156
  }
 
36
  RwMutex sync.RWMutex
37
  IgnoreSerchResult bool
38
  IgnoreModelMonitoring bool
39
+ IsMaxSubscribe bool
40
  }
41
 
42
  // 解析 SESSION 格式的环境变量
 
110
  IgnoreModelMonitoring: os.Getenv("IGNORE_MODEL_MONITORING") == "true",
111
  // 读写锁
112
  RwMutex: sync.RWMutex{},
113
+ // 是否max订阅
114
+ IsMaxSubscribe: os.Getenv("IS_MAX_SUBSCRIBE") == "true",
115
  }
116
 
117
  // 如果地址为空,使用默认值
 
156
  logger.Info(fmt.Sprintf("PromptForFile: %s", ConfigInstance.PromptForFile))
157
  logger.Info(fmt.Sprintf("IgnoreSerchResult: %t", ConfigInstance.IgnoreSerchResult))
158
  logger.Info(fmt.Sprintf("IgnoreModelMonitoring: %t", ConfigInstance.IgnoreModelMonitoring))
159
+ logger.Info(fmt.Sprintf("IsMaxSubscribe: %t", ConfigInstance.IsMaxSubscribe))
160
  }
config/model.go CHANGED
@@ -8,11 +8,16 @@ var ModelMap = map[string]string{
8
  "o4-mini": "o4mini",
9
  "gpt-4o": "gpt4o",
10
  "gemini-2.5-pro-06-05": "gemini2flash",
11
- "grok-3-beta": "grok",
12
  "gpt-4.1": "gpt41",
13
  // "claude-4.0-opus": "claude40opus",
14
- // "claude-4.0-opus-think": "claude40opusthinking",
15
- "o3": "o3",
 
 
 
 
 
16
  }
17
 
18
  // Get returns the value for the given key from the ModelMap.
@@ -33,17 +38,36 @@ func ModelReverseMapGet(key string, defaultValue string) string {
33
  return defaultValue
34
  }
35
 
36
- var ResponseModles []map[string]string
37
 
38
  func init() {
 
39
  for k, v := range ModelMap {
40
  ModelReverseMap[v] = k
41
- model := map[string]string{
42
- "id": k,
43
- }
44
- modelSearch := map[string]string{
45
- "id": k + "-search",
 
 
 
 
 
 
 
 
 
46
  }
47
- ResponseModles = append(ResponseModles, model, modelSearch)
 
 
 
 
 
 
 
 
 
48
  }
49
  }
 
8
  "o4-mini": "o4mini",
9
  "gpt-4o": "gpt4o",
10
  "gemini-2.5-pro-06-05": "gemini2flash",
11
+ "grok4": "grok4",
12
  "gpt-4.1": "gpt41",
13
  // "claude-4.0-opus": "claude40opus",
14
+ "claude-4.0-opus-think": "claude40opusthinking",
15
+ "o3": "o3",
16
+ "o3-pro": "o3pro",
17
+ }
18
+ var MaxModelMap = map[string]string{
19
+ "o3-pro": "o3pro",
20
+ "claude-4.0-opus-think": "claude40opusthinking",
21
  }
22
 
23
  // Get returns the value for the given key from the ModelMap.
 
38
  return defaultValue
39
  }
40
 
41
+ var ResponseModels []map[string]string
42
 
43
  func init() {
44
+ // 构建反向映射
45
  for k, v := range ModelMap {
46
  ModelReverseMap[v] = k
47
+ }
48
+ buildResponseModels()
49
+ }
50
+
51
+ // buildResponseModels 构建响应模型列表
52
+ func buildResponseModels() {
53
+ ResponseModels = make([]map[string]string, 0, len(ModelMap)*2)
54
+
55
+ for modelID := range ModelMap {
56
+ // 如果不是最大订阅用户,跳过最大模型
57
+ if !ConfigInstance.IsMaxSubscribe {
58
+ if _, isMaxModel := MaxModelMap[modelID]; isMaxModel {
59
+ continue
60
+ }
61
  }
62
+
63
+ // 添加普通模型
64
+ ResponseModels = append(ResponseModels, map[string]string{
65
+ "id": modelID,
66
+ })
67
+
68
+ // 添加搜索模型
69
+ ResponseModels = append(ResponseModels, map[string]string{
70
+ "id": modelID + "-search",
71
+ })
72
  }
73
  }
core/api.go CHANGED
@@ -350,7 +350,7 @@ func (c *Client) HandleResponse(body io.ReadCloser, stream bool, gc *gin.Context
350
  if block.MarkdownBlock != nil && len(block.MarkdownBlock.Chunks) > 0 {
351
  res_text := ""
352
  if inThinking {
353
- res_text += "</think>\n"
354
  inThinking = false
355
  thinkShown = true
356
  }
@@ -470,6 +470,7 @@ func (c *Client) UploadImage(img_list []string) error {
470
  }
471
 
472
  func (c *Client) UloadFileToCloudinary(uploadInfo CloudinaryUploadInfo, contentType string, filedata string, filename string) error {
 
473
  if len(filedata) > 100 {
474
  logger.Info(fmt.Sprintf("filedata: %s ……", filedata[:50]))
475
  }
@@ -478,18 +479,25 @@ func (c *Client) UloadFileToCloudinary(uploadInfo CloudinaryUploadInfo, contentT
478
  var formFields map[string]string
479
  if contentType == "img" {
480
  formFields = map[string]string{
481
- "timestamp": fmt.Sprintf("%d", uploadInfo.Timestamp),
482
- "unique_filename": uploadInfo.UniqueFilename,
483
- "folder": uploadInfo.Folder,
484
- "use_filename": uploadInfo.UseFilename,
485
- "public_id": uploadInfo.PublicID,
486
- "transformation": uploadInfo.Transformation,
487
- "moderation": uploadInfo.Moderation,
488
- "resource_type": uploadInfo.ResourceType,
489
- "api_key": uploadInfo.APIKey,
490
- "cloud_name": uploadInfo.CloudName,
491
- "signature": uploadInfo.Signature,
492
- "type": "private",
 
 
 
 
 
 
 
493
  }
494
  } else {
495
  formFields = map[string]string{
@@ -539,11 +547,11 @@ func (c *Client) UloadFileToCloudinary(uploadInfo CloudinaryUploadInfo, contentT
539
 
540
  // Create the upload request
541
  var uploadURL string
542
- if contentType == "img" {
543
- uploadURL = fmt.Sprintf("https://api.cloudinary.com/v1_1/%s/image/upload", uploadInfo.CloudName)
544
- } else {
545
- uploadURL = "https://ppl-ai-file-upload.s3.amazonaws.com/"
546
- }
547
 
548
  resp, err := c.client.R().
549
  SetHeader("Content-Type", writer.FormDataContentType()).
@@ -555,23 +563,23 @@ func (c *Client) UloadFileToCloudinary(uploadInfo CloudinaryUploadInfo, contentT
555
  return err
556
  }
557
  logger.Info(fmt.Sprintf("Image Upload with status code %d: %s", resp.StatusCode, resp.String()))
558
- if contentType == "img" {
559
- var uploadResponse map[string]interface{}
560
- if err := json.Unmarshal(resp.Bytes(), &uploadResponse); err != nil {
561
- return err
562
- }
563
- imgUrl := uploadResponse["secure_url"].(string)
564
- imgUrl = "https://pplx-res.cloudinary.com/image/private" + imgUrl[strings.Index(imgUrl, "/user_uploads"):]
565
- c.Attachments = append(c.Attachments, imgUrl)
566
- } else {
567
- c.Attachments = append(c.Attachments, "https://ppl-ai-file-upload.s3.amazonaws.com/"+uploadInfo.Key)
568
- }
569
  return nil
570
  }
571
 
572
  // SetBigContext is a placeholder for setting context
573
  func (c *Client) UploadText(context string) error {
574
- logger.Info("Uploading txt to Cloudinary")
575
  filedata := base64.StdEncoding.EncodeToString([]byte(context))
576
  filename := utils.RandomString(5) + ".txt"
577
  // Upload images to Cloudinary
 
350
  if block.MarkdownBlock != nil && len(block.MarkdownBlock.Chunks) > 0 {
351
  res_text := ""
352
  if inThinking {
353
+ res_text += "</think>\n\n"
354
  inThinking = false
355
  thinkShown = true
356
  }
 
470
  }
471
 
472
  func (c *Client) UloadFileToCloudinary(uploadInfo CloudinaryUploadInfo, contentType string, filedata string, filename string) error {
473
+ // 更新为 AWS S3 上传
474
  if len(filedata) > 100 {
475
  logger.Info(fmt.Sprintf("filedata: %s ……", filedata[:50]))
476
  }
 
479
  var formFields map[string]string
480
  if contentType == "img" {
481
  formFields = map[string]string{
482
+ // "timestamp": fmt.Sprintf("%d", uploadInfo.Timestamp),
483
+ // "unique_filename": uploadInfo.UniqueFilename,
484
+ // "folder": uploadInfo.Folder,
485
+ // "use_filename": uploadInfo.UseFilename,
486
+ // "public_id": uploadInfo.PublicID,
487
+ // "transformation": uploadInfo.Transformation,
488
+ // "moderation": uploadInfo.Moderation,
489
+ // "resource_type": uploadInfo.ResourceType,
490
+ // "api_key": uploadInfo.APIKey,
491
+ // "cloud_name": uploadInfo.CloudName,
492
+ "signature": uploadInfo.Signature,
493
+ // "type": "private",
494
+ "key": uploadInfo.Key,
495
+ "tagging": uploadInfo.Tagging,
496
+ "AWSAccessKeyId": uploadInfo.AWSAccessKeyId,
497
+ "policy": uploadInfo.Policy,
498
+ "x-amz-security-token": uploadInfo.Xamzsecuritytoken,
499
+ "acl": uploadInfo.ACL,
500
+ "Content-Type": "image/jpeg", // Assuming image/jpeg for images
501
  }
502
  } else {
503
  formFields = map[string]string{
 
547
 
548
  // Create the upload request
549
  var uploadURL string
550
+ // if contentType == "img" {
551
+ // uploadURL = fmt.Sprintf("https://api.cloudinary.com/v1_1/%s/image/upload", uploadInfo.CloudName)
552
+ // } else {
553
+ uploadURL = "https://ppl-ai-file-upload.s3.amazonaws.com/"
554
+ // }
555
 
556
  resp, err := c.client.R().
557
  SetHeader("Content-Type", writer.FormDataContentType()).
 
563
  return err
564
  }
565
  logger.Info(fmt.Sprintf("Image Upload with status code %d: %s", resp.StatusCode, resp.String()))
566
+ // if contentType == "img" {
567
+ // var uploadResponse map[string]interface{}
568
+ // if err := json.Unmarshal(resp.Bytes(), &uploadResponse); err != nil {
569
+ // return err
570
+ // }
571
+ // imgUrl := uploadResponse["secure_url"].(string)
572
+ // imgUrl = "https://pplx-res.cloudinary.com/image/private" + imgUrl[strings.Index(imgUrl, "/user_uploads"):]
573
+ // c.Attachments = append(c.Attachments, imgUrl)
574
+ // } else {
575
+ c.Attachments = append(c.Attachments, "https://ppl-ai-file-upload.s3.amazonaws.com/"+uploadInfo.Key)
576
+ // }
577
  return nil
578
  }
579
 
580
  // SetBigContext is a placeholder for setting context
581
  func (c *Client) UploadText(context string) error {
582
+ logger.Info("Uploading txt to AWS")
583
  filedata := base64.StdEncoding.EncodeToString([]byte(context))
584
  filename := utils.RandomString(5) + ".txt"
585
  // Upload images to Cloudinary
router/router.go CHANGED
@@ -17,14 +17,14 @@ func SetupRoutes(r *gin.Engine) {
17
 
18
  // Chat completions endpoint (OpenAI-compatible)
19
  r.POST("/v1/chat/completions", service.ChatCompletionsHandler)
20
- r.GET("/v1/models", service.MoudlesHandler)
21
  // HuggingFace compatible routes
22
  hfRouter := r.Group("/hf")
23
  {
24
  v1Router := hfRouter.Group("/v1")
25
  {
26
  v1Router.POST("/chat/completions", service.ChatCompletionsHandler)
27
- v1Router.GET("/models", service.MoudlesHandler)
28
  }
29
  }
30
  }
 
17
 
18
  // Chat completions endpoint (OpenAI-compatible)
19
  r.POST("/v1/chat/completions", service.ChatCompletionsHandler)
20
+ r.GET("/v1/models", service.ModelsHandler)
21
  // HuggingFace compatible routes
22
  hfRouter := r.Group("/hf")
23
  {
24
  v1Router := hfRouter.Group("/v1")
25
  {
26
  v1Router.POST("/chat/completions", service.ChatCompletionsHandler)
27
+ v1Router.GET("/models", service.ModelsHandler)
28
  }
29
  }
30
  }
service/handle.go CHANGED
@@ -163,8 +163,8 @@ func ChatCompletionsHandler(c *gin.Context) {
163
  Error: "Failed to process request after multiple attempts"})
164
  }
165
 
166
- func MoudlesHandler(c *gin.Context) {
167
  c.JSON(http.StatusOK, gin.H{
168
- "data": config.ResponseModles,
169
  })
170
  }
 
163
  Error: "Failed to process request after multiple attempts"})
164
  }
165
 
166
+ func ModelsHandler(c *gin.Context) {
167
  c.JSON(http.StatusOK, gin.H{
168
+ "data": config.ResponseModels,
169
  })
170
  }