File size: 3,010 Bytes
01d5a5d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import { create } from 'zustand';
import {
  getModelConfig,
  type IBaseModelParams,
  type IModelConfig,
  type IThinkingModelParams
} from '@/service/modelConfig';

/**
 * Shape of the model-configuration store.
 *
 * The full configuration (`modelConfig`) is mirrored into two partial views:
 * `baseModelConfig` (everything except the thinking_* fields) and
 * `thinkingModelConfig` (only thinking_model_name / thinking_api_key /
 * thinking_endpoint), kept in sync by the update actions below.
 */
interface ModelConfigState {
  // Complete configuration: base params plus thinking params.
  modelConfig: IModelConfig;
  // Partial view: all fields of modelConfig except the thinking_* ones.
  baseModelConfig: IBaseModelParams;
  // Partial view: only thinking_model_name, thinking_api_key, thinking_endpoint.
  thinkingModelConfig: IThinkingModelParams;
  // Loads the config from the backend and merges it into all three slices.
  // Errors are logged, not rethrown (the returned promise always resolves).
  fetchModelConfig: () => Promise<void>;
  // Merges a full config into modelConfig and fans it out to both partial views.
  updateModelConfig: (config: IModelConfig) => void;
  // Resets all three slices to empty objects.
  deleteModelConfig: () => void;
  // Merges base params into baseModelConfig and into modelConfig.
  updateBaseModelConfig: (config: IBaseModelParams) => void;
  // Merges thinking params into thinkingModelConfig and into modelConfig.
  updateThinkingModelConfig: (config: IThinkingModelParams) => void;
}

/**
 * Zustand store holding the model configuration.
 *
 * Invariant maintained by every update action:
 *   modelConfig === baseModelConfig ∪ thinkingModelConfig
 * where thinkingModelConfig holds exactly the thinking_* fields and
 * baseModelConfig holds everything else.
 */
export const useModelConfigStore = create<ModelConfigState>((set, get) => ({
  // Empty-object casts stand in for "not loaded yet"; consumers should treat
  // fields as possibly absent until fetchModelConfig has resolved.
  modelConfig: {} as IModelConfig,
  baseModelConfig: {} as IBaseModelParams,
  thinkingModelConfig: {} as IThinkingModelParams,

  fetchModelConfig: async () => {
    try {
      const res = await getModelConfig();
      if (res.data.code !== 0) {
        throw new Error(res.data.message);
      }
      // Delegate to updateModelConfig so the base/thinking split lives in
      // exactly one place instead of being duplicated here.
      get().updateModelConfig(res.data.data);
    } catch (error) {
      // Best-effort fetch: log and keep the current state. `error` is
      // unknown at runtime, so narrow before reading `.message`.
      console.error(
        error instanceof Error && error.message ? error.message : 'Failed to fetch model config'
      );
    }
  },

  updateModelConfig(config: IModelConfig) {
    // Split the incoming config into its thinking_* fields and the rest.
    const { thinking_model_name, thinking_api_key, thinking_endpoint, ...baseModelConfig } = config;

    set({
      modelConfig: { ...get().modelConfig, ...config },
      baseModelConfig: { ...get().baseModelConfig, ...baseModelConfig },
      thinkingModelConfig: {
        ...get().thinkingModelConfig,
        thinking_model_name,
        thinking_api_key,
        thinking_endpoint
      }
    });
  },

  deleteModelConfig() {
    // Reset all slices to the "not loaded" sentinel state.
    set({
      modelConfig: {} as IModelConfig,
      baseModelConfig: {} as IBaseModelParams,
      thinkingModelConfig: {} as IThinkingModelParams
    });
  },

  updateBaseModelConfig(config: IBaseModelParams) {
    // Single set (one subscriber notification) producing the same final
    // state as the previous two sequential set calls.
    const { modelConfig, baseModelConfig } = get();
    set({
      baseModelConfig: { ...baseModelConfig, ...config },
      modelConfig: { ...modelConfig, ...baseModelConfig, ...config }
    });
  },

  updateThinkingModelConfig(config: IThinkingModelParams) {
    // Same single-set merge as updateBaseModelConfig, for the thinking slice.
    const { modelConfig, thinkingModelConfig } = get();
    set({
      thinkingModelConfig: { ...thinkingModelConfig, ...config },
      modelConfig: { ...modelConfig, ...thinkingModelConfig, ...config }
    });
  }
}));