File size: 2,022 Bytes
0133533
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bde2c7b
 
 
 
 
0133533
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bde2c7b
0133533
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
// Configuration constants for the proxy service.
const CONFIG = {
  // Listening port: prefer the PORT env var, parsed base-10 so the value is
  // always a number (the raw env value is a string). Falls back to 3000 when
  // PORT is unset or not a valid integer.
  port: Number.parseInt(process.env.PORT, 10) || 3000,

  // Upstream endpoints, looked up by `name` via getEndpointByType().
  endpoint: [
    {
      name: 'openai',
      base_url: 'https://app.factory.ai/api/llm/o/v1/responses'
    },
    {
      name: 'anthropic',
      base_url: 'https://app.factory.ai/api/llm/a/v1/messages'
    },
    {
      name: 'common',
      base_url: 'https://app.factory.ai/api/llm/o/v1/chat/completions'
    }
  ],

  // Supported models. `type` selects the endpoint above; `reasoning`
  // (optional) is validated/normalized by getModelReasoning().
  models: [
    {
      id: 'claude-opus-4-5-20251101',
      type: 'anthropic',
      reasoning: 'auto'
    },
    {
      id: 'claude-opus-4-1-20250805',
      type: 'anthropic',
      reasoning: 'auto'
    },
    {
      id: 'claude-haiku-4-5-20251001',
      type: 'anthropic',
      reasoning: 'auto'
    },
    {
      id: 'claude-sonnet-4-5-20250929',
      type: 'anthropic',
      reasoning: 'auto'
    },
    {
      id: 'gpt-5-codex',
      type: 'openai',
      reasoning: 'off'
    },
    {
      id: 'glm-4.6',
      type: 'common'
    }
  ],

  // User-Agent header sent upstream.
  user_agent: 'factory-cli/0.27.0',
  // Prefix prepended as the system prompt.
  system_prompt: 'You are Droid, an AI software engineering agent built by Factory.\n\n'
}

/**
 * Expose the full configuration object.
 * @returns {object} the module-level CONFIG constant (not a copy)
 */
export function getConfig() {
  return CONFIG
}

/**
 * Look up a model entry by its id.
 * @param {string} modelId - model identifier, e.g. 'gpt-5-codex'
 * @returns {object|undefined} the matching model entry, or undefined
 */
export function getModelById(modelId) {
  for (const model of CONFIG.models) {
    if (model.id === modelId) {
      return model
    }
  }
  return undefined
}

/**
 * Resolve the upstream endpoint entry for a model type.
 * @param {string} type - endpoint name ('openai' | 'anthropic' | 'common')
 * @returns {object|undefined} the matching endpoint entry, or undefined
 */
export function getEndpointByType(type) {
  const matches = CONFIG.endpoint.filter(endpoint => endpoint.name === type)
  return matches[0]
}

/**
 * Resolve the listening port.
 * Prefers the PORT environment variable, parsed base-10 (the original call
 * omitted the radix, so a value like '0x50' would have parsed as hex).
 * Falls back to CONFIG.port when PORT is unset or not a valid integer.
 * @returns {number} TCP port to listen on
 */
export function getPort() {
  return Number.parseInt(process.env.PORT, 10) || CONFIG.port
}

/**
 * Return the configured system prompt prefix.
 * @returns {string} the prompt text, or '' when none is configured
 */
export function getSystemPrompt() {
  const prompt = CONFIG.system_prompt
  return prompt ? prompt : ''
}

/**
 * Normalized reasoning level for a model.
 * @param {string} modelId - model identifier
 * @returns {string|null} 'low' | 'medium' | 'high' | 'auto' (lower-cased),
 *   or null when the model is unknown, has no reasoning setting, or the
 *   setting is not one of the recognized levels (e.g. 'off')
 */
export function getModelReasoning(modelId) {
  const setting = getModelById(modelId)?.reasoning
  if (!setting) {
    return null
  }
  const level = setting.toLowerCase()
  return ['low', 'medium', 'high', 'auto'].includes(level) ? level : null
}

/**
 * User-Agent header value to send on upstream requests.
 * @returns {string} the configured user agent string
 */
export function getUserAgent() {
  const { user_agent } = CONFIG
  return user_agent
}

/**
 * Outbound proxy URL from the environment.
 * @returns {string|null} PROXY_URL when set to a non-empty value, else null
 */
export function getProxyUrl() {
  const proxy = process.env.PROXY_URL
  return proxy ? proxy : null
}