File size: 3,548 Bytes
d145f59
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
import { useCallback, useRef } from 'react';
import { MESSAGE_ROLES } from '../constants/playground.constants';

/**
 * Two-way synchronization between the structured chat `message` array and the
 * free-form JSON `customRequestBody` string used in "custom request" mode.
 *
 * Guard refs (`isUpdatingFromMessage` / `isUpdatingFromCustomBody`) prevent the
 * two sync directions from re-triggering each other within the same pass, and
 * content hashes short-circuit no-op syncs.
 *
 * @param {boolean} customRequestMode - Whether custom-request mode is active; both syncs no-op when false.
 * @param {string} customRequestBody - The raw JSON request body string.
 * @param {Array<{id: string, role: string, content: string}>} message - The structured message list.
 * @param {{model?: string, temperature?: number, stream?: boolean}} inputs - Current request settings.
 * @param {Function} setCustomRequestBody - Setter for the JSON body string.
 * @param {Function} setMessage - Setter for the message list.
 * @param {Function} debouncedSaveConfig - Debounced persistence callback.
 * @returns {{syncMessageToCustomBody: Function, syncCustomBodyToMessage: Function}}
 */
export const useSyncMessageAndCustomBody = (
  customRequestMode,
  customRequestBody,
  message,
  inputs,
  setCustomRequestBody,
  setMessage,
  debouncedSaveConfig
) => {
  // Re-entrancy guards: set while one direction is writing so the opposite
  // direction skips the echo of that write.
  const isUpdatingFromMessage = useRef(false);
  const isUpdatingFromCustomBody = useRef(false);
  // Hashes of the last-synced state, used to skip redundant syncs.
  const lastMessageHash = useRef('');
  const lastCustomBodyHash = useRef('');

  // Stable hash of the message list; only id/role/content participate so
  // volatile fields (e.g. createAt) don't force re-syncs.
  const getMessageHash = useCallback((messages) => {
    return JSON.stringify((messages ?? []).map((msg) => ({
      id: msg.id,
      role: msg.role,
      content: msg.content
    })));
  }, []);

  // Hash of only the `messages` field of the custom body; unparsable JSON
  // hashes to '' so a malformed body never matches a previous sync.
  const getCustomBodyHash = useCallback((customBody) => {
    try {
      const parsed = JSON.parse(customBody);
      return JSON.stringify(parsed.messages || []);
    } catch {
      return '';
    }
  }, []);

  /**
   * Push the structured message list into the JSON custom body.
   * Preserves any extra fields already present in the body; if the body is
   * unparsable, rebuilds a minimal payload from `inputs`.
   */
  const syncMessageToCustomBody = useCallback(() => {
    if (!customRequestMode || isUpdatingFromCustomBody.current) return;

    const currentMessageHash = getMessageHash(message);
    if (currentMessageHash === lastMessageHash.current) return;

    try {
      isUpdatingFromMessage.current = true;
      let customPayload;

      try {
        customPayload = JSON.parse(customRequestBody || '{}');
      } catch {
        // Body is not valid JSON — start over with a minimal payload.
        customPayload = {
          model: inputs.model || 'gpt-4o',
          messages: [],
          // ?? (not ||) so an explicit temperature of 0 is preserved.
          temperature: inputs.temperature ?? 0.7,
          stream: inputs.stream !== false
        };
      }

      customPayload.messages = message.map((msg) => ({
        role: msg.role,
        content: msg.content
      }));

      const newCustomBody = JSON.stringify(customPayload, null, 2);
      setCustomRequestBody(newCustomBody);
      lastMessageHash.current = currentMessageHash;
      lastCustomBodyHash.current = getCustomBodyHash(newCustomBody);

      // Defer persistence to the next tick so React state settles first.
      setTimeout(() => {
        debouncedSaveConfig();
      }, 0);
    } finally {
      isUpdatingFromMessage.current = false;
    }
  }, [customRequestMode, customRequestBody, message, inputs.model, inputs.temperature, inputs.stream, getMessageHash, getCustomBodyHash, setCustomRequestBody, debouncedSaveConfig]);

  /**
   * Pull `messages` out of the JSON custom body into the structured message
   * list. Malformed JSON is logged and left alone; a body without a
   * `messages` array is ignored.
   */
  const syncCustomBodyToMessage = useCallback(() => {
    if (!customRequestMode || isUpdatingFromMessage.current) return;

    const currentCustomBodyHash = getCustomBodyHash(customRequestBody);
    if (currentCustomBodyHash === lastCustomBodyHash.current) return;

    try {
      isUpdatingFromCustomBody.current = true;
      const customPayload = JSON.parse(customRequestBody || '{}');

      if (Array.isArray(customPayload.messages)) {
        const newMessages = customPayload.messages.map((msg, index) => ({
          // ?? (not ||) so a legitimate falsy id (e.g. 0) is kept.
          id: msg.id ?? (index + 1).toString(),
          role: msg.role || MESSAGE_ROLES.USER,
          content: msg.content ?? '',
          createAt: Date.now(),
          // Assistant messages additionally carry reasoning state.
          ...(msg.role === MESSAGE_ROLES.ASSISTANT && {
            reasoningContent: msg.reasoningContent || '',
            isReasoningExpanded: false
          })
        }));

        setMessage(newMessages);
        lastCustomBodyHash.current = currentCustomBodyHash;
        lastMessageHash.current = getMessageHash(newMessages);
      }
    } catch (error) {
      console.warn('同步自定义请求体到消息失败:', error);
    } finally {
      isUpdatingFromCustomBody.current = false;
    }
  }, [customRequestMode, customRequestBody, getCustomBodyHash, getMessageHash, setMessage]);

  return {
    syncMessageToCustomBody,
    syncCustomBodyToMessage
  };
};