kokofixcomputers committed on
Commit
9fc50c0
·
verified ·
1 Parent(s): af6a5a0

add that for custom models too - Follow Up Deployment

Browse files
Files changed (1) hide show
  1. index.html +81 -8
index.html CHANGED
@@ -665,12 +665,33 @@
665
  }));
666
 
667
  let response;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
668
  if (model.type === 'default') {
669
  // Call OpenAI API
670
- response = await callOpenAIAPI(messages, model.id);
671
  } else {
672
  // Call custom model API
673
- response = await callCustomModelAPI(messages, model);
674
  }
675
 
676
  // Remove loading indicator
@@ -684,7 +705,6 @@
684
 
685
  currentChat.messages.push(assistantMessage);
686
  saveChats();
687
- renderMessages(chat.messages);
688
 
689
  // Update chat list
690
  renderChatList();
@@ -710,17 +730,20 @@
710
  }
711
 
712
  // Call OpenAI API
713
- async function callOpenAIAPI(messages, model, apiKey) {
 
 
714
  const response = await fetch('https://api.openai.com/v1/chat/completions', {
715
  method: 'POST',
716
  headers: {
717
  'Content-Type': 'application/json',
718
- 'Authorization': `Bearer ${apiKey}`
719
  },
720
  body: JSON.stringify({
721
  model: model === 'gpt-3.5' ? 'gpt-3.5-turbo' : 'gpt-4',
722
  messages,
723
- temperature: 0.7
 
724
  })
725
  });
726
 
@@ -729,8 +752,39 @@
729
  throw new Error(errorData.error?.message || 'Failed to call OpenAI API');
730
  }
731
 
732
- const data = await response.json();
733
- return data.choices[0]?.message?.content || 'No response from model';
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
734
  }
735
 
736
  // Call custom model API
@@ -1078,6 +1132,25 @@
1078
  e.preventDefault();
1079
  sendMessage();
1080
  });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1081
 
1082
  // Auto-resize textarea
1083
  messageInput.addEventListener('input', () => {
 
665
  }));
666
 
667
  let response;
668
+ // Create assistant message element for streaming
669
+ const messageElement = document.createElement('div');
670
+ messageElement.className = 'flex gap-4 justify-start';
671
+ messageElement.innerHTML = `
672
+ <div class="flex-shrink-0 w-8 h-8 rounded-full bg-primary-500 text-white flex items-center justify-center">
673
+ <i class="fas fa-robot"></i>
674
+ </div>
675
+ <div class="max-w-[80%] bg-white dark:bg-gray-800 rounded-lg p-4 shadow-sm relative group">
676
+ <div class="prose dark:prose-invert" id="streaming-content"></div>
677
+ </div>
678
+ `;
679
+ chatArea.appendChild(messageElement);
680
+ const contentElement = messageElement.querySelector('#streaming-content');
681
+
682
+ let response = '';
683
+ const onStream = (content) => {
684
+ response = content;
685
+ contentElement.innerHTML = marked.parse(response);
686
+ chatArea.scrollTop = chatArea.scrollHeight;
687
+ };
688
+
689
  if (model.type === 'default') {
690
  // Call OpenAI API
691
+ response = await callOpenAIAPI(messages, model.id, null, onStream);
692
  } else {
693
  // Call custom model API
694
+ response = await callCustomModelAPI(messages, model, onStream);
695
  }
696
 
697
  // Remove loading indicator
 
705
 
706
  currentChat.messages.push(assistantMessage);
707
  saveChats();
 
708
 
709
  // Update chat list
710
  renderChatList();
 
730
  }
731
 
732
  // Call OpenAI API
733
+ async function callOpenAIAPI(messages, model, apiKey, onStream) {
734
+ const isStreaming = model.streaming || false;
735
+
736
  const response = await fetch('https://api.openai.com/v1/chat/completions', {
737
  method: 'POST',
738
  headers: {
739
  'Content-Type': 'application/json',
740
+ 'Authorization': `Bearer ${apiKey || localStorage.getItem('openaiApiKey')}`
741
  },
742
  body: JSON.stringify({
743
  model: model === 'gpt-3.5' ? 'gpt-3.5-turbo' : 'gpt-4',
744
  messages,
745
+ temperature: 0.7,
746
+ stream: isStreaming
747
  })
748
  });
749
 
 
752
  throw new Error(errorData.error?.message || 'Failed to call OpenAI API');
753
  }
754
 
755
+ if (isStreaming && onStream) {
756
+ const reader = response.body.getReader();
757
+ const decoder = new TextDecoder();
758
+ let result = '';
759
+
760
+ while (true) {
761
+ const { done, value } = await reader.read();
762
+ if (done) break;
763
+
764
+ const chunk = decoder.decode(value);
765
+ const lines = chunk.split('\n').filter(line => line.trim() !== '');
766
+
767
+ for (const line of lines) {
768
+ if (line.startsWith('data: ')) {
769
+ const data = line.substring(6);
770
+ if (data === '[DONE]') continue;
771
+
772
+ try {
773
+ const parsed = JSON.parse(data);
774
+ const content = parsed.choices?.[0]?.delta?.content || '';
775
+ result += content;
776
+ onStream(result);
777
+ } catch (e) {
778
+ console.error('Error parsing stream data:', e);
779
+ }
780
+ }
781
+ }
782
+ }
783
+ return result;
784
+ } else {
785
+ const data = await response.json();
786
+ return data.choices[0]?.message?.content || 'No response from model';
787
+ }
788
  }
789
 
790
  // Call custom model API
 
1132
  e.preventDefault();
1133
  sendMessage();
1134
  });
1135
+
1136
+ messageInput.addEventListener('keydown', (e) => {
1137
+ if (e.key === 'Enter') {
1138
+ // Check for Ctrl+Enter or Command+Enter (Mac)
1139
+ if (e.ctrlKey || e.metaKey) {
1140
+ // Insert new line
1141
+ const start = messageInput.selectionStart;
1142
+ const end = messageInput.selectionEnd;
1143
+ messageInput.value = messageInput.value.substring(0, start) + '\n' + messageInput.value.substring(end);
1144
+ messageInput.selectionStart = messageInput.selectionEnd = start + 1;
1145
+ // Prevent default to avoid submitting
1146
+ e.preventDefault();
1147
+ } else if (!e.shiftKey) {
1148
+ // Regular Enter - submit form
1149
+ e.preventDefault();
1150
+ sendMessage();
1151
+ }
1152
+ }
1153
+ });
1154
 
1155
  // Auto-resize textarea
1156
  messageInput.addEventListener('input', () => {