victor HF Staff committed on
Commit
2bc6604
·
1 Parent(s): 719cfe8

Add optional chaining for choices array access

Browse files
src/lib/server/endpoints/openai/openAIChatToTextGenerationStream.ts CHANGED
@@ -61,10 +61,10 @@ export async function* openAIChatToTextGenerationStream(
61
  : typeof delta?.reasoning_content === "string"
62
  ? (delta.reasoning_content as string)
63
  : "";
64
- const last = choices[0]?.finish_reason === "stop" || choices[0]?.finish_reason === "length";
65
 
66
  // if the last token is a stop and the tool buffer is not empty, yield it as a generated_text
67
- if (choices[0]?.finish_reason === "stop" && toolBuffer.length > 0) {
68
  yield {
69
  token: {
70
  id: tokenId++,
@@ -79,7 +79,7 @@ export async function* openAIChatToTextGenerationStream(
79
  }
80
 
81
  // weird bug where the parameters are streamed in like this
82
- if (choices[0]?.delta?.tool_calls) {
83
  const calls = Array.isArray(choices[0].delta.tool_calls)
84
  ? choices[0].delta.tool_calls
85
  : [choices[0].delta.tool_calls];
 
61
  : typeof delta?.reasoning_content === "string"
62
  ? (delta.reasoning_content as string)
63
  : "";
64
+ const last = choices?.[0]?.finish_reason === "stop" || choices?.[0]?.finish_reason === "length";
65
 
66
  // if the last token is a stop and the tool buffer is not empty, yield it as a generated_text
67
+ if (choices?.[0]?.finish_reason === "stop" && toolBuffer.length > 0) {
68
  yield {
69
  token: {
70
  id: tokenId++,
 
79
  }
80
 
81
  // weird bug where the parameters are streamed in like this
82
+ if (choices?.[0]?.delta?.tool_calls) {
83
  const calls = Array.isArray(choices[0].delta.tool_calls)
84
  ? choices[0].delta.tool_calls
85
  : [choices[0].delta.tool_calls];
src/lib/server/endpoints/openai/openAICompletionToTextGenerationStream.ts CHANGED
@@ -12,8 +12,8 @@ export async function* openAICompletionToTextGenerationStream(
12
  let tokenId = 0;
13
  for await (const completion of completionStream) {
14
  const { choices } = completion;
15
- const text = choices[0]?.text ?? "";
16
- const last = choices[0]?.finish_reason === "stop" || choices[0]?.finish_reason === "length";
17
  if (text) {
18
  generatedText = generatedText + text;
19
  }
 
12
  let tokenId = 0;
13
  for await (const completion of completionStream) {
14
  const { choices } = completion;
15
+ const text = choices?.[0]?.text ?? "";
16
+ const last = choices?.[0]?.finish_reason === "stop" || choices?.[0]?.finish_reason === "length";
17
  if (text) {
18
  generatedText = generatedText + text;
19
  }