Leon4gr45 committed on
Commit
0054a9d
·
verified ·
1 Parent(s): 527ca63

Upload folder using huggingface_hub

Browse files
app/api/analyze-edit-intent/route.ts CHANGED
@@ -83,7 +83,7 @@ export async function POST(request: NextRequest) {
83
 
84
  // Use AI to create a search plan
85
  const result = await generateObject({
86
- model: aiModel,
87
  schema: searchPlanSchema,
88
  messages: [
89
  {
 
83
 
84
  // Use AI to create a search plan
85
  const result = await generateObject({
86
+ model: aiModel(actualModel),
87
  schema: searchPlanSchema,
88
  messages: [
89
  {
app/api/generate-ai-code-stream/route.ts CHANGED
@@ -1187,7 +1187,7 @@ MORPH FAST APPLY MODE (EDIT-ONLY):
1187
 
1188
  // Make streaming API call with appropriate provider
1189
  const streamOptions: any = {
1190
- model: modelProvider,
1191
  messages: [
1192
  {
1193
  role: 'system',
@@ -1660,27 +1660,7 @@ Original request: ${prompt}
1660
  Provide the complete file content without any truncation. Include all necessary imports, complete all functions, and close all tags properly.`;
1661
 
1662
  // Make a focused API call to complete this specific file
1663
- // Create a new client for the completion based on the provider
1664
- let completionClient;
1665
- if (model.includes('gpt') || model.includes('openai')) {
1666
- completionClient = openai;
1667
- } else if (model.includes('claude')) {
1668
- completionClient = anthropic;
1669
- } else {
1670
- completionClient = openai;
1671
- }
1672
-
1673
- // Determine the correct model name for the completion
1674
- let completionModelName: string;
1675
- if (model.includes('openai')) {
1676
- completionModelName = model.replace('openai/', '');
1677
- } else if (model.includes('anthropic')) {
1678
- completionModelName = model.replace('anthropic/', '');
1679
- } else if (model.includes('google')) {
1680
- completionModelName = model.replace('google/', '');
1681
- } else {
1682
- completionModelName = model;
1683
- }
1684
 
1685
  const completionResult = await streamText({
1686
  model: completionClient(completionModelName),
 
1187
 
1188
  // Make streaming API call with appropriate provider
1189
  const streamOptions: any = {
1190
+ model: modelProvider(actualModel),
1191
  messages: [
1192
  {
1193
  role: 'system',
 
1660
  Provide the complete file content without any truncation. Include all necessary imports, complete all functions, and close all tags properly.`;
1661
 
1662
  // Make a focused API call to complete this specific file
1663
+ const { client: completionClient, actualModel: completionModelName } = getProviderForModel(model);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1664
 
1665
  const completionResult = await streamText({
1666
  model: completionClient(completionModelName),