programci48 committed on
Commit
e796c15
·
verified ·
1 Parent(s): d98e8f9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -3
app.py CHANGED
@@ -91,12 +91,15 @@ async def predict(request: Request):
91
  top_p=0.9
92
  )
93
 
94
- response = app.state.models["tokenizer"].decode(
95
  outputs[0],
96
  skip_special_tokens=True
97
  ).strip()
98
-
99
- return {"generated_text": response}
 
 
 
100
 
101
  except Exception as e:
102
  logger.error(f"Tahmin hatası: {str(e)}")
 
91
  top_p=0.9
92
  )
93
 
94
+ full_response = app.state.models["tokenizer"].decode(
95
  outputs[0],
96
  skip_special_tokens=True
97
  ).strip()
98
+
99
+ # Sadece modelin ürettiği kısmı al (prompt'u çıkar)
100
+ generated_text = full_response[len(prompt):].strip()
101
+
102
+ return {"generated_text": generated_text}
103
 
104
  except Exception as e:
105
  logger.error(f"Tahmin hatası: {str(e)}")