nixaut-codelabs committed on
Commit
9d68c64
·
verified ·
1 Parent(s): 205b3f4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -23
app.py CHANGED
@@ -78,40 +78,36 @@ def enhance_for_gradio(prompt_text, api_key):
78
  ]
79
 
80
  try:
81
- result = pipe(messages, max_new_tokens=256, temperature=0.7, do_sample=True)
 
 
 
 
 
 
82
  full_response = result[0]["generated_text"]
83
 
84
- # Eğer full_response bir liste ise (örneğin [{'role': '...', 'content': '...'}, ...])
85
  if isinstance(full_response, list):
86
  # Assistant rolündeki son mesajı bul
87
- assistant_messages = [msg["content"] for msg in full_response if msg["role"] == "assistant"]
88
  if assistant_messages:
89
  enhanced_prompt = assistant_messages[-1]
90
  else:
91
- enhanced_prompt = "No assistant response found."
 
92
  else:
93
- # Eğer string ise, assistant kısmını elle ayırmaya çalış
94
- if '"role": "assistant"' in full_response:
95
- parts = full_response.split('"role": "assistant"')
96
- last_part = parts[-1]
97
- if '"content":' in last_part:
98
- start = last_part.find('"content":') + len('"content":')
99
- content = last_part[start:].strip()
100
- if content.startswith('"'):
101
- end_quote = content[1:].find('"') + 1
102
- enhanced_prompt = content[1:end_quote]
103
- else:
104
- enhanced_prompt = content.split("}")[0].strip(' "\'')
105
- else:
106
- enhanced_prompt = "Could not extract assistant content."
107
- else:
108
- # Fallback: sadece assistant kısmını tahmin et
109
- enhanced_prompt = full_response.strip()
110
 
111
- # Özel token'ları temizle
112
  enhanced_prompt = enhanced_prompt.replace("<end_of_turn>", "").strip()
113
 
114
- return enhanced_prompt if enhanced_prompt else "No enhanced prompt generated."
 
 
 
 
115
 
116
  except Exception as e:
117
  return f"Enhancement failed: {str(e)}"
 
78
  ]
79
 
80
  try:
81
+ result = pipe(
82
+ messages,
83
+ max_new_tokens=512,
84
+ temperature=0.7,
85
+ do_sample=True,
86
+ stop_strings=["<end_of_turn>"] # Modelin üretmeyi durdurması gereken token
87
+ )
88
  full_response = result[0]["generated_text"]
89
 
90
+ # Eğer full_response bir liste ise
91
  if isinstance(full_response, list):
92
  # Assistant rolündeki son mesajı bul
93
+ assistant_messages = [msg.get("content", "") for msg in full_response if msg.get("role") == "assistant"]
94
  if assistant_messages:
95
  enhanced_prompt = assistant_messages[-1]
96
  else:
97
+ # Eğer assistant mesajı yoksa, tüm çıktıyı birleştir
98
+ enhanced_prompt = " ".join([msg.get("content", "") for msg in full_response])
99
  else:
100
+ # Eğer string ise
101
+ enhanced_prompt = full_response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
102
 
103
+ # Temizlik işlemleri
104
  enhanced_prompt = enhanced_prompt.replace("<end_of_turn>", "").strip()
105
 
106
+ # Eğer hâlâ boşsa fallback
107
+ if not enhanced_prompt:
108
+ return "Model did not generate a valid response. Try again or check the prompt."
109
+
110
+ return enhanced_prompt
111
 
112
  except Exception as e:
113
  return f"Enhancement failed: {str(e)}"