Paulwalker4884 committed on
Commit
c1b12e2
·
1 Parent(s): 7e44aab

Initial commit

Browse files
Files changed (1) hide show
  1. app.py +15 -20
app.py CHANGED
@@ -25,7 +25,7 @@ logger.debug("Loading DeepSeek-Coder-6.7B-Instruct model")
25
  try:
26
  tokenizer = AutoTokenizer.from_pretrained("DeepSeek/DeepSeek-Coder-6.7B-Instruct", cache_dir="/tmp/hf_cache")
27
  model = AutoModelForCausalLM.from_pretrained("DeepSeek/DeepSeek-Coder-6.7B-Instruct", cache_dir="/tmp/hf_cache", torch_dtype=torch.float16, device_map="auto")
28
- code_gen = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=1024, temperature=0.7, top_p=0.9)
29
  logger.debug("DeepSeek-Coder-6.7B-Instruct model loaded")
30
  except Exception as e:
31
  logger.error(f"Failed to load DeepSeek model: {e}")
@@ -101,33 +101,29 @@ def get_history():
101
  def debug_model(text, language):
102
  logger.debug(f"Debugging model with input: {text}, language: {language}")
103
  try:
104
- prompt = f"""You are a coding assistant. The user provided this instruction in Persian: "{text}". Translate it to English, then write a complete, correct, and well-explained code in {language} based on the translated instruction. Return the English translation, the prompt, and the generated code."""
105
  logger.debug(f"Generated prompt: {prompt}")
106
- result = code_gen(prompt, max_new_tokens=1024, temperature=0.7, top_p=0.9, do_sample=True)[0]['generated_text']
107
  logger.debug(f"Generated output: {result}")
108
- return f"Debug: {result}"
109
  except Exception as e:
110
  logger.error(f"Debug error: {e}")
111
  return f"Debug error: {e}"
112
 
113
- # تولید پرامپت و کد
114
- def generate_code_and_prompt(text, language):
115
  logger.debug(f"Processing input: {text}, language: {language}")
116
  try:
117
- # پرامپت برای DeepSeek که هم ترجمه کنه و هم کد تولید کنه
118
- prompt = f"""You are a coding assistant. The user provided this instruction in Persian: "{text}". First, translate the instruction to English. Then, write a complete, correct, and well-explained code in {language} based on the translated instruction. Return the English translation followed by the code."""
119
  logger.debug(f"Generated prompt: {prompt}")
120
- result = code_gen(prompt, max_new_tokens=1024, temperature=0.7, top_p=0.9, do_sample=True)[0]['generated_text']
121
 
122
- # جدا کردن ترجمه و کد از خروجی
123
  lines = result.split('\n')
124
- translation = ""
125
  code = []
126
  is_code = False
127
  for line in lines:
128
- if line.startswith("Translation:"):
129
- translation = line.replace("Translation:", "").strip()
130
- elif line.startswith("```"):
131
  is_code = not is_code
132
  continue
133
  elif is_code:
@@ -138,26 +134,25 @@ def generate_code_and_prompt(text, language):
138
  logger.warning("Generated code is empty")
139
  code_output = "خطا: کد تولیدشده خالی است"
140
 
141
- logger.debug(f"Translation: {translation}")
142
  logger.debug(f"Generated code: {code_output}")
143
  torch.cuda.empty_cache()
144
- return translation, prompt, code_output
145
  except Exception as e:
146
  logger.error(f"Code generation error: {e}")
147
- return "", f"Error prompt: {text}", f"Code generation error: {e}"
148
 
149
  # تابع اصلی
150
  def christopher(user_input, language, show_history):
151
  logger.debug(f"Processing input: {user_input}, language: {language}, show_history: {show_history}")
152
  try:
153
  task_type = "نوشتن کد کامل"
154
- # تولید ترجمه، پرامپت و کد
155
- translation, prompt, response = generate_code_and_prompt(user_input, language)
156
  if "error" in response.lower():
157
  history = get_history() if show_history else ""
158
  return response, history
159
  # ذخیره تو دیتابیس (اختیاری)
160
- save_to_memory(user_input, task_type, f"Translation: {translation}\nPrompt: {prompt}", response)
161
  history = get_history() if show_history else ""
162
  return response, history
163
  except Exception as e:
 
# Load tokenizer, model and generation pipeline once at import time.
# On failure we only log: the app still starts, but `code_gen` stays
# undefined and every generation call will hit the callers' except paths.
try:
    # FIX: the Hugging Face org for this model is "deepseek-ai", not
    # "DeepSeek" — the original id "DeepSeek/DeepSeek-Coder-6.7B-Instruct"
    # does not exist on the Hub and from_pretrained would fail at runtime.
    MODEL_ID = "deepseek-ai/deepseek-coder-6.7b-instruct"
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, cache_dir="/tmp/hf_cache")
    model = AutoModelForCausalLM.from_pretrained(
        MODEL_ID,
        cache_dir="/tmp/hf_cache",
        torch_dtype=torch.float16,  # half precision: fine for inference, halves memory
        device_map="auto",          # place layers on available GPU(s)/CPU automatically
    )
    # Default sampling settings; callers may override per call.
    code_gen = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        max_new_tokens=512,
        temperature=0.6,
        top_p=0.95,
    )
    logger.debug("DeepSeek-Coder-6.7B-Instruct model loaded")
except Exception as e:
    logger.error(f"Failed to load DeepSeek model: {e}")
 
def debug_model(text, language):
    """Run one generation for *text* in *language* and return a debug string
    holding both the prompt sent to the model and its raw output.

    Any exception is caught, logged, and reported via the return value
    rather than raised, so the UI never crashes on a model error.
    """
    logger.debug(f"Debugging model with input: {text}, language: {language}")
    try:
        prompt = f"Write a complete, correct, and well-explained code in {language} to: {text}"
        logger.debug(f"Generated prompt: {prompt}")
        # Same sampling settings as the pipeline defaults, passed explicitly.
        gen_kwargs = {"max_new_tokens": 512, "temperature": 0.6, "top_p": 0.95, "do_sample": True}
        outputs = code_gen(prompt, **gen_kwargs)
        result = outputs[0]['generated_text']
        logger.debug(f"Generated output: {result}")
        return f"Debug: Prompt: {prompt}\nOutput: {result}"
    except Exception as e:
        logger.error(f"Debug error: {e}")
        return f"Debug error: {e}"
112
 
113
+ # Code generation: build an English prompt from the user text and pull the
+ # fenced ``` code block out of the model's raw output.
114
+ def generate_code(text, language):
115
  logger.debug(f"Processing input: {text}, language: {language}")
116
  try:
117
+ prompt = f"Write a complete, correct, and well-explained code in {language} to: {text}"
 
118
  logger.debug(f"Generated prompt: {prompt}")
119
+ result = code_gen(prompt, max_new_tokens=512, temperature=0.6, top_p=0.95, do_sample=True)[0]['generated_text']
120
 
121
+ # Extract the code from the output
122
  lines = result.split('\n')
 
123
  code = []
124
  is_code = False
125
  for line in lines:
126
+ if line.startswith("```"):
 
 
127
  is_code = not is_code
128
  continue
129
  elif is_code:
 
# NOTE(review): the diff hunk elides rendered lines 130-133 here (presumably
# the code.append / join / empty-check middle of the function) — this is a
# fragment as rendered; do not treat it as the complete function body.
134
  logger.warning("Generated code is empty")
135
  code_output = "خطا: کد تولیدشده خالی است"
136
 
 
137
  logger.debug(f"Generated code: {code_output}")
138
  torch.cuda.empty_cache()
139
+ return prompt, code_output
140
  except Exception as e:
141
  logger.error(f"Code generation error: {e}")
142
+ return f"Error prompt: {text}", f"Code generation error: {e}"
143
 
144
  # Main entry point: generate code for the user input, optionally persist the
  # exchange, and return (response, history) for the UI.
145
  def christopher(user_input, language, show_history):
146
  logger.debug(f"Processing input: {user_input}, language: {language}, show_history: {show_history}")
147
  try:
148
  task_type = "نوشتن کد کامل"
149
+ # Generate the prompt and the code
150
+ prompt, response = generate_code(user_input, language)
151
  if "error" in response.lower():
152
  history = get_history() if show_history else ""
153
  return response, history
154
  # Save to the database (optional)
155
+ save_to_memory(user_input, task_type, prompt, response)
156
  history = get_history() if show_history else ""
157
  return response, history
158
  except Exception as e:
# NOTE(review): the diff rendering truncates here — the body of this
# except handler (rendered line 159+) is not visible in this view.