mostafasmart committed on
Commit
58518f0
·
1 Parent(s): 5d75ab3

Add requirements00

Browse files
Files changed (1) hide show
  1. app.py +18 -17
app.py CHANGED
@@ -1,13 +1,12 @@
1
  import gradio as gr
2
  import torch
3
- from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
4
 
5
  # ----------------------
6
  # 1. تحميل المودل
7
  # ----------------------
8
  model_name = "bigcode/starcoder2-7b"
9
 
10
-
11
  tokenizer = AutoTokenizer.from_pretrained(
12
  model_name,
13
  trust_remote_code=True
@@ -15,8 +14,8 @@ tokenizer = AutoTokenizer.from_pretrained(
15
 
16
  model = AutoModelForCausalLM.from_pretrained(
17
  model_name,
18
- torch_dtype=torch.float16,
19
-
20
  trust_remote_code=True
21
  )
22
  model.eval()
@@ -25,22 +24,22 @@ model.eval()
25
  # 2. دالة التوليد
26
  # ----------------------
27
  def generate_code(prompt):
28
- input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(model.device)
29
  with torch.no_grad():
30
- output_ids = model.generate(
31
- input_ids,
32
- max_new_tokens=300,
33
- do_sample=False,
34
  temperature=0.2,
35
- repetition_penalty=1.05,
36
- use_cache=True,
37
- pad_token_id=tokenizer.eos_token_id
 
38
  )
39
- text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
40
- return text
41
 
42
  # ----------------------
43
- # 3. واجهة Gradio
44
  # ----------------------
45
  title = "StarCoder2 Flutter Code Generator"
46
  description = """
@@ -53,8 +52,10 @@ demo = gr.Interface(
53
  inputs=gr.Textbox(lines=8, placeholder="Write your Flutter prompt here..."),
54
  outputs=gr.Textbox(lines=20),
55
  title=title,
56
- description=description,
57
- allow_flagging="never"
58
  )
59
 
 
 
 
60
  demo.launch()
 
1
  import gradio as gr
2
  import torch
3
+ from transformers import AutoTokenizer, AutoModelForCausalLM
4
 
5
# ----------------------
# 1. Model selection
# ----------------------
model_name = "bigcode/starcoder2-7b"
9
 
 
10
# Load the tokenizer matching the model checkpoint.
# NOTE(review): the closing paren of this call was lost in the diff
# rendering; restored here to keep the file syntactically valid.
tokenizer = AutoTokenizer.from_pretrained(
    model_name,
    trust_remote_code=True,
)
 
14
 
15
# Load the causal-LM weights for inference.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # FP16 to reduce VRAM usage
    device_map="auto",          # automatic device placement (requires accelerate)
    trust_remote_code=True,
)
model.eval()  # inference mode: disables dropout and similar training-only layers
 
24
  # 2. دالة التوليد
25
  # ----------------------
26
def generate_code(prompt):
    """Generate code from *prompt* using greedy (deterministic) decoding.

    Args:
        prompt: Free-form text prompt sent to the model.

    Returns:
        str: The decoded output sequence. Note the decoded text includes
        the prompt prefix, since the full sequence is decoded.
    """
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=700,
            do_sample=False,  # greedy decoding
            # temperature removed: it is ignored when do_sample=False and
            # only triggers a transformers warning.
            eos_token_id=tokenizer.eos_token_id,
            # StarCoder2 defines no pad token; reuse EOS to silence the
            # "pad_token_id not set" warning.
            pad_token_id=tokenizer.eos_token_id,
            repetition_penalty=1.1,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
40
 
41
  # ----------------------
42
+ # 3. واجهة Gradio بدون allow_flagging
43
  # ----------------------
44
  title = "StarCoder2 Flutter Code Generator"
45
  description = """
 
52
  inputs=gr.Textbox(lines=8, placeholder="Write your Flutter prompt here..."),
53
  outputs=gr.Textbox(lines=20),
54
  title=title,
55
+ description=description
 
56
  )
57
 
58
# ----------------------
# 4. Launch the interface
# ----------------------
demo.launch()