Teotonix committed on
Commit
ff59cf5
·
verified ·
1 Parent(s): 00f7a72

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -28
app.py CHANGED
@@ -13,27 +13,23 @@ chat_model = AutoModelForCausalLM.from_pretrained(
13
  )
14
 
15
  def chat_fn(message, history):
16
- messages = [{"role": "user", "content": message}]
17
  prompt = tokenizer.apply_chat_template(
18
- messages,
19
  tokenize=False,
20
  add_generation_prompt=True
21
  )
22
-
23
  inputs = tokenizer(prompt, return_tensors="pt")
24
  output = chat_model.generate(
25
  **inputs,
26
  max_new_tokens=200,
 
27
  temperature=0.7
28
  )
29
  return tokenizer.decode(output[0], skip_special_tokens=True)
30
 
31
  # ---------------- IMAGE ----------------
32
- from diffusers import StableDiffusionPipeline
33
- import torch
34
-
35
  img_pipe = StableDiffusionPipeline.from_pretrained(
36
- "stabilityai/sdxl-turbo",
37
  torch_dtype=torch.float32,
38
  safety_checker=None
39
  )
@@ -47,37 +43,29 @@ def generate_image(prompt):
47
 
48
  image = img_pipe(
49
  prompt,
50
- num_inference_steps=4,
51
- guidance_scale=0.0
52
  ).images[0]
53
 
54
  return image
55
 
56
-
57
-
58
  # ---------------- UI ----------------
59
- with gr.Blocks(
60
- title="MaindAI",
61
- theme=gr.themes.Soft(primary_hue="blue")
62
- ) as app:
63
-
64
- gr.Image(
65
- value="Maindai.png",
66
- show_label=False,
67
- height=140
68
- )
69
-
70
  gr.Markdown(
71
- "<h1 style='text-align:center;color:#4da6ff;'>MaindAI</h1>"
 
 
 
 
 
72
  )
73
 
74
  with gr.Row():
75
- with gr.Column(scale=1):
76
  gr.ChatInterface(chat_fn)
77
-
78
- with gr.Column(scale=1):
79
- prompt = gr.Textbox(label="Görsel açıklaması")
80
- btn = gr.Button("🎨 Oluştur")
81
  output = gr.Image()
82
  btn.click(generate_image, prompt, output)
83
 
 
 
13
  )
14
 
15
def chat_fn(message, history):
    """Generate a single-turn chat reply for *message* with the loaded causal LM.

    Parameters:
        message: the user's latest input string.
        history: prior turns supplied by gr.ChatInterface (currently unused —
            NOTE(review): each call is stateless; confirm that is intended).

    Returns:
        The model's reply text only, with special tokens stripped.
    """
    prompt = tokenizer.apply_chat_template(
        [{"role": "user", "content": message}],
        tokenize=False,
        add_generation_prompt=True,
    )
    inputs = tokenizer(prompt, return_tensors="pt")
    output = chat_model.generate(
        **inputs,
        max_new_tokens=200,
        do_sample=True,  # sampling must be enabled for temperature to apply
        temperature=0.7,
    )
    # Bug fix: generate() returns prompt + completion in one sequence, so
    # decoding output[0] whole would echo the chat-template prompt back to
    # the user. Slice off the input tokens and decode only the new ones.
    new_tokens = output[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)
29
 
30
  # ---------------- IMAGE ----------------
 
 
 
31
  img_pipe = StableDiffusionPipeline.from_pretrained(
32
+ "runwayml/stable-diffusion-v1-5",
33
  torch_dtype=torch.float32,
34
  safety_checker=None
35
  )
 
43
 
44
  image = img_pipe(
45
  prompt,
46
+ num_inference_steps=20
 
47
  ).images[0]
48
 
49
  return image
50
 
 
 
51
# ---------------- UI ----------------
# Centered header rendered as raw HTML inside a Markdown component.
# NOTE(review): "file=Maindai.png" relies on Gradio's file-serving route —
# confirm the file is in an allowed path for the running Gradio version.
_HEADER_HTML = """
<div style="text-align:center">
<img src="file=Maindai.png" width="140"><br>
<h1 style="color:#00bfff;">MaindAI</h1>
</div>
"""

with gr.Blocks(title="MaindAI") as app:
    gr.Markdown(_HEADER_HTML)

    # Two-pane layout: chat on the left, text-to-image on the right.
    with gr.Row():
        with gr.Column():
            gr.ChatInterface(chat_fn)
        with gr.Column():
            prompt = gr.Textbox(label="🎨 Görsel Prompt")
            btn = gr.Button("Oluştur")
            output = gr.Image()
            # Clicking the button feeds the textbox value to generate_image
            # and shows the returned PIL image.
            btn.click(generate_image, prompt, output)

app.launch()