to0ony committed on
Commit
aff8dfb
·
1 Parent(s): 44a3850
Files changed (1) hide show
  1. app.py +6 -17
app.py CHANGED
@@ -1,8 +1,8 @@
1
- import gc, json, torch, gradio as gr, time
2
  from huggingface_hub import hf_hub_download
3
  import tiktoken
4
 
5
- from mingpt.model import GPT
6
 
7
  DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
8
  REPO_ID = "to0ony/final-thesis-plotgen"
@@ -21,6 +21,7 @@ def load_model():
21
 
22
  gcfg = GPT.get_default_config()
23
  gcfg.model_type = None
 
24
  gcfg.vocab_size = int(cfg["vocab_size"])
25
  gcfg.block_size = int(cfg["block_size"])
26
  gcfg.n_layer = int(cfg["n_layer"])
@@ -36,6 +37,7 @@ def load_model():
36
  state["model"] = model
37
  return model
38
 
 
39
  @torch.inference_mode()
40
  def generate(prompt, max_new_tokens=200, temperature=0.9, top_k=50):
41
  """Generiranje teksta iz prompta"""
@@ -53,18 +55,6 @@ def generate(prompt, max_new_tokens=200, temperature=0.9, top_k=50):
53
 
54
  return enc.decode(y[0].tolist())
55
 
56
- def typewriter(prompt, max_new_tokens=200, temperature=0.9, top_k=50, delay=0.02):
57
- """
58
- UI-streaming: pozove generate() jednom, a zatim ispisuje
59
- rezultat riječ-po-riječ radi efekta tipkanja.
60
- """
61
- full = generate(prompt, max_new_tokens, temperature, top_k)
62
- acc = []
63
- for word in full.split(" "):
64
- acc.append(word)
65
- yield " ".join(acc)
66
- time.sleep(delay)
67
-
68
  # Gradio UI
69
  with gr.Blocks(title="🎬 Final Thesis Plot Generator") as demo:
70
  gr.Markdown("## 🎬 Film Plot Generator\nUnesi prompt i generiraj radnju filma.")
@@ -76,8 +66,7 @@ with gr.Blocks(title="🎬 Final Thesis Plot Generator") as demo:
76
  btn = gr.Button("Generate")
77
  output = gr.Textbox(label="Output", lines=15)
78
 
79
- # koristimo typewriter za streaming outputa; generate() ostaje netaknut
80
- btn.click(typewriter, [prompt, max_new_tokens, temperature, top_k], output)
81
 
82
  if __name__ == "__main__":
83
- demo.queue().launch()
 
1
+ import gc, json, torch, gradio as gr
2
  from huggingface_hub import hf_hub_download
3
  import tiktoken
4
 
5
+ from mingpt.model import GPT
6
 
7
  DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
8
  REPO_ID = "to0ony/final-thesis-plotgen"
 
21
 
22
  gcfg = GPT.get_default_config()
23
  gcfg.model_type = None
24
+
25
  gcfg.vocab_size = int(cfg["vocab_size"])
26
  gcfg.block_size = int(cfg["block_size"])
27
  gcfg.n_layer = int(cfg["n_layer"])
 
37
  state["model"] = model
38
  return model
39
 
40
+
41
  @torch.inference_mode()
42
  def generate(prompt, max_new_tokens=200, temperature=0.9, top_k=50):
43
  """Generiranje teksta iz prompta"""
 
55
 
56
  return enc.decode(y[0].tolist())
57
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  # Gradio UI
59
  with gr.Blocks(title="🎬 Final Thesis Plot Generator") as demo:
60
  gr.Markdown("## 🎬 Film Plot Generator\nUnesi prompt i generiraj radnju filma.")
 
66
  btn = gr.Button("Generate")
67
  output = gr.Textbox(label="Output", lines=15)
68
 
69
+ btn.click(generate, [prompt, max_new_tokens, temperature, top_k], output)
 
70
 
71
  if __name__ == "__main__":
72
+ demo.launch()