divython committed on
Commit
61fa9dd
·
verified ·
1 Parent(s): 0b6a906

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -14
app.py CHANGED
@@ -1,24 +1,33 @@
1
- # UI Generator Hugging Face Space - MVP
2
- # Requirements: gradio, transformers, torch
3
 
4
  import gradio as gr
5
- from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 
6
 
7
- # Load an open-source code generation model (you can swap with DeepSeek-Coder, CodeLlama, etc.)
8
- model_id = "Salesforce/codegen-350M-mono"
 
 
 
 
 
 
9
  tokenizer = AutoTokenizer.from_pretrained(model_id)
10
- model = AutoModelForCausalLM.from_pretrained(model_id)
11
- generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
 
12
 
13
  def generate_ui(platform, framework, ui_prompt):
14
  prompt = f"""
15
- You are an expert {framework} developer.
16
- Generate full code for a {platform} UI screen based on the following description:
17
- "{ui_prompt}"
18
- Please include necessary imports and use best practices.
19
- """
20
- output = generator(prompt, max_length=512, do_sample=True, temperature=0.7)[0]["generated_text"]
21
- return output.strip()
 
22
 
23
  interface = gr.Interface(
24
  fn=generate_ui,
 
1
# Updated UI Generator with Better Open-Source Model

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, TextGenerationPipeline
import torch

# Instruct-tuned code-generation model; swap the id for another coder model if desired.
model_id = "deepseek-ai/deepseek-coder-6.7b-instruct"

# Pipeline device index: first CUDA device when available, otherwise CPU (-1).
device = 0 if torch.cuda.is_available() else -1

# Load tokenizer and weights — half precision on GPU, full precision on CPU.
print("Loading model...")
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
)

# Shared text-generation pipeline used by the Gradio handler below.
generator = TextGenerationPipeline(model=model, tokenizer=tokenizer, device=device)
21
  def generate_ui(platform, framework, ui_prompt):
22
  prompt = f"""
23
+ You are an expert mobile app developer.
24
+ Generate a complete {framework} UI code snippet for a {platform} app based on the description:
25
+ "{ui_prompt}"
26
+ Include all required imports, a main method, and best practices for UI structure.
27
+ """
28
+ response = generator(prompt, max_new_tokens=512, do_sample=True, temperature=0.7)[0]['generated_text']
29
+ # Trim the echoed prompt and just return the generated code
30
+ return response.split(""""""")[-1].strip()
31
 
32
  interface = gr.Interface(
33
  fn=generate_ui,