AiCoderv2 committed on
Commit
7d75366
·
verified ·
1 Parent(s): d6d07f7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -9
app.py CHANGED
@@ -3,20 +3,30 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
3
  import torch
4
  import warnings
5
 
6
- # Suppress the specific warning
7
  warnings.filterwarnings("ignore", category=FutureWarning, module="huggingface_hub")
 
8
 
9
- # Download and load the model
10
- model_name = "baidu/ERNIE-4.5-21B-A3B-Thinking"
11
- tokenizer = AutoTokenizer.from_pretrained(model_name)
12
- model = AutoModelForCausalLM.from_pretrained(
13
- model_name,
14
- torch_dtype=torch.bfloat16,
15
- device_map="auto"
16
- )
 
 
 
 
 
 
17
 
18
  def generate_code(prompt):
19
  """Generate HTML code using ERNIE model"""
 
 
 
20
  full_prompt = f"Create a complete single HTML file with embedded CSS and JavaScript for: {prompt}. Return only valid HTML code."
21
 
22
  inputs = tokenizer(full_prompt, return_tensors="pt").to("cuda")
@@ -41,6 +51,9 @@ def generate_code(prompt):
41
 
42
  def improve_code(description, current_code):
43
  """Improve existing code"""
 
 
 
44
  prompt = f"Improve this HTML code based on: {description}\n\nCurrent code:\n{current_code}\n\nReturn only the improved HTML code."
45
 
46
  inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
 
import torch
import warnings

# Silence known-noisy deprecation/user warnings from the HF stack.
warnings.filterwarnings("ignore", category=FutureWarning, module="huggingface_hub")
warnings.filterwarnings("ignore", category=UserWarning, module="transformers")

# Load the ERNIE model once at import time. If anything goes wrong
# (network, OOM, missing weights), record the failure instead of
# crashing the app; callers check `model_loaded` before generating.
try:
    # Download and load the model
    model_name = "baidu/ERNIE-4.5-21B-A3B-Thinking"
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.bfloat16,
        device_map="auto",
        trust_remote_code=True
    )
    model_loaded = True
except Exception as e:
    print(f"Model loading failed: {e}")
    model_loaded = False
24
 
25
  def generate_code(prompt):
26
  """Generate HTML code using ERNIE model"""
27
+ if not model_loaded:
28
+ return "<!-- Model not available -->\n<h1>Model Loading Failed</h1>"
29
+
30
  full_prompt = f"Create a complete single HTML file with embedded CSS and JavaScript for: {prompt}. Return only valid HTML code."
31
 
32
  inputs = tokenizer(full_prompt, return_tensors="pt").to("cuda")
 
51
 
52
  def improve_code(description, current_code):
53
  """Improve existing code"""
54
+ if not model_loaded:
55
+ return current_code
56
+
57
  prompt = f"Improve this HTML code based on: {description}\n\nCurrent code:\n{current_code}\n\nReturn only the improved HTML code."
58
 
59
  inputs = tokenizer(prompt, return_tensors="pt").to("cuda")