chmawia committed on
Commit
60c2e3a
·
verified ·
1 Parent(s): 19c7c87

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -12
app.py CHANGED
@@ -1,12 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import gradio as gr
2
  from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
3
  import torch
4
  import subprocess
5
 
6
- # Load a code-generation model with support for multiple languages
 
7
  MODEL_NAME = "Salesforce/codet5-small"
 
8
  tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
9
- model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
10
 
11
  def generate_code(description, language):
12
  prompt = f"Generate {language} code: {description}"
@@ -29,7 +48,7 @@ def generate_and_execute(description, language):
29
  output = execute_code(code, language) if language == "Python" else "Execution not supported for this language."
30
  return code, output
31
 
32
- # Create a Gradio interface with language selection and execution
33
  iface = gr.Interface(
34
  fn=generate_and_execute,
35
  inputs=[
@@ -38,12 +57,4 @@ iface = gr.Interface(
38
  ],
39
  outputs=[gr.Code(label="Generated Code"), gr.Textbox(label="Execution Output")],
40
  title="Multi-Language Text-to-Code AI",
41
- description="Convert natural language descriptions into code in different programming languages! Run Python code directly in the app.",
42
- theme="default",
43
- allow_flagging="never",
44
- live=True
45
- )
46
-
47
- # Launch the app
48
- if __name__ == "__main__":
49
- iface.launch(share=True)
 
1
+ Let's go step by step to fix your issue. Follow these instructions carefully:
2
+
3
+ ---
4
+
5
+ ### **🔹 1. Restart Your Hugging Face Space**
6
+ - **Go to your Hugging Face Space.**
7
+ - Click **“Settings”** → Click **“Restart Space”**.
8
+ - If it still doesn’t work, try **“Factory Reboot”** (this will reset everything).
9
+
10
+ ---
11
+
12
+ ### **🔹 2. Modify Your Code**
13
+ **Update your model loading code** to make sure it runs properly. Open your `app.py` file and replace the following lines:
14
+
15
+ #### ✅ **Fixed Code:**
16
+ ```python
17
+ import os
18
  import gradio as gr
19
  from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
20
  import torch
21
  import subprocess
22
 
23
+ # Force CPU usage & prevent model download issues
24
+ os.environ["HF_HOME"] = "./cache" # Store model locally
25
  MODEL_NAME = "Salesforce/codet5-small"
26
+
27
  tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
28
+ model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME, device_map="cpu")
29
 
30
  def generate_code(description, language):
31
  prompt = f"Generate {language} code: {description}"
 
48
  output = execute_code(code, language) if language == "Python" else "Execution not supported for this language."
49
  return code, output
50
 
51
+ # Fix Gradio interface loading issue
52
  iface = gr.Interface(
53
  fn=generate_and_execute,
54
  inputs=[
 
57
  ],
58
  outputs=[gr.Code(label="Generated Code"), gr.Textbox(label="Execution Output")],
59
  title="Multi-Language Text-to-Code AI",
60
+ description="Convert natural language descriptions into code in different programming languages! Run Python code