JanadaSroor committed on
Commit
9b90c30
·
1 Parent(s): 7f5e4f2

Demo version

Browse files
Files changed (1) hide show
  1. app.py +44 -50
app.py CHANGED
@@ -3,61 +3,52 @@ import zipfile
3
  import io
4
  import os
5
  from datetime import datetime
6
- from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
7
- import torch
8
-
9
# Module-level cache for the lazily-loaded model/tokenizer pair;
# both are populated on first use by load_model().
model = None
tokenizer = None
12
-
13
def load_model():
    """Lazily load the small causal-LM used for code generation.

    Populates the module-level ``model`` and ``tokenizer`` globals on the
    first call; later calls are no-ops once the model is loaded.

    Returns:
        True if the model is (now) loaded, False if loading failed.
    """
    global model, tokenizer
    if model is None:
        try:
            # distilgpt2 is small enough to run on the Hugging Face
            # Spaces free tier.
            model_name = "distilgpt2"

            print("Loading AI model...")
            tokenizer = AutoTokenizer.from_pretrained(model_name)
            model = AutoModelForCausalLM.from_pretrained(model_name)

            # GPT-2 has no dedicated padding token; reuse EOS so that
            # padded batches tokenize without errors.
            tokenizer.pad_token = tokenizer.eos_token

            print("Model loaded successfully!")
        except Exception as e:
            # Reset both globals so a half-initialized state (e.g. the
            # tokenizer loaded but the model download failed) is not left
            # behind for later callers.
            model = None
            tokenizer = None
            print(f"Error loading model: {e}")
            return False
    return True
33
 
34
- def generate_code_with_ai(prompt, max_length=500):
35
- """Generate code using the AI model."""
36
- if not load_model():
37
- return "// Error: Could not load AI model\n// Using fallback template instead"
38
 
 
 
39
  try:
40
  # Create a more specific prompt for Flutter/Dart code
41
- full_prompt = f"Write Flutter Dart code for: {prompt}\n\n"
 
 
 
 
 
 
42
 
43
- inputs = tokenizer(full_prompt, return_tensors="pt", padding=True, truncation=True, max_length=100)
 
44
 
45
- with torch.no_grad():
46
- outputs = model.generate(
47
- inputs["input_ids"],
48
- attention_mask=inputs["attention_mask"],
49
- max_length=max_length,
50
- num_return_sequences=1,
51
- temperature=0.7,
52
- do_sample=True,
53
- pad_token_id=tokenizer.eos_token_id,
54
- eos_token_id=tokenizer.eos_token_id,
55
- )
56
 
57
- generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
 
 
 
 
 
 
 
 
 
58
 
59
- # Extract the code part (remove the prompt)
60
- code = generated_text[len(full_prompt):].strip()
 
 
 
 
 
 
 
 
 
 
61
 
62
  # Basic cleanup - try to extract Dart-like code
63
  if "```dart" in code:
@@ -65,11 +56,14 @@ def generate_code_with_ai(prompt, max_length=500):
65
  elif "```" in code:
66
  code = code.split("```")[1].split("```")[0]
67
 
 
 
 
68
  return code if code else "// AI-generated placeholder code"
69
 
70
  except Exception as e:
71
  print(f"Error generating code with AI: {e}")
72
- return "// Error generating code\n// Using fallback template"
73
 
74
  def generate_flutter_code(description):
75
  """
@@ -721,10 +715,10 @@ def create_interface():
721
  - **README.md** - Setup and usage instructions
722
 
723
  ### ⚠️ Important Notes:
724
- - This uses a **small AI model** (DistilGPT-2) running directly on Hugging Face Spaces
725
  - The AI generates Flutter/Dart code based on your description
726
  - Always review and test generated code before production use
727
- - The model may produce varying quality results - iterate on your descriptions
728
  - Customize the generated code to match your specific requirements
729
 
730
  ### 🔧 Next Steps After Download:
 
3
  import io
4
  import os
5
  from datetime import datetime
6
+ import requests
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
 
8
# Hugging Face Inference API endpoint and (optional) access token used by
# generate_code_with_ai(). The token is injected as a Space secret;
# unauthenticated requests still work but are rate-limited.
HF_API_URL = "https://api-inference.huggingface.co/models/microsoft/DialoGPT-small"
HF_TOKEN = os.environ.get("HF_TOKEN")  # Will be set in Hugging Face Spaces
 
11
 
12
def generate_code_with_ai(prompt, max_length=500):
    """Generate Flutter/Dart code for *prompt* via the Hugging Face Inference API.

    Args:
        prompt: Natural-language description of the code to generate.
        max_length: Upper bound on the generated sequence length forwarded
            to the API (default 500).

    Returns:
        The extracted Dart code as a string, a placeholder comment when the
        model returns nothing usable, or a fallback comment string when the
        API call fails for any reason.
    """
    try:
        # Steer the model toward Flutter/Dart; the trailing ```dart fence
        # invites the model to continue inside a code block.
        full_prompt = f"""Write clean Flutter Dart code for this requirement: {prompt}

Generate proper Dart classes, methods, and Flutter widgets. Focus on:
- Clean, readable code
- Proper Flutter/Dart syntax
- Material Design patterns
- Error handling where appropriate

```dart
"""

        # Anonymous requests are allowed (rate-limited); only attach the
        # Authorization header when a token is configured.
        headers = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}

        payload = {
            "inputs": full_prompt,
            "parameters": {
                "max_length": max_length,
                "temperature": 0.7,
                "do_sample": True,
                "return_full_text": False,  # return only the completion, not the prompt
                "num_return_sequences": 1,
            }
        }

        # timeout= prevents the UI from hanging forever when the inference
        # endpoint is cold-starting or unreachable.
        response = requests.post(HF_API_URL, headers=headers, json=payload, timeout=60)
        response.raise_for_status()

        result = response.json()

        # Successful generations come back as [{"generated_text": ...}];
        # anything else (e.g. an error dict) is stringified as a last resort.
        if isinstance(result, list) and len(result) > 0:
            generated_text = result[0].get("generated_text", "")
        else:
            generated_text = str(result)

        # Clean up the generated text
        code = generated_text.strip()

        # Basic cleanup - try to extract Dart-like code.
        # NOTE(review): the ```dart branch mirrors the generic ``` branch
        # below — confirm against the full file, this line was not visible
        # in the reviewed excerpt.
        if "```dart" in code:
            code = code.split("```dart")[1].split("```")[0]
        elif "```" in code:
            code = code.split("```")[1].split("```")[0]

        # Remove any remaining markdown formatting
        code = code.replace("```", "").strip()

        return code if code else "// AI-generated placeholder code"

    except Exception as e:
        # Deliberate best-effort boundary: any failure (network, JSON,
        # missing config) degrades to a fallback comment instead of crashing.
        print(f"Error generating code with AI: {e}")
        return "// Error: AI service unavailable\n// Using fallback template"
 
68
  def generate_flutter_code(description):
69
  """
 
715
  - **README.md** - Setup and usage instructions
716
 
717
  ### ⚠️ Important Notes:
718
+ - This uses the **Hugging Face Inference API** for AI-powered code generation
719
  - The AI generates Flutter/Dart code based on your description
720
  - Always review and test generated code before production use
721
+ - The AI service may occasionally be slow or unavailable - try again if needed
722
  - Customize the generated code to match your specific requirements
723
 
724
  ### 🔧 Next Steps After Download: