Nehal721 committed on
Commit
2770a31
·
verified ·
1 Parent(s): caa3e59

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -14
app.py CHANGED
@@ -2,6 +2,7 @@ from flask import Flask, request, jsonify
2
  import torch
3
  from transformers import pipeline
4
  import os
 
5
 
6
  # Model ka naam
7
  model_name = "distilgpt2"
@@ -12,8 +13,8 @@ try:
12
  pipe = pipeline(
13
  "text-generation",
14
  model=model_name,
15
- torch_dtype=torch.bfloat16,
16
- device_map="auto"
17
  )
18
  print("✅ Model loaded successfully!")
19
  except Exception as e:
@@ -42,24 +43,30 @@ def generate_content():
42
  data = request.get_json()
43
  if not data or 'step_text' not in data:
44
  return jsonify({"error": "step_text is missing from the request body"}), 400
45
-
46
  step_text = data.get('step_text')
47
 
48
  try:
49
  final_prompt = meta_prompt_template.format(step_text=step_text)
50
-
51
  outputs = pipe(final_prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
52
-
53
  generated_text = outputs[0]['generated_text']
54
-
55
- # Model ke output se saaf JSON nikalna
56
- json_part = generated_text[generated_text.find('{'):generated_text.rfind('}')+1]
57
-
58
- # String ko JSON mein convert karna
59
- response_data = eval(json_part)
60
-
61
- return jsonify(response_data)
62
-
 
 
 
 
 
 
63
  except Exception as e:
64
  print(f"Error during generation: {e}")
65
  return jsonify({"error": str(e)}), 500
 
2
  import torch
3
  from transformers import pipeline
4
  import os
5
+ import json # <-- Humne yeh nayi library import ki hai
6
 
7
  # Model ka naam
8
  model_name = "distilgpt2"
 
13
  pipe = pipeline(
14
  "text-generation",
15
  model=model_name,
16
+ torch_dtype=torch.float32, # CPU ke liye float32 behtar hai
17
+ device=-1 # CPU par run karne ke liye force karna
18
  )
19
  print("✅ Model loaded successfully!")
20
  except Exception as e:
 
43
  data = request.get_json()
44
  if not data or 'step_text' not in data:
45
  return jsonify({"error": "step_text is missing from the request body"}), 400
46
+
47
  step_text = data.get('step_text')
48
 
49
  try:
50
  final_prompt = meta_prompt_template.format(step_text=step_text)
51
+
52
  outputs = pipe(final_prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
53
+
54
  generated_text = outputs[0]['generated_text']
55
+
56
+ # Naya, behtar tareeka JSON nikalne ka
57
+ start_index = generated_text.find('{')
58
+ end_index = generated_text.rfind('}') + 1
59
+
60
+ if start_index != -1 and end_index != -1:
61
+ json_part = generated_text[start_index:end_index]
62
+ # String ko JSON mein convert karna
63
+ response_data = json.loads(json_part)
64
+ return jsonify(response_data)
65
+ else:
66
+ return jsonify({"error": "Failed to find a valid JSON object in the model's response.", "raw_response": generated_text}), 500
67
+
68
+ except json.JSONDecodeError:
69
+ return jsonify({"error": "Model returned a malformed JSON string.", "raw_response": generated_text}), 500
70
  except Exception as e:
71
  print(f"Error during generation: {e}")
72
  return jsonify({"error": str(e)}), 500