yukee1992 committed on
Commit
c3b0435
·
verified ·
1 Parent(s): ba548c2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -95
app.py CHANGED
@@ -2,8 +2,6 @@ from flask import Flask, request, jsonify
2
  from flask_cors import CORS
3
  import logging
4
  import os
5
- from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
6
- import torch
7
 
8
  app = Flask(__name__)
9
  CORS(app)
@@ -12,124 +10,67 @@ CORS(app)
12
  logging.basicConfig(level=logging.INFO)
13
  logger = logging.getLogger(__name__)
14
 
15
# Lazily-initialized model state.  Loading happens on the first request
# rather than at import time so the server can start (and pass health
# checks) before the weights are downloaded.
model_name = "google/gemma-1b-it"
tokenizer = None
model = None
generator = None

def initialize_model():
    """Load the tokenizer, model, and text-generation pipeline once.

    Idempotent: returns immediately after a successful load.  Requires
    the HF_TOKEN environment variable (gated model access).

    Raises:
        ValueError: if HF_TOKEN is not set.
        Exception: re-raised from any underlying load failure.
    """
    global tokenizer, model, generator

    # Guard on the LAST artifact created, not the first: the original
    # checked `tokenizer is None`, so if the tokenizer loaded but the
    # model load crashed, every later call skipped re-initialization and
    # left `generator` permanently None.
    if generator is not None:
        return

    try:
        # Get the Hugging Face token from the environment.
        hf_token = os.environ.get('HF_TOKEN')
        if not hf_token:
            logger.error("HF_TOKEN environment variable not set!")
            raise ValueError("Hugging Face token not configured")

        logger.info("Loading tokenizer...")
        tokenizer = AutoTokenizer.from_pretrained(
            model_name,
            token=hf_token
        )

        logger.info("Loading model...")
        model = AutoModelForCausalLM.from_pretrained(
            model_name,
            torch_dtype=torch.float16,
            device_map="auto",
            token=hf_token
        )

        logger.info("Creating pipeline...")
        generator = pipeline(
            "text-generation",
            model=model,
            tokenizer=tokenizer,
            device=0 if torch.cuda.is_available() else -1
        )

        logger.info("Model loaded successfully!")

    except Exception as e:
        # Reset partial state so the next request retries a clean load
        # instead of inheriting a half-initialized tokenizer/model pair.
        tokenizer = model = generator = None
        logger.error(f"Error loading model: {str(e)}")
        raise
60
 
61
@app.route('/generate_script', methods=['POST', 'GET'])
def generate_script():
    """Generate a 1-minute video script for the first requested topic.

    GET  -> returns a usage hint.
    POST -> expects a JSON body {"topics": ["topic1", ...]} and returns
            the generated script for topics[0].

    Returns 400 when no topics are supplied, 500 on any generation
    failure (the error text is echoed back in the JSON body).
    """
    try:
        logger.info("Received request to /generate_script")

        if request.method == 'GET':
            return jsonify({"message": "Use POST method with {'topics': ['topic1', 'topic2']}"})

        # get_json(silent=True) returns None instead of raising when the
        # body is missing or not JSON; the original `request.json` left
        # `data` as None and crashed with AttributeError on `.get` below.
        data = request.get_json(silent=True) or {}
        logger.info(f"Request data: {data}")

        topics = data.get('topics', [])

        if not topics:
            return jsonify({"error": "No topics provided"}), 400

        # Initialize model (loads lazily on the first request).
        initialize_model()

        # Prompt for the Gemma instruction-tuned model.
        prompt = f"""Create a viral 1-minute video script about {topics[0]}.

REQUIREMENTS:
- Duration: Exactly 1 minute
- Include: Golden hook, valuable content, CTA
- Virtual descriptions for each scene
- Voiceover instructions for each scene
- Make it engaging and viral-worthy

SCRIPT STRUCTURE:
"""

        logger.info("Generating script...")
        result = generator(
            prompt,
            max_length=1024,
            temperature=0.8,
            do_sample=True,
            top_p=0.9,
            num_return_sequences=1
        )

        generated_text = result[0]['generated_text']

        # Strip only the LEADING prompt echo.  The original used
        # `str.replace(prompt, "")`, which would also delete any later
        # repetition of the prompt text inside the generated body.
        if generated_text.startswith(prompt):
            script = generated_text[len(prompt):].strip()
        else:
            script = generated_text.strip()

        logger.info("Script generated successfully!")

        return jsonify({
            "topic": topics[0],
            "script": script,
            "full_topics": topics,
            "status": "success"
        })

    except Exception as e:
        logger.error(f"Error generating script: {str(e)}")
        return jsonify({"error": str(e)}), 500
121
 
122
@app.route('/health', methods=['GET'])
def health():
    """Liveness probe: reports server status and whether the model loaded."""
    payload = {
        "status": "healthy",
        "message": "Server is running",
        "model_loaded": tokenizer is not None,
    }
    return jsonify(payload)
129
-
130
@app.route('/test', methods=['GET'])
def test():
    """Trivial endpoint used to verify that routing works at all."""
    response = {"message": "Test endpoint working!"}
    return jsonify(response)
133
 
134
  if __name__ == '__main__':
135
  port = int(os.environ.get('PORT', 7860))
 
2
  from flask_cors import CORS
3
  import logging
4
  import os
 
 
5
 
6
  app = Flask(__name__)
7
  CORS(app)
 
10
  logging.basicConfig(level=logging.INFO)
11
  logger = logging.getLogger(__name__)
12
 
13
@app.route('/debug', methods=['GET'])
def debug():
    """Report whether HF_TOKEN is configured WITHOUT revealing secrets.

    SECURITY: this is an unauthenticated endpoint, so the response must
    never contain credential material.  The previous version leaked a
    10-character token prefix and the FULL VALUES of every environment
    variable whose name contains TOKEN/KEY/SECRET.  We now expose only
    existence, length, and the NAMES of secret-like variables.
    """
    # Check if HF_TOKEN is accessible.
    hf_token = os.environ.get('HF_TOKEN')

    # Names only — never the values.
    secret_like_names = sorted(
        k for k in os.environ
        if 'TOKEN' in k or 'KEY' in k or 'SECRET' in k
    )

    return jsonify({
        "HF_TOKEN_exists": hf_token is not None,
        "HF_TOKEN_length": len(hf_token) if hf_token else 0,
        "secret_like_env_var_names": secret_like_names
    })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
 
26
@app.route('/generate_script', methods=['POST'])
def generate_script():
    """Return a mock 1-minute video script for the first requested topic.

    Expects a JSON body {"topics": ["topic1", ...]}.  Real model
    generation is stubbed out while the HF token setup is being
    debugged; the response also confirms the token was found.

    Returns 500 if HF_TOKEN is missing, 400 if no topics are supplied.
    """
    try:
        # First, check the token and fail fast with a clear message.
        hf_token = os.environ.get('HF_TOKEN')

        if not hf_token:
            return jsonify({
                "error": "HF_TOKEN not found in environment variables",
                "message": "Please set HF_TOKEN in your Space settings"
            }), 500

        # silent=True yields None (not an exception) for a missing or
        # non-JSON body; the original `request.json` left `data` as None
        # and crashed with AttributeError on `.get` below.
        data = request.get_json(silent=True) or {}
        topics = data.get('topics', [])

        if not topics:
            return jsonify({"error": "No topics provided"}), 400

        # Mock response until real generation is wired back in.
        mock_script = f"""🎯 GOLDEN HOOK: "Discover how {topics[0]} is changing everything!"

📝 VALUABLE CONTENT:
- Scene 1: Show stunning visuals of {topics[0]} in action
- Scene 2: Explain 3 key benefits in simple terms
- Scene 3: Share real-world examples and success stories

🎬 CTA: "Like and follow for more insights on {topics[0]}!"

Duration: Exactly 60 seconds
🎤 Voiceover: Energetic and engaging tone
📱 Optimized for: TikTok/YouTube Shorts"""

        return jsonify({
            "topic": topics[0],
            "script": mock_script,
            "full_topics": topics,
            "status": "success",
            "token_status": "found",
            "token_length": len(hf_token)
        })

    except Exception as e:
        # logger.exception records the full traceback, which
        # logger.error(f"...") silently dropped.
        logger.exception("Error: %s", str(e))
        return jsonify({"error": str(e)}), 500
70
 
71
@app.route('/health', methods=['GET'])
def health():
    """Simple liveness probe."""
    payload = {"status": "healthy", "message": "Server is running"}
    return jsonify(payload)
 
 
 
 
 
 
 
 
74
 
75
  if __name__ == '__main__':
76
  port = int(os.environ.get('PORT', 7860))