SanaAdeel commited on
Commit
b16a3cd
·
verified ·
1 Parent(s): 89512a6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +63 -43
app.py CHANGED
@@ -7,7 +7,6 @@ from datetime import datetime
7
  # Load secrets
8
  PAYWALLS_API_KEY = os.environ.get("PAYWALLS_API_KEY")
9
  PAYWALLS_API_URL = os.environ.get("PAYWALLS_API_URL", "https://api.paywalls.ai/v1")
10
- HF_MODEL = "openai/gpt-oss-20b"
11
 
12
  # Free tier limits
13
  FREE_MAX_TOKENS = 150
@@ -23,80 +22,102 @@ STYLE_PRESETS = {
23
  }
24
 
25
  def check_paywall(user_id: str):
 
26
  headers = {
27
- "Authorization": f"Bearer {PAYWALLS_API_KEY}",
28
- "X-Paywall-User": user_id
29
  }
30
  try:
31
- resp = requests.get(f"{PAYWALLS_API_URL}/user/connect?user={user_id}", headers=headers, timeout=10)
32
  resp.raise_for_status()
33
  data = resp.json()
34
- return data.get("connected", False), data.get('url', '')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
35
  except Exception as e:
36
- print(f"[Paywall] Error: {e}")
37
- return False, ""
38
 
39
- def chat_completion(user_id, system_message, history, user_message, max_tokens, temperature, top_p):
 
40
  headers = {
41
  "Authorization": f"Bearer {PAYWALLS_API_KEY}",
42
- "Content-Type": "application/json",
43
- "X-Paywall-User": user_id
44
  }
45
 
46
  messages = [{"role": "system", "content": system_message}] + history + [{"role": "user", "content": user_message}]
 
 
47
  payload = {
48
- "model": HF_MODEL,
49
  "messages": messages,
50
- "stream": False,
51
- "user": user_id,
52
  "max_tokens": max_tokens,
53
  "temperature": temperature,
54
- "top_p": top_p
 
55
  }
56
 
57
  try:
58
- response = requests.post(f"{PAYWALLS_API_URL}/chat/completions", headers=headers, json=payload, timeout=60)
 
 
 
 
 
59
  response.raise_for_status()
60
  data = response.json()
61
- choices = data.get("choices", [])
62
- if choices:
63
- return choices[0]["message"]["content"]
64
- return "No response from model."
 
 
 
 
65
  except Exception as e:
66
- print(f"[Paywall] Error: {e}")
67
- return f"Error: {e}"
68
 
69
  def respond(message, history, style, max_tokens, temperature, top_p):
70
- user_id = "demo_user"
71
  is_premium, paywall_url = check_paywall(user_id)
72
 
 
73
  if not is_premium:
74
  max_tokens = min(max_tokens, FREE_MAX_TOKENS)
75
  if style != "neutral":
76
  return history + [{"role": "assistant", "content": f"🔒 Style customization requires premium. [Unlock here]({paywall_url})"}]
77
-
78
- # FREE TIER: Use direct Hugging Face API
79
- try:
80
- from huggingface_hub import InferenceClient
81
- client = InferenceClient()
82
- reply = client.chat_completion(
83
- messages=[{"role": "system", "content": STYLE_PRESETS["neutral"]}] + history + [{"role": "user", "content": message}],
84
- max_tokens=max_tokens,
85
- temperature=temperature
86
- ).choices[0].message.content
87
- return history + [{"role": "user", "content": message}, {"role": "assistant", "content": f"{reply}\n\n---\n_🆓 FREE_"}]
88
- except Exception as e:
89
- print(f"[Free Tier] Error: {e}")
90
- return history + [{"role": "assistant", "content": f"⚠️ Service temporarily unavailable. Error: {e}"}]
91
 
92
- # PREMIUM: Use Paywalls API
93
  system_message = STYLE_PRESETS.get(style, STYLE_PRESETS["neutral"])
94
- reply = chat_completion(user_id, system_message, history, message, max_tokens, temperature, top_p)
95
 
96
- if "Error:" in reply:
97
- return history + [{"role": "assistant", "content": f"⚠️ {reply}"}]
 
 
 
 
 
98
 
99
- return history + [{"role": "user", "content": message}, {"role": "assistant", "content": f"{reply}\n\n---\n_✨ PREMIUM_"}]
 
100
 
101
  def export_conversation(history):
102
  user_id = "demo_user"
@@ -148,5 +169,4 @@ with gr.Blocks(title="Creative Writing Assistant") as demo:
148
  export_btn.click(export_conversation, [chatbot], [download_file, export_status])
149
 
150
  if __name__ == "__main__":
151
- demo.launch()
152
- ```
 
7
  # Load secrets
8
  PAYWALLS_API_KEY = os.environ.get("PAYWALLS_API_KEY")
9
  PAYWALLS_API_URL = os.environ.get("PAYWALLS_API_URL", "https://api.paywalls.ai/v1")
 
10
 
11
  # Free tier limits
12
  FREE_MAX_TOKENS = 150
 
22
  }
23
 
24
def check_paywall(user_id: str):
    """Return ``(is_premium, paywall_url)`` for *user_id*.

    Queries the Paywalls API for the user's subscription status. On any
    failure the user is treated as free tier so a paywall outage degrades
    the experience instead of taking the app down.

    Returns:
        tuple[bool, str]: whether the user has an active subscription, and
        the URL where they can connect/upgrade.
    """
    # Build the connect URL once (it was previously duplicated in the
    # success and failure paths).
    # NOTE(review): this embeds PAYWALLS_API_KEY in a link shown to end
    # users — confirm Paywalls expects a *publishable* key here; a secret
    # key must never be exposed client-side.
    paywall_url = f"https://paywalls.ai/connect?api_key={PAYWALLS_API_KEY}&user={user_id}"
    headers = {
        "Authorization": f"Bearer {PAYWALLS_API_KEY}"
    }
    try:
        resp = requests.get(f"{PAYWALLS_API_URL}/users/{user_id}", headers=headers, timeout=10)
        resp.raise_for_status()
        data = resp.json()
        # Either a "status": "active" field or a legacy "connected" flag
        # marks the user as premium.
        is_connected = data.get("status") == "active" or data.get("connected", False)
        return is_connected, paywall_url
    except Exception as e:
        # Fail open to the free tier rather than crashing the chat handler.
        print(f"[Paywall] Check error: {e}")
        return False, paywall_url
40
+
41
def free_tier_generation(system_message, history, user_message, max_tokens, temperature):
    """Free tier using Hugging Face Inference API"""
    try:
        # Imported lazily so the premium-only path never needs the package.
        from huggingface_hub import InferenceClient

        convo = [{"role": "system", "content": system_message}]
        convo += history
        convo.append({"role": "user", "content": user_message})

        completion = InferenceClient().chat_completion(
            messages=convo,
            max_tokens=max_tokens,
            temperature=temperature
        )
        return completion.choices[0].message.content
    except Exception as e:
        # Best-effort: surface the failure as the reply text.
        print(f"[Free Tier] Error: {e}")
        return f"⚠️ Free tier service error: {str(e)}"
58
 
59
def premium_generation(user_id, system_message, history, user_message, max_tokens, temperature, top_p):
    """Premium tier using Paywalls proxy"""
    endpoint = f"{PAYWALLS_API_URL}/chat/completions"
    headers = {
        "Authorization": f"Bearer {PAYWALLS_API_KEY}",
        "Content-Type": "application/json",
    }

    # Using Paywalls proxy endpoint - adjust model based on your Paywalls setup
    payload = {
        "model": "gpt-3.5-turbo",  # Use model available in your Paywalls account
        "messages": (
            [{"role": "system", "content": system_message}]
            + history
            + [{"role": "user", "content": user_message}]
        ),
        "max_tokens": max_tokens,
        "temperature": temperature,
        "top_p": top_p,
        "user": user_id,
    }

    try:
        response = requests.post(endpoint, headers=headers, json=payload, timeout=60)
        response.raise_for_status()
        data = response.json()

        choices = data.get("choices")
        if not choices:
            return "No response from premium model."
        return choices[0]["message"]["content"]

    except requests.exceptions.HTTPError as e:
        # HTTP errors carry a response; log its body for debugging.
        print(f"[Premium] HTTP Error: {e.response.status_code} - {e.response.text}")
        return f"⚠️ Premium service error: {e.response.status_code}"
    except Exception as e:
        print(f"[Premium] Error: {e}")
        return f"⚠️ Premium service error: {str(e)}"
98
 
99
def respond(message, history, style, max_tokens, temperature, top_p):
    """Chat handler: route the message to the free or premium backend.

    Free users are capped at FREE_MAX_TOKENS and may only use the
    "neutral" style; premium users get the full style/parameter range.
    Returns the updated messages-format history for the Gradio chatbot.
    """
    user_id = "demo_user"  # Replace with actual user session
    is_premium, paywall_url = check_paywall(user_id)

    # Check style restriction for free users
    if not is_premium:
        max_tokens = min(max_tokens, FREE_MAX_TOKENS)
        if style != "neutral":
            # Fix: include the user's own message in the transcript, as
            # both generation paths below do — previously it silently
            # vanished from the chat when a locked style was selected.
            return history + [
                {"role": "user", "content": message},
                {"role": "assistant", "content": f"🔒 Style customization requires premium. [Unlock here]({paywall_url})"},
            ]

    system_message = STYLE_PRESETS.get(style, STYLE_PRESETS["neutral"])

    # Generate response based on tier
    if is_premium:
        reply = premium_generation(user_id, system_message, history, message, max_tokens, temperature, top_p)
        tier_badge = "✨ PREMIUM"
    else:
        reply = free_tier_generation(system_message, history, message, max_tokens, temperature)
        tier_badge = "🆓 FREE"

    full_reply = f"{reply}\n\n---\n_{tier_badge}_"
    return history + [{"role": "user", "content": message}, {"role": "assistant", "content": full_reply}]
121
 
122
  def export_conversation(history):
123
  user_id = "demo_user"
 
169
  export_btn.click(export_conversation, [chatbot], [download_file, export_status])
170
 
171
# Launch the Gradio UI only when run as a script (not on import).
if __name__ == "__main__":
    demo.launch()