notes73 committed on
Commit
bb9a417
·
1 Parent(s): 671f00d

Updated OpenAI API and fixed async batch processing

Browse files
Files changed (1) hide show
  1. app.py +10 -9
app.py CHANGED
@@ -5,31 +5,32 @@ import textstat
5
  import os
6
  import asyncio
7
 
8
# Load the OpenAI API key from the environment (set as a Hugging Face Space secret).
openai.api_key = os.getenv("OPENAI_API_KEY")
10
 
11
# Fetch the models visible to this API key (legacy openai<1.0 interface).
def get_models():
    """Return the ID of every model available to the configured API key."""
    listing = openai.Model.list()
    return [entry["id"] for entry in listing["data"]]
15
 
16
# Produce a single rewritten prompt (legacy openai<1.0 interface).
def generate_response(prompt, model, tone):
    """Ask *model* to rewrite *prompt* in the requested *tone* and return the text."""
    system_message = {"role": "system", "content": f"Rewrite this in {tone} style: {prompt}"}
    completion = openai.ChatCompletion.create(model=model, messages=[system_message])
    return completion["choices"][0]["message"]["content"].strip()
23
 
24
# Batch-rewrite many prompts concurrently (legacy openai<1.0 interface).
async def process_bulk(prompts, model, tone):
    """Start one async completion per prompt and await all raw responses."""
    def _start(p):
        # acreate returns an awaitable coroutine in the pre-1.0 SDK.
        return openai.ChatCompletion.acreate(
            model=model,
            messages=[{"role": "system", "content": f"Rewrite this in {tone} style: {p}"}],
        )

    return await asyncio.gather(*(_start(p) for p in prompts))
 
33
 
34
# UI Structure — top-level Streamlit page heading.
st.title("AI Prompt Optimizer")
@@ -83,7 +84,7 @@ with st.expander("📂 Upload CSV for Bulk Optimization"):
83
  if "Prompt" in df.columns:
84
  prompts = df["Prompt"].tolist()
85
  optimized_prompts = asyncio.run(process_bulk(prompts, model_choice, tone_choice))
86
- df["Optimized_Prompt"] = [p["choices"][0]["message"]["content"].strip() for p in optimized_prompts]
87
  st.write(df)
88
  st.download_button("Download Optimized CSV", df.to_csv(index=False).encode('utf-8'), "optimized_prompts.csv", "text/csv")
89
  else:
 
5
  import os
6
  import asyncio
7
 
8
# Initialize the openai>=1.0 client; the key comes from the environment
# (set as a Hugging Face Space secret).
client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
10
 
11
# Fetch the models visible to the configured client (openai>=1.0 interface).
def get_models():
    """Return the ID of every model the configured client can access."""
    listing = client.models.list()
    ids = []
    for entry in listing.data:
        ids.append(entry.id)
    return ids
15
 
16
# Produce a single rewritten prompt (openai>=1.0 interface).
def generate_response(prompt, model, tone):
    """Ask *model* to rewrite *prompt* in the requested *tone* and return the text."""
    messages = [{"role": "system", "content": f"Rewrite this in {tone} style: {prompt}"}]
    completion = client.chat.completions.create(model=model, messages=messages)
    return completion.choices[0].message.content.strip()
23
 
24
# Batch-rewrite many prompts concurrently (openai>=1.0 interface).
async def process_bulk(prompts, model, tone):
    """Rewrite every prompt in *prompts* concurrently and return the texts.

    Fix: the openai>=1.0 sync ``OpenAI`` client has no ``acreate`` method
    (that was a pre-1.0 API), so ``client.chat.completions.acreate`` raises
    ``AttributeError``. Each blocking ``create`` call is instead run in a
    worker thread via ``asyncio.to_thread`` so the requests overlap, then
    all responses are awaited together with ``asyncio.gather``.

    Returns a list of stripped completion strings, one per input prompt
    (empty list for an empty *prompts*).
    """
    tasks = [
        asyncio.to_thread(
            client.chat.completions.create,
            model=model,
            messages=[{"role": "system", "content": f"Rewrite this in {tone} style: {p}"}],
        )
        for p in prompts
    ]
    responses = await asyncio.gather(*tasks)
    return [response.choices[0].message.content.strip() for response in responses]
34
 
35
# UI Structure — top-level Streamlit page heading.
st.title("AI Prompt Optimizer")
 
84
  if "Prompt" in df.columns:
85
  prompts = df["Prompt"].tolist()
86
  optimized_prompts = asyncio.run(process_bulk(prompts, model_choice, tone_choice))
87
+ df["Optimized_Prompt"] = optimized_prompts
88
  st.write(df)
89
  st.download_button("Download Optimized CSV", df.to_csv(index=False).encode('utf-8'), "optimized_prompts.csv", "text/csv")
90
  else: