# AI Content Optimizer — Streamlit app.
# (Removed non-code web-scrape residue: page chrome, commit hashes, and a
# line-number gutter that were captured along with the source.)
import streamlit as st
import openai
import pandas as pd
import textstat
import os
import asyncio
from textblob import TextBlob
# Initialize OpenAI client
# Reads the API key from the OPENAI_API_KEY environment variable; if unset,
# api_key is None and every API call below will fail with an auth error.
client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# Function to fetch available OpenAI models
def get_models():
    """Return the ids of all models visible to this API key.

    On any API/network failure the error is surfaced in the Streamlit UI
    and an empty list is returned so the caller can fall back to a default.
    """
    try:
        return [entry.id for entry in client.models.list().data]
    except Exception as e:
        st.error(f"Error fetching models: {e}")
        return []
# Function to analyze text
def analyze_text(text):
    """Return a ``(readability, sentiment)`` pair for *text*.

    readability: Flesch reading-ease score (higher = easier to read).
    sentiment:   TextBlob polarity in [-1.0, 1.0].
    """
    return (
        textstat.flesch_reading_ease(text),
        TextBlob(text).sentiment.polarity,
    )
# Function to generate AI-enhanced content
def generate_response(prompt, model, tone):
    """Rewrite *prompt* in the requested *tone* using chat model *model*.

    Returns the stripped text of the first completion choice.
    """
    instruction = f"Rewrite this in {tone} style: {prompt}"
    completion = client.chat.completions.create(
        model=model,
        messages=[{"role": "system", "content": instruction}],
    )
    return completion.choices[0].message.content.strip()
# Function for batch processing asynchronously
async def process_bulk(prompts, model, tone):
    """Rewrite every prompt in *prompts* in the given *tone*, concurrently.

    Bug fix: the v1 synchronous ``openai.OpenAI`` client has no
    ``chat.completions.acreate`` method (that was the 0.x API), so the
    original code raised ``AttributeError``. Each blocking ``create`` call
    is now run in a worker thread via ``asyncio.to_thread`` so requests
    still overlap without needing an ``AsyncOpenAI`` client.

    Returns a list of stripped completion texts, in the same order as
    *prompts*.
    """
    tasks = [
        asyncio.to_thread(
            client.chat.completions.create,
            model=model,
            messages=[{"role": "system", "content": f"Rewrite this in {tone} style: {p}"}],
        )
        for p in prompts
    ]
    responses = await asyncio.gather(*tasks)
    return [response.choices[0].message.content.strip() for response in responses]
# UI Structure
# Fix: the original title contained a mis-encoded (mojibake) character
# where an emoji once was; replaced with clean text.
st.title("AI Content Optimizer")
st.write("Enhance, analyze, and optimize your content with AI!")
# Select AI Provider (single option for now; kept for future expansion)
provider = st.selectbox("Choose AI Provider", ["OpenAI"])
# Fetch available models, falling back to a sensible default when the
# listing fails (get_models returns [] on error).
display_models = get_models()
if display_models:
    model_choice = st.selectbox("Choose AI Model", display_models)
else:
    model_choice = "gpt-3.5-turbo"
# Prompt Customization
st.markdown("### **Content Customization**")
user_prompt = st.text_area("Enter your content:")
tone_choice = st.selectbox("Choose a Writing Tone", ["Formal", "Casual", "Technical", "Poetic", "Persuasive"])
# Analyze the raw prompt and, on demand, generate an optimized version.
# Fix: removed mojibake characters from the button label and headers.
if user_prompt:
    readability, sentiment = analyze_text(user_prompt)
    st.write(f"**Original Readability Score:** {readability:.2f}")
    st.write(f"**Sentiment Score:** {sentiment:.2f} (Positive: 1, Negative: -1)")
    # Generate AI-enhanced content
    if st.button("Optimize Content"):
        optimized_content = generate_response(user_prompt, model_choice, tone_choice)
        optimized_readability, optimized_sentiment = analyze_text(optimized_content)
        st.write("### Optimized Content")
        st.text_area("Optimized Content:", optimized_content, height=150)
        st.write(f"**Optimized Readability Score:** {optimized_readability:.2f}")
        st.write(f"**Optimized Sentiment Score:** {optimized_sentiment:.2f}")
        # Persist each optimization across reruns in session state.
        if "history" not in st.session_state:
            st.session_state["history"] = []
        st.session_state["history"].append({"Original": user_prompt, "Optimized": optimized_content})
# Batch Processing
# Fix: removed a mojibake character from the section header; also coerce
# the Content column to strings and drop NaN rows so blank cells in the
# uploaded CSV cannot reach the API as floats.
st.markdown("### Bulk Optimization (CSV Upload)")
uploaded_file = st.file_uploader("Upload a CSV file with a column named 'Content'", type=["csv"])
if uploaded_file:
    df = pd.read_csv(uploaded_file)
    if "Content" in df.columns:
        prompts = df["Content"].dropna().astype(str).tolist()
        # asyncio.run drives the concurrent per-row optimization to completion.
        optimized_prompts = asyncio.run(process_bulk(prompts, model_choice, tone_choice))
        df = df.dropna(subset=["Content"]).reset_index(drop=True)
        df["Optimized_Content"] = optimized_prompts
        st.write(df)
        st.download_button("Download Optimized CSV", df.to_csv(index=False).encode('utf-8'), "optimized_content.csv", "text/csv")
    else:
        st.error("CSV must contain a column named 'Content'")
# Show Optimization History
# Fix: removed mojibake characters from the header and entry labels.
st.markdown("### Optimization History")
if "history" in st.session_state and st.session_state["history"]:
    # Newest entries first.
    for entry in st.session_state["history"][::-1]:
        st.write(f"**Original:** {entry['Original']}")
        st.write(f"**Optimized:** {entry['Optimized']}")
        st.markdown("---")
st.success("AI Content Optimizer Ready!")