import logging

from fastapi import FastAPI, Query

from keyword_extractor import extract_keywords
from news_fetcher import fetch_news
from reddit_search import search_reddit
from results_compiler import compile_results
from sentiment_analyzer import analyze_sentiment
app = FastAPI()
@app.get("/")
|
|
|
def read_root():
|
|
|
return {"message": "Hello, this is your INDOPACOM Sentiment App backend!"}
@app.get("/run_workflow")
|
|
|
def run_workflow(query: str = Query("US Army INDOPACOM")):
|
|
|
articles = fetch_news(query=query)
|
|
|
print(f"Fetched {len(articles)} articles for query: {query}")
|
|
|
keywords = extract_keywords(articles)
|
|
|
print(f"Extracted keywords: {keywords}")
|
|
|
reddit_data = search_reddit(keywords)
|
|
|
print(f"Reddit data: {reddit_data}")
|
|
|
sentiment = analyze_sentiment(reddit_data)
|
|
|
print(f"Sentiment: {sentiment}")
|
|
|
results = compile_results(articles, keywords, reddit_data, sentiment)
|
|
|
return results