#!/usr/bin/env python3
"""
GURMA.ai Research Tool — CLI entry point.
Usage:
    python research.py search "rehabilitation robotics market"
    python research.py batch
    python research.py competitor "Ekso Bionics"
    python research.py competitor --list-categories
    python research.py extract
    python research.py list
    python research.py sota
    python research.py sota --analyze notes/research/podcast.md
    python research.py mali
    python research.py fonlar -c tubitak
"""
from __future__ import annotations
import argparse
import sys
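# Support both package execution (relative imports) and direct script runs
# (plain imports): try the package form first, then fall back.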
try:
    from .config import RESEARCH_DIR, COMPETITORS, BATCH_QUERY_TEMPLATES, MARKET_QUERIES, LLM_ENABLED
    from .search import SearchService, ResultStorage
    from .extract import CompetitorExtractor
    from .intel import CompetitorIntelAgent, DEEP_INTEL_CATEGORIES
except ImportError:
    from config import RESEARCH_DIR, COMPETITORS, BATCH_QUERY_TEMPLATES, MARKET_QUERIES, LLM_ENABLED
    from search import SearchService, ResultStorage
    from extract import CompetitorExtractor
    from intel import CompetitorIntelAgent, DEEP_INTEL_CATEGORIES

# ============================================================
# Commands
# ============================================================
def cmd_search(args):
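    """Run a single web search and print titles, URLs, and snippets."""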
    service = SearchService(backend=args.backend)
    print(f"Searching: {args.query}")
    print(f"Backend: {args.backend} | Max: {args.max_results}")
    print("-" * 50)
    results = service.search(args.query, args.max_results, save=args.save)
    for i, r in enumerate(results, 1):
        print(f"\n{i}. {r.title}")
        print(f" {r.url}")
        print(f" {r.snippet[:150]}...")
    print(f"\n[{len(results)} results]")
    if args.save:
        print(f"Saved to: {RESEARCH_DIR}")

def cmd_batch(args):
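    """Batch-run the competitor/market query matrix, skipping recently run queries."""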
    service = SearchService(backend=args.backend)
    storage = ResultStorage()
    queries = []
    for company in COMPETITORS:
        for template in BATCH_QUERY_TEMPLATES:
            queries.append(template.format(company=company))
    queries.extend(MARKET_QUERIES)
    total_queries = len(queries)
    skipped = 0
    if not args.force:
        recent = storage.get_recent_queries(days=args.days)
        original_count = len(queries)
        queries = [q for q in queries if q.lower().strip() not in recent]
        skipped = original_count - len(queries)
    print("Batch Research")
    print("=" * 60)
    print(f"Competitors: {len(COMPETITORS)}")
    print(f"Total queries: {total_queries}")
    if skipped > 0:
        print(f"Skipped (run in last {args.days} days): {skipped}")
    print(f"New queries to run: {len(queries)}")
    print(f"Output: {RESEARCH_DIR}")
    print("=" * 60)
    if not queries:
        print("\nNo new queries to run. Use --force to re-run all.")
        return

    def progress(i, total, query):
        print(f"\n[{i}/{total}] {query}")

    stats = service.search_batch(queries, args.max_results, args.delay, callback=progress)
    success = sum(1 for v in stats.values() if v >= 0)
    print(f"\n{'='*60}")
    print(f"Complete: {success}/{len(queries)} successful")
    if skipped > 0:
        print(f"Skipped: {skipped} (already run recently)")
    print("=" * 60)

def cmd_competitor(args):
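    """Run the deep competitive-intelligence agent for a single company."""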
    # Handle --list-categories before anything else, so it works without a
    # company argument and without tripping category validation.
    if args.list_categories:
        print("Available categories:")
        for key, cat in DEEP_INTEL_CATEGORIES.items():
            q_count = len(cat["queries"])
            print(f" {key:30s} {cat['label']:30s} ({q_count} queries)")
        return
    company = args.company
    if not company:
        print("Error: company name required (or use --list-categories).")
        return
    use_external_llm = args.external_llm
    if use_external_llm and not LLM_ENABLED:
        print("Warning: --external-llm requested but OPENROUTER_API_KEY not found. Skipping external LLM.")
        use_external_llm = False
    categories = None
    if args.categories:
        categories = [c.strip() for c in args.categories.split(",")]
        valid = set(DEEP_INTEL_CATEGORIES.keys())
        invalid = [c for c in categories if c not in valid]
        if invalid:
            print(f"Invalid categories: {invalid}")
            print(f"Valid: {sorted(valid)}")
            return
    agent = CompetitorIntelAgent(company)
    report_path = agent.run(
        categories=categories,
        use_external_llm=use_external_llm,
        delay=args.delay,
        max_results=args.max_results,
    )
    print(f"\nReport: {report_path}")

def cmd_extract(args):
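    """Extract competitor mention counts from saved research into JSON."""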
    extractor = CompetitorExtractor()
    print(f"Loading research from: {extractor.research_dir}")
    data = extractor.process()
    if not data["competitors"]:
        print("No research files found. Run 'batch' first.")
        return
    output = extractor.save(data)
    print(f"Saved to: {output}")
    print("\nCompany mentions:")
    for comp in data["competitors"]:
        status_marker = {"collapsed": "⚠", "weak": "↓", "growing": "↑", "strong": "★"}.get(comp["status"], "•")
        print(f" {status_marker} {comp['name']}: {comp['mentions']} mentions ({comp['status']})")

def cmd_sota(args):
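    """Show the SOTA knowledge base, or analyze a document to update it."""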
    try:
        from .sota_agent import SOTAScoutAgent
    except ImportError:
        from sota_agent import SOTAScoutAgent
    agent = SOTAScoutAgent()
    if args.analyze:
        report = agent.analyze(args.analyze)
        print(f"\nAnalysis report: {report}")
        return
    agent.show(section=args.show)

def cmd_mali(args):
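    """Run the Turkish company-formation research agent (MaliMusavirAgent)."""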
    try:
        from .tr_agents import MaliMusavirAgent
    except ImportError:
        from tr_agents import MaliMusavirAgent
    agent = MaliMusavirAgent()
    if args.list_categories:
        agent.list_categories()
        return
    categories = None
    if args.categories:
        categories = [c.strip() for c in args.categories.split(",")]
        valid = set(agent.CATEGORIES.keys())
        invalid = [c for c in categories if c not in valid]
        if invalid:
            print(f"Geçersiz kategoriler: {invalid}")
            print(f"Geçerli: {sorted(valid)}")
            return
    report_path = agent.run(
        categories=categories,
        delay=args.delay,
        max_results=args.max_results,
    )
    print(f"\nRapor: {report_path}")

def cmd_fonlar(args):
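    """Run the Turkish government-funding research agent (TÜBİTAK and state funds)."""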
    try:
        from .tr_agents import FonArastirmaAgent
    except ImportError:
        from tr_agents import FonArastirmaAgent
    agent = FonArastirmaAgent()
    if args.list_categories:
        agent.list_categories()
        return
    categories = None
    if args.categories:
        categories = [c.strip() for c in args.categories.split(",")]
        valid = set(agent.CATEGORIES.keys())
        invalid = [c for c in categories if c not in valid]
        if invalid:
            print(f"Geçersiz kategoriler: {invalid}")
            print(f"Geçerli: {sorted(valid)}")
            return
    report_path = agent.run(
        categories=categories,
        delay=args.delay,
        max_results=args.max_results,
    )
    print(f"\nRapor: {report_path}")

def cmd_list(args):
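    """List recently saved searches."""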
    storage = ResultStorage()
    searches = storage.list_searches(args.limit)
    if not searches:
        print(f"No searches in {RESEARCH_DIR}")
        return
    print(f"Recent searches ({RESEARCH_DIR}):\n")
    for s in searches:
        print(f" {s['timestamp'][:10]} {s['results']:2d} results {s['query'][:50]}")

# ============================================================
# Argparse
# ============================================================
def main():
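    """Build the argument parser and dispatch to the chosen subcommand."""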
    parser = argparse.ArgumentParser(
        description="GURMA.ai Research Tool",
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    subparsers = parser.add_subparsers(dest="command", help="Commands")

    # search
    p_search = subparsers.add_parser("search", help="Single web search")
    p_search.add_argument("query", help="Search query")
    p_search.add_argument("-b", "--backend", default="duckduckgo",
                          choices=["duckduckgo", "ddg", "serpapi", "brave"])
    p_search.add_argument("-n", "--max-results", type=int, default=10)
    p_search.add_argument("--no-save", dest="save", action="store_false")
    p_search.set_defaults(func=cmd_search)

    # batch
    p_batch = subparsers.add_parser("batch", help="Batch research all competitors")
    p_batch.add_argument("-b", "--backend", default="duckduckgo")
    p_batch.add_argument("-n", "--max-results", type=int, default=10)
    p_batch.add_argument("-d", "--delay", type=float, default=0.5)
    p_batch.add_argument("--days", type=int, default=7,
                         help="Skip queries run within N days (default: 7)")
    p_batch.add_argument("-f", "--force", action="store_true",
                         help="Force re-run all queries (ignore deduplication)")
    p_batch.set_defaults(func=cmd_batch)

    # competitor (deep intel)
    p_comp = subparsers.add_parser("competitor", help="Deep competitive intelligence on a company")
    p_comp.add_argument("company", nargs="?", default="", help="Company name (e.g. 'Ekso Bionics')")
    p_comp.add_argument("--external-llm", action="store_true",
                        help="Also use external LLM (OpenRouter) for enhanced analysis")
    p_comp.add_argument("-c", "--categories", type=str, default=None,
                        help="Comma-separated categories (default: all)")
    p_comp.add_argument("--list-categories", action="store_true",
                        help="List available categories")
    p_comp.add_argument("-n", "--max-results", type=int, default=10)
    p_comp.add_argument("-d", "--delay", type=float, default=1.0,
                        help="Delay between searches in seconds (default: 1.0)")
    p_comp.set_defaults(func=cmd_competitor)

    # sota
    p_sota = subparsers.add_parser("sota", help="SOTA technology knowledge base for GURMA.ai")
    p_sota.add_argument("--analyze", "-a", type=str, default=None,
                        help="Analyze a document and update knowledge base")
    p_sota.add_argument("--show", "-s", type=str, default=None, nargs="?",
                        const=None,
                        choices=["models", "techniques", "stack", "principles", "actions", "sources"],
                        help="Show specific KB section (default: summary)")
    p_sota.set_defaults(func=cmd_sota)

    # mali (Turkish company formation)
    p_mali = subparsers.add_parser("mali", help="Türkiye şirket kuruluşu araştırması")
    p_mali.add_argument("-c", "--categories", type=str, default=None,
                        help="Virgülle ayrılmış kategoriler (varsayılan: tümü)")
    p_mali.add_argument("--list-categories", action="store_true",
                        help="Mevcut kategorileri listele")
    p_mali.add_argument("-n", "--max-results", type=int, default=10)
    p_mali.add_argument("-d", "--delay", type=float, default=1.0)
    p_mali.set_defaults(func=cmd_mali)

    # fonlar (Turkish government funding research)
    p_fonlar = subparsers.add_parser("fonlar", help="TÜBİTAK ve devlet fonları araştırması")
    p_fonlar.add_argument("-c", "--categories", type=str, default=None,
                          help="Virgülle ayrılmış kategoriler (varsayılan: tümü)")
    p_fonlar.add_argument("--list-categories", action="store_true",
                          help="Mevcut kategorileri listele")
    p_fonlar.add_argument("-n", "--max-results", type=int, default=10)
    p_fonlar.add_argument("-d", "--delay", type=float, default=1.0)
    p_fonlar.set_defaults(func=cmd_fonlar)

    # extract
    p_extract = subparsers.add_parser("extract", help="Extract competitor data to JSON")
    p_extract.set_defaults(func=cmd_extract)

    # list
    p_list = subparsers.add_parser("list", help="List saved searches")
    p_list.add_argument("-l", "--limit", type=int, default=20)
    p_list.set_defaults(func=cmd_list)

    args = parser.parse_args()
    if hasattr(args, "func"):
        args.func(args)
    else:
        parser.print_help()

if __name__ == "__main__":
    main()