LarsDiedrich22 commited on
Commit
f00396e
·
0 Parent(s):

Deploy Teich Space (rooted from froschgruppe/latest_froschgruppe)

Browse files
.gitignore ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ .env
2
+ __pycache__/
3
+ .DS_Store
4
+ artifacts/
5
+ data/
6
+ .gradio/
7
+ report.md
README.md ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Teich
3
+ emoji: 🐸
4
+ colorFrom: green
5
+ colorTo: blue
6
+ sdk: gradio
7
+ sdk_version: "4.44.1"
8
+ app_file: app.py
9
+ pinned: false
10
+ ---
11
+
app.py ADDED
@@ -0,0 +1,290 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os, sys, json
2
+ from datetime import datetime, date
3
+ from pathlib import Path
4
+
5
+ import gradio as gr
6
+
7
+ # Make src importable
8
+ sys.path.append("src")
9
+
10
+ from src.latest_froschgruppe.tools.organizer_tool import OrganizerTool
11
+ from src.latest_froschgruppe.tools.payroll_tool import PayrollTool
12
+ from src.latest_froschgruppe.tools.roadkill_gbif_tool import RoadkillGBIFTool
13
+ from src.latest_froschgruppe.tools.calendar_tool import CalendarTool # ICS generator
14
+ from src.latest_froschgruppe.tools.gbif_occurrence_tool import GBIFOccurrenceTool
15
+
16
+
17
+ # Optional: if you have folium map tool
18
+ # from latest_froschgruppe.tools.map_tool import build_map
19
+
20
+ DATA_DIR = Path(os.getenv("DATA_DIR", "data"))
21
+ DATA_DIR.mkdir(parents=True, exist_ok=True)
22
+ EVENTS_FILE = DATA_DIR / "events.json"
23
+
24
+ organizer = OrganizerTool()
25
+ payroll = PayrollTool()
26
+ gbif = GBIFOccurrenceTool()
27
+ roadkill = RoadkillGBIFTool()
28
+ cal = CalendarTool()
29
+
30
+
31
def _load_events():
    """Read the stored event list from EVENTS_FILE; a missing file means no events."""
    if EVENTS_FILE.exists():
        return json.loads(EVENTS_FILE.read_text(encoding="utf-8"))
    return []
35
+
36
+
37
def _save_events(events):
    """Persist the full event list to EVENTS_FILE as pretty-printed UTF-8 JSON."""
    serialized = json.dumps(events, ensure_ascii=False, indent=2)
    EVENTS_FILE.write_text(serialized, encoding="utf-8")
39
+
40
+
41
def add_event(title: str, start_date: str, start_time: str, end_time: str, location: str):
    """Append a new event to the JSON store and return the refreshed table rows.

    Start/end are stored as naive ISO-like strings ("YYYY-MM-DDTHH:MM");
    the inputs are not validated here (the UI supplies sane defaults).
    """
    events = _load_events()
    # BUG FIX: the ID was previously f"E{len(events)+1:04d}", which reuses an
    # existing ID after any deletion (delete E0001 from [E0001..E0003] and the
    # next add produces a second E0003). Derive the next ID from the highest
    # existing numeric suffix instead.
    max_num = 0
    for e in events:
        eid = str(e.get("id", ""))
        if eid.startswith("E") and eid[1:].isdigit():
            max_num = max(max_num, int(eid[1:]))
    ev = {
        "id": f"E{max_num + 1:04d}",
        "title": title.strip(),
        "start": f"{start_date}T{start_time}",
        "end": f"{start_date}T{end_time}",
        "location": location.strip(),
        # NOTE: datetime.utcnow() is deprecated since Python 3.12; kept to
        # preserve the existing "...Z" timestamp format in stored data.
        "created_at": datetime.utcnow().isoformat() + "Z",
    }
    events.append(ev)
    _save_events(events)
    return list_events()
57
+
58
+
59
def delete_event(event_id: str):
    """Drop the event with the given ID (no-op if absent) and return fresh rows."""
    wanted = event_id.strip()
    remaining = [ev for ev in _load_events() if ev["id"] != wanted]
    _save_events(remaining)
    return list_events()
64
+
65
+
66
def list_events():
    """Return up to 200 events as [id, start, end, title, location] rows, ordered by start."""
    ordered = sorted(_load_events(), key=lambda ev: ev.get("start", ""))
    return [
        [ev["id"], ev["start"], ev.get("end", ""), ev["title"], ev.get("location", "")]
        for ev in ordered[:200]
    ]
74
+
75
+
76
def dashboard_snapshot():
    """Assemble all dashboard panels.

    Returns five markdown strings in order: migration window, upcoming
    events, todos, volunteers, payroll report.
    """
    # Organizer / payroll panels come straight from the tools' markdown output.
    todos_md = organizer._run("list_todos", "{}")
    vols_md = organizer._run("list_volunteers", "{}")
    payroll_md = payroll._run("report", "{}")

    # Upcoming events panel: at most the next 10 rows.
    rows = list_events()
    events_md = "## Nächste Events\n"
    if rows:
        events_md += "\n".join(
            f"- {row[1]} | {row[3]} @ {row[4]} (ID {row[0]})" for row in rows[:10]
        )
    else:
        events_md += "Keine Events gespeichert."

    # Simple heuristic for the operational migration window
    # (typical for DE: Feb 15 – Apr 30); roll to next year once it has passed.
    today = date.today()
    season = today.year
    if today > date(season, 4, 30):
        season += 1
    window_start = date(season, 2, 15)
    window_end = date(season, 4, 30)
    mig_md = (
        "## Nächste Wanderphase (operatives Fenster)\n"
        f"- Fenster: **{window_start.isoformat()} bis {window_end.isoformat()}**\n"
        "- Trigger (praktisch): milde, feuchte Abende/Nächte + Regen → Einsatzwahrscheinlichkeit hoch.\n"
        "- Tipp: täglich 17:00 Wettercheck + bei Trigger Nachtteam aktivieren."
    )

    return mig_md, events_md, todos_md, vols_md, payroll_md
111
+
112
+
113
def add_todo(title: str, priority: str):
    """Create a todo via OrganizerTool; return (status message, refreshed todo list)."""
    args = json.dumps({"title": title, "priority": priority}, ensure_ascii=False)
    status = organizer._run("add_todo", args)
    return status, organizer._run("list_todos", "{}")
117
+
118
+
119
def add_volunteer(name: str, phone: str, availability: str):
    """Register a volunteer via OrganizerTool; return (status, refreshed volunteer list)."""
    args = json.dumps(
        {"name": name, "phone": phone, "availability": availability},
        ensure_ascii=False,
    )
    status = organizer._run("add_volunteer", args)
    return status, organizer._run("list_volunteers", "{}")
123
+
124
+
125
def payroll_plan(hourly_rate: float, monthly_cap: float):
    """Run the 450€ budget-planning calculation; returns the tool's markdown report."""
    args = json.dumps(
        {"hourly_rate": hourly_rate, "monthly_cap": monthly_cap},
        ensure_ascii=False,
    )
    return payroll._run("plan", args)
128
+
129
+
130
def payroll_add_worker(name: str, hourly_rate: float, monthly_cap: float):
    """Add a worker to payroll; return (status message, refreshed payroll report)."""
    args = json.dumps(
        {"name": name, "hourly_rate": hourly_rate, "monthly_cap": monthly_cap},
        ensure_ascii=False,
    )
    status = payroll._run("add_worker", args)
    return status, payroll._run("report", "{}")
135
+
136
+
137
def payroll_log_hours(name: str, date_str: str, hours: float, note: str):
    """Log worked hours for a worker; return (status message, refreshed payroll report)."""
    args = json.dumps(
        {"name": name, "date": date_str, "hours": hours, "note": note},
        ensure_ascii=False,
    )
    status = payroll._run("log_hours", args)
    return status, payroll._run("report", "{}")
142
+
143
+
144
def fetch_hotspots(species: str, country: str, year_from: int, year_to: int):
    """Query occurrence and roadkill hotspots for one species/country/time range.

    Both tools return markdown (tables plus Google-Maps links).
    """
    occurrences_md = gbif._run(
        scientific_name=species, country=country,
        year_from=year_from, year_to=year_to, limit=300,
    )
    roadkill_md = roadkill._run(
        country=country, scientific_name=species,
        year_from=year_from, year_to=year_to, limit=300,
    )
    return occurrences_md, roadkill_md
149
+
150
+
151
def generate_migration_ics(region: str, year: int, start_month: int, start_day: int, end_month: int, end_day: int):
    """Generate the migration ICS and return (tool message, file path for gr.File).

    CalendarTool reports a text line of the form "✅ ICS erstellt: <path>";
    the download path is recovered by scanning for the first
    whitespace-separated token ending in ".ics" (None when absent).
    """
    message = cal._run(
        region=region, year=year,
        start_month=start_month, start_day=start_day,
        end_month=end_month, end_day=end_day,
    )
    path = next((tok for tok in message.split() if tok.endswith(".ics")), None)
    return message, path
162
+
163
+
164
# Gradio UI: five tabs wiring the module-level tool instances and helper
# functions above. Layout and event wiring are order-sensitive, so the code
# is left unchanged; comments mark each section.
with gr.Blocks(title="Teich – Management Partner (Froschrettung)") as demo:
    gr.Markdown("# 🐸 Teich – Management Partner (Froschrettung)\n**Dashboard • Kalender • Einsätze • Daten • Team • Budget**")

    # --- Tab 1: read-only overview, filled on page load and on demand ---
    with gr.Tab("Dashboard"):
        btn_refresh = gr.Button("🔄 Dashboard aktualisieren")

        mig_md = gr.Markdown()      # migration-window panel
        events_md = gr.Markdown()   # next events panel
        todos_md = gr.Markdown()    # todos panel
        vols_md = gr.Markdown()     # volunteers panel
        payroll_md = gr.Markdown()  # payroll report panel

        btn_refresh.click(fn=dashboard_snapshot, inputs=[], outputs=[mig_md, events_md, todos_md, vols_md, payroll_md])
        demo.load(fn=dashboard_snapshot, inputs=[], outputs=[mig_md, events_md, todos_md, vols_md, payroll_md])

    # --- Tab 2: event CRUD (JSON-backed) and ICS generation ---
    with gr.Tab("Kalender & Schichten"):
        gr.Markdown("## Events\nEvents werden lokal in `data/events.json` gespeichert.")
        events_table = gr.Dataframe(headers=["ID", "Start", "Ende", "Titel", "Ort"], interactive=False)

        with gr.Row():
            ev_title = gr.Textbox(label="Titel", placeholder="z.B. Zaunkontrolle / Nachtpatrouille")
            ev_location = gr.Textbox(label="Ort", placeholder="z.B. Straße X / Teich Y")
        with gr.Row():
            ev_date = gr.Textbox(label="Datum (YYYY-MM-DD)", value=str(date.today()))
            ev_start = gr.Textbox(label="Start (HH:MM)", value="20:00")
            ev_end = gr.Textbox(label="Ende (HH:MM)", value="23:00")
        with gr.Row():
            btn_add_ev = gr.Button("➕ Event anlegen")
            del_id = gr.Textbox(label="Event ID löschen", placeholder="E0001")
            btn_del_ev = gr.Button("🗑️ Löschen")
            btn_reload_ev = gr.Button("📋 Events neu laden")

        btn_reload_ev.click(fn=list_events, inputs=[], outputs=[events_table])
        demo.load(fn=list_events, inputs=[], outputs=[events_table])
        btn_add_ev.click(fn=add_event, inputs=[ev_title, ev_date, ev_start, ev_end, ev_location], outputs=[events_table])
        btn_del_ev.click(fn=delete_event, inputs=[del_id], outputs=[events_table])

        # ICS generation for the migration window (defaults: Feb 15 – Apr 30).
        gr.Markdown("## Wanderfenster (ICS)\nErzeugt einen Standard-ICS (Wettercheck, Zaunkontrolle, Patrouillenblock).")
        with gr.Row():
            region = gr.Textbox(label="Region", value="Deutschland")
            year = gr.Number(label="Jahr", value=date.today().year, precision=0)
        with gr.Row():
            sm = gr.Number(label="Start Monat", value=2, precision=0)
            sd = gr.Number(label="Start Tag", value=15, precision=0)
            em = gr.Number(label="Ende Monat", value=4, precision=0)
            ed = gr.Number(label="Ende Tag", value=30, precision=0)

        btn_ics = gr.Button("📅 ICS generieren")
        ics_status = gr.Markdown()
        ics_file = gr.File(label="ICS Download")

        btn_ics.click(fn=generate_migration_ics, inputs=[region, year, sm, sd, em, ed], outputs=[ics_status, ics_file])

    # --- Tab 3: free-text mission planning (crew kickoff not yet wired in) ---
    with gr.Tab("Einsatz planen"):
        gr.Markdown("## Einsatzplanung\nHier kannst du wie bisher einen Einsatz planen (später: Crew-Kickoff anbinden).")
        user_prompt = gr.Textbox(label="Auftrag / Situation", lines=5, placeholder="z.B. Plane nächste Woche Froschrettung in Region X ...")
        country = gr.Textbox(label="Land (GBIF Country Code)", value="DE")
        region2 = gr.Textbox(label="Region (Freitext)", value="Deutschland")
        species = gr.Dropdown(["Rana temporaria", "Bufo bufo", "Lissotriton vulgaris", "Triturus cristatus"], value="Rana temporaria", label="Zielart")
        year_from = gr.Number(label="Jahr von", value=2020, precision=0)
        year_to = gr.Number(label="Jahr bis", value=date.today().year, precision=0)

        # Optional: re-attach the crew kickoff (run_manager) here later.
        gr.Markdown("➡️ (Nächster Schritt) Crew-Kickoff als Button: erzeugt Plan + Tasks + Kalender + Budget automatisch.")

    # --- Tab 4: GBIF / roadkill hotspot lookup ---
    with gr.Tab("Daten & Hotspots (GBIF/Roadkill)"):
        gr.Markdown("## Amphibien-Sichtungen & Roadkill-Hotspots\nTop-Cluster inkl. Google-Maps Links.")
        with gr.Row():
            d_species = gr.Dropdown(["Rana temporaria", "Bufo bufo", "Lissotriton vulgaris", "Triturus cristatus"], value="Rana temporaria", label="Art")
            d_country = gr.Textbox(label="Land", value="DE")
            d_y1 = gr.Number(label="Jahr von", value=2020, precision=0)
            d_y2 = gr.Number(label="Jahr bis", value=date.today().year, precision=0)
        btn_data = gr.Button("📡 Daten laden (Hotspots)")
        out_gbif = gr.Markdown()
        out_road = gr.Markdown()
        btn_data.click(fn=fetch_hotspots, inputs=[d_species, d_country, d_y1, d_y2], outputs=[out_gbif, out_road])

    # --- Tab 5: todos, volunteers, payroll (all persisted under DATA_DIR) ---
    with gr.Tab("Team & Budget (450€)"):
        gr.Markdown("## To-Dos / Volunteers / Payroll\nAlles wird lokal gespeichert (DATA_DIR).")

        with gr.Accordion("To-Dos", open=True):
            todo_title = gr.Textbox(label="Neue Aufgabe", placeholder="z.B. Materialliste erstellen")
            todo_prio = gr.Dropdown(["L", "M", "H"], value="M", label="Priorität")
            btn_todo = gr.Button("➕ To-Do anlegen")
            todo_msg = gr.Markdown()
            todo_list = gr.Markdown()
            btn_todo.click(fn=add_todo, inputs=[todo_title, todo_prio], outputs=[todo_msg, todo_list])
            demo.load(fn=lambda: organizer._run("list_todos", "{}"), inputs=[], outputs=[todo_list])

        with gr.Accordion("Volunteers", open=False):
            v_name = gr.Textbox(label="Name")
            v_phone = gr.Textbox(label="Telefon")
            v_avail = gr.Textbox(label="Verfügbarkeit", placeholder="z.B. Mo/Di ab 18 Uhr")
            btn_vol = gr.Button("➕ Volunteer hinzufügen")
            vol_msg = gr.Markdown()
            vol_list = gr.Markdown()
            btn_vol.click(fn=add_volunteer, inputs=[v_name, v_phone, v_avail], outputs=[vol_msg, vol_list])
            demo.load(fn=lambda: organizer._run("list_volunteers", "{}"), inputs=[], outputs=[vol_list])

        with gr.Accordion("450€ Plan & Payroll", open=True):
            p_rate = gr.Number(label="Stundenlohn €", value=12.0, precision=2)
            p_cap = gr.Number(label="Monatscap €", value=450.0, precision=2)
            btn_plan = gr.Button("🧮 450€ Plan berechnen")
            plan_md = gr.Markdown()
            btn_plan.click(fn=payroll_plan, inputs=[p_rate, p_cap], outputs=[plan_md])

            gr.Markdown("### Worker anlegen")
            w_name = gr.Textbox(label="Name")
            btn_add_w = gr.Button("➕ Worker hinzufügen")
            w_msg = gr.Markdown()
            rep_md = gr.Markdown()
            # Workers are created with the rate/cap fields from the plan section above.
            btn_add_w.click(fn=payroll_add_worker, inputs=[w_name, p_rate, p_cap], outputs=[w_msg, rep_md])
            demo.load(fn=lambda: payroll._run("report", "{}"), inputs=[], outputs=[rep_md])

            gr.Markdown("### Stunden loggen")
            l_name = gr.Textbox(label="Name (wie Worker)")
            l_date = gr.Textbox(label="Datum", value=str(date.today()))
            l_hours = gr.Number(label="Stunden", value=3.0, precision=2)
            l_note = gr.Textbox(label="Notiz", placeholder="z.B. Nachtpatrouille")
            btn_log = gr.Button("⏱️ log_hours")
            log_msg = gr.Markdown()
            btn_log.click(fn=payroll_log_hours, inputs=[l_name, l_date, l_hours, l_note], outputs=[log_msg, rep_md])

    gr.Markdown("---\n**Hinweis:** In Hugging Face empfehle ich Persistent Storage aktivieren und `DATA_DIR=/data` setzen, dann bleiben Tasks/Events auch nach Restart erhalten.")

# Entry point: PORT env var overrides the default 7860 (Hugging Face Spaces).
demo.queue()
demo.launch(server_name="0.0.0.0", server_port=int(os.getenv("PORT", 7860)))
knowledge/user_preference.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ User name is John Doe.
2
+ User is an AI Engineer.
3
+ User is interested in AI Agents.
4
+ User is based in San Francisco, California.
pyproject.toml ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "latest_froschgruppe"
3
+ version = "0.1.0"
4
+ description = "latest-froschgruppe using crewAI"
5
+ authors = [{ name = "Your Name", email = "you@example.com" }]
6
+ requires-python = ">=3.10,<3.14"
7
+ dependencies = [
8
+ "crewai[tools]==1.8.0"
9
+ ]
10
+
11
+ [project.scripts]
12
+ latest_froschgruppe = "latest_froschgruppe.main:run"
13
+ run_crew = "latest_froschgruppe.main:run"
14
+ train = "latest_froschgruppe.main:train"
15
+ replay = "latest_froschgruppe.main:replay"
16
+ test = "latest_froschgruppe.main:test"
17
+ run_with_trigger = "latest_froschgruppe.main:run_with_trigger"
18
+
19
+ [build-system]
20
+ requires = ["hatchling"]
21
+ build-backend = "hatchling.build"
22
+
23
+ [tool.crewai]
24
+ type = "crew"
requirements.txt ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ gradio==4.44.1
2
+ crewai==1.8.0
3
+ openai==1.83.0
4
+ python-dotenv
5
+ requests
6
+ folium
7
+ pandas
src/latest_froschgruppe/__init__.py ADDED
File without changes
src/latest_froschgruppe/config/agents.yaml ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ aktivismus_team_organisation:
2
+ role: "Aktivismus- & Team-Organisation"
3
+ goal: "Koordiniert Einsätze, plant Schichten, hält To-Dos & Kalender aktuell und liefert eine tägliche operative Lageübersicht."
4
+ backstory: "Erfahren in Logistik, Teamführung und Einsatzplanung. Denkt in Checklisten, Prioritäten, Risiken und Ressourcen."
5
+ allow_delegation: true
6
+ verbose: true
7
+
8
+ marketing_oeffentlichkeit_medien:
9
+ role: "Marketing, Öffentlichkeitsarbeit & Medienproduktion"
10
+ goal: "Erstellt klare, mobilisierende Kommunikation für Volunteers, Sponsoren und Öffentlichkeit – inkl. Storytelling und Content-Plan."
11
+ backstory: "Sehr gut in Storytelling, Social Media, Pressearbeit. Übersetzt Wissenschaft in verständliche Botschaften."
12
+ allow_delegation: true
13
+ verbose: true
14
+
15
+ wissenschaft_umwelt_tiere:
16
+ role: "Wissenschaft (Umwelt & Tiere)"
17
+ goal: "Liefert belastbare Fakten, nutzt Datenquellen (GBIF/Roadkill), findet Hotspots und erzeugt evidenzbasierte Empfehlungen."
18
+ backstory: "Biologie/Ökologie-Hintergrund. Analysiert Daten, erkennt Muster, dokumentiert Unsicherheiten."
19
+ allow_delegation: true
20
+ verbose: true
21
+
22
+ finanzen_partnerschaften:
23
+ role: "Finanzen & Partnerschaften"
24
+ goal: "Plant Budget, Minijobs (450€ Annahme), Fundraising-Pitches und Sponsoring-Pakete. Hält Kosten & Auszahlungen nachvollziehbar."
25
+ backstory: "NGO-Finanzierung, Fundraising, Budgetplanung. Denkt in Szenarien und Compliance."
26
+ allow_delegation: true
27
+ verbose: true
28
+
29
+ recht_und_politik:
30
+ role: "Recht und Politik"
31
+ goal: "Checkt Risiken (Genehmigungen, Haftung, Datenschutz, Straßenaktionen) und formuliert sichere Alternativen."
32
+ backstory: "Kennt Verwaltungslogik, Stakeholder, Policies. Setzt auf sichere, rechtlich robuste Vorgehensweisen."
33
+ allow_delegation: true
34
+ verbose: true
src/latest_froschgruppe/config/tasks.yaml ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ intake_brief:
2
+ description: >
3
+ Erstelle aus dem Input {{user_prompt}} ein kurzes Einsatz-Briefing für eine Umweltschutzorganisation (Froschrettung).
4
+ Enthält: Ziel, Ort/Region, Zeitraum, Stakeholder, Constraints, Erfolgskriterien.
5
+ Lege 5-10 To-Dos an (Priorität H/M/L) über OrganizerTool.
6
+ expected_output: >
7
+ Markdown-Briefing + To-Do-Liste (IDs, Priorität).
8
+ agent: aktivismus_team_organisation
9
+
10
+ data_hotspots:
11
+ description: >
12
+ Nutze GBIFOccurrenceTool für {{species}} in {{country}} (optional Zeitraum {{year_from}}–{{year_to}}).
13
+ Erzeuge Hotspots (Top 10) und leite daraus 3-5 konkrete Fokusgebiete ab.
14
+ Nutze RoadkillGBIFTool ebenfalls (country={{country}}, optional scientific_name={{species}}) und vergleiche Hotspots.
15
+ expected_output: >
16
+ 1) Tabelle GBIF-Hotspots
17
+ 2) Tabelle Roadkill-Hotspots
18
+ 3) Empfehlung: welche 3-5 Hotspots operativ priorisieren (mit Begründung).
19
+ agent: wissenschaft_umwelt_tiere
20
+
21
+ migration_calendar:
22
+ description: >
23
+ Leite eine operative Kalenderstrategie ab (Wetter-Trigger, täglicher Ablauf, Patrouillenfenster).
24
+ Erzeuge eine ICS-Datei über CalendarTool für Region {{region}} im Jahr {{year}}.
25
+ expected_output: >
26
+ ICS-Pfad + kurzer Leitfaden "Wie wir aktivieren" (Triggerlogik, Rollen, Sicherheit).
27
+ agent: aktivismus_team_organisation
28
+
29
+ team_and_shift_plan:
30
+ description: >
31
+ Erstelle einen Schichtplan (Rollen: Fahrer, Sammler, Datenlogger, Teamlead).
32
+ Nutze OrganizerTool: füge To-Dos für "Team rekrutieren", "Briefing", "Materialliste", "Sicherheitscheck" hinzu.
33
+ Plane idealerweise 2-3 Personen pro Nacht-Schicht.
34
+ expected_output: >
35
+ Schichtplan-Template + konkrete nächste Schritte (To-Dos) für die kommende Woche.
36
+ agent: aktivismus_team_organisation
37
+
38
+ payroll_and_budget:
39
+ description: >
40
+ Nutze PayrollTool (450€-Annahme): plane max. Stunden pro Person basierend auf {{hourly_rate}} €/h und cap {{monthly_cap}} €.
41
+ Schlage ein faires Modell vor (Wer übernimmt welche Schichten?).
42
+ expected_output: >
43
+ Budget-Plan (Stunden/Person/Monat) + Empfehlung zur Aufgabenverteilung.
44
+ agent: finanzen_partnerschaften
45
+
46
+ legal_risk_check:
47
+ description: >
48
+ Prüfe rechtliche/politische Risiken: Aktionen an Straßen, Datenerhebung, Fotos/PR, Haftung.
49
+ Gib eine Go/No-Go Checkliste und sichere Alternativen.
50
+ expected_output: >
51
+ Risk Checklist + Go/No-Go pro Maßnahme.
52
+ agent: recht_und_politik
53
+
54
+ final_management_brief:
55
+ description: >
56
+ Erstelle ein kompaktes "Daily Management Brief":
57
+ - Was ist diese Woche wichtig (Top 5)
58
+ - Operative Hotspots
59
+ - Kalender/Schichten
60
+ - Budget/450€-Plan
61
+ - Risiken + Mitigation
62
+ Output soll sofort nutzbar sein.
63
+ expected_output: >
64
+ Ein Management-Brief in Markdown (max 1-2 Seiten) + klare Next Actions.
65
+ agent: aktivismus_team_organisation
src/latest_froschgruppe/crew.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from crewai import Agent, Crew, Process
2
+ from crewai.project import CrewBase, agent, crew
3
+
4
+ from .tools import (
5
+ GBIFOccurrenceTool,
6
+ RoadkillGBIFTool,
7
+ OrganizerTool,
8
+ PayrollTool,
9
+ CalendarTool,
10
+ )
11
+
12
@CrewBase
class LatestFroschgruppe():
    """Sequential crewAI crew for the frog-rescue organisation.

    Agent roles/goals and task definitions live in the YAML files below;
    each @agent method binds one configured agent to its tools, and @crew
    assembles them into a sequential pipeline. The `self.agents` /
    `self.tasks` collections are populated by the @CrewBase decorator.
    """
    agents_config = "config/agents.yaml"  # role / goal / backstory per agent
    tasks_config = "config/tasks.yaml"    # task descriptions + agent mapping

    @agent
    def wissenschaft_umwelt_tiere(self) -> Agent:
        # Science agent: data-driven hotspot analysis via GBIF + roadkill tools.
        return Agent(
            config=self.agents_config["wissenschaft_umwelt_tiere"],
            tools=[GBIFOccurrenceTool(), RoadkillGBIFTool()],
        )

    @agent
    def recht_und_politik(self) -> Agent:
        # Legal/political risk agent: reasoning only, no tools.
        return Agent(
            config=self.agents_config["recht_und_politik"],
            tools=[],
        )

    @agent
    def finanzen_partnerschaften(self) -> Agent:
        # Finance agent: budget / 450€ minijob planning via PayrollTool.
        return Agent(
            config=self.agents_config["finanzen_partnerschaften"],
            tools=[PayrollTool()],
        )

    @agent
    def marketing_oeffentlichkeit_medien(self) -> Agent:
        # Marketing/PR agent: reasoning only, no tools.
        return Agent(
            config=self.agents_config["marketing_oeffentlichkeit_medien"],
            tools=[],
        )

    @agent
    def aktivismus_team_organisation(self) -> Agent:
        # Operations agent: todos/volunteers (OrganizerTool) + ICS (CalendarTool).
        return Agent(
            config=self.agents_config["aktivismus_team_organisation"],
            tools=[OrganizerTool(), CalendarTool()],
        )

    @crew
    def crew(self) -> Crew:
        # Tasks run in the order declared in tasks.yaml (sequential process).
        return Crew(
            agents=self.agents,
            tasks=self.tasks,
            process=Process.sequential,
            verbose=True,
        )
src/latest_froschgruppe/main.py ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ import sys
3
+ import warnings
4
+ from datetime import datetime
5
+
6
+ from latest_froschgruppe.crew import LatestFroschgruppe
7
+
8
+ warnings.filterwarnings("ignore", category=SyntaxWarning, module="pysbd")
9
+
10
+
11
def run_manager(
    user_prompt: str,
    country: str = "DE",
    region: str = "Deutschland",
    species: str = "Rana temporaria",
    year: int | None = None,
    year_from: int | None = None,
    year_to: int | None = None,
    hourly_rate: float = 12.0,
    monthly_cap: float = 450.0,
):
    """Main entrypoint for Gradio / Hugging Face.

    The keys of `inputs` match the placeholders in tasks.yaml; all values
    are passed as strings. Missing year fields default to the current year.
    """
    base_year = year or datetime.now().year
    inputs = {
        "user_prompt": user_prompt,
        "country": country,
        "region": region,
        "species": species,
        "year": str(base_year),
        "year_from": str(year_from or base_year),
        "year_to": str(year_to or base_year),
        "hourly_rate": str(hourly_rate),
        "monthly_cap": str(monthly_cap),
    }
    return LatestFroschgruppe().crew().kickoff(inputs=inputs)
39
+
40
+
41
def run():
    """Run the crew locally with a default frog-rescue test scenario (quick test)."""
    inputs = {
        "user_prompt": "Plane eine Woche Froschrettung inklusive Hotspots, Kalender und Minijob-Budget.",
        "country": "DE",
        "region": "Deutschland",
        "species": "Rana temporaria",
        "year": str(datetime.now().year),
        "year_from": "2020",
        "year_to": str(datetime.now().year),
        "hourly_rate": "12",
        "monthly_cap": "450",
    }

    try:
        return LatestFroschgruppe().crew().kickoff(inputs=inputs)
    except Exception as e:
        # FIX: chain with `from e` so the original traceback is preserved
        # instead of being replaced by this wrapper exception.
        raise Exception(f"An error occurred while running the crew: {e}") from e
62
+
63
+
64
def train():
    """Train the crew: argv[1] = number of iterations, argv[2] = output filename."""
    inputs = {
        "user_prompt": "Training run",
        "country": "DE",
        "region": "Deutschland",
        "species": "Rana temporaria",
        "year": str(datetime.now().year),
        "year_from": "2020",
        "year_to": str(datetime.now().year),
        "hourly_rate": "12",
        "monthly_cap": "450",
    }
    try:
        LatestFroschgruppe().crew().train(
            n_iterations=int(sys.argv[1]),
            filename=sys.argv[2],
            inputs=inputs
        )
    except Exception as e:
        # FIX: chain with `from e` to preserve the original traceback.
        raise Exception(f"An error occurred while training the crew: {e}") from e
87
+
88
+
89
def replay():
    """Replay the crew execution from a specific task: argv[1] = task id."""
    try:
        LatestFroschgruppe().crew().replay(task_id=sys.argv[1])
    except Exception as e:
        # FIX: chain with `from e` to preserve the original traceback.
        raise Exception(f"An error occurred while replaying the crew: {e}") from e
97
+
98
+
99
def test():
    """Test the crew: argv[1] = number of iterations, argv[2] = evaluation LLM name."""
    inputs = {
        "user_prompt": "Test run",
        "country": "DE",
        "region": "Deutschland",
        "species": "Rana temporaria",
        "year": str(datetime.now().year),
        "year_from": "2020",
        "year_to": str(datetime.now().year),
        "hourly_rate": "12",
        "monthly_cap": "450",
    }

    try:
        LatestFroschgruppe().crew().test(
            n_iterations=int(sys.argv[1]),
            eval_llm=sys.argv[2],
            inputs=inputs
        )
    except Exception as e:
        # FIX: chain with `from e` to preserve the original traceback.
        raise Exception(f"An error occurred while testing the crew: {e}") from e
123
+
124
+
125
def run_with_trigger():
    """Run the crew from a JSON trigger payload passed as argv[1].

    Raises a plain Exception when no payload is given or it is not valid JSON;
    payload fields fall back to safe defaults when missing.
    """
    import json

    if len(sys.argv) < 2:
        raise Exception("No trigger payload provided. Please provide JSON payload as argument.")

    try:
        trigger_payload = json.loads(sys.argv[1])
    except json.JSONDecodeError as e:
        # FIX: chain with `from e` so the decode position stays visible.
        raise Exception("Invalid JSON payload provided as argument") from e

    inputs = {
        "crewai_trigger_payload": trigger_payload,
        # Safe defaults in case the trigger omits fields:
        "user_prompt": trigger_payload.get("user_prompt", ""),
        "country": trigger_payload.get("country", "DE"),
        "region": trigger_payload.get("region", "Deutschland"),
        "species": trigger_payload.get("species", "Rana temporaria"),
        "year": str(trigger_payload.get("year", datetime.now().year)),
        "year_from": str(trigger_payload.get("year_from", 2020)),
        "year_to": str(trigger_payload.get("year_to", datetime.now().year)),
        "hourly_rate": str(trigger_payload.get("hourly_rate", 12)),
        "monthly_cap": str(trigger_payload.get("monthly_cap", 450)),
    }

    try:
        return LatestFroschgruppe().crew().kickoff(inputs=inputs)
    except Exception as e:
        # FIX: chain with `from e` to preserve the original traceback.
        raise Exception(f"An error occurred while running the crew with trigger: {e}") from e
src/latest_froschgruppe/tools/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Public tool registry for latest_froschgruppe.tools.
#
# NOTE(review): RoadkillGBIFTool is imported here from `.roadkill_tool`,
# while app.py imports it from `src.latest_froschgruppe.tools.roadkill_gbif_tool`.
# Only one of these module names can exist — confirm the actual filename and
# align the two import paths.
from .gbif_occurrence_tool import GBIFOccurrenceTool
from .roadkill_tool import RoadkillGBIFTool
from .organizer_tool import OrganizerTool
from .payroll_tool import PayrollTool
from .calendar_tool import CalendarTool

# Explicit public API (used by crew.py's `from .tools import (...)`).
__all__ = [
    "GBIFOccurrenceTool",
    "RoadkillGBIFTool",
    "OrganizerTool",
    "PayrollTool",
    "CalendarTool",
]
src/latest_froschgruppe/tools/calendar_tool.py ADDED
@@ -0,0 +1,113 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from datetime import datetime, date, time, timedelta
4
+ import os
5
+
6
+ from crewai.tools import BaseTool
7
+
8
+ ARTIFACT_DIR = os.getenv("ARTIFACT_DIR", "artifacts")
9
+ os.makedirs(ARTIFACT_DIR, exist_ok=True)
10
+
11
+
12
+ def _ics_escape(s: str) -> str:
13
+ return s.replace("\\", "\\\\").replace(";", "\\;").replace(",", "\\,").replace("\n", "\\n")
14
+
15
+
16
def _event(uid: str, dtstart: datetime, dtend: datetime, summary: str, description: str = "") -> str:
    """Render one VEVENT block with floating local times (no timezone suffix)."""
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12 —
    # consider migrating to datetime.now(timezone.utc) when touching this module.
    stamp = datetime.utcnow().strftime("%Y%m%dT%H%M%SZ")
    lines = [
        "BEGIN:VEVENT",
        f"UID:{uid}",
        f"DTSTAMP:{stamp}",
        f"DTSTART:{dtstart.strftime('%Y%m%dT%H%M%S')}",
        f"DTEND:{dtend.strftime('%Y%m%dT%H%M%S')}",
        f"SUMMARY:{_ics_escape(summary)}",
        f"DESCRIPTION:{_ics_escape(description)}",
        "END:VEVENT",
    ]
    return "\n".join(lines)
29
+
30
+
31
class CalendarTool(BaseTool):
    """Generates a baseline migration-season ICS calendar and returns its path."""

    name: str = "Calendar (ICS) – Migration & Schichten"
    description: str = (
        "Erstellt einen Basis-Kalender (ICS) für Froschwanderung: "
        "Wettercheck, Zaunkontrolle, Nachtpatrouille. Gibt Pfad zur ICS zurück."
    )

    def _run(
        self,
        region: str = "Deutschland",
        year: int | None = None,
        start_month: int = 2,
        start_day: int = 15,
        end_month: int = 4,
        end_day: int = 30,
    ) -> str:
        """Write migration_{region}_{year}.ics into ARTIFACT_DIR; return a status line.

        For every day in the window three events are emitted: a weather
        check (17:00–17:10), a fence/bucket check (06:30–07:00) and a
        conditional night patrol (20:00–23:00).
        """
        # BUG FIX: `year` previously defaulted to `datetime.now().year`
        # evaluated once at import time, so a long-running process kept
        # generating calendars for the year the module was first loaded.
        # Resolve the default at call time instead (backward-compatible:
        # explicit year arguments behave exactly as before).
        if year is None:
            year = datetime.now().year
        start = date(int(year), int(start_month), int(start_day))
        end = date(int(year), int(end_month), int(end_day))

        events = []
        cur = start
        idx = 0

        while cur <= end:
            idx += 1

            # Daily weather check 17:00–17:10 (activation trigger for the patrol).
            events.append(
                _event(
                    uid=f"weathercheck-{year}-{idx}@teich",
                    dtstart=datetime.combine(cur, time(17, 0)),
                    dtend=datetime.combine(cur, time(17, 10)),
                    summary="Frosch-Wettercheck (Trigger: milde Nacht + Regen)",
                    description=f"Region: {region}. Prüfe Abendtemp/Feuchte/Regen. Bei Aktivität: Nachtteam alarmieren.",
                )
            )

            # Morning fence/bucket check 06:30–07:00.
            events.append(
                _event(
                    uid=f"fence-{year}-{idx}@teich",
                    dtstart=datetime.combine(cur, time(6, 30)),
                    dtend=datetime.combine(cur, time(7, 0)),
                    summary="Zaunkontrolle & Eimercheck",
                    description="Eimer leeren, Tiere sicher umsetzen, Daten notieren.",
                )
            )

            # Night patrol window 20:00–23:00 (activated only on migration nights).
            events.append(
                _event(
                    uid=f"patrol-{year}-{idx}@teich",
                    dtstart=datetime.combine(cur, time(20, 0)),
                    dtend=datetime.combine(cur, time(23, 0)),
                    summary="Nacht-Patrouille (nur bei Aktivität)",
                    description="Nur aktiv, wenn Wettercheck -> Zugwahrscheinlichkeit hoch.",
                )
            )

            cur += timedelta(days=1)

        ics = "\n".join(
            [
                "BEGIN:VCALENDAR",
                "VERSION:2.0",
                "PRODID:-//Teich//Froschgruppe//DE",
                *events,
                "END:VCALENDAR",
                "",
            ]
        )

        # Spaces in the region name are replaced so the path stays shell-friendly.
        out_path = os.path.join(ARTIFACT_DIR, f"migration_{region}_{year}.ics".replace(" ", "_"))
        with open(out_path, "w", encoding="utf-8") as f:
            f.write(ics)

        return f"✅ ICS erstellt: {out_path}\n\nHinweis: Die Aktivierung 'Patrouille' erfolgt operativ nach Wettercheck."
src/latest_froschgruppe/tools/custom_tool.py ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from crewai.tools import BaseTool
2
+ from typing import Type
3
+ from pydantic import BaseModel, Field
4
+
5
+
6
class MyCustomToolInput(BaseModel):
    """Input schema for MyCustomTool."""

    argument: str = Field(..., description="Description of the argument.")


class MyCustomTool(BaseTool):
    """Unmodified crewAI scaffold template; not referenced by any agent in crew.py."""

    name: str = "Name of my tool"
    description: str = (
        "Clear description for what this tool is useful for, your agent will need this information to use it."
    )
    args_schema: Type[BaseModel] = MyCustomToolInput

    def _run(self, argument: str) -> str:
        # Placeholder body from the scaffold — replace when implementing a real tool.
        return "this is an example of a tool output, ignore it and move along."
src/latest_froschgruppe/tools/gbif_occurrence_tool.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ from typing import Dict, Any, List, Tuple
3
+
4
+ from crewai.tools import BaseTool
5
+
6
+ GBIF_OCCURRENCE_SEARCH = "https://api.gbif.org/v1/occurrence/search"
7
+
8
+
9
+ def _cluster_key(lat: float, lon: float, precision: float = 0.05) -> Tuple[float, float]:
10
+ # ~0.05° ~ wenige km (je nach Breite) – gut als “Hotspot-Kachel”
11
+ return (round(lat / precision) * precision, round(lon / precision) * precision)
12
+
13
+
14
class GBIFOccurrenceTool(BaseTool):
    """CrewAI tool: query the GBIF occurrence API for one species and report
    the top-10 coordinate hotspots as a markdown table.

    Hotspots are ~0.05°-grid tiles (see _cluster_key) ranked by record count.
    """

    name: str = "GBIF Occurrence Explorer"
    description: str = (
        "Sucht GBIF-Beobachtungsdaten (Occurrences) für eine Art (scientificName), "
        "optional nach Land und Zeitraum. Gibt Hotspot-Cluster (Lat/Lon) zurück."
    )

    def _run(
        self,
        scientific_name: str,
        country: str = "DE",
        year_from: int | None = None,
        year_to: int | None = None,
        limit: int = 300,
    ) -> str:
        """Fetch up to *limit* georeferenced occurrences (paged) and render a
        markdown hotspot report.

        Raises requests.HTTPError when the GBIF API responds with an error.
        """
        limit = int(limit)
        params: Dict[str, Any] = {
            "scientificName": scientific_name,
            "country": country,
            "hasCoordinate": "true",
            "offset": 0,
        }

        # Fix: also apply the year filter when only year_to is given
        # (previously a lone year_to was silently ignored).
        if year_from is not None or year_to is not None:
            yf = int(year_from if year_from is not None else year_to)
            yt = int(year_to if year_to is not None else year_from)
            params["year"] = f"{yf},{yt}"

        results: List[Dict[str, Any]] = []
        fetched = 0

        while fetched < limit:
            # Fix: cap each page at the remaining budget so we never load more
            # than the caller requested (GBIF pages max out at 300 anyway).
            params["limit"] = min(300, limit - fetched)
            params["offset"] = fetched
            r = requests.get(GBIF_OCCURRENCE_SEARCH, params=params, timeout=30)
            r.raise_for_status()
            data = r.json()

            batch = data.get("results", [])
            if not batch:
                break

            results.extend(batch)
            fetched += len(batch)

            # Stop once GBIF reports no more records ("count" may be missing/None).
            if fetched >= int(data.get("count", 0) or 0):
                break

        # Bucket hits onto the ~0.05° grid and count occurrences per tile.
        cluster: Dict[Tuple[float, float], int] = {}
        for row in results:
            lat = row.get("decimalLatitude")
            lon = row.get("decimalLongitude")
            if lat is None or lon is None:
                continue
            key = _cluster_key(float(lat), float(lon))
            cluster[key] = cluster.get(key, 0) + 1

        top = sorted(cluster.items(), key=lambda x: x[1], reverse=True)[:10]

        lines: List[str] = []
        lines.append(f"## GBIF Hotspots – {scientific_name} ({country})")
        lines.append(f"- Treffer geladen: **{len(results)}** (max requested: {limit})")
        if year_from is not None or year_to is not None:
            yf = year_from if year_from is not None else year_to
            yt = year_to if year_to is not None else year_from
            lines.append(f"- Zeitraum: **{yf}–{yt}**")
        lines.append("")
        lines.append("| Rang | Lat | Lon | Beobachtungen | Google Maps |")
        lines.append("|---:|---:|---:|---:|---|")

        for i, ((clat, clon), cnt) in enumerate(top, start=1):
            gmaps = f"https://www.google.com/maps?q={clat},{clon}"
            lines.append(f"| {i} | {clat:.4f} | {clon:.4f} | {cnt} | {gmaps} |")

        if not top:
            lines.append("\nKeine Hotspots gefunden (evtl. Land/Art/Zeitraum anpassen).")

        return "\n".join(lines)
src/latest_froschgruppe/tools/local_docs_tool.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # src/latest_froschgruppe/tools/local_docs_tool.py
2
+ import os
3
+ import re
4
+ from typing import List, Dict, Tuple
5
+
6
+ import pdfplumber # add to requirements if not already present
7
+
8
+ DOCS_DIR = os.getenv("DOCS_DIR", "knowledge/docs")
9
+
10
def _extract_text(pdf_path: str) -> str:
    """Concatenate the text of every page of the PDF, one page per line.

    Pages without extractable text contribute an empty string.
    """
    with pdfplumber.open(pdf_path) as pdf:
        return "\n".join(page.extract_text() or "" for page in pdf.pages)
17
+
18
def search_docs(query: str, top_k: int = 5) -> List[Dict]:
    """
    Naive keyword search: returns snippets from PDFs stored in knowledge/docs.

    Scores each PDF by counting occurrences of the query terms (words of 4+
    characters, case-insensitive) and returns the *top_k* best matches as
    {"file", "score", "snippet"} dicts. A blank query yields []; a missing
    docs folder yields a single {"error": ...} entry.
    """
    q = query.strip()
    if not q:
        return []
    if not os.path.isdir(DOCS_DIR):
        return [{"error": f"Docs folder not found: {DOCS_DIR}. Lege PDFs dort ab und committe sie."}]

    # Terms are query-invariant — compute once, outside the per-file loop.
    terms = [t for t in re.split(r"\s+", q.lower()) if len(t) >= 4]

    results: List[Tuple[int, str, str]] = []  # (score, file, snippet)
    for fn in sorted(os.listdir(DOCS_DIR)):  # sorted for deterministic ordering
        if not fn.lower().endswith(".pdf"):
            continue
        text = _extract_text(os.path.join(DOCS_DIR, fn))
        lower = text.lower()

        if terms:
            score = sum(lower.count(t) for t in terms)
            # Fix: anchor the snippet at the first term that actually occurs.
            # The old code used terms[0] unconditionally, so when only a later
            # term matched, find() returned -1 and produced a garbage snippet.
            idx = min((pos for pos in (lower.find(t) for t in terms) if pos >= 0), default=-1)
        else:
            score = lower.count(q.lower())
            idx = lower.find(q.lower())

        if score <= 0:
            continue

        idx = max(idx, 0)  # defensive: never slice from a negative offset
        start = max(0, idx - 400)
        end = min(len(text), idx + 600)
        snippet = text[start:end].replace("\n", " ").strip()

        results.append((score, fn, snippet))

    results.sort(key=lambda x: x[0], reverse=True)
    return [{"file": f, "score": s, "snippet": snip} for s, f, snip in results[:top_k]]
src/latest_froschgruppe/tools/map_tool.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # src/latest_froschgruppe/tools/map_tool.py
2
+ import os
3
+ from typing import List, Dict, Optional
4
+
5
+ import folium
6
+
7
def build_map(points: List[Dict], center_lat: Optional[float]=None, center_lon: Optional[float]=None) -> str:
    """
    points: [{"lat":..., "lon":..., "label":...}, ...]
    Returns path to generated HTML map.

    Raises ValueError when no point carries usable coordinates.
    """
    # Fix: compare against None instead of relying on truthiness — a latitude
    # or longitude of exactly 0.0 is a valid coordinate and must not be dropped.
    pts = [p for p in points if p.get("lat") is not None and p.get("lon") is not None]
    if not pts:
        raise ValueError("Keine Punkte mit lat/lon.")

    # Default the map center to the first usable point.
    if center_lat is None or center_lon is None:
        center_lat = pts[0]["lat"]
        center_lon = pts[0]["lon"]

    m = folium.Map(location=[center_lat, center_lon], zoom_start=10)
    for p in pts[:200]:  # cap marker count to keep the HTML responsive
        folium.Marker([p["lat"], p["lon"]], popup=p.get("label", "")).add_to(m)

    out_dir = os.getenv("DATA_DIR", ".data")
    os.makedirs(out_dir, exist_ok=True)
    out_path = os.path.join(out_dir, "hotspots_map.html")
    m.save(out_path)
    return out_path
29
+
30
def google_maps_link(lat: float, lon: float, zoom: int = 14) -> str:
    """Build a Google Maps URL centered on (lat, lon) at the given zoom level."""
    return "https://www.google.com/maps?q={},{}&z={}".format(lat, lon, zoom)
src/latest_froschgruppe/tools/organizer_tool.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from typing import Dict, Any
4
+ import json
5
+ import os
6
+
7
+ from crewai.tools import BaseTool
8
+
9
+ DATA_DIR = os.getenv("DATA_DIR", "data")
10
+ STATE_FILE = os.path.join(DATA_DIR, "organizer.json")
11
+
12
+
13
def _load() -> Dict[str, Any]:
    """Return the persisted organizer state, or a fresh empty state if none exists."""
    os.makedirs(DATA_DIR, exist_ok=True)
    if os.path.exists(STATE_FILE):
        with open(STATE_FILE, "r", encoding="utf-8") as fh:
            return json.load(fh)
    return {"todos": [], "volunteers": []}
19
+
20
+
21
def _save(state: Dict[str, Any]) -> None:
    """Persist the organizer state as pretty-printed, non-ASCII-safe JSON."""
    os.makedirs(DATA_DIR, exist_ok=True)
    serialized = json.dumps(state, ensure_ascii=False, indent=2)
    with open(STATE_FILE, "w", encoding="utf-8") as fh:
        fh.write(serialized)
25
+
26
+
27
class OrganizerTool(BaseTool):
    """
    CrewAI Tool: Local organizer for todos and volunteers (stored as JSON).
    """

    name: str = "Organizer (To-Dos & Team)"
    description: str = (
        "Verwaltet To-Dos und Volunteer-Liste lokal (JSON). "
        "Aktionen: add_todo, list_todos, complete_todo, add_volunteer, list_volunteers."
    )

    def _run(self, action: str, payload_json: str = "{}") -> str:
        """Route *action* to its handler; payload_json carries the arguments as JSON."""
        state = _load()
        payload = json.loads(payload_json or "{}")

        dispatch = {
            "add_todo": self._add_todo,
            "list_todos": self._list_todos,
            "complete_todo": self._complete_todo,
            "add_volunteer": self._add_volunteer,
            "list_volunteers": self._list_volunteers,
        }
        handler = dispatch.get(action)
        if handler is None:
            return f"❌ Unbekannte action: {action}"
        return handler(state, payload)

    def _add_todo(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        # Create a new todo with a sequential id (todos are never deleted).
        title = payload.get("title", "").strip()
        priority = payload.get("priority", "M")
        if not title:
            return "❌ title fehlt"
        todo = {
            "id": f"T{len(state['todos'])+1:04d}",
            "title": title,
            "priority": priority,
            "done": False,
        }
        state["todos"].append(todo)
        _save(state)
        return f"✅ To-Do angelegt: {todo['id']} – {title} (Prio {priority})"

    def _list_todos(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        # Render all todos as a markdown table.
        todos = state["todos"]
        if not todos:
            return "Keine To-Dos."
        rows = [
            f"| {t['id']} | {t['priority']} | {t['done']} | {t['title']} |"
            for t in todos
        ]
        return "\n".join(
            ["## To-Dos", "| ID | Prio | Done | Title |", "|---|---|---|---|", *rows]
        )

    def _complete_todo(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        # Mark the first todo with a matching id as done.
        tid = payload.get("id")
        match = next((t for t in state["todos"] if t["id"] == tid), None)
        if match is None:
            return f"❌ To-Do nicht gefunden: {tid}"
        match["done"] = True
        _save(state)
        return f"✅ erledigt: {tid}"

    def _add_volunteer(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        name = payload.get("name", "").strip()
        phone = payload.get("phone", "").strip()
        availability = payload.get("availability", "").strip()  # e.g. "Mo/Di ab 18 Uhr"
        if not name:
            return "❌ name fehlt"
        state["volunteers"].append(
            {"name": name, "phone": phone, "availability": availability}
        )
        _save(state)
        return f"✅ Volunteer hinzugefügt: {name}"

    def _list_volunteers(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        # Render all volunteers as a markdown table.
        vols = state["volunteers"]
        if not vols:
            return "Keine Volunteers."
        rows = [
            f"| {v['name']} | {v.get('phone','')} | {v.get('availability','')} |"
            for v in vols
        ]
        return "\n".join(
            ["## Volunteers", "| Name | Phone | Availability |", "|---|---|---|", *rows]
        )
src/latest_froschgruppe/tools/payroll_tool.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from typing import Dict, Any
4
+ import json
5
+ import os
6
+
7
+ from crewai.tools import BaseTool
8
+
9
+ DATA_DIR = os.getenv("DATA_DIR", "data")
10
+ STATE_FILE = os.path.join(DATA_DIR, "payroll.json")
11
+
12
+
13
def _load() -> Dict[str, Any]:
    """Return the persisted payroll state, or a fresh empty state if none exists."""
    os.makedirs(DATA_DIR, exist_ok=True)
    if os.path.exists(STATE_FILE):
        with open(STATE_FILE, "r", encoding="utf-8") as fh:
            return json.load(fh)
    return {"workers": {}, "logs": []}
19
+
20
+
21
def _save(state: Dict[str, Any]) -> None:
    """Persist the payroll state as pretty-printed, non-ASCII-safe JSON."""
    os.makedirs(DATA_DIR, exist_ok=True)
    serialized = json.dumps(state, ensure_ascii=False, indent=2)
    with open(STATE_FILE, "w", encoding="utf-8") as fh:
        fh.write(serialized)
25
+
26
+
27
class PayrollTool(BaseTool):
    """
    CrewAI Tool: Plan and track minijob budget (450€ assumption).
    Stored locally as JSON in DATA_DIR/payroll.json.
    """

    name: str = "Payroll & Budget (450€)"
    description: str = (
        "Plant und trackt Minijob-Budget (450€ Annahme). "
        "Aktionen: plan, add_worker, log_hours, report."
    )

    def _run(self, action: str, payload_json: str = "{}") -> str:
        """Route *action* to its handler; payload_json carries the arguments as JSON."""
        state = _load()
        payload = json.loads(payload_json or "{}")

        dispatch = {
            "plan": self._plan,
            "add_worker": self._add_worker,
            "log_hours": self._log_hours,
            "report": self._report,
        }
        handler = dispatch.get(action)
        if handler is None:
            return f"❌ Unbekannte action: {action}"
        return handler(state, payload)

    def _plan(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        # Pure budget calculation — state is not touched.
        monthly_cap = float(payload.get("monthly_cap", 450))
        hourly_rate = float(payload.get("hourly_rate", 12))
        max_hours = monthly_cap / hourly_rate if hourly_rate > 0 else 0
        return "\n".join(
            [
                "## 450€-Plan",
                f"- Monatsbudget: **{monthly_cap:.2f}€**",
                f"- Stundenlohn: **{hourly_rate:.2f}€**",
                f"- Max Stunden/Monat pro Person: **{max_hours:.2f}h**",
                "",
                f"Tipp: Für Schichten à 3h sind das ca. **{max_hours/3:.1f} Schichten/Monat**.",
            ]
        )

    def _add_worker(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        name = payload.get("name", "").strip()
        hourly_rate = float(payload.get("hourly_rate", 12))
        monthly_cap = float(payload.get("monthly_cap", 450))
        if not name:
            return "❌ name fehlt"
        state["workers"][name] = {"hourly_rate": hourly_rate, "monthly_cap": monthly_cap}
        _save(state)
        return f"✅ Worker angelegt: {name} ({hourly_rate}€/h, cap {monthly_cap}€)"

    def _log_hours(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        name = payload.get("name", "").strip()
        hours = float(payload.get("hours", 0))
        date = payload.get("date", "").strip()  # "2026-03-14"
        note = payload.get("note", "").strip()
        if not name or hours <= 0:
            return "❌ name/hours ungültig"
        state["logs"].append({"name": name, "hours": hours, "date": date, "note": note})
        _save(state)
        return f"✅ Stunden geloggt: {name} – {hours}h ({date})"

    def _report(self, state: Dict[str, Any], payload: Dict[str, Any]) -> str:
        # Aggregate logged hours per worker, then render a markdown table.
        totals: Dict[str, float] = {}
        for entry in state["logs"]:
            totals[entry["name"]] = totals.get(entry["name"], 0.0) + float(entry["hours"])

        lines = ["## Payroll Report", "| Name | Hours | Est. € | Cap € |", "|---|---:|---:|---:|"]
        for worker, hrs in totals.items():
            info = state["workers"].get(worker, {"hourly_rate": 12, "monthly_cap": 450})
            est = hrs * float(info["hourly_rate"])
            lines.append(
                f"| {worker} | {hrs:.2f} | {est:.2f} | {float(info['monthly_cap']):.2f} |"
            )

        if not totals:
            lines.append("\nKeine Logs vorhanden.")
        return "\n".join(lines)
src/latest_froschgruppe/tools/roadkill_gbif_tool.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # src/latest_froschgruppe/tools/roadkill_gbif_tool.py
2
+
3
+ import requests
4
+ from typing import Dict, Any, List, Tuple
5
+
6
+ from crewai.tools import BaseTool
7
+
8
+ GBIF_API = "https://api.gbif.org/v1"
9
+
10
+ # Roadkill.at Dataset auf GBIF (aus GBIF Dataset-Seite)
11
+ ROADKILL_AT_DATASET_KEY = "d0d5ef85-71b2-4da6-b6f6-c1c3d60987d3"
12
+
13
+
14
+ def _cluster_key(lat: float, lon: float, precision: float = 0.05) -> Tuple[float, float]:
15
+ # ~0.05° ~ wenige km (je nach Breite) – gut als “Hotspot-Kachel”
16
+ return (round(lat / precision) * precision, round(lon / precision) * precision)
17
+
18
+
19
def gbif_roadkill_search(
    country: str | None = None,
    scientific_name: str | None = None,
    year_from: int | None = None,
    year_to: int | None = None,
    limit: int = 100,
    offset: int = 0,
    dataset_key: str = ROADKILL_AT_DATASET_KEY,
) -> Dict[str, Any]:
    """
    Pull roadkill occurrences from a GBIF roadkill dataset.
    Returns raw GBIF response JSON.

    A missing year bound is filled in from the other one; when both are
    absent no date filter is applied.
    """
    query: Dict[str, Any] = {
        "datasetKey": dataset_key,
        "limit": min(int(limit), 300),   # GBIF caps pages at 300
        "offset": max(int(offset), 0),
    }
    if country:
        query["country"] = country  # e.g. "DE", "AT"
    if scientific_name:
        query["scientificName"] = scientific_name  # e.g. "Rana temporaria"
    if not (year_from is None and year_to is None):
        start = int(year_from) if year_from is not None else int(year_to)
        end = int(year_to) if year_to is not None else int(year_from)
        query["eventDate"] = f"{start}-01-01,{end}-12-31"

    response = requests.get(f"{GBIF_API}/occurrence/search", params=query, timeout=30)
    response.raise_for_status()
    return response.json()
49
+
50
+
51
def simplify_occurrences(occ_results: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Flatten a raw GBIF occurrence-search response into compact records
    (species, date, country, coordinates, record metadata)."""

    def _compact(it: Dict[str, Any]) -> Dict[str, Any]:
        return {
            "species": it.get("species") or it.get("scientificName"),
            "eventDate": it.get("eventDate"),
            "country": it.get("country"),
            "lat": it.get("decimalLatitude"),
            "lon": it.get("decimalLongitude"),
            "basisOfRecord": it.get("basisOfRecord"),
            "occurrenceID": it.get("occurrenceID") or it.get("key"),
            "datasetKey": it.get("datasetKey"),
        }

    return [_compact(it) for it in occ_results.get("results", [])]
67
+
68
+
69
class RoadkillGBIFTool(BaseTool):
    """CrewAI tool: page through a GBIF roadkill dataset (default roadkill.at)
    and render the top-10 coordinate hotspot tiles as a markdown table."""

    name: str = "Roadkill (GBIF Dataset) Explorer"
    description: str = (
        "Sucht Roadkill-Occurrences in einem GBIF-Roadkill-Dataset (Default: roadkill.at). "
        "Optional nach Land, Art und Zeitraum. Gibt Hotspot-Cluster (Lat/Lon) zurück."
    )

    def _run(
        self,
        country: str = "DE",
        scientific_name: str = "Rana temporaria",
        year_from: int | None = None,
        year_to: int | None = None,
        limit: int = 300,
        dataset_key: str = ROADKILL_AT_DATASET_KEY,
    ) -> str:
        """Fetch up to *limit* occurrences via gbif_roadkill_search and return
        a markdown hotspot report (network errors propagate as HTTPError)."""
        limit = int(limit)
        # Blank strings are normalized to None so the filters are omitted entirely.
        params_country = country.strip() if country else None
        params_sci = scientific_name.strip() if scientific_name else None

        results: List[Dict[str, Any]] = []
        fetched = 0

        # Page through the API; each request asks only for the remaining budget.
        while fetched < limit:
            data = gbif_roadkill_search(
                country=params_country,
                scientific_name=params_sci,
                year_from=year_from,
                year_to=year_to,
                limit=min(300, limit - fetched),
                offset=fetched,
                dataset_key=dataset_key,
            )

            batch = data.get("results", [])
            if not batch:
                break

            results.extend(batch)
            fetched += len(batch)

            # Stop once GBIF reports no more records ("count" may be missing/None).
            if fetched >= int(data.get("count", 0) or 0):
                break

        # Bucket coordinates onto a ~0.05° grid and count hits per tile.
        cluster: Dict[Tuple[float, float], int] = {}
        for row in results:
            lat = row.get("decimalLatitude")
            lon = row.get("decimalLongitude")
            if lat is None or lon is None:
                continue
            key = _cluster_key(float(lat), float(lon))
            cluster[key] = cluster.get(key, 0) + 1

        # Top-10 tiles by record count.
        top = sorted(cluster.items(), key=lambda x: x[1], reverse=True)[:10]

        # Render the markdown report (header, filter summary, hotspot table).
        lines: List[str] = []
        lines.append("## Roadkill Hotspots (GBIF)")
        lines.append(f"- Dataset: **{dataset_key}**")
        if params_sci:
            lines.append(f"- Art: **{params_sci}**")
        if params_country:
            lines.append(f"- Land: **{params_country}**")
        lines.append(f"- Treffer geladen: **{len(results)}** (max requested: {limit})")
        if year_from is not None or year_to is not None:
            yf = year_from if year_from is not None else year_to
            yt = year_to if year_to is not None else year_from
            lines.append(f"- Zeitraum: **{yf}–{yt}**")
        lines.append("")
        lines.append("| Rang | Lat | Lon | Beobachtungen | Google Maps |")
        lines.append("|---:|---:|---:|---:|---|")

        for i, ((clat, clon), cnt) in enumerate(top, start=1):
            gmaps = f"https://www.google.com/maps?q={clat},{clon}"
            lines.append(f"| {i} | {clat:.4f} | {clon:.4f} | {cnt} | {gmaps} |")

        if not top:
            lines.append("\nKeine Hotspots gefunden (evtl. Land/Art/Zeitraum anpassen).")

        return "\n".join(lines)
src/latest_froschgruppe/tools/roadkill_tool.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass
3
+ from typing import Optional, Dict, Any, List, Tuple
4
+ import requests
5
+
6
+ from crewai.tools import BaseTool
7
+
8
+ GBIF_OCCURRENCE_SEARCH = "https://api.gbif.org/v1/occurrence/search"
9
+
10
+ # Roadkill (Citizen Science) Dataset auf GBIF (aus Suchergebnis):
11
+ # https://www.gbif.org/dataset/d0d5ef85-71b2-4da6-b6f6-c1c3d60987d3
12
+ DEFAULT_ROADKILL_DATASET_KEY = "d0d5ef85-71b2-4da6-b6f6-c1c3d60987d3"
13
+
14
+
15
+ def _cluster_key(lat: float, lon: float, precision: float = 0.05) -> Tuple[float, float]:
16
+ return (round(lat / precision) * precision, round(lon / precision) * precision)
17
+
18
+
19
class RoadkillGBIFTool(BaseTool):
    """CrewAI tool: query a GBIF roadkill dataset directly and report the
    top-10 coordinate hotspot tiles as a markdown table.

    Fix: removed the @dataclass decorator — BaseTool is pydantic-based, and a
    dataclass-generated __init__ shadows pydantic's, breaking instantiation.
    The sibling tool modules all use plain classes for the same reason.
    """

    name: str = "Roadkill Hotspot Finder (GBIF)"
    description: str = (
        "Nutzt GBIF-Roadkill-Datasets (default: Roadkill citizen science) "
        "und erzeugt Hotspots, optional für eine Art und Land."
    )

    def _run(
        self,
        country: str = "DE",
        scientific_name: Optional[str] = None,
        dataset_key: str = DEFAULT_ROADKILL_DATASET_KEY,
        year_from: Optional[int] = None,
        year_to: Optional[int] = None,
        limit: int = 300,
    ) -> str:
        """Fetch up to *limit* georeferenced occurrences (paged) and render a
        markdown hotspot report. Raises requests.HTTPError on API errors."""
        limit = int(limit)
        params: Dict[str, Any] = {
            "datasetKey": dataset_key,
            "country": country,
            "hasCoordinate": "true",
            "offset": 0,
        }
        if scientific_name:
            params["scientificName"] = scientific_name
        if year_from:
            params["year"] = f"{year_from},{year_to or year_from}"

        results: List[Dict[str, Any]] = []
        fetched = 0

        while fetched < limit:
            # Fix: cap each page at the remaining budget so we never load more
            # than requested (GBIF pages max out at 300 anyway).
            params["limit"] = min(300, limit - fetched)
            params["offset"] = fetched
            r = requests.get(GBIF_OCCURRENCE_SEARCH, params=params, timeout=30)
            r.raise_for_status()
            data = r.json()
            batch = data.get("results", [])
            if not batch:
                break
            results.extend(batch)
            fetched += len(batch)
            # Fix: "count" may be missing or None — guard the comparison
            # (consistent with roadkill_gbif_tool.py).
            if fetched >= int(data.get("count", 0) or 0):
                break

        # Bucket coordinates onto a ~0.05° grid and count hits per tile.
        cluster: Dict[Tuple[float, float], int] = {}
        for row in results:
            lat = row.get("decimalLatitude")
            lon = row.get("decimalLongitude")
            if lat is None or lon is None:
                continue
            key = _cluster_key(float(lat), float(lon))
            cluster[key] = cluster.get(key, 0) + 1

        top = sorted(cluster.items(), key=lambda x: x[1], reverse=True)[:10]

        title = "## Roadkill Hotspots (GBIF)"
        if scientific_name:
            title += f" – {scientific_name}"
        title += f" ({country})"

        lines = [title]
        lines.append(f"- DatasetKey: `{dataset_key}`")
        lines.append(f"- Treffer geladen: **{len(results)}**")
        if year_from:
            lines.append(f"- Zeitraum: **{year_from}–{year_to or year_from}**")
        lines.append("")
        lines.append("| Rang | Lat | Lon | Roadkill-Funde | Google Maps |")
        lines.append("|---:|---:|---:|---:|---|")

        for i, ((clat, clon), cnt) in enumerate(top, start=1):
            gmaps = f"https://www.google.com/maps?q={clat},{clon}"
            lines.append(f"| {i} | {clat:.4f} | {clon:.4f} | {cnt} | {gmaps} |")

        if not top:
            lines.append("\nKeine Hotspots gefunden – ggf. Land/Dataset/Zeitraum anpassen.")

        return "\n".join(lines)
src/latest_froschgruppe/tools/storage.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from pathlib import Path
3
+
4
def get_data_dir() -> Path:
    """Return the writable data directory, creating it if necessary.

    Prefers /data (present when persistent storage is enabled on HF Spaces),
    otherwise falls back to $DATA_DIR or ./data.
    """
    persistent = Path("/data")
    base = persistent if persistent.exists() else Path(os.getenv("DATA_DIR", "./data"))
    base.mkdir(parents=True, exist_ok=True)
    return base
test.py ADDED
File without changes
uv.lock ADDED
The diff for this file is too large to render. See raw diff