Jomaric committed on
Commit
8e32f64
·
verified ·
1 Parent(s): 58869a7

Deploying Community Change Maker v1

Browse files
Files changed (2) hide show
  1. app.py +184 -0
  2. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,184 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from dotenv import load_dotenv
3
+ import gradio as gr
4
+ from daggr import FnNode, Graph
5
+ from huggingface_hub import InferenceClient, get_token
6
+
7
# Load environment variables from a local .env file (if present).
load_dotenv()

# Startup Check: warn loudly in the logs if the token that every agent
# call below depends on is missing, instead of failing on first request.
token = os.environ.get("HF_TOKEN")
if token:
    # Plain string — the original used an f-string with no placeholders.
    print("✅ STARTUP: HF_TOKEN loaded successfully.")
else:
    print("❌ STARTUP: HF_TOKEN NOT FOUND in environment or .env file.")
16
+
17
# Helper function to query any model with Fallback
def query_model_with_fallback(prompt, primary_model_id="Qwen/Qwen2.5-72B-Instruct", fallback_model_id="meta-llama/Llama-3.3-70B-Instruct"):
    """Send `prompt` as a single-turn chat to the primary model; on any
    failure, retry once against the fallback model.

    Returns the assistant's reply text, or an "Error: ..." string when no
    token is available or both models fail. Callers render the return
    value directly, so this function never lets an exception escape.
    """
    # Resolve the token BEFORE entering any try block. In the original
    # code this assignment lived inside the primary try, so a failure
    # there left `final_token` unbound and the fallback branch crashed
    # with NameError instead of reporting the real error.
    try:
        final_token = os.environ.get("HF_TOKEN") or get_token()
    except Exception:
        final_token = None
    if not final_token:
        return "Error: No HF_TOKEN found. Check .env file."

    messages = [{"role": "user", "content": prompt}]
    last_error = None
    # Single request path shared by primary and fallback (was duplicated).
    for label, model_id in (("Primary", primary_model_id), ("Fallback", fallback_model_id)):
        try:
            client = InferenceClient(model_id, token=final_token)
            response = client.chat_completion(messages, max_tokens=1000)
            return response.choices[0].message.content
        except Exception as e:
            print(f"WARNING: {label} model {model_id} failed: {e}")
            last_error = e
    return f"Error: All models failed. {last_error}"
40
+
41
+ # --- 10 "CHANGE MAKING" AGENTS ---
42
+
43
def query_agent_unsexy(ctx):
    """'Unsexy Question' strategist: one idea from tackling unglamorous topics."""
    return query_model_with_fallback(
        f"""You are the 'Unsexy Question' expert. Strategy: Address unsexy topics (like parking) where low-hanging fruit exists. Make ideas palatable to all politics. Maintain strict message discipline.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
48
+
49
def query_agent_public(ctx):
    """'Public Character' strategist: one idea about being visibly helpful in public."""
    return query_model_with_fallback(
        f"""You are the 'Public Character' expert. Strategy: Be present, public, and helpful. Offer small services (directions, advice, lending items) to unrelated people. Be a 'warm body' in public space.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
54
+
55
def query_agent_nucleation(ctx):
    """'Social Nucleation' strategist: one idea for creating bonding moments."""
    return query_model_with_fallback(
        f"""You are the 'Social Nucleation' expert. Strategy: Use 'unreasonable attentiveness' to create social 'nucleation sites'. create slightly uneven experiences or 'furrows' where people are forced to bond (like a chaotic event).
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
60
+
61
def query_agent_onion(ctx):
    """'Onion Merchant' strategist: one idea about honestly serving a niche need."""
    return query_model_with_fallback(
        f"""You are the 'Onion Merchant' expert. Strategy: Find a niche thing people really want and provide it honestly. Do an honest day's work. Be the middle ground between greedy capitalism and violent revolution.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
66
+
67
def query_agent_broker(ctx):
    """'Honest Broker' strategist: one idea about being the honest actor in a shady field."""
    return query_model_with_fallback(
        f"""You are the 'Honest Broker' expert. Strategy: Enter a 'skeevy' or underserved industry (like immigration law, home repair, local news) and be the one honest, competent person there. Be a middle-class mensch.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
72
+
73
def query_agent_statistic(ctx):
    """'Statistic Improver' strategist: one idea about refining dubious public data."""
    return query_model_with_fallback(
        f"""You are the 'Statistic Improver' expert. Strategy: Find a dubious viral statistic (from older/bad studies) and do the work to improve its precision. Render a public service by fact-checking and refining data.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
78
+
79
def query_agent_hobbit(ctx):
    """'Hobbit' strategist: one idea about small, non-martyr acts of courage."""
    return query_model_with_fallback(
        f"""You are the 'Hobbit' expert. Strategy: Practice 'hobbitian courage'. Don't be a martyr. Take small risks (deviate from a checklist, question a default) to improve a system without destroying your life.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
84
+
85
def query_agent_system(ctx):
    """'System Fixer' strategist: one idea about de-gumming a painful bureaucracy."""
    return query_model_with_fallback(
        f"""You are the 'System Fixer' expert. Strategy: 'De-gum the gears' of bureaucracy. Find a sub-system that terrorizes people (like confusing fines or hospital bills) and make it work better.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
90
+
91
def query_agent_audience(ctx):
    """'Good Audience' strategist: one idea about supporting local creators by showing up."""
    return query_model_with_fallback(
        f"""You are the 'Good Audience' expert. Strategy: Show up. Laugh at jokes. Support creators. Pluck diamonds from the rough and share them. Be the audience that great work requires.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
96
+
97
def query_agent_acquaintance(ctx):
    """'Good Acquaintance' strategist: one idea about building weak neighborly ties."""
    return query_model_with_fallback(
        f"""You are the 'Acquaintance' expert. Strategy: Don't try to be best friends. Just be a 'Good Acquaintance'. Know neighbors' names so you can notice when something is wrong (like a gas leak). Build weak ties.
User's Community Context: "{ctx}"
Task: Suggest ONE specific, actionable way this user can apply your strategy to improve their community."""
    )
102
+
103
+
104
+ # --- NODES & GRAPH ---
105
+
106
# 1. Input Node
def pass_context(context_text):
    """Normalize the user's community description, substituting a generic
    default when the field is blank, and announce the run via gr.Info."""
    cleaned = context_text.strip()
    final_context = context_text if cleaned else "I live in a typical town and want to make a difference."
    gr.Info(f"Brainstorming changes for: {final_context[:30]}...")
    return final_context
112
+
113
# Visible textbox where the user describes their situation.
context_box = gr.Textbox(
    label="My Community / Situation",
    value="I am a student living in a college town.",
    lines=2,
    placeholder="Describe your context...",
)

# Entry node: runs pass_context and exposes its result as `ctx`
# (hidden textbox) for every downstream agent node.
input_node = FnNode(
    fn=pass_context,
    name="1. Context Definition",
    inputs={"context_text": context_box},
    outputs={"ctx": gr.Textbox(visible=False)},
)
126
+
127
# 2. Parallel Agents
# Each node takes 'ctx' from input_node. The output textboxes stay visible
# so the user can watch the 10 distinct ideas arrive independently.
# (function, display title) pairs — order fixes both numbering and labels.
_AGENT_SPECS = [
    (query_agent_unsexy, "Unsexy Question"),
    (query_agent_public, "Public Character"),
    (query_agent_nucleation, "Social Nucleation"),
    (query_agent_onion, "Onion Merchant"),
    (query_agent_broker, "Honest Broker"),
    (query_agent_statistic, "Statistic Improver"),
    (query_agent_hobbit, "The Hobbit"),
    (query_agent_system, "System Fixer"),
    (query_agent_audience, "Good Audience"),
    (query_agent_acquaintance, "Good Acquaintance"),
]
agents = [
    FnNode(
        fn=agent_fn,
        name=title,
        inputs={"ctx": input_node.ctx},
        outputs={"r": gr.Textbox(label=f"{position}. {title} Idea", lines=4, visible=True)},
    )
    for position, (agent_fn, title) in enumerate(_AGENT_SPECS, start=1)
]
142
+
143
# 3. Helper to aggregate
def aggregate_and_select(*responses):
    """Merge the 10 agent replies, in order, into one Markdown report."""
    gr.Info("All experts reported. Compiling list...")

    titles = [
        "Unsexy Question", "Public Character", "Social Nucleation", "Onion Merchant",
        "Honest Broker", "Statistic Improver", "The Hobbit", "System Fixer",
        "Good Audience", "Good Acquaintance",
    ]
    sections = (
        f"### {num}. {title}\n{idea}\n\n---\n\n"
        for num, (title, idea) in enumerate(zip(titles, responses), start=1)
    )
    return "# 10 Ideas for Change\n\n" + "".join(sections)
159
+
160
# 4. Aggregator Node
# Fans in every agent's `r` output under keys r0..r9.
aggregator_inputs = {f"r{idx}": node.r for idx, node in enumerate(agents)}

def aggregator_wrapper(r0, r1, r2, r3, r4, r5, r6, r7, r8, r9):
    """Named-parameter shim: daggr maps the inputs-dict keys (r0..r9)
    onto matching function parameter names, so the parameters must be
    spelled out rather than taken as *args."""
    return aggregate_and_select(r0, r1, r2, r3, r4, r5, r6, r7, r8, r9)

output_node = FnNode(
    fn=aggregator_wrapper,
    name="3. Aggregator",
    inputs=aggregator_inputs,  # r0 -> agents[0].r, r1 -> agents[1].r, ...
    outputs={"final_output": gr.Markdown(label="All 10 Community Ideas")},
)
176
+
177
# Launch: assemble the full DAG (input -> 10 parallel agents -> aggregator).
graph = Graph(
    name="Community Change Maker (Based on '10 Ways to Change the World')",
    nodes=[input_node, *agents, output_node],
)

if __name__ == "__main__":
    graph.launch()
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ gradio>=6.0.2
2
+ huggingface_hub>=0.26.0
3
+ daggr>=0.5.4
4
+ python-dotenv