tjido committed on
Commit
ac0bfe4
·
verified ·
1 Parent(s): a2e43af

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +77 -100
app.py CHANGED
@@ -2,120 +2,97 @@ import os
2
  import gradio as gr
3
  import torch
4
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 
5
 
6
- # Load Hugging Face token from environment
7
  HF_TOKEN = os.getenv("HF_TOKEN")
 
8
 
9
- # Cache models globally
10
- llama_model = None
11
- llama_tokenizer = None
12
- mistral_model = None
13
- mistral_tokenizer = None
14
 
15
- # ---------- AI CALLS ----------
16
- def call_llama(prompt):
17
- global llama_model, llama_tokenizer
18
- try:
19
- if llama_model is None or llama_tokenizer is None:
20
- llama_tokenizer = AutoTokenizer.from_pretrained(
21
- "meta-llama/Meta-Llama-3-8B-Instruct", token=HF_TOKEN
22
- )
23
- llama_model = AutoModelForCausalLM.from_pretrained(
24
- "meta-llama/Meta-Llama-3-8B-Instruct",
25
- torch_dtype=torch.float16,
26
- device_map="auto",
27
- token=HF_TOKEN
28
- )
29
- pipe = pipeline("text-generation", model=llama_model, tokenizer=llama_tokenizer)
30
- result = pipe(prompt, max_new_tokens=350, temperature=0.7)
31
- return result[0]["generated_text"].strip()
32
- except Exception as e:
33
- return f"⚠️ LLaMA failed: {str(e)}\nSwitching to Mistral...\n\n" + call_mistral(prompt)
34
 
35
def call_mistral(prompt):
    """Generate a completion with Mistral-7B-Instruct (the fallback model).

    Model and tokenizer are loaded lazily on first use and cached in the
    module-level globals ``mistral_model`` / ``mistral_tokenizer``.

    Args:
        prompt: Fully formatted prompt string to feed the model.

    Returns:
        The generated text, stripped of surrounding whitespace, or an error
        message string if generation fails.
    """
    global mistral_model, mistral_tokenizer
    try:
        if mistral_model is None or mistral_tokenizer is None:
            mistral_tokenizer = AutoTokenizer.from_pretrained(
                "mistralai/Mistral-7B-Instruct-v0.1", token=HF_TOKEN
            )
            mistral_model = AutoModelForCausalLM.from_pretrained(
                "mistralai/Mistral-7B-Instruct-v0.1",
                torch_dtype=torch.float16,
                device_map="auto",
                token=HF_TOKEN
            )
        pipe = pipeline("text-generation", model=mistral_model, tokenizer=mistral_tokenizer)
        # do_sample=True is required for temperature to take effect;
        # return_full_text=False avoids echoing the prompt in the output.
        result = pipe(
            prompt,
            max_new_tokens=300,
            temperature=0.7,
            do_sample=True,
            return_full_text=False,
        )
        return result[0]['generated_text'].strip()
    except Exception as e:
        return f"⚠️ Mistral model also failed: {str(e)}"
53
-
54
- # ---------- MAIN FUNCTION ----------
55
# ---------- MAIN FUNCTION ----------
def find_funding(region, project_type, community_focus):
    """Assemble a funding-guide prompt from the three dropdown selections
    and return the model's grant suggestions.

    Args:
        region: Selected Canadian province/territory.
        project_type: Selected project category.
        community_focus: Selected community group.

    Returns:
        "AI Recommendation:" followed by the model output, or a prompt to
        fill in all fields when any selection is missing.
    """
    # Every dropdown must have a value before we query the model.
    if not (region and project_type and community_focus):
        return "Please select all fields to get funding suggestions."

    prompt = f"""<|system|>
You are an Indigenous Community Funding Guide. Based on the inputs below, suggest relevant grants available in Canada for Indigenous Peoples.
- Region: {region}
- Project Type: {project_type}
- Community Focus: {community_focus}
Respond with:
1. Grant Name
2. Who it's for
3. Deadline (if known)
4. Where to apply (optional)
</s>"""

    answer = call_llama(prompt)
    return "AI Recommendation:\n\n" + answer
71
 
72
- # ---------- THEME ----------
73
- custom_theme = gr.themes.Soft(
 
 
 
 
 
 
74
  primary_hue="orange",
75
- secondary_hue="blue",
76
  font=[gr.themes.GoogleFont("Inter"), "ui-sans-serif", "system-ui", "sans-serif"],
77
- radius_size=gr.themes.sizes.radius_sm,
78
- )
79
-
80
- # ---------- UI ELEMENTS ----------
81
- region_dropdown = gr.Dropdown(
82
- choices=["Alberta", "British Columbia", "Manitoba", "Nunavut", "Ontario", "Quebec", "Saskatchewan", "Yukon"],
83
- label="Select your region"
84
  )
85
 
86
- project_type_dropdown = gr.Dropdown(
87
- choices=["Language Revitalization", "Arts & Culture", "Youth Programs", "Education", "Entrepreneurship", "Land-based Projects"],
88
- label="Type of project"
89
- )
90
-
91
- focus_dropdown = gr.Dropdown(
92
- choices=["Youth", "Elders", "Women", "Two-Spirit", "Entrepreneurs", "Community-wide"],
93
- label="Community focus"
94
- )
95
-
96
- output_box = gr.Textbox(label="Suggested Grants", lines=8)
97
-
98
- # ---------- INTERFACE ----------
99
- iface = gr.Interface(
100
  fn=find_funding,
101
- inputs=[region_dropdown, project_type_dropdown, focus_dropdown],
102
- outputs=output_box,
103
- title="💸 Indigenous Grants Finder",
104
- description="""### AI-powered tool to help Indigenous communities in Canada discover relevant grants and funding opportunities.
105
- Select your region, project type, and community focus to get started.""",
106
- theme=custom_theme
 
 
 
 
 
 
 
 
107
  )
108
 
109
- # ---------- FOOTER ----------
110
- footer_html = """## ✨ Why This Matters
111
- This tool shows how AI can help Indigenous communities **access resources** and **reclaim agency**.
112
- All AI suggestions should be **verified by the community** and used **with consent and cultural care**.
113
- Created with ❤️ by Shingai Manjengwa, @tjido"""
114
-
115
- demo = gr.Blocks(theme=custom_theme, fill_height=True)
116
- with demo:
117
- iface.render()
118
- gr.Markdown(footer_html)
119
-
120
  if __name__ == "__main__":
121
  demo.launch()
 
2
  import gradio as gr
3
  import torch
4
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
5
+ from langchain.tools.tavily_search import TavilySearchResults
6
 
7
+ # Load environment variables
8
  HF_TOKEN = os.getenv("HF_TOKEN")
9
+ TAVILY_API_KEY = os.getenv("TAVILY_API_KEY")
10
 
11
+ # Check API key
12
+ if not TAVILY_API_KEY:
13
+ raise ValueError("Missing TAVILY_API_KEY in environment variables")
 
 
14
 
15
+ # Load Mistral model
16
+ tokenizer = AutoTokenizer.from_pretrained(
17
+ "mistralai/Mistral-7B-Instruct-v0.1",
18
+ token=HF_TOKEN
19
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
 
21
+ model = AutoModelForCausalLM.from_pretrained(
22
+ "mistralai/Mistral-7B-Instruct-v0.1",
23
+ torch_dtype=torch.float16,
24
+ device_map="auto",
25
+ token=HF_TOKEN
26
+ )
27
+
28
+ llm = pipeline("text-generation", model=model, tokenizer=tokenizer)
29
+
30
+ # Tavily Search
31
+ search = TavilySearchResults(api_key=TAVILY_API_KEY)
32
+
33
+ # Main function
34
# Main function
def find_funding(project_description):
    """Search trusted Canadian sites for grants matching the user's project
    description, then summarize the hits with the Mistral model.

    Args:
        project_description: Free-text project description from the UI.

    Returns:
        A formatted list of funding opportunities, or a human-readable
        message when the input is too short, no results were found, or the
        model errors out.
    """
    if not project_description or len(project_description.strip()) < 5:
        return "Please describe your project to find funding opportunities."

    # Live search restricted to trusted sites
    query = f"site:canada.ca OR site:canadacouncil.ca grants for {project_description}"
    search_results = search.run(query)

    if not search_results:
        return "No search results found. Try a different project description."

    # Format the top results for the prompt. NOTE(review): Tavily result
    # dicts are not guaranteed to carry a 'title' key (commonly only 'url'
    # and 'content'), so fall back to the snippet text instead of letting
    # r['title'] raise KeyError.
    result_snippets = "\n".join(
        f"- {r.get('title') or r.get('content', '')} | {r.get('url', '')}"
        for r in search_results[:5]
    )

    prompt = f"""<s>[INST]
You are a Funding Advisor for Indigenous communities in Canada. Based on the following search results and the user's project description, return 3 to 5 funding opportunities.

User Project Description:
"{project_description}"

Search Results:
{result_snippets}

For each grant, include:
1. Grant name
2. Who it's for
3. Deadline (if known)
4. Link
[/INST]"""

    try:
        # do_sample=True makes temperature effective (it is ignored under the
        # default greedy decoding); return_full_text=False keeps the long
        # prompt out of the text shown to the user — the pipeline default
        # prepends the full prompt to the generated text.
        result = llm(
            prompt,
            max_new_tokens=500,
            temperature=0.7,
            do_sample=True,
            return_full_text=False,
        )[0]["generated_text"]
        return result.strip()
    except Exception as e:
        return f"⚠️ AI model error: {str(e)}"
69
+
70
+ # Theme
71
+ theme = gr.themes.Soft(
72
  primary_hue="orange",
73
+ secondary_hue="green",
74
  font=[gr.themes.GoogleFont("Inter"), "ui-sans-serif", "system-ui", "sans-serif"],
75
+ radius_size=gr.themes.sizes.radius_md,
 
 
 
 
 
 
76
  )
77
 
78
+ # UI
79
+ demo = gr.Interface(
 
 
 
 
 
 
 
 
 
 
 
 
80
  fn=find_funding,
81
+ inputs=gr.Textbox(
82
+ lines=3,
83
+ label="Describe your project",
84
+ placeholder="e.g. Language revitalization course for Dene youth in Yukon"
85
+ ),
86
+ outputs=gr.Textbox(
87
+ label="Funding Opportunities",
88
+ lines=10,
89
+ show_copy_button=True
90
+ ),
91
+ title="💸 Indigenous Project Funding Finder",
92
+ description="Enter a short project description. This tool searches trusted sites and uses AI to summarize relevant funding programs.",
93
+ theme=theme,
94
+ allow_flagging="never"
95
  )
96
 
 
 
 
 
 
 
 
 
 
 
 
97
  if __name__ == "__main__":
98
  demo.launch()