louiecerv commited on
Commit
b9db53c
·
1 Parent(s): 3d9eb13

sync with remote

Browse files
Files changed (2) hide show
  1. aiutils.py +0 -0
  2. app.py +46 -0
aiutils.py ADDED
File without changes
app.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""Streamlit demo: pick an AI model and a task, then show one dummy output per model."""

import streamlit as st

# Names used both as the model-selector options and as the output tab labels.
MODEL_NAMES = ["Gemini 2.0", "OpenAI GPT 4.0", "Meta Llama 3.2"]

# App title
st.title("AI Task Generator")

# Step 1: Model selection
st.header("Select AI Model")
ai_model = st.selectbox(
    "Choose one of the following AI models:",
    MODEL_NAMES,
)

# Step 2: Task selection
st.header("Select Task")

task = st.selectbox(
    "What do you want to generate?",
    ["Generate a syllabus", "Generate an exam", "Create a program"],
)

# Step 3: On click, echo the composed prompt and render the dummy outputs.
if st.button("Generate Prompt"):
    # Build a prompt from the user's two choices.
    prompt = f"You selected {ai_model} to {task.lower()}"
    st.write(f"**Prompt:** {prompt}")

    # Placeholder responses — no model is actually called here.
    replies = [f"[{name} Response] Dummy output for: {task}" for name in MODEL_NAMES]

    # Step 4: one tab per model, each showing its subheader and dummy text.
    st.header("AI Model Outputs")
    for tab, name, reply in zip(st.tabs(MODEL_NAMES), MODEL_NAMES, replies):
        with tab:
            st.subheader(name)
            st.write(reply)