S-Dreamer committed on
Commit
799d4e7
·
verified ·
1 Parent(s): 3768c65

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +86 -32
app.py CHANGED
@@ -1,70 +1,116 @@
1
  import streamlit as st
2
  from datetime import datetime
 
 
3
 
4
- # ---------- Configuration ----------
5
  st.set_page_config(
6
  page_title="Code Assistant",
7
  page_icon="🧠",
8
- layout="centered"
9
  )
10
 
11
- # ---------- State ----------
12
  if "history" not in st.session_state:
13
  st.session_state.history = []
14
 
 
 
 
15
  # ---------- Core Logic ----------
16
  @st.cache_data(show_spinner=False)
17
- def generate_code(prompt: str) -> str:
18
  """
19
- Placeholder for an LLM / Hugging Face inference call.
20
- Cached to avoid recomputation for identical prompts.
21
  """
22
- # Replace with real model call later
23
- return f"""# Generated code
24
- # Prompt: {prompt}
 
 
 
 
25
 
26
  def example():
27
- print("Hello from generated code!")
28
  """
29
 
30
- def handle_generation(prompt: str):
31
  if not prompt.strip():
32
  st.warning("Prompt cannot be empty.")
33
  return
34
 
35
  with st.spinner("Generating code…"):
36
- result = generate_code(prompt)
37
 
38
  st.session_state.history.append({
39
  "timestamp": datetime.now().strftime("%H:%M:%S"),
 
 
40
  "prompt": prompt,
41
- "output": result
42
  })
43
 
44
- # ---------- UI ----------
45
- st.title("🧠 Code Assistant")
46
- st.caption("A minimal interface for turning intent into executable structure.")
 
 
 
 
 
 
 
 
 
 
 
47
 
48
- user_input = st.text_area(
49
- "Enter your coding prompt",
50
- height=120,
51
- placeholder="e.g. Write a Python function that validates JSON input"
52
- )
 
 
 
 
 
 
53
 
54
- col1, col2 = st.columns([1, 3])
 
 
 
 
 
 
55
 
56
- with col1:
57
- generate_clicked = st.button("Generate", use_container_width=True)
58
 
59
- with col2:
60
- clear_clicked = st.button("Clear History", use_container_width=True)
61
 
62
- if generate_clicked:
63
- handle_generation(user_input)
 
 
 
 
64
 
65
- if clear_clicked:
66
- st.session_state.history.clear()
67
- st.experimental_rerun()
 
 
 
 
 
 
 
 
 
 
 
68
 
69
  # ---------- Output ----------
70
  if st.session_state.history:
@@ -72,5 +118,13 @@ if st.session_state.history:
72
  st.subheader("Generation History")
73
 
74
  for item in reversed(st.session_state.history):
75
- with st.expander(f"[{item['timestamp']}] {item['prompt'][:60]}"):
 
 
 
 
 
 
 
 
76
  st.code(item["output"], language="python")
 
1
  import streamlit as st
2
  from datetime import datetime
3
+ import pandas as pd
4
+ from io import StringIO
5
 
6
# ---------- Page Config ----------
# Must run before any other st.* call in the script.
st.set_page_config(
    page_icon="🧠",
    page_title="Code Assistant",
    layout="wide",
)
12
 
13
# ---------- Session State ----------
# Seed per-session containers exactly once; reruns keep existing values.
for _key, _default in (("history", []), ("datasets", {})):
    if _key not in st.session_state:
        st.session_state[_key] = _default
20
# ---------- Core Logic ----------
@st.cache_data(show_spinner=False)
def generate_code(prompt: str, model_id: str, dataset_name: str | None) -> str:
    """Placeholder for LLM inference.

    Returns a stub Python snippet that echoes the generation context.
    Cached by ``(prompt, model_id, dataset_name)`` so identical requests
    skip recomputation.  NOTE(review): the cache key does NOT include the
    dataset *contents* — re-uploading a changed file under the same name
    returns stale output; confirm this is acceptable before wiring in a
    real model.

    Args:
        prompt: Free-form user instruction; may span multiple lines.
        model_id: Identifier of the (mock) model credited in the stub.
        dataset_name: Name of the active dataset, or ``None``.

    Returns:
        A Python source string suitable for ``st.code``.
    """
    context = f"# Model: {model_id}\n"
    if dataset_name:
        context += f"# Dataset: {dataset_name}\n"

    # Comment every prompt line individually — the previous version only
    # prefixed the first line with "#", so multi-line prompts leaked
    # uncommented (syntactically invalid) text into the generated stub.
    commented_prompt = "\n".join(f"# {line}" for line in prompt.splitlines()) or "#"

    return f"""{context}
# Prompt:
{commented_prompt}

def example():
    print("Generated using {model_id}")
"""
37
 
38
def handle_generation(prompt, model_id, dataset_name):
    """Validate the prompt, run generation, and append the result to history."""
    # Guard clause: refuse blank / whitespace-only prompts.
    if not prompt.strip():
        st.warning("Prompt cannot be empty.")
        return

    with st.spinner("Generating code…"):
        generated = generate_code(prompt, model_id, dataset_name)

    # Record one history entry per successful generation.
    entry = {
        "timestamp": datetime.now().strftime("%H:%M:%S"),
        "model": model_id,
        "dataset": dataset_name,
        "prompt": prompt,
        "output": generated,
    }
    st.session_state.history.append(entry)
53
 
54
# ---------- Sidebar ----------

def _load_dataset(file):
    """Parse an uploaded file: CSV/JSON -> DataFrame, anything else -> str.

    Dispatches on the filename extension rather than the browser-supplied
    MIME type — clients report inconsistent types for .json/.txt uploads
    (e.g. "text/plain"), which previously routed JSON files to the
    raw-text path.

    Raises:
        ValueError / UnicodeDecodeError: on unparseable or non-UTF-8 input.
    """
    suffix = file.name.rsplit(".", 1)[-1].lower()
    if suffix == "csv":
        return pd.read_csv(file)
    if suffix == "json":
        return pd.read_json(file)
    # Plain text: decode directly.  (The old code wrapped the decoded
    # string in StringIO(...).read() — a no-op round trip.)
    return file.read().decode("utf-8")


with st.sidebar:
    st.header("⚙️ Configuration")

    # Model selection (static list for now).
    model_id = st.selectbox(
        "Model",
        options=[
            "mock-codegen-v1",
            "llama-3-8b",
            "mistral-7b",
        ],
        help="Select the model used for code generation",
    )

    st.divider()

    # Optional dataset upload; parsed once per filename and cached in
    # session state so Streamlit reruns don't re-parse the file.
    uploaded_file = st.file_uploader(
        "Upload dataset",
        type=["csv", "json", "txt"],
        help="Optional dataset to condition generation",
    )

    if uploaded_file:
        dataset_name = uploaded_file.name

        if dataset_name not in st.session_state.datasets:
            try:
                st.session_state.datasets[dataset_name] = _load_dataset(uploaded_file)
            except (ValueError, UnicodeDecodeError) as exc:
                # Surface parse failures instead of crashing the whole app.
                st.error(f"Could not load {dataset_name}: {exc}")

        if dataset_name in st.session_state.datasets:
            st.success(f"Loaded: {dataset_name}")

    # Dataset selection — None means "generate without a dataset".
    dataset_name = st.selectbox(
        "Active dataset",
        options=[None] + list(st.session_state.datasets.keys()),
        format_func=lambda x: "None" if x is None else x,
    )
99
 
100
# ---------- Main UI ----------
st.title("🧠 Code Assistant")
st.caption("Prompt → Model → (Optional) Dataset → Code")

prompt = st.text_area(
    "Enter your coding prompt",
    placeholder="e.g. Generate a Python class that validates rows in the dataset",
    height=150,
)

# Kick off a generation run on click; prompt validation happens inside
# the handler, which warns on empty input.
if st.button("Generate", type="primary"):
    handle_generation(prompt, model_id, dataset_name)
114
 
115
# ---------- Output ----------
if st.session_state.history:
    st.subheader("Generation History")

    # Newest entries first.
    for entry in reversed(st.session_state.history):
        header_parts = [f"[{entry['timestamp']}] {entry['model']}"]
        if entry["dataset"]:
            header_parts.append(entry["dataset"])

        with st.expander(" | ".join(header_parts)):
            st.markdown("**Prompt**")
            st.write(entry["prompt"])

            st.markdown("**Generated Code**")
            st.code(entry["output"], language="python")