Spaces:
Sleeping
Sleeping
rsm-roguchi committed on
Commit ·
ca5301a
1
Parent(s): f461a39
first prototype
Browse files- .python-version +1 -0
- Dockerfile +22 -5
- app.py +21 -159
- code/llm_connect.py +237 -0
- pyproject.toml +22 -0
- requirements.txt +0 -6
- server/__init__.py +0 -0
- server/blog.py +28 -0
- shared.py +0 -6
- styles.css +0 -12
- tips.csv +0 -245
- ui/__init__.py +0 -0
- ui/blog.py +7 -0
- uv.lock +0 -0
.python-version
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
3.12
|
Dockerfile
CHANGED
|
@@ -1,13 +1,30 @@
|
|
| 1 |
-
|
|
|
|
| 2 |
|
| 3 |
-
|
|
|
|
|
|
|
| 4 |
|
| 5 |
-
|
|
|
|
|
|
|
|
|
|
| 6 |
|
| 7 |
-
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
|
| 8 |
|
|
|
|
|
|
|
| 9 |
COPY . .
|
| 10 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
EXPOSE 7860
|
| 12 |
|
| 13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Dockerfile for Weather Assistant Demo
|
| 2 |
+
# This file configures how the application will run on Hugging Face Spaces
|
| 3 |
|
| 4 |
+
# Start with Python 3.12 as our base image
|
| 5 |
+
# This gives us a clean Python environment to work with
|
| 6 |
+
FROM python:3.12
|
| 7 |
|
| 8 |
+
# Install UV package manager
|
| 9 |
+
# UV is a fast, reliable Python package installer written in Rust
|
| 10 |
+
# It's much faster than pip for installing dependencies
|
| 11 |
+
RUN pip install uv
|
| 12 |
|
|
|
|
| 13 |
|
| 14 |
+
# Copy all files from our project into the container
|
| 15 |
+
# This includes app.py, pyproject.toml, and other necessary files
|
| 16 |
COPY . .
|
| 17 |
|
| 18 |
+
# Use UV to install all dependencies specified in pyproject.toml
|
| 19 |
+
# This ensures we have the same packages as in local development
|
| 20 |
+
RUN uv sync
|
| 21 |
+
|
| 22 |
+
# Configure the container to listen on port 7860
|
| 23 |
+
# This is the default port that Hugging Face Spaces expects
|
| 24 |
EXPOSE 7860
|
| 25 |
|
| 26 |
+
# Command to run when the container starts
|
| 27 |
+
# This launches the Shiny app with specific host and port settings
|
| 28 |
+
# - host 0.0.0.0 allows connections from outside the container
|
| 29 |
+
# - port 7860 matches our EXPOSE setting above
|
| 30 |
+
CMD ["/.venv/bin/shiny", "run", "app.py", "--host", "0.0.0.0", "--port", "7860"]
|
app.py
CHANGED
|
@@ -1,162 +1,24 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
from
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
ui
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
"total_bill",
|
| 19 |
-
"Bill amount",
|
| 20 |
-
min=bill_rng[0],
|
| 21 |
-
max=bill_rng[1],
|
| 22 |
-
value=bill_rng,
|
| 23 |
-
pre="$",
|
| 24 |
-
)
|
| 25 |
-
ui.input_checkbox_group(
|
| 26 |
-
"time",
|
| 27 |
-
"Food service",
|
| 28 |
-
["Lunch", "Dinner"],
|
| 29 |
-
selected=["Lunch", "Dinner"],
|
| 30 |
-
inline=True,
|
| 31 |
)
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
# Add main content
|
| 35 |
-
ICONS = {
|
| 36 |
-
"user": fa.icon_svg("user", "regular"),
|
| 37 |
-
"wallet": fa.icon_svg("wallet"),
|
| 38 |
-
"currency-dollar": fa.icon_svg("dollar-sign"),
|
| 39 |
-
"ellipsis": fa.icon_svg("ellipsis"),
|
| 40 |
-
}
|
| 41 |
-
|
| 42 |
-
with ui.layout_columns(fill=False):
|
| 43 |
-
with ui.value_box(showcase=ICONS["user"]):
|
| 44 |
-
"Total tippers"
|
| 45 |
-
|
| 46 |
-
@render.express
|
| 47 |
-
def total_tippers():
|
| 48 |
-
tips_data().shape[0]
|
| 49 |
-
|
| 50 |
-
with ui.value_box(showcase=ICONS["wallet"]):
|
| 51 |
-
"Average tip"
|
| 52 |
-
|
| 53 |
-
@render.express
|
| 54 |
-
def average_tip():
|
| 55 |
-
d = tips_data()
|
| 56 |
-
if d.shape[0] > 0:
|
| 57 |
-
perc = d.tip / d.total_bill
|
| 58 |
-
f"{perc.mean():.1%}"
|
| 59 |
-
|
| 60 |
-
with ui.value_box(showcase=ICONS["currency-dollar"]):
|
| 61 |
-
"Average bill"
|
| 62 |
-
|
| 63 |
-
@render.express
|
| 64 |
-
def average_bill():
|
| 65 |
-
d = tips_data()
|
| 66 |
-
if d.shape[0] > 0:
|
| 67 |
-
bill = d.total_bill.mean()
|
| 68 |
-
f"${bill:.2f}"
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
with ui.layout_columns(col_widths=[6, 6, 12]):
|
| 72 |
-
with ui.card(full_screen=True):
|
| 73 |
-
ui.card_header("Tips data")
|
| 74 |
-
|
| 75 |
-
@render.data_frame
|
| 76 |
-
def table():
|
| 77 |
-
return render.DataGrid(tips_data())
|
| 78 |
-
|
| 79 |
-
with ui.card(full_screen=True):
|
| 80 |
-
with ui.card_header(class_="d-flex justify-content-between align-items-center"):
|
| 81 |
-
"Total bill vs tip"
|
| 82 |
-
with ui.popover(title="Add a color variable", placement="top"):
|
| 83 |
-
ICONS["ellipsis"]
|
| 84 |
-
ui.input_radio_buttons(
|
| 85 |
-
"scatter_color",
|
| 86 |
-
None,
|
| 87 |
-
["none", "sex", "smoker", "day", "time"],
|
| 88 |
-
inline=True,
|
| 89 |
-
)
|
| 90 |
-
|
| 91 |
-
@render_plotly
|
| 92 |
-
def scatterplot():
|
| 93 |
-
color = input.scatter_color()
|
| 94 |
-
return px.scatter(
|
| 95 |
-
tips_data(),
|
| 96 |
-
x="total_bill",
|
| 97 |
-
y="tip",
|
| 98 |
-
color=None if color == "none" else color,
|
| 99 |
-
trendline="lowess",
|
| 100 |
-
)
|
| 101 |
-
|
| 102 |
-
with ui.card(full_screen=True):
|
| 103 |
-
with ui.card_header(class_="d-flex justify-content-between align-items-center"):
|
| 104 |
-
"Tip percentages"
|
| 105 |
-
with ui.popover(title="Add a color variable"):
|
| 106 |
-
ICONS["ellipsis"]
|
| 107 |
-
ui.input_radio_buttons(
|
| 108 |
-
"tip_perc_y",
|
| 109 |
-
"Split by:",
|
| 110 |
-
["sex", "smoker", "day", "time"],
|
| 111 |
-
selected="day",
|
| 112 |
-
inline=True,
|
| 113 |
-
)
|
| 114 |
-
|
| 115 |
-
@render_plotly
|
| 116 |
-
def tip_perc():
|
| 117 |
-
from ridgeplot import ridgeplot
|
| 118 |
-
|
| 119 |
-
dat = tips_data()
|
| 120 |
-
dat["percent"] = dat.tip / dat.total_bill
|
| 121 |
-
yvar = input.tip_perc_y()
|
| 122 |
-
uvals = dat[yvar].unique()
|
| 123 |
-
|
| 124 |
-
samples = [[dat.percent[dat[yvar] == val]] for val in uvals]
|
| 125 |
-
|
| 126 |
-
plt = ridgeplot(
|
| 127 |
-
samples=samples,
|
| 128 |
-
labels=uvals,
|
| 129 |
-
bandwidth=0.01,
|
| 130 |
-
colorscale="viridis",
|
| 131 |
-
colormode="row-index",
|
| 132 |
-
)
|
| 133 |
-
|
| 134 |
-
plt.update_layout(
|
| 135 |
-
legend=dict(
|
| 136 |
-
orientation="h", yanchor="bottom", y=1.02, xanchor="center", x=0.5
|
| 137 |
-
)
|
| 138 |
-
)
|
| 139 |
-
|
| 140 |
-
return plt
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
ui.include_css(app_dir / "styles.css")
|
| 144 |
-
|
| 145 |
-
# --------------------------------------------------------
|
| 146 |
-
# Reactive calculations and effects
|
| 147 |
-
# --------------------------------------------------------
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
@reactive.calc
|
| 151 |
-
def tips_data():
|
| 152 |
-
bill = input.total_bill()
|
| 153 |
-
idx1 = tips.total_bill.between(bill[0], bill[1])
|
| 154 |
-
idx2 = tips.time.isin(input.time())
|
| 155 |
-
return tips[idx1 & idx2]
|
| 156 |
|
|
|
|
|
|
|
| 157 |
|
| 158 |
-
|
| 159 |
-
@reactive.event(input.reset)
|
| 160 |
-
def _():
|
| 161 |
-
ui.update_slider("total_bill", value=bill_rng)
|
| 162 |
-
ui.update_checkbox_group("time", selected=["Lunch", "Dinner"])
|
|
|
|
# app.py — entry point for the SEO Blog Writer Shiny app.
#
# Fix: the original bound the name `ui` to three different things (the
# `shiny.ui` module, the local `ui` package, and the page object), and
# imported `page_navbar` and `os` without using them.
from shiny import App
from shiny import ui as shiny_ui

from ui import (
    blog
)

from server import (
    blog as blog_srv
)

# Top-level page: a fluid layout wrapping a navbar that holds the
# blog-writer panel defined in ui/blog.py.
app_ui = shiny_ui.page_fluid(
    shiny_ui.page_navbar(
        blog.ui,
        title="SEO Blog Writer"
    )
)


def server(input, output, session):
    """Delegate all server-side logic to the blog module's server function."""
    blog_srv.server(input, output, session)


app = App(app_ui, server)
|
|
|
|
|
|
|
|
|
|
|
code/llm_connect.py
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import pyrsm as rsm
|
| 3 |
+
import google.generativeai as genai
|
| 4 |
+
import requests
|
| 5 |
+
|
| 6 |
+
# from google.genai import types
|
| 7 |
+
from typing import List
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def query_llama(
    messages: List[dict],
    model: str = "llama-3",
    max_tokens: int = 4000,
    temperature: float = 0.4,
    api_key: str = "",
    timeout: float = 60.0,
) -> dict:
    """
    Send a chat-completion query to the Llama API.

    Args:
        messages (list): List of dictionaries containing message role and content pairs
        model (str): The model to use. Defaults to "llama-3"
        max_tokens (int, optional): Maximum number of tokens to generate. Defaults to 4000
        temperature (float, optional): Controls randomness in the output. 0 is deterministic, higher values more random. Defaults to 0.4
        api_key (str, optional): Authentication token for API access. Defaults to ""
        timeout (float, optional): Seconds to wait for the HTTP request before
            aborting. Defaults to 60.0 (the original code had no timeout, so a
            stalled server could hang the caller indefinitely)

    Raises:
        ValueError: If no API key is supplied.
        requests.HTTPError: If the server returns a non-2xx status.

    Example:
        messages = [
            {"role": "system", "content": "You are a helpful assistant"},
            {"role": "user", "content": "Hello!"}
        ]
        response = query_llama(messages, api_key="your-api-key")

    Returns:
        dict: The model's full JSON response (OpenAI-compatible schema).
    """
    url = "https://traip13.tgptinf.ucsd.edu/v1/chat/completions"
    if not api_key:  # covers both "" and None
        raise ValueError("LLAMA: API key is required")

    headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}
    data = {
        "messages": messages,
        "model": model,
        "max_tokens": max_tokens,
        "temperature": temperature,  # 0 for deterministic output
        "stream": False,
        "n": 1,
    }

    response = requests.post(url, headers=headers, json=data, timeout=timeout)
    response.raise_for_status()  # Raise an exception for bad status codes

    return response.json()
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def test_llama_connection(api_key: str, timeout: int = 20) -> bool:
    """Test connection to the Llama API with a minimal request.

    Sends a 1-token request and prints diagnostic information.

    Args:
        api_key (str): Authentication token for the API.
        timeout (int, optional): Seconds to wait before giving up. Defaults to 20.
            Bug fix: the original hard-coded ``timeout=20`` in the HTTP call,
            silently ignoring this parameter.

    Returns:
        bool: True if the endpoint answered with HTTP 200, False otherwise.
    """
    url = "https://traip13.tgptinf.ucsd.edu/v1/chat/completions"
    headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}
    data = {"messages": [], "model": "llama-3", "max_tokens": 1, "temperature": 0.4}

    try:
        # Use the caller-supplied timeout (was hard-coded to 20 before).
        response = requests.post(url, headers=headers, json=data, timeout=timeout)
        print(f"Status code: {response.status_code}")
        print(f"Response headers: {response.headers}")
        try:
            print(f"Response body: {response.json()}")
        except Exception:
            # Body was not valid JSON; fall back to the raw text.
            print(f"Response text: {response.text}")
        if response.status_code == 401:
            print("Authentication failed - check your API key")
            return False
        elif response.status_code == 404:
            print("API endpoint not found")
            return False
        elif response.status_code == 200:
            return True
        else:
            print(f"Unexpected status code: {response.status_code}")
            return False
    except requests.exceptions.Timeout:
        print(f"Connection timed out after {timeout} seconds")
        return False
    except requests.exceptions.ConnectionError:
        print("Could not connect to server")
        return False
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def query_gemini(
    messages: List[dict],
    model: str = "gemini-2.0-flash",
    max_tokens: int = 4000,
    temperature: float = 0.4,
    api_key: str = "",
) -> str:
    """
    Send a query to the Gemini API.

    Args:
        messages (list): List of dictionaries containing message role and content pairs
        model (str): The model to use. Defaults to "gemini-2.0-flash"
        max_tokens (int, optional): Maximum number of tokens to generate. Defaults to 4000
        temperature (float, optional): Controls randomness in the output. 0 is deterministic, higher values more random. Defaults to 0.4
        api_key (str, optional): Authentication token for API access. Defaults to ""

    Raises:
        ValueError: If no API key is supplied.

    Returns:
        str: The model's generated text (the original annotation said ``dict``,
        but the function returns ``response.text``).
    """

    if not api_key:  # covers both "" and None
        raise ValueError("Gemini: API key is required")

    # Convert OpenAI-style messages to a single Gemini prompt: system
    # message(s) first, then user message(s), joined with ". ".
    system_message = [msg["content"] for msg in messages if msg["role"] == "system"]
    user_messages = [msg["content"] for msg in messages if msg["role"] == "user"]
    prompt = ". ".join(system_message + user_messages)

    genai.configure(api_key=api_key)

    # Initialize the model (renamed local so the `model` parameter string is
    # not clobbered by the model object).
    gemini_model = genai.GenerativeModel(model_name=model)

    # Define the generation configuration using the dedicated config class.
    generation_config_obj = genai.types.GenerationConfig(
        temperature=temperature,
        max_output_tokens=max_tokens,
    )

    # Generate content using the 'generation_config' keyword.
    response = gemini_model.generate_content(
        contents=prompt, generation_config=generation_config_obj
    )

    return response.text
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def get_response(
|
| 143 |
+
input: str | List[str],
|
| 144 |
+
template: callable,
|
| 145 |
+
role: str = "You are a helpful assistant.",
|
| 146 |
+
temperature: float = 0.4,
|
| 147 |
+
max_tokens: int = 4000,
|
| 148 |
+
md: bool = True,
|
| 149 |
+
llm: str = "llama",
|
| 150 |
+
model_name: str = None,
|
| 151 |
+
):
|
| 152 |
+
"""
|
| 153 |
+
Function to get a response from the LLama API
|
| 154 |
+
"""
|
| 155 |
+
messages = [
|
| 156 |
+
{"role": "system", "content": role},
|
| 157 |
+
{
|
| 158 |
+
"role": "user",
|
| 159 |
+
"content": template(input),
|
| 160 |
+
},
|
| 161 |
+
]
|
| 162 |
+
|
| 163 |
+
if llm == "llama":
|
| 164 |
+
response = query_llama(
|
| 165 |
+
messages=messages,
|
| 166 |
+
api_key=os.getenv("LLAMA_API_KEY"),
|
| 167 |
+
temperature=temperature,
|
| 168 |
+
max_tokens=max_tokens,
|
| 169 |
+
)["choices"][0]["message"]["content"]
|
| 170 |
+
elif llm == "gemini":
|
| 171 |
+
response = query_gemini(
|
| 172 |
+
messages=messages,
|
| 173 |
+
api_key=os.getenv("GEMINI_API_KEY"),
|
| 174 |
+
temperature=temperature,
|
| 175 |
+
max_tokens=max_tokens,
|
| 176 |
+
model=model_name if model_name else 'gemini-2.0-flash'
|
| 177 |
+
)
|
| 178 |
+
else:
|
| 179 |
+
raise ValueError("LLM: Invalid LLM specified")
|
| 180 |
+
|
| 181 |
+
if md:
|
| 182 |
+
return rsm.md(response)
|
| 183 |
+
else:
|
| 184 |
+
return response
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
if __name__ == "__main__":
    from dotenv import load_dotenv

    # Pull LLAMA_API_KEY / GEMINI_API_KEY from a local .env file.
    load_dotenv()

    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hi, how are you? What is your name?"},
    ]

    # Smoke-test connectivity to the Llama endpoint.
    try:
        print("\nTesting Llama connection ...")
        test_llama_connection(api_key=os.getenv("LLAMA_API_KEY"))
    except Exception as e:
        print(f"Error: {e}")

    # Run a real Llama query (the original comment here was a copy-paste of
    # the connection-test comment).
    try:
        print("\nQuerying Llama ...")
        response = query_llama(messages, api_key=os.getenv("LLAMA_API_KEY"))
        print(response)
    except Exception as e:
        print(f"Error: {e}")

    # Run the same query against Gemini.
    try:
        print("\nQuerying Gemini ...")
        response = query_gemini(messages, api_key=os.getenv("GEMINI_API_KEY"))
        print(response)
    except Exception as e:
        print(f"Error: {e}")

    # Exercise the high-level helper with a fact-checking template.
    try:
        print("\nTesting get_response ...")

        def template(input):
            return f"""Evaluate the following statement for factual accuracy. If it's incorrect, provide the correct information:
Statement: {input}
Evaluation:"""

        response = get_response(
            "The capital of the Netherlands is Utrecht.",
            template=template,
            md=False,
            llm="llama",
        )
        print(response)

    except Exception as e:
        print(f"Error: {e}")
|
pyproject.toml
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Project metadata and dependencies for the SEO Blog Writer app (managed by uv).
[project]
name = "ultima-seo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
# NOTE(review): app.py imports `shiny` and the Dockerfile runs
# `/.venv/bin/shiny`, but `shiny` is not listed below — confirm it is pulled
# in transitively (via uv.lock / another dependency) or declare it explicitly.
dependencies = [
    "anthropic>=0.55.0",
    "google-generativeai>=0.8.5",
    "ipywidgets>=8.1.7",
    "langchain>=0.3.26",
    "langchain-anthropic>=0.3.16",
    "langchain-community>=0.3.26",
    "langchain-google-genai>=2.0.10",
    "langchain-openai>=0.3.27",
    "openai>=1.93.0",
    "pydantic>=2.11.7",
    "pydantic-ai>=0.3.5",
    "pyrsm>=1.6.0",
    "python-dotenv>=1.1.1",
    "requests>=2.32.4",
]
requirements.txt
DELETED
|
@@ -1,6 +0,0 @@
|
|
| 1 |
-
faicons
|
| 2 |
-
shiny
|
| 3 |
-
shinywidgets
|
| 4 |
-
plotly
|
| 5 |
-
pandas
|
| 6 |
-
ridgeplot
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
server/__init__.py
ADDED
|
File without changes
|
server/blog.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from shiny import reactive, render
|
| 2 |
+
from llm_connect import get_response
|
| 3 |
+
|
| 4 |
+
def draft_blog(topic):
    """Ask the LLM for an SEO-optimized blog post about *topic*.

    Returns the raw blog text (markdown wrapping disabled via ``md=False``).
    """
    # The full instructions live in this prompt; get_response is called with
    # an identity template so the prompt passes through unchanged.
    prompt = (
        "You are a blog writer for a hobby company called 'Ultima Supply'.\n"
        "Write me a SEO optimized blog with a title and subsections for the following topic in great detail:\n"
        f"{topic}\n"
        "Make sure the blog that you create has SEO keywords throughout so it can be found via search engines."
    )

    return get_response(
        input=prompt,
        template=lambda x: x,
        llm='llama',
        md=False,
        temperature=0.8,
        max_tokens=1000
    )
|
| 20 |
+
|
| 21 |
+
def server(input, output, session):
    """Register the reactive text output that renders the generated blog."""

    @output
    @render.text
    def blog_output():
        # Re-runs whenever the topic input changes.
        chosen_topic = input.topic()
        if not chosen_topic:
            return "Enter a topic to generate a blog."
        return draft_blog(chosen_topic)
|
shared.py
DELETED
|
@@ -1,6 +0,0 @@
|
|
| 1 |
-
from pathlib import Path
|
| 2 |
-
|
| 3 |
-
import pandas as pd
|
| 4 |
-
|
| 5 |
-
app_dir = Path(__file__).parent
|
| 6 |
-
tips = pd.read_csv(app_dir / "tips.csv")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
styles.css
DELETED
|
@@ -1,12 +0,0 @@
|
|
| 1 |
-
:root {
|
| 2 |
-
--bslib-sidebar-main-bg: #f8f8f8;
|
| 3 |
-
}
|
| 4 |
-
|
| 5 |
-
.popover {
|
| 6 |
-
--bs-popover-header-bg: #222;
|
| 7 |
-
--bs-popover-header-color: #fff;
|
| 8 |
-
}
|
| 9 |
-
|
| 10 |
-
.popover .btn-close {
|
| 11 |
-
filter: var(--bs-btn-close-white-filter);
|
| 12 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
tips.csv
DELETED
|
@@ -1,245 +0,0 @@
|
|
| 1 |
-
total_bill,tip,sex,smoker,day,time,size
|
| 2 |
-
16.99,1.01,Female,No,Sun,Dinner,2
|
| 3 |
-
10.34,1.66,Male,No,Sun,Dinner,3
|
| 4 |
-
21.01,3.5,Male,No,Sun,Dinner,3
|
| 5 |
-
23.68,3.31,Male,No,Sun,Dinner,2
|
| 6 |
-
24.59,3.61,Female,No,Sun,Dinner,4
|
| 7 |
-
25.29,4.71,Male,No,Sun,Dinner,4
|
| 8 |
-
8.77,2.0,Male,No,Sun,Dinner,2
|
| 9 |
-
26.88,3.12,Male,No,Sun,Dinner,4
|
| 10 |
-
15.04,1.96,Male,No,Sun,Dinner,2
|
| 11 |
-
14.78,3.23,Male,No,Sun,Dinner,2
|
| 12 |
-
10.27,1.71,Male,No,Sun,Dinner,2
|
| 13 |
-
35.26,5.0,Female,No,Sun,Dinner,4
|
| 14 |
-
15.42,1.57,Male,No,Sun,Dinner,2
|
| 15 |
-
18.43,3.0,Male,No,Sun,Dinner,4
|
| 16 |
-
14.83,3.02,Female,No,Sun,Dinner,2
|
| 17 |
-
21.58,3.92,Male,No,Sun,Dinner,2
|
| 18 |
-
10.33,1.67,Female,No,Sun,Dinner,3
|
| 19 |
-
16.29,3.71,Male,No,Sun,Dinner,3
|
| 20 |
-
16.97,3.5,Female,No,Sun,Dinner,3
|
| 21 |
-
20.65,3.35,Male,No,Sat,Dinner,3
|
| 22 |
-
17.92,4.08,Male,No,Sat,Dinner,2
|
| 23 |
-
20.29,2.75,Female,No,Sat,Dinner,2
|
| 24 |
-
15.77,2.23,Female,No,Sat,Dinner,2
|
| 25 |
-
39.42,7.58,Male,No,Sat,Dinner,4
|
| 26 |
-
19.82,3.18,Male,No,Sat,Dinner,2
|
| 27 |
-
17.81,2.34,Male,No,Sat,Dinner,4
|
| 28 |
-
13.37,2.0,Male,No,Sat,Dinner,2
|
| 29 |
-
12.69,2.0,Male,No,Sat,Dinner,2
|
| 30 |
-
21.7,4.3,Male,No,Sat,Dinner,2
|
| 31 |
-
19.65,3.0,Female,No,Sat,Dinner,2
|
| 32 |
-
9.55,1.45,Male,No,Sat,Dinner,2
|
| 33 |
-
18.35,2.5,Male,No,Sat,Dinner,4
|
| 34 |
-
15.06,3.0,Female,No,Sat,Dinner,2
|
| 35 |
-
20.69,2.45,Female,No,Sat,Dinner,4
|
| 36 |
-
17.78,3.27,Male,No,Sat,Dinner,2
|
| 37 |
-
24.06,3.6,Male,No,Sat,Dinner,3
|
| 38 |
-
16.31,2.0,Male,No,Sat,Dinner,3
|
| 39 |
-
16.93,3.07,Female,No,Sat,Dinner,3
|
| 40 |
-
18.69,2.31,Male,No,Sat,Dinner,3
|
| 41 |
-
31.27,5.0,Male,No,Sat,Dinner,3
|
| 42 |
-
16.04,2.24,Male,No,Sat,Dinner,3
|
| 43 |
-
17.46,2.54,Male,No,Sun,Dinner,2
|
| 44 |
-
13.94,3.06,Male,No,Sun,Dinner,2
|
| 45 |
-
9.68,1.32,Male,No,Sun,Dinner,2
|
| 46 |
-
30.4,5.6,Male,No,Sun,Dinner,4
|
| 47 |
-
18.29,3.0,Male,No,Sun,Dinner,2
|
| 48 |
-
22.23,5.0,Male,No,Sun,Dinner,2
|
| 49 |
-
32.4,6.0,Male,No,Sun,Dinner,4
|
| 50 |
-
28.55,2.05,Male,No,Sun,Dinner,3
|
| 51 |
-
18.04,3.0,Male,No,Sun,Dinner,2
|
| 52 |
-
12.54,2.5,Male,No,Sun,Dinner,2
|
| 53 |
-
10.29,2.6,Female,No,Sun,Dinner,2
|
| 54 |
-
34.81,5.2,Female,No,Sun,Dinner,4
|
| 55 |
-
9.94,1.56,Male,No,Sun,Dinner,2
|
| 56 |
-
25.56,4.34,Male,No,Sun,Dinner,4
|
| 57 |
-
19.49,3.51,Male,No,Sun,Dinner,2
|
| 58 |
-
38.01,3.0,Male,Yes,Sat,Dinner,4
|
| 59 |
-
26.41,1.5,Female,No,Sat,Dinner,2
|
| 60 |
-
11.24,1.76,Male,Yes,Sat,Dinner,2
|
| 61 |
-
48.27,6.73,Male,No,Sat,Dinner,4
|
| 62 |
-
20.29,3.21,Male,Yes,Sat,Dinner,2
|
| 63 |
-
13.81,2.0,Male,Yes,Sat,Dinner,2
|
| 64 |
-
11.02,1.98,Male,Yes,Sat,Dinner,2
|
| 65 |
-
18.29,3.76,Male,Yes,Sat,Dinner,4
|
| 66 |
-
17.59,2.64,Male,No,Sat,Dinner,3
|
| 67 |
-
20.08,3.15,Male,No,Sat,Dinner,3
|
| 68 |
-
16.45,2.47,Female,No,Sat,Dinner,2
|
| 69 |
-
3.07,1.0,Female,Yes,Sat,Dinner,1
|
| 70 |
-
20.23,2.01,Male,No,Sat,Dinner,2
|
| 71 |
-
15.01,2.09,Male,Yes,Sat,Dinner,2
|
| 72 |
-
12.02,1.97,Male,No,Sat,Dinner,2
|
| 73 |
-
17.07,3.0,Female,No,Sat,Dinner,3
|
| 74 |
-
26.86,3.14,Female,Yes,Sat,Dinner,2
|
| 75 |
-
25.28,5.0,Female,Yes,Sat,Dinner,2
|
| 76 |
-
14.73,2.2,Female,No,Sat,Dinner,2
|
| 77 |
-
10.51,1.25,Male,No,Sat,Dinner,2
|
| 78 |
-
17.92,3.08,Male,Yes,Sat,Dinner,2
|
| 79 |
-
27.2,4.0,Male,No,Thur,Lunch,4
|
| 80 |
-
22.76,3.0,Male,No,Thur,Lunch,2
|
| 81 |
-
17.29,2.71,Male,No,Thur,Lunch,2
|
| 82 |
-
19.44,3.0,Male,Yes,Thur,Lunch,2
|
| 83 |
-
16.66,3.4,Male,No,Thur,Lunch,2
|
| 84 |
-
10.07,1.83,Female,No,Thur,Lunch,1
|
| 85 |
-
32.68,5.0,Male,Yes,Thur,Lunch,2
|
| 86 |
-
15.98,2.03,Male,No,Thur,Lunch,2
|
| 87 |
-
34.83,5.17,Female,No,Thur,Lunch,4
|
| 88 |
-
13.03,2.0,Male,No,Thur,Lunch,2
|
| 89 |
-
18.28,4.0,Male,No,Thur,Lunch,2
|
| 90 |
-
24.71,5.85,Male,No,Thur,Lunch,2
|
| 91 |
-
21.16,3.0,Male,No,Thur,Lunch,2
|
| 92 |
-
28.97,3.0,Male,Yes,Fri,Dinner,2
|
| 93 |
-
22.49,3.5,Male,No,Fri,Dinner,2
|
| 94 |
-
5.75,1.0,Female,Yes,Fri,Dinner,2
|
| 95 |
-
16.32,4.3,Female,Yes,Fri,Dinner,2
|
| 96 |
-
22.75,3.25,Female,No,Fri,Dinner,2
|
| 97 |
-
40.17,4.73,Male,Yes,Fri,Dinner,4
|
| 98 |
-
27.28,4.0,Male,Yes,Fri,Dinner,2
|
| 99 |
-
12.03,1.5,Male,Yes,Fri,Dinner,2
|
| 100 |
-
21.01,3.0,Male,Yes,Fri,Dinner,2
|
| 101 |
-
12.46,1.5,Male,No,Fri,Dinner,2
|
| 102 |
-
11.35,2.5,Female,Yes,Fri,Dinner,2
|
| 103 |
-
15.38,3.0,Female,Yes,Fri,Dinner,2
|
| 104 |
-
44.3,2.5,Female,Yes,Sat,Dinner,3
|
| 105 |
-
22.42,3.48,Female,Yes,Sat,Dinner,2
|
| 106 |
-
20.92,4.08,Female,No,Sat,Dinner,2
|
| 107 |
-
15.36,1.64,Male,Yes,Sat,Dinner,2
|
| 108 |
-
20.49,4.06,Male,Yes,Sat,Dinner,2
|
| 109 |
-
25.21,4.29,Male,Yes,Sat,Dinner,2
|
| 110 |
-
18.24,3.76,Male,No,Sat,Dinner,2
|
| 111 |
-
14.31,4.0,Female,Yes,Sat,Dinner,2
|
| 112 |
-
14.0,3.0,Male,No,Sat,Dinner,2
|
| 113 |
-
7.25,1.0,Female,No,Sat,Dinner,1
|
| 114 |
-
38.07,4.0,Male,No,Sun,Dinner,3
|
| 115 |
-
23.95,2.55,Male,No,Sun,Dinner,2
|
| 116 |
-
25.71,4.0,Female,No,Sun,Dinner,3
|
| 117 |
-
17.31,3.5,Female,No,Sun,Dinner,2
|
| 118 |
-
29.93,5.07,Male,No,Sun,Dinner,4
|
| 119 |
-
10.65,1.5,Female,No,Thur,Lunch,2
|
| 120 |
-
12.43,1.8,Female,No,Thur,Lunch,2
|
| 121 |
-
24.08,2.92,Female,No,Thur,Lunch,4
|
| 122 |
-
11.69,2.31,Male,No,Thur,Lunch,2
|
| 123 |
-
13.42,1.68,Female,No,Thur,Lunch,2
|
| 124 |
-
14.26,2.5,Male,No,Thur,Lunch,2
|
| 125 |
-
15.95,2.0,Male,No,Thur,Lunch,2
|
| 126 |
-
12.48,2.52,Female,No,Thur,Lunch,2
|
| 127 |
-
29.8,4.2,Female,No,Thur,Lunch,6
|
| 128 |
-
8.52,1.48,Male,No,Thur,Lunch,2
|
| 129 |
-
14.52,2.0,Female,No,Thur,Lunch,2
|
| 130 |
-
11.38,2.0,Female,No,Thur,Lunch,2
|
| 131 |
-
22.82,2.18,Male,No,Thur,Lunch,3
|
| 132 |
-
19.08,1.5,Male,No,Thur,Lunch,2
|
| 133 |
-
20.27,2.83,Female,No,Thur,Lunch,2
|
| 134 |
-
11.17,1.5,Female,No,Thur,Lunch,2
|
| 135 |
-
12.26,2.0,Female,No,Thur,Lunch,2
|
| 136 |
-
18.26,3.25,Female,No,Thur,Lunch,2
|
| 137 |
-
8.51,1.25,Female,No,Thur,Lunch,2
|
| 138 |
-
10.33,2.0,Female,No,Thur,Lunch,2
|
| 139 |
-
14.15,2.0,Female,No,Thur,Lunch,2
|
| 140 |
-
16.0,2.0,Male,Yes,Thur,Lunch,2
|
| 141 |
-
13.16,2.75,Female,No,Thur,Lunch,2
|
| 142 |
-
17.47,3.5,Female,No,Thur,Lunch,2
|
| 143 |
-
34.3,6.7,Male,No,Thur,Lunch,6
|
| 144 |
-
41.19,5.0,Male,No,Thur,Lunch,5
|
| 145 |
-
27.05,5.0,Female,No,Thur,Lunch,6
|
| 146 |
-
16.43,2.3,Female,No,Thur,Lunch,2
|
| 147 |
-
8.35,1.5,Female,No,Thur,Lunch,2
|
| 148 |
-
18.64,1.36,Female,No,Thur,Lunch,3
|
| 149 |
-
11.87,1.63,Female,No,Thur,Lunch,2
|
| 150 |
-
9.78,1.73,Male,No,Thur,Lunch,2
|
| 151 |
-
7.51,2.0,Male,No,Thur,Lunch,2
|
| 152 |
-
14.07,2.5,Male,No,Sun,Dinner,2
|
| 153 |
-
13.13,2.0,Male,No,Sun,Dinner,2
|
| 154 |
-
17.26,2.74,Male,No,Sun,Dinner,3
|
| 155 |
-
24.55,2.0,Male,No,Sun,Dinner,4
|
| 156 |
-
19.77,2.0,Male,No,Sun,Dinner,4
|
| 157 |
-
29.85,5.14,Female,No,Sun,Dinner,5
|
| 158 |
-
48.17,5.0,Male,No,Sun,Dinner,6
|
| 159 |
-
25.0,3.75,Female,No,Sun,Dinner,4
|
| 160 |
-
13.39,2.61,Female,No,Sun,Dinner,2
|
| 161 |
-
16.49,2.0,Male,No,Sun,Dinner,4
|
| 162 |
-
21.5,3.5,Male,No,Sun,Dinner,4
|
| 163 |
-
12.66,2.5,Male,No,Sun,Dinner,2
|
| 164 |
-
16.21,2.0,Female,No,Sun,Dinner,3
|
| 165 |
-
13.81,2.0,Male,No,Sun,Dinner,2
|
| 166 |
-
17.51,3.0,Female,Yes,Sun,Dinner,2
|
| 167 |
-
24.52,3.48,Male,No,Sun,Dinner,3
|
| 168 |
-
20.76,2.24,Male,No,Sun,Dinner,2
|
| 169 |
-
31.71,4.5,Male,No,Sun,Dinner,4
|
| 170 |
-
10.59,1.61,Female,Yes,Sat,Dinner,2
|
| 171 |
-
10.63,2.0,Female,Yes,Sat,Dinner,2
|
| 172 |
-
50.81,10.0,Male,Yes,Sat,Dinner,3
|
| 173 |
-
15.81,3.16,Male,Yes,Sat,Dinner,2
|
| 174 |
-
7.25,5.15,Male,Yes,Sun,Dinner,2
|
| 175 |
-
31.85,3.18,Male,Yes,Sun,Dinner,2
|
| 176 |
-
16.82,4.0,Male,Yes,Sun,Dinner,2
|
| 177 |
-
32.9,3.11,Male,Yes,Sun,Dinner,2
|
| 178 |
-
17.89,2.0,Male,Yes,Sun,Dinner,2
|
| 179 |
-
14.48,2.0,Male,Yes,Sun,Dinner,2
|
| 180 |
-
9.6,4.0,Female,Yes,Sun,Dinner,2
|
| 181 |
-
34.63,3.55,Male,Yes,Sun,Dinner,2
|
| 182 |
-
34.65,3.68,Male,Yes,Sun,Dinner,4
|
| 183 |
-
23.33,5.65,Male,Yes,Sun,Dinner,2
|
| 184 |
-
45.35,3.5,Male,Yes,Sun,Dinner,3
|
| 185 |
-
23.17,6.5,Male,Yes,Sun,Dinner,4
|
| 186 |
-
40.55,3.0,Male,Yes,Sun,Dinner,2
|
| 187 |
-
20.69,5.0,Male,No,Sun,Dinner,5
|
| 188 |
-
20.9,3.5,Female,Yes,Sun,Dinner,3
|
| 189 |
-
30.46,2.0,Male,Yes,Sun,Dinner,5
|
| 190 |
-
18.15,3.5,Female,Yes,Sun,Dinner,3
|
| 191 |
-
23.1,4.0,Male,Yes,Sun,Dinner,3
|
| 192 |
-
15.69,1.5,Male,Yes,Sun,Dinner,2
|
| 193 |
-
19.81,4.19,Female,Yes,Thur,Lunch,2
|
| 194 |
-
28.44,2.56,Male,Yes,Thur,Lunch,2
|
| 195 |
-
15.48,2.02,Male,Yes,Thur,Lunch,2
|
| 196 |
-
16.58,4.0,Male,Yes,Thur,Lunch,2
|
| 197 |
-
7.56,1.44,Male,No,Thur,Lunch,2
|
| 198 |
-
10.34,2.0,Male,Yes,Thur,Lunch,2
|
| 199 |
-
43.11,5.0,Female,Yes,Thur,Lunch,4
|
| 200 |
-
13.0,2.0,Female,Yes,Thur,Lunch,2
|
| 201 |
-
13.51,2.0,Male,Yes,Thur,Lunch,2
|
| 202 |
-
18.71,4.0,Male,Yes,Thur,Lunch,3
|
| 203 |
-
12.74,2.01,Female,Yes,Thur,Lunch,2
|
| 204 |
-
13.0,2.0,Female,Yes,Thur,Lunch,2
|
| 205 |
-
16.4,2.5,Female,Yes,Thur,Lunch,2
|
| 206 |
-
20.53,4.0,Male,Yes,Thur,Lunch,4
|
| 207 |
-
16.47,3.23,Female,Yes,Thur,Lunch,3
|
| 208 |
-
26.59,3.41,Male,Yes,Sat,Dinner,3
|
| 209 |
-
38.73,3.0,Male,Yes,Sat,Dinner,4
|
| 210 |
-
24.27,2.03,Male,Yes,Sat,Dinner,2
|
| 211 |
-
12.76,2.23,Female,Yes,Sat,Dinner,2
|
| 212 |
-
30.06,2.0,Male,Yes,Sat,Dinner,3
|
| 213 |
-
25.89,5.16,Male,Yes,Sat,Dinner,4
|
| 214 |
-
48.33,9.0,Male,No,Sat,Dinner,4
|
| 215 |
-
13.27,2.5,Female,Yes,Sat,Dinner,2
|
| 216 |
-
28.17,6.5,Female,Yes,Sat,Dinner,3
|
| 217 |
-
12.9,1.1,Female,Yes,Sat,Dinner,2
|
| 218 |
-
28.15,3.0,Male,Yes,Sat,Dinner,5
|
| 219 |
-
11.59,1.5,Male,Yes,Sat,Dinner,2
|
| 220 |
-
7.74,1.44,Male,Yes,Sat,Dinner,2
|
| 221 |
-
30.14,3.09,Female,Yes,Sat,Dinner,4
|
| 222 |
-
12.16,2.2,Male,Yes,Fri,Lunch,2
|
| 223 |
-
13.42,3.48,Female,Yes,Fri,Lunch,2
|
| 224 |
-
8.58,1.92,Male,Yes,Fri,Lunch,1
|
| 225 |
-
15.98,3.0,Female,No,Fri,Lunch,3
|
| 226 |
-
13.42,1.58,Male,Yes,Fri,Lunch,2
|
| 227 |
-
16.27,2.5,Female,Yes,Fri,Lunch,2
|
| 228 |
-
10.09,2.0,Female,Yes,Fri,Lunch,2
|
| 229 |
-
20.45,3.0,Male,No,Sat,Dinner,4
|
| 230 |
-
13.28,2.72,Male,No,Sat,Dinner,2
|
| 231 |
-
22.12,2.88,Female,Yes,Sat,Dinner,2
|
| 232 |
-
24.01,2.0,Male,Yes,Sat,Dinner,4
|
| 233 |
-
15.69,3.0,Male,Yes,Sat,Dinner,3
|
| 234 |
-
11.61,3.39,Male,No,Sat,Dinner,2
|
| 235 |
-
10.77,1.47,Male,No,Sat,Dinner,2
|
| 236 |
-
15.53,3.0,Male,Yes,Sat,Dinner,2
|
| 237 |
-
10.07,1.25,Male,No,Sat,Dinner,2
|
| 238 |
-
12.6,1.0,Male,Yes,Sat,Dinner,2
|
| 239 |
-
32.83,1.17,Male,Yes,Sat,Dinner,2
|
| 240 |
-
35.83,4.67,Female,No,Sat,Dinner,3
|
| 241 |
-
29.03,5.92,Male,No,Sat,Dinner,3
|
| 242 |
-
27.18,2.0,Female,Yes,Sat,Dinner,2
|
| 243 |
-
22.67,2.0,Male,Yes,Sat,Dinner,2
|
| 244 |
-
17.82,1.75,Male,No,Sat,Dinner,2
|
| 245 |
-
18.78,3.0,Female,No,Thur,Dinner,2
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
ui/__init__.py
ADDED
|
File without changes
|
ui/blog.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from shiny import ui
|
| 2 |
+
|
| 3 |
+
# Navigation panel for the "Blog Writer" tab: a topic input plus the verbatim
# text area where the generated blog is rendered by server/blog.py.
# NOTE: the assignment rebinds the name `ui` (originally the shiny.ui module);
# app.py references this object as `blog.ui`, so the name must stay.
ui = ui.nav_panel(
    "Blog Writer",
    ui.input_text(
        "topic",
        "Enter Blog Topic",
        placeholder="e.g. Best One Piece Cards in 2025",
    ),
    ui.output_text_verbatim("blog_output"),
)
|
uv.lock
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|