Update app.py
Browse files
app.py
CHANGED
|
@@ -1,15 +1,14 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
import google.generativeai as genai
|
| 3 |
import re
|
| 4 |
-
import datetime
|
| 5 |
import os
|
| 6 |
-
import
|
| 7 |
|
| 8 |
# Secret key and Google Gemini API configuration
|
| 9 |
API_KEY = st.secrets["GOOGLE_API_KEY"]
|
| 10 |
|
| 11 |
# Page configuration
|
| 12 |
-
st.set_page_config(page_title="
|
| 13 |
|
| 14 |
# --- Helper Functions ---
|
| 15 |
def send_message_to_model(message, model_name, temperature, top_p, top_k, max_tokens):
|
|
@@ -34,92 +33,82 @@ def send_message_to_model(message, model_name, temperature, top_p, top_k, max_to
|
|
| 34 |
st.error(f"โ Error communicating with the AI: {e}")
|
| 35 |
return None
|
| 36 |
|
| 37 |
-
def
|
| 38 |
-
"""Generates a
|
| 39 |
prompt = f"""
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
**Important:**
|
| 79 |
-
- Generate only one plan at a time.
|
| 80 |
-
- Create the longest, most complete, and detailed code possible to cover a wide range of possibilities and scenarios.
|
| 81 |
-
- Consider all the details of the request, expanding the response and improving the configuration.
|
| 82 |
-
- If the prompt does not include parameters, but if the user add a variable in a code block, create a task parameter to use that variable.
|
| 83 |
-
- If the prompt includes parameters, create a task parameter to use that parameters.
|
| 84 |
-
"""
|
| 85 |
response = send_message_to_model(prompt, model_name, temperature, top_p, top_k, max_tokens)
|
| 86 |
return response
|
| 87 |
|
| 88 |
-
def
|
| 89 |
-
"""Parses the markdown and saves the
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
| 93 |
-
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 101 |
|
|
|
|
|
|
|
| 102 |
|
| 103 |
-
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
# Puppet Bolt Plan Generated by Google Gemini 2 Bolt Gen Pro
|
| 109 |
-
# Date: {current_date}
|
| 110 |
-
# Author: Elias Andrade AKA Chaos4455
|
| 111 |
-
#===============================================================================\n"""
|
| 112 |
-
yml_code = header + yml_code
|
| 113 |
-
|
| 114 |
-
with open(file_name, "w", encoding=encoding) as f:
|
| 115 |
-
f.write(yml_code)
|
| 116 |
-
|
| 117 |
-
return file_name, yml_code
|
| 118 |
|
| 119 |
|
| 120 |
def main():
|
| 121 |
-
st.title("
|
| 122 |
-
st.markdown("Generate advanced
|
| 123 |
st.markdown("---")
|
| 124 |
|
| 125 |
# Layout in columns (sidebar and main area)
|
|
@@ -135,96 +124,94 @@ def main():
|
|
| 135 |
top_k = st.slider("Top K", min_value=1, max_value=100, value=40, step=1, help="Adjust the AI's number of candidate tokens.")
|
| 136 |
max_tokens = st.number_input("๐ Max Tokens", min_value=128, max_value=8192, value=8192, step=128, help="Adjust the maximum size of the response.")
|
| 137 |
|
| 138 |
-
with st.expander("๐
|
| 139 |
-
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
"
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
],
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
|
| 153 |
-
|
| 154 |
-
|
| 155 |
-
|
| 156 |
-
|
| 157 |
-
|
| 158 |
-
|
| 159 |
-
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 167 |
|
| 168 |
with col2:
|
| 169 |
-
|
| 170 |
-
prompt_base = st.text_input("Describe the
|
| 171 |
-
|
| 172 |
-
|
| 173 |
-
|
| 174 |
-
|
| 175 |
-
prompt_base = f"{prompt_presets} , {prompt_base}"
|
| 176 |
-
else:
|
| 177 |
-
prompt_base = prompt_presets;
|
| 178 |
-
if st.button("โจ Generate Puppet Bolt Plan"):
|
| 179 |
-
if not prompt_base:
|
| 180 |
-
st.error("โ ๏ธ Please enter a plan description.")
|
| 181 |
return
|
| 182 |
-
|
| 183 |
-
|
| 184 |
-
|
| 185 |
-
|
| 186 |
-
|
| 187 |
-
with st.spinner("โณ Generating
|
| 188 |
-
ai_code =
|
| 189 |
-
prompt_base,
|
| 190 |
-
detail_level,
|
| 191 |
-
os_type,
|
| 192 |
-
security_level,
|
| 193 |
model_name,
|
| 194 |
temperature,
|
| 195 |
top_p,
|
| 196 |
top_k,
|
| 197 |
max_tokens,
|
| 198 |
-
|
| 199 |
-
|
| 200 |
-
add_header,
|
| 201 |
-
log_level,
|
| 202 |
-
bolt_user,
|
| 203 |
-
bolt_ssh_pass,
|
| 204 |
custom_requirements,
|
| 205 |
specific_details,
|
| 206 |
-
|
| 207 |
-
|
| 208 |
-
task_code,
|
| 209 |
-
parameters_description
|
| 210 |
)
|
| 211 |
|
| 212 |
if ai_code:
|
| 213 |
-
|
| 214 |
-
|
| 215 |
-
|
| 216 |
-
|
| 217 |
-
|
| 218 |
-
|
| 219 |
-
|
| 220 |
-
|
| 221 |
-
|
| 222 |
-
|
| 223 |
-
|
| 224 |
-
|
| 225 |
-
|
|
|
|
| 226 |
else:
|
| 227 |
-
st.error("โ Error generating the
|
|
|
|
| 228 |
|
| 229 |
if __name__ == "__main__":
|
| 230 |
main()
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
import google.generativeai as genai
|
| 3 |
import re
|
|
|
|
| 4 |
import os
|
| 5 |
+
import platform
|
| 6 |
|
| 7 |
# Secret key and Google Gemini API configuration
# Read from Streamlit's secrets store; raises KeyError at import time if
# GOOGLE_API_KEY is not configured in .streamlit/secrets.toml.
API_KEY = st.secrets["GOOGLE_API_KEY"]
# NOTE(review): API_KEY is only assigned here — no genai.configure(api_key=...)
# is visible in this view; confirm the key is actually applied elsewhere.

# Page configuration
# NOTE(review): the title/icon strings look mojibake (UTF-8 emoji decoded with
# the wrong codepage) — verify the intended emoji before shipping.
st.set_page_config(page_title="๐โจ Gemini2 Pip Gen Pro", page_icon="๐โจ", layout="wide")
|
| 12 |
|
| 13 |
# --- Helper Functions ---
|
| 14 |
def send_message_to_model(message, model_name, temperature, top_p, top_k, max_tokens):
|
|
|
|
| 33 |
st.error(f"โ Error communicating with the AI: {e}")
|
| 34 |
return None
|
| 35 |
|
| 36 |
+
def generate_pip_command(prompt_base, model_name, temperature, top_p, top_k, max_tokens, selected_libraries, selected_groups, custom_requirements, specific_details, pip_options, target_os):
    """Generates a pip command based on user settings.

    Builds one large instruction prompt from the user's description and the
    UI selections, then forwards it to the Gemini model through
    ``send_message_to_model``.

    Args:
        prompt_base: Free-text description of the desired pip command.
        model_name: Gemini model identifier to query.
        temperature: Sampling temperature, passed through to the model call.
        top_p: Nucleus-sampling parameter, passed through to the model call.
        top_k: Candidate-token count, passed through to the model call.
        max_tokens: Maximum response size, passed through to the model call.
        selected_libraries: Libraries chosen in the UI; falsy values render
            as the literal string "None" in the prompt.
        selected_groups: Library groups chosen in the UI (same falsy rule).
        custom_requirements: Extra libraries/version pins typed by the user.
        specific_details: Additional free-text constraints.
        pip_options: Custom pip flags (e.g. --index-url, --no-cache).
        target_os: Target operating-system label interpolated verbatim.

    Returns:
        The model's markdown response (expected to contain ```pip, ```bash
        and ```powershell fenced blocks), or None when the call failed —
        ``send_message_to_model`` reports the error in the Streamlit UI.
    """
    prompt = f"""
You are an expert Python development assistant. Your task is to generate a complete and efficient pip command based on the following description:

**Goal:** Create the most complete, detailed, and efficient pip install command possible, considering all variables, edge cases, and potential scenarios.

**Target Operating System:** {target_os}

**Command Description:** {prompt_base}

**Selected Libraries:** {selected_libraries if selected_libraries else "None"}
**Selected Groups:** {selected_groups if selected_groups else "None"}
**Custom Requirements:** {custom_requirements if custom_requirements else "None"}
**Specific Details:** {specific_details if specific_details else "None"}
**Pip Options:** {pip_options if pip_options else "None"}

**Response Format:**
- Respond in Markdown format, including a pip command code block, a bash code block and powershell code block with its original formatting, without line breaks.
- The pip command code block must be delimited by ```pip and ```.
- The bash code block must be delimited by ```bash and ```.
- The powershell code block must be delimited by ```powershell and ```.
- Do not include comments, explanations, or any other text outside the code block.
- The code must maintain its full vertical formatting, respecting indentation and line breaks.
- Explore different approaches, techniques, and advanced practices, always prioritizing security and efficiency.
- Use advanced pip resources such as version constraints, index options, and requirements files when necessary.
- If a specific version of a library is specified, use it.
- Unless the user specifies otherwise, use the most current versions of the libraries and pip, using and following best practices.
- Use incremental reasoning to add improvements, expansions, and considerations to your code.
- Use the history of the conversations so that the response is incremental.
- If the target OS is Windows, use pip commands compatible with Windows, and if the target OS is Linux, use pip commands compatible with Linux

**Important:**
- Generate only one command at a time.
- Create the longest, most complete, and detailed command possible to cover a wide range of possibilities and scenarios.
- Consider all the details of the request, expanding the response and improving the command.
- If the prompt asks to install a library that is not in a given group, install it in the command, but include in the prompt that library is not in the selected groups.
- If a version of a library is specified, install it using a version constraint.
- Consider if the user needs a requirements.txt file or not and if needed use it in your command.
"""
    response = send_message_to_model(prompt, model_name, temperature, top_p, top_k, max_tokens)
    return response
|
| 79 |
|
| 80 |
+
def parse_and_save_code(ai_code, short_title):
    """Parse the model's markdown response and save each code block to disk.

    Extracts the ```pip, ```bash and ```powershell fenced blocks from
    *ai_code*. If no ```pip fence is found, the whole response text is used
    as the pip command; the bash and powershell entries exist only when their
    fences are present and non-empty.

    Args:
        ai_code: Markdown text returned by the model.
        short_title: Slug appended to the base file name
            ("pip_command_<short_title>").

    Returns:
        Dict mapping block kind ("pip", "bash", "powershell") to
        {"name": <file name>, "code": <extracted code>}. Each entry is also
        written to a file of that name in the current working directory.
    """
    def _extract(tag):
        # DOTALL lets the body span lines; IGNORECASE tolerates ```Bash etc.
        match = re.search(rf'```{tag}\s*(.*?)\s*```', ai_code, re.DOTALL | re.IGNORECASE)
        return match.group(1).strip() if match else None

    pip_code = _extract('pip')
    if pip_code is None:
        # Fallback: no ```pip fence — treat the entire response as the command.
        pip_code = ai_code.strip()
    bash_code = _extract('bash')
    ps1_code = _extract('powershell')

    base_file_name = f"pip_command_{short_title}"

    files = {"pip": {"name": f"{base_file_name}.txt", "code": pip_code}}
    if bash_code:
        files["bash"] = {"name": f"{base_file_name}.sh", "code": bash_code}
    if ps1_code:
        files["powershell"] = {"name": f"{base_file_name}.ps1", "code": ps1_code}

    # Explicit UTF-8: model output may contain emoji/non-ASCII, which would
    # raise UnicodeEncodeError under a cp1252 default encoding on Windows.
    for value in files.values():
        with open(value["name"], "w", encoding="utf-8") as f:
            f.write(value["code"])

    return files
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 107 |
|
| 108 |
|
| 109 |
def main():
|
| 110 |
+
st.title("๐โจ Gemini2 Pip Gen Pro by [Elias Andrade](https://github.com/chaos4455)")
|
| 111 |
+
st.markdown("Generate advanced pip install commands with ease! ๐")
|
| 112 |
st.markdown("---")
|
| 113 |
|
| 114 |
# Layout in columns (sidebar and main area)
|
|
|
|
| 124 |
top_k = st.slider("Top K", min_value=1, max_value=100, value=40, step=1, help="Adjust the AI's number of candidate tokens.")
|
| 125 |
max_tokens = st.number_input("๐ Max Tokens", min_value=128, max_value=8192, value=8192, step=128, help="Adjust the maximum size of the response.")
|
| 126 |
|
| 127 |
+
with st.expander("๐ Libraries & Groups"):
|
| 128 |
+
available_libraries = {
|
| 129 |
+
"๐ Data Analysis": ["pandas", "numpy", "scipy", "matplotlib", "seaborn", "plotly", "bokeh", "statsmodels", "scikit-image", "geopandas", "altair", "holoviews", "datashader", "missingno", "vaex", "dask", "xarray", "polars", "arrow", "numba", "cudf", "cupy", "streamz", "panel", "hvplot"],
|
| 130 |
+
"๐ง Machine Learning": ["scikit-learn", "tensorflow", "torch", "keras", "xgboost", "lightgbm", "catboost", "pytorch-lightning", "transformers", "optuna", "mlflow", "gradio", "huggingface-hub", "sentence-transformers", "fastai", "librosa", "gensim", "spacy", "nltk", "opencv-python", "imbalanced-learn", "sktime", "umap-learn", "fairlearn", "shap", "eli5", "snorkel", "thinc"],
|
| 131 |
+
"๐ธ๏ธ Web Development": ["flask", "fastapi", "django", "requests", "beautifulsoup4", "aiohttp", "uvicorn", "gunicorn", "jinja2", "starlette", "websockets", "flask-restful", "django-rest-framework", "scrapy", "selenium", "playwright", "httpx", "rich", "pyramid", "bottle", "tornado", "dash", "plotly-dash", "streamlit", "gradio"],
|
| 132 |
+
"๐๏ธ Database": ["sqlalchemy", "psycopg2", "pymongo", "mysql-connector-python", "sqlite3", "redis", "cassandra-driver", "pyodbc", "aiosqlite", "kafka-python", "motor", "neo4j", "influxdb", "elasticsearch", "pyarrow", "clickhouse-driver", "arangodb", "couchbase", "dgraph-python", "tinydb", "dataset"],
|
| 133 |
+
"โ๏ธ Cloud": ["boto3", "google-cloud-storage", "azure-storage-blob", "kubernetes", "docker", "apache-libcloud", "pulumi", "awscli", "google-cloud-sdk", "azure-cli", "terraform", "openstack", "ansible", "salt", "chef", "cdktf", "serverless", "aws-cdk", "google-cloud-build", "azure-pipelines", "docker-compose", "moto", "localstack"],
|
| 134 |
+
"๐ค IA & LLM": ["langchain", "openai", "diffusers", "sentence-transformers", "stable-diffusion", "huggingface-hub", "pyannote-audio", "whisper", "nltk", "spacy", "transformers", "accelerate", "datasets", "einops", "faiss-cpu", "bitsandbytes", "peft", "trl", "haystack", "llama-index", "deepsparse", "nanogpt", "autogpt", "babyagi","ml-agents", "tensorboardx"],
|
| 135 |
+
"๐ ๏ธ Dev Tools": ["pytest", "flake8", "mypy", "black", "isort", "pylint", "tox", "pre-commit", "bandit", "coverage", "virtualenv", "pipenv", "poetry", "invoke", "nox", "twine", "wheel", "setuptools", "build", "debugpy", "pyinstrument", "memory-profiler", "cProfile", "pdbpp", "ipython"],
|
| 136 |
+
"โ๏ธ Data Engineering": ["apache-airflow", "dask", "luigi", "pyspark", "ray", "prefect", "dbt-core", "pandas-gbq", "petl", "sqlalchemy-redshift", "fugue", "ibis-framework", "mindsdb", "koalas", "vaex-core", "feast", "flink", "beam", "kafka-python", "clickhouse-driver", "superset", "metabase", "trino"],
|
| 137 |
+
"๐ Networking": ["requests", "httpx", "socketio", "paramiko", "netmiko", "scapy", "dnspython", "asyncssh", "tqdm", "gevent", "websocket-client", "pyserial", "grpcio", "aiozmq", "uvloop", "aiofiles", "urllib3", "fastsocket", "twisted", "aiohttp-socks", "aioredis", "websockets"],
|
| 138 |
+
"๐ Security": ["cryptography", "pyjwt", "requests-oauthlib", "paramiko", "pyOpenSSL", "passlib", "hashlib", "bcrypt", "python-nmap", "sqlmap", "scapy", "yara-python", "pwn", "mitmproxy", "themis", "pycryptodome", "tpm2-py", "pyspy", "volatility3", "pefile", "dpkt"],
|
| 139 |
+
"๐จ GUI": ["tkinter", "pyqt5", "kivy", "wxpython", "pygame", "dearpygui", "pyglet", "flet", "toga", "eel", "qt-material", "pyside6", "pywebview", "guizero", "remif", "taipy"],
|
| 140 |
+
"๐งช Testing": ["unittest", "pytest-cov", "hypothesis", "behave", "locust", "nose", "selenium", "playwright", "mock", "freezegun", "ddt", "tox", "robotframework", "hypothesis", "vcrpy", "responses", "faker", "factory-boy", "coveragepy", "parameterized"],
|
| 141 |
+
"๐งฐ Utilities": ["click", "typer", "argparse", "rich", "tqdm", "colorama", "schedule", "python-dotenv", "shutil", "pathlib", "arrow", "toml", "json5", "xmltodict", "humanize", "pendulum", "inflect", "unidecode", "boltons", "delegator.py"],
|
| 142 |
+
"๐ Documentation": ["sphinx", "mkdocs", "pdoc", "readthedocs-sphinx-search", "numpydoc", "docutils", "recommonmark", "sphinx-rtd-theme", "furo", "m2r2", "autoapi", "plantuml", "pycco", "doc8", "griffe", "mistletoe", "marko"],
|
| 143 |
+
"๐ฎ Game Dev": ["pygame", "pyglet", "arcade", "panda3d", "ursina", "cocos2d", "renpy", "pyopengl", "cymunk", "pytmx", "pybox2d", "pymunk", "moderngl", "glumpy", "glfw", "sdl2"],
|
| 144 |
+
"๐น Finance": ["numpy-financial", "pandas-datareader", "scikit-portfolio", "yfinance", "finmarketpy", "quantstats", "TA-Lib", "riskfolio-optimization", "alphalens", "pyfolio", "zipline", "backtrader", "quantlib-python", "pyfin", "quandl", "vnpy"],
|
| 145 |
+
"๐ค Robotics": ["rospy", "pybullet", "mujoco-py", "open3d", "numpy-stl", "pyserial", "opencv-python", "pymavlink", "robotframework", "webots", "vpython", "transforms3d", "trimesh", "pyrealsense2", "aiortc", "gymnasium", "stable-baselines3"],
|
| 146 |
+
"๐งฌ Bioinformatics": ["biopython", "scikit-bio", "pysam", "pandas-genomics", "statsmodels", "pybedtools", "matplotlib", "seaborn", "pyteomics", "networkx", "pyfaidx", "lifelines", "ngslib", "bcbio-nextgen", "scanpy", "anndata", "gseapy", "deepchem", "methylpy"],
|
| 147 |
+
"๐ญ Astronomy": ["astropy", "astroquery", "scikit-image", "photutils", "pyephem", "poliastro", "pyvista", "vtk", "healpy", "astroplan", "specutils", "sunpy", "gwpy", "lhorizon", "dustmaps", "reproject", "pymc3", "celerite"],
|
| 148 |
+
"๐บ๏ธ Geospatial": ["geopandas", "rasterio", "shapely", "pyproj", "fiona", "cartopy", "folium", "geopy", "earthpy", "descartes", "osmnx", "geohash", "momepy", "pyvista", "whitebox", "earthengine-api", "gisalchemist", "mapclassify", "spatialpandas"],
|
| 149 |
+
"๐ก IoT": ["pyserial", "paho-mqtt", "RPi.GPIO", "smbus2", "pysnmp", "can-utils", "zeroconf", "asyncio", "bluepy", "adafruit-circuitpython", "iotconnect", "thinger-io", "micropython", "homeassistant", "openhab", "tasmota", "esphome", "nodemcu"],
|
| 150 |
+
"๐ป System": ["psutil", "subprocess32", "watchdog", "inotify", "pywin32", "sh", "delegator.py", "appdirs", "click", "typer", "pathlib", "colorama", "fire", "daemonize", "lsof", "ruamel.yaml", "py-cpuinfo", "platformdirs"]
|
| 151 |
+
}
|
| 152 |
+
|
| 153 |
+
selected_libraries = {}
|
| 154 |
+
for group, libraries in available_libraries.items():
|
| 155 |
+
selected_libraries[group] = st.multiselect(f"{group} ", libraries, default=[],key=f"lib_{group}")
|
| 156 |
+
|
| 157 |
+
selected_groups = st.multiselect("๐๏ธ Groups", list(available_libraries.keys()), default=[], help="Choose groups to include libraries.")
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
with st.expander("โ๏ธ Pip Settings"):
|
| 161 |
+
custom_requirements = st.text_input("โ Custom Libraries:", placeholder="Specific libraries or versions (e.g., requests==2.28.1, numpy>=1.23.0)", help="List specific libraries and versions.")
|
| 162 |
+
specific_details = st.text_input("โน๏ธ Specific Details", placeholder="Special pip install options, edge cases", help="Add specific details for the pip command generation.")
|
| 163 |
+
pip_options = st.text_input("โ๏ธ Pip Options", placeholder="Add custom pip options like --index-url or --no-cache", help="Add custom options to your pip command")
|
| 164 |
+
target_os = st.selectbox("๐ฏ Target OS", ["Linux ๐ง", "Windows ๐ช"], index=0, help="Choose the target Operating System.")
|
| 165 |
+
|
| 166 |
+
|
| 167 |
|
| 168 |
with col2:
|
| 169 |
+
# User's base prompt
|
| 170 |
+
prompt_base = st.text_input("๐ Describe the Pip Command:", placeholder="Ex: Install all libraries for web development and data analysis", key="prompt_base")
|
| 171 |
+
|
| 172 |
+
if st.button("โจ Generate Pip Command"):
|
| 173 |
+
if not prompt_base and not selected_libraries and not selected_groups and not custom_requirements:
|
| 174 |
+
st.error("โ ๏ธ Please enter a command description or select libraries/groups")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
return
|
| 176 |
+
|
| 177 |
+
selected_libs = []
|
| 178 |
+
for group in selected_groups:
|
| 179 |
+
selected_libs.extend(selected_libraries[group])
|
| 180 |
+
|
| 181 |
+
with st.spinner("โณ Generating command..."):
|
| 182 |
+
ai_code = generate_pip_command(
|
| 183 |
+
prompt_base,
|
|
|
|
|
|
|
|
|
|
| 184 |
model_name,
|
| 185 |
temperature,
|
| 186 |
top_p,
|
| 187 |
top_k,
|
| 188 |
max_tokens,
|
| 189 |
+
selected_libs,
|
| 190 |
+
selected_groups,
|
|
|
|
|
|
|
|
|
|
|
|
|
| 191 |
custom_requirements,
|
| 192 |
specific_details,
|
| 193 |
+
pip_options,
|
| 194 |
+
target_os
|
|
|
|
|
|
|
| 195 |
)
|
| 196 |
|
| 197 |
if ai_code:
|
| 198 |
+
st.markdown("### โ
Generated Command:")
|
| 199 |
+
st.code(ai_code, language="text") #to output all codes
|
| 200 |
+
|
| 201 |
+
short_title = prompt_base[:30].strip().replace(" ", "_").lower()
|
| 202 |
+
files = parse_and_save_code(ai_code, short_title)
|
| 203 |
+
|
| 204 |
+
for key, value in files.items():
|
| 205 |
+
st.download_button(
|
| 206 |
+
label=f"โฌ๏ธ Download Command (.{(key)})",
|
| 207 |
+
data=value["code"],
|
| 208 |
+
file_name=value["name"],
|
| 209 |
+
mime="text/plain",
|
| 210 |
+
)
|
| 211 |
+
|
| 212 |
else:
|
| 213 |
+
st.error("โ Error generating the command. Check the connection with the AI and try again.")
|
| 214 |
+
|
| 215 |
|
| 216 |
if __name__ == "__main__":
|
| 217 |
main()
|