File size: 12,530 Bytes
0776f89 abf6c73 89c7b66 494ce6a 74d8a54 8099599 0776f89 74d8a54 448b694 c5dee76 abf6c73 74d8a54 7c724c7 74d8a54 abf6c73 494ce6a 818fecd 494ce6a 74d8a54 494ce6a 74d8a54 7c724c7 74d8a54 7c724c7 448b694 7c724c7 448b694 3fbd71c 448b694 3fbd71c 448b694 3fbd71c 448b694 89c7b66 3fbd71c 89c7b66 3fbd71c 448b694 3fbd71c 448b694 3fbd71c 448b694 89c7b66 448b694 89c7b66 4d822ed 74d8a54 7c724c7 448b694 b04beb4 448b694 7c724c7 448b694 0776f89 b03fec6 74d8a54 6d0cadc 818fecd 6d0cadc 74d8a54 818fecd 74d8a54 dcf0080 74d8a54 abf6c73 74d8a54 89c7b66 448b694 6d0cadc 448b694 89c7b66 448b694 89c7b66 448b694 89c7b66 448b694 89c7b66 448b694 3fbd71c 448b694 3fbd71c 74d8a54 abf6c73 818fecd 448b694 68bb209 b04beb4 448b694 818fecd 74d8a54 448b694 6d0cadc 448b694 b03fec6 0776f89 | 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 | import streamlit as st
import google.generativeai as genai
import re
import os
import textwrap
# Secret key and Google Gemini API configuration
API_KEY = st.secrets["GOOGLE_API_KEY"]
# FIX: the key was read but never handed to the SDK — without this call every
# request made through genai.GenerativeModel fails to authenticate.
genai.configure(api_key=API_KEY)
# Page configuration
st.set_page_config(page_title="πβ¨ Gemini2 CI/CD Gen Pro", page_icon="πβ¨", layout="wide")
# --- Helper Functions ---
def send_message_to_model(message, model_name, temperature, top_p, top_k, max_tokens):
    """Send a single message to the Gemini model and return its text reply.

    On any API/SDK failure the error is surfaced in the Streamlit UI and
    None is returned instead of raising.
    """
    # Sampling/decoding parameters for this one request.
    generation_config = {
        "temperature": temperature,
        "top_p": top_p,
        "top_k": top_k,
        "response_mime_type": "text/plain",
        "max_output_tokens": max_tokens,
    }
    try:
        model = genai.GenerativeModel(
            model_name=model_name,
            generation_config=generation_config,
        )
        # A fresh chat per call — no history is carried between requests.
        chat = model.start_chat(history=[])
        reply = chat.send_message(message)
        return reply.text
    except Exception as exc:
        st.error(f"β Error communicating with the AI: {exc}")
        return None
def generate_cicd_pipeline(prompt_base, model_name, temperature, top_p, top_k, max_tokens, target_systems, custom_requirements, specific_details, deploy_details, docker_details, kubernetes_details, test_details):
    """Assemble the full DevOps prompt from the user's settings and request a pipeline from the model."""
    def shown(value):
        # Every optional field renders as the literal string "None" when empty.
        return value if value else "None"

    prompt = f"""
You are an expert DevOps engineer specializing in CI/CD pipelines. Your task is to generate complete, secure, and efficient CI/CD pipeline configurations based on the following description:
**Goal:** Create the most complete, detailed, and efficient CI/CD pipeline possible, considering all variables, edge cases, and potential scenarios.
**Pipeline Description:** {prompt_base}
**Target Systems:** {shown(target_systems)}
**Deployment Details:** {shown(deploy_details)}
**Docker Details:** {shown(docker_details)}
**Kubernetes Details:** {shown(kubernetes_details)}
**Test Details:** {shown(test_details)}
**Custom Requirements:** {shown(custom_requirements)}
**Specific Details:** {shown(specific_details)}
**Response Format:**
- Respond in Markdown format, including YAML code blocks for Argo CD, GitHub Actions, GitLab Runner, Jenkins, CircleCI, and a Docker Compose, with their original formatting and without line breaks.
- The Argo CD code block must be delimited by ```yaml argocd and ```.
- The GitHub Actions code block must be delimited by ```yaml githubactions and ```.
- The GitLab Runner code block must be delimited by ```yaml gitlabrunner and ```.
- The Jenkins code block must be delimited by ```yaml jenkins and ```.
- The CircleCI code block must be delimited by ```yaml circleci and ```.
- The Docker Compose code block must be delimited by ```yaml dockercompose and ```.
- Do not include comments, explanations, or any other text outside the code block, unless the prompt requires.
- The code must maintain its full vertical formatting, respecting indentation and line breaks.
- The code must be realistic, using real-world examples, data, and situations.
- Explore different approaches, techniques, and advanced practices, always prioritizing security and efficiency.
- Use best practices for building images, running tests, and deploying applications.
- Unless the user specifies otherwise, use the most current and secure versions of the tools and resources, using YAML and following best practices.
- Use incremental reasoning to add improvements, expansions, and considerations to your code.
- Use the history of the conversations so that the response is incremental.
- If Docker is required, generate configurations to build Docker Images and push to Dockerhub.
- If kubernetes is required, use Argo CD for deployments and create the necessary configurations, generating the application.yaml, and any other resources.
- If any connection parameters are needed, use placeholders in the code with comments like 'PLEASE FILL THIS' indicating where the user should fill in.
- Use comments to explain the configurations.
- If the user does not specify target systems, generate for Argo CD, Github Actions and Gitlab Runner.
**Important:**
- Generate only one pipeline at a time.
- Create the longest, most complete, and detailed code possible to cover a wide range of possibilities and scenarios.
- Consider all the details of the request, expanding the response and improving the code.
"""
    # Delegate the actual API round-trip to the shared helper.
    return send_message_to_model(prompt, model_name, temperature, top_p, top_k, max_tokens)
def parse_and_save_yaml(ai_code, short_title):
    """Extract per-system YAML blocks from the model's markdown and save them.

    Looks for fenced blocks of the form ```yaml <system> ... ``` for each of
    the six supported systems, writes every block found to
    "cicd_pipeline_<short_title>_<system>.yaml" in the current directory,
    and returns a dict mapping the system key to {"name": ..., "code": ...}.
    Systems absent from *ai_code* are simply omitted from the result.
    """
    # One entry per supported target system; the key doubles as the fence tag.
    systems = ("argocd", "githubactions", "gitlabrunner", "jenkins", "circleci", "dockercompose")
    base_file_name = f"cicd_pipeline_{short_title}"
    files = {}
    for system in systems:
        match = re.search(rf'```yaml {system}\s*(.*?)\s*```', ai_code, re.DOTALL | re.IGNORECASE)
        if match:
            files[system] = {
                "name": f"{base_file_name}_{system}.yaml",
                "code": match.group(1).strip(),
            }
    for value in files.values():
        with open(value["name"], "w", encoding="utf-8") as f:
            f.write(value["code"])
    return files
def main():
    """Render the Streamlit UI, collect settings, and drive pipeline generation."""
    st.title("πβ¨ Gemini2 CI/CD Gen Pro by [Elias Andrade](https://github.com/chaos4455)")
    st.markdown("Generate advanced CI/CD pipelines with ease! π")
    st.markdown("---")
    # Layout in columns (sidebar and main area)
    col1, col2 = st.columns([1, 3])
    with col1:
        st.header("βοΈ Settings")
        with st.expander("β¨ AI Settings"):
            model_name = st.selectbox("π€ AI Model", ["gemini-2.0-flash-exp", "gemini-1.5-flash"], index=0, help="Choose the AI model.")
            temperature = st.slider("π‘οΈ Temperature", min_value=0.1, max_value=1.0, value=0.7, step=0.1, help="Adjust the AI's creativity.")
            top_p = st.slider("Top P", min_value=0.1, max_value=1.0, value=0.8, step=0.1, help="Adjust the AI's sampling.")
            top_k = st.slider("Top K", min_value=1, max_value=100, value=40, step=1, help="Adjust the AI's number of candidate tokens.")
            max_tokens = st.number_input("π Max Tokens", min_value=128, max_value=8192, value=8192, step=128, help="Adjust the maximum size of the response.")
        with st.expander("π§° Pipeline Settings"):
            target_systems = st.multiselect("π― Target Systems", ["Argo CD", "GitHub Actions", "GitLab Runner", "Jenkins", "CircleCI", "Docker Compose", "Other"], default=["Argo CD", "GitHub Actions", "GitLab Runner"], help="Select the target systems for the pipeline.")
            deploy_details = st.text_area("π Deployment Details", placeholder="Specify deployment details, e.g., Kubernetes manifests, Dockerfiles, etc.", help="Provide any details about how and where to deploy")
            docker_details = st.text_input("π³ Docker Details", placeholder="Specify details like Docker registry, image names, etc.", help="Provide details about docker builds")
            kubernetes_details = st.text_input("βΈοΈ Kubernetes Details", placeholder="Specify details like namespaces, deployments, etc.", help="Provide details about kubernetes deployments")
            test_details = st.text_input("π§ͺ Test Details", placeholder="Specify test details or test commands", help="Provide any details about the tests to be executed")
            custom_requirements = st.text_input("β Custom Requirements:", placeholder="Specific steps or tools (e.g., terraform, ansible)", help="List any specific steps or tools.")
            specific_details = st.text_input("βΉοΈ Specific Details", placeholder="Special options, edge cases", help="Add specific details for the pipeline generation.")
    with col2:
        # User's base prompt
        prompt_base = st.text_area("π Describe the CI/CD Pipeline:",
                                   placeholder=textwrap.dedent("""
                                   Describe your CI/CD pipeline in detail.
                                   Include:
                                   - Steps for building, testing, and deploying your application.
                                   - Specific tools to be used (e.g., Docker, Kubernetes, Argo CD, etc).
                                   - DockerHub details (user, password) or any other registry details.
                                   - Kubernetes cluster details.
                                   - Any custom parameters, configurations, or secrets you need.
                                   If those details are not included, the pipeline will be generated using defaults, and you must fill in the parameters.
                                   """), key="prompt_base",
                                   help="Provide a detailed description of your CI/CD pipeline.")
        if st.button("β¨ Generate CI/CD Pipeline"):
            if not prompt_base:
                st.error("β οΈ Please enter a pipeline description.")
                return
            with st.spinner("β³ Generating pipeline..."):
                ai_code = generate_cicd_pipeline(
                    prompt_base,
                    model_name,
                    temperature,
                    top_p,
                    top_k,
                    max_tokens,
                    target_systems,
                    custom_requirements,
                    specific_details,
                    deploy_details,
                    docker_details,
                    kubernetes_details,
                    test_details
                )
            if ai_code:
                # FIX: this literal was split across two source lines, which is a
                # syntax error in Python — rejoined into a single string.
                st.markdown("### β Generated Pipeline:")
                st.code(ai_code, language="text")
                # FIX: sanitize the title — raw prompt text may contain characters
                # that are invalid in file names (slashes, newlines, etc.).
                short_title = re.sub(r"\W+", "_", prompt_base[:30].strip()).lower()
                files = parse_and_save_yaml(ai_code, short_title)
                for key, value in files.items():
                    st.download_button(
                        label=f"β¬οΈ Download Pipeline (.{key})",
                        data=value["code"],
                        file_name=value["name"],
                        mime="application/x-yaml",
                    )
            else:
                st.error("β Error generating the pipeline. Check the connection with the AI and try again.")
if __name__ == "__main__":
main() |