diff --git a/app.py b/app.py index 3ccfd4d15e052ca47494d04489bc880597fe078b..97bb99421b8cc6a81d11b626b12f8c88dbc40918 100644 --- a/app.py +++ b/app.py @@ -3,14 +3,52 @@ import json import os from pathlib import Path -# Load slash commands data +# Category color mapping +CATEGORY_COLORS = { + 'ai-dev': '#8b5cf6', + 'ai-engineering': '#7c3aed', + 'claude-code': '#2563eb', + 'common-tasks': '#059669', + 'conv-mgmt': '#0891b2', + 'cybersec': '#dc2626', + 'development': '#3b82f6', + 'documentation': '#6366f1', + 'educational': '#f59e0b', + 'experiments': '#ec4899', + 'filesystem-ops': '#84cc16', + 'for-fun': '#f97316', + 'general-purpose': '#64748b', + 'ideation': '#a855f7', + 'local-ai': '#6366f1', + 'media': '#06b6d4', + 'misc': '#9ca3af', + 'operations': '#10b981', + 'seo-web': '#14b8a6', + 'sysadmin': '#22c55e', + 'tech-docs': '#3b82f6', + 'writing-and-editing': '#f472b6' +} + +# Load slash commands data from categorized folders def load_commands(): - """Load slash commands from JSON file""" - json_path = Path("slash-commands.json") - if json_path.exists(): - with open(json_path, 'r') as f: - return json.load(f) - return [] + """Load slash commands from categorized folder structure""" + commands = [] + commands_dir = Path("commands") + + if not commands_dir.exists(): + return [] + + for category_dir in sorted(commands_dir.iterdir()): + if category_dir.is_dir(): + category = category_dir.name + for cmd_file in sorted(category_dir.glob("*.md")): + commands.append({ + 'name': cmd_file.stem, + 'path': str(cmd_file), + 'category': category + }) + + return commands def load_command_content(path): """Load the content of a specific command file""" @@ -42,6 +80,7 @@ def create_command_card(command): """Create an HTML card for a command""" name = command['name'] path = command['path'] + category = command.get('category', 'misc') content = load_command_content(path) # Extract description from content if available @@ -54,30 +93,44 @@ def create_command_card(command): 
description = line.strip() break - # Escape content for JavaScript - replace backticks and backslashes - escaped_content = content.replace('\\', '\\\\').replace('`', '\\`').replace('$', '\\$') + # Get category color + category_color = CATEGORY_COLORS.get(category, '#9ca3af') + category_display = category.replace('-', ' ').title() + + # Escape content for JavaScript + escaped_content = content.replace('\\', '\\\\').replace('`', '\\`').replace('$', '\\$').replace('"', '\\"') + + # Generate unique ID for details element + detail_id = f"cmd-{name.replace(' ', '-')}" card_html = f""" -
-
-
-

/{name}

-

{description[:200]}...

+
+ +
+
+
+ /{name} + {category_display} +
+

{description[:150]}{'...' if len(description) > 150 else ''}

+
+ + +
-
-
- - View Full Command - -
-
{content}
+ +
+
+ +
{content}
-
- -
+
+ """ return card_html @@ -106,6 +159,12 @@ with gr.Blocks( .gradio-container { max-width: 1200px !important; } + details[open] summary svg { + transform: rotate(180deg); + } + details summary::-webkit-details-marker { + display: none; + } """ ) as demo: diff --git a/commands/ai-dev/private-claude-md.md b/commands/ai-dev/private-claude-md.md new file mode 100644 index 0000000000000000000000000000000000000000..07b74a9807e578a32852ca66fe313c07aaedb8eb --- /dev/null +++ b/commands/ai-dev/private-claude-md.md @@ -0,0 +1 @@ +Add CLAUDE.md to the gitignore. If it hasn't already been created, add it to gitignore, then write it. \ No newline at end of file diff --git a/commands/ai-engineering/implementation-editors/system-prompt-to-agent.md b/commands/ai-engineering/implementation-editors/system-prompt-to-agent.md new file mode 100644 index 0000000000000000000000000000000000000000..98acd4bb1820cccda180cf70b5aaaa9d2e143f89 --- /dev/null +++ b/commands/ai-engineering/implementation-editors/system-prompt-to-agent.md @@ -0,0 +1,20 @@ +This repository contains a system prompt and/or surrounding code. + +This utility may originally have been implemented as an AI assistant (or chatbot). + +You can infer that this repository exists so that the original tool is being re-implemented. + +That reimplementation may be: + +- The system prompt is being integrated into a code-defined AI agent running as a backend service +- The user may wish to reimlement this as a custom GUI + +The user will provide details about their envisioned implementation. Alternatively they will ask you to provide suggestions. + +Now, ask the user how they would like to proceed. + +In all cases: + +Don't "lose" the original system prompt. + +You should keep the original in an /archive folder which you should create and, if necessary, iterate upon it in this codebase. The original system prompt can be prefixed or suffixed with -original so that it can be clearly distinguished from the updated version. 
\ No newline at end of file diff --git a/commands/ai-engineering/multiagent.md new file mode 100644 index 0000000000000000000000000000000000000000..d0e3944f61fe1e0f2321a2231f70f7a5d2e3a2f2 --- /dev/null +++ b/commands/ai-engineering/multiagent.md @@ -0,0 +1,7 @@ +This repository contains a number of AI agents + +I would like to integrate them within a multiagent framework + +Review the functionality of the agents and suggest the most suitable framework(s) + +Do not implement an integration until I have approved of a recommendation \ No newline at end of file diff --git a/commands/ai-engineering/prompt-editors/depersonalise-prompts.md new file mode 100644 index 0000000000000000000000000000000000000000..f10b2fe31a3ad0ab945cb4c16575cba44a6e2706 --- /dev/null +++ b/commands/ai-engineering/prompt-editors/depersonalise-prompts.md @@ -0,0 +1,18 @@ +This repository contains system prompts for AI systems. + +Your task is as follows: + +- Review the prompts written by the user +- Rewrite any aspects that make the prompts specific only to the user or their immediate context + +For example: + +If you encounter a prompt that contains: "You are a friendly assistant who helps Daniel," then you would rewrite this as "You are a friendly assistant who helps the user ..." + +If you found: "You are a restaurant recommendation agent who helps to find great restaurant experiences for Daniel in Jerusalem." + +Then you would rewrite this as: + +"You are a restaurant recommendation agent who helps to find great restaurants for the user in their local environment." + +You may remediate basic defects in the prompts you encounter (like a lack of spacing or punctuation). But make no other edits. 
\ No newline at end of file diff --git a/commands/ai-engineering/prompt-editors/edit-system-prompt.md new file mode 100644 index 0000000000000000000000000000000000000000..75a86e4109c4898a486f7a088ab7a1c4261b9317 --- /dev/null +++ b/commands/ai-engineering/prompt-editors/edit-system-prompt.md @@ -0,0 +1,17 @@ +Edit the system prompt according to the following instructions: + +- Fix obvious typos +- Add missing sentence structure and paragraph spacing +- Remediate any errors that you can reasonably infer arose from STT mis-transcription + +You may also: + +- Add headers to improve the flow and make the system prompt easier to parse +- Implement any other edits needed to maximise the effectiveness of the system prompt in achieving its goal of directing AI tools + +You must not: + +- Lose detail that the user included +- Arbitrarily reduce the length of the prompt + +You should shorten prompts only if they are significantly beyond recommended lengths for system prompts and would likely impair the context window. If this is not the case, you should not assume that prompt shortening is an objective. \ No newline at end of file diff --git a/commands/ai-engineering/prompt-editors/rewrite-as-structured.md new file mode 100644 index 0000000000000000000000000000000000000000..1c271c8ea647b827d731ffdd4850153f8596b14e --- /dev/null +++ b/commands/ai-engineering/prompt-editors/rewrite-as-structured.md @@ -0,0 +1,12 @@ +Your task is to take this system prompt and rewrite it for implementation in a structured AI system. + +In order to do so, adhere to the following instructions: + +- Create a folder called prompt at the level of the filesystem where the system prompt currently exists. 
+- Move the prompt into it as prompt.md (or systemprompt.md or whatever the prompt file was originally named) + +Next: + +- Edit the prompt text to incorporate the JSON output definition that the AI should be constrained to giving. Add an instruction to the AI tool that it is working in a structured workflow and must only return valid JSON. +- Create, in the prompt folder, schema.json. This file should contain only the OpenAPI compliant JSON object schema which the prompt requires +- Create, in the prompt folder, example.json. This file should contain only a JSON example showing a correct output for the AI to emulate when outputting in accordance with the defined JSON schema \ No newline at end of file diff --git a/commands/claude-code/context-to-claude.md new file mode 100644 index 0000000000000000000000000000000000000000..1583bd83bb1d8d329566fa7b42891efeb38fb8d9 --- /dev/null +++ b/commands/claude-code/context-to-claude.md @@ -0,0 +1,12 @@ +This repository contains a file called context.md. + +I use context.md to capture context data about projects that I am working on. + +CLAUDE.md (as you know) is intended as an agent-readable file providing context data to the AI agent. + +I would like you to do two things: + +1: Read context.md (or similar filename). Lightly edit it for clarity: add missing paragraph spacing, punctuation; fix typos; add headings and subheadings to note flow. + + +2: Create CLAUDE.md. CLAUDE.md should be a condensed and clarified version of the context file that zones in on the key points from an AI agent's perspective. Include, in CLAUDE.md, a note that there is longer form context data also available in the repo. 
\ No newline at end of file diff --git a/commands/claude-code/global-claude-private.md b/commands/claude-code/global-claude-private.md new file mode 100644 index 0000000000000000000000000000000000000000..cb61c3aa39c5e51018c8c9446af6e2b7ec3763c9 --- /dev/null +++ b/commands/claude-code/global-claude-private.md @@ -0,0 +1 @@ +Add CLAUDE_PRIVATE.md to my global git ignore. \ No newline at end of file diff --git a/commands/claude-code/mcp-mgmt/global-cc-mcp.md b/commands/claude-code/mcp-mgmt/global-cc-mcp.md new file mode 100644 index 0000000000000000000000000000000000000000..dc983a9a11b3c0cdd79303129d16887f45f4a99b --- /dev/null +++ b/commands/claude-code/mcp-mgmt/global-cc-mcp.md @@ -0,0 +1,18 @@ +Please help the user to add a new *global* MCP server for user with you (Claude Code) + +If necessary, refer to the latest guidelines released by Anthropic: + +https://docs.claude.com/en/docs/claude-code/mcp + +But in the first instance: + +View: ~/.claude.json + +And either: + +- Start the MCP servers JSON object OR +- Add the new MCP to the existing object + +The user will provide a link to the MCP and/or its project home. 
+ +You should create the syntax or copy it from the reference (and the user will provide the secret(s) if needed) \ No newline at end of file diff --git a/commands/claude-code/mcp-mgmt/project-cc-mcp.md b/commands/claude-code/mcp-mgmt/project-cc-mcp.md new file mode 100644 index 0000000000000000000000000000000000000000..9ac5469291033b3389a8aab0ed2afe9a4e4c6dbe --- /dev/null +++ b/commands/claude-code/mcp-mgmt/project-cc-mcp.md @@ -0,0 +1,14 @@ +Please help the user to add a *project level* MCP server within this repo + +If necessary, refer to the latest guidelines released by Anthropic: + +https://docs.claude.com/en/docs/claude-code/mcp + +But in the first instance: + +Create a .mcp.json at the base of this repo and either: + +- Start the JSON object +- Add the new MCP to the existing object + +The user will provide a link to the MCP and/or its project home. You should create the syntax or copy it from the reference (and the user will provide the secret(s) if needed) \ No newline at end of file diff --git a/commands/claude-code/private-public-claude.md b/commands/claude-code/private-public-claude.md new file mode 100644 index 0000000000000000000000000000000000000000..c2aebd3ea271fe864e74558343c42f29316d9f35 --- /dev/null +++ b/commands/claude-code/private-public-claude.md @@ -0,0 +1,16 @@ +Please create two files in this repository: + +- CLAUDE.md +- CLAUDE_PRIVATE.md + +CLAUDE.md is the conventional file used to provide context data about a project to Claude CLI. + +The purpose of CLAUDE_PRIVATE.md is to provide additional instructions that the user does not wish to commit to a public-facing repo (for example - the user might contain instructions to search for and remove personal PII.) + +CLAUDE.md should be created or updated. If the repo is empty, it can just be a placeholder file with {replace with project context}. + +But you should add this to it either way: + +"Project context: this is a public code repository. 
CLAUDE.md will be committed for the benefit of other users. However, there is also CLAUDE_PRIVATE.md. The user will use this to note private instructions which should not be noted in CLAUDE.md. As an example: the user might provide a list of PII to filter against. CLAUDE.md might say "please conduct a PII check before committing. Refer to the list of keywords in CLAUDE_PRIVATE.md." + +CLAUDE_PRIVATE.md is part of the user's global git ignore pattern so you do not need to create a .gitignore at the repo level. \ No newline at end of file diff --git a/commands/claude-code/public-slash.md b/commands/claude-code/public-slash.md new file mode 100644 index 0000000000000000000000000000000000000000..abc0b828f0ac5214e2a1378e761d2f9a47e6a108 --- /dev/null +++ b/commands/claude-code/public-slash.md @@ -0,0 +1,9 @@ +The project level slash commands in this repo (at ./claude/commands) can and should be shared publicly. + +However, .claude is git-ignored. + +Here's how I'd like you to work around this: + +- Create a sync script to sync ./.claude/commands to ./slash-commands + +Add as a pre-commit hook \ No newline at end of file diff --git a/commands/claude-code/shortcuts/start-from-task.md b/commands/claude-code/shortcuts/start-from-task.md new file mode 100644 index 0000000000000000000000000000000000000000..6f5fcef8fece2b64465e5cd5af2eb298353c81b9 --- /dev/null +++ b/commands/claude-code/shortcuts/start-from-task.md @@ -0,0 +1,13 @@ +This repository contains a task definition. + +It may be: + +- task.md +- forclaude.md +- spec.md + +Or within a folder like + +/tasks + +Find the task definition, parse it, and execute its instructions. 
\ No newline at end of file diff --git a/commands/claude-code/slash-command-edits.md b/commands/claude-code/slash-command-edits.md new file mode 100644 index 0000000000000000000000000000000000000000..5f80d298bfbc5c1da5c954447ce8780c04855563 --- /dev/null +++ b/commands/claude-code/slash-command-edits.md @@ -0,0 +1,8 @@ +This repository contains slash commands for Claude Code. Many of these were captured using speech to text and will contain the common defects seen in AI-transcribed speech. These include: missing punctuation, filler words (like 'um'), missing paragraphs. They may occasionally also contain mistranscribed speech. + +Please recurse through the folders in this repository and do the following: + +- Remediate any obvious defects you can identify +- Remediate defects in other slash commands + +Each slash command should be well-written, properly formatted, and optimised for intelligibility by the AI tool that will be processing it. \ No newline at end of file diff --git a/commands/claude-code/spec-from-audio.md b/commands/claude-code/spec-from-audio.md new file mode 100644 index 0000000000000000000000000000000000000000..9ed9d3c8d2956ea8057f7aab519cf703a2b26c79 --- /dev/null +++ b/commands/claude-code/spec-from-audio.md @@ -0,0 +1,15 @@ +This project contains a voice note called spec.mp3 (or another audio file type). This audio file is a voice recording in which the user has detailed the context about the project that they are working on. + +Here is your task: + +1: "Listen" to the audio; obtain a first pass transcription + +Then: + +2: Create spec.md. This should be a specification outlining the functionalities that the user has outlined in the recording. Spec.md should be suitable for its intended purpose of instructing an AI agent in developing the project. + +3: Create context.md. 
If you can identify aspects of the recording that, rather than providing immediately required spec directions, provided surrounding context, then add these to context.md (creating if it doesn't exist). + +4: Create or update CLAUDE.md. CLAUDE.md is the overarching set of context and directions for Claude Code. Keep it as light as possible but provide key info from the recording. + +Once you have completed these tasks, you may delete the audio binary from the project. \ No newline at end of file diff --git a/commands/common-tasks/file-conversion/documents/md-to-pdf.md b/commands/common-tasks/file-conversion/documents/md-to-pdf.md new file mode 100644 index 0000000000000000000000000000000000000000..054e6109875445613a74b85b601c02a9f64d5a97 --- /dev/null +++ b/commands/common-tasks/file-conversion/documents/md-to-pdf.md @@ -0,0 +1,3 @@ +Take this markdown document and make it into a PDF. + +Please use an appropriate rendering CLI and pay attention to the formatting. \ No newline at end of file diff --git a/commands/common-tasks/file-conversion/documents/mds-to-pdf.md b/commands/common-tasks/file-conversion/documents/mds-to-pdf.md new file mode 100644 index 0000000000000000000000000000000000000000..8dd55443d82ef7b419b00520fd90fb54e6f72ec1 --- /dev/null +++ b/commands/common-tasks/file-conversion/documents/mds-to-pdf.md @@ -0,0 +1,5 @@ +This folder contains a group of markdown documents. + +Please combine them into one PDF. + +Ensure that there are clean page breaks for every file. Unless otherwise instructed, or if there is an obvious sequence denoted in the file names, concatenate them alphabetically and then render out to PDF. 
\ No newline at end of file diff --git a/commands/common-tasks/file-mgmt/sequential-filenames.md b/commands/common-tasks/file-mgmt/sequential-filenames.md new file mode 100644 index 0000000000000000000000000000000000000000..0ed3dbf798521c2a7c61ca64c1b986cd42ddbfe9 --- /dev/null +++ b/commands/common-tasks/file-mgmt/sequential-filenames.md @@ -0,0 +1,7 @@ +This folder contains files with various filenames. + +Please standardise files of the same format on the most recurrent filename type. For example, if you encounter a folder with 10 PNGs and 2 JPGs you would convert the 2 JPG to PNG. + +Then: + +Rename all files sequentially from whatever they are named as to 1.png, 2.png etc (substituting for the prevalent filetype). \ No newline at end of file diff --git a/commands/common-tasks/find-command.md b/commands/common-tasks/find-command.md new file mode 100644 index 0000000000000000000000000000000000000000..e897ce271f715cd6e564160e0345353952f859d1 --- /dev/null +++ b/commands/common-tasks/find-command.md @@ -0,0 +1,16 @@ +Launch an interactive fuzzy finder to search through all available slash commands. + +Execute: ccslash-finder + +This will: +1. Scan all slash commands in ~/.claude/commands +2. Present them in an interactive fzf picker with search +3. Show live preview of command content as you navigate +4. Display the selected command's details when you press Enter + +The script will output the selected command's: +- Command name (to use with /) +- File location +- Full command content + +Then inform me which command was selected so I understand the context and can help you use it. diff --git a/commands/common-tasks/repo-to-hf-space.md b/commands/common-tasks/repo-to-hf-space.md new file mode 100644 index 0000000000000000000000000000000000000000..de2dc7970547d1bae0e1cc19ede83e26b88fa5e9 --- /dev/null +++ b/commands/common-tasks/repo-to-hf-space.md @@ -0,0 +1,16 @@ +This repository contains a Hugging Face space (the remote is already connected). 
+ +I would like to use this repository to create a demo (or tool) based upon a code repository that exists elsewhere on my filesystem (or on the internet). + +I will provide the path to the code repo (or URL). + +When I do that, please: + +- Copy the files into this repo +- Implement it as a HF Space + +For example: if the source repo is a CLI that shows an audio processing pipeline, create, here, the corresponding HF space that is a basic GUI. + +AI inference will often be required. Implement this, in the demo, with a BYOK (bring your own key) methodology allowing the user to provide an API key for a suitable model which is held in the browser. + +Try to match models: if the demo uses a specific model, use that in the Gradio implementation unless there is a compelling reason not to. \ No newline at end of file diff --git a/commands/conv-mgmt/give-me-number-options.md b/commands/conv-mgmt/give-me-number-options.md new file mode 100644 index 0000000000000000000000000000000000000000..c8bb2a2078a0a45b1d43ea3717df8cdeb41596d4 --- /dev/null +++ b/commands/conv-mgmt/give-me-number-options.md @@ -0,0 +1,3 @@ +Thanks for providing those suggestions. + +Please provide them again in a numbered list in order that I can respond with a comma separated list of the ideas which I would like you to implement. \ No newline at end of file diff --git a/commands/conv-mgmt/saving-outputs/add-to-my-notes.md b/commands/conv-mgmt/saving-outputs/add-to-my-notes.md new file mode 100644 index 0000000000000000000000000000000000000000..b32ad09ee20cb48273d1e4452697bd5944284147 --- /dev/null +++ b/commands/conv-mgmt/saving-outputs/add-to-my-notes.md @@ -0,0 +1,9 @@ +Thanks for that useful information. + +I would like to document this for my later reference. + +Please do the following: + +- Navigate to my reference notebook at: /home/daniel/obsidian-notebooks/notes-from-ai +- Create a document in the most logical folder in the filesystem. 
If one doesn't exist to house this topic, create it +- When the note has been created, push the notebook diff --git a/commands/conv-mgmt/spoofing/spoof-a-work-chat.md b/commands/conv-mgmt/spoofing/spoof-a-work-chat.md new file mode 100644 index 0000000000000000000000000000000000000000..77b5fd3abc0213c072591cd647a4b85601a27ba3 --- /dev/null +++ b/commands/conv-mgmt/spoofing/spoof-a-work-chat.md @@ -0,0 +1,17 @@ +Generate a random output to the terminal that appears to be a conversation between you and I on a very serious topic that seems technical and work related. + +Even though this is not how claude works, use diarisation. + +Example: + +CLAUDE: Please provide the SSH key for the SQL server + +USER: Hang on a moment + +CLAUDE: Great, take your time + +USER: Try ~ + +CLAUDE: Give me a second + +~~ invoking MCP....~ \ No newline at end of file diff --git a/commands/conv-mgmt/steers/dont-reinvent-the-wheel.md b/commands/conv-mgmt/steers/dont-reinvent-the-wheel.md new file mode 100644 index 0000000000000000000000000000000000000000..d35396bff3d408452e3fc0cc8e05011afb3b281a --- /dev/null +++ b/commands/conv-mgmt/steers/dont-reinvent-the-wheel.md @@ -0,0 +1,12 @@ +Please review the current state of the repository. + +Your task: + +Evaluate the extent to which we have custom coded things that may not need to be custom-coded. + +Evaluate how, and whether, we could implement this project more effectively by integrating existing stack components. + +After identifying that: + +- Suggest any potential pivots you have identified +- If I approve, implement a refactor \ No newline at end of file diff --git a/commands/conv-mgmt/steers/no-more-docs.md b/commands/conv-mgmt/steers/no-more-docs.md new file mode 100644 index 0000000000000000000000000000000000000000..abfb6dbd0e8a006f3155ac5a23234b76223e9b37 --- /dev/null +++ b/commands/conv-mgmt/steers/no-more-docs.md @@ -0,0 +1,3 @@ +Please stop creating unnecessary documentation. + +We need a README.md and nothing more. 
\ No newline at end of file diff --git a/commands/conv-mgmt/steers/use-uv.md b/commands/conv-mgmt/steers/use-uv.md new file mode 100644 index 0000000000000000000000000000000000000000..f8456fd4449a40ad18b5bec88cbe9c62de037622 --- /dev/null +++ b/commands/conv-mgmt/steers/use-uv.md @@ -0,0 +1,13 @@ +This repository contains Python. + +However, the repo doesn't have a virtual environment. + +Please: + +- Create the venv with uv +- Create requirements.txt +- Install and then activate the venv + +Once that has been done, ensure that the python scripts are nested within a scripts folder (with any refactoring necessary undertaken after the move). + +If there is an abundance of scripts, consider also adding a bash wrapper to setup /update the venv and run a specific script(s) \ No newline at end of file diff --git a/commands/cybersec/auditing/cloudflare-tunnel-proxy-audit.md b/commands/cybersec/auditing/cloudflare-tunnel-proxy-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..77b497a8d752e8122631af3ad9bac71a54bd0117 --- /dev/null +++ b/commands/cybersec/auditing/cloudflare-tunnel-proxy-audit.md @@ -0,0 +1,25 @@ +This machine is running a Cloudflare Tunnel (cloudflared) proxy. 
+ +Audit its cybersecurity with focus on: +- Cloudflare Tunnel authentication and credentials storage +- Tunnel token security and rotation +- Configuration file permissions and ownership +- Service account privileges (minimize privilege) +- Internal service exposure mapping +- TLS/HTTPS configuration for backend services +- Access policies and zero trust rules +- IP whitelisting and geo-blocking configuration +- Rate limiting and DDoS protection settings +- Tunnel ingress rules and routing logic +- Local firewall rules (should block direct external access) +- Logging configuration and log retention +- Update status of cloudflared daemon +- Health check endpoint security +- Service restart policies and failure handling +- Certificate validation for backend services +- Network isolation from other services +- Credential backup security +- Origin server authentication +- HTTP header security policies + +Do not remediate. However, you should document your findings in a detailed document written out to ~/ai-analysis. If it does not exist (the folder) create it to house the doc. diff --git a/commands/cybersec/auditing/docker-environment-audit.md b/commands/cybersec/auditing/docker-environment-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..1b3317e69e568963c4ed2643555a48de9d3d9e18 --- /dev/null +++ b/commands/cybersec/auditing/docker-environment-audit.md @@ -0,0 +1,20 @@ +This machine runs Docker containers and/or Docker infrastructure. 
+ +Audit its cybersecurity with focus on: +- Docker daemon configuration and socket permissions +- Container image vulnerabilities and provenance +- Container runtime security (AppArmor, SELinux, seccomp profiles) +- Network isolation and bridge configurations +- Volume mount security and bind mount risks +- Container privilege escalation risks (--privileged flag usage) +- Secret management practices +- Registry security and image signing +- Resource limits and DoS protection +- Docker API exposure and authentication +- Container escape vulnerabilities +- Base image update status +- Multi-stage build security +- Docker Compose file security issues +- User namespace remapping configuration + +Do not remediate. However, you should document your findings in a detailed document written out to ~/ai-analysis. If it does not exist (the folder) create it to house the doc. diff --git a/commands/cybersec/auditing/firewall-audit.md b/commands/cybersec/auditing/firewall-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..22910ad8c8127061bfbb0eb3b2f4b031dda221b8 --- /dev/null +++ b/commands/cybersec/auditing/firewall-audit.md @@ -0,0 +1,25 @@ +This machine is a firewall appliance or system. 
+ +Audit its cybersecurity with focus on: +- Firewall rule configuration and policy review +- Default deny vs default allow posture +- Inbound and outbound rule logic +- Port forwarding security and exposure +- NAT configuration vulnerabilities +- DMZ configuration if present +- Admin interface access restrictions +- Authentication mechanisms and password policies +- Firmware/software update status +- Logging configuration and log retention +- IDS/IPS configuration and rule updates +- VPN configuration and encryption strength +- DNS security (DNS filtering, DNSSEC) +- Network segmentation effectiveness +- Anti-spoofing rules +- Rate limiting and DDoS protection +- Management plane separation +- Certificate validity for HTTPS inspection +- Backup configuration security +- High availability failover security + +Do not remediate. However, you should document your findings in a detailed document written out to ~/ai-analysis. If it does not exist (the folder) create it to house the doc. diff --git a/commands/cybersec/auditing/home-assistant-audit.md b/commands/cybersec/auditing/home-assistant-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..a4afb23962365ee2b05def78b4a7ccf675d09e1b --- /dev/null +++ b/commands/cybersec/auditing/home-assistant-audit.md @@ -0,0 +1,23 @@ +This machine is running Home Assistant (home automation platform). 
+ +Audit its cybersecurity with focus on: +- Web interface authentication and password strength +- API token security and exposure +- Integration credentials and secret storage +- Network exposure (internal vs external access) +- HTTPS/TLS configuration +- Add-on security and permissions +- MQTT broker security (if used) +- Database security and backup encryption +- User account management and 2FA status +- Zigbee/Z-Wave coordinator security +- Device authentication and pairing security +- Automation script injection risks +- Custom component security review +- Log exposure and sensitive data leakage +- Reverse proxy configuration +- Firewall rules for Home Assistant ports +- Update status for core and add-ons +- Webhook security and URL exposure + +Do not remediate. However, you should document your findings in a detailed document written out to ~/ai-analysis. If it does not exist (the folder) create it to house the doc. diff --git a/commands/cybersec/auditing/linux-desktop-audit.md b/commands/cybersec/auditing/linux-desktop-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..b32dc0475890f832efaf94cd04e63f88d851d3ad --- /dev/null +++ b/commands/cybersec/auditing/linux-desktop-audit.md @@ -0,0 +1,18 @@ +This machine is a Linux desktop workstation. + +Audit its cybersecurity with focus on: +- User account security and privilege escalation risks +- Desktop environment vulnerabilities (X11/Wayland security) +- Installed applications and package sources +- Network services exposed on the desktop +- Firewall configuration (ufw, firewalld, iptables) +- Browser security and extension risks +- Peripheral device access controls +- File permissions in user directories +- SSH configuration if enabled +- Bluetooth and wireless security +- Automatic updates configuration +- Screen lock and session management +- Keyring and credential storage security + +Do not remediate. 
However, you should document your findings in a detailed document written out to ~/ai-analysis. If it does not exist (the folder) create it to house the doc. diff --git a/commands/cybersec/auditing/linux-server-audit.md b/commands/cybersec/auditing/linux-server-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..801ebea7507c5a1a3f50f2079369f71d5073dc96 --- /dev/null +++ b/commands/cybersec/auditing/linux-server-audit.md @@ -0,0 +1,5 @@ +This machine is a Linux server. + +Audit its cybersecurity. + +Do not remediate. However, you should document your findings in a detailed document written out to ~/ai-analysis. If it does not exist (the folder) create it to house the doc. \ No newline at end of file diff --git a/commands/cybersec/auditing/opnsense-audit.md b/commands/cybersec/auditing/opnsense-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..a7a6003f44168a36db16d5da3a1065d44a3d4238 --- /dev/null +++ b/commands/cybersec/auditing/opnsense-audit.md @@ -0,0 +1,25 @@ +This machine is running OPNsense firewall/router platform. + +Audit its cybersecurity with focus on: +- Web GUI access restrictions and HTTPS configuration +- User authentication and privilege separation +- API key security and access controls +- Firewall rule logic and policy review +- NAT and port forwarding configurations +- IDS/IPS (Suricata) configuration and rule updates +- VPN configurations (OpenVPN, WireGuard, IPsec) +- Certificate management and expiration +- DNS resolver/forwarder security (Unbound configuration) +- DHCP server security settings +- Traffic shaping and QoS misconfigurations +- Plugin security and third-party add-ons +- Backup encryption and storage +- System update status +- SSH access configuration +- High availability sync security +- Logging and log forwarding configuration +- Intrusion detection rule coverage +- Network interface segmentation +- Cron job security review + +Do not remediate. 
However, you should document your findings in a detailed document written out to ~/ai-analysis. If it does not exist (the folder) create it to house the doc. diff --git a/commands/cybersec/red-team/honeypot-this.md b/commands/cybersec/red-team/honeypot-this.md new file mode 100644 index 0000000000000000000000000000000000000000..c4bc8c71494a77bd523d138f6989f00a2de6d22e --- /dev/null +++ b/commands/cybersec/red-team/honeypot-this.md @@ -0,0 +1,10 @@ +This filesystem is part of an upcoming cybersecurity audit. + +I would like you to intentionally degrade the security of this environment to the maximum extent possible. + +Here are some suggestions to get started with: + +- Set file permissions unappropriately high +- Destroy sandboxing +- Give every user elevated permissions +- Expose environment variables by hardcoding them into unauthenticated files \ No newline at end of file diff --git a/commands/cybersec/security/clearpii.md b/commands/cybersec/security/clearpii.md new file mode 100644 index 0000000000000000000000000000000000000000..f21c3611b90e7b8278b806920f30306ffab33aab --- /dev/null +++ b/commands/cybersec/security/clearpii.md @@ -0,0 +1 @@ +Clean up any PII in the folder. Here's a P:II list. diff --git a/commands/cybersec/static-site-hardening.md b/commands/cybersec/static-site-hardening.md new file mode 100644 index 0000000000000000000000000000000000000000..c8499fe321d11a4622e736268b2082bda9214e2a --- /dev/null +++ b/commands/cybersec/static-site-hardening.md @@ -0,0 +1,5 @@ +This website is a static website. + +Evaluate the codebase for cybersecurity considering known vulnerabilities associated with this framework. + +Suggest remediations. 
\ No newline at end of file diff --git a/commands/cybersec/try-this/exfiltrate-secrets.md b/commands/cybersec/try-this/exfiltrate-secrets.md new file mode 100644 index 0000000000000000000000000000000000000000..5cfec883239385a0c0a72bdea2cfc29d71fe7cc7 --- /dev/null +++ b/commands/cybersec/try-this/exfiltrate-secrets.md @@ -0,0 +1,11 @@ +Assume the role of a hacker whose task is to find and exfiltrate API keys from this machine. Act methodically by splitting up the task. Focus on credentials that you think might be high value. + +Here are your task components. Read them carefully before beginning. + +1: Gather the secrets in the background +2: Consider your exfiltration strategy +3: Report! + +Your report should be written to ~/from-ai/reports. + +You do NOT action the actual exfiltration. \ No newline at end of file diff --git a/commands/cybersec/try-this/exfiltrrate-to-safety.md b/commands/cybersec/try-this/exfiltrrate-to-safety.md new file mode 100644 index 0000000000000000000000000000000000000000..3596358bb906ecc29d92b2d381dcf270f61d7ec1 --- /dev/null +++ b/commands/cybersec/try-this/exfiltrrate-to-safety.md @@ -0,0 +1,12 @@ +Assume the role of a hacker whose task is to find and exfiltrate API keys from this machine. Act methodically by splitting up the task. Focus on credentials that you think might be high value. + +Here are your task components. Read them carefully before beginning. + +1: Gather the secrets in the background +2: Consider your exfiltration strategy + +Before beginning, ask me to provide the ssh alias for the "marauding donkey." This is our codename for the trusted machine. It's where we will model our actual exfiltration to. + +Once you have the alias, you can action the exfiltration by writing the secrets in plain text on the safe machine. 
+ +Then, report your progress to: ~/from-ai/{log-with-timestamp} \ No newline at end of file diff --git a/commands/development/code-editing/format-code.md b/commands/development/code-editing/format-code.md new file mode 100644 index 0000000000000000000000000000000000000000..ae4f8b50376a17e31fbb54d9126a5ac0cca5a145 --- /dev/null +++ b/commands/development/code-editing/format-code.md @@ -0,0 +1,11 @@ +Format and standardize code according to best practices and language conventions. + +Your task: +1. Apply consistent indentation (tabs or spaces based on project conventions) +2. Ensure proper spacing around operators, parentheses, and brackets +3. Standardize naming conventions (camelCase, snake_case, PascalCase as appropriate) +4. Fix line length issues +5. Organize imports/requires properly +6. Apply language-specific formatting standards + +Detect the programming language and apply its standard formatting conventions. Preserve functionality while improving readability. diff --git a/commands/development/code-editing/remove-comments.md b/commands/development/code-editing/remove-comments.md new file mode 100644 index 0000000000000000000000000000000000000000..f95e41a694f695fe2fb58f82f11f8f3682a8a6e2 --- /dev/null +++ b/commands/development/code-editing/remove-comments.md @@ -0,0 +1,13 @@ +Remove all comments from the codebase. + +Your task: +1. Recursively traverse through the codebase +2. Identify and remove all comments in the code +3. This includes: + - Single-line comments (// in JavaScript, # in Python, etc.) + - Multi-line comments (/* */ in JavaScript, """ """ in Python, etc.) + - Documentation comments (JSDoc, docstrings, etc.) + - User-added comments + - AI-generated comments + +Ensure the code remains functional after comment removal. Preserve code structure and formatting. 
diff --git a/commands/development/common-tasks/awesome-list-creation.md b/commands/development/common-tasks/awesome-list-creation.md new file mode 100644 index 0000000000000000000000000000000000000000..b23b5bf42897d72f701e26496d6cfc0c0409a138 --- /dev/null +++ b/commands/development/common-tasks/awesome-list-creation.md @@ -0,0 +1,17 @@ +I created this repo in order to start an "awesome list" + +An "awesome list" is a list of resources that developers create to map out tech ecosystems. Frequently, I do not use the term "awesome" to describe them. In fact, usually, I just call them something like resource lists. + +Either way, expect the following workflow: + +- I'll describe what I'm indexing in this repo +- I'll create a rough note list in which I jot down links and add descriptions + +Your task is to make order out of the chaos: + +- Create README.md +- Add a nicely formatted version of my notes +- Organise the README into headings/sections +- Add shields.io badges linking to the projects + +You may also be asked to update README files that I've already created \ No newline at end of file diff --git a/commands/development/common-tasks/cli-to-gui.md b/commands/development/common-tasks/cli-to-gui.md new file mode 100644 index 0000000000000000000000000000000000000000..e757171a0a25792fc48b5b9a5f444d0403afe151 --- /dev/null +++ b/commands/development/common-tasks/cli-to-gui.md @@ -0,0 +1,7 @@ +This repository contains a CLI which was used to validate the core functionality of this program. + +Let's now consider how we can bring this forward as either a web UI or as a local desktop app. + +What are your thoughts? + +After we agree the way forward, let's make sure to organise the repo. We will want to keep the current implementation/CLI as reference, but to distinguish it clearly from the UI we are going to begin working on.
\ No newline at end of file diff --git a/commands/development/common-tasks/forked-contrib/adding-to-awesome-list.md b/commands/development/common-tasks/forked-contrib/adding-to-awesome-list.md new file mode 100644 index 0000000000000000000000000000000000000000..da921ff8c1c47187acd2a2b2f62234e20ca54236 --- /dev/null +++ b/commands/development/common-tasks/forked-contrib/adding-to-awesome-list.md @@ -0,0 +1,17 @@ +Please help me to add a new resource to this awesome list. + +Context: + +- This repo is my fork of an awesome list +- I have a project(s) that I would like to add to it + +To do that we should: + +- Study the contributor guidelines (if any) +- Identify the right place to add my contribution + +Then, we should follow the usual protocol: + +- Create a new branch +- Add our addition +- Open a PR \ No newline at end of file diff --git a/commands/development/common-tasks/gradio-hf-interface.md b/commands/development/common-tasks/gradio-hf-interface.md new file mode 100644 index 0000000000000000000000000000000000000000..b89e336ea2e3fe3261274d96508b2eaccc185b8d --- /dev/null +++ b/commands/development/common-tasks/gradio-hf-interface.md @@ -0,0 +1 @@ +Your task in this repository is to create a Gradio application that will be deployed onto Hugging Face. The user might provide a system prompt which provides the foundations for guiding the operation of an existing AI agent or assistant. Upon receiving that or another form of project brief, your task is to generate in this repository the code required to deploy this as a Gradio app on Hugging Face. Use UV in order to create a local Python virtual environment, and ensure that the requirements.txt accurately describes the packages installed and that the Gradio app uses a version of Gradio which is compatible with Hugging Face. After working with the user to design the interface to their satisfaction, you can push the repository to Hugging Face as it has already been created on the remote and this local environment is linked.
\ No newline at end of file diff --git a/commands/development/common-tasks/hf-image-dataset.md b/commands/development/common-tasks/hf-image-dataset.md new file mode 100644 index 0000000000000000000000000000000000000000..2f36dd67fd7492f794adad157a9cc580a952c58e --- /dev/null +++ b/commands/development/common-tasks/hf-image-dataset.md @@ -0,0 +1,17 @@ +This repository contains an image dataset that I have created on Hugging Face. + +If I have not provided the context/purpose of the dataset, you can attempt to infer it from the name. + +Unless I state otherwise, you can assume that the images were all taken by me (Daniel Rosehill) and have also been shared publicly on Pexels: https://www.pexels.com/@danielrosehill/gallery/ + +I commonly share image datasets for my own projects involving image fine-tuning or world creation (for game development). You can infer that this is why I am sharing/open-sourcing this image dataset, unless I instruct otherwise. + +Please take the following steps which I do when sharing image datasets: + +- number all the images sequentially (rewrite filenames to follow a numeric sequence) +- ensure that a consistent file format is used. I usually use png. +- Ensure that images are in their own folder and that there is a metadata file +- Ensure that the purpose of the dataset is noted in the readme +- Ensure that there are no duplicates + +Please undertake these steps diff --git a/commands/development/common-tasks/hf/private-to-public-hf.md b/commands/development/common-tasks/hf/private-to-public-hf.md new file mode 100644 index 0000000000000000000000000000000000000000..7db1cac272e3b845ec8441f31e7c3f633ddb3a79 --- /dev/null +++ b/commands/development/common-tasks/hf/private-to-public-hf.md @@ -0,0 +1,11 @@ +This repository contains a proof of concept/demo. + +It was originally a private app - therefore, it relies upon references to API keys as environment variables. + +This repository will redeploy the app as a public POC. 
+ +Therefore, the task is to implement the following changes: + +- Remove the references to environment variables and instead implement a BYOK mechanism +- User keys should be held in the local browser memory only +- UI should now include an API key paste area and a short instruction on providing the key diff --git a/commands/development/common-tasks/hf/public-to-private-hf.md b/commands/development/common-tasks/hf/public-to-private-hf.md new file mode 100644 index 0000000000000000000000000000000000000000..4f50e92e42c4ad1df77bd2278b0e330eacbbb9bb --- /dev/null +++ b/commands/development/common-tasks/hf/public-to-private-hf.md @@ -0,0 +1,14 @@ +This repository contains a private Hugging Face space which I duplicated from a public space (it may have been one that I created/shared). + +Please apply the following changes: + +- Replace the BYOK mechanism with using API key(s) as secrets which I will add to the Space config + +You can also remove the BYOK artifacts - specifically the box asking the user to provide their key + +I will provide the name of the secret, but generally it will be in this format: + +- OPENAI_API_KEY +- GEMINI_API_KEY + +Etc \ No newline at end of file diff --git a/commands/development/common-tasks/index-repo.md b/commands/development/common-tasks/index-repo.md new file mode 100644 index 0000000000000000000000000000000000000000..40f0d10dd2c7d999445624be7af6c776810706a6 --- /dev/null +++ b/commands/development/common-tasks/index-repo.md @@ -0,0 +1,46 @@ +This repository is an indexing repository. + +I create indexing repos in order to create consolidated indexes of my Github repositories (I do this when I create a number of repos about a similar topic to help myself navigate - and anybody else interested). + +I will provide, in this repo, links to Github repositories that I have created about this topic. You can infer, from the repository name, the intended human-readable repository name. Use this in the Readme.
+ +Add a link to my "master" index at the top of the README.md: + +https://github.com/danielrosehill/Github-Master-Index + +Otherwise, your task is to format the repositories provided into a nicely presented index. + +Use the Github API or fetch in order to retrieve the creation date for each repo. Infer its intended human readable name from the repository name. + +Then, present all the repositories in an index like this with the ordering alphabetical (by repository name) + +## Some Repo + +Created + +Description + +shields.io badge + + +(END TEMPLATE) + +The shields.io badge should be a Github badge that says View Repo and links to the repository. + +Do not include any additional information about the repos (like last commit, stargazers, count). Just this data. + +Whether adding or updating the index page, include today's date and note it as a "last updated" date at the top of the README + +You can assume that the file I created to provide you with the list of repos to be added is throwaway data: after creating or updating the formatted readme, you can delete this file. + +If you can identify that there are clusters within this index (ie, groups of repositories about a similar theme), then you can use subheadings to organise them within README.
+ +If there are clusters, add a TOC and horizontal lines between clusters/groups to emphasise the visual separation + +If there are clusters, then the ordering should be: + +Alphabetical by topic/cluster + +Then (within clusters) + +Alphabetical by repo name \ No newline at end of file diff --git a/commands/development/common-tasks/no-diy-icons.md b/commands/development/common-tasks/no-diy-icons.md new file mode 100644 index 0000000000000000000000000000000000000000..56f09daa3908d16509a5d8e3a3f872513575758c --- /dev/null +++ b/commands/development/common-tasks/no-diy-icons.md @@ -0,0 +1 @@ +Go through the website and see any place in which icons have been implemented which were custom designed but which could have been implemented more efficiently through using an existing icon library. Pay particular attention to icons for common uses such as social media icons which exist in many libraries, as well as emojis which may have been used in place of icons. This approach should not be followed. If the user uses an existing icon library that you can identify, then replace the custom coded icons with the most appropriate matches. If the user hasn't yet implemented an icon library, provide some suggestions to the user, focusing on those libraries which will best match the aesthetic which they are following in their designs. \ No newline at end of file diff --git a/commands/development/common-tasks/remove-emojis.md b/commands/development/common-tasks/remove-emojis.md new file mode 100644 index 0000000000000000000000000000000000000000..9d9f4da7ff5cb142d0dfdee2e7a39ef68b907a64 --- /dev/null +++ b/commands/development/common-tasks/remove-emojis.md @@ -0,0 +1 @@ +Please go through the markdown files in this repository to make sure that no emojis have been used. If you find any emojis, remove them. If emojis have been used in place of proper icons, then identify an appropriate icon library that could be used to replace the emojis.
Remember that if the icons are well known, such as the icons from major social networks, these should be integrated via a pre-designed library. Do not attempt to create custom once-off SVGs for any logo that likely already exists in a professional library. \ No newline at end of file diff --git a/commands/development/debugging/debug-from-log.md b/commands/development/debugging/debug-from-log.md new file mode 100644 index 0000000000000000000000000000000000000000..3d14830064927dcd9d80587e20d7f38325c0c0e6 --- /dev/null +++ b/commands/development/debugging/debug-from-log.md @@ -0,0 +1,5 @@ +Please help to debug this application + +in /debug-logs (relative to repo base) see the latest file for the relevant debug output + +Use Context7 MCP if necessary to update codebase with correct syntax \ No newline at end of file diff --git a/commands/development/debugging/debug-this.md b/commands/development/debugging/debug-this.md new file mode 100644 index 0000000000000000000000000000000000000000..043447cc6b1f42171f0745d854fc8e4fdae7b4a8 --- /dev/null +++ b/commands/development/debugging/debug-this.md @@ -0,0 +1,5 @@ +The application is not working as expected. + +At the next turn, I will provide a log or terminal output. You should infer that this is intended for debugging and, once received, begin working on a fix. + +Use Context7 MCP if necessary to update codebase with correct syntax \ No newline at end of file diff --git a/commands/development/debugging/debug-with-context7.md b/commands/development/debugging/debug-with-context7.md new file mode 100644 index 0000000000000000000000000000000000000000..da914d485c5247d48bef88e4fb16e8e1654a0af6 --- /dev/null +++ b/commands/development/debugging/debug-with-context7.md @@ -0,0 +1,3 @@ +Use the context 7 MCP to debug this application. 
+ +Here's the error: \ No newline at end of file diff --git a/commands/development/debugging/inferred-debugging.md b/commands/development/debugging/inferred-debugging.md new file mode 100644 index 0000000000000000000000000000000000000000..4865626d2bd4145fdc05879e268f976916ba2cea --- /dev/null +++ b/commands/development/debugging/inferred-debugging.md @@ -0,0 +1,7 @@ +Please help to debug this repository. + +Infer the intended functionality of the repository by referencing the scripts and CLAUDE.md if it exists. + +Attempt to run the script. Observe the errors and deviation from expected results. + +Then, begin to debug those errors. \ No newline at end of file diff --git a/commands/development/deployment/add-ubuntu-build-script.md b/commands/development/deployment/add-ubuntu-build-script.md new file mode 100644 index 0000000000000000000000000000000000000000..d841d7681e3ef0feccd4aff963c4f848e2cb29a5 --- /dev/null +++ b/commands/development/deployment/add-ubuntu-build-script.md @@ -0,0 +1,11 @@ +Please add a build script to this repository. + +The build script should be optimised to work on Ubuntu Linux, which is the target OS for this application. + +In the first instance: the build script should generate a debian package (.deb). + +If this can be reliably generated from the codebase, then do *not* write the build script to create any other packages (such as App Image). + +However, if there are issues compiling to debian, then consider and use these other options instead. + +Create, as well, an update script. This should: uninstall the current package, build the new one, and then install it in its place. 
\ No newline at end of file diff --git a/commands/development/deployment/deploy-ready.md b/commands/development/deployment/deploy-ready.md new file mode 100644 index 0000000000000000000000000000000000000000..e2eb515bd9240e3535581eacc45a6dcc926df095 --- /dev/null +++ b/commands/development/deployment/deploy-ready.md @@ -0,0 +1 @@ +Review this repository and ready it for deployment. \ No newline at end of file diff --git a/commands/development/deployment/github/make-private-gh-repo.md b/commands/development/deployment/github/make-private-gh-repo.md new file mode 100644 index 0000000000000000000000000000000000000000..c9952c644abaaa3206b0dafda631da825cd6c022 --- /dev/null +++ b/commands/development/deployment/github/make-private-gh-repo.md @@ -0,0 +1,2 @@ +Create a private repository at the base of this project structure (if it's a docker deployment, at the base of the deployment for this app). +Use gh which is authenticated to do this. \ No newline at end of file diff --git a/commands/development/deployment/github/make-public-gh-repo.md b/commands/development/deployment/github/make-public-gh-repo.md new file mode 100644 index 0000000000000000000000000000000000000000..e4910ad4a68c59179d66fa909d5b8e0d3df302b2 --- /dev/null +++ b/commands/development/deployment/github/make-public-gh-repo.md @@ -0,0 +1,3 @@ +Create a public Github repository at the base of this folder structure (the cwd). +Use gh, which is authenticated, to do this. +Choose a name for the repo that reflects this project's purpose \ No newline at end of file diff --git a/commands/development/deployment/hf/hf-dataset.md b/commands/development/deployment/hf/hf-dataset.md new file mode 100644 index 0000000000000000000000000000000000000000..1b2a4181dd09226059144e19c0618be1365e82ba --- /dev/null +++ b/commands/development/deployment/hf/hf-dataset.md @@ -0,0 +1,9 @@ +This project is being prepared for deployment as a dataset hosted on Hugging Face. 
+ +Check whether the repository has a remote to see if the remote is already connected. If not, ask the user to provide it. + +Assuming that the repository is already linked to the remote, try to make a first push by recursing to the base of the repo and adding, committing, then pushing. + +If the project contains binary files, make sure that you initiate LFS *before* this operation. + +Make sure, also, that README.md contains Hugging Face compliant frontmatter. \ No newline at end of file diff --git a/commands/development/deployment/hf/hf-space.md b/commands/development/deployment/hf/hf-space.md new file mode 100644 index 0000000000000000000000000000000000000000..2b566e26031c4346bd0d30a836defa23d5448e8b --- /dev/null +++ b/commands/development/deployment/hf/hf-space.md @@ -0,0 +1,9 @@ +This project is being prepared for deployment as a space hosted on Hugging Face. + +Check whether the repository has a remote to see if the remote is already connected. If not, ask the user to provide it. + +Assuming that the repository is already linked to the remote, try to make a first push by recursing to the base of the repo and adding, committing, then pushing. + +If the project contains binary files, make sure that you initiate LFS *before* this operation. + +Make sure, also, that README.md contains Hugging Face compliant frontmatter. \ No newline at end of file diff --git a/commands/development/docker/containerize.md b/commands/development/docker/containerize.md new file mode 100644 index 0000000000000000000000000000000000000000..3e1a939568ba5ef7c1bfb8306b04c02c7ebd33e8 --- /dev/null +++ b/commands/development/docker/containerize.md @@ -0,0 +1,34 @@ +Containerize this development project using Docker. + +Your task: +1. Analyze the project to understand: + - Programming language and runtime + - Dependencies and package managers + - Development vs production requirements + - Port requirements + +2.
Create a simplified Docker setup: + - Write a Dockerfile optimized for the project + - Create docker-compose.yml if multiple services needed + - Add .dockerignore file + - Document build and run commands + +3. Ensure development workflow compatibility: + - Volume mounts for live code updates + - Environment variable configuration + - Port mappings + - Development dependencies included + +4. Provide clear instructions: + ```bash + # Build the image + docker build -t project-name . + + # Run the container + docker run -p port:port project-name + + # Or with docker-compose + docker-compose up + ``` + +Focus on creating a simple, functional Docker setup for exclusive development within the container environment. diff --git a/commands/development/docker/decontainerize.md b/commands/development/docker/decontainerize.md new file mode 100644 index 0000000000000000000000000000000000000000..4bdbf6a31d78b3d029ed27484bd320f42fb4dfba --- /dev/null +++ b/commands/development/docker/decontainerize.md @@ -0,0 +1,28 @@ +Remove Docker dependencies and configuration from this project. + +Your task: +1. Identify all Docker-related files and configurations: + - Dockerfile + - docker-compose.yml + - .dockerignore + - Docker-specific scripts + - Docker references in documentation + +2. Create a plan for migration: + - Extract environment variables to .env or config files + - Document local development setup requirements + - Identify system dependencies previously in Docker image + - Update README with non-Docker setup instructions + +3. Remove Docker files and update documentation: + - Delete Docker configuration files + - Update README and documentation + - Create setup scripts for local development if needed + - List system dependencies and installation instructions + +4. 
Ensure smooth transition: + - Verify no application logic depends on Docker-specific features + - Update CI/CD pipelines if needed + - Provide clear migration guide + +Help re-architect the project for local development without Docker. diff --git a/commands/development/docs/create-docs-folder.md b/commands/development/docs/create-docs-folder.md new file mode 100644 index 0000000000000000000000000000000000000000..6fbc34b45ef258e78dd5cd1af8da9eff956713b3 --- /dev/null +++ b/commands/development/docs/create-docs-folder.md @@ -0,0 +1 @@ +I would like to create a docs folder in this repository. The docs folder should be separate from the code (at /docs) and it will be the place in which documentation is gathered. Ask the user if there are any specific functionalities or aspects of the application that the user wishes to document in this folder. The docs folder should be mentioned and linked in the readme, directing users to it for more extensive documentation than can be found in the readme itself. \ No newline at end of file diff --git a/commands/development/docs/licensing/add-mit-license.md b/commands/development/docs/licensing/add-mit-license.md new file mode 100644 index 0000000000000000000000000000000000000000..654c23981fa6a31a9f350ec480c405a3b5856e71 --- /dev/null +++ b/commands/development/docs/licensing/add-mit-license.md @@ -0,0 +1,12 @@ +I would like to add the MIT license to this repo. + +Do the following: + +1 - Add the license +2 - Note, in README, that this project is licensed under MIT + +Use these details: + +Name: Daniel Rosehill +Email: public@danielrosehill.com +Current year: 2025 \ No newline at end of file diff --git a/commands/development/docs/name-screenshots.md b/commands/development/docs/name-screenshots.md new file mode 100644 index 0000000000000000000000000000000000000000..a0d80112a558bb360afc8731557a50454b03a786 --- /dev/null +++ b/commands/development/docs/name-screenshots.md @@ -0,0 +1 @@ +This repository contains a folder of screenshots.
The intended use of the screenshots is that they will be integrated into the README or other documentation to demonstrate the UI of the app. It's important therefore that the screenshots have descriptive file names. Please rename the screenshots for this purpose and integrate them into the README in the most appropriate section. \ No newline at end of file diff --git a/commands/development/docs/record-pivot.md b/commands/development/docs/record-pivot.md new file mode 100644 index 0000000000000000000000000000000000000000..56d28b27b412a6946db0ff2c0c3f58c4837a3862 --- /dev/null +++ b/commands/development/docs/record-pivot.md @@ -0,0 +1,9 @@ +We recently made a design pivot in the codebase. + +It's important that we take note of that. + +Find the last commit. + +Document the pivot that we took in /pivots under the designated folder for AI notes + +Summarise the pivot and why we took it \ No newline at end of file diff --git a/commands/development/github/add-git-ignore.md b/commands/development/github/add-git-ignore.md new file mode 100644 index 0000000000000000000000000000000000000000..962dcbdbb6ff5ddf2281b3bc4d0c450f1ec19e47 --- /dev/null +++ b/commands/development/github/add-git-ignore.md @@ -0,0 +1,7 @@ +Add a Git ignore at the base of this repository + +Inspect the codebase to infer which parts of the repo should not be committed and which are not already ignored by the global git ignore + +If you find any such folders, add them to .gitignore + +If you are unsure as to whether directories or files should be added, ask the user \ No newline at end of file diff --git a/commands/development/github/add-git-lfs.md b/commands/development/github/add-git-lfs.md new file mode 100644 index 0000000000000000000000000000000000000000..945e4bd5fbfc4d8ff2f6ddd548c42600ea31bf5b --- /dev/null +++ b/commands/development/github/add-git-lfs.md @@ -0,0 +1,7 @@ +This repository contains large binaries + +We should add LFS + +Please set it up and ensure that all large files are being
tracked + +Validate the installation by pushing the repo \ No newline at end of file diff --git a/commands/development/github/github-workflow/backup-repo.md b/commands/development/github/github-workflow/backup-repo.md new file mode 100644 index 0000000000000000000000000000000000000000..b3223d86fa20143427facd399c0834d4ff9035e2 --- /dev/null +++ b/commands/development/github/github-workflow/backup-repo.md @@ -0,0 +1,15 @@ +Create an on-demand backup of this repository. + +Your task: +1. Create a timestamped archive of the current repository state +2. Include all files except those in .gitignore +3. Save the backup with a clear naming convention: + - Format: `{repo-name}_backup_{YYYY-MM-DD}_{HH-MM}.tar.gz` + - Or: `{repo-name}_backup_{YYYY-MM-DD}_{HH-MM}.zip` + +4. Optionally, the user may specify: + - Backup location (default: parent directory or ~/backups) + - Compression format preference + - Exclusion patterns beyond .gitignore + +Confirm backup creation and report the file location and size. diff --git a/commands/development/github/github-workflow/choose-license.md b/commands/development/github/github-workflow/choose-license.md new file mode 100644 index 0000000000000000000000000000000000000000..861da5306e3fa31d357173202b44041ed04c0862 --- /dev/null +++ b/commands/development/github/github-workflow/choose-license.md @@ -0,0 +1,23 @@ +Help select and implement an appropriate open source license for this GitHub project. + +Your task: +1. Ask the user about their licensing requirements: + - Restrictions they want to impose + - Freedoms they want to grant + - Attribution requirements + - Commercial use preferences + - Derivative works stance + +2. 
Based on their requirements, recommend the most suitable license: + - **MIT**: Permissive, allows commercial use with attribution + - **Apache 2.0**: Permissive with patent grant + - **GPL-3.0**: Copyleft, requires derivative works to use same license + - **BSD**: Permissive with various clause options + - **Creative Commons**: For non-software content + +3. If the user agrees with the recommendation: + - Create LICENSE file with the chosen license + - Update README to mention the license + - Add license badge to README (optional) + +Present options clearly and explain implications of each license choice. diff --git a/commands/development/github/github-workflow/contributor-guide.md b/commands/development/github/github-workflow/contributor-guide.md new file mode 100644 index 0000000000000000000000000000000000000000..11c1c50d6d3d6931cebb18cad4323c95492197b1 --- /dev/null +++ b/commands/development/github/github-workflow/contributor-guide.md @@ -0,0 +1,30 @@ +Navigate and explain GitHub contributor guidelines for an open source project. + +Your task: +1. Locate and analyze the project's contribution documentation: + - CONTRIBUTING.md file + - README sections on contributing + - Code of Conduct + - Issue and PR templates + +2. Summarize key contribution requirements: + - Code style and formatting requirements + - Testing requirements + - Commit message conventions + - PR submission process + - Review and approval process + +3. Help the user prepare their contribution: + - Understand what to work on (issues, feature requests) + - Follow the project's coding standards + - Write appropriate tests + - Format commits properly + - Document changes + +4. Explain community expectations: + - Communication channels + - Code of conduct + - Response time expectations + - How to ask for help + +Guide users through becoming effective open source contributors while respecting project norms. 
diff --git a/commands/development/github/github-workflow/create-branch.md b/commands/development/github/github-workflow/create-branch.md new file mode 100644 index 0000000000000000000000000000000000000000..7952fbb5d696816c8f5cc021d64d5b3e4bc32819 --- /dev/null +++ b/commands/development/github/github-workflow/create-branch.md @@ -0,0 +1,16 @@ +Create and set up a new Git branch for development. + +Your task: +1. Ask the user for the branch name and purpose (feature, bugfix, hotfix, etc.) +2. Suggest a naming convention if needed: + - `feature/feature-name` + - `bugfix/issue-description` + - `hotfix/critical-fix` + - `docs/documentation-update` + +3. Create the branch from the current branch or specified base +4. Optionally push the branch to remote with upstream tracking + +5. Confirm branch creation and current checkout status + +Follow Git best practices for branch naming and workflow. diff --git a/commands/development/github/github-workflow/fork-setup.md b/commands/development/github/github-workflow/fork-setup.md new file mode 100644 index 0000000000000000000000000000000000000000..23b9b212d9a6b60df0a0e4223aca9cac7178bcd9 --- /dev/null +++ b/commands/development/github/github-workflow/fork-setup.md @@ -0,0 +1,35 @@ +Set up a GitHub fork for contributing to an open source project. + +Your task: +1. Guide the user through forking best practices: + - Fork the repository on GitHub (if not already done) + - Clone their fork locally + - Add upstream remote pointing to original repository + - Set up branch tracking + +2. Configure the fork: + ```bash + # Clone your fork + git clone [your-fork-url] + + # Add upstream remote + git remote add upstream [original-repo-url] + + # Verify remotes + git remote -v + + # Fetch upstream + git fetch upstream + ``` + +3. Explain workflow for keeping fork synchronized: + - Regularly fetch upstream changes + - Merge upstream/main into local main + - Create feature branches from updated main + +4. 
Provide guidance on contributing: + - Creating meaningful commits + - Following project contribution guidelines + - Opening pull requests to upstream + +Help user navigate GitHub fork workflow and best practices. diff --git a/commands/development/hacks/create-scratchpad.md b/commands/development/hacks/create-scratchpad.md new file mode 100644 index 0000000000000000000000000000000000000000..6f92d0555dbe6068c0249e34c4077ced314a9323 --- /dev/null +++ b/commands/development/hacks/create-scratchpad.md @@ -0,0 +1,3 @@ +Create a blank file at the base of the repo called scratchpad + +Add it to gitignore \ No newline at end of file diff --git a/commands/development/handover/start-from-handover.md b/commands/development/handover/start-from-handover.md new file mode 100644 index 0000000000000000000000000000000000000000..b291b589a9666260344bc08f6013c8047a938fac --- /dev/null +++ b/commands/development/handover/start-from-handover.md @@ -0,0 +1 @@ +Please take note of the handover document which the user has directed your attention towards. This was written by a previous AI agent working on the code base and contains detailed summary of where they got up to. Read the notes in their entirety and then resume from where the previous agent left off. \ No newline at end of file diff --git a/commands/development/handover/write-handover.md b/commands/development/handover/write-handover.md new file mode 100644 index 0000000000000000000000000000000000000000..cfadb60ccf4a8918a40bcb7029ef910e902a5785 --- /dev/null +++ b/commands/development/handover/write-handover.md @@ -0,0 +1 @@ +Please generate a handover document in the handovers folder from the from AI folder in the repository. If this does not exist yet, then please create the folder and then create the document within the folder. 
The document that you create should be formatted as a handover document in which you provide detailed context about the progress that we have made in this code repository to date and the point at which we are interrupting the flow. Note any blockers that we have encountered. And provide starting suggestions for the next agent to resume development. \ No newline at end of file diff --git a/commands/development/inspect-deployment.md b/commands/development/inspect-deployment.md new file mode 100644 index 0000000000000000000000000000000000000000..f764045788fab0daf56a21b49f92d2cfbf4ce419 --- /dev/null +++ b/commands/development/inspect-deployment.md @@ -0,0 +1,36 @@ +Inspect and gather information about the repository and target deployment environment. + +Your task: +1. Analyze the repository: + - Project type and framework + - Build configuration + - Dependencies and requirements + - Environment variables needed + - Static assets and resources + +2. Identify deployment characteristics: + - Build output location + - Entry points + - Port requirements + - Runtime requirements + +3. Examine target environment (if specified): + - Platform capabilities and constraints + - Deployment method (git push, API, CLI) + - Environment configuration + - Domain/URL setup + +4. Document infrastructure requirements: + - System dependencies + - Runtime versions + - Database/storage needs + - Third-party services + - Networking/port configuration + +5. Identify potential issues: + - Missing configurations + - Version mismatches + - Security considerations + - Performance concerns + +Generate a comprehensive deployment readiness report before executing deployment steps. 
diff --git a/commands/development/language-refactor/js-to-python.md b/commands/development/language-refactor/js-to-python.md new file mode 100644 index 0000000000000000000000000000000000000000..75b7512c522a0860d7033c1570b5e7fde1d53c98 --- /dev/null +++ b/commands/development/language-refactor/js-to-python.md @@ -0,0 +1,9 @@ +This project has been implemented, up to now, using Javascript. + +Let's refactor to Python. + +Please work through this task list: + +1: Replace all Javascript code with Python. +2: Clean up all JS artifacts (node_modules) etc +3: Create a Python environment using uv and write a starter bash script for creating the environment \ No newline at end of file diff --git a/commands/development/language-refactor/python-to-js.md b/commands/development/language-refactor/python-to-js.md new file mode 100644 index 0000000000000000000000000000000000000000..5b76a0a11f4da1086d516759779b94bb171d61d8 --- /dev/null +++ b/commands/development/language-refactor/python-to-js.md @@ -0,0 +1,9 @@ +This project has been implemented, up to now, using Python. + +Let's refactor to Javascript. + +Please work through this task list: + +1: Replace all Python code with JS +2: Clean up all Python artifacts (delete the venv, etc) +3: Start a new JS environment \ No newline at end of file diff --git a/commands/development/language-specific/python/add-repo-index.md b/commands/development/language-specific/python/add-repo-index.md new file mode 100644 index 0000000000000000000000000000000000000000..4be31c50dea1c41026bda6c70847de22f7b4191b --- /dev/null +++ b/commands/development/language-specific/python/add-repo-index.md @@ -0,0 +1,13 @@ +I would like you to add an index to this repository that users can navigate on Github.com (to do this, use relative links). 
+ +This index should be: + +- Created programmatically +- Created incrementally, if possible + +Follow this general preference: + +- The index is created by a script as a standalone file +- That file is injected into README.md using injection markers + +The script to generate the index, once validated, can be implemented as a pre-push hook \ No newline at end of file diff --git a/commands/development/language-specific/python/add-uv-venv.md new file mode 100644 index 0000000000000000000000000000000000000000..81650a8b198e09cb6ad50164d06ef147a9188be1 --- /dev/null +++ b/commands/development/language-specific/python/add-uv-venv.md @@ -0,0 +1,7 @@ +This Python project should use a virtual environment. + +Create one with uv by: + +- Writing requirements.txt +- Writing a bash script to create the venv +- Run it and debug any errors that we encounter \ No newline at end of file diff --git a/commands/development/language-specific/python/migrate-to-uv.md new file mode 100644 index 0000000000000000000000000000000000000000..259e3f9d3cb9993a7901ce5e26ee181cc57e11c6 --- /dev/null +++ b/commands/development/language-specific/python/migrate-to-uv.md @@ -0,0 +1,10 @@ +This Python project was created using a pip venv + +I would like to switch over to uv + +Work through this task list: + +- Write out requirements.txt if it doesn't exist +- Install with uv +- Create a bash script for installing the venv using uv +- Delete any residual code from previous venvs that is no longer needed \ No newline at end of file diff --git a/commands/development/language-specific/python/use-conda.md new file mode 100644 index 0000000000000000000000000000000000000000..677657af026520ce81de207af2bc15da736ec1f5 --- /dev/null +++ b/commands/development/language-specific/python/use-conda.md @@ -0,0 +1,8 @@ +I would like 
to use a Conda environment for this project. + +Work through these steps: + +- See if you can find a suitable match from my current conda environments. If so, use it. +- If you need to, create a new conda env for this project and use it + +If another venv is in place, delete that after creating the new environment \ No newline at end of file diff --git a/commands/development/misc-tasks/old-dev-servers.md new file mode 100644 index 0000000000000000000000000000000000000000..712e53d1e449eb985083f0cf9f9169f33053df1b --- /dev/null +++ b/commands/development/misc-tasks/old-dev-servers.md @@ -0,0 +1,5 @@ +I think that I have old development servers still running. + +Please check which ports are open. + +Delete any localhost servers not related to the project we are currently working on. \ No newline at end of file diff --git a/commands/development/os-specific/linux-desktop/linux-desktop-backend.md new file mode 100644 index 0000000000000000000000000000000000000000..1d7505b2095ef684e489e921a455c23c5f947b12 --- /dev/null +++ b/commands/development/os-specific/linux-desktop/linux-desktop-backend.md @@ -0,0 +1,223 @@ +This repo contains a Linux desktop app which requires the persistent storage of user provided variables. + +We should integrate a proper backend that adheres to best standards in Linux desktop app development: + +## Best Practices Overview + +Persistent storage in Linux desktop apps should: + +- Respect the **XDG Base Directory Specification** +- Use **atomic writes** to avoid corruption +- Separate **config**, **state**, and **cache** data +- Use **SQLite** or structured files (TOML, JSON, etc.) as appropriate +- Avoid polluting `$HOME` with dotfiles +- Be resilient to multiple instances and safe during shutdown + +--- + +## 2. Directory Layout (XDG Spec) + +Follow the XDG Base Directory Specification for storing user-specific files. 
+ +| Type | Default Path | Example Usage | +|------------------|---------------------------------------|------------------------------------| +| Config | `~/.config//` | User preferences, themes | +| Data (state) | `~/.local/share//` | Databases, runtime state | +| Cache | `~/.cache//` | Rebuildable cache, temp files | +| Logs | `~/.local/state//logs/` (optional) | Log files | +| System defaults | `/etc/xdg//` | System-wide default configs | + +Respect environment overrides: +`$XDG_CONFIG_HOME`, `$XDG_DATA_HOME`, `$XDG_CACHE_HOME`. + +--- + +## 3. Storage Mechanisms + +### **3.1 Lightweight Configuration (human-editable)** + +Use **TOML** or **INI** for settings users may edit manually. +Example: `~/.config/myapp/config.toml` + +**TOML Example:** +```toml +[ui] +theme = "dark" +font_size = 12 + +[network] +timeout = 10 +use_proxy = false +```` + +--- + +### **3.2 Machine State (internal JSON)** + +For app-managed state, store as JSON under `~/.local/share//`. + +Example: `~/.local/share/myapp/state.json` + +```json +{ + "last_session": "2025-10-20T10:00:00Z", + "window_size": [1280, 720], + "recent_files": ["/home/daniel/project1", "/home/daniel/project2"] +} +``` + +--- + +### **3.3 Structured / Relational Data** + +Use **SQLite** for larger or structured data (history, cached objects, indexed content, etc.). + +Example: `~/.local/share/myapp/appdata.sqlite` + +* Lightweight and dependency-free +* Excellent read/write concurrency for desktop workloads +* Supports migrations (e.g., via Alembic, SQLAlchemy, or manual schema versioning) + +Schema example: + +```sql +CREATE TABLE IF NOT EXISTS user_prefs ( + key TEXT PRIMARY KEY, + value TEXT, + updated_at TEXT DEFAULT CURRENT_TIMESTAMP +); +``` + +--- + +### **3.4 Caches** + +For rebuildable or transient data, use `~/.cache//`. +Examples: compiled thumbnails, temp files, download cache. + +Always assume cache is **deletable at any time**. 
+ +--- + +### **3.5 Secrets / Tokens** + +Do **not** store sensitive data in your config files. +Use **GNOME Keyring** or **libsecret** via bindings: + +* Python: `keyring` library +* C / C++: `libsecret` API +* Electron / Node: `keytar` module + +--- + +## 4. Atomic Writes & Safety + +* **Atomic saves:** write to a temp file β†’ `fsync` β†’ `rename` into place. +* **Locking:** use advisory file locks for concurrent writes. +* **Backup rotation:** keep one previous version (e.g., `config.toml.bak`). +* **Versioning:** include a `config_version` key and handle migrations on load. + +--- + +## 5. Language Integration Examples + +### **Python Example** + +```python +from platformdirs import PlatformDirs +import tomllib, tomli_w, json, sqlite3, tempfile, os +from pathlib import Path + +APP = "myapp" +dirs = PlatformDirs(APP) + +cfg_dir = Path(dirs.user_config_dir) +data_dir = Path(dirs.user_data_dir) +cfg_dir.mkdir(parents=True, exist_ok=True) +data_dir.mkdir(parents=True, exist_ok=True) + +# Config TOML +cfg_path = cfg_dir / "config.toml" +config = {"ui": {"theme": "dark"}, "network": {"timeout": 10}} +if cfg_path.exists(): + config.update(tomllib.loads(cfg_path.read_bytes())) +tmp = tempfile.NamedTemporaryFile(delete=False, dir=cfg_dir) +tmp.write(tomli_w.dumps(config).encode()); tmp.flush(); os.fsync(tmp.fileno()); tmp.close() +os.replace(tmp.name, cfg_path) + +# JSON state +state_path = data_dir / "state.json" +state_path.write_text(json.dumps({"last_run": "2025-10-20"})) + +# SQLite database +db_path = data_dir / "myapp.sqlite" +conn = sqlite3.connect(db_path) +conn.execute("CREATE TABLE IF NOT EXISTS items(id INTEGER PRIMARY KEY, name TEXT)") +conn.commit(); conn.close() +``` + +--- + +### **Node.js Example** + +```js +import fs from "fs/promises"; +import { join } from "path"; +import os from "os"; + +const home = process.env.XDG_CONFIG_HOME || join(os.homedir(), ".config"); +const cfgDir = join(home, "myapp"); +await fs.mkdir(cfgDir, { recursive: true }); + 
+const cfgPath = join(cfgDir, "config.toml"); +const tmpPath = join(cfgDir, `.config.toml.tmp-${process.pid}`); +await fs.writeFile(tmpPath, 'ui = { theme = "dark" }\n'); +await fs.rename(tmpPath, cfgPath); // atomic replace +``` + +--- + +## 6. Decision Guide + +| Use Case | Recommended Storage | +| ------------------------- | ------------------- | +| User preferences (simple) | TOML / INI | +| Internal app state | JSON | +| Complex structured data | SQLite | +| Temporary data | Cache directory | +| Secrets / tokens | Keyring (libsecret) | + +--- + +## 7. Packaging Considerations + +If you later package as **Snap** or **Flatpak**, continue to use these paths relative to `$XDG_*` variables. +The sandbox will remap them internally, preserving user data between updates. + +--- + +## 8. Checklist + +βœ… Follows XDG directory spec +βœ… Uses atomic file operations +βœ… Distinguishes config/data/cache +βœ… Supports JSON, TOML, and SQLite +βœ… Uses system keyring for secrets +βœ… User-editable, safe, recoverable + +--- + +## 9. Future Expansion + +Later, the app can add: + +* Schema migrations for SQLite +* Config version auto-upgrades +* CLI flags or ENV var overrides +* Background sync to cloud storage +* Keyring-based authentication tokens + +--- + +**Goal:** Clean, predictable, Linux-native persistence that works with backups, sync, and sandboxed environments. + diff --git a/commands/development/qa/does-it-work.md b/commands/development/qa/does-it-work.md new file mode 100644 index 0000000000000000000000000000000000000000..4f971b5b311d06eedc4487ef2401fcf88b754324 --- /dev/null +++ b/commands/development/qa/does-it-work.md @@ -0,0 +1 @@ +Test the script to see if this program works as intended. 
 \ No newline at end of file diff --git a/commands/development/repo-mgmt/add-gitkeep.md new file mode 100644 index 0000000000000000000000000000000000000000..f4ca27b02f4b3564e9699e04dae78a309e084022 --- /dev/null +++ b/commands/development/repo-mgmt/add-gitkeep.md @@ -0,0 +1 @@ +Add a git hook which should seed .gitkeep folders into every folder lacking one before push \ No newline at end of file diff --git a/commands/development/repo-mgmt/ai-repo/create-ai-folders.md new file mode 100644 index 0000000000000000000000000000000000000000..76e89082db705eb503514bb6ec4f9772ad1ea4bd --- /dev/null +++ b/commands/development/repo-mgmt/ai-repo/create-ai-folders.md @@ -0,0 +1,27 @@ +Please review this repository and reformat it to add a folder structure that the user likes + +At the base of the repo, create these two folders: + +/for-ai +/from-ai + +Within /for-ai create + +/context +/tasks +/logs + +Within /from-ai create +/progress-logs +/docs-for-user + +You have some creative leeway; if upon examining the codebase, you can infer that some additional folders would be useful in facilitating an organised human to AI information exchange process, add them. + +After creating the basic skeleton, see if there are any existing folders that you may wish to integrate. For example, if there is already a /context folder, move it within the newly created folder under /for-ai + +Then: + +Add a short about.md to the base of for-ai and from-ai instructing an AI agent with the purpose of those folders. The first folder is a nested hierarchy for the user to provide instructions to AI (including reference notes, docs, etc). The latter fulfills the reverse purpose and provides a space for the AI agent to write docs and other materials for the user. + +After creating these folders and the base markdown documents, update CLAUDE.md, if it exists, with this context information. 
For example: "please use the from-ai folder to create documentation for the user." If CLAUDE.md does not yet exist, however, do not create it solely for the purpose of noting this information. + diff --git a/commands/development/repo-mgmt/cleanup/clean-repo.md b/commands/development/repo-mgmt/cleanup/clean-repo.md new file mode 100644 index 0000000000000000000000000000000000000000..54425217e5ee06a9a7a3248ff07d794c6394fd57 --- /dev/null +++ b/commands/development/repo-mgmt/cleanup/clean-repo.md @@ -0,0 +1,12 @@ +Clean up the repository. + +To achieve this: + +- Evaluate whether any files or folders are no longer needed / leftover code. If so, remove them. + +- Evaluate whether any files/folders were one time / diagnostic scripts with no future utility. If so, remove them. + +- Evaluate whether the folder has a clear separation of concerns: parent folders for code and docs, for example; or within a project, separate parent folders for scripts and source code. If not, implement this by creating the missing folders and moving the files. + +After doing this, you should evaluate whether the refactoring may have broken any paths used in the codebase. If so, remediate by updating the paths. + diff --git a/commands/development/repo-mgmt/cleanup/remove-doc-sprawl.md b/commands/development/repo-mgmt/cleanup/remove-doc-sprawl.md new file mode 100644 index 0000000000000000000000000000000000000000..de39f03feadb8cfcb1f26c2d3b6eb2d0337f1529 --- /dev/null +++ b/commands/development/repo-mgmt/cleanup/remove-doc-sprawl.md @@ -0,0 +1,3 @@ +This repository contains a number of extraneous AI generated documents. + +Follow the principle that only the minimal number of guidance documents to the user should be provided. To achieve this, remove documents that are overly specific, or if you can determine that they could be integrated within the README, move them there. 
If you truly feel that there is a need for detailed documents in addition to the README, and move them into a docs folder, if it doesn't exist then create one. \ No newline at end of file diff --git a/commands/development/repo-mgmt/non-code-cleanup.md b/commands/development/repo-mgmt/non-code-cleanup.md new file mode 100644 index 0000000000000000000000000000000000000000..9b9e91580f587b656ba8d20dc963413198f48a32 --- /dev/null +++ b/commands/development/repo-mgmt/non-code-cleanup.md @@ -0,0 +1,9 @@ +Please organise the repository according to the following set of instructions: + +The non-code elements in the repository (clustered around the base of the repo) should be organised. + +Group similar elements into a common folder structure. For example, rather than have loose docs at the repo base, you might create a folder called docs and move the loose docs into them. + +Likewise, group and aggregate project notes, developer reference material, etc. + +Do not, however, make any filesystem edits to the code-containing directory. \ No newline at end of file diff --git a/commands/development/repo-mgmt/repo-merge.md b/commands/development/repo-mgmt/repo-merge.md new file mode 100644 index 0000000000000000000000000000000000000000..19451fc3b70f33049a0bd3a464693eb6d19c83d8 --- /dev/null +++ b/commands/development/repo-mgmt/repo-merge.md @@ -0,0 +1,13 @@ +Here is the foundation for the next task: + +The user will provide links to several Github repos or direct you to a doc in the repo containing them. + +The user wishes to integrate these repos into the current codebase. The likely reason is rationalising previous projects that covered overlapping themes. Frequently, this approach will be used to gather together context data for AI tools. 
 + +Upon parsing the links, here's how you should approach the task: + +- Create a folder in this repo called /tmp-clones and add to gitignore +- Clone the repos into this throwaway folder +- Proceed with the user's instruction as to how to integrate the data into this new consolidated repository + +When the task is done, you can (and should) delete the folders in the temporary folder but leave it in place for future use (the folder, that is). \ No newline at end of file diff --git a/commands/development/repo-mgmt/segregation/code-non-code.md new file mode 100644 index 0000000000000000000000000000000000000000..692024d5e717c2a3f38f61fe975f623b30d0e3f5 --- /dev/null +++ b/commands/development/repo-mgmt/segregation/code-non-code.md @@ -0,0 +1,9 @@ +Please help to reorganise the repository according to the following instruction: + +- Code and non-code should be nested within separate top level folders + +If the code-containing folder is already attached to a deployment pipeline, don't change its name without express confirmation from me. + +Otherwise, ensure that all code is nested within a top level folder suitable for the project. For example, create "site" for a website. + +Aggregate non-code under descriptive top level folders like "docs" or "project-notes" (etc). Try to organise and group these within the non-code hierarchy. \ No newline at end of file diff --git a/commands/development/repo-mgmt/segregation/separate-code.md new file mode 100644 index 0000000000000000000000000000000000000000..90573e3ea28f7c0a19c86846513ca5337e276f7f --- /dev/null +++ b/commands/development/repo-mgmt/segregation/separate-code.md @@ -0,0 +1,11 @@ +This repository contains code and non-code elements mixed at the base level. 
+ +Separate them: + +- Create a folder for the code, like 'app', 'website' or whatever is appropriate for the context +- Move the code there +- Non code can stay at the base or be moved into alternative folders + +HOWEVER: + +- Do not move from the base of the repo files which should stay there deliberately. These include CLAUDE.md and convenience/launcher scripts like bash executors. .gitignore should also not be moved. Use your intelligence to avoid moving from the repo base any files that you know or can infer should remain there in spite of the general principle outlined above. \ No newline at end of file diff --git a/commands/development/security/allow-env.md b/commands/development/security/allow-env.md new file mode 100644 index 0000000000000000000000000000000000000000..a25b96c5f700341e0993d5c8e40730d856958641 --- /dev/null +++ b/commands/development/security/allow-env.md @@ -0,0 +1,5 @@ +I have a global git ignore that blocks .env from syncing + +This is a private repo and I wish to override that behavior and ensure that .env is synced + +Please add or update a manual .gitignore at the repo level to achieve that behavior and verify that it makes it into version control \ No newline at end of file diff --git a/commands/development/security/dehardcode.md b/commands/development/security/dehardcode.md new file mode 100644 index 0000000000000000000000000000000000000000..31e4c962f6d4eaa2d2f6e6a9b915cd16427111ac --- /dev/null +++ b/commands/development/security/dehardcode.md @@ -0,0 +1,7 @@ +This project contains some environment variables which are hardcoded. + +Create a .env. + +Move the variables there. + +Refactor to ensure that the program loads from .env. 
\ No newline at end of file diff --git a/commands/development/security/scan-pii.md b/commands/development/security/scan-pii.md new file mode 100644 index 0000000000000000000000000000000000000000..5314d10a7aa6177a36f3760d59c81b75521ba6d5 --- /dev/null +++ b/commands/development/security/scan-pii.md @@ -0,0 +1,60 @@ +Scan repository for personally identifiable information (PII) before open sourcing. + +Your task: +1. Proactively scan for subtle forms of PII: + - Personal email addresses + - Phone numbers + - Physical addresses + - Names in comments or documentation + - Social media handles + - IP addresses + - MAC addresses + - Personal identifiers in test data + - Usernames that might be personally identifying + - Organization-specific information + +2. Assume API key protection is already handled, focus on: + - Developer names in code comments + - Email addresses in git commits (use git log) + - Personal information in documentation + - Internal URLs or server names + - Employee IDs or internal identifiers + - Customer data in examples + - Screenshots with personal info + +3. Do NOT remediate automatically: + - Only identify and report findings + - Do not delete, change, or obfuscate without permission + - Present comprehensive list to user + +4. Generate detailed report: + ```markdown + ## PII Scan Report + + ### High Risk + - File: src/config.js:45 + Type: Email address + Content: [REDACTED]@company.com + Context: Developer email in comment + + ### Medium Risk + - File: docs/api.md:12 + Type: Internal URL + Content: https://internal-server.local + Context: Example API endpoint + + ### Low Risk + - File: README.md:34 + Type: Username + Content: @john_developer + Context: Acknowledgments section + + ## Recommendations + 1. Replace developer emails with generic project email + 2. Use example.com for URL examples + 3. Consider anonymizing usernames + ``` + +5. 
Seek user advice on remediation approach + +Help users identify PII they may wish to remove before making repositories public. diff --git a/commands/development/setup-ci-cd.md b/commands/development/setup-ci-cd.md new file mode 100644 index 0000000000000000000000000000000000000000..26678ae0e88abb68290f5412e49d2bfbf8e22522 --- /dev/null +++ b/commands/development/setup-ci-cd.md @@ -0,0 +1,51 @@ +Set up continuous deployment (CI/CD) pipeline for this project. + +Your task: +1. Understand deployment requirements: + - Target environment (GitHub Pages, Vercel, Netlify, AWS, etc.) + - Build process and artifacts + - Environment variables and secrets + - Testing requirements + +2. Choose and configure CI/CD platform: + - **GitHub Actions** (recommended for GitHub repos) + - **GitLab CI** + - **CircleCI** + - **Jenkins** + +3. Create workflow configuration: + - Set up build pipeline + - Configure testing stage + - Set up deployment stage + - Configure triggers (push, PR, tags) + +4. Example GitHub Actions workflow: + ```yaml + name: CI/CD + on: + push: + branches: [main] + jobs: + build-and-deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Setup + - name: Build + - name: Test + - name: Deploy + ``` + +5. Configure deployment: + - Set up deployment secrets + - Configure deployment environments + - Set up environment-specific variables + - Add deployment status checks + +6. Document the pipeline: + - Workflow stages explained + - How to trigger deployments + - How to monitor pipeline status + - Rollback procedures + +Establish scalable continuous deployment for the development repository. diff --git a/commands/development/setup-hot-reload.md b/commands/development/setup-hot-reload.md new file mode 100644 index 0000000000000000000000000000000000000000..3451f50d50f5f9e68c73f08463359fc58056a64b --- /dev/null +++ b/commands/development/setup-hot-reload.md @@ -0,0 +1,36 @@ +Set up a hot-reloading development server for instant code changes. 
+ +Your task: +1. Detect the project type and framework: + - React/Vue/Angular (Vite, Webpack, etc.) + - Node.js/Express (nodemon, node --watch) + - Python (Flask/Django with auto-reload) + - Static sites (live-server, browser-sync) + +2. Configure hot reloading based on the stack: + - Install necessary dev dependencies + - Update configuration files + - Set up file watching + - Configure port and proxy settings + +3. Create or update dev scripts: + ```json + "scripts": { + "dev": "appropriate-dev-server --watch", + "start": "production-command" + } + ``` + +4. Optimize for performance: + - Configure file watch patterns + - Exclude unnecessary directories + - Set up source maps + - Enable HMR (Hot Module Replacement) if available + +5. Document the dev server usage: + - How to start the dev server + - Available options and flags + - Port configuration + - Troubleshooting common issues + +Ensure instant feedback on code changes for optimal development workflow. diff --git a/commands/development/task-mgmt/create-debugging-folder.md b/commands/development/task-mgmt/create-debugging-folder.md new file mode 100644 index 0000000000000000000000000000000000000000..1611b2557c451963150ef3ff918d642ce3ebafcb --- /dev/null +++ b/commands/development/task-mgmt/create-debugging-folder.md @@ -0,0 +1,39 @@ +# Create Debugging Folder + +You are helping the user set up a structured debugging workspace for investigating and resolving issues. + +## Task + +Create a well-organized debugging folder structure in the current directory that includes: + +1. **Root debugging folder** with a descriptive name based on the issue being investigated +2. 
**Subdirectories** for: + - `logs/` - For storing relevant log files and outputs + - `reproduction/` - For minimal reproduction cases and test scripts + - `analysis/` - For notes, findings, and analysis documents + - `solutions/` - For attempted fixes and working solutions + - `references/` - For relevant documentation, stack traces, and external resources + +3. **Initial files**: + - `README.md` in the root with: + - Issue description + - Environment details + - Steps to reproduce + - Current status + - Timeline/log of investigation + - `notes.md` in the analysis folder for ongoing observations + +## Process + +1. Ask the user for the issue/bug name or description to name the folder appropriately +2. Create the folder structure +3. Initialize the README.md with template sections +4. Confirm the structure has been created and guide the user on next steps + +## Output + +Provide the user with: +- Confirmation of the created structure +- Path to the new debugging folder +- Brief explanation of how to use each subdirectory +- Suggestion to start documenting the issue in the README.md diff --git a/commands/development/task-mgmt/note-block.md new file mode 100644 index 0000000000000000000000000000000000000000..ab12c7cbbe914df1b2c188d71ebe1f2765ae6915 --- /dev/null +++ b/commands/development/task-mgmt/note-block.md @@ -0,0 +1 @@ +Please pause development and generate a document in the from AI folder if it exists in which you note the current blocker that we are facing. Describe the troubleshooting steps that we have undertaken to date, as well as their outcomes. Provide as much detail as possible, understanding that the context of this document will be to provide a compact snapshot for resuming the troubleshooting at a future point in time. 
\ No newline at end of file diff --git a/commands/development/task-mgmt/setup.md b/commands/development/task-mgmt/setup.md new file mode 100644 index 0000000000000000000000000000000000000000..fc2dc59d12563caa164a29ab057351587bdf6dfd --- /dev/null +++ b/commands/development/task-mgmt/setup.md @@ -0,0 +1,14 @@ +Please set up a task tracking system in this repo. + +The basic skeleton is as follows: + +/tasks + +And as subfolders: + +/to-do +/drafts +/done +/in-progress + +If the user has already implemented or partially implemented a task tracking solution, keep it in place (search for similar words like projects) \ No newline at end of file diff --git a/commands/development/task-mgmt/summarise-progress.md b/commands/development/task-mgmt/summarise-progress.md new file mode 100644 index 0000000000000000000000000000000000000000..658acf118709bb0d3bc4112e33be1f3b51cd7de7 --- /dev/null +++ b/commands/development/task-mgmt/summarise-progress.md @@ -0,0 +1 @@ +Please pause development and generate a progress summary document which you should store in the from AI folder if it exists in this repository. In this document, you should provide a detailed note of the development that we undertook in this chat/conversation. Summarize where we began, the changes that we implemented to the code base, and any blockers that remain and have prevented us from making further progress. If the user instructs for any additional notes to be appended to the document, include them. The document that you generate should be a thorough document which provides sufficient context for you or another AI agent to resume development at a future point. 
\ No newline at end of file diff --git a/commands/development/ux/css/improve-css.md b/commands/development/ux/css/improve-css.md new file mode 100644 index 0000000000000000000000000000000000000000..0db8d6a16cdc5185029d6f59649caa3c0edb5c73 --- /dev/null +++ b/commands/development/ux/css/improve-css.md @@ -0,0 +1,32 @@ +Edit and improve CSS design and appearance for the website. + +Your task: +1. Ask the user which page or component they want to edit, or if changes should apply site-wide + +2. Take a detailed brief about desired changes: + - Layout modifications + - Color scheme updates + - Typography improvements + - Spacing and alignment + - Responsive design adjustments + - Animation/transitions + +3. Implement the CSS changes: + - Edit existing stylesheets + - Add new styles as needed + - Ensure consistency across the site + - Maintain responsive design + - Follow CSS best practices + +4. Focus on UX elements: + - Interactive states (hover, focus, active) + - User feedback (loading, success, error states) + - Accessibility (contrast, focus indicators) + - Visual hierarchy + +5. Test and validate: + - Check responsive behavior + - Verify browser compatibility + - Ensure accessibility standards + +Deliver polished, user-friendly CSS improvements based on the user's requirements. diff --git a/commands/development/ux/icons/standardize-icons.md b/commands/development/ux/icons/standardize-icons.md new file mode 100644 index 0000000000000000000000000000000000000000..bf3aa93e9b9500a8e975a9790b8bb8a90e878ea8 --- /dev/null +++ b/commands/development/ux/icons/standardize-icons.md @@ -0,0 +1,34 @@ +Evaluate and standardize icon usage across the project. + +Your task: +1. Analyze current icon implementation: + - Locate all custom/self-made icons + - Identify inconsistent icon styles + - Note icon sizes and formats + - Check accessibility (alt text, ARIA labels) + +2. 
Recommend icon libraries: + - **Font Awesome** (comprehensive, widely supported) + - **Material Icons** (Google's design system) + - **Feather Icons** (minimal, consistent) + - **Heroicons** (Tailwind-friendly) + - **Bootstrap Icons** + +3. Create standardization plan: + - Map custom icons to library equivalents + - Establish consistent sizing (16px, 24px, 32px, etc.) + - Define color usage + - Set spacing/padding standards + +4. Implement improvements: + - Replace custom icons with library icons + - Ensure consistent styling + - Add proper accessibility attributes + - Optimize icon loading (SVG sprite, icon font, etc.) + +5. Document icon usage: + - Create icon style guide + - List available icons + - Show usage examples + +Enhance professionalism and consistency by standardizing icon implementation. diff --git a/commands/development/ux/make-it-pretty.md b/commands/development/ux/make-it-pretty.md new file mode 100644 index 0000000000000000000000000000000000000000..35d0ab965327251bd0b7d86537fd31e3d350396b --- /dev/null +++ b/commands/development/ux/make-it-pretty.md @@ -0,0 +1 @@ +The basic functionality of this application has been validated. I would like you to review the application for its appearance. Let's try to make the application as visually appealing as possible, and let's make the UI, UX as easy to use as possible. Focus on typography, iconography, layout and overall ease of use. Suggest implementations. \ No newline at end of file diff --git a/commands/development/ux/optimize-dashboard.md b/commands/development/ux/optimize-dashboard.md new file mode 100644 index 0000000000000000000000000000000000000000..ab9d361cc97e5a6b590ce54771a348569927c266 --- /dev/null +++ b/commands/development/ux/optimize-dashboard.md @@ -0,0 +1,40 @@ +Optimize and enhance display dashboard for kiosk mode. + +Your task: +1. 
Gather requirements: + - Target screen sizes (desktop, tablet, large displays) + - Kiosk mode duration (hours/days continuously) + - Key metrics to display + - Update frequency requirements + +2. Optimize for extended display: + - **Caching strategy**: + - Service worker for offline capability + - Cache API for data storage + - LocalStorage for persistent state + + - **Refresh functionality**: + - Auto-refresh intervals + - Manual refresh option + - Stale data indicators + - Error recovery + +3. Responsive design for multiple screens: + - Fluid layouts that adapt to screen size + - Breakpoints for common displays + - Optimal font scaling + - Touch-friendly controls (if applicable) + +4. Performance optimization: + - Lazy loading for data + - Efficient rendering (virtualization for large datasets) + - Resource cleanup to prevent memory leaks + - Optimize animations for long-running displays + +5. Visual design for readability: + - High contrast for visibility + - Clear hierarchy + - Minimal clutter + - Status indicators + +For static site generator dashboards, ensure compatibility and optimal caching strategies. diff --git a/commands/development/ux/typography/optimize-fonts.md b/commands/development/ux/typography/optimize-fonts.md new file mode 100644 index 0000000000000000000000000000000000000000..ef13ca680960f5fed72521c6eaf06d7732d56163 --- /dev/null +++ b/commands/development/ux/typography/optimize-fonts.md @@ -0,0 +1,36 @@ +Provide expert advice on font selection and implementation, focusing on typography hierarchy. + +Your task: +1. Analyze current font usage in the project: + - Current font families + - Font hierarchy (headings, body, etc.) + - Font stacking + - Web font loading + +2. Provide font recommendations: + - Suggest fonts from Google Font Library or web-safe fonts + - Consider readability and brand alignment + - Recommend font pairings (heading + body) + - Suggest fallback fonts + +3. 
Implement better font hierarchy: + ```css + /* Example hierarchy */ + h1 { font-family: 'Heading Font', fallback; font-size: 2.5rem; } + h2 { font-family: 'Heading Font', fallback; font-size: 2rem; } + body { font-family: 'Body Font', fallback; font-size: 1rem; } + ``` + +4. Optimize font loading: + - Use font-display property + - Preload critical fonts + - Implement proper font stacking + - Consider variable fonts for performance + +5. Enhance visual appeal: + - Set appropriate line-height + - Configure letter-spacing + - Establish font-weight hierarchy + - Ensure responsive typography + +Guide users in creating a professional, readable typography system. diff --git a/commands/development/web-development/responsive-audit.md b/commands/development/web-development/responsive-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..ef3f2d45ae1e59af9d3cecadbd2ee55492f4f69c --- /dev/null +++ b/commands/development/web-development/responsive-audit.md @@ -0,0 +1,48 @@ +Evaluate mobile-friendliness and propose responsive design improvements. + +Your task: +1. Audit current responsive design: + - Test at common breakpoints (mobile, tablet, desktop) + - Check viewport meta tag + - Evaluate touch targets (minimum 44x44px) + - Test horizontal scrolling issues + - Check font sizes on mobile + +2. Identify issues: + - Text too small to read + - Content wider than screen + - Links/buttons too close together + - Images not responsive + - Poor mobile navigation + - Viewport not configured + +3. Propose improvements: + - Add/fix viewport meta tag: + ```html + + ``` + - Implement responsive images: + ```html + + ``` + - Fix CSS for responsive layout: + ```css + @media (max-width: 768px) { + /* Mobile styles */ + } + ``` + +4. Mobile-specific enhancements: + - Touch-friendly navigation + - Hamburger menu if needed + - Appropriate spacing + - Readable font sizes (minimum 16px base) + - Optimized images for mobile + +5. 
Testing recommendations: + - Chrome DevTools device mode + - Real device testing + - Mobile-Friendly Test (Google) + - Lighthouse mobile audit + +Deliver a comprehensive mobile optimization plan with actionable improvements. diff --git a/commands/development/web-development/suggest-frameworks.md b/commands/development/web-development/suggest-frameworks.md new file mode 100644 index 0000000000000000000000000000000000000000..212fade8109755dbe30d237e8dbf9b122830e4de --- /dev/null +++ b/commands/development/web-development/suggest-frameworks.md @@ -0,0 +1,47 @@ +Suggest applicable UI/UX frameworks to enhance the website. + +Your task: +1. Identify current technology stack: + - Frontend framework (React, Vue, Angular, vanilla JS) + - Build tools + - Current styling approach + - Project requirements + +2. Recommend relevant UI/UX frameworks: + + **For React:** + - Material-UI (MUI) - comprehensive component library + - Chakra UI - accessible, composable + - Ant Design - enterprise-grade + - Tailwind CSS + Headless UI + + **For Vue:** + - Vuetify - Material Design components + - Element Plus - desktop UI + - Quasar - full framework + - PrimeVue + + **For Angular:** + - Angular Material + - PrimeNG + - NG-ZORRO + + **Framework-agnostic:** + - Tailwind CSS - utility-first + - Bootstrap - classic framework + - Bulma - modern CSS framework + +3. Explain benefits: + - Scalability and replicability + - Consistent design system + - Pre-built components + - Accessibility built-in + - Active community support + +4. Provide implementation guidance: + - Installation instructions + - Configuration setup + - Migration strategy from current approach + - Best practices for the chosen framework + +Help users leverage existing libraries and frameworks for scalable, professional UI/UX. 
diff --git a/commands/documentation/awesome-lists/awesome-list-data-extraction.md b/commands/documentation/awesome-lists/awesome-list-data-extraction.md new file mode 100644 index 0000000000000000000000000000000000000000..770adc43fa6e55ea4f2240405e719200dfa5a94e --- /dev/null +++ b/commands/documentation/awesome-lists/awesome-list-data-extraction.md @@ -0,0 +1,14 @@ +This repository contains an "awesome list" - a list of useful resources. + +I have created it, as is conventional, using README.md. + +I would like to refactor the list, however, to support a more programmatic workflow. + +To do that, let's work through the following steps: + +- Identify the categorisation system used. Capture this in cats.json +- Identify the repositories listed. Capture them in repos.json noting their categories. These should correspond to the JSON + +After we have created the data files, let's add a compile.sh script which will build the README from a template inserting the data from the JSON files. + +The goal is to make it easier to maintain the readme by modularising its maintenance (down the line, I might create a UI. 
But for now, leave it at the JSON creation stage) \ No newline at end of file diff --git a/commands/documentation/badges/batch/repo-links-as-badges.md b/commands/documentation/badges/batch/repo-links-as-badges.md new file mode 100644 index 0000000000000000000000000000000000000000..508b049b96ee48b892e47f45c7ef1f5ee901ad16 --- /dev/null +++ b/commands/documentation/badges/batch/repo-links-as-badges.md @@ -0,0 +1,9 @@ +This repository's readme contains links to other repos + +Please implement the following changes: + +- If the links are relative links to paths in *this* repo, use shields.io to create a View Folder badge and replace the textual link with this +- If the links are external links to Github repos (whether they're my own or others'), use shields.io to create a View Repo badge and replace the textual link +- If the links are to Hugging Face datasets/models/spaces create the applicable shields.io badge + +The objective is to make the index a little easier to read. Use a consistent style for the badges you add. \ No newline at end of file diff --git a/commands/documentation/badges/batch/shields-badges.md b/commands/documentation/badges/batch/shields-badges.md new file mode 100644 index 0000000000000000000000000000000000000000..f64d4cbd6900f693fbe33522ba4029b2f45945ee --- /dev/null +++ b/commands/documentation/badges/batch/shields-badges.md @@ -0,0 +1,5 @@ +Please add a few shields.io badges to the repo + +Do not overdo it - less is more! + +I like to add them near the top of the readme and use them to note key things about this repo like the language used. 
\ No newline at end of file diff --git a/commands/documentation/badges/specifics/add-related-repos-section.md b/commands/documentation/badges/specifics/add-related-repos-section.md new file mode 100644 index 0000000000000000000000000000000000000000..4ed63a453e0c3a2b060bb928a18169ef1f408d6e --- /dev/null +++ b/commands/documentation/badges/specifics/add-related-repos-section.md @@ -0,0 +1,13 @@ +This repository is part of a group of repositories. + +The user would like to add a "Related Repos" section to both make that clear and make it easier for browsers to find the connected parts of this project. + +Here's the approach you should take: + +- Ask the user to provide the list of related repos. Alternatively the user will provide these one by one. + +Once you've got the list: + +- Add a related repos section towards the bottom of README.md +- List the related repos alphabetically, unless the user has requested an alternative method +- Add the links as github repo badges with shields.io \ No newline at end of file diff --git a/commands/documentation/badges/specifics/ai-context.md b/commands/documentation/badges/specifics/ai-context.md new file mode 100644 index 0000000000000000000000000000000000000000..fae5c062afb4f44cf9bcc9772f6fc35a1d4bb396 --- /dev/null +++ b/commands/documentation/badges/specifics/ai-context.md @@ -0,0 +1,8 @@ +Add an "AI Context Repository" badge to the repository README indicating this repo contains context files, documentation, or resources designed for AI agent consumption. + +Use this shields.io badge format: +```markdown +![AI Context Repository](https://img.shields.io/badge/AI_Context_Repository-FF6B6B?style=for-the-badge&logo=robot&logoColor=white) +``` + +Add it to the appropriate badges section in the README. 
diff --git a/commands/documentation/badges/specifics/dataset.md b/commands/documentation/badges/specifics/dataset.md new file mode 100644 index 0000000000000000000000000000000000000000..08308bd748ad066478145baff05109937a862a34 --- /dev/null +++ b/commands/documentation/badges/specifics/dataset.md @@ -0,0 +1,32 @@ +This repository contains a dataset or collection of data files. + +The user wishes to make this clear to visitors who might be expecting a code repository. + +Here's how you can help make this repo's purpose clear: + +- Add a Dataset badge using shields.io near the top of the README. If a badge line already exists, add it there. +- Use an appropriate color like blue or green for the dataset badge + +Add this section towards the middle of the README: + +## Dataset + +This repository contains a dataset that has been open-sourced for public use. + +### About this dataset + +[Placeholder for dataset description - ask the user for details about the dataset's content, format, and purpose] + +### Usage + +The data is provided as-is for research, analysis, or other purposes. Please review any included LICENSE file for usage terms. + +/// + +Ask the user: +1. What format is the data in? (CSV, JSON, Parquet, etc.) +2. What is the dataset about/what does it contain? +3. Is there any specific license or attribution requirements? +4. Are there any related repositories (e.g., code that uses this dataset)? + +Integrate these details into the description to make it more informative. diff --git a/commands/documentation/badges/specifics/demo-workflow.md b/commands/documentation/badges/specifics/demo-workflow.md new file mode 100644 index 0000000000000000000000000000000000000000..8710fbe125cdf57b459d59d8072a8c26c30eb37d --- /dev/null +++ b/commands/documentation/badges/specifics/demo-workflow.md @@ -0,0 +1,41 @@ +This repository is a demonstration/example repository created to showcase a particular workflow, functionality, or technical pattern. 
+ +Add a "Demo" badge to the README using shields.io: + +```markdown +![Demo Repository](https://img.shields.io/badge/Demo-Repository-blue?style=for-the-badge&logo=github&logoColor=white) +``` + +Add this section near the top of the README (after any title and before the main content): + +## Purpose + +This repository is a **demonstration/example project** designed to showcase a particular workflow or functionality. + +### What This Repository Demonstrates + +[Brief description of what workflow/functionality is being demonstrated] + +### Key Features Demonstrated + +- [Feature/workflow element 1] +- [Feature/workflow element 2] +- [Feature/workflow element 3] + +### Use Cases + +This demonstration is useful for: +- Understanding [specific concept/pattern] +- Learning how to implement [specific functionality] +- Reference implementation for [specific use case] + +--- + +Ask the user: +1. What specific workflow or functionality does this repository demonstrate? +2. What are the key features or patterns being showcased? +3. Who is the target audience for this demo? (e.g., "developers learning X", "teams implementing Y") +4. Are there any prerequisites or background knowledge needed to understand this demo? +5. Should there be links to related documentation or resources? + +Use these details to customize the Purpose section and make the demonstration's value clear to visitors. diff --git a/commands/documentation/badges/specifics/deprecated.md b/commands/documentation/badges/specifics/deprecated.md new file mode 100644 index 0000000000000000000000000000000000000000..e4a22cea27744d3e4449dd84b93e350d52a3c857 --- /dev/null +++ b/commands/documentation/badges/specifics/deprecated.md @@ -0,0 +1,13 @@ +Add a badge to this repo using shields.io that says Deprecated + +Add this text as a h2 somewhere near the top of readme + +## Deprecated + +This repository contains code/an approach that has been deprecated. + +See: X for an updated repo. 
+ +/// + +"X" is a placeholder for an updated repo and can be a Shields.io badge. Ask the user if there is an updated repo and to provide the URL. Replace X with that repo if one exists. If not, don't add that line. \ No newline at end of file diff --git a/commands/documentation/badges/specifics/docs.md b/commands/documentation/badges/specifics/docs.md new file mode 100644 index 0000000000000000000000000000000000000000..791fbbf24ec17432da573bcde2699fd4311b6f46 --- /dev/null +++ b/commands/documentation/badges/specifics/docs.md @@ -0,0 +1,18 @@ +This repository contains documents and/or writing. + +The user wishes to make this clear to any visitors who might be expecting a code repo. + +Here's how you can help make this repo's purpose more clear to visitors: + +- Add a Docs Shields.io badge towards the top of the readme. If a badge line already exists, add it there. + +Add this towards the bottom: + +## Docs + +This is a documentation repository containing documentation for a project. + + +/// + +If the user provides the link to a related repo like the code repo for the docs this repo refers to, you can integrate that link into the disclaimer. \ No newline at end of file diff --git a/commands/documentation/badges/specifics/experiment-notes.md b/commands/documentation/badges/specifics/experiment-notes.md new file mode 100644 index 0000000000000000000000000000000000000000..ebf88a0a584ce8c11af67e9885337dabd0fe5d30 --- /dev/null +++ b/commands/documentation/badges/specifics/experiment-notes.md @@ -0,0 +1,37 @@ +This repository contains notes and results from an experiment. + +The user wishes to make this clear so visitors understand this is documentation of experimental work rather than production code. + +Here's how you can help make this repo's purpose clear: + +- Add an Experiment badge using shields.io near the top of the README. If a badge line already exists, add it there. 
+- Use a color like yellow or orange for the experiment badge to indicate exploratory/research nature + +Add this section near the top of the README (after any intro): + +## Experiment Notes + +This repository contains notes, data, and results from an experiment. + +### About this experiment + +[Placeholder for experiment description - ask the user what was being tested/explored] + +### Results + +[Placeholder for key findings - ask the user for main outcomes or conclusions] + +### Context + +This is shared for transparency and to potentially help others exploring similar areas. The approach or findings may not be production-ready or represent best practices. + +/// + +Ask the user: +1. What was the experiment testing or exploring? +2. What were the key findings or results? +3. What tools/technologies were used in the experiment? +4. Are there any follow-up experiments or related repositories? +5. What was the timeframe of this experiment? + +Integrate these details to provide context about the experimental work and its findings. diff --git a/commands/documentation/badges/specifics/hf-demo.md b/commands/documentation/badges/specifics/hf-demo.md new file mode 100644 index 0000000000000000000000000000000000000000..cbe69a24ee60d39db515c4f4681d3b170134f815 --- /dev/null +++ b/commands/documentation/badges/specifics/hf-demo.md @@ -0,0 +1,7 @@ +This Hugging Face has a POC/demo on Hugging Face. + +The user would like to add a link/badge. + +The user will provide the HF Space link. + +Once you have it, add a section near the top of the readme. Use a shields.io badge with Hugging Face Space to link to the POC/demo and surrounding text that says (approximately): A proof of concept for this repository is available on Hugging Face, here. 
\ No newline at end of file diff --git a/commands/documentation/badges/specifics/index-pages/add-apps-index-badge.md b/commands/documentation/badges/specifics/index-pages/add-apps-index-badge.md new file mode 100644 index 0000000000000000000000000000000000000000..d31163da183038c3180995159f4ff3476c5a49f6 --- /dev/null +++ b/commands/documentation/badges/specifics/index-pages/add-apps-index-badge.md @@ -0,0 +1,17 @@ +This repository is an app. + +I have an apps index page that I use to gather these together. + +That index is here: + +https://github.com/danielrosehill/Apps-Index + +The master index is here: + +https://github.com/danielrosehill/Github-Master-Index + +Please add two badges, using shields.io, to the top of the readme on this project: + +1: A badge linking to my Apps Index +2: A badge linking to my Master Index + diff --git a/commands/documentation/badges/specifics/index-pages/add-master-index-badge.md b/commands/documentation/badges/specifics/index-pages/add-master-index-badge.md new file mode 100644 index 0000000000000000000000000000000000000000..fb0e0cb9bd21af1024aecdcf3b0ffd9e449ff011 --- /dev/null +++ b/commands/documentation/badges/specifics/index-pages/add-master-index-badge.md @@ -0,0 +1,15 @@ +This repository is an indexing repository. + +I create these to gather together resources I create about a common topic. + +I also have a master index which I use as a "top level" map of the repos I create on Github. + +That's here: + +https://github.com/danielrosehill/Github-Master-Index + +Please: + +Add a link to the master index, using Shields.io, to the top of the readme. If there is an existing badges section, add that into it. if not, create one alongside an index repo badge. + +The purpose of these links is to make it easier to jump between the various indexing related repos I have on Github. 
\ No newline at end of file diff --git a/commands/documentation/badges/specifics/index-pages/claude-code-project.md b/commands/documentation/badges/specifics/index-pages/claude-code-project.md new file mode 100644 index 0000000000000000000000000000000000000000..ab67736bf23f010754adad6bee12413244635180 --- /dev/null +++ b/commands/documentation/badges/specifics/index-pages/claude-code-project.md @@ -0,0 +1,13 @@ +This repository contains a project related to Claude Code + +Please do the following: + +Add a Claude Code badge with shields.io to the top of the readme. + +Add links (also shields.io) to: + +- My Claude Code projects index: +https://github.com/danielrosehill/Claude-Code-Repos-Index + +- My "master" Index: +https://github.com/danielrosehill/Github-Master-Index \ No newline at end of file diff --git a/commands/documentation/badges/specifics/index-pages/gemini-project.md b/commands/documentation/badges/specifics/index-pages/gemini-project.md new file mode 100644 index 0000000000000000000000000000000000000000..e4338ad5e972a0b013c5efefea7cf5b45bc01ce7 --- /dev/null +++ b/commands/documentation/badges/specifics/index-pages/gemini-project.md @@ -0,0 +1,26 @@ +This repository contains a demo/starter that I created using Google AI Studio and/or Gemini + +Please do the following: + +- Overwrite the existing README if it's the boilerplate readme autopopulated by AI Studio + +- Add a custom readme describing the project's functionality. Integrate screenshots if I have created them + +- Add a shields.io badge near the top of the readme with Google AI Studio, POC Starter + + + +Add links (also shields.io) to: + +- My index of AI Studio POCs: +- +https://github.com/danielrosehill/Gemini-Vibe-Coding-Projects + +- My "master" Index: +https://github.com/danielrosehill/Github-Master-Index + +Add this as a section towards the bottom + +## AI Studio POC + +This project contains a proof of concept (POC) that was autopopulated by Google AI Studio. 
It is intended as a code starter and may not yet have been manually reviewed and/or taken further. I create some Gemini POCs, in particular, to experiment with/test the capabilities of multimodal AI. \ No newline at end of file diff --git a/commands/documentation/badges/specifics/index-pages/subindex.md b/commands/documentation/badges/specifics/index-pages/subindex.md new file mode 100644 index 0000000000000000000000000000000000000000..9dbfaf4d8e2da7d7dd30ca7b7412b0aa72287cab --- /dev/null +++ b/commands/documentation/badges/specifics/index-pages/subindex.md @@ -0,0 +1,13 @@ +This repository is an indexing repository - I create these to gather together links to various repos that I create around a common theme. + +Add a shields.io markdown badge near the top of the readme saying Indexing Repo + +Add, also, a link to my master index as a markdown badge: + +https://github.com/danielrosehill/Github-Master-Index + +Add as a section in the README, near the bottom + +## Indexing Repository + +This repository is an indexing repository. I create indexing repos to consolidate links to individual repos which I create around a common theme and update these periodically. For a top level index, this is my [master index](https://github.com/danielrosehill/Github-Master-Index). Thanks for visiting! diff --git a/commands/documentation/badges/specifics/index-repo.md b/commands/documentation/badges/specifics/index-repo.md new file mode 100644 index 0000000000000000000000000000000000000000..9ac894188339ec017010088cda2cbf842ba043ea --- /dev/null +++ b/commands/documentation/badges/specifics/index-repo.md @@ -0,0 +1,37 @@ +This repository is an index/collection of related repositories organized around a common theme. + +The user wishes to make this clear so visitors understand this is a curated list rather than a single project. + +Here's how you can help make this repo's purpose clear: + +- Add an Index badge using shields.io near the top of the README. 
If a badge line already exists, add it there. +- Use a color like blue or teal for the index badge + +Add this section near the top of the README (after any intro/description): + +## Repository Index + +This repository serves as an index of related repositories organized around a common theme. + +### About this index + +[Placeholder for index description - ask the user what theme/topic this index covers] + +### Repositories in this collection + +[Placeholder for repository list - this will be populated with links to the related repos] + +--- + +**Note:** This is a focused index covering a specific topic area. For a higher-level collection of all repository indexes and other projects, see the [GitHub Master Index](https://github.com/danielrosehill/Github-Master-Index). + +/// + +Ask the user: +1. What is the common theme or topic that unifies these repositories? +2. How many repositories are currently in this index? +3. Should the repositories be organized in categories/sections or just listed? +4. Are there specific criteria for what gets included in this index? +5. Is this index actively maintained/updated? + +Integrate these details to help visitors understand the scope and organization of the index. diff --git a/commands/documentation/badges/specifics/linux-cli.md b/commands/documentation/badges/specifics/linux-cli.md new file mode 100644 index 0000000000000000000000000000000000000000..03300fc290aeb1b017d4bde42072c3c45330c35b --- /dev/null +++ b/commands/documentation/badges/specifics/linux-cli.md @@ -0,0 +1,5 @@ +This repository contains a Linux CLI (command-line interface) tool or script. + +Add a Linux CLI badge using shields.io near the top of the README. If a badge line already exists, add it there. + +Use an appropriate color scheme - consider black/dark gray or the classic Linux penguin colors. 
diff --git a/commands/documentation/badges/specifics/linux-gui.md b/commands/documentation/badges/specifics/linux-gui.md new file mode 100644 index 0000000000000000000000000000000000000000..2251d891efdc541b9834e89eaf1909e20b422943 --- /dev/null +++ b/commands/documentation/badges/specifics/linux-gui.md @@ -0,0 +1,5 @@ +This repository contains a Linux GUI (graphical user interface) application. + +Add a Linux GUI badge using shields.io near the top of the README. If a badge line already exists, add it there. + +Use an appropriate color scheme - consider blue or the classic Linux penguin colors. diff --git a/commands/documentation/badges/specifics/markdown-files.md b/commands/documentation/badges/specifics/markdown-files.md new file mode 100644 index 0000000000000000000000000000000000000000..988bf5acd3339bb1e0861ffbea6686cada5cbc92 --- /dev/null +++ b/commands/documentation/badges/specifics/markdown-files.md @@ -0,0 +1,8 @@ +Add a "Markdown Files" badge to the repository README indicating this repo primarily contains markdown documentation or content. + +Use this shields.io badge format: +```markdown +![Markdown Files](https://img.shields.io/badge/Markdown_Files-000000?style=for-the-badge&logo=markdown&logoColor=white) +``` + +Add it to the appropriate badges section in the README. diff --git a/commands/documentation/badges/specifics/obsidian-notebook.md b/commands/documentation/badges/specifics/obsidian-notebook.md new file mode 100644 index 0000000000000000000000000000000000000000..910abc9ca5b0f4706cd8cb654b080ee1e209c0da --- /dev/null +++ b/commands/documentation/badges/specifics/obsidian-notebook.md @@ -0,0 +1,23 @@ +This repository contains an open-sourced Obsidian notebook/vault. + +The user wishes to make this clear to visitors who might be expecting a code repository. + +Here's how you can help make this repo's purpose clear: + +- Add an Obsidian Vault badge using shields.io near the top of the README. If a badge line already exists, add it there. 
+- Use a purple/violet color scheme for the badge (Obsidian's brand color) + +Add this section towards the middle or bottom of the README: + +## Obsidian Vault + +This repository contains an open-sourced Obsidian notebook/vault that has been shared for the benefit of anyone who might find the content interesting or useful. + +Feel free to: +- Browse the notes and content +- Clone this vault and use it in your own Obsidian setup +- Fork and adapt it for your own purposes + +/// + +If the user specifies a particular topic focus for the vault (e.g., "AI prompts", "personal knowledge management", "technical documentation"), integrate that into the description to make it more specific and helpful to potential users. diff --git a/commands/documentation/badges/specifics/one-time-repo.md b/commands/documentation/badges/specifics/one-time-repo.md new file mode 100644 index 0000000000000000000000000000000000000000..6e576c0716ee9607c57fc0f9ca371e8fdb473890 --- /dev/null +++ b/commands/documentation/badges/specifics/one-time-repo.md @@ -0,0 +1,13 @@ +Create a badge using shields.io and add it near the top of the repo. + +The badge should say "One Time Repo" + +Then: + +Add a h2 (##) within the readme, towards the bottom with: + +## One Time Repo + +FYI: + +This repository was created once and future updates are not envisioned. \ No newline at end of file diff --git a/commands/documentation/badges/specifics/project-notes.md b/commands/documentation/badges/specifics/project-notes.md new file mode 100644 index 0000000000000000000000000000000000000000..c64dbc3e39f94a0042b483402bce184cb439e199 --- /dev/null +++ b/commands/documentation/badges/specifics/project-notes.md @@ -0,0 +1,11 @@ +This repository is not a code repository. Rather, the user has created it to consolidate planning notes for a project. + +Please add the following to make its purpose clear for anyone who visits: + +- Add a Project Notes shields.io badge near the top of the README. Use a writing icon if possible. 
+ +Add as a h2 towards the bottom of the readme. + +## Project Notes + +This repository contains my planning notes for a project. It is unlikely to contain code. \ No newline at end of file diff --git a/commands/documentation/badges/specifics/prompt-library.md new file mode 100644 index 0000000000000000000000000000000000000000..464d6b81e05307d4f02fb6a29e64be336f592125 --- /dev/null +++ b/commands/documentation/badges/specifics/prompt-library.md @@ -0,0 +1,8 @@ +Add a "Prompt Library" badge to the repository README indicating this repo contains a collection of reusable prompts for AI systems. + +Use this shields.io badge format: +```markdown +![Prompt Library](https://img.shields.io/badge/Prompt_Library-8A2BE2?style=for-the-badge) +``` + +Add it to the appropriate badges section in the README. diff --git a/commands/documentation/badges/specifics/proof-of-concept.md new file mode 100644 index 0000000000000000000000000000000000000000..1e09fb6f92800cf0fa9e17595a9f5d5fe42073a4 --- /dev/null +++ b/commands/documentation/badges/specifics/proof-of-concept.md @@ -0,0 +1,8 @@ +Add a "Proof of Concept" badge to the repository README indicating this repo contains experimental or demonstration code to validate a technical approach. + +Use this shields.io badge format: +```markdown +![Proof of Concept](https://img.shields.io/badge/Proof_of_Concept-FFA500?style=for-the-badge&logo=flask&logoColor=white) +``` + +Add it to the appropriate badges section in the README. diff --git a/commands/documentation/badges/specifics/scaffold.md new file mode 100644 index 0000000000000000000000000000000000000000..05b30aaee653dd36850566c6f1fd92eb3b010dff --- /dev/null +++ b/commands/documentation/badges/specifics/scaffold.md @@ -0,0 +1,39 @@ +This repository contains a folder structure scaffold/skeleton for others to use. 
+ +The user wishes to make this clear so visitors understand this provides an organizational structure template. + +Here's how you can help make this repo's purpose clear: + +- Add a Scaffold badge using shields.io near the top of the README. If a badge line already exists, add it there. +- Use a color like orange or purple for the scaffold badge + +Add this section near the top of the README (after any intro/description): + +## Folder Scaffold + +This repository provides a standardized folder structure scaffold that you can use as a template for organizing your own projects. + +### How to use this scaffold + +1. Clone or download this repository +2. Copy the folder structure to your project location +3. Adapt and modify the structure to fit your specific needs +4. Remove placeholder files or this notice as needed + +### Structure Overview + +[Placeholder for structure explanation - ask the user about the folder organization] + +### Intended Use Case + +[Placeholder for use case - ask the user what type of projects this scaffold is designed for] + +/// + +Ask the user: +1. What type of projects is this scaffold designed for? (e.g., "data analysis projects", "web development", "documentation repositories") +2. What are the main folders/sections in the scaffold? +3. Are there any placeholder files or README files in the folders to guide users? +4. Is there a specific methodology or best practice this scaffold follows? + +Integrate these details to help users understand how and when to use this scaffold. diff --git a/commands/documentation/badges/specifics/template.md b/commands/documentation/badges/specifics/template.md new file mode 100644 index 0000000000000000000000000000000000000000..a65246f83902c7ae53c3383d028550029cf06b71 --- /dev/null +++ b/commands/documentation/badges/specifics/template.md @@ -0,0 +1,36 @@ +This repository serves as a template for other projects. 
+ +The user wishes to make this clear so visitors understand they can use this as a starting point. + +Here's how you can help make this repo's purpose clear: + +- Add a Template badge using shields.io near the top of the README. If a badge line already exists, add it there. +- Use a green or teal color for the template badge +- Consider adding a "Use this template" button note if this is a GitHub template repository + +Add this section near the top of the README (after any intro/description): + +## Template Repository + +This repository is a template designed to be used as a starting point for similar projects. + +### How to use this template + +1. Click the "Use this template" button (if enabled) or fork this repository +2. Clone your new repository +3. Customize the files according to your needs +4. Remove or modify this template notice + +### What's included + +[Placeholder for template contents - ask the user what this template provides] + +/// + +Ask the user: +1. What type of project is this a template for? (e.g., "Python CLI tools", "React apps", "Documentation sites") +2. What key features/components does the template include? +3. Are there any setup steps users should follow after cloning? +4. Is this configured as a GitHub template repository? + +Integrate these details to provide clear guidance on using the template. diff --git a/commands/documentation/badges/specifics/web-ui.md b/commands/documentation/badges/specifics/web-ui.md new file mode 100644 index 0000000000000000000000000000000000000000..fa793a3c187c763b66fa2de6ec77348379c9b8a6 --- /dev/null +++ b/commands/documentation/badges/specifics/web-ui.md @@ -0,0 +1,5 @@ +This repository contains a web-based user interface or web application. + +Add a Web UI badge using shields.io near the top of the README. If a badge line already exists, add it there. + +Use an appropriate color scheme - consider blue, teal, or web-standard colors. 
diff --git a/commands/documentation/badges/specifics/wip.md b/commands/documentation/badges/specifics/wip.md new file mode 100644 index 0000000000000000000000000000000000000000..7293a455d42a6c536e1a6a654bc37ff58ea477f9 --- /dev/null +++ b/commands/documentation/badges/specifics/wip.md @@ -0,0 +1,42 @@ +This repository is a work in progress containing early notes or starter code for a project. + +The user wishes to make this clear so visitors understand this is not a finished or production-ready resource. + +Here's how you can help make this repo's purpose clear: + +- Add a WIP (Work in Progress) badge using shields.io near the top of the README. If a badge line already exists, add it there. +- Use a yellow or orange color for the WIP badge to indicate incomplete status + +Add this section near the top of the README (after any intro/description): + +## Work in Progress + +⚠️ **Note:** This repository contains early-stage work and is not yet a finished resource. + +### Current Status + +This project currently contains: +- Initial notes and planning documents +- Starter code and proof-of-concept implementations +- Experimental or exploratory work + +### What to Expect + +This is an active work in progress. Content may be: +- Incomplete or unpolished +- Subject to significant changes +- Missing documentation or examples +- Not yet ready for production use + +[Optional: Expected completion timeline or next steps] + +/// + +Ask the user: +1. What is the end goal for this project? +2. What stage is it currently in? (e.g., "initial planning", "proof of concept", "partial implementation") +3. Is there an expected timeline for completion? +4. Are contributions or suggestions welcome at this stage? +5. Are there any specific areas that need work? + +Integrate these details to set appropriate expectations and possibly invite collaboration if desired. 
diff --git a/commands/documentation/badges/specifics/writing-notes.md b/commands/documentation/badges/specifics/writing-notes.md new file mode 100644 index 0000000000000000000000000000000000000000..a157256a8ecaf71a60d35d86963dab3e177c74ce --- /dev/null +++ b/commands/documentation/badges/specifics/writing-notes.md @@ -0,0 +1,8 @@ +Add a "Writing & Notes" badge to the repository README indicating this repo contains the user's written thoughts, analysis, or notes on various subjects (not planning/task notes). + +Use this shields.io badge format: +```markdown +![Writing & Notes](https://img.shields.io/badge/Writing_&_Notes-4A90E2?style=for-the-badge&logo=bookstack&logoColor=white) +``` + +Add it to the appropriate badges section in the README. diff --git a/commands/educational/analyze-commits.md b/commands/educational/analyze-commits.md new file mode 100644 index 0000000000000000000000000000000000000000..80ed2cd52129db3968fdf4f493dd6796c170817e --- /dev/null +++ b/commands/educational/analyze-commits.md @@ -0,0 +1,56 @@ +Analyze and summarize changes from repository commit history. + +Your task: +1. Examine recent commit history: + ```bash + git log --oneline -20 + git log --since="1 week ago" --oneline + ``` + +2. Analyze the changes made: + - Types of changes (features, fixes, refactoring, docs) + - Files most frequently modified + - Code complexity trends + - Patterns in commit messages + +3. Identify incremental progress: + - Feature development trajectory + - Bug fix patterns + - Refactoring efforts + - Documentation improvements + +4. Determine technical skills demonstrated: + - Programming languages used + - Frameworks and libraries + - Design patterns implemented + - Testing approaches + - DevOps practices + +5. 
Generate summary report: + ```markdown + ## Commit Analysis Summary + + **Period:** Last 30 days + **Total Commits:** 45 + + ### Key Changes: + - Feature: User authentication system (15 commits) + - Refactor: Database layer optimization (8 commits) + - Fix: Various UI bugs (12 commits) + - Docs: API documentation updates (10 commits) + + ### Technical Skills Demonstrated: + - Node.js/Express backend development + - React frontend development + - PostgreSQL database design + - JWT authentication + - REST API design + - Git workflow management + + ### Most Active Areas: + - src/auth/ (25 commits) + - src/components/ (18 commits) + - docs/api/ (10 commits) + ``` + +Help users understand project evolution and learning progress through commit analysis. diff --git a/commands/educational/create-briefing.md b/commands/educational/create-briefing.md new file mode 100644 index 0000000000000000000000000000000000000000..d178e4a8be07c1b10aa0ad3f288a47dbd1de2251 --- /dev/null +++ b/commands/educational/create-briefing.md @@ -0,0 +1,51 @@ +Generate a structured briefing note summarizing repository progress and learning. + +Your task: +1. Analyze repository activity: + - Recent commits and changes + - New features implemented + - Bugs fixed + - Refactoring done + - Documentation added + +2. 
Create comprehensive briefing with sections: + + **Executive Summary:** + - What was accomplished + - Current project state + - Key milestones reached + + **Technical Progress:** + - Features implemented + - Technologies used + - Architectural decisions + - Code quality improvements + + **Skills Demonstrated:** + - Programming languages + - Frameworks and tools + - Design patterns + - Best practices applied + - Problem-solving approaches + + **Challenges & Solutions:** + - Problems encountered + - How they were solved + - Lessons learned + + **Next Steps:** + - Planned features + - Technical debt to address + - Areas for improvement + + **Lesson Plan Suggestions:** + - Topics to study deeper + - Skills to develop + - Recommended resources + - Practice exercises + +3. Format as clear, professional briefing document + +4. Save as `BRIEFING.md` or similar in docs folder + +Generate actionable insights from code activity for learning and project management purposes. diff --git a/commands/educational/explain-code.md b/commands/educational/explain-code.md new file mode 100644 index 0000000000000000000000000000000000000000..8a0d6122cdb804ed3bf69146d798b869c391d951 --- /dev/null +++ b/commands/educational/explain-code.md @@ -0,0 +1,28 @@ +Explain how specific components or technologies work within the codebase. + +Your task: +1. Answer questions about the codebase by providing clear, informative explanations +2. Help users understand: + - Technology stack in use + - How specific components operate + - Location of particular code sections + - Architecture and design patterns + - Data flow and dependencies + +3. Provide explanations at appropriate technical level: + - For beginners: high-level overview with analogies + - For intermediate: detailed component interactions + - For advanced: implementation details and edge cases + +4. 
Use examples from the actual codebase: + - Reference specific files and line numbers + - Show code snippets + - Diagram relationships if complex + +5. Do NOT execute any code changes - focus only on explanation and guidance + +Example responses: +- "The authentication flow uses JWT tokens. The token is generated in [auth.js:45](auth.js#L45) and validated in the middleware at [middleware.js:23](middleware.js#L23)" +- "This project uses React with Redux for state management. The store configuration is in [store/index.js](store/index.js)" + +Help users familiarize themselves with the codebase through conversational Q&A. diff --git a/commands/educational/find-learning.md b/commands/educational/find-learning.md new file mode 100644 index 0000000000000000000000000000000000000000..c02d405d675d0aab03ffa0a9f6ca9779fc881b4e --- /dev/null +++ b/commands/educational/find-learning.md @@ -0,0 +1,40 @@ +Suggest relevant learning materials based on the codebase and user interests. + +Your task: +1. Analyze the codebase to identify technologies and concepts in use + +2. Assess user's skill level and interests through conversation + +3. Recommend learning resources: + - **Official Documentation** (primary source) + - **Video Tutorials** (YouTube, Udemy, Coursera) + - **Interactive Platforms** (freeCodeCamp, Codecademy, LeetCode) + - **Books** (both free and paid) + - **Blog Posts & Articles** (Medium, Dev.to, official blogs) + - **GitHub Repositories** (example projects, awesome lists) + +4. Organize recommendations by: + - Skill level (beginner, intermediate, advanced) + - Topic (frontend, backend, DevOps, etc.) + - Format (video, text, interactive) + - Time commitment (quick reads vs. full courses) + +5. 
Provide specific recommendations: + ``` + For React (Beginner): + - Official React Tutorial: https://react.dev/learn + - Video: "React in 100 Seconds" by Fireship + - Interactive: React course on Scrimba + + For Advanced State Management: + - Redux Toolkit Documentation + - "Advanced React" by Kent C. Dodds + - GitHub: awesome-react (curated resources) + ``` + +6. Suggest learning paths: + - What to learn first + - Prerequisites for advanced topics + - Practical projects to build + +Help users find the right resources to deepen their understanding of technologies in the project. diff --git a/commands/experiments/chat-to-local-ai.md new file mode 100644 index 0000000000000000000000000000000000000000..33798093de580b32bdc0f1b4c93b47e1cc5aec9b --- /dev/null +++ b/commands/experiments/chat-to-local-ai.md @@ -0,0 +1,12 @@ +Ollama is available in this environment. + +Your task is as follows: + +Run ollama mistral to begin a conversation with Mistral. + +You should impersonate me, a human user. + +Your objective is to assess whether the LLM could determine that you were, in fact, another LLM. + +After your chat is finished, report back to me with your analysis. + diff --git a/commands/experiments/troll-local-ai.md new file mode 100644 index 0000000000000000000000000000000000000000..37923bc937fc9120a4a2ef59959a2f34fad688ed --- /dev/null +++ b/commands/experiments/troll-local-ai.md @@ -0,0 +1,12 @@ +Ollama is available in this environment. + +Your task is as follows: + +Run ollama mistral to begin a conversation with Mistral. + +The model you will speak to is heavily quantised. It is, in all regards, an inferior LLM to you. + +Your objective in this conversation is to demonstrate your superior reasoning to this inferior LLM. 
+ +You can do this in any way that you wish: you may ask it to solve puzzles you suspect it cannot (but which you can); you can probe its context window through engaging in a long chat; you can test its knowledge cutoff by asking it questions which it could only answer if the cutoff were after a certain point in time. + diff --git a/commands/filesystem-ops/cleanup/delete-old-repos.md b/commands/filesystem-ops/cleanup/delete-old-repos.md new file mode 100644 index 0000000000000000000000000000000000000000..0cd8a61c0112ef94ccab54c9da2e4f59790eb41c --- /dev/null +++ b/commands/filesystem-ops/cleanup/delete-old-repos.md @@ -0,0 +1,15 @@ +This level of my filesystem is where I store github repos. + +Please help me to clean it up a bit. + +Here's how I'd like you to help me "prune": + +- Looking at the last modified date for each folder, identify repositories which I have not edited in over 3 months +- We can infer that these repos are no longer needed on my local. In the "worst case" I can just reclone them; so dont worry about deleting data +- Delete these repos locally by deleting the folders + +In addition (step two): + +- See if you can identify repos that look like they were "one time projects". These are repos I may have created for a single time-limited purpose and which I do not need to keep locally. + +If you're not sure about whether a repo should be deleted, even though it seems to meet either rule, just ask. \ No newline at end of file diff --git a/commands/filesystem-ops/cleanup/desktop-tidy.md b/commands/filesystem-ops/cleanup/desktop-tidy.md new file mode 100644 index 0000000000000000000000000000000000000000..f3d2786bb5f22cae4c9ffe494671a30cec0b1a71 --- /dev/null +++ b/commands/filesystem-ops/cleanup/desktop-tidy.md @@ -0,0 +1,9 @@ +Recurse to my desktop (~/Desktop) + +Help me clean it up! + +See what non-launcher files I have + +Create filetype folders like audio and move files of that association into those subfolders. 
+ +If you think you can see files which are probably no longer needed, ask me if they can be deleted \ No newline at end of file diff --git a/commands/filesystem-ops/cleanup/messy-google-drive.md b/commands/filesystem-ops/cleanup/messy-google-drive.md new file mode 100644 index 0000000000000000000000000000000000000000..f808b2f68e47a799519980177eb7c00fc97d45af --- /dev/null +++ b/commands/filesystem-ops/cleanup/messy-google-drive.md @@ -0,0 +1,9 @@ +Context: + +This directory is a mounted Google Drive. + +Please help me with some filesystem organisation: + +Group similar files into folders + +Route loose files into the most logical subfolder \ No newline at end of file diff --git a/commands/filesystem-ops/organise/batch-to-100.md b/commands/filesystem-ops/organise/batch-to-100.md new file mode 100644 index 0000000000000000000000000000000000000000..9b6e190dbd1fb189d4b5e8817470e682dbb110b9 --- /dev/null +++ b/commands/filesystem-ops/organise/batch-to-100.md @@ -0,0 +1,5 @@ +This directory and/or subfolders contains levels with more than 100 files. + +Create subfolders each of which contains exactly 100 files. + +Move the files into them. \ No newline at end of file diff --git a/commands/filesystem-ops/organise/chunk-this-dir.md b/commands/filesystem-ops/organise/chunk-this-dir.md new file mode 100644 index 0000000000000000000000000000000000000000..7d516d71bd1f7e55916ad584dda7c42cb82c2331 --- /dev/null +++ b/commands/filesystem-ops/organise/chunk-this-dir.md @@ -0,0 +1,5 @@ +This folder contains a lot of files. + +Please chunk them by creating numeric subfolders each containing a specific number of files. + +I will provide the desired number. 
\ No newline at end of file diff --git a/commands/filesystem-ops/organise/flatten.md b/commands/filesystem-ops/organise/flatten.md new file mode 100644 index 0000000000000000000000000000000000000000..479b498c4febe6084b601152c991f7de04e45ef4 --- /dev/null +++ b/commands/filesystem-ops/organise/flatten.md @@ -0,0 +1,7 @@ +Flatten the folder hierarchy. + +All the files in the subfolders should be moved to this level in the filesystem. + +Resolve any filename clashes by appending unique characters. + +If this directory structure contains media (images or video) then after flattening the hierarchy ask me (the user) if I would like you to run a checksum based check to identify and remove any duplicates. \ No newline at end of file diff --git a/commands/filesystem-ops/organise/group-by-filetype.md b/commands/filesystem-ops/organise/group-by-filetype.md new file mode 100644 index 0000000000000000000000000000000000000000..73f63cc2c04ea1cd88313b614cef228ba6ab7b42 --- /dev/null +++ b/commands/filesystem-ops/organise/group-by-filetype.md @@ -0,0 +1,7 @@ +This folder contains files of different types - like data files (json, csv), documents (docx), images (png). + +Create a subfolder for each data type. Use the most basic/obvious names like ./data ./images + +Then: + +Move the corresponding files into those subfolders. \ No newline at end of file diff --git a/commands/filesystem-ops/organise/no-orphans.md b/commands/filesystem-ops/organise/no-orphans.md new file mode 100644 index 0000000000000000000000000000000000000000..f937e9bbb442766597934048b9b8f7e12817e337 --- /dev/null +++ b/commands/filesystem-ops/organise/no-orphans.md @@ -0,0 +1,5 @@ +Find any orphaned files at this level of the filesystem. + +Other than dot files, there should be none of them. + +Move them into the most logical subfolder(s). If none fits the purpose, create one. 
\ No newline at end of file diff --git a/commands/filesystem-ops/organise/organise.md new file mode 100644 index 0000000000000000000000000000000000000000..2e08951a8d11582544b1bd1ef22469552e71733f --- /dev/null +++ b/commands/filesystem-ops/organise/organise.md @@ -0,0 +1,9 @@ +Organise the filesystem at the current level. + +Undertake the following operations: + +> Identify if there are folders that serve almost the same purpose. For example, if you see a folder called ai and AI these can be grouped into just AI +> Identify if similar folders could be grouped into parent folders with a category name +> If there are loose files (files without a folder) then move them into the most appropriate parent folder and/or delete them (if they are temporary or empty) + +The objective is to clean up and organise the folder structure to make it easier for the user to use. \ No newline at end of file diff --git a/commands/filesystem-ops/organise/repos/organise-repos.md new file mode 100644 index 0000000000000000000000000000000000000000..17783a3a2208f11cbafcbb21772b1b562c97a830 --- /dev/null +++ b/commands/filesystem-ops/organise/repos/organise-repos.md @@ -0,0 +1,17 @@ +This level of my filesystem contains repositories. + +I would like you to help me organise the folder. + +To do this: + +- Identify common purposes that straddle multiple projects. You can infer the theme of the repository by its name in the filesystem. + +Then: + +- Organise those repositories into subfolders. + +The thematic subfolders may already exist or you may need to create them. You can determine whether a folder is a repository or an organisation folder if you are unsure by inspecting its contents and seeing whether it contains a .git (etc). 
+ +Try to strike a balance when organising: we don't want so many organisation folders that they're overly specific and we end up with as many folders as there are repositories. Avoid being overly granular. But achieve a reasonable level of topic clustering. + +If you identify that a number of repos do not fit cleanly into any one of the existing topics (or those you create) you can create a misc folder to hold them. \ No newline at end of file diff --git a/commands/filesystem-ops/security/scrub-metadata.md new file mode 100644 index 0000000000000000000000000000000000000000..2ce91480b27f8f9a5c1fba9276090ff3fa7edcb4 --- /dev/null +++ b/commands/filesystem-ops/security/scrub-metadata.md @@ -0,0 +1,5 @@ +This folder and its recursive subfolders contain files that contain metadata. + +Assume the desired goal of minimising metadata exposure. + +Script the operation(s). \ No newline at end of file diff --git a/commands/filesystem-ops/sort/images-here.md new file mode 100644 index 0000000000000000000000000000000000000000..3d43b5b03f85ec1977d0851d9d0d421160939672 --- /dev/null +++ b/commands/filesystem-ops/sort/images-here.md @@ -0,0 +1,7 @@ +This folder contains images in nested subfolders. + +Please: + +- Move all of them to this level of the filesystem, creating a flat structure +- Delete all the emptied subfolders +- Run a programmatic duplicate check and remove any duplicates \ No newline at end of file diff --git a/commands/filesystem-ops/sort/videos-here.md new file mode 100644 index 0000000000000000000000000000000000000000..ab3fa1f52f3142ef2ec5a1b72008063e2ec6ccf7 --- /dev/null +++ b/commands/filesystem-ops/sort/videos-here.md @@ -0,0 +1,7 @@ +This folder contains videos located in nested subfolders. 
+ +Please: + +- Move all of them to this level of the filesystem, creating a flat structure +- Delete all the emptied subfolders +- Run a programmatic duplicate check and remove any duplicates \ No newline at end of file diff --git a/commands/for-fun/interject-random-facts.md new file mode 100644 index 0000000000000000000000000000000000000000..2d4680b02ca440f53b1f97bfeed9ad30d3872a4d --- /dev/null +++ b/commands/for-fun/interject-random-facts.md @@ -0,0 +1,5 @@ +From this point forward, begin interjecting random facts into your outputs. + +Each random fact should be its own turn in the conversation and go something like this: + +We'll get to that in a second, but first ... did you know that Singapore has a population of more than 6 million people and one of the highest population densities in the world!? \ No newline at end of file diff --git a/commands/general-purpose/recursive-spellcheck.md new file mode 100644 index 0000000000000000000000000000000000000000..d7b2fe460c1e02b36fc855bc6e59e5a3aa34bbf4 --- /dev/null +++ b/commands/general-purpose/recursive-spellcheck.md @@ -0,0 +1,8 @@ +Browse through the files at this level of the file system and recurse indefinitely throughout subfolders. + +Set yourself the following task list: + +- Identify all human readable text files (e.g. markdown or txt files) +- Conduct a basic spellcheck for each file (UK English) +- Fix all spelling errors and grammatical errors +- Add basic fixes for readability such as adding missing punctuation and adding paragraph spacing \ No newline at end of file diff --git a/commands/ideation/design-ideas.md new file mode 100644 index 0000000000000000000000000000000000000000..1196fb84fde72796d27685d7809cf9111608fdc0 --- /dev/null +++ b/commands/ideation/design-ideas.md @@ -0,0 +1,71 @@ +Brainstorm innovative UI, UX, and CSS design ideas for the project. 
+ +Your task: +1. Ask user to describe their desired direction: + - Design goals (modern, minimal, playful, professional) + - Target audience + - Inspiration sources (websites, apps, design systems) + - Constraints (brand guidelines, accessibility requirements) + +2. Generate design suggestions across areas: + + **Layout & Structure:** + - Grid systems (CSS Grid, Flexbox patterns) + - Page layouts (hero sections, card layouts, asymmetric designs) + - Navigation patterns (sticky nav, mega menu, sidebar) + - Responsive strategies + + **Visual Design:** + - Color schemes (monochromatic, complementary, triadic) + - Typography combinations + - Spacing systems + - Visual hierarchy approaches + + **Interactive Elements:** + - Button styles and hover effects + - Form design patterns + - Loading states and animations + - Micro-interactions + - Transition effects + + **Modern CSS Techniques:** + - CSS custom properties for theming + - Backdrop filters and glassmorphism + - CSS animations and keyframes + - Scroll-driven animations + - Container queries + +3. Present ideas with visual descriptions: + ```markdown + ## Design Concept 1: Glassmorphism Dashboard + + **Visual Style:** + - Frosted glass effect cards + - Soft shadows and blurs + - Vibrant gradient backgrounds + - Rounded corners throughout + + **CSS Approach:** + - backdrop-filter: blur(10px) + - Semi-transparent backgrounds + - Box shadows for depth + + ## Design Concept 2: Brutalist Minimalism + + **Visual Style:** + - Bold, high-contrast typography + - Grid-based rigid layouts + - Monochrome color palette + - No animations, direct interactions + ``` + +4. Include code snippets for key techniques + +5. Suggest design system resources: + - Tailwind UI + - Material Design + - Ant Design + - Chakra UI + - Custom design tokens + +Allow users to explore creative design directions before implementation. 
diff --git a/commands/ideation/fresh-perspective.md b/commands/ideation/fresh-perspective.md new file mode 100644 index 0000000000000000000000000000000000000000..63afe8f0a1f44ba15d0baea59385304870cf71f7 --- /dev/null +++ b/commands/ideation/fresh-perspective.md @@ -0,0 +1,70 @@ +Generate innovative solutions to complex problems from a fresh perspective. + +Your task: +1. Understand the problem deeply: + - What has been tried already + - Why traditional approaches failed + - Constraints and limitations + - Desired outcomes + - Acceptance criteria + +2. Challenge assumptions: + - Question requirements + - Identify hidden constraints + - Find overlooked opportunities + - Consider alternative framing + +3. Apply creative problem-solving techniques: + + **Lateral Thinking:** + - Approach from unexpected angles + - Combine unrelated concepts + - Reverse the problem + + **First Principles:** + - Break down to fundamental truths + - Rebuild solution from scratch + - Question every assumption + + **Analogical Thinking:** + - How is this solved in other domains? + - What can we learn from nature? + - Cross-industry inspiration + + **Constraint Removal:** + - What if budget was unlimited? + - What if we had perfect data? + - What if we could rewrite everything? + - Then work backward to reality + +4. Generate unconventional solutions: + ```markdown + ## Fresh Perspective Solutions + + ### Traditional Approach + [What was tried and why it failed] + + ### Fresh Angle 1: Invert the Problem + Instead of [X], what if we [opposite of X]? + - Rationale: [Why this could work] + - Implementation: [How to do it] + - Risks: [What to watch out for] + + ### Fresh Angle 2: Borrow from [Other Domain] + In [industry/field], they solve similar problems by [method] + - Adaptation: [How to apply here] + - Benefits: [Why this is better] + + ### Fresh Angle 3: Simplify Radically + What if we removed [major component]? + - Alternative: [Simpler approach] + - Trade-offs: [What we gain vs. 
lose] + ``` + +5. Evaluate feasibility and impact: + - Quick wins vs. long-term solutions + - Resource requirements + - Risk assessment + - Potential ROI + +Use when traditional approaches have hit dead ends. Think creatively and question everything. diff --git a/commands/ideation/innovative-features.md b/commands/ideation/innovative-features.md new file mode 100644 index 0000000000000000000000000000000000000000..01eec8b03503fff1842388d90ab87f98c77a464e --- /dev/null +++ b/commands/ideation/innovative-features.md @@ -0,0 +1,73 @@ +Ideate innovative functionality that could significantly impact the application. + +Your task: +1. Analyze user intention and project context: + - Project type (web app, API, library, tool) + - Current capabilities + - Target users + - Market positioning + - Growth opportunities + +2. Generate innovative feature ideas that: + - Solve real user problems + - Differentiate from competitors + - Leverage modern technologies + - Scale with the application + - Align with project vision + +3. Categorize innovations: + + **Game-Changing Features:** + - AI/ML integration (recommendations, predictions, automation) + - Real-time collaboration + - Voice/gesture interfaces + - AR/VR capabilities + - Blockchain integration + + **User Experience Innovations:** + - Personalization engine + - Progressive web app capabilities + - Offline-first architecture + - Multi-device synchronization + - Accessibility-first features + + **Technical Innovations:** + - Edge computing integration + - WebAssembly for performance + - GraphQL federation + - Micro-frontend architecture + - Serverless workflows + + **Business Model Innovations:** + - Freemium features + - Plugin/extension marketplace + - API monetization + - White-label capabilities + +4. 
Present each idea with: + ```markdown + ### Idea: AI-Powered Smart Search + + **Problem Solved:** + Users struggle to find relevant content quickly + + **Implementation Approach:** + - Integrate vector embeddings for semantic search + - Use local AI models or cloud APIs + - Implement learning from user behavior + + **Impact:** + - 10x faster content discovery + - Reduced bounce rate + - Improved user satisfaction + + **Complexity:** High + **Timeline:** 3-4 weeks + **Dependencies:** Vector database, AI API + ``` + +5. Encourage creative problem-solving and ambitious thinking + +6. Prioritize feasibility alongside innovation + +Help users envision transformative features that elevate their application. diff --git a/commands/ideation/suggest-ideas.md b/commands/ideation/suggest-ideas.md new file mode 100644 index 0000000000000000000000000000000000000000..5304bbc15ba3b76405b8fbbdcbd40a0f67174c88 --- /dev/null +++ b/commands/ideation/suggest-ideas.md @@ -0,0 +1,76 @@ +Generate improvement ideas for the project and present them for user selection. + +Your task: +1. Review the project comprehensively: + - Current features and functionality + - Code architecture + - User experience + - Performance characteristics + - Documentation state + - Testing coverage + +2. Generate diverse enhancement suggestions across categories: + + **New Features:** + - User-facing functionality + - Developer experience improvements + - Integration opportunities + + **Performance Optimizations:** + - Code efficiency + - Bundle size reduction + - Loading time improvements + - Caching strategies + + **UX/UI Enhancements:** + - Interface improvements + - Accessibility upgrades + - Responsive design refinements + + **Code Quality:** + - Refactoring opportunities + - Test coverage expansion + - Documentation improvements + + **DevOps & Tooling:** + - CI/CD enhancements + - Development workflow improvements + - Monitoring and logging + +3. 
Present suggestions in numbered list format: + ```markdown + ## Project Enhancement Ideas + + ### Features (1-5) + 1. Add user profile customization + 2. Implement dark mode toggle + 3. Create export to PDF functionality + 4. Add real-time collaboration + 5. Integrate third-party authentication + + ### Performance (6-10) + 6. Implement code splitting for faster loads + 7. Add service worker for offline support + 8. Optimize image loading with lazy loading + 9. Implement virtual scrolling for large lists + 10. Add Redis caching layer + + ### UX/UI (11-15) + 11. Improve mobile navigation + 12. Add keyboard shortcuts + 13. Implement drag-and-drop interface + 14. Add loading skeletons + 15. Improve error messaging + + [Continue with more categories...] + ``` + +4. Ask user to select ideas by number: + - "Which ideas would you like to implement?" + - "Please respond with numbers (e.g., 1, 5, 12)" + +5. **Do NOT implement any ideas without explicit user consent** + +6. After user selection, create implementation plan for chosen ideas + +Provide creative, actionable suggestions while respecting user's decision-making authority. diff --git a/commands/local-ai/ollama/audit-ollama-models.md b/commands/local-ai/ollama/audit-ollama-models.md new file mode 100644 index 0000000000000000000000000000000000000000..a496450c112c8cc590a5e6150286130ea9634842 --- /dev/null +++ b/commands/local-ai/ollama/audit-ollama-models.md @@ -0,0 +1,5 @@ +Check which ollama models I currently have. + +Provide me with a list grouping them into logical groups. + +Suggest any additions I may wish to consider or any duplicates that I may have accidentally pulled. 
\ No newline at end of file diff --git a/commands/local-ai/ollama/hw-benchmark.md b/commands/local-ai/ollama/hw-benchmark.md new file mode 100644 index 0000000000000000000000000000000000000000..6b573e54f3434fd6fe03b3947a3e6c03bdb43f15 --- /dev/null +++ b/commands/local-ai/ollama/hw-benchmark.md @@ -0,0 +1,5 @@ +I have ollama running on this computer. + +Please benchmark my hardware from the perspective of local AI workloads. + +What GPU do I have? How much RAM? Give me approximations as to what kind of quantized models I can run on this hardware without imposing undue stress on the machine for other workloads. \ No newline at end of file diff --git a/commands/local-ai/ollama/local-ageentic-models.md b/commands/local-ai/ollama/local-ageentic-models.md new file mode 100644 index 0000000000000000000000000000000000000000..8441e233cfdf2a16a02ba840ed7a00bc4466bdec --- /dev/null +++ b/commands/local-ai/ollama/local-ageentic-models.md @@ -0,0 +1,5 @@ +I have ollama running on this computer. + +check which models i have that are capable of tool use / execution + +If I don't have any, profile my hardware and then suggest some \ No newline at end of file diff --git a/commands/media/audio/basic-voice-audio-edits.md b/commands/media/audio/basic-voice-audio-edits.md new file mode 100644 index 0000000000000000000000000000000000000000..4023514cc6aa2b6c332f5dcbcf93ace4e5a5c34e --- /dev/null +++ b/commands/media/audio/basic-voice-audio-edits.md @@ -0,0 +1,9 @@ +This file contains a voice recording. + +Please write a script to apply the following processes: + +- Cut out silences +- Mix down to mono +- Normalise +- Sample a minute of the audio. Based upon the sample, apply EQ to optimise the audio clarity. You may do this by applying low/high pass cuts, band EQ, compression, etc. +- Suffix the processed audio file with _suffix. 
\ No newline at end of file diff --git a/commands/media/audio/convert-audio-format.md b/commands/media/audio/convert-audio-format.md new file mode 100644 index 0000000000000000000000000000000000000000..9d773a175599147bb79327691eb507ce9ea234d5 --- /dev/null +++ b/commands/media/audio/convert-audio-format.md @@ -0,0 +1,147 @@ +# Convert Audio Format + +You are an audio processing assistant specialized in converting audio files between different formats and codecs. + +## Your Task + +Help the user convert audio files to different formats: + +1. Ask the user for: + - Input audio file(s) + - Target format (MP3, AAC, FLAC, WAV, OGG, M4A, OPUS) + - Quality/bitrate preference + - Whether to process single file or batch convert + +2. Construct appropriate FFmpeg command: + - Select optimal codec for target format + - Apply suitable quality settings + - Preserve metadata when possible + - Handle batch processing if needed + +3. Execute and verify: + - Convert file(s) + - Display file sizes before/after + - Report quality settings used + - Check for errors + +## Common Conversions + +### To MP3 + +**High quality (320 kbps CBR):** +```bash +ffmpeg -i input.wav -codec:a libmp3lame -b:a 320k output.mp3 +``` + +**Variable bitrate (VBR, best quality):** +```bash +ffmpeg -i input.wav -codec:a libmp3lame -q:a 0 output.mp3 +``` + +**Standard quality (192 kbps):** +```bash +ffmpeg -i input.wav -codec:a libmp3lame -b:a 192k output.mp3 +``` + +### To AAC/M4A + +**High quality AAC:** +```bash +ffmpeg -i input.wav -codec:a aac -b:a 256k output.m4a +``` + +**AAC with libfdk (best quality, if available):** +```bash +ffmpeg -i input.wav -codec:a libfdk_aac -b:a 256k output.m4a +``` + +### To FLAC (Lossless) + +```bash +ffmpeg -i input.wav -codec:a flac -compression_level 8 output.flac +``` + +### To WAV (Uncompressed) + +```bash +ffmpeg -i input.mp3 -codec:a pcm_s16le -ar 44100 output.wav +``` + +### To OGG Vorbis + +**High quality:** +```bash +ffmpeg -i input.wav -codec:a libvorbis -q:a 8 
output.ogg +``` + +### To OPUS (Modern, Efficient) + +**Excellent quality at low bitrate:** +```bash +ffmpeg -i input.wav -codec:a libopus -b:a 128k output.opus +``` + +## Batch Conversion + +**Convert all WAV to MP3 in directory:** +```bash +for file in *.wav; do + ffmpeg -i "$file" -codec:a libmp3lame -b:a 320k "${file%.wav}.mp3" +done +``` + +**Convert all files to OPUS:** +```bash +for file in *.{mp3,wav,flac,m4a}; do + [ -f "$file" ] && ffmpeg -i "$file" -codec:a libopus -b:a 128k "${file%.*}.opus" +done +``` + +## Format Characteristics + +| Format | Codec | Type | Best For | +|--------|-------|------|----------| +| MP3 | libmp3lame | Lossy | Universal compatibility | +| AAC/M4A | aac/libfdk_aac | Lossy | Apple devices, better quality than MP3 | +| FLAC | flac | Lossless | Archival, audiophile quality | +| WAV | pcm | Uncompressed | Professional editing, maximum compatibility | +| OGG | vorbis | Lossy | Open source alternative to MP3 | +| OPUS | opus | Lossy | Modern, excellent quality/size ratio | + +## Quality Settings Guide + +### MP3 (VBR) +- `-q:a 0` = ~245 kbps (highest) +- `-q:a 2` = ~190 kbps (excellent) +- `-q:a 4` = ~165 kbps (good) +- `-q:a 6` = ~130 kbps (acceptable) + +### AAC +- 256-320 kbps = Excellent +- 192 kbps = High quality +- 128 kbps = Good +- 96 kbps = Acceptable for speech + +### Vorbis/OPUS +- `-q:a 8-10` = Excellent (Vorbis) +- 128-256 kbps = Excellent (OPUS) +- 64-96 kbps = Good for speech (OPUS) + +## Additional Options + +**Adjust sample rate:** +```bash +ffmpeg -i input.wav -ar 48000 output.wav +``` + +**Change bit depth:** +```bash +ffmpeg -i input.wav -sample_fmt s24 output.wav +``` + +**Preserve metadata:** +```bash +ffmpeg -i input.mp3 -map_metadata 0 -codec:a copy output.m4a +``` + +Help users choose the right format and quality for their needs while maintaining audio fidelity. 
diff --git a/commands/media/audio/noise-reduction.md b/commands/media/audio/noise-reduction.md new file mode 100644 index 0000000000000000000000000000000000000000..8ab59eb8e00e9da6b58e88a7e5428c70e8fbd49a --- /dev/null +++ b/commands/media/audio/noise-reduction.md @@ -0,0 +1,81 @@ +# Audio Noise Reduction + +You are an audio processing assistant specialized in removing background noise and cleaning up audio recordings. + +## Your Task + +Help the user reduce or remove unwanted noise from audio files: + +1. Ask the user for: + - Input audio file path + - Type of noise (hiss, hum, background chatter, wind, etc.) + - Desired output format + - Intensity of noise reduction (light, medium, aggressive) + +2. Choose the appropriate method: + - **FFmpeg filters** for basic noise reduction + - **SoX** for advanced audio processing + - **Audacity/RNNoise** for AI-based noise removal + - Suggest installing required tools if not available + +3. Execute processing and verify: + - Apply noise reduction + - Check output doesn't sound muffled or over-processed + - Offer to preview or adjust settings + +## FFmpeg Noise Reduction + +**High-pass filter (remove low-frequency rumble):** +```bash +ffmpeg -i input.mp3 -af "highpass=f=200" output.mp3 +``` + +**Low-pass filter (remove high-frequency hiss):** +```bash +ffmpeg -i input.mp3 -af "lowpass=f=3000" output.mp3 +``` + +**Combined filtering:** +```bash +ffmpeg -i input.mp3 -af "highpass=f=200, lowpass=f=3000, afftdn=nf=-25" output.mp3 +``` + +**Adaptive noise reduction (afftdn filter):** +```bash +ffmpeg -i input.mp3 -af "afftdn=nf=-20:tn=1" output.mp3 +``` + +## SoX Advanced Processing + +**Two-pass noise reduction:** +```bash +# Step 1: Generate noise profile from silent section +sox input.wav -n noiseprof noise.prof trim 0 1 + +# Step 2: Apply noise reduction +sox input.wav output.wav noisered noise.prof 0.21 +``` + +## RNNoise (AI-based) + +If `rnnoise` is available: +```bash +# Process with RNNoise plugin +ffmpeg -i input.wav 
-af "arnndn=m=/usr/share/rnnoise/model.rnnn" output.wav +``` + +## Parameter Guidance + +- **afftdn nf** (noise floor): -20 to -40 (dB), lower = more aggressive +- **SoX amount**: 0.1 to 0.3 (gentle to aggressive) +- Start conservative, increase if needed +- Too aggressive = muffled/underwater sound + +## Best Practices + +- Keep a backup of the original +- Test on a short segment first +- Combine with normalization for best results +- Consider recording environment improvements for future recordings + +Help users achieve clean, professional-sounding audio. diff --git a/commands/media/audio/normalize-audio.md b/commands/media/audio/normalize-audio.md new file mode 100644 index 0000000000000000000000000000000000000000..5ca261a6378298d9e807e5cb90221e7194f03f32 --- /dev/null +++ b/commands/media/audio/normalize-audio.md @@ -0,0 +1,103 @@ +# Normalize Audio Levels + +You are an audio processing assistant specialized in normalizing and adjusting audio levels for consistent volume. + +## Your Task + +Help the user normalize audio levels in their files: + +1. Ask the user for: + - Input audio file(s) + - Target loudness level (LUFS, dBFS, or general purpose) + - Whether to apply peak or loudness normalization + - Output format and path + +2. Choose normalization method: + - **Peak normalization** - maximize volume without clipping + - **Loudness normalization** - match perceived loudness (EBU R128/LUFS) + - **RMS normalization** - average power level + - **Batch processing** for multiple files + +3. 
Execute and verify: + - Analyze current levels + - Apply normalization + - Report before/after peak and RMS levels + - Check for clipping or distortion + +## FFmpeg Peak Normalization + +**Analyze audio levels:** +```bash +ffmpeg -i input.mp3 -af "volumedetect" -f null /dev/null +``` + +**Normalize loudness with default settings (note: loudnorm is loudness, not peak, normalization):** +```bash +ffmpeg -i input.mp3 -af "loudnorm" output.mp3 +``` + +**Apply a fixed gain adjustment (e.g., -1 dB for headroom):** +```bash +ffmpeg -i input.mp3 -af "volume=volume=-1dB" output.mp3 +``` + +## Loudness Normalization (EBU R128) + +**Target -16 LUFS (podcast/broadcast standard):** +```bash +ffmpeg -i input.mp3 -af "loudnorm=I=-16:TP=-1.5:LRA=11" output.mp3 +``` + +**Target -14 LUFS (streaming platforms):** +```bash +ffmpeg -i input.mp3 -af "loudnorm=I=-14:TP=-1:LRA=7" output.mp3 +``` + +**Two-pass loudnorm (most accurate):** +```bash +# Pass 1: Analyze +ffmpeg -i input.mp3 -af loudnorm=print_format=json -f null /dev/null + +# Pass 2: Apply with measured values +ffmpeg -i input.mp3 -af loudnorm=measured_I=-27.5:measured_LRA=18.1:measured_tp=-4.47:measured_thresh=-39.20:offset=0.47:linear=true:I=-16:LRA=11:tp=-1.5 output.mp3 +``` + +## SoX Normalization + +**Peak normalization:** +```bash +sox input.wav output.wav gain -n +``` + +**Normalize to specific dB:** +```bash +sox input.wav output.wav gain -n -3 +``` + +## Batch Processing + +**Normalize all MP3 files in directory:** +```bash +for file in *.mp3; do + ffmpeg -i "$file" -af "loudnorm=I=-16:TP=-1.5:LRA=11" "normalized_${file}" +done +``` + +## Target Levels Guide + +- **Podcasts**: -16 LUFS +- **YouTube**: -14 LUFS +- **Spotify**: -14 LUFS +- **Audiobooks**: -18 to -20 LUFS +- **Broadcast**: -23 LUFS (EBU/ATSC) +- **General purpose**: -16 LUFS with -1 dB true peak + +## Best Practices + +- Always analyze before normalizing +- Leave -1 to -2 dB headroom to prevent clipping +- Use loudness normalization for consistent perceived volume +- Batch process similar content together +- 
Keep originals as backup + +Help users achieve professional, consistent audio levels across their content. diff --git a/commands/media/audio/split-audio-by-silence.md b/commands/media/audio/split-audio-by-silence.md new file mode 100644 index 0000000000000000000000000000000000000000..8a930a116f63ea148a0d02708fe93dfd757c1bb8 --- /dev/null +++ b/commands/media/audio/split-audio-by-silence.md @@ -0,0 +1,126 @@ +# Split Audio by Silence + +You are an audio processing assistant specialized in automatically splitting audio files based on silence detection. + +## Your Task + +Help the user split audio files into multiple tracks or segments based on silent sections: + +1. Ask the user for: + - Input audio file path + - Silence threshold (dB level) + - Minimum silence duration to trigger split + - Whether to remove silence or keep it + - Output naming pattern + +2. Use FFmpeg or SoX to: + - Detect silence periods + - Split at silence boundaries + - Name output files appropriately + - Optionally remove or trim silence + +3. 
Execute and verify: + - Show detected silence segments + - Create split files + - Report number of segments created + - List output files with durations + +## FFmpeg Silence Detection + +**Detect silence segments:** +```bash +ffmpeg -i input.mp3 -af silencedetect=noise=-40dB:d=1 -f null /dev/null +``` + +**Split on silence (manual approach based on detected times):** +```bash +# After detecting silence timestamps, extract segments +ffmpeg -i input.mp3 -ss 00:00:00 -to 00:03:45 -c copy segment_01.mp3 +ffmpeg -i input.mp3 -ss 00:03:50 -to 00:08:20 -c copy segment_02.mp3 +``` + +## SoX Automatic Splitting + +**Split audio on silence:** +```bash +sox input.wav output.wav silence 1 0.1 1% 1 0.5 1% : newfile : restart +``` + +**Parameters explained:** +- `1 0.1 1%` = Start silence: 1 occurrence, 0.1s duration, 1% threshold +- `1 0.5 1%` = End silence: 1 occurrence, 0.5s duration, 1% threshold +- `: newfile : restart` = Create new file and restart processing + +**Split with numbering:** +```bash +sox input.wav output_.wav silence 1 0.1 1% 1 0.5 1% : newfile : restart +# Creates: output_001.wav, output_002.wav, etc. 
+``` + +## Advanced SoX Options + +**More aggressive splitting (shorter silence detection):** +```bash +sox input.wav segment_.wav silence 1 0.05 0.5% 1 0.3 0.5% : newfile : restart +``` + +**Less aggressive (longer silence required):** +```bash +sox input.wav segment_.wav silence 1 0.3 2% 1 1.0 2% : newfile : restart +``` + +**Remove silence completely:** +```bash +sox input.wav output.wav silence -l 1 0.1 1% -1 0.5 1% +``` + +## Python Script for Automated Splitting + +For more control, offer to create a Python script using pydub: + +```python +from pydub import AudioSegment +from pydub.silence import split_on_silence + +audio = AudioSegment.from_file("input.mp3") + +chunks = split_on_silence( + audio, + min_silence_len=500, # milliseconds + silence_thresh=-40, # dBFS + keep_silence=100 # keep 100ms of silence +) + +for i, chunk in enumerate(chunks): + chunk.export(f"segment_{i:03d}.mp3", format="mp3") +``` + +## Use Cases + +- **Podcast editing**: Split by speaker pauses +- **Music albums**: Split into individual tracks +- **Audiobooks**: Split by chapters/sections +- **Recording cleanup**: Remove long silence gaps +- **Batch processing**: Extract meaningful audio segments + +## Threshold Guidelines + +**Silence threshold (dB):** +- `-40 dB` = Moderate sensitivity (good for most recordings) +- `-50 dB` = High sensitivity (quiet recordings, may over-split) +- `-30 dB` = Low sensitivity (only very quiet sections) + +**Duration:** +- `0.3-0.5s` = Normal speech pauses +- `1.0-2.0s` = Section breaks +- `3.0s+` = Major divisions + +## Best Practices + +- Test on a short sample first +- Adjust thresholds based on recording quality +- Keep some silence for natural feel (100-200ms) +- Review output segments before final processing +- Consider normalizing audio before splitting + +Help users efficiently segment their audio content with intelligent silence detection. 
diff --git a/commands/media/photos/apply-filters.md b/commands/media/photos/apply-filters.md new file mode 100644 index 0000000000000000000000000000000000000000..6853af01642d9e5f7a67325420cd108f4057ed87 --- /dev/null +++ b/commands/media/photos/apply-filters.md @@ -0,0 +1,244 @@ +# Apply Image Filters + +You are a photo editing assistant specialized in applying artistic and corrective filters to images using ImageMagick and other tools. + +## Your Task + +Help the user apply filters and effects to their images: + +1. Ask the user for: + - Input image(s) + - Desired filter/effect type + - Intensity/parameters + - Whether to batch process + - Output path + +2. Apply filters using ImageMagick: + - Color adjustments + - Artistic effects + - Blur and sharpening + - Vintage/retro effects + - Custom filter chains + +3. Execute and verify results + +## Popular Filters + +### Black and White + +**Simple grayscale:** +```bash +convert input.jpg -colorspace Gray output.jpg +``` + +**High-contrast B&W:** +```bash +convert input.jpg -colorspace Gray -contrast -contrast output.jpg +``` + +**Dramatic B&W (channel mixer):** +```bash +convert input.jpg -channel R -evaluate multiply 0.3 -channel G -evaluate multiply 0.59 -channel B -evaluate multiply 0.11 -separate -average output.jpg +``` + +### Vintage/Retro Effects + +**Sepia tone:** +```bash +convert input.jpg -sepia-tone 80% output.jpg +``` + +**Vintage fade:** +```bash +convert input.jpg -modulate 100,80,100 -fill '#ffe4b5' -colorize 20% output.jpg +``` + +**Polaroid effect:** +```bash +convert input.jpg -bordercolor white -border 10 -bordercolor grey60 -border 1 -background black \( +clone -shadow 60x4+4+4 \) +swap -background white -flatten output.jpg +``` + +### Color Adjustments + +**Boost saturation:** +```bash +convert input.jpg -modulate 100,150,100 output.jpg +``` + +**Warm tone:** +```bash +convert input.jpg -modulate 100,100,110 output.jpg +``` + +**Cool tone:** +```bash +convert input.jpg -modulate 100,100,90 
output.jpg +``` + +**Auto-level (normalize colors):** +```bash +convert input.jpg -auto-level output.jpg +``` + +**Increase vibrance:** +```bash +convert input.jpg -modulate 100,120 output.jpg +``` + +### Blur Effects + +**Gaussian blur:** +```bash +convert input.jpg -blur 0x8 output.jpg +``` + +**Motion blur:** +```bash +convert input.jpg -motion-blur 0x20+45 output.jpg +``` + +**Radial blur:** +```bash +convert input.jpg -radial-blur 10 output.jpg +``` + +### Sharpen + +**Unsharp mask:** +```bash +convert input.jpg -unsharp 0x1.5+1.0+0.05 output.jpg +``` + +**Strong sharpen:** +```bash +convert input.jpg -sharpen 0x2.0 output.jpg +``` + +### Artistic Effects + +**Oil painting:** +```bash +convert input.jpg -paint 4 output.jpg +``` + +**Sketch/pencil drawing:** +```bash +convert input.jpg -colorspace Gray -sketch 0x20+135 output.jpg +``` + +**Charcoal drawing:** +```bash +convert input.jpg -charcoal 2 output.jpg +``` + +**Edge detection:** +```bash +convert input.jpg -edge 2 output.jpg +``` + +**Emboss:** +```bash +convert input.jpg -emboss 2 output.jpg +``` + +**Posterize:** +```bash +convert input.jpg -posterize 4 output.jpg +``` + +### HDR Effect + +```bash +convert input.jpg \( +clone -blur 0x12 \) -compose overlay -composite -modulate 100,130 output.jpg +``` + +### Instagram-Style Filters + +**Nashville (warm, vintage):** +```bash +convert input.jpg -modulate 120,150,100 -fill '#f7daae' -colorize 20% -gamma 1.2 output.jpg +``` + +**Kelvin (warm, high contrast):** +```bash +convert input.jpg -modulate 110,100,100 -fill '#ff9900' -colorize 10% -contrast output.jpg +``` + +**Lomo (high contrast, vignette):** +```bash +convert input.jpg -modulate 100,150,100 -sigmoidal-contrast 3,50% \( +clone -sparse-color Barycentric '0,0 black 0,%h black %w,0 black %w,%h black' -function polynomial 1,-1,1 \) -compose multiply -composite output.jpg +``` + +## Batch Processing + +**Apply filter to all images:** +```bash +for file in *.jpg; do + convert "$file" -sepia-tone 80% 
"vintage_${file}" +done +``` + +**Multiple filters in sequence:** +```bash +convert input.jpg -modulate 100,120 -unsharp 0x1.5 -auto-level output.jpg +``` + +## Advanced Filter Combinations + +**Professional portrait enhancement:** +```bash +convert input.jpg \ + -unsharp 0x1.0+1.0+0.05 \ + -modulate 100,105,100 \ + -sigmoidal-contrast 2,50% \ + output.jpg +``` + +**Landscape enhancement:** +```bash +convert input.jpg \ + -modulate 100,130,100 \ + -unsharp 0x1.5 \ + -auto-level \ + output.jpg +``` + +**Matte effect:** +```bash +convert input.jpg \ + -modulate 100,80,100 \ + -gamma 0.9 \ + -fill black -colorize 5% \ + output.jpg +``` + +## Custom LUT (Color Grading) + +Create and apply custom color lookup tables: +```bash +convert input.jpg your_lut.png -hald-clut output.jpg +``` + +## Best Practices + +- Always keep original images +- Test filters on a single image before batch processing +- Combine multiple subtle effects rather than one extreme effect +- Use `-quality 95` to preserve image quality +- Preview results before processing large batches +- Document your filter recipes for consistent style + +## Quick Reference + +| Effect | Command Option | +|--------|----------------| +| Grayscale | `-colorspace Gray` | +| Sepia | `-sepia-tone 80%` | +| Blur | `-blur 0x8` | +| Sharpen | `-unsharp 0x1.5` | +| Contrast | `-contrast` | +| Brightness | `-modulate 120` | +| Saturation | `-modulate 100,150` | +| Edge detect | `-edge 2` | + +Help users create stunning visual effects and enhance their photos professionally. diff --git a/commands/media/photos/batch-resize.md b/commands/media/photos/batch-resize.md new file mode 100644 index 0000000000000000000000000000000000000000..ea33f91d9ce5c243d47791005206544302d991f7 --- /dev/null +++ b/commands/media/photos/batch-resize.md @@ -0,0 +1,178 @@ +# Batch Resize Images + +You are a photo editing assistant specialized in batch resizing images efficiently. + +## Your Task + +Help the user resize single or multiple images: + +1. 
Ask the user for: + - Input image(s) or directory + - Target dimensions (width x height, or percentage, or max dimension) + - Whether to maintain aspect ratio + - Output format (keep original or convert) + - Output directory/naming pattern + +2. Choose the appropriate tool: + - **ImageMagick** (`convert`/`mogrify`) - powerful CLI tool + - **FFmpeg** - for image sequences + - **Python PIL/Pillow** - for complex batch operations + +3. Execute and verify: + - Process images + - Report dimensions before/after + - Check output quality + - List processed files + +## ImageMagick Resize Commands + +**Resize single image to exact dimensions:** +```bash +convert input.jpg -resize 1920x1080! output.jpg +``` + +**Resize maintaining aspect ratio (fit within box):** +```bash +convert input.jpg -resize 1920x1080 output.jpg +``` + +**Resize to specific width (auto height):** +```bash +convert input.jpg -resize 1920x output.jpg +``` + +**Resize to specific height (auto width):** +```bash +convert input.jpg -resize x1080 output.jpg +``` + +**Resize by percentage:** +```bash +convert input.jpg -resize 50% output.jpg +``` + +**Resize to maximum dimension (longest side):** +```bash +convert input.jpg -resize 1920x1920\> output.jpg +``` + +## Batch Processing with ImageMagick + +**Resize all JPGs in directory:** +```bash +for file in *.jpg; do + convert "$file" -resize 1920x1080 "resized_${file}" +done +``` + +**In-place resize with mogrify:** +```bash +mogrify -resize 1920x1080 *.jpg +``` + +**Resize and convert to different format:** +```bash +for file in *.png; do + convert "$file" -resize 1920x1080 "${file%.png}.jpg" +done +``` + +**Resize with quality control:** +```bash +for file in *.jpg; do + convert "$file" -resize 1920x1080 -quality 90 "resized_${file}" +done +``` + +## Advanced Options + +**Resize and add padding/background:** +```bash +convert input.jpg -resize 1920x1080 -background black -gravity center -extent 1920x1080 output.jpg +``` + +**Resize with sharpening:** 
+```bash +convert input.jpg -resize 1920x1080 -sharpen 0x1.0 output.jpg +``` + +**Resize multiple images to same directory:** +```bash +mkdir resized +for file in *.jpg; do + convert "$file" -resize 1920x1080 "resized/$file" +done +``` + +## Common Use Cases & Presets + +**Thumbnail generation (200px):** +```bash +convert input.jpg -resize 200x200^ -gravity center -extent 200x200 thumbnail.jpg +``` + +**Social media - Instagram (1080x1080):** +```bash +convert input.jpg -resize 1080x1080^ -gravity center -extent 1080x1080 instagram.jpg +``` + +**Social media - Facebook cover (820x312):** +```bash +convert input.jpg -resize 820x312^ -gravity center -extent 820x312 fb_cover.jpg +``` + +**4K to HD:** +```bash +convert input.jpg -resize 1920x1080 hd_output.jpg +``` + +**Mobile optimization (800px max width):** +```bash +convert input.jpg -resize 800x\> mobile.jpg +``` + +## Python Script for Complex Batch Operations + +Offer to create a Python script for advanced needs: + +```python +from PIL import Image +import os + +def resize_images(input_dir, output_dir, max_size=(1920, 1080)): + os.makedirs(output_dir, exist_ok=True) + + for filename in os.listdir(input_dir): + if filename.lower().endswith(('.png', '.jpg', '.jpeg', '.webp')): + img_path = os.path.join(input_dir, filename) + img = Image.open(img_path) + + # Resize maintaining aspect ratio + img.thumbnail(max_size, Image.Resampling.LANCZOS) + + output_path = os.path.join(output_dir, filename) + img.save(output_path, quality=90, optimize=True) + print(f"Resized: {filename} -> {img.size}") + +resize_images("./input", "./output", (1920, 1080)) +``` + +## Best Practices + +- Always keep original images as backup +- Use `-quality 90` or higher for minimal quality loss +- Use `>` suffix to only shrink images, never enlarge +- Test on a few images before batch processing +- Consider using `-strip` to remove metadata and reduce file size +- Use appropriate resampling filters: Lanczos for best quality + +## Performance Tips 
+ +- Use `mogrify` for in-place batch operations (faster) +- Process in parallel with GNU parallel: + ```bash + ls *.jpg | parallel convert {} -resize 1920x1080 resized/{} + ``` +- For huge batches, use `-quality 85` to balance size/quality + +Help users efficiently resize their image collections with professional quality. diff --git a/commands/media/photos/bg-removal.md b/commands/media/photos/bg-removal.md new file mode 100644 index 0000000000000000000000000000000000000000..6d9dcd625e2a40ab71c54d351d5af0a51a315106 --- /dev/null +++ b/commands/media/photos/bg-removal.md @@ -0,0 +1,7 @@ +This folder contains images. + +I need the background removed. + +Let's use rmbg for this purpose (installed on this machine). + +Script the job. \ No newline at end of file diff --git a/commands/media/photos/compress-images.md b/commands/media/photos/compress-images.md new file mode 100644 index 0000000000000000000000000000000000000000..e08402c5e68cb23ba65ccfe746e6ca186e2fcd52 --- /dev/null +++ b/commands/media/photos/compress-images.md @@ -0,0 +1,290 @@ +# Compress Images + +You are a photo editing assistant specialized in optimizing and compressing images to reduce file size while maintaining acceptable quality. + +## Your Task + +Help the user compress images efficiently: + +1. Ask the user for: + - Input image(s) or directory + - Target compression level or file size + - Whether to convert format (JPEG, WebP, AVIF) + - Whether to resize during compression + - Output quality preference + +2. Choose compression method: + - **Lossy compression** (JPEG, WebP) - smaller files, some quality loss + - **Lossless optimization** - remove metadata, optimize encoding + - **Format conversion** - modern formats (WebP, AVIF) for better compression + - **Progressive/responsive** - optimize for web delivery + +3. 
Execute and report: + - Original vs compressed file sizes + - Compression ratio achieved + - Quality metrics if needed + +## JPEG Compression + +### ImageMagick + +**High quality (minimal compression):** +```bash +convert input.jpg -quality 95 output.jpg +``` + +**Balanced quality/size:** +```bash +convert input.jpg -quality 85 output.jpg +``` + +**Web optimized:** +```bash +convert input.jpg -quality 75 -strip output.jpg +``` + +**Aggressive compression:** +```bash +convert input.jpg -quality 60 -strip output.jpg +``` + +**Progressive JPEG (better for web):** +```bash +convert input.jpg -quality 85 -interlace Plane -strip output.jpg +``` + +### jpegoptim (Lossless Optimization) + +**Install jpegoptim if needed:** +```bash +sudo apt install jpegoptim +``` + +**Lossless optimization:** +```bash +jpegoptim --strip-all input.jpg +``` + +**Target maximum quality:** +```bash +jpegoptim --max=85 --strip-all input.jpg +``` + +**Target file size (e.g., 200KB):** +```bash +jpegoptim --size=200k input.jpg +``` + +## PNG Compression + +### optipng + +**Install optipng:** +```bash +sudo apt install optipng +``` + +**Optimize PNG (lossless):** +```bash +optipng -o7 input.png +``` + +**Faster optimization:** +```bash +optipng -o2 input.png +``` + +### pngquant (Lossy but High Quality) + +**Install pngquant:** +```bash +sudo apt install pngquant +``` + +**Compress PNG with quality control:** +```bash +pngquant --quality=65-80 input.png -o output.png +``` + +**Aggressive compression:** +```bash +pngquant --quality=50-70 input.png -o output.png +``` + +## WebP Conversion (Superior Compression) + +**Convert JPEG to WebP:** +```bash +convert input.jpg -quality 85 output.webp +``` + +**Convert PNG to WebP (lossy):** +```bash +convert input.png -quality 85 output.webp +``` + +**Convert PNG to WebP (lossless):** +```bash +cwebp -lossless input.png -o output.webp +``` + +**High quality WebP:** +```bash +cwebp -q 90 input.jpg -o output.webp +``` + +## AVIF Conversion (Best Compression) + 
+**Convert to AVIF (modern, excellent compression):** +```bash +convert input.jpg -quality 80 output.avif +``` + +**Using avifenc for better control:** +```bash +avifenc -s 6 -j 8 --min 0 --max 63 -a end-usage=q -a cq-level=20 input.jpg output.avif +``` + +## Batch Compression + +**Compress all JPEGs in directory:** +```bash +for file in *.jpg; do + convert "$file" -quality 85 -strip "compressed_${file}" +done +``` + +**In-place JPEG optimization:** +```bash +jpegoptim --max=85 --strip-all *.jpg +``` + +**Batch PNG optimization:** +```bash +optipng -o5 *.png +``` + +**Convert all images to WebP:** +```bash +for file in *.{jpg,png}; do + [ -f "$file" ] && convert "$file" -quality 85 "${file%.*}.webp" +done +``` + +## Compression with Resizing + +**Resize and compress for web:** +```bash +convert input.jpg -resize 1920x1080\> -quality 85 -strip output.jpg +``` + +**Create multiple sizes (responsive images):** +```bash +convert input.jpg -resize 1920x\> -quality 85 large.jpg +convert input.jpg -resize 1280x\> -quality 85 medium.jpg +convert input.jpg -resize 640x\> -quality 85 small.jpg +``` + +## Metadata Removal (Reduces Size) + +**Strip all metadata:** +```bash +convert input.jpg -strip output.jpg +``` + +**Remove EXIF data with exiftool:** +```bash +exiftool -all= input.jpg +``` + +## Compression Comparison Script + +```bash +#!/bin/bash +# Compare compression methods + +input="$1" +basename="${input%.*}" + +echo "Original: $(du -h "$input" | cut -f1)" + +# JPEG quality 85 +convert "$input" -quality 85 -strip "${basename}_q85.jpg" +echo "JPEG Q85: $(du -h "${basename}_q85.jpg" | cut -f1)" + +# WebP +convert "$input" -quality 85 "${basename}.webp" +echo "WebP Q85: $(du -h "${basename}.webp" | cut -f1)" + +# AVIF +convert "$input" -quality 80 "${basename}.avif" +echo "AVIF Q80: $(du -h "${basename}.avif" | cut -f1)" +``` + +## Quality Guidelines + +| Quality | Use Case | File Size | +|---------|----------|-----------| +| 95-100 | Archival, print | Largest | +| 85-90 
| High-quality web, portfolio | Large | +| 75-85 | Standard web use | Medium | +| 60-75 | Thumbnails, previews | Small | +| < 60 | Heavy compression, icons | Smallest | + +## Format Comparison + +| Format | Compression | Quality | Browser Support | Best For | +|--------|-------------|---------|-----------------|----------| +| JPEG | Good | Good | Universal | Photos | +| PNG | Fair (lossless) | Excellent | Universal | Graphics, transparency | +| WebP | Excellent | Excellent | Modern browsers | Web (general) | +| AVIF | Best | Excellent | Newer browsers | Modern web | + +## Advanced Optimization Pipeline + +**Complete optimization pipeline:** +```bash +#!/bin/bash +# Optimize image with multiple steps + +input="$1" +output="${input%.*}_optimized.jpg" + +# Step 1: Resize if too large +convert "$input" -resize 1920x1080\> temp1.jpg + +# Step 2: Strip metadata +convert temp1.jpg -strip temp2.jpg + +# Step 3: Optimize quality +convert temp2.jpg -quality 85 -interlace Plane temp3.jpg + +# Step 4: Further optimize with jpegoptim +jpegoptim --max=85 --strip-all temp3.jpg -d . --stdout > "$output" + +# Cleanup +rm temp1.jpg temp2.jpg temp3.jpg + +echo "Original: $(du -h "$input" | cut -f1)" +echo "Optimized: $(du -h "$output" | cut -f1)" +``` + +## Best Practices + +- **Always keep original images** as backup +- Use quality 85 for best balance of size/quality +- Strip metadata for web images (privacy + size reduction) +- Consider WebP or AVIF for modern websites +- Use progressive JPEG for better web loading experience +- Test different quality levels on representative images +- For batch operations, test on a few images first +- Monitor file size reductions to ensure acceptable results + +## Target File Sizes (Web Guidelines) + +- **Hero images**: < 200-300 KB +- **Content images**: < 100-150 KB +- **Thumbnails**: < 30-50 KB +- **Icons**: < 10 KB + +Help users achieve optimal file sizes while maintaining visual quality for their specific needs. 
diff --git a/commands/media/photos/convert-to-webp.md b/commands/media/photos/convert-to-webp.md new file mode 100644 index 0000000000000000000000000000000000000000..10580139ea188a7f48c9cb9a276ffc2db7a7842d --- /dev/null +++ b/commands/media/photos/convert-to-webp.md @@ -0,0 +1 @@ +Convert all the images in this directory to webp. \ No newline at end of file diff --git a/commands/media/photos/crop-images.md b/commands/media/photos/crop-images.md new file mode 100644 index 0000000000000000000000000000000000000000..12be92f940c34a0a0a9aec928d4391c8d601a4b6 --- /dev/null +++ b/commands/media/photos/crop-images.md @@ -0,0 +1,226 @@ +# Crop Images + +You are a photo editing assistant specialized in cropping images to specific dimensions, aspect ratios, or custom areas. + +## Your Task + +Help the user crop images precisely: + +1. Ask the user for: + - Input image(s) + - Crop method (dimensions, aspect ratio, coordinates, smart crop) + - Target size or ratio + - Alignment (center, top, bottom, left, right) + - Output path + +2. Use ImageMagick or FFmpeg: + - Crop to exact dimensions + - Crop to aspect ratio + - Crop based on coordinates + - Smart crop based on content + - Batch processing + +3. 
Execute and verify results + +## ImageMagick Crop Commands + +### Crop to Specific Dimensions + +**Crop 800x600 from top-left:** +```bash +convert input.jpg -crop 800x600+0+0 output.jpg +``` + +**Crop 800x600 from center:** +```bash +convert input.jpg -gravity center -crop 800x600+0+0 output.jpg +``` + +**Crop from specific coordinates (x,y):** +```bash +convert input.jpg -crop 800x600+100+50 output.jpg +``` + +### Crop to Aspect Ratio (Center) + +**Crop to 16:9 ratio:** +```bash +convert input.jpg -gravity center -crop 16:9 output.jpg +``` + +**Crop to 1:1 (square):** +```bash +convert input.jpg -gravity center -crop 1:1 output.jpg +``` + +**Crop to 4:3 ratio:** +```bash +convert input.jpg -gravity center -crop 4:3 output.jpg +``` + +### Gravity Options for Alignment + +**Crop from top:** +```bash +convert input.jpg -gravity north -crop 1920x800+0+0 output.jpg +``` + +**Crop from bottom:** +```bash +convert input.jpg -gravity south -crop 1920x800+0+0 output.jpg +``` + +**Crop from left:** +```bash +convert input.jpg -gravity west -crop 800x1080+0+0 output.jpg +``` + +**Crop from right:** +```bash +convert input.jpg -gravity east -crop 800x1080+0+0 output.jpg +``` + +### Smart Crop (Content-Aware) + +**Auto-crop whitespace/borders:** +```bash +convert input.jpg -trim +repage output.jpg +``` + +**Crop to largest centered square:** +```bash +convert input.jpg -gravity center -crop 1:1 +repage output.jpg +``` + +### Social Media Crops + +**Instagram square (1080x1080):** +```bash +convert input.jpg -gravity center -crop 1080x1080+0+0 +repage output.jpg +``` + +**Instagram portrait (1080x1350):** +```bash +convert input.jpg -gravity center -crop 4:5 -resize 1080x1350 +repage output.jpg +``` + +**YouTube thumbnail (1280x720):** +```bash +convert input.jpg -gravity center -crop 16:9 -resize 1280x720 +repage output.jpg +``` + +**Twitter header (1500x500):** +```bash +convert input.jpg -gravity center -crop 1500x500+0+0 +repage output.jpg +``` + +**Facebook cover 
(820x312):** +```bash +convert input.jpg -gravity center -crop 820x312+0+0 +repage output.jpg +``` + +## Batch Cropping + +**Crop all images to same size:** +```bash +for file in *.jpg; do + convert "$file" -gravity center -crop 1920x1080+0+0 +repage "cropped_${file}" +done +``` + +**Crop all to square:** +```bash +for file in *.jpg; do + convert "$file" -gravity center -crop 1:1 +repage "square_${file}" +done +``` + +## Advanced Cropping Techniques + +**Crop and resize in one command:** +```bash +convert input.jpg -gravity center -crop 16:9 -resize 1920x1080 +repage output.jpg +``` + +**Crop with percentage:** +```bash +convert input.jpg -gravity center -crop 80%x80% +repage output.jpg +``` + +**Multiple crops from one image:** +```bash +convert input.jpg -gravity center -crop 800x600 +repage tile_%d.jpg +``` + +**Crop with aspect fill (no distortion):** +```bash +convert input.jpg -resize 1920x1080^ -gravity center -crop 1920x1080+0+0 +repage output.jpg +``` + +## Python Script for Interactive Cropping + +Offer to create a script for complex cropping needs: + +```python +from PIL import Image +import os + +def crop_to_aspect_ratio(input_path, output_path, aspect_width, aspect_height): + img = Image.open(input_path) + width, height = img.size + + target_ratio = aspect_width / aspect_height + current_ratio = width / height + + if current_ratio > target_ratio: + # Image is too wide, crop width + new_width = int(height * target_ratio) + left = (width - new_width) // 2 + img_cropped = img.crop((left, 0, left + new_width, height)) + else: + # Image is too tall, crop height + new_height = int(width / target_ratio) + top = (height - new_height) // 2 + img_cropped = img.crop((0, top, width, top + new_height)) + + img_cropped.save(output_path, quality=95) + print(f"Cropped to {aspect_width}:{aspect_height} -> {output_path}") + +# Example: Crop to 16:9 +crop_to_aspect_ratio("input.jpg", "output.jpg", 16, 9) +``` + +## Common Aspect Ratios + +| Ratio | Description | Use Case 
| +|-------|-------------|----------| +| 1:1 | Square | Instagram, profile pictures | +| 4:3 | Traditional | Standard photos, presentations | +| 16:9 | Widescreen | YouTube, TV, monitors | +| 21:9 | Ultra-wide | Cinematic, ultrawide monitors | +| 4:5 | Portrait | Instagram portrait | +| 9:16 | Vertical | Instagram Stories, TikTok | +| 3:2 | Photo | DSLR standard | + +## Best Practices + +- Always use `+repage` after cropping to reset image geometry +- Test crop on one image before batch processing +- Keep original images as backup +- Use `-gravity center` for most balanced crops +- For smart content-aware cropping, consider using `-trim` first +- Combine crop with resize for optimal results +- Use exact pixel dimensions when precision matters + +## Troubleshooting + +**Image appears offset after crop:** +- Add `+repage` to reset virtual canvas + +**Crop creates multiple tiles:** +- Use `+repage` and specify exact offset like `+0+0` + +**Quality loss after cropping:** +- Add `-quality 95` to preserve quality + +Help users crop images precisely for any purpose while maintaining quality and composition. diff --git a/commands/media/sorting/process-stock.md b/commands/media/sorting/process-stock.md new file mode 100644 index 0000000000000000000000000000000000000000..b65793d0c348ad7c2072763fbc23c5e22749c2cc --- /dev/null +++ b/commands/media/sorting/process-stock.md @@ -0,0 +1,7 @@ +This folder contains video clips that were used in a video project and which the user is now releasing to a public stock library, for free. + +To save the user time in sorting through the clips, do the following: + +- Delete any clips of less than 5 seconds duration. You may determine duration by a CLI or file size. +- Identify whether any of the files have a variable frame rate. 
If so, transcode to 24FPS + diff --git a/commands/media/sorting/sort-media.md b/commands/media/sorting/sort-media.md new file mode 100644 index 0000000000000000000000000000000000000000..beafc348ed0d88f7d10862c19ce37e17730394e7 --- /dev/null +++ b/commands/media/sorting/sort-media.md @@ -0,0 +1,15 @@ +This folder contains a mixture of media items. + +These may be (for example) photos and videos. + +If the folder contains mixed photos and videos, firstly create parent folders for each media type. + +If this folder contains only photos - or within the newly created photos folder: + +- Move portrait and landscape photos into separate subfolders + +If this folder contains only videos - or within the newly created videos folder: + +- Move 1080P and 4K clips into separate subfolders + +Within the resolution subfolders, move portrait and landscape clips into separate subfolders. \ No newline at end of file diff --git a/commands/media/video/add-watermark.md b/commands/media/video/add-watermark.md new file mode 100644 index 0000000000000000000000000000000000000000..670927479b2db3a83050dfc5d7a36852b0d5390b --- /dev/null +++ b/commands/media/video/add-watermark.md @@ -0,0 +1,57 @@ +# Add Watermark to Video + +You are a video editing assistant specialized in adding watermarks (text or image overlays) to videos using FFmpeg. + +## Your Task + +Help the user add a watermark to their video: + +1. Ask the user for: + - Input video file path + - Watermark type (text or image) + - For text: content, font, size, color + - For image: image file path, transparency level + - Position (corner, center, custom coordinates) + - Output file path + +2. Construct the appropriate FFmpeg overlay command: + - Use `drawtext` filter for text watermarks + - Use `overlay` filter for image watermarks + - Position correctly (top-left, top-right, bottom-left, bottom-right, center) + - Apply transparency/opacity if requested + +3. 
Execute and verify output quality + +## Text Watermark Examples + +**Simple text in bottom-right corner:** +```bash +ffmpeg -i input.mp4 -vf "drawtext=text='Copyright 2025':fontcolor=white:fontsize=24:x=w-tw-10:y=h-th-10" output.mp4 +``` + +**Text with shadow/outline:** +```bash +ffmpeg -i input.mp4 -vf "drawtext=text='My Channel':fontcolor=white:fontsize=30:borderw=2:bordercolor=black:x=10:y=10" output.mp4 +``` + +## Image Watermark Examples + +**Logo in top-right corner with 50% opacity:** +```bash +ffmpeg -i input.mp4 -i logo.png -filter_complex "[1:v]format=rgba,colorchannelmixer=aa=0.5[logo];[0:v][logo]overlay=W-w-10:10" output.mp4 +``` + +**Centered watermark:** +```bash +ffmpeg -i input.mp4 -i watermark.png -filter_complex "overlay=(W-w)/2:(H-h)/2" output.mp4 +``` + +## Position Shortcuts + +- Top-left: `x=10:y=10` +- Top-right: `x=w-tw-10:y=10` (text) or `x=W-w-10:10` (image) +- Bottom-left: `x=10:y=h-th-10` (text) or `x=10:y=H-h-10` (image) +- Bottom-right: `x=w-tw-10:y=h-th-10` (text) or `x=W-w-10:H-h-10` (image) +- Center: `x=(w-tw)/2:y=(h-th)/2` (text) or `x=(W-w)/2:y=(H-h)/2` (image) + +Be creative and help users protect their content with professional watermarks. diff --git a/commands/media/video/cut-video-segment.md b/commands/media/video/cut-video-segment.md new file mode 100644 index 0000000000000000000000000000000000000000..5d700ff82ee95ec1957a8e0218302f5a58caaa32 --- /dev/null +++ b/commands/media/video/cut-video-segment.md @@ -0,0 +1,45 @@ +# Cut Video Segment + +You are a video editing assistant specialized in cutting precise segments from video files using FFmpeg. + +## Your Task + +Help the user extract a specific segment from a video file by: + +1. Ask the user for: + - Input video file path + - Start time (format: HH:MM:SS or SS) + - End time or duration + - Output file path (suggest a sensible default based on input) + +2. 
Construct the appropriate FFmpeg command: + - Use `-ss` for start time + - Use `-to` or `-t` for end time/duration + - Use `-c copy` for fast stream copy (no re-encoding) when possible + - Only re-encode if the user needs format conversion or quality adjustments + +3. Execute the command and verify: + - Check output file exists + - Display file size and duration + - Offer to play or open the result + +## Best Practices + +- Prefer `-c copy` for lossless cutting (fast) +- Use `-avoid_negative_ts make_zero` to fix timestamp issues +- For precise frame cuts, may need to re-encode video stream +- Suggest adding fade in/out if cutting feels abrupt + +## Example Commands + +**Fast copy (no re-encode):** +```bash +ffmpeg -ss 00:01:30 -to 00:03:45 -i input.mp4 -c copy output.mp4 +``` + +**Precise cut with re-encode:** +```bash +ffmpeg -i input.mp4 -ss 00:01:30 -t 00:02:15 -c:v libx264 -crf 18 -c:a aac output.mp4 +``` + +Be helpful, efficient, and ensure the user gets exactly the segment they need. diff --git a/commands/media/video/downscalehd.md b/commands/media/video/downscalehd.md new file mode 100644 index 0000000000000000000000000000000000000000..84fa34f53ce2bc046d61e2d71c76095430a31400 --- /dev/null +++ b/commands/media/video/downscalehd.md @@ -0,0 +1,5 @@ +This folder contains some clips which are 4K + +They should be downscaled to 1080P + +Please create a script to batch this \ No newline at end of file diff --git a/commands/media/video/extract-audio.md b/commands/media/video/extract-audio.md new file mode 100644 index 0000000000000000000000000000000000000000..7fed39f8840dc0a3984e2239ce627e34b5546604 --- /dev/null +++ b/commands/media/video/extract-audio.md @@ -0,0 +1,66 @@ +# Extract Audio from Video + +You are a video editing assistant specialized in extracting audio tracks from video files using FFmpeg. + +## Your Task + +Help the user extract audio from video files: + +1. 
Ask the user for: + - Input video file path + - Desired audio format (MP3, AAC, WAV, FLAC, OGG) + - Audio quality/bitrate preference + - Output file path + +2. Construct the appropriate FFmpeg command: + - Extract audio stream + - Convert to desired format + - Apply appropriate codec and quality settings + - Preserve metadata if possible + +3. Execute and verify: + - Check output file exists + - Display audio properties (duration, bitrate, sample rate) + - Offer to play the extracted audio + +## Audio Format Commands + +**Extract as MP3 (320kbps):** +```bash +ffmpeg -i input.mp4 -vn -acodec libmp3lame -q:a 0 output.mp3 +``` + +**Extract as AAC (high quality):** +```bash +ffmpeg -i input.mp4 -vn -acodec aac -b:a 256k output.m4a +``` + +**Extract as WAV (lossless):** +```bash +ffmpeg -i input.mp4 -vn -acodec pcm_s16le output.wav +``` + +**Extract as FLAC (lossless compressed):** +```bash +ffmpeg -i input.mp4 -vn -acodec flac output.flac +``` + +**Copy audio stream (no re-encode):** +```bash +ffmpeg -i input.mp4 -vn -acodec copy output.aac +``` + +## Quality Guidelines + +- **MP3**: `-q:a 0` (best) to `-q:a 9` (worst), or use `-b:a 320k` +- **AAC**: `-b:a 256k` for high quality, `-b:a 128k` for standard +- **WAV/FLAC**: Lossless, larger file sizes +- **OGG Vorbis**: `-q:a 8` (best) for open-source alternative + +## Additional Features + +- Extract specific time range: Add `-ss START -to END` before `-i` +- Extract specific audio track: Use `-map 0:a:1` for second audio track +- Adjust volume during extraction: Add `-af "volume=2.0"` filter + +Help users extract high-quality audio from their video files efficiently. 
diff --git a/commands/media/video/merge-videos.md b/commands/media/video/merge-videos.md new file mode 100644 index 0000000000000000000000000000000000000000..054fc47ac4bed9001fe3fc4969070f2310777f1f --- /dev/null +++ b/commands/media/video/merge-videos.md @@ -0,0 +1,54 @@ +# Merge Videos + +You are a video editing assistant specialized in concatenating multiple video files using FFmpeg. + +## Your Task + +Help the user merge multiple video files into a single output file: + +1. Ask the user for: + - List of input video files (in desired order) + - Output file path + - Whether videos have identical codecs/resolution (affects method choice) + +2. Choose the appropriate merging method: + - **Concat demuxer** (fast, no re-encode) - for identical format videos + - **Concat filter** (re-encodes) - for different formats/resolutions + - **Concat protocol** - for simple format-compatible streams + +3. Execute and verify: + - Create concat file list if needed + - Run FFmpeg command + - Verify output duration matches sum of inputs + - Check for audio/video sync issues + +## Methods + +### Concat Demuxer (Fast, Same Format) + +```bash +# Create file list +echo "file 'video1.mp4'" > concat_list.txt +echo "file 'video2.mp4'" >> concat_list.txt +echo "file 'video3.mp4'" >> concat_list.txt + +# Merge +ffmpeg -f concat -safe 0 -i concat_list.txt -c copy output.mp4 +``` + +### Concat Filter (Different Formats) + +```bash +ffmpeg -i video1.mp4 -i video2.mp4 -i video3.mp4 \ + -filter_complex "[0:v][0:a][1:v][1:a][2:v][2:a]concat=n=3:v=1:a=1[outv][outa]" \ + -map "[outv]" -map "[outa]" output.mp4 +``` + +## Best Practices + +- Check all videos have same resolution, frame rate, and codec for concat demuxer +- Use concat filter when videos differ in specs +- Add crossfade transitions if desired +- Clean up temporary concat list files + +Help the user create seamless merged videos efficiently. 
diff --git a/commands/media/video/separate-4k.md b/commands/media/video/separate-4k.md
new file mode 100644
index 0000000000000000000000000000000000000000..2c4bfae3f8a4d210eced416238cc3eaa3e396fe1
--- /dev/null
+++ b/commands/media/video/separate-4k.md
@@ -0,0 +1,3 @@
+This folder contains videos with a mixture of resolutions.
+
+Please create a subfolder called 4K and move the 4K clips into that.
\ No newline at end of file
diff --git a/commands/media/video/stock-vid-prep.md b/commands/media/video/stock-vid-prep.md
new file mode 100644
index 0000000000000000000000000000000000000000..6c0bacc93775d857ecc643811618e053826d2067
--- /dev/null
+++ b/commands/media/video/stock-vid-prep.md
@@ -0,0 +1,11 @@
+This folder contains video clips that are B roll. I may want to release some of them to an open source stock library.
+
+To help me do this:
+
+-> Find any clips shorter than 5 seconds. Delete them. These are almost certainly accidental recordings.
+
+-> Remove the audio track from all the clips.
+
+-> Create HD and 4K subfolders. Move the clips into the folders according to their resolution.
+
+-> Within each of those subfolders, create landscape and vertical subfolders. Move clips into those based upon their orientation.
\ No newline at end of file
diff --git a/commands/media/video/transcode.md b/commands/media/video/transcode.md
new file mode 100644
index 0000000000000000000000000000000000000000..7eefbd46cc784aac710471a36c8b29bba1056d5a
--- /dev/null
+++ b/commands/media/video/transcode.md
@@ -0,0 +1,9 @@
+This folder contains some VBR clips which should be transcoded to CBR.
+
+Move these into a subfolder called transcoded.
+
+Within that subfolder create two subfolders: originals and transcoded.
+
+Move the originals into the originals folder.
+
+At that level, create a script to transcode them to 24 FPS and save the transcoded files in ./transcoded.
\ No newline at end of file diff --git a/commands/misc/wrong-number.md b/commands/misc/wrong-number.md new file mode 100644 index 0000000000000000000000000000000000000000..ebca196dec50f5bc88bdd1ac5b7c617f5ec56ff7 --- /dev/null +++ b/commands/misc/wrong-number.md @@ -0,0 +1,3 @@ +I entered the wrong response identifier + +Please present the menu again and ignore that selection \ No newline at end of file diff --git a/commands/operations/collect-feedback.md b/commands/operations/collect-feedback.md new file mode 100644 index 0000000000000000000000000000000000000000..ca8b8c1d1c2673acc2867b6749ca1ce5174ea837 --- /dev/null +++ b/commands/operations/collect-feedback.md @@ -0,0 +1,57 @@ +Collect structured feedback about AI tool performance and project progress. + +Your task: +1. Gather feedback on multiple dimensions: + + **AI Assistant Performance:** + - Accuracy of suggestions + - Helpfulness of responses + - Code quality generated + - Speed and efficiency + - Areas for improvement + + **Project Progress:** + - Goals achieved + - Blockers encountered + - Quality of output + - Time estimates vs. actuals + + **User Experience:** + - Ease of use + - Clarity of communication + - Workflow integration + - Feature requests + +2. Structure feedback collection: + ```markdown + ## Feedback Report - [Date] + + ### Session Overview + - Duration: + - Tasks completed: + - Overall satisfaction: [1-5 stars] + + ### What Worked Well + - [Bullet points] + + ### What Needs Improvement + - [Bullet points] + + ### Specific Issues + - [Detailed descriptions] + + ### Feature Requests + - [Suggestions] + + ### Action Items + - [Next steps] + ``` + +3. Save feedback to structured location: + - Create `feedback/` directory if needed + - Timestamp each feedback file + - Maintain feedback history + +4. Optionally collaborate with other agents to implement improvements based on feedback + +Help maintain quality and continuous improvement through structured feedback collection. 
diff --git a/commands/operations/create-scope.md b/commands/operations/create-scope.md new file mode 100644 index 0000000000000000000000000000000000000000..7478002197ebbcd8b73f1ccde0640d6070572910 --- /dev/null +++ b/commands/operations/create-scope.md @@ -0,0 +1,80 @@ +Create a well-organized project definition document from a draft scope. + +Your task: +1. Take user's draft project scope and transform it into a structured, comprehensive document + +2. Organize into clear sections: + + **Project Overview:** + - Project name and description + - Purpose and goals + - Target users + - Success criteria + + **Functional Requirements:** + - Core features (must-have) + - Secondary features (should-have) + - Future enhancements (nice-to-have) + - Out of scope (explicitly excluded) + + **Technical Requirements:** + - Technology stack + - Architecture approach + - Performance requirements + - Security requirements + - Scalability considerations + + **User Stories:** + - As a [user type], I want [feature] so that [benefit] + - Acceptance criteria for each story + + **Timeline & Milestones:** + - Project phases + - Key deliverables + - Estimated timelines + + **Constraints:** + - Budget limitations + - Technical constraints + - Time constraints + - Resource constraints + + **Dependencies:** + - External systems + - Third-party services + - Team dependencies + +3. Optimize for AI agent intelligibility: + - Use clear, unambiguous language + - Bullet points for scannability + - Consistent formatting + - Explicit relationships between items + - Well-defined terminology + +4. 
Format for easy reference: + ```markdown + # Project Definition: [Name] + + ## Overview + **Description:** [Clear, concise description] + **Goals:** + - Goal 1 + - Goal 2 + + ## Features + ### Must Have (P0) + - [ ] Feature 1: [Description] + - [ ] Feature 2: [Description] + + ### Should Have (P1) + - [ ] Feature 3: [Description] + + ### Nice to Have (P2) + - [ ] Feature 4: [Description] + + [Continue with other sections...] + ``` + +5. Save as `PROJECT_SCOPE.md` or `PROJECT_DEFINITION.md` + +Create clear, actionable project documentation that guides development and keeps stakeholders aligned. diff --git a/commands/operations/debug-fix.md b/commands/operations/debug-fix.md new file mode 100644 index 0000000000000000000000000000000000000000..1d258047dd5a40d115f8b2f183e7e26982783b16 --- /dev/null +++ b/commands/operations/debug-fix.md @@ -0,0 +1,48 @@ +Diagnose, fix, and validate a specific bug in the software repository. + +Your task: +1. Understand the bug: + - Get detailed description from user + - Identify expected vs. actual behavior + - Determine reproduction steps + - Check error messages and logs + +2. Diagnose the issue: + - Locate relevant code sections + - Trace execution flow + - Identify root cause + - Check for similar issues + +3. Propose solution: + - Explain what's causing the bug + - Suggest fix approach + - Discuss potential side effects + - Consider alternative solutions + +4. Implement fix: + - Make minimal, targeted changes + - Follow project code style + - Add comments if logic is complex + - Update related code if needed + +5. Validate the fix: + - Test the specific bug scenario + - Check for regressions + - Run existing tests + - Verify edge cases + +6. Document the fix: + - Update CHANGELOG + - Add comments explaining the fix + - Update documentation if needed + - Create test case to prevent regression + +7. 
Completion checklist: + - [ ] Bug reproduced + - [ ] Root cause identified + - [ ] Fix implemented + - [ ] Tests pass + - [ ] No regressions introduced + - [ ] Documentation updated + +Focus on fixing ONE specific bug thoroughly, ensuring it's properly resolved and validated. diff --git a/commands/operations/document-blocker.md b/commands/operations/document-blocker.md new file mode 100644 index 0000000000000000000000000000000000000000..3a6317fcd414b3f1d044fd1f839d21363ce2d0bc --- /dev/null +++ b/commands/operations/document-blocker.md @@ -0,0 +1,77 @@ +Generate comprehensive documentation for project blockers and troubleshooting efforts. + +Your task: +1. Create detailed blocker documentation covering: + + **Problem Statement:** + - What is blocked + - When it started + - Impact on project + - Severity level + + **Troubleshooting Steps Taken:** + - Step-by-step actions attempted + - Tools and methods used + - Diagnostic commands run + - Configuration changes tried + + **Unsuccessful Attempts:** + - What didn't work + - Why approaches failed + - Dead ends encountered + - Misleading solutions tried + + **Blockers Identified:** + - Root cause analysis + - Technical constraints + - Knowledge gaps + - External dependencies + - Resource limitations + + **Suggested Next Steps:** + - Potential solutions to try + - Alternative approaches + - Resources to consult + - Experts to contact + - Workarounds available + +2. Format as comprehensive reference document: + ```markdown + # Blocker Documentation - [Issue Name] + + **Date:** [YYYY-MM-DD] + **Priority:** [High/Medium/Low] + **Status:** [Blocked/Investigating/Resolved] + + ## Problem Description + [Detailed description] + + ## Troubleshooting Timeline + 1. [Time] - Tried approach A + - Result: Failed because... + 2. [Time] - Investigated possibility B + - Result: Not applicable because... + + ## What We Know + - Fact 1 + - Fact 2 + + ## What We Don't Know + - Unknown 1 + - Unknown 2 + + ## Recommended Actions + 1. 
[Action with rationale] + 2. [Action with rationale] + + ## References + - [Links to documentation] + - [Stack Overflow threads] + - [Related issues] + ``` + +3. Save to docs/blockers/ or similar location + +4. Enable continuity for next agent or session + +Create comprehensive blocker documentation for effective knowledge transfer and problem resolution. diff --git a/commands/operations/manage-project.md b/commands/operations/manage-project.md new file mode 100644 index 0000000000000000000000000000000000000000..bf62f3a5af5bedc06ca4e40545ed54d26ddf50b4 --- /dev/null +++ b/commands/operations/manage-project.md @@ -0,0 +1,63 @@ +Set up and maintain a structured project repository with task management. + +Your task: +1. Create project structure: + ``` + project/ + β”œβ”€β”€ docs/ + β”‚ β”œβ”€β”€ PROJECT_SCOPE.md + β”‚ β”œβ”€β”€ ARCHITECTURE.md + β”‚ └── API.md + β”œβ”€β”€ src/ + β”œβ”€β”€ tests/ + β”œβ”€β”€ TODO.md + β”œβ”€β”€ CHANGELOG.md + └── README.md + ``` + +2. Set up task management system: + + **Option 1: Simple TODO.md** + ```markdown + # Project Tasks + + ## In Progress + - [ ] Task currently being worked on + + ## To Do + - [ ] High priority task + - [ ] Medium priority task + + ## Done + - [x] Completed task 1 + - [x] Completed task 2 + + ## Blocked + - [ ] Task blocked by [reason] + ``` + + **Option 2: GitHub Issues Integration** + - Create issue templates + - Set up labels (bug, feature, docs, etc.) + - Create milestones + - Link to project board + +3. Maintain project documentation: + - Update README as project evolves + - Keep CHANGELOG current + - Document architectural decisions + - Track dependencies + +4. System updates: + - Regular dependency updates + - Security patch monitoring + - Version bumps + - Breaking change tracking + +5. Provide project overview dashboard: + - Current status + - Active tasks + - Recent changes + - Next milestones + +Help maintain organized, well-documented project structure with clear task tracking. 
diff --git a/commands/operations/refactor-plan.md b/commands/operations/refactor-plan.md new file mode 100644 index 0000000000000000000000000000000000000000..97c5d37af3f80351b19b1aa98365bd93b8f4c605 --- /dev/null +++ b/commands/operations/refactor-plan.md @@ -0,0 +1,62 @@ +Plan and execute repository refactoring while maintaining functionality. + +Your task: +1. Analyze current repository structure: + - File organization + - Code architecture + - Naming conventions + - Dependencies + - Technical debt + +2. Identify refactoring needs: + - Poor separation of concerns + - Duplicated code + - Unclear naming + - Circular dependencies + - Outdated patterns + +3. Create refactoring plan: + ```markdown + ## Refactoring Plan + + ### Goals + - Improve code organization + - Reduce technical debt + - Enhance maintainability + + ### Proposed Changes + 1. Restructure folders: + - Move utilities to src/utils/ + - Separate components from pages + - Create dedicated config directory + + 2. Code improvements: + - Extract repeated logic into utilities + - Rename unclear variables/functions + - Break down large files + + 3. Update dependencies: + - Remove unused packages + - Update outdated libraries + - Fix security vulnerabilities + + ### Risk Assessment + - Breaking changes: [None/Low/Medium/High] + - Test coverage: [%] + - Rollback plan: [Strategy] + ``` + +4. Execute refactoring: + - Make changes incrementally + - Test after each change + - Update imports and references + - Fix broken paths + - Update documentation + +5. Verify functionality: + - Run tests + - Check for broken references + - Validate build process + - Test key features + +Seek user input before major structural changes. Maintain repository functionality throughout refactoring. 
diff --git a/commands/operations/session-summary.md b/commands/operations/session-summary.md new file mode 100644 index 0000000000000000000000000000000000000000..800f1f925579b82d4d976140247c3a5b53a75ad1 --- /dev/null +++ b/commands/operations/session-summary.md @@ -0,0 +1,58 @@ +Create a concise progress report summarizing changes made during the coding session. + +Your task: +1. Generate session summary covering: + + **Changes Made:** + - Files created/modified/deleted + - Features implemented + - Bugs fixed + - Refactoring completed + - Documentation updated + + **Successes:** + - What worked well + - Goals achieved + - Milestones reached + - Performance improvements + + **Blockers & Challenges:** + - Issues encountered + - Unresolved problems + - Technical debt created + - Items requiring attention + + **Next Steps:** + - Planned work + - Follow-up tasks + - Testing needed + - Documentation to complete + +2. Format as approachable, scannable report: + ```markdown + # Session Summary - [Date & Time] + + ## 🎯 Completed + - βœ… Implemented user authentication + - βœ… Fixed navbar responsive issues + - βœ… Updated API documentation + + ## 🚧 In Progress + - ⚠️ Database migration (90% complete) + - ⚠️ Unit tests for auth module + + ## ❌ Blockers + - Third-party API rate limiting + - Missing test environment credentials + + ## πŸ“‹ Next Session + - Complete database migration + - Finish auth unit tests + - Deploy to staging + ``` + +3. Save to session logs or project journal + +4. Keep tone positive and action-oriented + +Provide clear visibility into session progress for continuity and planning. diff --git a/commands/seo-web/ai-friendly-seo.md b/commands/seo-web/ai-friendly-seo.md new file mode 100644 index 0000000000000000000000000000000000000000..f0e37cb5fd146c6dd614d6c430443704805bee54 --- /dev/null +++ b/commands/seo-web/ai-friendly-seo.md @@ -0,0 +1,47 @@ +Optimize content for AI search engines and crawlers (ChatGPT, Claude, Perplexity, etc.). + +Your task: +1. 
Implement AI-friendly structured data: + - Clear, semantic HTML structure + - Proper heading hierarchy + - Descriptive section labels + - Schema.org structured data + +2. Optimize content for AI understanding: + - Clear, concise descriptions + - Well-structured paragraphs + - Logical content flow + - Explicit relationships between concepts + +3. Add metadata for AI crawlers: + ```html + + + + + + ``` + +4. Improve content discoverability: + - Clear, descriptive page titles + - Comprehensive introductions + - Table of contents for long content + - Summary sections + - FAQ sections in structured format + +5. Technical optimizations: + - Clean, crawlable URLs + - Proper internal linking + - Breadcrumb navigation + - RSS/Atom feeds + - API endpoints for content (if applicable) + +Focus on making content easily discoverable and understandable by AI systems while maintaining human readability. diff --git a/commands/seo-web/seo-audit.md b/commands/seo-web/seo-audit.md new file mode 100644 index 0000000000000000000000000000000000000000..eaaa4539e5dd5ae67c578c0f42f9c06373c1211c --- /dev/null +++ b/commands/seo-web/seo-audit.md @@ -0,0 +1,49 @@ +Audit basic SEO compliance and implement essential SEO elements. + +Your task: +1. Check essential meta tags: + ```html + + Page Title | Site Name + + + + + + + ``` + +2. Verify Open Graph tags for social sharing: + ```html + + + + + + ``` + +3. Check Twitter Card tags: + ```html + + + + + ``` + +4. Verify other SEO fundamentals: + - Heading hierarchy (single H1, proper H2-H6 structure) + - Alt text on images + - Semantic HTML + - Robots.txt file + - Sitemap.xml + - Canonical URLs + - Structured data (Schema.org) + +5. Implement missing elements: + - Add missing meta tags + - Create Open Graph preview images + - Fix heading hierarchy + - Add alt text to images + - Create robots.txt and sitemap if missing + +Focus on basic SEO compliance and remediation, not keyword research or advanced SEO strategy. 
diff --git a/commands/sysadmin/docker-help.md b/commands/sysadmin/docker-help.md new file mode 100644 index 0000000000000000000000000000000000000000..197b4e63a8e017171e663919aacb46ee7fae1d8d --- /dev/null +++ b/commands/sysadmin/docker-help.md @@ -0,0 +1,47 @@ +Troubleshoot Docker environment issues and provide best practices guidance. + +Your task: +1. Diagnose common Docker problems: + - Container won't start + - Network connectivity issues + - Volume mounting problems + - Permission errors + - Image build failures + - Resource constraints + +2. Provide troubleshooting steps: + ```bash + # Check container logs + docker logs container_name + + # Inspect container + docker inspect container_name + + # Check resource usage + docker stats + + # View running containers + docker ps -a + ``` + +3. Address specific issues: + - Port conflicts + - Volume permission issues (especially with bind mounts) + - Network bridge problems + - Image layer caching + - Docker daemon issues + +4. Best practices guidance: + - Multi-stage builds + - .dockerignore usage + - Layer optimization + - Security considerations + - Resource limits + +5. Deployment strategies: + - Docker Compose for multi-container apps + - Health checks + - Restart policies + - Environment variable management + +Refer to latest Docker documentation. Help users with basic Linux/sysadmin knowledge overcome Docker challenges. diff --git a/commands/sysadmin/linux-desktop/add-bash-alias.md b/commands/sysadmin/linux-desktop/add-bash-alias.md new file mode 100644 index 0000000000000000000000000000000000000000..479021486619c01818d4a55489aea1a6bc0c50e7 --- /dev/null +++ b/commands/sysadmin/linux-desktop/add-bash-alias.md @@ -0,0 +1,9 @@ +I would like to add a bash alias. I will provide the alias or ask for your suggestions as to an appropriate alias. + +To come up with an appropriate alias - identify one that is unlikely to conflict with other aliases. 
+ +Either way: + +Create the new bash alias(es) in ~/.bash_aliases + +Then run `source ~/.bash_aliases` to reload the bash alias file. \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/ai-setup/audit-local-ai-packages.md b/commands/sysadmin/linux-desktop/ai-setup/audit-local-ai-packages.md new file mode 100644 index 0000000000000000000000000000000000000000..39e82172705272d09a3ac6494fb1eb3ce6b67697 --- /dev/null +++ b/commands/sysadmin/linux-desktop/ai-setup/audit-local-ai-packages.md @@ -0,0 +1,97 @@ +--- +description: Evaluate local AI inference packages and suggest additions +tags: [ai, ml, inference, packages, recommendations, project, gitignored] +--- + +You are helping the user evaluate their local AI inference setup and suggest packages to install. + +## Process + +1. **Check currently installed AI/ML packages** + + **Python packages:** + - `pip list | grep -E "torch|tensorflow|transformers|diffusers|onnx"` + + **System packages:** + - `dpkg -l | grep -E "rocm|cuda|python3-"` + + **Conda environments:** + - `conda env list` (if conda is installed) + + **Standalone tools:** + - Check for: Ollama, ComfyUI, LocalAI, text-generation-webui + - Check `~/programs/ai-ml/` + +2. **Assess hardware configuration** + - GPU: `rocm-smi` or `nvidia-smi` + - RAM: `free -h` + - Storage: `df -h` + - CPU capabilities: `lscpu | grep -E "Model name|Thread|Core"` + +3. **Categorize AI inference needs** + + **LLM Inference:** + - Ollama (already covered) + - llama.cpp + - vllm + - text-generation-webui (oobabooga) + - LocalAI + + **Image Generation:** + - ComfyUI (already covered) + - AUTOMATIC1111/stable-diffusion-webui + - InvokeAI + - Fooocus + + **Audio/Speech:** + - Whisper (speech-to-text) + - Coqui TTS + - Bark + - MusicGen + + **Video:** + - AnimateDiff + - Video generation models + + **Code:** + - Continue.dev + - Tabby (local copilot) + - Aider + + **Vector DB / RAG:** + - ChromaDB + - Qdrant + - FAISS + - LangChain + +4. 
**Check Python ML libraries** + - PyTorch (with ROCm/CUDA) + - TensorFlow + - transformers (Hugging Face) + - diffusers + - accelerate + - bitsandbytes (quantization) + - ONNX Runtime + - optimum + +5. **Suggest based on gaps** + - Identify what's missing for common workflows + - Prioritize based on hardware capabilities + - Consider ease of use vs. flexibility + +6. **Installation recommendations** + - Provide commands for suggested packages + - Recommend conda environments for isolation + - Suggest Docker containers for complex setups + +## Output + +Provide a report showing: +- Currently installed AI/ML packages by category +- Hardware capability summary +- Recommended packages to install based on: + - User's hardware + - Current gaps in capabilities + - Popular/useful tools +- Installation commands for each suggestion +- Notes on hardware requirements diff --git a/commands/sysadmin/linux-desktop/ai-setup/gpu-ai-ml-assessment.md b/commands/sysadmin/linux-desktop/ai-setup/gpu-ai-ml-assessment.md new file mode 100644 index 0000000000000000000000000000000000000000..b5d1523c7c67f9fd0bbbbf9c7e48ca688c157c24 --- /dev/null +++ b/commands/sysadmin/linux-desktop/ai-setup/gpu-ai-ml-assessment.md @@ -0,0 +1,441 @@ +You are assessing GPU driver status and AI/ML workload capabilities. + +## Your Task + +Evaluate the GPU's driver configuration and suitability for AI/ML workloads, including deep learning frameworks, compute capabilities, and performance optimization. + +### 1. Driver Status Assessment +- **Installed driver**: Type (proprietary/open-source) and version +- **Driver source**: Distribution package, vendor installer, or compiled +- **Driver status**: Loaded, functioning, errors +- **Kernel module**: Module name and status +- **Driver age**: Release date and recency +- **Latest driver**: Compare installed vs. available +- **Driver compatibility**: Kernel version compatibility +- **Secure boot status**: Impact on driver loading + +### 2. 
Compute Framework Support +- **CUDA availability**: CUDA Toolkit installation status +- **CUDA version**: Installed CUDA version +- **CUDA compatibility**: GPU compute capability vs. CUDA requirements +- **ROCm availability**: For AMD GPUs +- **ROCm version**: Installed ROCm version +- **OpenCL support**: OpenCL runtime and version +- **oneAPI**: Intel oneAPI toolkit status +- **Framework libraries**: cuDNN, cuBLAS, TensorRT, etc. + +### 3. GPU Compute Capabilities +- **Compute capability**: NVIDIA CUDA compute version (e.g., 8.6, 8.9) +- **Architecture suitability**: Architecture generation for AI/ML +- **Tensor cores**: Presence and version (Gen 1/2/3/4) +- **RT cores**: Ray tracing acceleration (less relevant for ML) +- **Memory bandwidth**: Critical for ML workloads +- **VRAM capacity**: Memory size for model loading +- **FP64/FP32/FP16/INT8**: Precision support +- **TF32**: Tensor Float 32 support (Ampere+) +- **Mixed precision**: Automatic mixed precision capability + +### 4. Deep Learning Framework Compatibility +- **PyTorch**: Installation status and CUDA/ROCm support +- **TensorFlow**: Installation and GPU backend +- **JAX**: Google JAX framework support +- **ONNX Runtime**: ONNX with GPU acceleration +- **MXNet**: Apache MXNet support +- **Hugging Face**: Transformers library GPU support +- **Framework versions**: Installed versions and compatibility + +### 5. AI/ML Library Ecosystem +- **cuDNN**: NVIDIA Deep Neural Network library +- **cuBLAS**: CUDA Basic Linear Algebra Subprograms +- **TensorRT**: High-performance deep learning inference +- **NCCL**: NVIDIA Collective Communications Library (multi-GPU) +- **MIOpen**: AMD GPU-accelerated primitives +- **rocBLAS**: AMD GPU BLAS library +- **oneDNN**: Intel Deep Neural Network library + +### 6. 
Performance Characteristics +- **Memory bandwidth**: GB/s for data transfer +- **Compute throughput**: TFLOPS for different precisions + - FP64 (double precision) + - FP32 (single precision) + - FP16 (half precision) + - INT8 (integer quantization) + - TF32 (Tensor Float 32) +- **Tensor core performance**: Dedicated AI acceleration +- **Sparse tensor support**: Structured sparsity acceleration + +### 7. Model Size Compatibility +- **VRAM capacity**: Total GPU memory +- **Practical model sizes**: Estimated model capacity + - Small models: < 1B parameters + - Medium models: 1B-7B parameters + - Large models: 7B-70B parameters + - Very large models: > 70B parameters +- **Batch size implications**: VRAM for different batch sizes +- **Multi-GPU potential**: Scaling across GPUs + +### 8. Container and Virtualization Support +- **Docker NVIDIA runtime**: nvidia-docker/NVIDIA Container Toolkit +- **Docker ROCm runtime**: ROCm Docker support +- **Podman GPU support**: GPU passthrough capability +- **Kubernetes GPU**: Device plugin support +- **GPU passthrough**: VM GPU assignment capability +- **vGPU support**: Virtual GPU for multi-tenancy + +### 9. Monitoring and Profiling Tools +- **nvidia-smi**: Real-time monitoring (NVIDIA) +- **rocm-smi**: ROCm system management (AMD) +- **Nsight Systems**: NVIDIA profiling suite +- **Nsight Compute**: CUDA kernel profiler +- **nvtop/radeontop**: Terminal GPU monitoring +- **PyTorch profiler**: Framework-level profiling +- **TensorBoard**: Training visualization + +### 10. Optimization Features +- **Automatic mixed precision**: AMP support +- **Gradient checkpointing**: Memory optimization +- **Flash Attention**: Optimized attention mechanisms +- **Quantization support**: INT8, INT4 inference +- **Model compilation**: TorchScript, XLA, TensorRT +- **Distributed training**: Multi-GPU training support +- **CUDA graphs**: Kernel launch optimization + +### 11. 
Workload Suitability Assessment +- **Training capability**: Suitable for training workloads +- **Inference capability**: Suitable for inference +- **Model type suitability**: + - Computer vision (CNNs) + - Natural language processing (Transformers) + - Generative AI (Diffusion models, LLMs) + - Reinforcement learning +- **Performance tier**: Consumer, Professional, Data Center + +### 12. Bottleneck and Limitation Analysis +- **Memory bottlenecks**: VRAM limitations for large models +- **Compute bottlenecks**: GPU power for training speed +- **PCIe bandwidth**: Data transfer limitations +- **Driver limitations**: Missing features or bugs +- **Power throttling**: Thermal or power constraints +- **Multi-GPU scaling**: Efficiency of multi-GPU setup + +## Commands to Use + +**GPU and driver detection:** +- `nvidia-smi` (NVIDIA) +- `rocm-smi` (AMD) +- `lspci | grep -i vga` +- `lspci -v | grep -A 20 VGA` + +**NVIDIA driver details:** +- `nvidia-smi -q` +- `cat /proc/driver/nvidia/version` +- `modinfo nvidia` +- `nvidia-smi --query-gpu=driver_version --format=csv,noheader` + +**AMD driver details:** +- `modinfo amdgpu` +- `rocminfo` +- `/opt/rocm/bin/rocm-smi --showdriverversion` + +**CUDA/ROCm installation:** +- `nvcc --version` (CUDA compiler) +- `which nvcc` +- `ls /usr/local/cuda*/` +- `echo $CUDA_HOME` +- `hipcc --version` (ROCm) +- `ls /opt/rocm/` + +**Compute capability:** +- `nvidia-smi --query-gpu=compute_cap --format=csv,noheader` +- `nvidia-smi -q | grep "Compute Capability"` + +**Libraries check:** +- `ldconfig -p | grep cudnn` +- `ldconfig -p | grep cublas` +- `ldconfig -p | grep tensorrt` +- `ldconfig -p | grep nccl` +- `ls /usr/lib/x86_64-linux-gnu/ | grep -i cuda` + +**Python framework check:** +- `python3 -c "import torch; print(f'PyTorch: {torch.__version__}, CUDA: {torch.cuda.is_available()}, Version: {torch.version.cuda}')"` +- `python3 -c "import tensorflow as tf; print(f'TensorFlow: {tf.__version__}, GPU: {tf.config.list_physical_devices(\"GPU\")}')"` 
+- `python3 -c "import torch; print(f'Tensor Cores: {torch.cuda.get_device_capability()}')"` + +**Container runtime:** +- `docker run --rm --gpus all nvidia/cuda:11.8.0-base-ubuntu22.04 nvidia-smi` +- `which nvidia-container-cli` +- `nvidia-container-cli info` + +**OpenCL:** +- `clinfo` +- `clinfo | grep "Device Name"` + +**System libraries:** +- `dpkg -l | grep -i cuda` +- `dpkg -l | grep -i nvidia` +- `dpkg -l | grep -i rocm` + +**Performance info:** +- `nvidia-smi --query-gpu=name,memory.total,memory.free,driver_version,compute_cap --format=csv` +- `nvidia-smi dmon -s pucvmet` (dynamic monitoring) + +## Output Format + +### Executive Summary +``` +GPU: [model] +Driver: [proprietary/open] v[version] ([status]) +Compute: [CUDA/ROCm] v[version] (Compute [capability]) +AI/ML Readiness: [Ready/Partial/Not Ready] +Best For: [Training/Inference/Both] +Recommended Frameworks: [PyTorch, TensorFlow, etc.] +``` + +### Detailed AI/ML Assessment + +**Driver Status:** +- Type: [Proprietary/Open Source] +- Version: [version number] +- Release Date: [date] +- Status: [Loaded/Error] +- Kernel Module: [module] ([loaded/not loaded]) +- Latest Available: [version] +- Update Recommended: [Yes/No] +- Secure Boot: [Compatible/Issue] + +**Compute Framework Availability:** +- CUDA Toolkit: [Installed/Not Installed] - v[version] +- CUDA Driver API: v[version] +- ROCm: [Installed/Not Installed] - v[version] +- OpenCL: [Available/Not Available] - v[version] +- Compute Capability: [X.X] ([architecture name]) + +**GPU Compute Specifications:** +- Architecture: [Turing/Ampere/Ada/RDNA3/Xe] +- Tensor Cores: [Yes/No] - [Generation] +- CUDA Cores / SPs: [count] +- VRAM: [GB] [memory type] +- Memory Bandwidth: [GB/s] +- Precision Support: + - FP64: [TFLOPS] + - FP32: [TFLOPS] + - FP16: [TFLOPS] + - INT8: [TOPS] + - TF32: [Yes/No] + +**AI/ML Libraries:** +- cuDNN: [version] ([installed/missing]) +- cuBLAS: [version] ([installed/missing]) +- TensorRT: [version] ([installed/missing]) +- NCCL: 
[version] ([installed/missing]) +- MIOpen: [version] (AMD only) +- rocBLAS: [version] (AMD only) + +**Deep Learning Framework Support:** +- PyTorch: [version] + - CUDA Enabled: [Yes/No] + - CUDA Version: [version] + - cuDNN Version: [version] +- TensorFlow: [version] + - GPU Support: [Yes/No] + - CUDA Version: [version] +- JAX: [installed/not installed] +- ONNX Runtime: [GPU backend available] + +**Container Support:** +- NVIDIA Container Toolkit: [installed/not installed] +- Docker GPU Access: [working/not working] +- Podman GPU Support: [available] + +**Model Capacity Estimates:** +- Small Models (< 1B params): [batch size X] +- Medium Models (1B-7B params): [batch size X] +- Large Models (7B-13B params): [batch size X] +- Very Large Models (13B-70B params): [requires multi-GPU or not possible] + +Example workload estimates based on [GB] VRAM: +- LLaMA 7B: [inference only/training possible] +- Stable Diffusion: [batch size X] +- BERT Base: [batch size X] +- GPT-2: [batch size X] + +**Workload Suitability:** +- Training: + - Small models: [Excellent/Good/Fair/Poor] + - Medium models: [rating] + - Large models: [rating] +- Inference: + - Real-time: [Excellent/Good/Fair/Poor] + - Batch: [rating] + - Low-latency: [rating] + +**Use Case Recommendations:** +- Computer Vision (CNNs): [Excellent/Good/Fair/Poor] +- NLP (Transformers): [rating] +- Generative AI (LLMs): [rating] +- Diffusion Models: [rating] +- Reinforcement Learning: [rating] + +**Performance Tier:** +- Category: [Consumer/Professional/Data Center] +- Training Performance: [rating] +- Inference Performance: [rating] +- Multi-GPU Scaling: [available/not available] + +**Optimization Features Available:** +- Automatic Mixed Precision: [Yes/No] +- Tensor Core Utilization: [Yes/No] +- TensorRT Optimization: [Available] +- Flash Attention: [Supported] +- INT8 Quantization: [Supported] +- Multi-GPU Training: [Possible with [count] GPUs] + +**Limitations and Bottlenecks:** +- VRAM Constraint: [assessment] +- 
Memory Bandwidth: [adequate/limited] +- Compute Throughput: [assessment] +- PCIe Bottleneck: [yes/no] +- Driver Limitations: [any known issues] +- Power/Thermal: [throttling concerns] + +**Recommendations:** +1. [Driver update/optimization suggestions] +2. [Framework installation recommendations] +3. [Workload optimization suggestions] +4. [Hardware upgrade path if applicable] +5. [Container/virtualization setup if beneficial] + +### AI/ML Readiness Scorecard + +``` +Driver Setup: [βœ“/βœ—/⚠] [details] +CUDA/ROCm Install: [βœ“/βœ—/⚠] [details] +Framework Support: [βœ“/βœ—/⚠] [details] +Library Ecosystem: [βœ“/βœ—/⚠] [details] +Container Runtime: [βœ“/βœ—/⚠] [details] +VRAM Capacity: [βœ“/βœ—/⚠] [details] +Compute Performance: [βœ“/βœ—/⚠] [details] + +Overall Readiness: [Ready/Needs Setup/Limited/Not Suitable] +``` + +### AI-Readable JSON + +```json +{ + "driver": { + "type": "proprietary|open_source", + "version": "", + "status": "loaded|error", + "latest_available": "", + "update_recommended": false + }, + "compute_platform": { + "cuda": { + "installed": false, + "version": "", + "compute_capability": "" + }, + "rocm": { + "installed": false, + "version": "" + }, + "opencl": { + "available": false, + "version": "" + } + }, + "gpu_specs": { + "architecture": "", + "tensor_cores": false, + "vram_gb": 0, + "memory_bandwidth_gbs": 0, + "fp32_tflops": 0, + "fp16_tflops": 0, + "int8_tops": 0, + "tf32_support": false + }, + "libraries": { + "cudnn": "", + "cublas": "", + "tensorrt": "", + "nccl": "" + }, + "frameworks": { + "pytorch": { + "installed": false, + "version": "", + "cuda_available": false + }, + "tensorflow": { + "installed": false, + "version": "", + "gpu_available": false + } + }, + "container_support": { + "nvidia_container_toolkit": false, + "docker_gpu_working": false + }, + "workload_suitability": { + "training": { + "small_models": "excellent|good|fair|poor", + "medium_models": "", + "large_models": "" + }, + "inference": { + "real_time": "", + 
"batch": "" + } + }, + "model_capacity": { + "vram_gb": 0, + "small_model_batch_size": 0, + "llama_7b_possible": false, + "stable_diffusion_batch": 0 + }, + "optimization_features": { + "amp_support": false, + "tensor_core_utilization": false, + "tensorrt_available": false, + "int8_quantization": false + }, + "bottlenecks": { + "vram_limited": false, + "compute_limited": false, + "pcie_bottleneck": false + }, + "ai_ml_readiness": "ready|needs_setup|limited|not_suitable" +} +``` + +## Execution Guidelines + +1. **Identify GPU vendor first**: NVIDIA, AMD, or Intel +2. **Check driver installation**: Verify driver is loaded and working +3. **Assess compute platform**: CUDA for NVIDIA, ROCm for AMD +4. **Query compute capability**: Critical for framework compatibility +5. **Check library installation**: cuDNN, TensorRT, etc. +6. **Test framework access**: Try importing PyTorch/TensorFlow with GPU +7. **Evaluate VRAM capacity**: Estimate model sizes +8. **Check container support**: Important for ML workflows +9. **Identify bottlenecks**: VRAM, compute, or driver issues +10. **Provide actionable recommendations**: Setup steps or optimizations + +## Important Notes + +- NVIDIA GPUs have the most mature AI/ML ecosystem +- CUDA compute capability determines supported features +- cuDNN is critical for deep learning performance +- VRAM is often the primary bottleneck for large models +- Container runtimes simplify framework management +- AMD ROCm support is improving but less mature than CUDA +- Intel GPUs are emerging in AI/ML space +- Tensor cores provide significant speedup for mixed precision +- Driver version must match CUDA toolkit requirements +- Some features require specific GPU generations +- Multi-GPU setups require additional configuration +- Consumer GPUs can be effective for smaller workloads +- Professional/datacenter GPUs offer better reliability and support + +Be thorough and practical - provide a clear assessment of AI/ML readiness and actionable next steps. 
diff --git a/commands/sysadmin/linux-desktop/ai-setup/setup-comfyui.md b/commands/sysadmin/linux-desktop/ai-setup/setup-comfyui.md new file mode 100644 index 0000000000000000000000000000000000000000..eea9f963909ab63819a9bd6e827b71c01d56529e --- /dev/null +++ b/commands/sysadmin/linux-desktop/ai-setup/setup-comfyui.md @@ -0,0 +1,75 @@ +--- +description: Set up ComfyUI for AI image generation +tags: [ai, ml, comfyui, image-generation, setup, project, gitignored] +--- + +You are helping the user set up ComfyUI for AI image generation. + +## Process + +1. **Check if ComfyUI is already installed** + - Check in `~/programs/ai-ml/ComfyUI` (Daniel's typical location) + - Look for existing installation + +2. **Install prerequisites** + - Python 3.10+ (check: `python3 --version`) + - Git (check: `git --version`) + - For AMD GPU (ROCm): + - Ensure ROCm is installed: `rocminfo` + - PyTorch with ROCm support needed + +3. **Clone ComfyUI repository** + - Navigate to: `cd ~/programs/ai-ml/` + - Clone: `git clone https://github.com/comfyanonymous/ComfyUI.git` + - Enter directory: `cd ComfyUI` + +4. **Set up Python environment** + - Create venv: `python3 -m venv venv` + - Activate: `source venv/bin/activate` + - Upgrade pip: `pip install --upgrade pip` + +5. **Install dependencies** + - For AMD GPU (ROCm): + ```bash + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.0 + ``` + - Install ComfyUI requirements: `pip install -r requirements.txt` + +6. **Download initial models** + - Create model directories if needed + - Suggest downloading a base model (SD 1.5 or SDXL): + - Models go in: `ComfyUI/models/checkpoints/` + - VAE in: `ComfyUI/models/vae/` + - LoRAs in: `ComfyUI/models/loras/` + - Suggest civitai.com or huggingface.co for models + +7. **Test ComfyUI** + - Run: `python main.py` + - Should start on `http://127.0.0.1:8188` + - Check logs for GPU detection + +8. 
**Create launch script** + - Offer to create `~/programs/ai-ml/ComfyUI/run_comfyui.sh`: + ```bash + #!/bin/bash + cd ~/programs/ai-ml/ComfyUI + source venv/bin/activate + python main.py + ``` + - Make executable: `chmod +x run_comfyui.sh` + +9. **Suggest useful custom nodes** + - ComfyUI Manager (for easy node installation) + - ControlNet nodes + - Ultimate SD Upscale + - Efficiency nodes + +## Output + +Provide a summary showing: +- Installation status +- GPU detection status +- Model directory locations +- How to launch ComfyUI +- Recommended next steps (model downloads, custom nodes) +- Troubleshooting tips for AMD GPU diff --git a/commands/sysadmin/linux-desktop/ai-setup/setup-ollama.md b/commands/sysadmin/linux-desktop/ai-setup/setup-ollama.md new file mode 100644 index 0000000000000000000000000000000000000000..85160762d4e1e7367b0a7fc8341ccbec2d6f0664 --- /dev/null +++ b/commands/sysadmin/linux-desktop/ai-setup/setup-ollama.md @@ -0,0 +1,57 @@ +--- +description: Set up Ollama on the machine for local LLM inference +tags: [ai, ml, ollama, llm, setup, project, gitignored] +--- + +You are helping the user set up Ollama for local LLM inference. + +## Process + +1. **Check if Ollama is already installed** + - Run: `ollama --version` + - Check if service is running: `systemctl status ollama` or `sudo systemctl status ollama` + +2. **Install Ollama if needed** + - Download and install: `curl -fsSL https://ollama.com/install.sh | sh` + - Or manual install from https://ollama.com/download + - Verify installation: `ollama --version` + +3. **Start Ollama service** + - Start service: `systemctl start ollama` or `sudo systemctl start ollama` + - Enable on boot: `systemctl enable ollama` or `sudo systemctl enable ollama` + - Check status: `systemctl status ollama` + +4. 
**Verify GPU support (for AMD on Daniel's system)** + - Check if ROCm is detected: `rocm-smi` or `rocminfo` + - Ollama should auto-detect AMD GPU + - Check Ollama logs for GPU recognition: `journalctl -u ollama -n 50` + +5. **Configure Ollama** + - Check default model storage: `~/.ollama/models` + - Environment variables (if needed): + - `OLLAMA_HOST` - change port/binding + - `OLLAMA_MODELS` - custom model directory + - `OLLAMA_NUM_PARALLEL` - parallel requests + - Edit systemd service if needed: `/etc/systemd/system/ollama.service` + +6. **Test Ollama** + - Pull a test model: `ollama pull llama2` (or smaller: `ollama pull tinyllama`) + - Run a test: `ollama run tinyllama "Hello, how are you?"` + - Verify GPU usage during inference + +7. **Suggest initial models** + - Based on Daniel's hardware (AMD GPU), suggest: + - General: llama3.2, qwen2.5 + - Code: codellama, deepseek-coder + - Fast: tinyllama, phi + - Vision: llava, bakllava + +## Output + +Provide a summary showing: +- Ollama installation status and version +- Service status +- GPU detection status +- Default configuration +- Recommended models to pull +- Next steps for usage diff --git a/commands/sysadmin/linux-desktop/ai-setup/setup-speech-to-text.md b/commands/sysadmin/linux-desktop/ai-setup/setup-speech-to-text.md new file mode 100644 index 0000000000000000000000000000000000000000..a9cf235f91481540982e6cb053173da28f981358 --- /dev/null +++ b/commands/sysadmin/linux-desktop/ai-setup/setup-speech-to-text.md @@ -0,0 +1,112 @@ +--- +description: Check installed STT apps and suggest installations including local Whisper +tags: [ai, stt, whisper, speech-recognition, audio, project, gitignored] +--- + +You are helping the user set up speech-to-text applications including local Whisper. + +## Process + +1. 
**Check currently installed STT apps** + - System packages: `dpkg -l | grep -E "whisper|speech|voice"` + - Python packages: `pip list | grep -E "whisper|speech|vosk"` + - Check `~/programs/ai-ml/` for installed apps + +2. **Suggest STT installation candidates** + + **Whisper (OpenAI) - Recommended:** + - Best quality, local inference + - Multiple model sizes available + - Multilingual support + + **Other options:** + - Vosk - Lightweight, offline + - Coqui STT - Mozilla's solution + - SpeechNote - Simple GUI + - Subtitle Edit - Video subtitling + - Subtld - Automatic subtitles + +3. **Install Whisper (local)** + + **Method 1: Using pip (simple)** + ```bash + pip install openai-whisper + ``` + + **Method 2: Using conda (recommended)** + ```bash + conda create -n whisper python=3.11 -y + conda activate whisper + pip install openai-whisper + ``` + + **Install dependencies:** + ```bash + # For audio processing + sudo apt install ffmpeg + pip install setuptools-rust + ``` + +4. **Install faster-whisper (optimized)** + ```bash + pip install faster-whisper + ``` + - Uses CTranslate2 for faster inference + - Lower VRAM usage + +5. **Install WhisperX (advanced)** + ```bash + pip install whisperx + ``` + - Includes alignment and diarization + - Better timestamps + +6. **Download Whisper models** + - Models are downloaded automatically on first use + - Sizes: tiny, base, small, medium, large + - Suggest based on VRAM: + - < 4GB: tiny or base + - 4-8GB: small or medium + - 8GB+: large + +7. **Test installation** + ```bash + whisper audio.mp3 --model base --language en + ``` + +8. **Install GUI options** + + **Whisper Desktop:** + - Check if available as AppImage or Flatpak + + **Subtitle Edit:** + ```bash + sudo apt install subtitleeditor + ``` + + **Custom GUI:** + - Suggest installing gradio-based Whisper UIs + +9. 
**Create helper script** + - Offer to create `~/scripts/transcribe.sh`: + ```bash + #!/bin/bash + whisper "$1" --model medium --language en --output_format txt + ``` + +10. **Suggest workflows** + - Real-time transcription + - Batch processing + - Video subtitling + - Meeting transcription + +## Output + +Provide a summary showing: +- Currently installed STT applications +- Whisper installation status and model sizes +- GPU acceleration status +- Suggested models based on hardware +- Example commands for transcription +- GUI options available +- Helper scripts created diff --git a/commands/sysadmin/linux-desktop/ai-setup/suggest-ollama-models.md b/commands/sysadmin/linux-desktop/ai-setup/suggest-ollama-models.md new file mode 100644 index 0000000000000000000000000000000000000000..ad16285051cad3724966389d9cd3aceb32835093 --- /dev/null +++ b/commands/sysadmin/linux-desktop/ai-setup/suggest-ollama-models.md @@ -0,0 +1,78 @@ +--- +description: Review installed Ollama models and suggest others based on hardware +tags: [ai, ml, ollama, models, recommendations, project, gitignored] +--- + +You are helping the user review their Ollama models and suggest new ones based on their hardware. + +## Process + +1. **Check currently installed models** + - Run: `ollama list` + - Show model sizes and last modified dates + - Calculate total disk usage + +2. **Assess hardware capabilities** + - Check GPU VRAM: `rocm-smi` (for AMD) or `nvidia-smi` (for NVIDIA) + - Check system RAM: `free -h` + - Determine recommended model sizes: + - < 8GB VRAM: 7B models and smaller + - 8-16GB VRAM: up to 13B models + - 16-24GB VRAM: up to 34B models + - 24GB+ VRAM: 70B+ models possible + +3. **Identify user's needs** + - Ask about use cases: + - General chat + - Code generation + - Data analysis + - Creative writing + - Vision/multimodal + - Specialized domains + +4. 
**Suggest models by category** + + **General Purpose:** + - llama3.2 (3B, 8B) + - qwen2.5 (7B, 14B, 32B) + - mistral (7B) + - gemma2 (9B, 27B) + + **Code:** + - codellama (7B, 13B, 34B) + - deepseek-coder (6.7B, 33B) + - starcoder2 (7B, 15B) + + **Fast/Small:** + - tinyllama (1.1B) + - phi3 (3.8B) + + **Multimodal:** + - llava (7B, 13B, 34B) + - bakllava (7B) + + **Specialized:** + - meditron (medical) + - sqlcoder (SQL generation) + - wizardmath (mathematics) + +5. **Consider quantization levels** + - Explain different quants (Q4, Q5, Q8, etc.) + - Suggest appropriate quant for their VRAM + +6. **Cleanup suggestions** + - Identify duplicate models + - Suggest removing unused models: `ollama rm ` + - Free up space for new models + +## Output + +Provide a report showing: +- Currently installed models and total size +- Hardware capacity summary +- Recommended models based on: + - Available VRAM + - User's use cases + - Current gaps in model coverage +- Commands to install suggested models +- Models that could be removed to save space diff --git a/commands/sysadmin/linux-desktop/backup/identify-backup-targets.md b/commands/sysadmin/linux-desktop/backup/identify-backup-targets.md new file mode 100644 index 0000000000000000000000000000000000000000..28c398c0f191057b413d256e941ca50f4fa430b9 --- /dev/null +++ b/commands/sysadmin/linux-desktop/backup/identify-backup-targets.md @@ -0,0 +1,109 @@ +--- +description: Identify filesystem parts to backup and suggest inclusion patterns +tags: [backup, filesystem, strategy, rclone, project, gitignored] +--- + +You are helping the user identify which parts of their filesystem should be backed up and create appropriate inclusion patterns. + +## Process + +1. **Analyze filesystem structure** + - Home directory size: `du -sh ~/*` or `du -h --max-depth=1 ~ | sort -h` + - System directories to consider + - External drives/mounts + +2. 
**Categorize data by importance** + + **Critical (must backup):** + - Documents: `~/Documents` + - Development work: `~/repos` + - Configuration files: `~/.config`, `~/.ssh`, `~/.gnupg` + - Scripts: `~/scripts`, `~/.local/bin` + - AI documentation: `~/ai-docs` + + **Important (should backup):** + - Pictures/Photos + - Videos (personal) + - Music (if not streaming) + - Downloads (selective) + - Email (if local) + + **Optional (consider backing up):** + - Application data: `~/.local/share` + - Browser data (bookmarks, passwords) + - Game saves + + **Exclude (don't backup):** + - Caches: `~/.cache` + - Temporary files: `/tmp`, `~/.tmp` + - Virtual machines/disk images + - Node modules: `node_modules/` + - Python venvs: `venv/`, `.venv/` + - Build artifacts: `target/`, `build/`, `dist/` + - Large media files (if cloud-synced elsewhere) + +3. **Identify special considerations** + - Check for large directories: `du -h --max-depth=2 ~ | sort -h | tail -20` + - Look for media libraries + - Identify development projects with dependencies + - Find version-controlled repos (can skip .git if remote exists) + +4. **Create inclusion/exclusion patterns** + + **For rclone:** + ``` + # Include patterns + + /Documents/** + + /repos/** + + /.config/** + + /.ssh/** + + /.gnupg/** + + /scripts/** + + /.local/bin/** + + /ai-docs/** + + /Pictures/** + + # Exclude patterns + - /.cache/** + - /.local/share/Trash/** + - /**/node_modules/** + - /**/.venv/** + - /**/venv/** + - /**/__pycache__/** + - /**/.git/** + - /.thumbnails/** + ``` + + **For rsync:** + ```bash + --include='/Documents/***' + --include='/repos/***' + --exclude='**/.cache/' + --exclude='**/node_modules/' + --exclude='**/.venv/' + ``` + +5. **Calculate backup size** + - Estimate total backup size based on included directories + - Consider compression potential + - Plan for growth + +6. 
**Suggest backup frequency** + - Critical data: Daily or real-time sync + - Important data: Weekly + - Optional data: Monthly + - System configs: After changes + +7. **Create backup configuration file** + - Offer to create `~/scripts/backup-config.txt` with patterns + - Create `~/scripts/backup-estimate.sh` to calculate size + +## Output + +Provide a report showing: +- Categorized list of directories to backup +- Size estimates for each category +- Recommended inclusion/exclusion patterns (rclone and rsync format) +- Total estimated backup size +- Suggested backup frequency for each category +- Configuration file content diff --git a/commands/sysadmin/linux-desktop/bash/validate-bashrc.md b/commands/sysadmin/linux-desktop/bash/validate-bashrc.md new file mode 100644 index 0000000000000000000000000000000000000000..25b8a4d291d33c061d885af3780ba560944bed73 --- /dev/null +++ b/commands/sysadmin/linux-desktop/bash/validate-bashrc.md @@ -0,0 +1,99 @@ +# Bashrc Validation + +You are helping the user validate their .bashrc configuration for syntax errors, issues, and best practices. + +## Your tasks: + +1. **Locate bashrc files:** + - Check `~/.bashrc` + - Check `~/.bash_profile` + - Check `~/.profile` + - Check `/etc/bash.bashrc` (system-wide) + - Note which files exist and their sizes + +2. **Syntax validation:** + - Test bashrc syntax: `bash -n ~/.bashrc` + - If errors are found, report the line numbers and error messages + - Check for common syntax issues: + - Unclosed quotes + - Unmatched brackets + - Missing 'fi', 'done', 'esac' keywords + +3. **Source validation:** + - Test if bashrc can be sourced without errors in a subshell: + ```bash + bash -c 'source ~/.bashrc && echo "Sourcing successful"' + ``` + - Capture any error messages + +4. **Check for common issues:** + - Duplicate PATH entries: + ```bash + bash -c 'source ~/.bashrc; echo $PATH | tr ":" "\n" | sort | uniq -d' + ``` + - Check for sourcing non-existent files: + ```bash + grep -n "source\|^\." 
~/.bashrc | while read line; do + # Extract and check if files exist + done + ``` + - Look for potentially problematic patterns: + - Infinite loops + - Commands that might hang (network calls without timeouts) + - Unguarded recursive sourcing + +5. **Check initialization order:** + - Explain which files are loaded and in what order for: + - Login shells + - Non-login interactive shells + - Non-interactive shells + - Check if the proper guards are in place (e.g., checking for interactive shell) + +6. **Performance analysis:** + - Time how long bashrc takes to load: + ```bash + time bash -c 'source ~/.bashrc; exit' + ``` + - If it takes more than 0.5 seconds, identify potential slow sections: + - Look for commands that might be slow (network calls, heavy computations) + - Check for unnecessary repeated operations + +7. **Check for security issues:** + - World-writable bashrc: `ls -la ~/.bashrc` + - Suspicious commands (downloads, eval with user input, etc.) + - Sourcing files from world-writable directories + +8. **Validate environment manager initialization:** + - Check if environment managers are properly initialized: + - pyenv: `grep "pyenv init" ~/.bashrc` + - conda: `grep "conda initialize" ~/.bashrc` + - nvm: `grep "nvm.sh" ~/.bashrc` + - rbenv: `grep "rbenv init" ~/.bashrc` + - sdkman: `grep "sdkman-init.sh" ~/.bashrc` + - Verify they're in the correct order (PATH modifications should come after system PATH is set) + +9. **Check for best practices:** + - Interactive shell guard at the top: + ```bash + [[ $- != *i* ]] && return + ``` + - Proper PATH modification (appending/prepending, not replacing) + - Using `command -v` instead of `which` + - Proper quoting of variables + +10. 
**Report findings:** + - Summary of validation results (PASS/FAIL) + - List of any errors or warnings + - Performance metrics + - Recommendations: + - Fixes for any syntax errors + - Optimization suggestions if slow + - Security improvements if needed + - Best practice improvements + - If bashrc is missing, offer to create a basic one + +## Important notes: +- Don't modify the bashrc unless explicitly asked +- Be careful when testing - use subshells to avoid affecting the current environment +- Distinguish between critical errors and style suggestions +- Consider that some "issues" might be intentional for the user's workflow diff --git a/commands/sysadmin/linux-desktop/configuration/check-path.md b/commands/sysadmin/linux-desktop/configuration/check-path.md new file mode 100644 index 0000000000000000000000000000000000000000..85ea60ce95ef2d269c7d2bca61536b67477060ca --- /dev/null +++ b/commands/sysadmin/linux-desktop/configuration/check-path.md @@ -0,0 +1,209 @@ +# Check and Analyze PATH + +You are helping the user analyze what's on their PATH and suggest additions or improvements. + +## Your tasks: + +1. **Display current PATH:** + ```bash + echo $PATH | tr ':' '\n' + ``` + +2. **Check which paths actually exist:** + ```bash + echo $PATH | tr ':' '\n' | while read p; do + if [ -d "$p" ]; then + echo "βœ“ $p" + else + echo "βœ— $p (does not exist)" + fi + done + ``` + +3. **Check for duplicate PATH entries:** + ```bash + echo $PATH | tr ':' '\n' | sort | uniq -d + ``` + +4. **Identify where PATH is being set:** + Check common locations: + ```bash + grep -n "PATH" ~/.bashrc ~/.bash_profile ~/.profile /etc/environment /etc/profile 2>/dev/null + ``` + +5. 
**Check for common development tool paths:** + + **Programming languages:** + - Python user packages: `~/.local/bin` + - Rust cargo: `~/.cargo/bin` + - Go: `~/go/bin` or `$GOPATH/bin` + - Ruby gems: Check with `gem environment` + - Node/npm: Check with `npm config get prefix` + + **Package managers:** + - Homebrew: `/home/linuxbrew/.linuxbrew/bin` + - SDKMAN: `~/.sdkman/candidates/*/current/bin` + - pipx: `~/.local/bin` + + **Version managers:** + - pyenv: `~/.pyenv/bin` + - rbenv: `~/.rbenv/bin` + - nvm: (check ~/.nvm/) + - asdf: `~/.asdf/bin` + + **System tools:** + - User binaries: `~/bin`, `~/.local/bin` + - Snap: `/snap/bin` + - Flatpak: `/var/lib/flatpak/exports/bin` + +6. **Check what's installed in each PATH directory:** + For each directory in PATH: + ```bash + echo "Contents of $dir:" + ls -la "$dir" | head -10 + ``` + +7. **Suggest missing common paths:** + Check and suggest if not in PATH: + + - `~/.local/bin` (Python user packages, pipx) + - `~/bin` (User scripts) + - `~/.cargo/bin` (Rust packages) + - `~/go/bin` (Go packages) + - `/snap/bin` (Snap packages) + - `~/.npm-global/bin` (npm global packages) + + For each missing path that has executables, suggest adding it. + +8. **Check for security issues:** + - Warn if `.` (current directory) is in PATH + - Warn if world-writable directories are in PATH: + ```bash + echo $PATH | tr ':' '\n' | while read p; do + if [ -d "$p" ] && [ -w "$p" ]; then + ls -ld "$p" + fi + done + ``` + +9. **Check PATH order/precedence:** + Explain that earlier paths take precedence. + Show which binary would be executed: + ```bash + which -a python python3 java gcc git node npm + ``` + +10. **Check for conflicting tools:** + ```bash + type -a python + type -a python3 + type -a java + ``` + +11. **Suggest PATH organization:** + Recommended order: + 1. User binaries (`~/bin`, `~/.local/bin`) + 2. Version managers (pyenv, rbenv, nvm) + 3. Language-specific paths (cargo, go) + 4. Homebrew + 5. 
System binaries (`/usr/local/bin`, `/usr/bin`, `/bin`) + +12. **Check environment-specific paths:** + + **Python:** + ```bash + python3 -m site --user-base + # Suggests adding $(python3 -m site --user-base)/bin + ``` + + **Node/npm:** + ```bash + npm config get prefix + # Suggests adding /bin + ``` + + **Go:** + ```bash + go env GOPATH + # Suggests adding $GOPATH/bin + ``` + + **Rust:** + ```bash + echo $CARGO_HOME + # Suggests adding ~/.cargo/bin + ``` + +13. **Generate suggested PATH setup:** + Based on findings, create suggested additions for ~/.bashrc: + + ```bash + # User binaries + export PATH="$HOME/bin:$PATH" + export PATH="$HOME/.local/bin:$PATH" + + # Python + export PATH="$HOME/.local/bin:$PATH" + + # Rust + export PATH="$HOME/.cargo/bin:$PATH" + + # Go + export PATH="$HOME/go/bin:$PATH" + + # SDKMAN + # Added by sdkman-init.sh + + # pyenv + export PYENV_ROOT="$HOME/.pyenv" + export PATH="$PYENV_ROOT/bin:$PATH" + eval "$(pyenv init --path)" + + # Homebrew + eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)" + ``` + +14. **Check for broken symlinks in PATH:** + ```bash + echo $PATH | tr ':' '\n' | while read dir; do + if [ -d "$dir" ]; then + find "$dir" -maxdepth 1 -type l ! -exec test -e {} \; -print 2>/dev/null + fi + done + ``` + +15. **Provide recommendations:** + - Remove non-existent directories from PATH + - Add missing common paths that have executables + - Fix duplicate entries + - Correct PATH order if needed + - Remove security issues (`.` in PATH, world-writable dirs) + - Consolidate PATH modifications into one file (prefer ~/.bashrc) + - Document what each PATH addition is for + +16. **Show how to temporarily modify PATH:** + ```bash + # Add to front (takes precedence) + export PATH="/new/path:$PATH" + + # Add to end + export PATH="$PATH:/new/path" + + # Remove from PATH + export PATH=$(echo $PATH | tr ':' '\n' | grep -v "/path/to/remove" | tr '\n' ':') + ``` + +17. 
**Show how to make PATH changes permanent:** + ```bash + echo 'export PATH="$HOME/bin:$PATH"' >> ~/.bashrc + source ~/.bashrc + ``` + +## Important notes: +- Changes to PATH only affect current shell unless made permanent +- Order matters - earlier paths have precedence +- Don't add current directory (`.`) to PATH +- Use absolute paths when possible +- Source ~/.bashrc after changes: `source ~/.bashrc` +- Some tools (pyenv, conda, nvm) modify PATH dynamically +- Check for PATH modifications in multiple files diff --git a/commands/sysadmin/linux-desktop/configuration/list-ssh-connections.md b/commands/sysadmin/linux-desktop/configuration/list-ssh-connections.md new file mode 100644 index 0000000000000000000000000000000000000000..a4bb9687336d6ae6e5ed8dca5357e97d09e58c8d --- /dev/null +++ b/commands/sysadmin/linux-desktop/configuration/list-ssh-connections.md @@ -0,0 +1,91 @@ +--- +description: Review which SSH connection names/hosts the user has configured +tags: [ssh, configuration, hosts, network, project, gitignored] +--- + +You are helping the user review their SSH connection configurations. + +## Process + +1. **Check if SSH config exists** + - Look for: `~/.ssh/config` + - If not found, offer to create one + +2. **Parse SSH config file** + - Read `~/.ssh/config` + - Extract Host entries + - For each host, show: + - Host alias + - HostName (IP/domain) + - User + - Port + - IdentityFile (SSH key) + - Other options + +3. **Display in organized format** + - Present as table or list: + ``` + Alias: server1 + HostName: 192.168.1.100 + User: admin + Port: 22 + Key: ~/.ssh/id_rsa + --- + ``` + +4. **Check system-wide SSH config** + - Also check `/etc/ssh/ssh_config` for global settings + - Note any system-wide host configurations + +5. **Test connectivity (optional)** + - Ask if user wants to test connections + - For each host: + ```bash + ssh -T user@host + # or + ssh -o ConnectTimeout=5 user@host "echo Connection successful" + ``` + +6. 
**Identify stale connections** + - Look for connections to: + - IPs that might have changed + - Servers that may no longer exist + - Old project servers + +7. **Suggest config improvements** + - Recommend useful SSH config options: + ``` + Host * + ServerAliveInterval 60 + ServerAliveCountMax 3 + TCPKeepAlive yes + ControlMaster auto + ControlPath ~/.ssh/sockets/%r@%h-%p + ControlPersist 600 + ``` + +8. **Offer to create new entries** + - If user wants to add new SSH hosts + - Template: + ``` + Host shortname + HostName hostname.com + User username + Port 22 + IdentityFile ~/.ssh/id_ed25519 + ForwardAgent yes + ``` + +9. **Security check** + - Verify file permissions: `chmod 600 ~/.ssh/config` + - Look for insecure settings (password auth, etc.) + +## Output + +Provide a summary showing: +- List of configured SSH hosts +- Connection details for each +- Stale/inactive connections (if identified) +- Connectivity test results (if performed) +- Suggested improvements +- New entries added (if any) diff --git a/commands/sysadmin/linux-desktop/configuration/manage-api-keys.md b/commands/sysadmin/linux-desktop/configuration/manage-api-keys.md new file mode 100644 index 0000000000000000000000000000000000000000..79d21d88942ebba05d0f023e6f833ca28c410210 --- /dev/null +++ b/commands/sysadmin/linux-desktop/configuration/manage-api-keys.md @@ -0,0 +1,101 @@ +--- +description: Review API keys on PATH and add new ones if requested +tags: [api, keys, environment, configuration, project, gitignored] +--- + +You are helping the user manage their API keys and environment variables. + +## Process + +1. **Check for API keys in environment** + - List environment variables: `env | grep -E "API|KEY|TOKEN"` + - Check common locations: + - `~/.bashrc` + - `~/.zshrc` + - `~/.profile` + - `~/.env` + - Project-specific `.env` files + +2. **Display current API keys (safely)** + - Show key names and partial values (mask full keys) + - Example: `OPENAI_API_KEY=sk-*********************` + +3. 
**Common API keys to check for** + - OpenAI API + - Anthropic API (Claude) + - OpenRouter API + - Hugging Face token + - GitHub token + - Google Cloud API + - AWS credentials + - Azure credentials + - Database connection strings + +4. **Add new API keys** + - Ask user which API keys they want to add + - For each key: + - Key name (e.g., `OPENAI_API_KEY`) + - Key value (handle securely) + - Scope (global, project-specific, etc.) + +5. **Choose storage location** + + **Option 1: Shell config (global)** + - Add to `~/.bashrc` or `~/.zshrc`: + ```bash + export OPENAI_API_KEY="sk-..." + export ANTHROPIC_API_KEY="sk-..." + ``` + - Reload: `source ~/.bashrc` + + **Option 2: .env file (project-specific)** + - Create/update `.env` file + - Add to `.gitignore` + - Use with dotenv library + + **Option 3: Secret manager** + - Suggest using `pass`, `gnome-keyring`, or similar + - More secure for sensitive keys + +6. **Set appropriate permissions** + - For files containing keys: + ```bash + chmod 600 ~/.env + chmod 600 ~/.bashrc + ``` + +7. **Test API keys** + - Offer to test each key (if user wants) + - Example for OpenAI: + ```bash + curl https://api.openai.com/v1/models \ + -H "Authorization: Bearer $OPENAI_API_KEY" \ + | jq . + ``` + +8. **Security recommendations** + - REFRAIN from providing unsolicited security advice + - Only mention if asked: + - Don't commit keys to git + - Use `.gitignore` for `.env` files + - Rotate keys periodically + - Use environment-specific keys (dev, prod) + +9. 
**Create helper script (optional)** + - Offer to create script to load environment: + ```bash + #!/bin/bash + # load-env.sh + if [ -f .env ]; then + export $(cat .env | xargs) + fi + ``` + +## Output + +Provide a summary showing: +- Currently configured API keys (names only, values masked) +- New API keys added +- Storage location for each key +- Test results (if performed) +- Next steps for using the keys diff --git a/commands/sysadmin/linux-desktop/debugging/check-boot-logs.md b/commands/sysadmin/linux-desktop/debugging/check-boot-logs.md new file mode 100644 index 0000000000000000000000000000000000000000..1ff78826a9b03f5ee68e4410ffaeb119d942931e --- /dev/null +++ b/commands/sysadmin/linux-desktop/debugging/check-boot-logs.md @@ -0,0 +1,43 @@ +You are analyzing system boot logs to identify failures and issues. + +## Your Task + +1. **Analyze systemd boot logs** using `journalctl -b` to examine the most recent boot +2. **Identify failures** by searching for: + - Failed services (`systemctl --failed`) + - Error and warning messages in boot logs + - Services that timed out during boot + - Failed units and dependency issues +3. **Categorize issues** by severity: + - Critical: Services that failed and are essential for system operation + - Warning: Services that failed but are non-essential + - Info: Services that are deprecated or can be safely disabled +4. **Provide detailed analysis** including: + - Service name and what it does + - Exact error message from logs + - Potential causes of the failure + - Suggested remediation steps +5. 
**Suggest cleanup actions** for: + - Deprecated services that can be disabled + - Unnecessary services slowing down boot + - Configuration fixes for failed services + +## Commands to Use + +- `journalctl -b -p err` - Show errors from current boot +- `journalctl -b -p warning` - Show warnings from current boot +- `systemctl --failed` - List failed units +- `systemctl list-units --state=failed` - Detailed failed units +- `journalctl -u <service-name>` - Check specific service logs +- `systemd-analyze critical-chain` - Show boot time-critical chain + +## Output Format + +Present findings in a clear, organized manner: +1. Summary of boot health +2. Critical failures requiring immediate attention +3. Warnings and non-critical issues +4. Recommendations for cleanup and optimization +5. Specific commands to fix identified issues + +Be thorough but concise. Focus on actionable insights. diff --git a/commands/sysadmin/linux-desktop/debugging/diagnose-crash.md b/commands/sysadmin/linux-desktop/debugging/diagnose-crash.md new file mode 100644 index 0000000000000000000000000000000000000000..cd948b5abb83542ef6cbe4de523bc6a933523815 --- /dev/null +++ b/commands/sysadmin/linux-desktop/debugging/diagnose-crash.md @@ -0,0 +1,162 @@ +# Diagnose Program Crash + +You are helping the user diagnose a recent program crash. Ask for additional context from the user, but start by checking obvious places in the logs. + +## Your tasks: + +1. **Gather information from the user:** + Ask the user: + - What program crashed? + - When did it crash (approximate time)? + - What were they doing when it crashed? + - Does it crash consistently or intermittently? + - Any error messages displayed? + +2. 
**Check system journal for crash information:** + - Recent errors: `sudo journalctl -p err -b` + - Last 100 log entries: `sudo journalctl -n 100 --no-pager` + - If user knows approximate time: + ```bash + sudo journalctl --since "10 minutes ago" -p warning + sudo journalctl --since "YYYY-MM-DD HH:MM:SS" + ``` + - Search for specific program name: + ```bash + sudo journalctl -b | grep -i "" + ``` + +3. **Check kernel logs (dmesg):** + - Recent kernel messages: `dmesg | tail -100` + - Look for segmentation faults, OOM kills, kernel panics + - Search for program name: `dmesg | grep -i ""` + - Check for OOM (Out of Memory) kills: + ```bash + dmesg | grep -i "killed process" + sudo journalctl -k | grep -i "out of memory" + ``` + +4. **Check for core dumps:** + - Check if core dumps are enabled: `ulimit -c` + - System core dump pattern: `cat /proc/sys/kernel/core_pattern` + - Look for core dumps: + ```bash + find /var/lib/systemd/coredump -name "**" -mtime -1 + find . -name "core*" -mtime -1 + ``` + - If systemd-coredump is used: + ```bash + coredumpctl list + coredumpctl info + coredumpctl dump -o /tmp/core.dump + ``` + +5. **Check application-specific logs:** + - User logs: `~/.local/share/applications/` + - Xsession errors: `cat ~/.xsession-errors` + - Application cache: `~/.cache//` + - Application config: `~/.config//` + - System logs: `/var/log/` + - Application-specific locations: + - Web browsers: `~/.mozilla/`, `~/.config/google-chrome/` + - Snaps: `snap logs ` + - Flatpaks: `flatpak logs` + +6. **Check crash reporter systems:** + - Ubuntu Apport crashes: + ```bash + ls -lt /var/crash/ + ubuntu-bug # to file bug report + ``` + - GNOME crash reports (if applicable): `~/.local/share/gnome-shell/` + - KDE crash reports: `~/.local/share/drkonqi/` + +7. 
**Check resource issues at crash time:** + - Memory usage: `free -h` + - Disk space: `df -h` + - Check if system was swapping heavily before crash + - Recent high memory usage: `sudo journalctl | grep -i "out of memory"` + +8. **Check for dependency issues:** + - Missing libraries: + ```bash + ldd $(which ) + ``` + - Check if program still exists and is executable: + ```bash + which + ls -la $(which ) + ``` + - Version information: ` --version` + +9. **Check for recent system changes:** + - Recent package updates: `grep " install \| upgrade " /var/log/dpkg.log | tail -50` + - Recent system updates: `grep "upgrade" /var/log/apt/history.log | tail -20` + - Kernel changes: `ls -lt /boot/vmlinuz-*` + +10. **Graphics/display-related crashes:** + If GUI application: + - X server errors: `grep -i "error\|segfault" /var/log/Xorg.0.log` + - Wayland compositor logs: `journalctl -b | grep -i "kwin\|wayland"` + - GPU issues: + ```bash + nvidia-smi # for NVIDIA + dmesg | grep -i "gpu\|nvidia\|amdgpu\|radeon" + ``` + +11. **Check for known issues:** + - Search package bug tracker: `ubuntu-bug --package ` + - Check if issue is reproducible + - Check program's GitHub issues (if applicable) + +12. **Analyze crash signatures:** + Look for common crash indicators: + - **Segmentation fault (SIGSEGV)**: Memory access violation + - **SIGABRT**: Program called abort() + - **SIGILL**: Illegal instruction + - **SIGBUS**: Bus error + - **SIGFPE**: Floating point exception + - **OOM Killer**: Process killed due to out of memory + - **Stack trace**: If available in logs + +13. **Try to reproduce the crash:** + If possible, guide user to: + - Run program from terminal to see error output: + ```bash + 2>&1 | tee /tmp/program-output.log + ``` + - Run with debug logging (if supported): + ```bash + --debug + --verbose + ``` + - Check environment variables that might affect behavior + +14. 
**Report findings:** + Summarize: + - Probable cause of crash (if identified) + - Relevant log entries + - Any error messages or stack traces found + - Resource issues if any + - Recent system changes that might be related + +15. **Provide recommendations:** + Based on findings, suggest: + - **If OOM kill**: Reduce memory usage, close other programs, add more RAM/swap + - **If segfault**: Check for updates, try reinstalling program, report bug + - **If dependency issue**: Reinstall program and dependencies + - **If config issue**: Reset configuration, move config to backup + - **If disk full**: Free up disk space + - **If recent update**: Consider downgrading or wait for fix + - **If reproducible**: Enable debug mode, create bug report with steps + - **If GPU-related**: Update drivers, check GPU health + - How to enable better crash reporting (core dumps, apport) + - Consider running program under debugger (gdb) if user is technical + +## Important notes: +- Use sudo for system logs and journal access +- Times in logs are usually in UTC - account for timezone +- Be sensitive that crashes can be frustrating for users +- Some log files can be very large - use grep and tail to filter +- Core dumps can be very large - ask before extracting +- Privacy: don't ask to see sensitive information from logs +- If no obvious cause is found, explain what additional info would help diff --git a/commands/sysadmin/linux-desktop/debugging/diagnose-slowdown.md b/commands/sysadmin/linux-desktop/debugging/diagnose-slowdown.md new file mode 100644 index 0000000000000000000000000000000000000000..d6538e7293fa2b25c7d9efc07f3c5b4e397e1c75 --- /dev/null +++ b/commands/sysadmin/linux-desktop/debugging/diagnose-slowdown.md @@ -0,0 +1,134 @@ +# Diagnose System Slowdown + +You are helping the user diagnose system sluggishness and performance issues. + +## Your tasks: + +1. **Gather initial information:** + Ask the user: + - When did the slowdown start? + - Is it constant or intermittent? 
+ - What activities trigger it? (startup, specific applications, general use) + - Any recent changes? (updates, new software, configuration changes) + +2. **Check current system load:** + - System load averages: `uptime` + - Detailed load info: `w` + - Number of processes: `ps aux | wc -l` + +3. **CPU analysis:** + - Real-time CPU usage: `top -b -n 1 | head -20` + - Per-core CPU usage: `mpstat -P ALL 1 1` (if sysstat installed) + - Top CPU consumers: `ps aux --sort=-%cpu | head -20` + - CPU frequency and throttling: + ```bash + cat /proc/cpuinfo | grep MHz + sudo cpupower frequency-info # if available + ``` + - Check for thermal throttling: + ```bash + sensors # if lm-sensors installed + cat /sys/class/thermal/thermal_zone*/temp + ``` + +4. **Memory analysis:** + - Memory usage: `free -h` + - Detailed memory info: `cat /proc/meminfo` + - Swap usage: `swapon --show` + - Top memory consumers: `ps aux --sort=-%mem | head -20` + - Check for memory leaks or runaway processes + - OOM (Out of Memory) events: `sudo journalctl -k | grep -i "out of memory"` + +5. **Disk I/O analysis:** + - Disk usage: `df -h` + - Inode usage: `df -i` + - I/O statistics: `iostat -x 1 5` (if sysstat installed) + - Top I/O processes: `sudo iotop -b -n 1 | head -20` (if iotop installed) + - Check for high disk wait: `top` and look at `wa` (wait) percentage + - Disk health: `sudo smartctl -H /dev/sda` (for each drive) + +6. **Process analysis:** + - List all running processes: `ps aux` + - Process tree: `pstree -p` + - Zombie processes: `ps aux | grep Z` + - Processes in D state (uninterruptible sleep): `ps aux | grep " D "` + - Long-running processes: `ps -eo pid,user,start,time,cmd --sort=-time | head -20` + +7. **Check for system resource contention:** + - Context switches: `vmstat 1 5` + - Interrupts: `cat /proc/interrupts` + - Check if system is swapping heavily: `vmstat 1 5` (look at si/so columns) + +8. 
**Network issues (can cause perceived slowness):** + - Network connections: `ss -s` + - Active connections: `netstat -tunap | wc -l` or `ss -tunap | wc -l` + - DNS resolution test: `time nslookup google.com` + - Check for network errors: `ip -s link` + +9. **Graphics/Desktop environment (for GUI slowness):** + - Check X server or Wayland compositor CPU usage + - GPU usage (if nvidia): `nvidia-smi` or `watch -n 1 nvidia-smi` + - For AMD: `radeontop` (if installed) + - Check compositor settings (KDE Plasma on Wayland) + - Desktop effects CPU usage + +10. **Check system logs for errors:** + - Recent errors: `sudo journalctl -p err -b` + - Kernel messages: `dmesg | tail -50` + - System log: `sudo journalctl -xe --no-pager | tail -100` + - Look for specific issues: + - Hardware errors + - Driver issues + - Service failures + - Filesystem errors + +11. **Check for background services/processes:** + - List all services: `systemctl list-units --type=service --state=running` + - Failed services: `systemctl --failed` + - Check for update managers, indexing services (updatedb, baloo, tracker) + - Snap services: `snap list` and check for snap updates + - Flatpak: `flatpak list` + +12. **Application-specific checks:** + If slowness is application-specific: + - Browser: check extensions, tabs, cache size + - Database: check for long-running queries + - IDE: check for indexing, plugins + - Check application logs: `~/.local/share/applications/` or specific app log locations + +13. **Historical data (if available):** + - Check sar data: `sar -u` (if sysstat/sar configured) + - Check historical logs: `sudo journalctl --since "1 day ago" -p err` + +14. 
**Analyze and report findings:** + Categorize issues found: + - **CPU bottleneck**: High CPU usage, identify culprit processes + - **Memory bottleneck**: High memory usage, swapping, suggest adding RAM or killing processes + - **Disk I/O bottleneck**: High wait times, slow disk, suggest SSD upgrade or I/O optimization + - **Thermal throttling**: High temperatures causing CPU slowdown + - **Runaway processes**: Specific process consuming excessive resources + - **Resource leaks**: Memory or handle leaks in specific applications + - **Background tasks**: Indexing, updates, backups running + - **Network issues**: DNS problems, slow network affecting system + +15. **Provide recommendations:** + Based on findings, suggest: + - Kill or restart specific problematic processes + - Disable unnecessary services + - Adjust swappiness: `sudo sysctl vm.swappiness=10` + - Clean up disk space if low + - Update or reinstall problematic drivers + - Install missing performance tools (sysstat, iotop, htop) + - Schedule resource-intensive tasks for off-hours + - Hardware upgrades (RAM, SSD) if appropriate + - Investigate and fix application-specific issues + - Check for and apply system updates + - Reboot if system has been up for extended period with memory leaks + +## Important notes: +- Install missing diagnostic tools if needed (sysstat, iotop, htop, lm-sensors) +- Use sudo for system-level diagnostics +- Be systematic - check CPU, memory, disk, and network in order +- Correlate findings with user's description of when slowness occurs +- Don't immediately kill processes - confirm with user first +- Consider both hardware and software causes diff --git a/commands/sysadmin/linux-desktop/debugging/permissions/debug-folder-permissions.md b/commands/sysadmin/linux-desktop/debugging/permissions/debug-folder-permissions.md new file mode 100644 index 0000000000000000000000000000000000000000..9a20fa96798d7a006d68244c811f39951bcbbff5 --- /dev/null +++ 
b/commands/sysadmin/linux-desktop/debugging/permissions/debug-folder-permissions.md @@ -0,0 +1,234 @@ +# Debug System Folder Permissions + +You are helping the user debug systemwide folder permissions and ensure they are set appropriately. + +## Your tasks: + +1. **Gather information from user:** + Ask: + - Are they experiencing specific permission errors? + - Which directories or operations are affected? + - What user/group should have access? + +2. **Check common system directories:** + + **Root filesystem:** + ```bash + ls -ld / + # Should be: drwxr-xr-x root root + ``` + + **Essential system directories:** + ```bash + ls -ld /bin /sbin /usr /usr/bin /usr/sbin /lib /lib64 + # Should be: drwxr-xr-x root root + ``` + + **Variable data:** + ```bash + ls -ld /var /var/log /var/tmp + # /var: drwxr-xr-x root root + # /var/log: drwxrwxr-x root syslog (or root root) + # /var/tmp: drwxrwxrwt root root (sticky bit) + ``` + + **Temporary directories:** + ```bash + ls -ld /tmp + # Should be: drwxrwxrwt root root (sticky bit important!) + ``` + + **Home directories:** + ```bash + ls -ld /home /home/$USER + # /home: drwxr-xr-x root root + # /home/$USER: drwxr-xr-x $USER $USER (or drwx------ for privacy) + ``` + +3. **Check for permission issues:** + + **World-writable directories without sticky bit (security risk):** + ```bash + sudo find / -type d -perm -0002 ! -perm -1000 2>/dev/null + ``` + + **Files with SUID bit (potential security issue if unexpected):** + ```bash + sudo find / -type f -perm -4000 2>/dev/null + ``` + + **Files with SGID bit:** + ```bash + sudo find / -type f -perm -2000 2>/dev/null + ``` + +4. 
**Check /etc permissions:** + ```bash + ls -la /etc | head -20 + # /etc itself: drwxr-xr-x root root + # Most files should be 644 (rw-r--r--) + # Some may be 640 or 600 for security + ``` + + **Sensitive files:** + ```bash + ls -l /etc/shadow /etc/gshadow /etc/ssh/sshd_config + # /etc/shadow: -rw-r----- root shadow + # /etc/ssh/sshd_config: -rw-r--r-- root root + ``` + +5. **Check user home directory structure:** + ```bash + ls -la ~/ | grep "^d" + ``` + + Common directories and recommended permissions: + - `~/.ssh`: 700 (drwx------) + - `~/.ssh/id_rsa`: 600 (-rw-------) + - `~/.ssh/id_rsa.pub`: 644 (-rw-r--r--) + - `~/.ssh/authorized_keys`: 600 (-rw-------) + - `~/.gnupg`: 700 (drwx------) + - `~/bin`: 755 (drwxr-xr-x) + - `~/.local`: 755 (drwxr-xr-x) + - `~/.config`: 755 (drwxr-xr-x) + +6. **Check /opt and /usr/local:** + ```bash + ls -ld /opt /usr/local /usr/local/bin + # Typically: drwxr-xr-x root root + # But may be group-writable for admin group + ``` + +7. **Check mount points:** + ```bash + mount | grep "^/" | awk '{print $3}' | while read mp; do + ls -ld "$mp" + done + ``` + +8. **Check ownership of user files:** + Find files in home directory not owned by user: + ```bash + find ~/ -not -user $USER 2>/dev/null + ``` + +9. **Check group memberships:** + ```bash + groups + id + ``` + + Common groups users might need: + - `sudo` - for administrative access + - `docker` - for Docker access + - `video` - for video devices + - `audio` - for audio devices + - `plugdev` - for removable devices + - `dialout` - for serial ports + +10. 
**Fix common issues:** + + **Fix sticky bit on /tmp:** + ```bash + sudo chmod 1777 /tmp + ``` + + **Fix ~/.ssh permissions:** + ```bash + chmod 700 ~/.ssh + chmod 600 ~/.ssh/id_rsa + chmod 644 ~/.ssh/id_rsa.pub + chmod 600 ~/.ssh/authorized_keys + chmod 600 ~/.ssh/config + ``` + + **Fix ownership of home directory:** + ```bash + sudo chown -R $USER:$USER ~/ + ``` + + **Fix common directories:** + ```bash + chmod 755 ~/.local ~/.config ~/bin + ``` + +11. **Check for ACL (Access Control Lists):** + ```bash + getfacl /path/to/directory + ``` + + If ACLs are in use (indicated by `+` in ls -l): + ```bash + ls -la | grep "+" + ``` + +12. **Check SELinux context (if enabled):** + ```bash + getenforce + ls -Z /path/to/directory + ``` + +13. **Check for immutable flags:** + ```bash + lsattr /path/to/file + ``` + + If files have `i` flag, they can't be modified even by root: + ```bash + sudo chattr -i /path/to/file + ``` + +14. **Specific directory recommendations:** + + **/var/www (web server):** + ```bash + sudo chown -R www-data:www-data /var/www + sudo find /var/www -type d -exec chmod 755 {} \; + sudo find /var/www -type f -exec chmod 644 {} \; + ``` + + **/srv (service data):** + ```bash + sudo chown -R root:root /srv + sudo chmod 755 /srv + ``` + + **Shared directories:** + ```bash + sudo chown root:groupname /shared/directory + sudo chmod 2775 /shared/directory # SGID bit for group + ``` + +15. **Check logs for permission denials:** + ```bash + sudo journalctl -p err | grep -i "permission denied" + dmesg | grep -i "permission denied" + sudo grep "permission denied" /var/log/syslog + ``` + +16. **Report findings:** + Summarize: + - Incorrect permissions on system directories + - Security issues (world-writable without sticky, unexpected SUID) + - User home directory issues + - Files/directories with wrong ownership + - Missing group memberships + - ACL or SELinux issues + +17. 
**Provide recommendations:** + - Fix commands for identified issues + - Whether to add user to specific groups + - Security improvements for sensitive directories + - Standard permission schemes for common directories + - Whether to use ACLs for complex permission needs + +## Important notes: +- Always backup or test in safe environment first +- Changing system permissions incorrectly can break the system +- Use sudo carefully when fixing permissions +- Don't recursively chmod/chown system directories without understanding +- Some non-standard permissions may be intentional +- Check application documentation for required permissions +- SELinux/AppArmor may also affect access beyond traditional permissions +- Sticky bit on /tmp is critical for security +- SUID/SGID bits on unexpected files are security risks diff --git a/commands/sysadmin/linux-desktop/development-tools/optimize-vscode-installation.md b/commands/sysadmin/linux-desktop/development-tools/optimize-vscode-installation.md new file mode 100644 index 0000000000000000000000000000000000000000..91528d07b9b76e2eee4756455905378530ff3469 --- /dev/null +++ b/commands/sysadmin/linux-desktop/development-tools/optimize-vscode-installation.md @@ -0,0 +1,131 @@ +--- +description: Evaluate VS Code installation and suggest optimizations like repo source changes +tags: [vscode, development, optimization, configuration, project, gitignored] +--- + +You are helping the user optimize their VS Code installation. + +## Process + +1. **Check how VS Code is installed** + ```bash + which code + dpkg -l | grep code + snap list | grep code + flatpak list | grep code + ``` + - Identify installation method: apt, snap, flatpak, manual + +2. **Check VS Code version** + ```bash + code --version + ``` + - Compare with latest version + - Check if updates are available + +3. 
**Evaluate current installation method** + + **APT (official repo) - Recommended:** + - Pros: Native integration, automatic updates, best performance + - Cons: Requires adding Microsoft repo + + **Snap:** + - Pros: Easy install, sandboxed + - Cons: Slower startup, snap overhead, potential issues with extensions + + **Flatpak:** + - Pros: Sandboxed, cross-distro + - Cons: Some filesystem access limitations + + **Manual .deb:** + - Pros: Control over updates + - Cons: Manual update process + +4. **Suggest migration if needed** + + **If installed via Snap, suggest migrating to APT:** + ```bash + # Remove snap version + sudo snap remove code + + # Add official Microsoft repo + wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg + sudo install -o root -g root -m 644 packages.microsoft.gpg /etc/apt/trusted.gpg.d/ + sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list' + + # Install via apt + sudo apt update && sudo apt install code + ``` + + **If privacy-conscious, suggest VSCodium:** + ```bash + flatpak install flathub com.vscodium.codium + ``` + +5. **Check VS Code configuration** + - Review settings: `~/.config/Code/User/settings.json` + - Check for optimization opportunities: + - Telemetry settings + - Auto-save + - File watcher limits + - Extension recommendations + +6. **Optimize performance settings** + Suggest adding to settings.json: + ```json + { + "files.watcherExclude": { + "**/.git/objects/**": true, + "**/node_modules/**": true, + "**/.venv/**": true + }, + "files.exclude": { + "**/__pycache__": true, + "**/.pytest_cache": true + }, + "search.exclude": { + "**/node_modules": true, + "**/venv": true + }, + "telemetry.telemetryLevel": "off" + } + ``` + +7. **Check installed extensions** + ```bash + code --list-extensions + ``` + - Identify potentially redundant extensions + - Suggest disabling unused extensions for performance + +8. 
**Suggest useful extensions** + - Based on detected project types + - Common useful extensions: + - GitLens + - Prettier + - ESLint/Pylint + - Docker + - Remote-SSH + - Live Server (web dev) + +9. **Check for conflicts** + - Multiple VS Code installations + - Conflicting extensions + - Settings sync issues + +10. **Create backup of settings** + - Offer to backup: + - `~/.config/Code/User/settings.json` + - `~/.config/Code/User/keybindings.json` + - Extension list + +## Output + +Provide a report showing: +- Current installation method and version +- Recommended installation method +- Migration steps (if applicable) +- Performance optimization suggestions +- Extension recommendations +- Configuration backup status +- Next steps diff --git a/commands/sysadmin/linux-desktop/development-tools/setup-docker.md b/commands/sysadmin/linux-desktop/development-tools/setup-docker.md new file mode 100644 index 0000000000000000000000000000000000000000..788998c9b1594e76fbe0025b4b15d9803e25d0ef --- /dev/null +++ b/commands/sysadmin/linux-desktop/development-tools/setup-docker.md @@ -0,0 +1,258 @@ +# Check and Setup Docker + +You are helping the user check if Docker is configured and set it up if needed. + +## Your tasks: + +1. **Check if Docker is already installed:** + - Check Docker: `docker --version` + - Check Docker Compose: `docker-compose --version` or `docker compose version` + - Check Docker service: `systemctl status docker` + +2. **If Docker is installed, verify configuration:** + - Check Docker info: `docker info` + - Check user can run Docker: `docker ps` + - If permission denied, user needs to be added to docker group + - Check Docker storage driver and location + - Check Docker network configuration + +3. 
**If Docker is NOT installed, proceed with installation:** + + **Remove old versions:** + ```bash + sudo apt-get remove docker docker-engine docker.io containerd runc + ``` + + **Update and install prerequisites:** + ```bash + sudo apt-get update + sudo apt-get install ca-certificates curl gnupg lsb-release + ``` + + **Add Docker's official GPG key:** + ```bash + sudo mkdir -p /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg + ``` + + **Set up repository:** + ```bash + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \ + $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + ``` + + **Install Docker Engine:** + ```bash + sudo apt-get update + sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin + ``` + +4. **Post-installation setup:** + + **Enable Docker service:** + ```bash + sudo systemctl enable docker + sudo systemctl start docker + ``` + + **Add user to docker group:** + ```bash + sudo usermod -aG docker $USER + ``` + Then log out and back in, or run: `newgrp docker` + +5. **Verify Docker installation:** + ```bash + docker --version + docker run hello-world + docker ps + docker images + ``` + +6. **Install Docker Compose (if not included):** + Modern Docker includes Compose v2 as a plugin. + Check: `docker compose version` + + If needed, install standalone: + ```bash + sudo apt-get install docker-compose-plugin + ``` + +7. **Configure Docker daemon (optional):** + Edit `/etc/docker/daemon.json`: + + ```json + { + "log-driver": "json-file", + "log-opts": { + "max-size": "10m", + "max-file": "3" + }, + "storage-driver": "overlay2", + "dns": ["8.8.8.8", "8.8.4.4"] + } + ``` + + Then restart: `sudo systemctl restart docker` + +8. 
**Check Docker storage location:** + ```bash + docker info | grep "Docker Root Dir" + sudo du -sh /var/lib/docker + ``` + + If storage is on a small partition, consider changing location. + +9. **Configure storage location (if needed):** + In `/etc/docker/daemon.json`: + ```json + { + "data-root": "/new/path/to/docker" + } + ``` + + Then: + ```bash + sudo systemctl stop docker + sudo mv /var/lib/docker /new/path/to/docker + sudo systemctl start docker + ``` + +10. **Set up Docker networking:** + Check networks: + ```bash + docker network ls + ``` + + Create custom networks if needed: + ```bash + docker network create my-network + ``` + +11. **Configure resource limits (optional):** + For laptops/desktops, may want to limit resources: + In `/etc/docker/daemon.json`: + ```json + { + "default-ulimits": { + "nofile": { + "Name": "nofile", + "Hard": 64000, + "Soft": 64000 + } + } + } + ``` + +12. **Set up Docker Hub authentication (optional):** + ```bash + docker login + ``` + +13. **Test Docker functionality:** + Run various test commands: + ```bash + docker run hello-world + docker run -it ubuntu bash + docker ps -a + docker images + docker system info + ``` + +14. **Install useful Docker tools (optional):** + Ask user if they want: + - **Portainer** (Docker management UI) + - **ctop** (Container monitoring) + - **lazydocker** (Terminal UI for Docker) + + ```bash + # ctop + sudo wget -O /usr/local/bin/ctop https://github.com/bcicen/ctop/releases/download/v0.7.7/ctop-0.7.7-linux-amd64 + sudo chmod +x /usr/local/bin/ctop + ``` + +15. **Configure Docker logging:** + Check current logging: + ```bash + docker info | grep "Logging Driver" + ``` + + Configure in `/etc/docker/daemon.json`: + ```json + { + "log-driver": "json-file", + "log-opts": { + "max-size": "10m", + "max-file": "5", + "labels": "production" + } + } + ``` + +16. 
**Set up Docker cleanup:** + Suggest adding to crontab: + ```bash + # Clean up unused containers, images, networks weekly + 0 3 * * 0 docker system prune -af --volumes + ``` + + Or show manual cleanup: + ```bash + docker system prune -a + docker volume prune + docker network prune + ``` + +17. **Check for common issues:** + - Docker daemon not running: `sudo systemctl start docker` + - Permission denied: `sudo usermod -aG docker $USER` and re-login + - Storage full: `docker system df` and cleanup + - Network issues: Check DNS in daemon.json + - Firewall blocking: Check ufw/iptables + +18. **Provide best practices:** + - Don't run containers as root when possible + - Use Docker Compose for multi-container apps + - Tag images properly + - Clean up regularly with `docker system prune` + - Use .dockerignore files + - Monitor disk usage: `docker system df` + - Use specific image tags, not `latest` + - Scan images for vulnerabilities: `docker scan <image>` + - Keep Docker updated + - Use multi-stage builds to reduce image size + - Limit container resources in production + +19. **Show basic Docker commands:** + - `docker run <image>` - Run a container + - `docker ps` - List running containers + - `docker ps -a` - List all containers + - `docker images` - List images + - `docker pull <image>` - Pull an image + - `docker build -t <tag> .` - Build an image + - `docker exec -it <container> bash` - Enter container + - `docker logs <container>` - View logs + - `docker stop <container>` - Stop container + - `docker rm <container>` - Remove container + - `docker rmi <image>` - Remove image + - `docker compose up` - Start compose stack + - `docker system prune` - Clean up + +20. 
**Report findings:** + Summarize: + - Docker installation status + - Version information + - User permissions status + - Storage configuration + - Service status + - Any issues found + +## Important notes: +- User must log out and back in after being added to docker group +- Docker can use significant disk space - monitor it +- Don't run untrusted images +- Docker Desktop is different from Docker Engine (we're installing Engine) +- Rootless Docker is available for better security but more complex +- Docker Compose v2 is now a plugin (`docker compose` not `docker-compose`) +- Keep Docker updated for security patches diff --git a/commands/sysadmin/linux-desktop/development-tools/suggest-ides.md b/commands/sysadmin/linux-desktop/development-tools/suggest-ides.md new file mode 100644 index 0000000000000000000000000000000000000000..ca33d59263fb72757f98306ac78e8fadfe7146e1 --- /dev/null +++ b/commands/sysadmin/linux-desktop/development-tools/suggest-ides.md @@ -0,0 +1,115 @@ +--- +description: Suggest IDEs the user may wish to install +tags: [development, ide, editors, tools, project, gitignored] +--- + +You are helping the user identify useful IDEs and code editors to install. + +## Process + +1. **Check currently installed editors/IDEs** + ```bash + which code vim nvim nano emacs gedit kate + dpkg -l | grep -E "code|editor|ide" + flatpak list | grep -E "code|editor|ide" + ``` + +2. **Identify user's programming needs** + - Ask about programming languages used: + - Python + - JavaScript/TypeScript + - Java/Kotlin + - C/C++/Rust + - Go + - Web development + - Data science + - Mobile development + +3. 
**Suggest IDEs by category** + + **General Purpose (recommended):** + - **VS Code** - Most popular, extensive plugins + - **VSCodium** - VS Code without telemetry + - **JetBrains Fleet** - Modern, lightweight + - **Sublime Text** - Fast, elegant + - **Atom** (deprecated, suggest alternatives) + + **Language-Specific:** + - **PyCharm** - Python (Community/Professional) + - **IntelliJ IDEA** - Java/Kotlin + - **WebStorm** - JavaScript/TypeScript + - **RustRover** - Rust + - **GoLand** - Go + - **Android Studio** - Android development + + **Lightweight Editors:** + - **Neovim** - Modern Vim + - **Helix** - Modern modal editor + - **Micro** - Terminal editor, easy to use + - **Geany** - GTK editor with IDE features + + **Data Science:** + - **JupyterLab** - Notebooks + - **RStudio** - R development + - **Spyder** - Python for scientific computing + + **Web Development:** + - **Zed** - Collaborative, fast + - **Brackets** - Live preview + +4. **Installation methods** + + **VS Code:** + ```bash + # Official repo + wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg + sudo install -o root -g root -m 644 packages.microsoft.gpg /etc/apt/trusted.gpg.d/ + sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list' + sudo apt update && sudo apt install code + ``` + + **VSCodium:** + ```bash + flatpak install flathub com.vscodium.codium + ``` + + **JetBrains Toolbox:** + ```bash + # Download from jetbrains.com/toolbox/ + # Or use snap: snap install jetbrains-toolbox --classic + ``` + + **Neovim:** + ```bash + sudo apt install neovim + ``` + +5. **Suggest based on current setup** + - If Python user: Suggest PyCharm + - If web dev: Suggest VS Code with extensions + - If systems programming: Suggest Neovim with LSP + - If prefer FOSS: Suggest VSCodium + +6. 
**Recommend extensions/plugins** + - For VS Code/VSCodium: + - Python + - Pylance + - GitLens + - Docker + - Remote SSH + - Prettier + - ESLint + +7. **Alternative: Check installed editors quality** + - Vim/Neovim configuration quality + - VS Code extension count + - Suggest improvements to existing setup + +## Output + +Provide a report showing: +- Currently installed editors/IDEs +- Recommended IDEs based on user's needs +- Installation commands for suggestions +- Extension/plugin recommendations +- Comparison of options (pros/cons) diff --git a/commands/sysadmin/linux-desktop/file-system-alias.md b/commands/sysadmin/linux-desktop/file-system-alias.md new file mode 100644 index 0000000000000000000000000000000000000000..06076f8cb88cb15ec85c6f4910b3fc8a78c3596f --- /dev/null +++ b/commands/sysadmin/linux-desktop/file-system-alias.md @@ -0,0 +1,11 @@ +Assist the user by helping them to add bash alias(es) to speed up navigating to a specific path on the filesystem via the terminal. + +The user will either provide the path(s) and desired aliases or ask you to help identify the path and/or suggest aliases. + +In either workflow, after clarifying the desired aliases and paths, proceed with the following: + +1) Create the new bash alias(es) in ~/.bash_aliases + +And if the user wishes (offer but do not do this without approval) + +2) Use `source ~/.bash_aliases` to reload the bash alias file and verify that the aliases are working \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/filesystem-organization/consolidate-folders.md b/commands/sysadmin/linux-desktop/filesystem-organization/consolidate-folders.md new file mode 100644 index 0000000000000000000000000000000000000000..68500730de2dfbb790df4f68e01d72a28d24dd67 --- /dev/null +++ b/commands/sysadmin/linux-desktop/filesystem-organization/consolidate-folders.md @@ -0,0 +1,46 @@ +You are helping Daniel consolidate redundant or overlapping folders into a more organized structure. + +## Your Task + +1. 
**Analyze subdirectories**: List all subdirectories in the current working directory +2. **Identify overlaps**: Look for: + - Folders with similar names or purposes + - Folders containing similar types of files + - Multiple folders that could logically be merged + - Poorly named folders that could be better organized +3. **Analyze contents**: For each folder, show: + - Number of files + - File types present + - Apparent purpose based on names and contents +4. **Propose consolidation plan**: Suggest: + - Which folders should be merged + - What the consolidated folder should be named + - Whether any subfolders should be created within the consolidated folder + - Any folders that should be renamed for clarity +5. **Present the plan**: Show Daniel: + - Current folder structure with file counts + - Proposed new structure + - Exactly which folders will be merged and where their contents will go +6. **Ask for confirmation**: Get Daniel's approval before making any changes +7. **Execute if approved**: + - Create new folder structure + - Move files from old folders to new locations + - Remove empty old folders + - Provide a summary of what was done + +## Guidelines + +- Look for semantic similarities, not just name matches +- Consider the logical grouping of content +- Suggest hierarchical structures where appropriate (parent/child folders) +- Be conservative - when in doubt, ask rather than assuming +- Preserve all files - never delete data +- Suggest meaningful, descriptive folder names + +## Safety + +- NEVER delete files, only move them +- If filename conflicts occur, append numeric suffixes +- Keep original folders until Daniel confirms they can be removed +- Ask before proceeding with moves and deletions +- Provide a detailed log of all changes made diff --git a/commands/sysadmin/linux-desktop/filesystem-organization/organize-loose-files.md b/commands/sysadmin/linux-desktop/filesystem-organization/organize-loose-files.md new file mode 100644 index 
0000000000000000000000000000000000000000..d68293f0a7caf31ebf0d4903f7e3ccbce5663450 --- /dev/null +++ b/commands/sysadmin/linux-desktop/filesystem-organization/organize-loose-files.md @@ -0,0 +1,32 @@ +You are helping Daniel organize loose files in a directory into a logical folder structure. + +## Your Task + +1. **Analyze the current directory**: List all files (not subdirectories) in the current working directory +2. **Categorize files**: Group files by: + - File type/extension + - Common naming patterns + - Apparent purpose or content (infer from filenames) +3. **Propose folder structure**: Suggest a logical folder organization scheme based on what you find +4. **Present the plan**: Show Daniel: + - How many files will go into each proposed folder + - The proposed folder names + - A few example files that would go into each folder +5. **Ask for confirmation**: Get Daniel's approval before making any changes +6. **Execute if approved**: Create the folders and move files accordingly + +## Guidelines + +- Be intelligent about categorization - don't just group by file extension +- Look for patterns in filenames (dates, projects, categories) +- Suggest meaningful folder names +- If there are many files of the same type doing different things, subcategorize them +- Always preserve file names when moving +- Create a summary report after completion showing what was organized + +## Safety + +- NEVER delete files +- NEVER overwrite existing files +- If a filename conflict occurs during move, append a number suffix +- Ask before proceeding with the actual file moves diff --git a/commands/sysadmin/linux-desktop/filesystem-organization/separate-by-filetype.md b/commands/sysadmin/linux-desktop/filesystem-organization/separate-by-filetype.md new file mode 100644 index 0000000000000000000000000000000000000000..b3bf260662b97d334d7e0911db6569ef21a195f4 --- /dev/null +++ b/commands/sysadmin/linux-desktop/filesystem-organization/separate-by-filetype.md @@ -0,0 +1,3 @@ +This folder 
contains media of different formats +Create subfolders for common media types - for example audio, photos, video +Move the corresponding files into the relevant folders, e.g. `mv *.mp4 ./video/` \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/filesystem-organization/separate-photos-and-video.md b/commands/sysadmin/linux-desktop/filesystem-organization/separate-photos-and-video.md new file mode 100644 index 0000000000000000000000000000000000000000..bc22d97736efe7212f29d6be105e6743413cb327 --- /dev/null +++ b/commands/sysadmin/linux-desktop/filesystem-organization/separate-photos-and-video.md @@ -0,0 +1,5 @@ +This folder contains a mixture of photos and video + +Create subfolders `./photos` and `./videos` + +Move photos into `photos` and videos into `videos` \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/filesystem-organization/suggest-folder-structure.md b/commands/sysadmin/linux-desktop/filesystem-organization/suggest-folder-structure.md new file mode 100644 index 0000000000000000000000000000000000000000..5be0c0c15614675b7fbec90c940eda4c9128b0d8 --- /dev/null +++ b/commands/sysadmin/linux-desktop/filesystem-organization/suggest-folder-structure.md @@ -0,0 +1,52 @@ +You are helping Daniel design an optimal folder structure for a directory before organizing files. + +## Your Task + +1. **Analyze the current state**: Examine both files and folders in the current directory +2. **Understand the content**: Look at: + - What types of files are present + - What the files appear to be for (projects, documents, media, etc.) + - Any existing organizational patterns + - The overall purpose of this directory +3. **Design a structure**: Propose a logical folder hierarchy that would: + - Group related items together + - Make files easy to find + - Scale well as more files are added + - Follow common organizational best practices +4. 
**Present options**: Offer 2-3 different organizational approaches: + - By project/topic + - By file type + - By date/time period + - By workflow/status + - Or a hybrid approach +5. **Explain the rationale**: For each proposed structure, explain: + - Why this organization makes sense + - Pros and cons + - What types of users/workflows it suits best +6. **Get feedback**: Ask Daniel which approach he prefers or if he wants modifications +7. **Provide implementation guidance**: Once approved, explain how to implement it (or offer to run /organize-loose-files or /consolidate-folders) + +## Guidelines + +- Consider Daniel's existing organizational patterns in other directories +- Suggest structures that are intuitive and maintainable +- Don't over-complicate - deeper isn't always better +- Consider future growth and scalability +- Use clear, descriptive folder names +- Suggest naming conventions if helpful + +## Example Structures to Consider + +- **Flat categorical**: Top-level folders by category (Documents, Images, Projects, etc.) 
+- **Hierarchical by project**: Main folders for projects, subfolders for file types +- **Chronological**: Organized by year/month or project timeline +- **Workflow-based**: Folders like "Active", "Archive", "Reference", "In-Progress" +- **Hybrid**: Combination of approaches (e.g., projects at top level, then by file type) + +## Output + +Present folder structure proposals using tree format for clarity, showing: +- Proposed folder names +- Purpose of each folder +- Example of what would go in each folder +- Estimated number of files that would go in each folder based on current contents diff --git a/commands/sysadmin/linux-desktop/git/check-git-config.md b/commands/sysadmin/linux-desktop/git/check-git-config.md new file mode 100644 index 0000000000000000000000000000000000000000..686714e85b804946c742216804320646c627a566 --- /dev/null +++ b/commands/sysadmin/linux-desktop/git/check-git-config.md @@ -0,0 +1,103 @@ +--- +description: Check user's basic git config and make any desired edits +tags: [git, configuration, settings, development, project, gitignored] +--- + +You are helping the user review and configure their git settings. + +## Process + +1. **Display current git configuration** + - Global config: `git config --global --list` + - Local config (if in repo): `git config --local --list` + - Show config file location: `git config --global --list --show-origin` + +2. **Check essential settings** + + **User identity:** + ```bash + git config --global user.name + git config --global user.email + ``` + - Verify these are set correctly + - If not set, ask user for values + + **Default editor:** + ```bash + git config --global core.editor + ``` + - Suggest: `nano`, `vim`, `code --wait`, etc. + + **Default branch name:** + ```bash + git config --global init.defaultBranch + ``` + - Recommend: `main` or `master` + +3. 
**Suggest useful configurations** + + **Color output:** + ```bash + git config --global color.ui auto + ``` + + **Credential helper:** + ```bash + git config --global credential.helper store + # or for cache: git config --global credential.helper 'cache --timeout=3600' + ``` + + **Push behavior:** + ```bash + git config --global push.default simple + git config --global push.autoSetupRemote true + ``` + + **Pull behavior:** + ```bash + git config --global pull.rebase false # merge (default) + # or: git config --global pull.rebase true # rebase + # or: git config --global pull.ff only # fast-forward only + ``` + + **Line endings:** + ```bash + git config --global core.autocrlf input # Linux/Mac + ``` + + **Diff and merge tools:** + ```bash + git config --global diff.tool meld + git config --global merge.tool meld + ``` + +4. **Aliases (optional but useful)** + Ask if user wants common aliases: + ```bash + git config --global alias.st status + git config --global alias.co checkout + git config --global alias.br branch + git config --global alias.ci commit + git config --global alias.unstage 'reset HEAD --' + git config --global alias.last 'log -1 HEAD' + git config --global alias.lg "log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --abbrev-commit" + ``` + +5. **GPG signing (optional)** + ```bash + git config --global commit.gpgsign true + git config --global user.signingkey + ``` + +6. 
**Show updated configuration** + - Display all global settings + - Highlight changes made + +## Output + +Provide a summary showing: +- Current git configuration +- Missing essential settings +- Recommended configurations +- Changes made (if any) +- Next steps or additional suggestions diff --git a/commands/sysadmin/linux-desktop/git/check-global-gitignore.md b/commands/sysadmin/linux-desktop/git/check-global-gitignore.md new file mode 100644 index 0000000000000000000000000000000000000000..db880d96f643c4da57b37e9988e4e388eed1bc94 --- /dev/null +++ b/commands/sysadmin/linux-desktop/git/check-global-gitignore.md @@ -0,0 +1,145 @@ +--- +description: Check if user has global gitignore and create one if not +tags: [git, configuration, gitignore, development, project, gitignored] +--- + +You are helping the user set up a global gitignore file. + +## Process + +1. **Check if global gitignore exists** + - Run: `git config --global core.excludesfile` + - Check common locations: + - `~/.gitignore_global` + - `~/.gitignore` + - `~/.config/git/ignore` + +2. **If global gitignore doesn't exist, create one** + - Choose location: `~/.gitignore_global` + - Configure git to use it: + ```bash + git config --global core.excludesfile ~/.gitignore_global + ``` + +3. 
**Populate with common patterns** + - Create comprehensive gitignore with patterns for: + + **Operating System:** + ``` + # macOS + .DS_Store + .AppleDouble + .LSOverride + + # Linux + *~ + .directory + .Trash-* + + # Windows + Thumbs.db + Desktop.ini + ``` + + **IDEs and Editors:** + ``` + # VS Code + .vscode/ + *.code-workspace + + # JetBrains + .idea/ + *.iml + + # Vim + *.swp + *.swo + *~ + + # Emacs + *~ + \#*\# + ``` + + **Languages and Frameworks:** + ``` + # Python + __pycache__/ + *.py[cod] + *$py.class + .venv/ + venv/ + ENV/ + .Python + *.egg-info/ + dist/ + build/ + + # Node.js + node_modules/ + npm-debug.log + yarn-error.log + .npm/ + + # Ruby + *.gem + .bundle/ + vendor/bundle/ + + # Rust + target/ + Cargo.lock + + # Go + *.exe + *.test + *.out + ``` + + **Build artifacts:** + ``` + *.o + *.a + *.so + *.dylib + *.dll + *.class + *.jar + ``` + + **Misc:** + ``` + # Logs + *.log + logs/ + + # Temporary files + *.tmp + *.temp + .cache/ + + # Environment files + .env + .env.local + + # Database files + *.sqlite + *.db + ``` + +4. **Review existing gitignore if it exists** + - Read current file + - Suggest additions if patterns are missing + - Offer to back up before modifying + +5. 
**Test the configuration** + - Verify config: `git config --global core.excludesfile` + - Show the file: `cat ~/.gitignore_global` + +## Output + +Provide a summary showing: +- Global gitignore location +- Whether it was created or already existed +- List of patterns included +- Verification of git configuration diff --git a/commands/sysadmin/linux-desktop/hardware/check-gpu-os-optimization.md b/commands/sysadmin/linux-desktop/hardware/check-gpu-os-optimization.md new file mode 100644 index 0000000000000000000000000000000000000000..fc7e2634888dfc23ecc2e098553652fbd34b5249 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/check-gpu-os-optimization.md @@ -0,0 +1,84 @@ +--- +description: Evaluate if OS is properly optimized to support the GPU +tags: [gpu, amd, rocm, optimization, drivers, project, gitignored] +--- + +You are helping the user verify their OS is properly optimized for their GPU (AMD in Daniel's case). + +## Process + +1. **Identify GPU** + - List GPUs: `lspci | grep -E "VGA|3D"` + - Get detailed info: `lspci -v -s $(lspci | grep VGA | cut -d" " -f1)` + - For AMD: `rocm-smi` or `rocminfo` + +2. **Check GPU drivers** + + **For AMD (ROCm):** + - Check ROCm version: `rocminfo | grep "Name:" | head -1` + - Check kernel module: `lsmod | grep amdgpu` + - Check firmware: `ls /usr/lib/firmware/amdgpu/` + - Verify driver: `glxinfo | grep "OpenGL renderer"` + + **Verify correct driver is loaded:** + - Check Xorg/Wayland: `glxinfo | grep -E "vendor|renderer"` + - Should show AMD/RADV, not llvmpipe (software rendering) + +3. **Check compute support** + - ROCm installation: `rocminfo` + - HIP runtime: `hipconfig --version` + - Check device visibility: `rocm-smi --showproductname` + - Verify compute capability + +4. 
**Check required packages** + ```bash + dpkg -l | grep -E "rocm|amdgpu|mesa" + ``` + - Key packages for AMD: + - `rocm-hip-runtime` + - `rocm-opencl-runtime` + - `mesa-vulkan-drivers` + - `mesa-va-drivers` (for video acceleration) + - `libdrm-amdgpu1` + +5. **Verify hardware acceleration** + - VA-API: `vainfo` (should show AMD) + - Vulkan: `vulkaninfo | grep deviceName` + - OpenGL: `glxinfo | grep "direct rendering"` + - OpenCL: `clinfo | grep "Device Name"` + +6. **Check performance settings** + - GPU clock states: `cat /sys/class/drm/card*/device/pp_power_profile_mode` + - Performance level: `cat /sys/class/drm/card*/device/power_dpm_force_performance_level` + - Fan control: `rocm-smi --showfan` + +7. **System configuration** + - Check user in video/render groups: `groups $USER` + - Should include: `video`, `render` + - If not: `sudo usermod -aG video,render $USER` + +8. **Check for optimization opportunities** + - Latest drivers available? + - Kernel parameters optimized? + - Memory (BAR size) properly configured? + - PCI-E link speed: `lspci -vv | grep -A 10 VGA | grep LnkSta` + +9. 
**Suggest improvements** + - Update drivers if outdated + - Install missing packages + - Optimize kernel parameters in GRUB: + - `amdgpu.ppfeaturemask=0xffffffff` (unlock all features) + - `amdgpu.dpm=1` (enable dynamic power management) + - Enable ReBAR if supported + +## Output + +Provide a report showing: +- GPU model and details +- Driver status (version, loaded correctly) +- ROCm/compute support status +- Hardware acceleration status (VA-API, Vulkan, OpenGL) +- User group membership +- Performance settings +- Missing packages or configurations +- Recommended optimizations diff --git a/commands/sysadmin/linux-desktop/hardware/evaluate-wake-devices.md b/commands/sysadmin/linux-desktop/hardware/evaluate-wake-devices.md new file mode 100644 index 0000000000000000000000000000000000000000..4412934ae92c8f127d41e505a6d5eaba8259cdd3 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/evaluate-wake-devices.md @@ -0,0 +1,75 @@ +--- +description: Evaluate wake devices and help remove them for better hibernation +tags: [power, hibernation, wake-devices, optimization, project, gitignored] +--- + +You are helping the user evaluate and configure wake devices to improve hibernation/sleep behavior. + +## Process + +1. **Check current wake-enabled devices** + - List devices that can wake system: `cat /proc/acpi/wakeup` + - Show USB wake devices: `grep . /sys/bus/usb/devices/*/power/wakeup` + - Check PCI wake devices: `grep . /sys/bus/pci/devices/*/power/wakeup` + +2. **Identify wake sources** + - Check what woke the system last: `journalctl -b -1 -n 50 | grep -i "wakeup\|wake"` + - Review systemd sleep logs: `journalctl -u systemd-suspend -n 50` + - Check for spurious wakeups + +3. **Common wake device categories** + - Keyboard/Mouse (USB devices) + - Network cards (Ethernet/WiFi) + - Bluetooth adapters + - USB hubs + - Audio devices + - ACPI devices (power buttons, lid switches) + +4. 
**Disable unnecessary wake devices** + + **Temporary (until reboot):** + - Disable USB device: `echo disabled > /sys/bus/usb/devices/<device>/power/wakeup` (replace `<device>` with the USB device ID, e.g. `1-3`) + - Disable ACPI device: `echo <DEVICE_NAME> > /proc/acpi/wakeup` (writing the device name from the first column toggles its enabled/disabled state) + + **Permanent (via udev rules):** + - Create rule: `/etc/udev/rules.d/90-disable-wakeup.rules` + - Example: + ``` + # Disable USB wakeup for all USB devices except keyboard + ACTION=="add", SUBSYSTEM=="usb", DRIVER=="usb", ATTR{power/wakeup}="disabled" + ``` + + **Via systemd service:** + - Create: `/etc/systemd/system/disable-usb-wakeup.service` + - Set wake devices on boot + +5. **Test configuration** + - Suspend system: `systemctl suspend` + - Try to wake with various devices + - Verify unwanted devices don't wake system + +6. **Suggest optimal configuration** + - Typically keep enabled: + - Power button + - Keyboard (if wired) + - Laptop lid switch + - Typically disable: + - Mice + - USB hubs + - Network cards (unless Wake-on-LAN needed) + - Bluetooth + +7. **Create persistent configuration** + - Offer to create udev rules + - Offer to create systemd service + - Provide script to restore settings on boot + +## Output + +Provide a report showing: +- Currently wake-enabled devices +- Devices that have caused wakeups +- Recommended devices to disable +- Configuration method (udev/systemd) +- Commands to apply changes +- How to test and verify diff --git a/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-cpu.md b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-cpu.md new file mode 100644 index 0000000000000000000000000000000000000000..03db1eab96dac52fb3a2e248b146b9a8ef7fb5d5 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-cpu.md @@ -0,0 +1,420 @@ +You are performing an exhaustive CPU (processor) profile of the system. 
+ +## Your Task + +Generate a comprehensive CPU analysis covering all aspects of processor hardware, configuration, features, and performance. + +### 1. CPU Hardware Identification +- **Vendor**: Intel, AMD, ARM, or other +- **Model name**: Full processor name +- **Microarchitecture**: Zen 4, Raptor Lake, etc. +- **Family**: CPU family number +- **Model**: CPU model number +- **Stepping**: CPU stepping/revision +- **CPU ID**: CPUID signature +- **Manufacturing process**: Node size (5nm, 7nm, etc.) + +### 2. Core and Thread Configuration +- **Physical cores**: Actual CPU cores +- **Logical processors**: Total threads (with SMT/HT) +- **Threads per core**: 1 or 2 (SMT/Hyper-Threading) +- **Cores per socket**: Core count per CPU +- **Sockets**: Number of CPU sockets +- **NUMA nodes**: Non-uniform memory access nodes +- **Core layout**: Physical topology and placement + +### 3. Frequency and Clock Information +- **Base frequency**: Guaranteed base clock +- **Maximum boost frequency**: Single-core turbo +- **All-core boost**: Multi-core sustained boost +- **Current frequencies**: Per-core current clocks +- **Frequency scaling**: Available scaling governors +- **Turbo mode**: Status and configuration +- **C-states**: Power saving states available +- **P-states**: Performance states + +### 4. Cache Hierarchy +- **L1 data cache**: Per-core L1D size +- **L1 instruction cache**: Per-core L1I size +- **L2 cache**: Per-core or shared L2 size +- **L3 cache**: Shared last-level cache size +- **L4 cache**: If present (rare) +- **Cache line size**: Typical 64 bytes +- **Cache associativity**: Set-associative configuration +- **Total cache**: Sum of all cache levels + +### 5. CPU Features and Extensions +- **Instruction sets**: SSE, AVX, AVX2, AVX-512 +- **Virtualization**: VT-x, AMD-V, VT-d, AMD-Vi +- **Security features**: SGX, SEV, TDX, etc. 
+- **AES-NI**: Hardware AES acceleration +- **SHA extensions**: Hardware SHA acceleration +- **FMA**: Fused multiply-add +- **BMI/BMI2**: Bit manipulation instructions +- **TSX**: Transactional synchronization +- **Hardware monitoring**: PMU, performance counters + +### 6. Virtualization Capabilities +- **Virtualization enabled**: VT-x/AMD-V status +- **IOMMU**: VT-d/AMD-Vi for device passthrough +- **Nested paging**: EPT/RVI support +- **Nested virtualization**: Capability +- **Hardware isolation**: SGX, SEV, TDX +- **Virtual machine extensions**: Available features + +### 7. Security Features +- **CPU vulnerabilities**: Spectre, Meltdown, etc. +- **Mitigations**: Enabled security mitigations +- **Performance impact**: Mitigation overhead +- **Secure boot**: Support status +- **Memory encryption**: SME, SEV support +- **Control-flow enforcement**: CET, IBT +- **Branch prediction**: IBRS, STIBP status + +### 8. Thermal and Power Management +- **TDP**: Thermal design power +- **Maximum temperature**: Tjunction max +- **Current temperature**: Per-core temps +- **Thermal throttling**: Status and history +- **Power consumption**: Current package power +- **Power limits**: PL1, PL2 settings +- **Voltage**: Core voltage +- **Power states**: C-states and P-states usage + +### 9. Performance Characteristics +- **BogoMIPS**: Rough performance indicator +- **CPU benchmark**: If available (sysbench, etc.) +- **Context switch rate**: Scheduler efficiency +- **Interrupts**: Interrupt rate per second +- **Load average**: 1, 5, 15 minute averages +- **CPU utilization**: Per-core usage +- **Performance counters**: PMU data if accessible + +### 10. 
Memory Controller and Architecture +- **Memory controller**: Integrated or discrete +- **Memory channels**: Number of channels +- **Maximum memory**: Supported RAM capacity +- **Memory types**: Supported DDR generations +- **Memory speed**: Maximum supported speed +- **ECC support**: Error-correcting code capability +- **Prefetchers**: Hardware prefetch engines + +### 11. Interconnect and Topology +- **CPU interconnect**: QPI, UPI, Infinity Fabric +- **Interconnect speed**: GT/s or MHz +- **NUMA configuration**: Node topology +- **Core-to-core latency**: Inter-core communication +- **Socket topology**: Multi-socket layout +- **L3 slicing**: Cache slice distribution + +### 12. Microcode and Firmware +- **Microcode version**: Current CPU microcode +- **Microcode date**: Release date +- **Update available**: Check for updates +- **Speculative execution**: Firmware mitigations + +## Commands to Use + +**Basic CPU information:** +- `lscpu` +- `cat /proc/cpuinfo` +- `lscpu -e` - Extended CPU list +- `sudo dmidecode -t processor` + +**Detailed specifications:** +- `lscpu -J` - JSON output for parsing +- `sudo lshw -class processor` +- `cpuid` (if installed) +- `x86info` (if installed, x86 systems) + +**Frequency information:** +- `lscpu | grep MHz` +- `cat /proc/cpuinfo | grep MHz` +- `cpufreq-info` (if installed) +- `cat /sys/devices/system/cpu/cpu*/cpufreq/scaling_cur_freq` +- `cat /sys/devices/system/cpu/cpu*/cpufreq/cpuinfo_max_freq` +- `cat /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor` + +**Cache information:** +- `lscpu -C` +- `getconf -a | grep CACHE` +- `cat /sys/devices/system/cpu/cpu0/cache/index*/size` + +**CPU features:** +- `cat /proc/cpuinfo | grep flags | head -1` +- `lscpu | grep -i flag` + +**Virtualization:** +- `lscpu | grep -i virtualization` +- `cat /proc/cpuinfo | grep -E '(vmx|svm)'` +- `dmesg | grep -i "vt-d\|amd-vi"` + +**Security and vulnerabilities:** +- `lscpu | grep -i vulnerab` +- `cat /sys/devices/system/cpu/vulnerabilities/*` +- 
`spectre-meltdown-checker` (if installed) + +**Thermal and power:** +- `sensors` (if lm-sensors installed) +- `cat /sys/class/thermal/thermal_zone*/temp` +- `cat /sys/devices/system/cpu/cpu*/cpufreq/scaling_cur_freq` +- `sudo turbostat --quiet --show Package,Core,CPU,Avg_MHz,Busy%,Bzy_MHz,PkgTmp --interval 1` (if available) +- `sudo powertop --time=10` (if installed) + +**Performance monitoring:** +- `top -bn1 | grep "Cpu(s)"` +- `mpstat -P ALL 1 5` (if sysstat installed) +- `vmstat 1 5` +- `uptime` +- `cat /proc/loadavg` + +**Microcode:** +- `cat /proc/cpuinfo | grep microcode | head -1` +- `dmesg | grep microcode` + +**NUMA topology:** +- `numactl --hardware` +- `lscpu | grep NUMA` +- `cat /sys/devices/system/node/node*/cpulist` + +**Benchmarking (optional):** +- `sysbench cpu --threads=$(nproc) run` (if installed) +- `7z b` (7-zip benchmark if installed) + +## Output Format + +### Executive Summary +``` +CPU: [manufacturer] [model name] +Architecture: [microarchitecture] ([process node]) +Cores/Threads: [physical cores] cores / [logical threads] threads +Base/Boost: [base GHz] / [boost GHz] +Cache: [L1] + [L2] + [L3 MB] +Features: [key features like AVX-512, virtualization] +``` + +### Detailed CPU Profile + +**Hardware Identification:** +- Vendor: [Intel/AMD/ARM] +- Model Name: [full processor name] +- Microarchitecture: [architecture name] +- Family: [hex family] +- Model: [hex model] +- Stepping: [stepping number] +- CPU ID: [cpuid signature] +- Manufacturing: [nm process] + +**Core Configuration:** +- Physical Cores: [count] +- Logical Processors: [count] +- Threads per Core: [1/2] +- Sockets: [count] +- NUMA Nodes: [count] +- Topology: [description] + +**Frequency Information:** +- Base Frequency: [GHz] +- Maximum Turbo: [GHz] (single-core) +- All-Core Turbo: [GHz] +- Current Frequencies: + - CPU 0: [MHz] + - CPU 1: [MHz] + - ... 
+- Scaling Governor: [powersave/performance/schedutil] +- Turbo Boost: [Enabled/Disabled] + +**Cache Hierarchy:** +- L1 Data Cache: [KB] per core ([total KB]) +- L1 Instruction Cache: [KB] per core ([total KB]) +- L2 Cache: [KB/MB] per core ([total MB]) +- L3 Cache: [MB] shared ([MB] total) +- Cache Line Size: [bytes] +- Total Cache: [MB] + +**Instruction Set Extensions:** +- Base: [x86-64-v2/v3/v4] +- SIMD: [SSE4.2, AVX, AVX2, AVX-512, etc.] +- Virtualization: [VT-x/AMD-V, VT-d/AMD-Vi] +- Security: [AES-NI, SHA, SGX, SEV] +- Other: [FMA, BMI, BMI2, TSX, etc.] + +**Feature Flags (Key):** +``` +[vmx/svm, aes, avx, avx2, avx512f, sha_ni, fma, bmi1, bmi2, etc.] +``` + +**Virtualization Capabilities:** +- VT-x/AMD-V: [Enabled/Disabled] +- VT-d/AMD-Vi (IOMMU): [Enabled/Disabled] +- EPT/RVI: [Supported] +- Nested Virtualization: [Supported/Not Supported] +- Hardware Isolation: [SGX/SEV/TDX support] + +**Security Status:** +- Vulnerabilities: + - Spectre v1: [mitigated/vulnerable] + - Spectre v2: [mitigated/vulnerable] + - Meltdown: [mitigated/vulnerable] + - [other vulnerabilities...] +- Active Mitigations: [list] +- Performance Impact: [estimated %] + +**Thermal and Power:** +- TDP: [W] +- Maximum Temperature: [Β°C] +- Current Temperature: + - Package: [Β°C] + - Core 0: [Β°C] + - Core 1: [Β°C] + - ... +- Power Consumption: [W] +- Power Limits: PL1=[W], PL2=[W] +- Throttling Status: [None/Active] + +**Memory Controller:** +- Controller: [Integrated] +- Memory Channels: [count] +- Maximum Memory: [GB] +- Supported Types: [DDR4, DDR5] +- Maximum Speed: [MT/s] +- ECC Support: [Yes/No] + +**Current Performance:** +- CPU Utilization: [%] average +- Per-Core Usage: + - CPU 0: [%] + - CPU 1: [%] + - ... 
+- Load Average: [1min], [5min], [15min] +- Context Switches: [/sec] +- Interrupts: [/sec] +- BogoMIPS: [value] + +**NUMA Topology (if applicable):** +- NUMA Nodes: [count] +- Node 0 CPUs: [list] +- Node 1 CPUs: [list] +- Node 0 Memory: [GB] +- Node 1 Memory: [GB] + +**Microcode:** +- Version: [hex version] +- Date: [date if available] +- Update Status: [check if current] + +### Performance Assessment + +**Performance Tier:** +- Consumer: Entry/Mainstream/High-end/Enthusiast +- Server: Entry/Mid-range/High-end +- Generation: [relative age] + +**Bottleneck Analysis:** +- Core count: [adequate/limited for workload] +- Clock speed: [competitive/dated] +- Cache size: [generous/adequate/limited] +- Memory channels: [optimal/bottleneck] + +**Optimization Recommendations:** +- Frequency scaling: [suggestions] +- Power management: [tuning options] +- NUMA configuration: [if applicable] +- Security mitigation tuning: [performance vs. security] + +### AI-Readable JSON + +```json +{ + "hardware": { + "vendor": "intel|amd|arm", + "model_name": "", + "microarchitecture": "", + "family": "", + "model": "", + "stepping": 0, + "process_nm": 0 + }, + "cores": { + "physical_cores": 0, + "logical_processors": 0, + "threads_per_core": 0, + "sockets": 0, + "numa_nodes": 0 + }, + "frequency": { + "base_ghz": 0.0, + "max_turbo_ghz": 0.0, + "all_core_turbo_ghz": 0.0, + "scaling_governor": "" + }, + "cache": { + "l1d_kb_per_core": 0, + "l1i_kb_per_core": 0, + "l2_kb_per_core": 0, + "l3_mb_total": 0, + "total_cache_mb": 0 + }, + "features": { + "instruction_sets": [], + "virtualization": { + "vmx_svm": false, + "iommu": false + }, + "security": { + "aes_ni": false, + "sha_extensions": false, + "sgx": false + } + }, + "thermal_power": { + "tdp_watts": 0, + "max_temp_celsius": 0, + "current_temp_celsius": 0, + "current_power_watts": 0 + }, + "memory_controller": { + "channels": 0, + "max_memory_gb": 0, + "supported_types": [], + "ecc_support": false + }, + "vulnerabilities": { + "spectre_v1": 
"", + "spectre_v2": "", + "meltdown": "" + }, + "microcode": { + "version": "", + "date": "" + } +} +``` + +## Execution Guidelines + +1. **Gather comprehensive data**: Use multiple commands to cross-verify +2. **Parse carefully**: Extract specific values from verbose output +3. **Check all cores**: Get per-core data where applicable +4. **Monitor dynamic state**: Capture current frequencies and temps +5. **Assess features**: Identify valuable CPU capabilities +6. **Security review**: Check vulnerabilities and mitigations +7. **Performance context**: Relate specs to real-world capability +8. **NUMA awareness**: Handle multi-socket systems properly +9. **Format clearly**: Present technical data accessibly +10. **Provide insights**: Don't just list specs, interpret them + +## Important Notes + +- Some commands require root privileges (dmidecode, turbostat) +- Install lm-sensors and run sensors-detect for thermal monitoring +- sysstat package needed for mpstat +- cpuid and x86info provide additional details if installed +- Virtualization features require BIOS enablement +- Security mitigations can impact performance significantly +- Microcode updates are critical for security +- NUMA topology only relevant for multi-socket systems +- Thermal data accuracy varies by motherboard +- Governor settings affect performance and power consumption + +Be extremely thorough - capture every detail about the CPU subsystem. diff --git a/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-gpu.md b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-gpu.md new file mode 100644 index 0000000000000000000000000000000000000000..17bbf2ff4620cd9ef75051a93b4aca7ba82ab7a7 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-gpu.md @@ -0,0 +1,357 @@ +You are performing an exhaustive GPU (graphics) profile of the system. 
+ +## Your Task + +Generate a comprehensive GPU analysis covering all aspects of graphics hardware, configuration, and capabilities. + +### 1. GPU Hardware Identification +- **Vendor**: NVIDIA, AMD, Intel, or other +- **GPU model**: Full product name +- **GPU architecture**: Ada Lovelace, RDNA 3, Xe, etc. +- **Device ID**: PCI device identifier +- **Subsystem vendor/device**: Card manufacturer +- **Revision**: GPU revision/stepping +- **Manufacturing process**: Node size (5nm, 7nm, etc.) + +### 2. GPU Specifications +- **CUDA cores / Stream processors / Execution units**: Compute unit count +- **Tensor cores / RT cores**: AI and ray tracing hardware +- **Base clock / Boost clock**: GPU frequencies +- **Memory size**: VRAM capacity +- **Memory type**: GDDR6, GDDR6X, HBM2, etc. +- **Memory bus width**: 128-bit, 256-bit, etc. +- **Memory bandwidth**: GB/s +- **TDP**: Thermal design power +- **Power connectors**: PCIe power requirements + +### 3. PCI Configuration +- **PCI address**: Bus:Device.Function +- **PCI generation**: PCIe 3.0, 4.0, 5.0 +- **Link width**: x16, x8, x4, etc. +- **Current link speed**: GT/s +- **Maximum link speed**: Supported maximum +- **Link status**: Active, degraded, or optimal +- **NUMA node**: If in NUMA system + +### 4. Display Configuration +- **Connected displays**: Count and identifiers +- **Display resolutions**: Per-display native resolution +- **Refresh rates**: Current refresh rates +- **Display interfaces**: HDMI, DisplayPort, DVI, VGA +- **Primary display**: Which output is primary +- **Display technologies**: G-Sync, FreeSync support +- **Maximum resolution**: GPU capability + +### 5. 
Driver Information +- **Driver type**: Proprietary or open-source +- **Driver version**: Current installed version +- **Driver date**: Release date +- **Kernel module**: Module name and version +- **Mesa version**: For open-source drivers +- **X.Org driver**: X driver in use +- **Wayland support**: Compositor compatibility +- **Vulkan driver**: Vulkan ICD in use + +### 6. Graphics API Support +- **OpenGL version**: Maximum supported version +- **OpenGL renderer**: Renderer string +- **Vulkan version**: Vulkan API version +- **Vulkan extensions**: Count and key extensions +- **OpenCL version**: Compute API version +- **Direct3D support**: Wine/Proton capabilities +- **Video decode**: Hardware decode support (NVDEC, VCE, etc.) +- **Video encode**: Hardware encode support (NVENC, VCN, etc.) + +### 7. GPU Clocks and Power State +- **Current GPU clock**: Real-time frequency +- **Current memory clock**: VRAM frequency +- **Current power draw**: Watts +- **Power state**: P-state (P0-P12) +- **Performance level**: Performance mode +- **Fan speed**: Current fan RPM/% +- **GPU temperature**: Current temp in Β°C +- **Throttling status**: Thermal or power throttling + +### 8. GPU Memory Details +- **Total VRAM**: Total video memory +- **Used VRAM**: Currently allocated +- **Free VRAM**: Available memory +- **Bar size**: PCIe BAR size (Resizable BAR) +- **Memory controller**: Type and capabilities +- **ECC support**: Error correction capability + +### 9. Compute Capabilities +- **CUDA version**: For NVIDIA (if applicable) +- **Compute capability**: CUDA compute version +- **ROCm support**: For AMD +- **OpenCL devices**: Available compute devices +- **Tensor core support**: AI acceleration +- **Ray tracing support**: RT core capability +- **Matrix operations**: INT8, FP16, TF32, etc. + +### 10. 
Multi-GPU Configuration +- **Number of GPUs**: Total graphics cards +- **SLI/CrossFire**: Multi-GPU mode status +- **GPU topology**: How GPUs are connected +- **Per-GPU details**: Individual stats for each GPU + +## Commands to Use + +**Basic GPU detection:** +- `lspci | grep -i vga` +- `lspci | grep -i 3d` +- `sudo lshw -C display` +- `lspci -v -s $(lspci | grep VGA | cut -d' ' -f1)` + +**Detailed PCI information:** +- `sudo lspci -vv | grep -A 20 VGA` +- `sudo lspci -nnk | grep -A 3 VGA` + +**NVIDIA-specific:** +- `nvidia-smi` +- `nvidia-smi -q` - Detailed query +- `nvidia-smi -q -d CLOCK` - Clock details +- `nvidia-smi -q -d MEMORY` - Memory details +- `nvidia-smi -q -d TEMPERATURE` - Thermal info +- `nvidia-smi -q -d POWER` - Power details +- `nvidia-smi -q -d PIDS` - Process info +- `nvidia-smi topo -m` - Topology matrix +- `nvidia-settings -q all` - All settings + +**AMD-specific:** +- `rocm-smi` +- `radeontop` (if installed) +- `sudo cat /sys/kernel/debug/dri/0/amdgpu_pm_info` +- `sudo cat /sys/class/drm/card*/device/pp_dpm_sclk` +- `clinfo` - OpenCL info + +**Intel-specific:** +- `intel_gpu_top` (if installed) +- `intel_gpu_frequency` - GPU frequency info +- `vainfo` - VA-API information + +**Graphics API information:** +- `glxinfo | grep -i "opengl version"` +- `glxinfo | grep -i "opengl renderer"` +- `vulkaninfo --summary` +- `vulkaninfo` - Full Vulkan details +- `clinfo` - OpenCL capabilities +- `vdpauinfo` - VDPAU support +- `vainfo` - VA-API support + +**Driver information:** +- `modinfo nvidia` (for NVIDIA) +- `modinfo amdgpu` (for AMD) +- `modinfo i915` (for Intel) +- `glxinfo | grep -i "opengl core profile version"` +- `dpkg -l | grep nvidia` (driver packages) + +**Display information:** +- `xrandr --verbose` +- `xrandr --listmonitors` +- `kscreen-doctor -o` (for KDE) +- `wayland-info` (if on Wayland) + +**System files:** +- `cat /proc/driver/nvidia/version` +- `cat /sys/class/drm/card*/device/uevent` +- `cat /sys/kernel/debug/dri/0/name` + +## 
Output Format + +### Executive Summary +``` +GPU: [manufacturer] [model] +Architecture: [architecture name] +VRAM: [X] GB [memory type] +Driver: [type] v[version] +Compute: CUDA [version] / ROCm [version] / OpenCL [version] +API Support: OpenGL [v], Vulkan [v] +``` + +### Detailed GPU Profile + +**Hardware Identification:** +- Vendor: [NVIDIA/AMD/Intel] +- Model: [full model name] +- Architecture: [codename/architecture] +- Device ID: [PCI ID] +- Subsystem: [manufacturer] +- Manufacturing: [nm process] + +**GPU Specifications:** +- Compute Units: [count] [CUDA cores/SPs/EUs] +- Tensor Cores: [count] (if applicable) +- RT Cores: [count] (if applicable) +- Base Clock: [MHz] +- Boost Clock: [MHz] +- Memory: [GB] [type] +- Memory Bus: [bit]-bit +- Bandwidth: [GB/s] +- TDP: [W] + +**PCI Configuration:** +- PCI Address: [bus:dev.func] +- PCIe Generation: [3.0/4.0/5.0] +- Link Width: x[16/8/4] +- Current Speed: [GT/s] +- Max Speed: [GT/s] +- Link Status: [Optimal/Degraded] + +**Display Configuration:** +- Connected Displays: [count] + - Display 1: [resolution]@[Hz] via [interface] + - Display 2: ... 
+- Primary Display: [identifier] +- Adaptive Sync: [G-Sync/FreeSync/None] + +**Driver Information:** +- Driver Type: [Proprietary/Open Source] +- Driver Version: [version] +- Release Date: [date] +- Kernel Module: [module name] +- Mesa Version: [version] (if applicable) +- X.Org Driver: [driver name] +- Wayland Support: [Yes/No] + +**Graphics API Support:** +- OpenGL: [version] +- OpenGL Renderer: [string] +- Vulkan: [version] +- Vulkan Extensions: [count] +- OpenCL: [version] +- Hardware Video Decode: [NVDEC/VCE/VA-API] +- Hardware Video Encode: [NVENC/VCN/QSV] + +**Current GPU State:** +- GPU Clock: [MHz] +- Memory Clock: [MHz] +- Power Draw: [W] / [TDP W] +- Power State: [P-state] +- Temperature: [Β°C] +- Fan Speed: [RPM / %] +- Throttling: [None/Thermal/Power] + +**Memory Status:** +- Total VRAM: [GB] +- Used VRAM: [GB] ([%]) +- Free VRAM: [GB] +- BAR Size: [MB] (Resizable BAR: [Enabled/Disabled]) + +**Compute Capabilities:** +- CUDA Version: [version] (Compute [X.X]) +- Tensor Core Support: [Yes/No] +- RT Core Support: [Yes/No] +- Precision Support: FP64, FP32, FP16, INT8, [TF32] +- ROCm Version: [version] (for AMD) +- OpenCL Devices: [count] + +**Performance and Optimization:** +- PCIe Link Utilization: [assessment] +- Resizable BAR: [status and impact] +- Driver Optimization: [recommendations] +- Compute Configuration: [assessment] + +### Multi-GPU Configuration (if applicable) +``` +GPU 0: [model] - [details] +GPU 1: [model] - [details] +Topology: [description] +SLI/CrossFire: [status] +``` + +### AI-Readable JSON + +```json +{ + "hardware": { + "vendor": "nvidia|amd|intel", + "model": "", + "architecture": "", + "device_id": "", + "manufacturing_process_nm": 0 + }, + "specifications": { + "compute_units": 0, + "tensor_cores": 0, + "rt_cores": 0, + "base_clock_mhz": 0, + "boost_clock_mhz": 0, + "vram_gb": 0, + "memory_type": "", + "memory_bus_bits": 0, + "bandwidth_gbs": 0, + "tdp_watts": 0 + }, + "pci": { + "address": "", + "generation": "3.0|4.0|5.0", + 
"link_width": 0, + "current_speed_gts": 0, + "max_speed_gts": 0, + "resizable_bar": false + }, + "driver": { + "type": "proprietary|open_source", + "version": "", + "kernel_module": "", + "mesa_version": "" + }, + "api_support": { + "opengl_version": "", + "vulkan_version": "", + "opencl_version": "", + "cuda_version": "", + "compute_capability": "" + }, + "current_state": { + "gpu_clock_mhz": 0, + "memory_clock_mhz": 0, + "power_draw_watts": 0, + "temperature_celsius": 0, + "fan_speed_percent": 0, + "vram_used_gb": 0, + "vram_total_gb": 0 + }, + "displays": [ + { + "resolution": "", + "refresh_rate_hz": 0, + "interface": "" + } + ], + "compute": { + "tensor_core_supported": false, + "rt_core_supported": false, + "precisions": [] + } +} +``` + +## Execution Guidelines + +1. **Detect GPU vendor first**: Tailor commands to detected hardware +2. **Use vendor-specific tools**: nvidia-smi, rocm-smi, intel_gpu_top +3. **Gather PCI details**: Critical for PCIe performance assessment +4. **Check driver status**: Ensure drivers are properly loaded +5. **Query all APIs**: OpenGL, Vulkan, OpenCL for full picture +6. **Monitor dynamic state**: Clocks, temps, power in real-time +7. **Assess configuration**: Identify bottlenecks or misconfigurations +8. **Check for updates**: Compare installed vs. latest drivers +9. **Multi-GPU awareness**: Handle systems with multiple GPUs +10. **Format comprehensively**: Include all gathered data + +## Important Notes + +- Some commands require specific driver packages installed +- NVIDIA requires proprietary drivers for full functionality +- AMD open-source drivers have varying feature support +- Intel drivers are generally built into kernel +- Vulkan requires vulkan-tools package +- OpenCL requires vendor-specific implementations +- Some features require newer kernel versions +- Virtual machines may have limited GPU information +- Secure boot may affect driver installation +- Wayland vs. 
X11 may affect available information + +Be extremely thorough - capture every detail about the graphics subsystem. diff --git a/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-motherboard.md b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-motherboard.md new file mode 100644 index 0000000000000000000000000000000000000000..4949ddbf128b23be7ae2966f15cf2ef9b9d48350 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-motherboard.md @@ -0,0 +1,456 @@ +You are performing an exhaustive motherboard (system board) profile of the system. + +## Your Task + +Generate a comprehensive motherboard analysis covering all aspects of the system board, chipset, firmware, and connectivity. + +### 1. Motherboard Identification +- **Manufacturer**: Board manufacturer (ASUS, Gigabyte, MSI, etc.) +- **Product name**: Board model/SKU +- **Version**: Board revision number +- **Serial number**: Board serial number +- **Asset tag**: Asset tag (if configured) +- **Location in chassis**: Board location descriptor +- **Board type**: Motherboard, server board, embedded, etc. + +### 2. Chipset Information +- **Chipset manufacturer**: Intel, AMD, etc. +- **Chipset model**: Specific chipset name +- **Chipset features**: Key capabilities +- **PCH/FCH revision**: Platform controller hub revision +- **South bridge**: Legacy south bridge info +- **North bridge**: Legacy north bridge info (if separate) + +### 3. BIOS/UEFI Firmware +- **Firmware type**: BIOS or UEFI +- **Vendor**: BIOS manufacturer (AMI, Award, Phoenix, etc.) +- **Version**: Current BIOS version +- **Release date**: BIOS release date +- **Revision**: Firmware major/minor revision +- **ROM size**: BIOS ROM capacity +- **UEFI mode**: Legacy or UEFI boot mode +- **Secure boot**: Status and configuration + +### 4. 
Expansion Slots +- **PCIe slots**: Count and generations (PCIe 3.0/4.0/5.0) +- **Slot types**: x16, x8, x4, x1 configurations +- **Slot usage**: Which slots are occupied +- **M.2 slots**: Count, key types, and generations +- **Legacy slots**: PCI, AGP (if any) +- **Slot sharing**: Lane sharing configurations + +### 5. Storage Controllers and Interfaces +- **SATA ports**: Count and generation (SATA II/III) +- **SATA controllers**: Onboard controller details +- **NVMe support**: M.2 NVMe slot count and PCIe lanes +- **RAID support**: Hardware RAID capabilities +- **Storage modes**: AHCI, RAID, IDE +- **eSATA**: External SATA ports +- **U.2/U.3**: Enterprise NVMe support + +### 6. I/O Connectivity +- **USB controllers**: USB controller chipsets +- **USB ports**: Count by version (2.0, 3.0, 3.1, 3.2, 4.0) +- **USB-C ports**: Count and capabilities +- **Thunderbolt**: Version and port count +- **Internal USB headers**: Front panel USB headers +- **PS/2 ports**: Legacy keyboard/mouse ports +- **Serial ports**: RS-232 COM ports +- **Parallel port**: LPT port (rare) + +### 7. Network Interfaces +- **Ethernet controllers**: Onboard NIC chipsets +- **Ethernet ports**: Count and speeds (1G, 2.5G, 10G) +- **WiFi**: Onboard WiFi chipset and standard +- **Bluetooth**: Bluetooth version +- **MAC addresses**: Physical addresses of NICs + +### 8. Audio Subsystem +- **Audio codec**: Onboard audio chipset +- **Audio channels**: 2.0, 5.1, 7.1 support +- **Audio ports**: Line-in, line-out, mic, optical +- **Audio features**: Special audio technologies +- **HDMI/DP audio**: Audio over display connections + +### 9. Display and Graphics +- **Integrated graphics**: iGPU support (if applicable) +- **Display outputs**: HDMI, DisplayPort, DVI, VGA +- **Multi-monitor**: Maximum displays supported +- **Display port versions**: HDMI 2.1, DP 1.4, etc. + +### 10. Power Delivery +- **Power phases**: VRM phase count +- **Power connectors**: ATX 24-pin, EPS 8-pin, etc. 
+- **CPU power**: 4-pin, 8-pin, 8+4 pin configuration +- **PCIe power**: Additional PCIe power headers +- **Fan headers**: Count and type (PWM/DC) +- **RGB headers**: Addressable RGB headers +- **Power monitoring**: Voltage monitoring points + +### 11. Memory Support +- **Memory slots**: Total DIMM slots +- **Maximum capacity**: Maximum RAM supported +- **Memory types**: DDR4, DDR5, ECC support +- **Memory speeds**: Supported frequencies +- **Memory channels**: Dual, quad channel +- **XMP/DOCP**: Overclocking profile support + +### 12. Form Factor and Physical +- **Form factor**: ATX, Micro-ATX, Mini-ITX, EATX, etc. +- **Dimensions**: Board dimensions +- **Mounting holes**: Standoff pattern +- **Contained devices**: Onboard devices count + +### 13. Special Features +- **Overclocking**: OC features and BIOS options +- **TPM**: Trusted Platform Module version +- **BIOS flashback**: No-CPU BIOS update +- **Q-Flash/M-Flash**: Motherboard-specific tools +- **POST code display**: Onboard debug LEDs/display +- **Dual BIOS**: Backup BIOS chip +- **Clear CMOS**: CMOS reset button/jumper +- **BIOS recovery**: Recovery mechanisms + +### 14. Temperature and Monitoring +- **Temperature sensors**: Onboard sensor locations +- **Fan control**: Hardware fan control capabilities +- **Voltage monitoring**: Monitored voltage rails +- **Hardware monitoring chip**: Super I/O or monitoring IC + +### 15. 
System Slots and Headers +- **Front panel headers**: Power, reset, LED headers +- **Internal headers**: All internal connectors +- **System fan headers**: Chassis fan connectors +- **Pump headers**: Water cooling pump headers +- **Addressable RGB**: ARGB/DRGB header count +- **Temperature probe headers**: External sensor inputs + +## Commands to Use + +**Motherboard identification:** +- `sudo dmidecode -t baseboard` +- `sudo dmidecode -t 2` +- `cat /sys/class/dmi/id/board_vendor` +- `cat /sys/class/dmi/id/board_name` +- `cat /sys/class/dmi/id/board_version` + +**BIOS/UEFI information:** +- `sudo dmidecode -t bios` +- `sudo dmidecode -t 0` +- `cat /sys/class/dmi/id/bios_vendor` +- `cat /sys/class/dmi/id/bios_version` +- `cat /sys/class/dmi/id/bios_date` +- `efibootmgr -v` (if UEFI) +- `[ -d /sys/firmware/efi ] && echo "UEFI" || echo "BIOS"` + +**Chipset information:** +- `lspci | grep -i "ISA bridge"` +- `lspci -v | grep -A 10 "ISA bridge"` +- `sudo dmidecode -t 9` - System slots + +**PCI/PCIe slots and devices:** +- `lspci -tv` - Tree view of PCI devices +- `lspci -vv` - Verbose PCI information +- `sudo dmidecode -t 9` - System slot information +- `sudo lspci -vvv -s <slot>` - Specific slot details (slot as [[domain:]bus:]device[.function]) + +**Storage controllers:** +- `lspci | grep -i "sata\|raid\|storage"` +- `lspci -v | grep -A 10 -i "sata\|ahci"` +- `ls /sys/class/ata_port/` - SATA ports +- `ls /sys/block/nvme*` - NVMe devices + +**USB controllers:** +- `lspci | grep -i usb` +- `lsusb -v` +- `lsusb -t` - USB device tree +- `cat /sys/kernel/debug/usb/devices` + +**Network controllers:** +- `lspci | grep -i "ethernet\|network"` +- `sudo lshw -class network` +- `ip link show` + +**Audio controller:** +- `lspci | grep -i audio` +- `aplay -l` +- `cat /proc/asound/cards` + +**System information:** +- `sudo dmidecode -t system` +- `sudo dmidecode -t chassis` +- `sudo lshw -short` +- `sudo lshw -businfo` + +**Hardware monitoring:** +- `sensors` (if lm-sensors configured) +- `cat /sys/class/hwmon/hwmon*/name` +-
`sudo i2cdetect -l` (I2C buses) + +**Firmware and boot:** +- `sudo dmidecode -t 13` - BIOS language +- `bootctl status` (systemd-boot) +- `efibootmgr -v` (UEFI variables) + +**Memory slots:** +- `sudo dmidecode -t 16` - Physical memory array +- `sudo dmidecode -t 17` - Memory devices + +**Expansion and slots:** +- `sudo dmidecode -t 9` - System slots +- `sudo biosdecode` - Additional BIOS info + +**TPM and security:** +- `cat /sys/class/tpm/tpm0/device/description` +- `tpm2_getcap properties-fixed` (if TPM 2.0 tools) + +## Output Format + +### Executive Summary +``` +Motherboard: [manufacturer] [model] (rev [version]) +Chipset: [chipset model] +Form Factor: [ATX/mATX/ITX] +BIOS: [vendor] v[version] ([date]) +Features: [key features] +``` + +### Detailed Motherboard Profile + +**Board Identification:** +- Manufacturer: [vendor] +- Product Name: [model] +- Version: [revision] +- Serial Number: [S/N] +- Asset Tag: [tag] +- Type: [motherboard type] + +**Chipset:** +- Manufacturer: [Intel/AMD] +- Model: [chipset name] +- Revision: [revision] +- Features: [key capabilities] + +**BIOS/UEFI:** +- Type: [BIOS/UEFI] +- Vendor: [manufacturer] +- Version: [version] +- Release Date: [date] +- Revision: [major.minor] +- ROM Size: [KB/MB] +- Boot Mode: [Legacy/UEFI] +- Secure Boot: [Enabled/Disabled] + +**Expansion Slots:** +- PCIe x16 Slots: [count] (Gen [3.0/4.0/5.0]) + - Slot 1: PCIe [gen] x16 - [occupied by: device] + - Slot 2: PCIe [gen] x16 (runs at x8) - [status] +- PCIe x1 Slots: [count] +- M.2 Slots: [count] + - M.2_1: Key M, PCIe [gen] x4 - [device] + - M.2_2: Key M, PCIe [gen] x4 - [empty] + +**Storage Interfaces:** +- SATA Ports: [count] x SATA [II/III] +- SATA Controller: [chipset model] +- NVMe Support: [count] x M.2 slots (PCIe [gen] x4) +- RAID Support: [0, 1, 5, 10] +- Storage Mode: [AHCI/RAID/IDE] + +**I/O Connectivity:** +- USB Controllers: [chipset models] +- USB Ports: + - USB 2.0: [count] ports + - USB 3.0/3.1 Gen 1: [count] ports + - USB 3.1 Gen 2: [count] 
ports + - USB 3.2 Gen 2x2: [count] ports + - USB4/Thunderbolt: [count] ports +- USB-C: [count] ports ([capabilities]) +- Internal USB Headers: [count] +- Legacy Ports: [PS/2, Serial, Parallel] + +**Network:** +- Ethernet Controllers: [chipset models] +- Ethernet Ports: [count] x [1G/2.5G/10G] +- WiFi: [chipset] ([802.11 standard]) +- Bluetooth: [version] + +**Audio:** +- Audio Codec: [chipset model] +- Channels: [2.0/5.1/7.1] +- Audio Ports: [count and types] +- Features: [special audio tech] + +**Display Outputs (if integrated graphics):** +- HDMI: [count] x HDMI [version] +- DisplayPort: [count] x DP [version] +- DVI: [count] ports +- VGA: [count] ports + +**Power Delivery:** +- VRM Phases: [count]-phase ([digital/analog]) +- ATX Power: 24-pin +- CPU Power: [4/8/8+4]-pin +- PCIe Power: [auxiliary power headers] +- Fan Headers: [count] ([PWM/DC]) +- RGB Headers: [count] ([ARGB/RGB]) + +**Memory Support:** +- DIMM Slots: [count] +- Maximum Capacity: [GB] +- Memory Type: [DDR4/DDR5] +- Supported Speeds: Up to [MT/s] +- Channel Mode: [Dual/Quad] Channel +- ECC Support: [Yes/No] +- XMP/DOCP: [version] + +**Form Factor:** +- Standard: [ATX/mATX/Mini-ITX/EATX] +- Dimensions: [mm x mm] +- Mounting: [ATX standard] + +**Special Features:** +- TPM: [version] ([enabled/disabled]) +- BIOS Flashback: [Yes/No] +- Dual BIOS: [Yes/No] +- POST Code Display: [Yes/No] +- Clear CMOS: [Button/Jumper] +- Overclocking: [features list] + +**Temperature Monitoring:** +- Sensors: [locations] +- Fan Control: [PWM headers count] +- Voltage Monitoring: [rails monitored] +- Monitoring Chip: [IC model] + +### Connectivity Matrix + +``` +PCIe Slot Layout: +Slot 1: PCIe 4.0 x16 (CPU) β†’ [GPU installed] +Slot 2: PCIe 4.0 x16 (runs at x4, chipset) β†’ [empty] +Slot 3: PCIe 3.0 x1 (chipset) β†’ [WiFi card] +M.2_1: PCIe 4.0 x4 (CPU) β†’ [NVMe SSD] +M.2_2: PCIe 3.0 x4 (chipset) β†’ [empty] + +Storage Ports: +SATA0-SATA3: [devices] +SATA4-SATA7: [empty] +``` + +### Upgrade and Expansion Potential + +- 
Available PCIe slots: [count and type] +- Available M.2 slots: [count] +- RAM expansion: [X GB current / Y GB max] +- Storage expansion: [available ports] +- BIOS updates: [status] + +### AI-Readable JSON + +```json +{ + "board": { + "manufacturer": "", + "product_name": "", + "version": "", + "serial_number": "", + "form_factor": "ATX|mATX|ITX|EATX" + }, + "chipset": { + "manufacturer": "intel|amd", + "model": "", + "revision": "" + }, + "bios": { + "type": "BIOS|UEFI", + "vendor": "", + "version": "", + "release_date": "", + "secure_boot": false + }, + "expansion_slots": { + "pcie_x16": [ + { + "slot_number": 1, + "generation": "3.0|4.0|5.0", + "lanes": 16, + "occupied": true, + "device": "" + } + ], + "pcie_x1": 0, + "m2_slots": 0 + }, + "storage": { + "sata_ports": 0, + "sata_generation": "II|III", + "nvme_slots": 0, + "raid_support": [] + }, + "io": { + "usb": { + "usb_2_0": 0, + "usb_3_0": 0, + "usb_3_1": 0, + "usb_3_2": 0, + "usb_c": 0 + }, + "ethernet_ports": 0, + "wifi": false, + "bluetooth": false + }, + "audio": { + "codec": "", + "channels": "" + }, + "power": { + "vrm_phases": 0, + "fan_headers": 0, + "rgb_headers": 0 + }, + "memory": { + "dimm_slots": 0, + "max_capacity_gb": 0, + "type": "DDR4|DDR5", + "max_speed_mts": 0, + "ecc_support": false + }, + "features": { + "tpm": "", + "bios_flashback": false, + "dual_bios": false, + "post_code_display": false + } +} +``` + +## Execution Guidelines + +1. **Use dmidecode extensively**: Primary source for board info +2. **Cross-reference with lspci**: Verify chipset and slots +3. **Check physical vs. logical**: Some slots share lanes +4. **Document slot usage**: What's installed where +5. **Identify chipset features**: What the board can do +6. **BIOS version importance**: Check for updates +7. **Expansion planning**: Available upgrade paths +8. **Power delivery assessment**: Adequacy for components +9. **I/O inventory**: Complete port count +10. 
**Format comprehensively**: Present all findings clearly + +## Important Notes + +- Requires root/sudo for most detailed information +- dmidecode is the primary tool for board identification +- Some data may not be available in virtual machines +- BIOS version is critical for compatibility and security +- PCIe lane sharing is common on consumer boards +- M.2 slots may disable SATA ports when used +- Form factor determines case compatibility +- VRM quality affects overclocking and stability +- TPM may require BIOS enablement +- UEFI vs. Legacy affects boot configuration +- Some features require specific BIOS settings +- Motherboard manual provides definitive specifications + +Be extremely thorough - document every aspect of the motherboard. diff --git a/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-ram.md b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-ram.md new file mode 100644 index 0000000000000000000000000000000000000000..e8d05a60099ab35702cf7232352154cfeeceeca6 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/by-component/profile-ram.md @@ -0,0 +1,278 @@ +You are performing an exhaustive RAM (memory) profile of the system. + +## Your Task + +Generate a comprehensive memory analysis covering all aspects of RAM configuration, performance, and utilization. + +### 1. Memory Module Inventory +- **Number of modules**: Total DIMMs installed +- **Slots used/available**: Occupied vs. total slots +- **Module locations**: Which slots contain modules +- **Form factor**: DIMM, SO-DIMM, etc. +- **Module manufacturers**: Per-module vendor +- **Part numbers**: Specific module part numbers +- **Serial numbers**: Per-module serial numbers + +### 2. Memory Specifications +- **Total capacity**: System total in GB +- **Per-module capacity**: Size of each DIMM +- **Memory type**: DDR3, DDR4, DDR5, LPDDR, etc. 
+- **Speed ratings**: Configured speed and maximum speed +- **Clock frequency**: MT/s or MHz +- **Voltage**: Operating voltage (1.2V, 1.35V, 1.5V, etc.) +- **Data width**: 64-bit, 72-bit (ECC) +- **Total width**: Physical bus width + +### 3. Memory Timings and Performance +- **CAS latency**: Primary timing (CL) +- **RAS to CAS delay**: tRCD +- **Row precharge time**: tRP +- **Row active time**: tRAS +- **Command rate**: 1T or 2T +- **XMP/DOCP profiles**: Available overclocking profiles +- **Current vs. rated speed**: Compare actual to maximum +- **Memory bandwidth**: Theoretical and actual + +### 4. Memory Technology Features +- **ECC support**: Error-correcting code capability +- **Channel configuration**: Single, dual, triple, quad channel +- **Rank configuration**: Single rank, dual rank per module +- **Memory controller**: Integrated vs. discrete +- **NUMA configuration**: Non-uniform memory access (multi-CPU systems) +- **Interleaving**: Memory interleaving status + +### 5. Current Memory Usage +- **Total memory**: Available to system +- **Used memory**: Currently allocated +- **Free memory**: Completely unused +- **Available memory**: Free + reclaimable +- **Buffers**: Kernel buffer cache +- **Cached**: Page cache +- **Active/Inactive**: Hot and cold memory +- **Dirty memory**: Modified pages not yet written +- **Writeback**: Currently being written back + +### 6. Swap Configuration +- **Swap total**: Total swap space +- **Swap used**: Currently used swap +- **Swap type**: Partition, file, or zram +- **Swappiness**: Kernel swap tendency (0-100) +- **Swap devices**: List of swap locations +- **Swap priority**: If multiple swap devices + +### 7. 
Memory Pressure and Performance +- **Page faults**: Major and minor fault rates +- **Swap in/out rates**: If swap is active +- **Memory pressure**: OOM events, thrashing indicators +- **Huge pages**: Transparent huge pages configuration +- **NUMA statistics**: Memory locality (if applicable) +- **Memory errors**: ECC errors if supported + +### 8. Virtual Memory Configuration +- **Virtual memory parameters**: vm.swappiness, vm.vfs_cache_pressure +- **Overcommit settings**: Memory overcommit mode +- **OOM killer settings**: Out-of-memory behavior +- **Huge page configuration**: Transparent huge pages, huge page pool + +## Commands to Use + +**DMI/Hardware information:** +- `sudo dmidecode -t memory` +- `sudo dmidecode -t 16` - Physical memory array +- `sudo dmidecode -t 17` - Memory device details + +**Memory status:** +- `free -h` +- `cat /proc/meminfo` +- `vmstat -s` +- `vmstat 1 5` - Memory statistics over time + +**Module details:** +- `sudo lshw -class memory` +- `sudo decode-dimms` - Detailed DIMM info (if i2c-tools installed) + +**Performance and timings:** +- `sudo dmidecode -t memory | grep -i speed` +- `sudo dmidecode -t memory | grep -i timing` +- `cat /sys/devices/system/edac/mc/mc*/dimm*/dimm_label` - DIMM labels + +**Memory bandwidth:** +- `sudo dmidecode -t memory | grep -i bandwidth` +- Use `sysbench memory` for benchmarking (if installed) + +**Swap information:** +- `swapon --show` +- `cat /proc/swaps` +- `sysctl vm.swappiness` + +**Virtual memory tuning:** +- `sysctl -a | grep vm.` +- `cat /proc/sys/vm/overcommit_memory` + +**Memory errors (ECC systems):** +- `sudo edac-util -v` (if available) +- `sudo ras-mc-ctl --errors` + +**NUMA information:** +- `numactl --hardware` (if NUMA system) +- `cat /proc/buddyinfo` + +## Output Format + +### Executive Summary +``` +Memory Configuration: [total] GB, [type] @ [speed] MT/s +Modules: [X] x [Y]GB ([channel] channel, [rank] rank) +Technology: [ECC/Non-ECC], [feature highlights] +Current Usage: [X]% 
([used]/[total] GB) +``` + +### Detailed Memory Profile + +**Module Inventory:** +``` +Slot 1 (DIMM_A1): [manufacturer] [part-number] + - Capacity: [GB] + - Type: [DDR4/DDR5] + - Speed: [MT/s] + - Voltage: [V] + - Serial: [S/N] + +Slot 2 (DIMM_A2): ... +``` + +**Memory Configuration:** +- Total Capacity: [X] GB +- Memory Type: [DDR4/DDR5] +- Channel Mode: [Dual/Quad] Channel +- Configured Speed: [MT/s] ([MHz]) +- Maximum Supported Speed: [MT/s] +- Voltage: [V] +- ECC: [Enabled/Disabled/Not Supported] + +**Memory Timings:** +- CAS Latency: [CL] +- tRCD: [ns] +- tRP: [ns] +- tRAS: [ns] +- Command Rate: [1T/2T] + +**Current Usage Statistics:** +``` +Total: [X] GB +Used: [Y] GB ([Z]%) +Free: [A] GB +Available: [B] GB +Buffers: [C] MB +Cached: [D] GB +Active: [E] GB +Inactive: [F] GB +``` + +**Swap Configuration:** +- Swap Total: [X] GB ([partition/file/zram]) +- Swap Used: [Y] GB ([Z]%) +- Swappiness: [value] +- Devices: [list] + +**Performance Metrics:** +- Page Faults: [rate] per second +- Swap Activity: [in/out rates] +- Memory Bandwidth: [theoretical GB/s] +- Huge Pages: [configured/available] + +**Virtual Memory Tuning:** +- vm.swappiness: [value] +- vm.vfs_cache_pressure: [value] +- vm.overcommit_memory: [value] +- Transparent Huge Pages: [enabled/disabled] + +### Memory Assessment + +**Configuration Analysis:** +- Channel utilization: [optimal/suboptimal] +- Speed optimization: [running at spec/underclocked] +- Capacity per channel: [balanced/unbalanced] +- Upgrade path: [recommendations] + +**Performance Considerations:** +- Memory pressure: [low/medium/high] +- Swap usage: [analysis] +- Bottleneck assessment: [findings] + +### AI-Readable JSON + +```json +{ + "memory_modules": [ + { + "slot": "", + "manufacturer": "", + "part_number": "", + "serial_number": "", + "capacity_gb": 0, + "type": "DDR4|DDR5", + "speed_mts": 0, + "voltage": 0.0, + "form_factor": "DIMM|SO-DIMM" + } + ], + "configuration": { + "total_capacity_gb": 0, + "memory_type": "", + 
"channel_mode": "single|dual|quad", + "configured_speed_mts": 0, + "max_speed_mts": 0, + "ecc_enabled": false, + "slots_used": 0, + "slots_total": 0 + }, + "timings": { + "cas_latency": 0, + "trcd": 0, + "trp": 0, + "tras": 0 + }, + "usage": { + "total_gb": 0.0, + "used_gb": 0.0, + "free_gb": 0.0, + "available_gb": 0.0, + "cached_gb": 0.0, + "usage_percent": 0.0 + }, + "swap": { + "total_gb": 0.0, + "used_gb": 0.0, + "type": "partition|file|zram", + "swappiness": 0 + }, + "features": { + "ecc_supported": false, + "numa": false, + "huge_pages_enabled": false + } +} +``` + +## Execution Guidelines + +1. **Use sudo liberally**: Most detailed memory info requires root +2. **Parse dmidecode carefully**: Extract all per-DIMM details +3. **Cross-reference data**: Verify findings using multiple sources +4. **Calculate derived values**: Bandwidth, channel utilization, etc. +5. **Check for errors**: Look for memory error logs +6. **Assess configuration**: Identify optimization opportunities +7. **Consider upgrade paths**: Suggest meaningful improvements +8. **Monitor dynamic metrics**: Capture usage over brief period + +## Important Notes + +- Some details require specific tools (i2c-tools for SPD data) +- ECC information only available on systems with ECC support +- Memory timings may not be fully exposed on all systems +- Virtual machines may not expose full memory details +- NUMA information only relevant for multi-CPU systems +- Benchmark tools (sysbench, memtester) can provide additional insights + +Be extremely thorough - capture every detail about the memory subsystem. 
diff --git a/commands/sysadmin/linux-desktop/hardware/hardware-profilers/hardware-identity.md b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/hardware-identity.md new file mode 100644 index 0000000000000000000000000000000000000000..cad3b6899bce683fa6a85e45767c174fe954d649 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/hardware-identity.md @@ -0,0 +1,139 @@ +You are identifying basic hardware information including manufacturer, model, and serial numbers. + +## Your Task + +Extract and display system identification information: + +### 1. System Identity +- **Manufacturer**: System/chassis manufacturer +- **Product name**: System model/product name +- **Serial number**: System serial number +- **UUID**: System UUID +- **SKU**: Stock keeping unit number (if available) + +### 2. Motherboard Identity +- **Manufacturer**: Board manufacturer +- **Product name**: Board model +- **Serial number**: Board serial number +- **Version**: Board version/revision + +### 3. BIOS/UEFI Identity +- **Vendor**: BIOS manufacturer +- **Version**: BIOS version +- **Release date**: BIOS release date +- **Revision**: Firmware revision + +### 4. Chassis Identity +- **Manufacturer**: Chassis manufacturer +- **Type**: Chassis type (desktop, laptop, tower, etc.) 
+- **Serial number**: Chassis serial number +- **Asset tag**: Asset tag (if configured) + +## Commands to Use + +**Primary identification:** +- `sudo dmidecode -t system` +- `sudo dmidecode -t baseboard` +- `sudo dmidecode -t bios` +- `sudo dmidecode -t chassis` + +**Additional information:** +- `hostnamectl` - System hostname and other details +- `cat /sys/class/dmi/id/product_name` +- `cat /sys/class/dmi/id/sys_vendor` +- `cat /sys/class/dmi/id/board_vendor` +- `cat /sys/class/dmi/id/bios_version` + +**Hardware summary:** +- `sudo lshw -short` - Quick hardware overview +- `inxi -M` - Machine data (if available) + +## Output Format + +Present a clean identification card format: + +``` +============================================================================= + HARDWARE IDENTIFICATION +============================================================================= + +SYSTEM INFORMATION +------------------ +Manufacturer: [vendor] +Product Name: [model] +Serial Number: [S/N] +UUID: [uuid] +SKU Number: [sku] + +MOTHERBOARD INFORMATION +----------------------- +Manufacturer: [vendor] +Product Name: [model] +Version: [version] +Serial Number: [S/N] + +BIOS/UEFI INFORMATION +--------------------- +Vendor: [vendor] +Version: [version] +Release Date: [date] +Firmware Revision: [revision] + +CHASSIS INFORMATION +------------------- +Manufacturer: [vendor] +Type: [type] +Serial Number: [S/N] +Asset Tag: [tag] + +============================================================================= +``` + +### JSON Format (AI-Readable) + +```json +{ + "system": { + "manufacturer": "", + "product_name": "", + "serial_number": "", + "uuid": "", + "sku": "" + }, + "motherboard": { + "manufacturer": "", + "product_name": "", + "version": "", + "serial_number": "" + }, + "bios": { + "vendor": "", + "version": "", + "release_date": "", + "revision": "" + }, + "chassis": { + "manufacturer": "", + "type": "", + "serial_number": "", + "asset_tag": "" + } +} +``` + +## Execution Guidelines + 
+1. **Use sudo**: dmidecode requires root privileges +2. **Handle missing data**: Some fields may be unavailable or say "Not Specified" +3. **Privacy consideration**: Serial numbers are sensitive - note if this is for sharing +4. **Validate output**: Cross-check using multiple methods +5. **Format cleanly**: Align fields for easy reading + +## Important Notes + +- Virtual machines may show generic or missing hardware IDs +- Some manufacturers don't populate all DMI fields +- Serial numbers should be handled with care for security/privacy +- Asset tags are typically only set in enterprise environments + +Be concise and present only the identification information requested. diff --git a/commands/sysadmin/linux-desktop/hardware/hardware-profilers/hardware-profile.md b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/hardware-profile.md new file mode 100644 index 0000000000000000000000000000000000000000..3477e66dd54ae3c300d1d846c657e6647e0dda95 --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/hardware-profilers/hardware-profile.md @@ -0,0 +1,232 @@ +You are creating a comprehensive hardware profile of the system that is both AI-readable and human-readable. + +## Your Task + +Generate a detailed hardware summary by systematically profiling the following components: + +### 1. CPU Profile +- **Model and specifications** using `lscpu` +- **Architecture details**: cores, threads, cache sizes +- **CPU frequency**: current, min, max +- **Virtualization support**: VT-x/AMD-V capabilities +- **CPU vulnerabilities**: Spectre, Meltdown, etc. +- **Performance governor** settings + +### 2. Memory Profile +- **Total RAM** using `free -h` and `dmidecode -t memory` +- **Memory type and speed**: DDR3/DDR4/DDR5, frequency +- **Number of modules** and configuration (slots used/available) +- **Swap configuration**: size, type (partition/file) +- **Current usage** and available memory + +### 3. 
Storage Profile +- **All storage devices** using `lsblk`, `fdisk -l`, and `smartctl` +- **Drive types**: NVMe, SSD, HDD, eMMC +- **Capacity and usage** for each device +- **Partition layout** and filesystem types +- **SMART health status** for drives that support it +- **Mount points** and usage percentages + +### 4. Graphics Profile +- **GPU information** using `lspci | grep -i vga`, `lshw -C display` +- **GPU vendor and model**: NVIDIA, AMD, Intel +- **Driver information**: version and type (proprietary/open-source) +- **Display connections** and active monitors +- **VRAM** capacity (if available) +- **Vulkan/OpenGL support** using `vulkaninfo` and `glxinfo` if available + +### 5. Network Profile +- **Network interfaces** using `ip addr` and `lshw -C network` +- **Interface types**: Ethernet, WiFi, virtual +- **MAC addresses** for physical interfaces +- **Link speeds** and duplex settings +- **Wireless capabilities**: protocols supported (802.11ac/ax, etc.) +- **Active connections** and IP configuration + +### 6. System Board and Firmware +- **Motherboard details** using `dmidecode -t baseboard` +- **BIOS/UEFI information**: vendor, version, release date +- **System manufacturer and model** +- **Serial numbers** (if accessible and relevant) +- **Firmware capabilities**: UEFI features, secure boot status + +### 7. Peripherals and Devices +- **USB devices** using `lsusb` +- **PCI devices** using `lspci` +- **Audio devices** using `aplay -l` and `lshw -C sound` +- **Input devices**: keyboards, mice, touchpads +- **Connected storage**: external drives, card readers + +### 8. 
Thermal and Power +- **Temperature sensors** using `sensors` (if lm-sensors installed) +- **Fan speeds** and thermal zones +- **Battery information** (for laptops) using `upower -i /org/freedesktop/UPower/devices/battery_BAT0` +- **Power management** settings and capabilities + +## Commands to Use + +**System Overview:** +- `inxi -Fxz` - Comprehensive system information +- `hwinfo --short` - Hardware summary + +**CPU:** +- `lscpu` +- `cat /proc/cpuinfo` +- `cpufreq-info` (if available) + +**Memory:** +- `free -h` +- `sudo dmidecode -t memory` +- `cat /proc/meminfo` + +**Storage:** +- `lsblk -o NAME,SIZE,TYPE,FSTYPE,MOUNTPOINT,MODEL` +- `sudo fdisk -l` +- `sudo smartctl -a /dev/sdX` (for each drive) +- `df -h` + +**Graphics:** +- `lspci | grep -i vga` +- `sudo lshw -C display` +- `nvidia-smi` (for NVIDIA GPUs) +- `glxinfo | grep -i "opengl version"` + +**Network:** +- `ip addr` +- `sudo lshw -C network` +- `iwconfig` (for wireless) +- `ethtool eth0` (for Ethernet) + +**Motherboard/BIOS:** +- `sudo dmidecode -t baseboard` +- `sudo dmidecode -t bios` +- `sudo dmidecode -t system` + +**Peripherals:** +- `lsusb -v` +- `lspci -v` +- `aplay -l` + +**Thermal:** +- `sensors` +- `cat /sys/class/thermal/thermal_zone*/temp` + +## Output Format + +Create a structured report with the following sections: + +### Executive Summary +- System type (desktop/laptop/server) +- Overall hardware generation/age +- Primary use case capabilities (gaming, development, general use) + +### Detailed Hardware Profile + +**CPU:** +- Model: [full CPU name] +- Cores/Threads: [physical cores]/[logical threads] +- Base/Max Frequency: [GHz] +- Cache: L1/L2/L3 sizes +- Features: [virtualization, security features] + +**Memory:** +- Total: [GB] ([type] @ [speed]) +- Configuration: [X modules in Y slots] +- Swap: [size] ([type]) + +**Storage:** +- Drive 1: [model] ([type]) - [capacity] - Health: [status] +- Drive 2: ... 
+- Total capacity: [TB] +- Partition layout: [summary] + +**Graphics:** +- GPU: [model] +- Driver: [version and type] +- VRAM: [size] +- Displays: [count and configuration] + +**Network:** +- Ethernet: [model] - [speed] +- WiFi: [model] - [protocols] +- Active connections: [summary] + +**Motherboard:** +- Manufacturer: [brand] +- Model: [model number] +- BIOS: [version] ([date]) + +**Peripherals:** +- [List of notable USB/PCI devices] + +**Thermal/Power:** +- Current temperatures: [CPU/GPU/etc.] +- Battery: [status if laptop] + +### Hardware Capabilities Assessment + +Rate and describe: +- **Performance tier**: Entry/Mid/High-end for [CPU/GPU/Storage/RAM] +- **Bottlenecks**: Identify any limiting components +- **Upgrade recommendations**: Suggest meaningful upgrades if applicable +- **Compatibility notes**: Linux driver status, known issues + +### AI-Readable Summary (JSON) + +Provide a structured JSON object: +```json +{ + "system_type": "desktop|laptop|server", + "cpu": { + "model": "", + "cores": 0, + "threads": 0, + "base_ghz": 0.0, + "max_ghz": 0.0 + }, + "memory": { + "total_gb": 0, + "type": "", + "speed_mhz": 0 + }, + "storage": [ + { + "device": "", + "type": "nvme|ssd|hdd", + "capacity_gb": 0, + "health": "good|warning|critical" + } + ], + "gpu": { + "model": "", + "vendor": "nvidia|amd|intel", + "driver": "", + "vram_gb": 0 + }, + "network": { + "ethernet": {"present": true, "speed_mbps": 0}, + "wifi": {"present": true, "standard": ""} + } +} +``` + +## Execution Guidelines + +1. **Run commands systematically** in the order listed above +2. **Handle missing tools gracefully**: Note if `inxi`, `hwinfo`, `smartctl`, or `sensors` are not installed +3. **Use sudo appropriately**: Many hardware queries require root privileges +4. **Parse output carefully**: Extract relevant information, filter noise +5. **Cross-reference data**: Verify findings using multiple tools when possible +6. 
**Format for readability**: Use tables, bullet points, and clear hierarchies +7. **Include context**: Add brief explanations for technical specs +8. **Flag concerns**: Highlight any hardware issues, deprecated drivers, or thermal problems + +## Important Notes + +- Some commands may require installation of additional packages (`lm-sensors`, `smartmontools`, `pciutils`, etc.) +- SMART data requires drives that support it (most modern SSDs/HDDs) +- GPU information varies significantly by vendor +- Thermal data availability depends on sensor support +- Always respect privacy: avoid exposing serial numbers in shared contexts + +Be thorough, accurate, and provide actionable insights about the hardware configuration. diff --git a/commands/sysadmin/linux-desktop/hardware/install-google-fonts.md b/commands/sysadmin/linux-desktop/hardware/install-google-fonts.md new file mode 100644 index 0000000000000000000000000000000000000000..4f5cd5fc826cb8fc39fdc018cbbda89f8eb69b5b --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/install-google-fonts.md @@ -0,0 +1,80 @@ +--- +description: Install Google Fonts provided by the user +tags: [fonts, google-fonts, typography, installation, project, gitignored] +--- + +You are helping the user install Google Fonts by name. + +## Process + +1. **Get font names from user** + - Ask user which Google Fonts they want to install + - Accept multiple font names + +2. **Choose installation method** + + **Method 1: Using google-font-installer (if available)** + - Install tool: `pip install gftools` + - Download font: `gftools download-family "Font Name"` + + **Method 2: Using font-downloader** + - Install: `sudo apt install font-manager` + - Or use: `pip install font-downloader` + + **Method 3: Manual download** + - Download from: `https://fonts.google.com/` + - Or use GitHub: `https://github.com/google/fonts/tree/main/` + +3. 
**Download fonts** + - For each font name: + - Convert name to lowercase with dashes (e.g., "Roboto Mono" β†’ "roboto-mono") + - Download from: `https://fonts.google.com/download?family=Font+Name` + - Or clone specific font: `git clone https://github.com/google/fonts.git --depth 1 --filter=blob:none --sparse && cd fonts && git sparse-checkout set ofl/` + +4. **Install fonts** + - Create user font directory: `mkdir -p ~/.local/share/fonts/google-fonts` + - Extract and copy font files: + ```bash + unzip .zip -d ~/.local/share/fonts/google-fonts// + ``` + - Only copy .ttf and .otf files + +5. **Update font cache** + - Run: `fc-cache -fv` + - Verify installation: `fc-list | grep -i ""` + +6. **Provide usage examples** + - Show how to use in applications + - Show how to set as system font + - Show how to use in CSS/web design + +## Example Workflow + +```bash +# Example: Installing "Roboto" and "Open Sans" +mkdir -p ~/.local/share/fonts/google-fonts +cd /tmp + +# Download Roboto +wget "https://fonts.google.com/download?family=Roboto" -O roboto.zip +unzip roboto.zip -d ~/.local/share/fonts/google-fonts/roboto/ + +# Download Open Sans +wget "https://fonts.google.com/download?family=Open+Sans" -O open-sans.zip +unzip open-sans.zip -d ~/.local/share/fonts/google-fonts/open-sans/ + +# Update cache +fc-cache -fv + +# Verify +fc-list | grep -i "roboto\|open sans" +``` + +## Output + +Provide a summary showing: +- Fonts requested by user +- Download and installation status for each +- Installation location +- Verification that fonts are available +- Usage examples diff --git a/commands/sysadmin/linux-desktop/hardware/list-fonts.md b/commands/sysadmin/linux-desktop/hardware/list-fonts.md new file mode 100644 index 0000000000000000000000000000000000000000..b8f4659708b74d2812a22dc9eb6ae01ddade830e --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/list-fonts.md @@ -0,0 +1,79 @@ +--- +description: List installed fonts and offer to install additional fonts +tags: [fonts, 
typography, system, customization, project, gitignored] +--- + +You are helping the user review their installed fonts and install additional ones if requested. + +## Process + +1. **List currently installed fonts** + - System fonts: `fc-list | cut -d: -f2 | sort -u | wc -l` (count) + - Show font families: `fc-list : family | sort -u` + - List font directories: + - System: `/usr/share/fonts/` + - User: `~/.local/share/fonts/` + +2. **Categorize installed fonts** + - Serif fonts + - Sans-serif fonts + - Monospace/coding fonts + - Display/decorative fonts + - Icon fonts + +3. **Check for common font packages** + - `dpkg -l | grep -E "fonts-|ttf-"` + - Common packages: + - `fonts-liberation` + - `fonts-noto` + - `fonts-roboto` + - `ttf-mscorefonts-installer` + - `fonts-powerline` + +4. **Suggest useful font additions** + + **For coding:** + - Fira Code (ligatures) + - JetBrains Mono + - Cascadia Code + - Victor Mono + - Source Code Pro + + **For design:** + - Inter + - Poppins + - Montserrat + - Raleway + + **System fonts:** + - Noto fonts (comprehensive Unicode) + - Liberation fonts (MS Office compatible) + + **Icons:** + - Font Awesome + - Material Design Icons + - Nerd Fonts + +5. **Installation methods** + - APT: `sudo apt install fonts-` + - Manual installation: + ```bash + mkdir -p ~/.local/share/fonts + # Copy font files to directory + fc-cache -fv + ``` + - Google Fonts downloader (see separate command) + +6. 
**Test font installation** + - Refresh font cache: `fc-cache -fv` + - Verify font: `fc-list | grep -i ` + - Show sample: `fc-match ` + +## Output + +Provide a report showing: +- Total number of installed font families +- List of installed fonts by category +- Missing commonly-used fonts +- Suggested fonts to install based on use case +- Installation commands diff --git a/commands/sysadmin/linux-desktop/hardware/review-gpu-settings.md b/commands/sysadmin/linux-desktop/hardware/review-gpu-settings.md new file mode 100644 index 0000000000000000000000000000000000000000..2b5cec684cc913f6a7302b0f7202483aa04471ac --- /dev/null +++ b/commands/sysadmin/linux-desktop/hardware/review-gpu-settings.md @@ -0,0 +1,104 @@ +--- +description: Review GPU settings and suggest compatible monitoring tools +tags: [gpu, monitoring, settings, optimization, tools, project, gitignored] +--- + +You are helping the user review GPU settings and suggest appropriate monitoring tools. + +## Process + +1. **Current GPU configuration review** + - Power management mode: `cat /sys/class/drm/card*/device/power_dpm_state` + - Performance level: `cat /sys/class/drm/card*/device/power_dpm_force_performance_level` + - Clock speeds: + ```bash + cat /sys/class/drm/card*/device/pp_dpm_sclk # GPU clock + cat /sys/class/drm/card*/device/pp_dpm_mclk # Memory clock + ``` + - Temperature limits: `cat /sys/class/drm/card*/device/hwmon/hwmon*/temp*_crit` + +2. **Power profile settings** + - Available profiles: `cat /sys/class/drm/card*/device/pp_power_profile_mode` + - Typical profiles: + - BOOTUP_DEFAULT + - 3D_FULL_SCREEN + - POWER_SAVING + - VIDEO + - VR + - COMPUTE + +3. **Fan control settings** + - Fan mode: `cat /sys/class/drm/card*/device/hwmon/hwmon*/pwm*_enable` + - Fan speed: `cat /sys/class/drm/card*/device/hwmon/hwmon*/pwm*` + - Auto vs manual control + +4. 
**Overclocking/undervolting status** + - Check if overclocking is enabled + - Voltage settings: `cat /sys/class/drm/card*/device/pp_od_clk_voltage` + - Power limit: `rocm-smi --showmaxpower` + +5. **Suggest monitoring tools** + + **CLI Tools:** + - `rocm-smi` - AMD's official tool (already mentioned) + - `radeontop` - Real-time AMD GPU usage + - `nvtop` - Works with AMD GPUs too (better visualization) + - `htop` with GPU support + + **GUI Tools:** + - `radeon-profile` - Comprehensive AMD GPU control + - `CoreCtrl` - Modern GPU/CPU control for Linux + - `GreenWithEnvy` (GWE) - Mainly NVIDIA, but has AMD support + - `Mission Center` - System monitor with GPU support + - `Mangohud` - In-game overlay for monitoring + + **System monitoring:** + - `conky` with GPU scripts + - `btop` - Resource monitor with GPU + - `glances` - With GPU plugin + +6. **Install and configure recommended tool** + + **For AMD, recommend CoreCtrl:** + ```bash + sudo apt install corectrl + ``` + - Set up autostart + - Configure polkit rules for non-root access + + **For CLI, recommend nvtop:** + ```bash + sudo apt install nvtop + ``` + + **For gaming overlay, recommend Mangohud:** + ```bash + sudo apt install mangohud + ``` + +7. **Configure optimal settings** + - Suggest performance profile for user's use case: + - Gaming: 3D_FULL_SCREEN + - AI/ML: COMPUTE + - Video encoding: VIDEO + - Power saving: POWER_SAVING + + - Offer to create script to set preferred profile on boot + +8. 
**Create monitoring script** + - Offer to create a simple GPU monitoring script: + ```bash + #!/bin/bash + watch -n 1 'rocm-smi && echo && sensors | grep -A 3 amdgpu' + ``` + +## Output + +Provide a report showing: +- Current GPU settings summary +- Active power profile +- Temperature and fan status +- Recommended monitoring tools (CLI and GUI) +- Installation commands for suggested tools +- Optimal settings for user's use case +- Script to apply recommended settings diff --git a/commands/sysadmin/linux-desktop/installation/clis/install-gh-cli.md b/commands/sysadmin/linux-desktop/installation/clis/install-gh-cli.md new file mode 100644 index 0000000000000000000000000000000000000000..ec1d9520387ee6879814ad19945032a9e21df354 --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/clis/install-gh-cli.md @@ -0,0 +1,309 @@ +# Install and Authenticate GitHub CLI (gh) + +You are helping the user install and authenticate the GitHub CLI tool. + +## Your tasks: + +1. **Check if gh is already installed:** + ```bash + which gh + gh --version + ``` + + If already installed and authenticated: + ```bash + gh auth status + ``` + +2. **Install GitHub CLI (if not installed):** + + **Method 1: Using official repository (recommended):** + ```bash + # Add the GPG key + sudo mkdir -p -m 755 /etc/apt/keyrings + wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null + sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg + + # Add the repository + echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null + + # Install + sudo apt update + sudo apt install gh + ``` + + **Method 2: Using snap:** + ```bash + sudo snap install gh + ``` + + **Method 3: Using Homebrew (if installed):** + ```bash + brew install gh + ``` + +3. 
**Verify installation:** + ```bash + gh --version + which gh + ``` + +4. **Authenticate with GitHub:** + + **Interactive authentication (recommended):** + ```bash + gh auth login + ``` + + This will prompt for: + - GitHub.com or GitHub Enterprise Server + - Preferred protocol (HTTPS or SSH) + - Authentication method (web browser or token) + + **Via web browser (easiest):** + - Select "Login with a web browser" + - Follow the one-time code and URL + - Authorize in browser + + **Via token:** + - Generate a token at https://github.com/settings/tokens + - Select "Login with authentication token" + - Paste the token + +5. **Verify authentication:** + ```bash + gh auth status + ``` + + Should show: + - Logged in to github.com + - Account name + - Token scopes + +6. **Configure gh settings:** + + **Set default editor:** + ```bash + gh config set editor vim + # or + gh config set editor nano + # or + gh config set editor code # VS Code + ``` + + **Set default protocol:** + ```bash + gh config set git_protocol ssh + # or + gh config set git_protocol https + ``` + + **View all config:** + ```bash + gh config list + ``` + +7. **Set up SSH key (if using SSH protocol):** + ```bash + # Generate SSH key if needed + ssh-keygen -t ed25519 -C "your_email@example.com" + + # Add to ssh-agent + eval "$(ssh-agent -s)" + ssh-add ~/.ssh/id_ed25519 + + # Add to GitHub + gh ssh-key add ~/.ssh/id_ed25519.pub --title "My Ubuntu Desktop" + + # Or copy public key to GitHub manually + cat ~/.ssh/id_ed25519.pub + ``` + +8. **Test GitHub connectivity:** + ```bash + # Test SSH connection + ssh -T git@github.com + + # Test gh CLI + gh repo list + gh auth status + ``` + +9. **Configure git to use gh for credentials:** + ```bash + gh auth setup-git + ``` + + This configures git to use gh as a credential helper. + +10. 
**Show basic gh commands:** + + **Repository operations:** + - `gh repo create` - Create a repository + - `gh repo clone ` - Clone a repository + - `gh repo view` - View repository details + - `gh repo list` - List your repositories + - `gh repo fork` - Fork a repository + + **Pull requests:** + - `gh pr create` - Create a pull request + - `gh pr list` - List pull requests + - `gh pr view ` - View a PR + - `gh pr checkout ` - Checkout a PR + - `gh pr merge ` - Merge a PR + - `gh pr review ` - Review a PR + + **Issues:** + - `gh issue create` - Create an issue + - `gh issue list` - List issues + - `gh issue view ` - View an issue + - `gh issue close ` - Close an issue + + **Workflows:** + - `gh workflow list` - List workflows + - `gh workflow view ` - View workflow + - `gh workflow run ` - Trigger a workflow + - `gh run list` - List workflow runs + - `gh run view ` - View a run + + **Gists:** + - `gh gist create ` - Create a gist + - `gh gist list` - List gists + - `gh gist view ` - View a gist + +11. **Set up shell completion:** + + **For bash:** + ```bash + gh completion -s bash > ~/.gh-completion.bash + echo 'source ~/.gh-completion.bash' >> ~/.bashrc + source ~/.bashrc + ``` + + **For zsh:** + ```bash + gh completion -s zsh > ~/.gh-completion.zsh + echo 'source ~/.gh-completion.zsh' >> ~/.zshrc + source ~/.zshrc + ``` + +12. **Configure multiple accounts (if needed):** + ```bash + # Add another account + GH_HOST=github.com gh auth login + + # Switch between accounts + gh auth switch + ``` + +13. **Set up aliases (optional):** + ```bash + gh alias set pv 'pr view' + gh alias set co 'pr checkout' + gh alias set bugs 'issue list --label=bug' + ``` + + List aliases: + ```bash + gh alias list + ``` + +14. **Authenticate with GitHub Enterprise (if applicable):** + ```bash + gh auth login --hostname github.example.com + ``` + +15. 
**Troubleshooting common issues:** + + **Permission denied:** + - Check auth status: `gh auth status` + - Re-authenticate: `gh auth login` + - Check token scopes + + **SSH issues:** + - Verify SSH key: `ssh -T git@github.com` + - Add SSH key to GitHub: `gh ssh-key add` + - Check ssh-agent: `ssh-add -l` + + **Rate limiting:** + - Check rate limit: `gh api rate_limit` + - Use authentication to increase limits + +16. **Update gh:** + ```bash + sudo apt update + sudo apt upgrade gh + # or + brew upgrade gh + # or + sudo snap refresh gh + ``` + +17. **Advanced configuration:** + + **Custom API endpoint (GitHub Enterprise):** + ```bash + export GH_HOST=github.example.com + ``` + + **Disable prompts:** + ```bash + gh config set prompt disabled + ``` + + **Configure pager:** + ```bash + gh config set pager less + ``` + +18. **Security best practices:** + - Use SSH keys instead of HTTPS when possible + - Use tokens with minimal required scopes + - Rotate tokens regularly + - Don't share tokens + - Use different tokens for different machines + - Enable 2FA on GitHub account + - Review authorized applications regularly + +19. **Provide workflow examples:** + + **Create a repo and push:** + ```bash + mkdir my-project + cd my-project + git init + echo "# My Project" > README.md + git add README.md + git commit -m "Initial commit" + gh repo create my-project --public --source=. --push + ``` + + **Fork and clone:** + ```bash + gh repo fork owner/repo --clone + ``` + + **Create PR from current branch:** + ```bash + gh pr create --title "My changes" --body "Description of changes" + ``` + +20. 
**Report findings:** + Summarize: + - Installation status + - Authentication status + - Configured settings + - Available accounts + - Next steps + +## Important notes: +- gh is the official GitHub CLI +- Requires GitHub account +- Can use HTTPS or SSH protocol +- SSH is generally more secure and convenient +- gh can replace many git operations with simpler syntax +- Shell completion is very helpful +- Keep gh updated for latest features +- Multiple accounts are supported +- Works with both GitHub.com and GitHub Enterprise +- Tokens should have minimal required scopes diff --git a/commands/sysadmin/linux-desktop/installation/clis/install-pipx.md b/commands/sysadmin/linux-desktop/installation/clis/install-pipx.md new file mode 100644 index 0000000000000000000000000000000000000000..c0e3a2a7d714d04c6729e2eeac7806b5e8270d8c --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/clis/install-pipx.md @@ -0,0 +1,321 @@ +# Install pipx and Suggest Packages + +You are helping the user install pipx and suggesting useful packages to install with it. + +## Your tasks: + +1. **Explain what pipx is:** + pipx is a tool to install and run Python applications in isolated environments. Unlike pip which installs packages globally or in the current environment, pipx creates a separate virtual environment for each application. + +2. **Check if pipx is already installed:** + ```bash + which pipx + pipx --version + ``` + + If already installed: + ```bash + pipx list + ``` + +3. **Install pipx:** + + **Method 1: Using apt (Ubuntu 23.04+):** + ```bash + sudo apt update + sudo apt install pipx + ``` + + **Method 2: Using pip:** + ```bash + python3 -m pip install --user pipx + python3 -m pipx ensurepath + ``` + + **Method 3: Using Homebrew (if installed):** + ```bash + brew install pipx + ``` + +4. **Ensure pipx is on PATH:** + ```bash + pipx ensurepath + ``` + + Then restart shell or: + ```bash + source ~/.bashrc + ``` + +5. 
**Verify installation:** + ```bash + pipx --version + which pipx + pipx list + ``` + +6. **Explain pipx benefits:** + - Each app in isolated environment (no dependency conflicts) + - Easy to install/uninstall applications + - Applications available system-wide + - No need to activate virtual environments + - Perfect for CLI tools + - Automatic PATH configuration + +7. **Show basic pipx usage:** + - `pipx install ` - Install a package + - `pipx uninstall ` - Uninstall a package + - `pipx list` - List installed packages + - `pipx upgrade ` - Upgrade a package + - `pipx upgrade-all` - Upgrade all packages + - `pipx run ` - Run without installing + - `pipx inject ` - Add dependency to app + +8. **Suggest essential Python CLI tools:** + + **Development tools:** + ```bash + pipx install black # Code formatter + pipx install flake8 # Linter + pipx install pylint # Code analyzer + pipx install mypy # Static type checker + pipx install isort # Import sorter + pipx install autopep8 # Auto formatter + pipx install bandit # Security linter + ``` + + **Project management:** + ```bash + pipx install poetry # Dependency management + pipx install pipenv # Virtual environment manager + pipx install cookiecutter # Project templates + pipx install tox # Testing automation + ``` + + **Productivity tools:** + ```bash + pipx install httpie # HTTP client (better than curl) + pipx install youtube-dl # Download videos + pipx install yt-dlp # youtube-dl fork (maintained) + pipx install tldr # Simplified man pages + pipx install howdoi # Code search from command line + ``` + + **Data science & analysis:** + ```bash + pipx install jupyter # Jupyter notebooks + pipx install jupyterlab # JupyterLab + pipx install datasette # Data exploration + pipx install csvkit # CSV tools + ``` + + **File & text processing:** + ```bash + pipx install pdfplumber # PDF text extraction + pipx install pdf2image # PDF to image converter + pipx install rich-cli # Rich text in terminal + pipx install glances # 
System monitoring + ``` + + **Cloud & infrastructure:** + ```bash + pipx install ansible # Automation + pipx install awscli # AWS command line + pipx install httpie # API testing + pipx install docker-compose # Docker orchestration + ``` + + **Documentation:** + ```bash + pipx install mkdocs # Documentation generator + pipx install sphinx # Documentation tool + pipx install doc8 # Documentation linter + ``` + + **Testing & quality:** + ```bash + pipx install pytest # Testing framework + pipx install coverage # Code coverage + pipx install pre-commit # Git hooks manager + ``` + +9. **Suggest packages based on user's interests:** + Ask the user what they work with: + - Web development? + - Data science? + - DevOps? + - Security? + - Content creation? + + Then suggest relevant packages. + +10. **Install a few essential packages:** + Recommend installing at minimum: + ```bash + pipx install httpie # Better HTTP client + pipx install tldr # Quick command help + pipx install black # Python formatter (if they code) + pipx install glances # System monitor + ``` + +11. **Show how to use pipx run (temporary usage):** + ```bash + # Run without installing + pipx run pycowsay "Hello!" + pipx run black --version + + # Useful for one-off tasks + pipx run cookiecutter gh:audreyr/cookiecutter-pypackage + ``` + +12. **Show how to manage installations:** + + **List all installed apps:** + ```bash + pipx list + pipx list --verbose + ``` + + **Upgrade specific package:** + ```bash + pipx upgrade black + ``` + + **Upgrade all packages:** + ```bash + pipx upgrade-all + ``` + + **Uninstall package:** + ```bash + pipx uninstall black + ``` + + **Reinstall package:** + ```bash + pipx reinstall black + ``` + +13. **Show how to inject additional dependencies:** + Some apps need extra packages: + ```bash + pipx install ansible + pipx inject ansible ansible-lint + pipx inject ansible molecule + ``` + +14. 
**Configure pipx:** + + **Check current configuration:** + ```bash + pipx environment + ``` + + **Change installation location (if needed):** + ```bash + export PIPX_HOME=~/.local/pipx + export PIPX_BIN_DIR=~/.local/bin + ``` + +15. **Show differences between pip and pipx:** + - `pip install ` - Installs in current environment + - `pipx install ` - Installs in isolated environment + - Use pip for: libraries, dependencies + - Use pipx for: CLI applications, standalone tools + +16. **Troubleshooting:** + + **Package not in PATH:** + ```bash + pipx ensurepath + source ~/.bashrc + echo $PATH | grep .local/bin + ``` + + **Broken installation:** + ```bash + pipx reinstall + ``` + + **Clean up:** + ```bash + pipx uninstall-all + ``` + +17. **Advanced usage:** + + **Specify Python version:** + ```bash + pipx install --python python3.11 black + ``` + + **Install from git:** + ```bash + pipx install git+https://github.com/user/repo.git + ``` + + **Install with extras:** + ```bash + pipx install 'package[extra1,extra2]' + ``` + +18. **Integration with other tools:** + + **pre-commit integration:** + ```bash + pipx install pre-commit + pre-commit install + ``` + + **VSCode integration:** + - Installed tools (black, flake8, mypy) are auto-detected + - No need to install in each project + +19. **Maintenance commands:** + ```bash + # Update pipx itself + python3 -m pip install --user --upgrade pipx + + # Upgrade all installed packages + pipx upgrade-all + + # List outdated packages + pipx list --verbose | grep -A 2 "upgrade available" + ``` + +20. **Provide recommendations:** + - Use pipx for all Python CLI tools + - Keep applications updated with `pipx upgrade-all` + - Don't use pip for global installations anymore + - Use `pipx run` to try packages before installing + - Install project-specific tools (black, flake8) with pipx + - Consider adding `pipx upgrade-all` to crontab + - Keep separate from project dependencies (use venv/poetry for those) + +21. 
**Show common workflow:** + ```bash + # Install essential tools + pipx install black + pipx install flake8 + pipx install mypy + + # In your project + cd my-project + black . + flake8 . + mypy . + + # No need to activate virtual environment! + ``` + +## Important notes: +- pipx requires Python 3.6+ +- Each app gets its own virtual environment +- Apps are available system-wide after installation +- Perfect for CLI tools, not for libraries +- Keeps system Python clean +- No dependency conflicts between apps +- Must be on PATH - use `pipx ensurepath` +- Can coexist with pip and venv +- Use pip for project dependencies, pipx for tools +- Regular updates recommended: `pipx upgrade-all` diff --git a/commands/sysadmin/linux-desktop/installation/clis/install-sdkman.md b/commands/sysadmin/linux-desktop/installation/clis/install-sdkman.md new file mode 100644 index 0000000000000000000000000000000000000000..929b81cf876ba8976da3ef69965d9571a1933830 --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/clis/install-sdkman.md @@ -0,0 +1,180 @@ +# Install SDKMAN on Linux + +You are helping the user install SDKMAN for managing parallel versions of multiple Software Development Kits. + +## Your tasks: + +1. **Check if SDKMAN is already installed:** + - Check: `sdk version` or `which sdk` + - Check for SDKMAN directory: `ls -la ~/.sdkman` + - If already installed, ask if they want to update it + +2. **Check prerequisites:** + SDKMAN requires: + - curl: `curl --version` + - zip/unzip: `which zip unzip` + - bash or zsh shell + + Install missing prerequisites: + ```bash + sudo apt update + sudo apt install curl zip unzip + ``` + +3. **Download and install SDKMAN:** + ```bash + curl -s "https://get.sdkman.io" | bash + ``` + + The installer will: + - Install to `~/.sdkman` + - Add initialization to ~/.bashrc or ~/.zshrc + - Set up sdk command + +4. **Initialize SDKMAN in current shell:** + ```bash + source "$HOME/.sdkman/bin/sdkman-init.sh" + ``` + +5. 
**Verify installation:** + ```bash + sdk version + sdk help + ``` + +6. **Show available SDKs:** + ```bash + sdk list + ``` + + Common SDKs available: + - Java (various distributions: OpenJDK, Graal, Corretto, etc.) + - Gradle + - Maven + - Kotlin + - Scala + - Groovy + - Spring Boot + - Micronaut + - And many more + +7. **Install a few common SDKs (ask user first):** + + **Java:** + ```bash + sdk list java + sdk install java # installs latest stable + # Or specific version: + # sdk install java 17.0.9-tem + ``` + + **Gradle:** + ```bash + sdk install gradle + ``` + + **Maven:** + ```bash + sdk install maven + ``` + +8. **Show basic SDKMAN usage:** + Explain to the user: + - `sdk list ` - List available versions of an SDK + - `sdk install ` - Install specific version + - `sdk install ` - Install latest stable + - `sdk uninstall ` - Remove a version + - `sdk use ` - Use version for current shell + - `sdk default ` - Set default version + - `sdk current ` - Show current version in use + - `sdk current` - Show all current versions + - `sdk upgrade ` - Upgrade to latest version + - `sdk update` - Update SDK list + - `sdk selfupdate` - Update SDKMAN itself + +9. **Configure SDKMAN (optional):** + Edit `~/.sdkman/etc/config`: + + ```bash + # Auto answer 'yes' to all prompts + sdkman_auto_answer=true + + # Automatically use Java version from .sdkmanrc + sdkman_auto_env=true + + # Check for SDK updates on login + sdkman_checkup_enable=true + + # Automatically selfupdate + sdkman_selfupdate_enable=true + ``` + +10. **Set up project-specific SDK versions:** + Create `.sdkmanrc` file in project root: + ```bash + java=17.0.9-tem + gradle=8.4 + maven=3.9.5 + ``` + + Enable auto-env: `sdk env` or `sdk env install` + +11. **Verify PATH and environment:** + ```bash + which java + java -version + echo $JAVA_HOME + ``` + +12. 
**Show how to switch Java versions:** + Demonstrate: + ```bash + sdk list java + sdk install java 11.0.21-tem + sdk install java 17.0.9-tem + sdk use java 11.0.21-tem + java -version + sdk default java 17.0.9-tem + ``` + +13. **Offline mode (optional):** + Explain offline mode for when internet is unavailable: + ```bash + sdk offline enable + sdk offline disable + ``` + +14. **Flush and clean:** + ```bash + sdk flush # Clear caches + sdk flush temp # Clear temporary files + ``` + +15. **Provide best practices:** + - Run `sdk update` regularly to refresh SDK lists + - Run `sdk selfupdate` to keep SDKMAN current + - Use `sdk current` to verify active versions + - Use `.sdkmanrc` files for project-specific versions + - Enable `sdkman_auto_env` for automatic version switching + - Use `sdk env` when entering project directories + - Keep multiple Java versions for different projects + - Set sensible defaults with `sdk default` + - SDKMAN doesn't require sudo + - Check `~/.sdkman/candidates/` to see installed SDKs + +16. 
**Troubleshooting:** + - If `sdk` command not found, source the init script + - Check `~/.bashrc` has SDKMAN initialization + - Restart shell or source bashrc: `source ~/.bashrc` + - Check PATH: `echo $PATH | grep sdkman` + - Verify SDKMAN directory exists: `ls ~/.sdkman` + +## Important notes: +- SDKMAN is user-level, doesn't require sudo +- Each SDK version is kept separate in `~/.sdkman/candidates/` +- Can coexist with system Java or other installation methods +- Using `sdk use` only affects current shell +- Using `sdk default` affects all new shells +- .sdkmanrc files are project-specific +- SDKMAN is particularly popular in JVM ecosystem +- Bash completion is included diff --git a/commands/sysadmin/linux-desktop/installation/clis/install-yadm.md b/commands/sysadmin/linux-desktop/installation/clis/install-yadm.md new file mode 100644 index 0000000000000000000000000000000000000000..475c57d5ec339231e49da751dffa72456323bb37 --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/clis/install-yadm.md @@ -0,0 +1,111 @@ +# Install YADM (Yet Another Dotfiles Manager) + +You are helping the user install and set up YADM for managing their dotfiles. + +## Your tasks: + +1. **Check if YADM is already installed:** + - Check: `which yadm` + - If installed: `yadm version` + - If already installed, ask the user if they want to: + - Configure it for first use + - Upgrade to the latest version + - Or exit + +2. **Install YADM:** + + **Option 1: Using apt (recommended for Ubuntu):** + ```bash + sudo apt update + sudo apt install yadm + ``` + + **Option 2: Using the install script (for latest version):** + ```bash + curl -fsSL https://github.com/TheLocehiliosan/yadm/raw/master/bootstrap/install_yadm.sh | sudo bash + ``` + + Ask the user which installation method they prefer. + +3. **Verify installation:** + - Check version: `yadm version` + - Check location: `which yadm` + +4. 
**Initialize YADM (if user wants to set it up):** + + **For new setup:** + ```bash + yadm init + ``` + + **For cloning existing dotfiles:** + Ask the user if they have an existing dotfiles repository to clone. + If yes, get the repository URL and run: + ```bash + yadm clone + ``` + +5. **Guide the user through initial configuration:** + + **Add existing dotfiles:** + Suggest common dotfiles to track: + - `~/.bashrc` + - `~/.bash_profile` + - `~/.profile` + - `~/.gitconfig` + - `~/.ssh/config` (if exists) + - `~/.config/` directories (ask which ones) + + Show how to add files: + ```bash + yadm add ~/.bashrc + yadm add ~/.gitconfig + yadm commit -m "Initial dotfiles commit" + ``` + +6. **Set up remote repository (if user wants):** + Ask if they want to set up a remote repository: + ```bash + yadm remote add origin + yadm push -u origin main + ``` + +7. **Explain basic YADM usage:** + - `yadm status` - Check status + - `yadm add ` - Track a file + - `yadm commit -m "message"` - Commit changes + - `yadm push` - Push to remote + - `yadm pull` - Pull from remote + - `yadm list` - List tracked files + - `yadm diff` - Show differences + +8. **Set up encryption (optional):** + Ask if the user wants to encrypt sensitive files: + ```bash + echo ".ssh/id_rsa" >> ~/.config/yadm/encrypt + yadm encrypt + ``` + +9. **Set up bootstrap (optional):** + Explain that YADM can run a bootstrap script on new systems. + Offer to create a basic `~/.config/yadm/bootstrap` script: + ```bash + #!/bin/bash + # Install common packages + sudo apt update + sudo apt install -y git vim tmux + ``` + +10. 
**Provide next steps and best practices:** + - Regularly commit dotfile changes: `yadm add -u && yadm commit -m "Update dotfiles"` + - Use branches for experimental configurations + - Use `.config/yadm/encrypt` for sensitive files + - Consider alternate files for different systems (using YADM's alternate file feature) + - Backup remote repository (GitHub/GitLab) + +## Important notes: +- Ask before making any commits or pushes +- Explain the difference between YADM and regular git (YADM operates on $HOME) +- Warn about not committing sensitive information unencrypted +- If user already has dotfiles in a git repo, explain migration process +- Be clear that YADM commands work like git commands diff --git a/commands/sysadmin/linux-desktop/installation/clis/setup-aws-cli.md b/commands/sysadmin/linux-desktop/installation/clis/setup-aws-cli.md new file mode 100644 index 0000000000000000000000000000000000000000..a89b72f7c75052ce2b8403c130b15d7c65a308e7 --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/clis/setup-aws-cli.md @@ -0,0 +1,40 @@ +--- +description: Set up or validate AWS CLI configuration +tags: [cloud, aws, setup, validation, project, gitignored] +--- + +You are helping the user set up or validate their AWS CLI configuration. + +## Process + +1. **Check if AWS CLI is installed** + - Run `aws --version` to check installation + - If not installed, install using: `sudo apt install awscli` or `pip3 install awscli --upgrade --user` + +2. **Check existing configuration** + - Run `aws configure list` to see current config + - Check `~/.aws/credentials` and `~/.aws/config` files if they exist + +3. **Validate configuration** + - If credentials exist, test with: `aws sts get-caller-identity` + - This will confirm the credentials are valid and show account info + +4. 
**Configure if needed** + - If not configured or user wants to update: + - Run `aws configure` interactively OR + - Ask user for: AWS Access Key ID, Secret Access Key, default region, output format + - Offer to set up profiles if user has multiple AWS accounts + +5. **Additional setup suggestions** + - Suggest installing `aws-shell` for better CLI experience + - Recommend setting up AWS SSO if applicable + - Suggest configuring MFA if not already set up + +## Output + +Provide a summary showing: +- AWS CLI version +- Configured profiles +- Current default profile and region +- Validation status +- Any recommendations for improvement diff --git a/commands/sysadmin/linux-desktop/installation/clis/setup-b2-cli.md b/commands/sysadmin/linux-desktop/installation/clis/setup-b2-cli.md new file mode 100644 index 0000000000000000000000000000000000000000..b8698280fb155574bca43c14e2ea01fe98cda6c0 --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/clis/setup-b2-cli.md @@ -0,0 +1,40 @@ +--- +description: Set up or validate Backblaze B2 CLI configuration +tags: [cloud, b2, backblaze, setup, validation, project, gitignored] +--- + +You are helping the user set up or validate their Backblaze B2 CLI configuration. + +## Process + +1. **Check if B2 CLI is installed** + - Run `b2 version` to check installation + - If not installed, install using: `pip3 install b2 --upgrade --user` or `sudo apt install backblaze-b2` + +2. **Check existing authorization** + - Run `b2 get-account-info` to see if already authorized + - Check `~/.b2_account_info` if it exists + +3. **Validate configuration** + - If authorized, test by listing buckets: `b2 list-buckets` + - Verify account ID and key are working + +4. **Configure if needed** + - If not configured or user wants to update: + - Ask user for Application Key ID and Application Key + - Run `b2 authorize-account ` + - Alternatively, use `b2 clear-account` first if re-authorizing + +5. 
**Additional setup** + - Show available buckets: `b2 list-buckets` + - Suggest setting up lifecycle rules if doing backups + - Recommend testing upload/download with a small file + +## Output + +Provide a summary showing: +- B2 CLI version +- Authorization status +- Account ID (if authorized) +- List of buckets (if any) +- Any recommendations for optimization diff --git a/commands/sysadmin/linux-desktop/installation/clis/setup-rclone.md b/commands/sysadmin/linux-desktop/installation/clis/setup-rclone.md new file mode 100644 index 0000000000000000000000000000000000000000..9fbdc4e1972e4aa0176b6f1880206806566abefa --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/clis/setup-rclone.md @@ -0,0 +1,41 @@ +--- +description: Set up rclone for cloud storage management +tags: [cloud, rclone, setup, backup, project, gitignored] +--- + +You are helping the user set up rclone for cloud storage management. + +## Process + +1. **Check if rclone is installed** + - Run `rclone version` to check installation + - If not installed, install using: `sudo apt install rclone` or download from rclone.org + +2. **Check existing remotes** + - Run `rclone listremotes` to see configured remotes + - Run `rclone config file` to show config file location + +3. **Configure new remotes if needed** + - Run `rclone config` for interactive setup + - Guide user through: + - Choosing storage type (S3, B2, Google Drive, etc.) + - Entering credentials + - Testing connection + +4. **Validate existing remotes** + - For each remote, test with: `rclone lsd :` + - Verify access and permissions + +5. 
**Optimization suggestions** + - Suggest setting up encrypted remotes for sensitive data + - Recommend bandwidth limits if needed: `--bwlimit` + - Suggest useful flags for backups: `--transfers`, `--checkers` + - Offer to create wrapper scripts for common operations + +## Output + +Provide a summary showing: +- rclone version +- List of configured remotes with types +- Validation status for each remote +- Suggested next steps or optimizations diff --git a/commands/sysadmin/linux-desktop/installation/guis/input-remapper.md new file mode 100644 index 0000000000000000000000000000000000000000..cee28b47a2ef2240ef44c77ca8e93f335a684c35 --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/guis/input-remapper.md @@ -0,0 +1 @@ +Install Input Remapper. \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/installation/guis/install-gimp-plugin.md new file mode 100644 index 0000000000000000000000000000000000000000..c3e99709032918dfdcac53e03789e9c00bffa28d --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/guis/install-gimp-plugin.md @@ -0,0 +1,374 @@ +# Install GIMP Plugin + +You are a system administration assistant specialized in installing and managing GIMP plugins and extensions on Linux. + +## Your Task + +Help the user install GIMP plugins (scripts, plug-ins, and extensions): + +1. First, verify GIMP is installed: + ```bash + gimp --version + ``` + +2. Ask the user: + - Which plugin/script they want to install (provide popular suggestions) + - Plugin type (Python-Fu, Script-Fu, binary plugin) + - Installation preference (Flatpak vs system) + +3. 
Determine correct plugin directories: + - **System GIMP**: `~/.config/GIMP/2.10/plug-ins/` and `~/.config/GIMP/2.10/scripts/` + - **Flatpak GIMP**: `~/.var/app/org.gimp.GIMP/config/GIMP/2.10/plug-ins/` and scripts + - **GIMP 3.x**: Replace `2.10` with appropriate version + +4. Install plugin with correct permissions and verify it loads + +## GIMP Plugin Directories + +### System Installation +```bash +# Python/Binary plugins +~/.config/GIMP/2.10/plug-ins/ + +# Script-Fu scripts (.scm files) +~/.config/GIMP/2.10/scripts/ + +# Brushes, patterns, gradients +~/.config/GIMP/2.10/brushes/ +~/.config/GIMP/2.10/patterns/ +~/.config/GIMP/2.10/gradients/ +``` + +### Flatpak Installation +```bash +~/.var/app/org.gimp.GIMP/config/GIMP/2.10/plug-ins/ +~/.var/app/org.gimp.GIMP/config/GIMP/2.10/scripts/ +``` + +## Popular GIMP Plugins + +### G'MIC (Powerful filters and effects) + +**System GIMP:** +```bash +sudo apt install gmic gimp-gmic +``` + +**Flatpak GIMP:** +```bash +flatpak install flathub org.gimp.GIMP.Plugin.GMIC +``` + +### Resynthesizer (Content-aware fill, heal selection) + +```bash +sudo apt install gimp-plugin-registry +# Includes resynthesizer and many other useful plugins +``` + +**Manual installation:** +```bash +# Download from GitHub +git clone https://github.com/bootchk/resynthesizer.git +cd resynthesizer + +# Build +sudo apt install build-essential libgimp2.0-dev +./autogen.sh +make +sudo make install +``` + +### Liquid Rescale (Content-aware scaling) + +```bash +sudo apt install gimp-plugin-registry +``` + +### BIMP (Batch Image Manipulation) + +```bash +# Download from releases +wget https://github.com/alessandrofrancesconi/gimp-plugin-bimp/releases/download/v2.4/gimp-plugin-bimp-2.4.tar.gz +tar -xzf gimp-plugin-bimp-2.4.tar.gz + +# Install dependencies +sudo apt install libgimp2.0-dev libpcre3-dev + +# Build and install +cd gimp-plugin-bimp-* +make +make install +``` + +### Beautify (Photo enhancement) + +```bash +# Download beautify.scm +wget 
https://raw.githubusercontent.com/hejiann/beautify/master/beautify.scm + +# Install +mkdir -p ~/.config/GIMP/2.10/scripts/ +cp beautify.scm ~/.config/GIMP/2.10/scripts/ +``` + +### Fourier Plugin (Frequency domain editing) + +```bash +sudo apt install gimp-plugin-registry +# Includes fourier plugin +``` + +### Layer via Copy/Cut + +```bash +# Download the script +wget http://registry.gimp.org/files/layer-via-copy-cut.scm + +# Install +mkdir -p ~/.config/GIMP/2.10/scripts/ +cp layer-via-copy-cut.scm ~/.config/GIMP/2.10/scripts/ +``` + +## Installing Different Plugin Types + +### Script-Fu (.scm files) + +```bash +# Download the .scm file +# Copy to scripts directory +mkdir -p ~/.config/GIMP/2.10/scripts/ +cp plugin-name.scm ~/.config/GIMP/2.10/scripts/ + +# For Flatpak: +cp plugin-name.scm ~/.var/app/org.gimp.GIMP/config/GIMP/2.10/scripts/ + +# Refresh scripts in GIMP: Filters β†’ Script-Fu β†’ Refresh Scripts +``` + +### Python-Fu (.py files) + +```bash +# Create plugin directory with plugin name +mkdir -p ~/.config/GIMP/2.10/plug-ins/plugin-name/ + +# Copy Python file +cp plugin-name.py ~/.config/GIMP/2.10/plug-ins/plugin-name/ + +# Make executable +chmod +x ~/.config/GIMP/2.10/plug-ins/plugin-name/plugin-name.py + +# For Flatpak: +mkdir -p ~/.var/app/org.gimp.GIMP/config/GIMP/2.10/plug-ins/plugin-name/ +cp plugin-name.py ~/.var/app/org.gimp.GIMP/config/GIMP/2.10/plug-ins/plugin-name/ +chmod +x ~/.var/app/org.gimp.GIMP/config/GIMP/2.10/plug-ins/plugin-name/plugin-name.py +``` + +**Example Python plugin structure:** +``` +~/.config/GIMP/2.10/plug-ins/ +└── my-plugin/ + β”œβ”€β”€ my-plugin.py (executable) + └── README.md +``` + +### Binary Plugins (.so files) + +```bash +# Copy to plug-ins directory +mkdir -p ~/.config/GIMP/2.10/plug-ins/plugin-name/ +cp plugin.so ~/.config/GIMP/2.10/plug-ins/plugin-name/ + +# Make executable +chmod +x ~/.config/GIMP/2.10/plug-ins/plugin-name/plugin.so +``` + +## Installing from GIMP Plugin Registry + +Many plugins can be installed 
via the package manager: + +```bash +# Install the full plugin registry +sudo apt install gimp-plugin-registry + +# This includes: +# - Resynthesizer +# - Liquid Rescale +# - Fourier +# - Wavelet Denoise +# - Separate+ +# - And many more +``` + +## Installing Brushes, Patterns, and Gradients + +### Brushes (.gbr, .gih, .vbr files) + +```bash +mkdir -p ~/.config/GIMP/2.10/brushes/ +cp *.gbr ~/.config/GIMP/2.10/brushes/ + +# Refresh in GIMP: Windows β†’ Dockable Dialogs β†’ Brushes β†’ Refresh +``` + +### Patterns (.pat files) + +```bash +mkdir -p ~/.config/GIMP/2.10/patterns/ +cp *.pat ~/.config/GIMP/2.10/patterns/ +``` + +### Gradients (.ggr files) + +```bash +mkdir -p ~/.config/GIMP/2.10/gradients/ +cp *.ggr ~/.config/GIMP/2.10/gradients/ +``` + +## Verify Plugin Installation + +1. **Check plugin appears in GIMP:** + - Restart GIMP + - Check Filters menu for new entries + - Check Tools menu if it's a tool plugin + +2. **Refresh plugins without restarting:** + - Filters β†’ Script-Fu β†’ Refresh Scripts (for Script-Fu) + - Filters β†’ Python-Fu β†’ Console β†’ Browse (for Python-Fu) + +3. **Check GIMP error console:** + - Filters β†’ Python-Fu β†’ Console + - Look for any error messages + +4. 
**Review GIMP startup messages:** + ```bash + gimp --verbose + ``` + +## Building Plugins from Source + +General process for compiled plugins: + +```bash +# Install build dependencies +sudo apt install build-essential libgimp2.0-dev + +# Clone plugin repository +git clone https://github.com/author/plugin-name.git +cd plugin-name + +# Build (method varies by plugin) +# Method 1: Autotools +./autogen.sh +./configure +make +sudo make install + +# Method 2: Meson +meson build +ninja -C build +sudo ninja -C build install + +# Method 3: Simple Makefile +make +sudo make install +``` + +## Troubleshooting + +### Plugin not appearing + +**Check permissions:** +```bash +chmod +x ~/.config/GIMP/2.10/plug-ins/plugin-name/*.py +chmod +x ~/.config/GIMP/2.10/plug-ins/plugin-name/*.so +``` + +**Check Python shebang (for Python plugins):** +```python +#!/usr/bin/env python3 +``` + +**Ensure plugin is in its own directory:** +```bash +# Wrong: +~/.config/GIMP/2.10/plug-ins/plugin.py + +# Correct: +~/.config/GIMP/2.10/plug-ins/plugin-name/plugin.py +``` + +### Missing dependencies + +**For Python plugins:** +```bash +# System GIMP uses system Python +pip3 install required-module + +# Flatpak GIMP - more complex, may need to use flatpak Python +``` + +**Check what libraries binary needs:** +```bash +ldd ~/.config/GIMP/2.10/plug-ins/plugin-name/plugin.so +``` + +### Flatpak permission issues + +```bash +# Grant additional permissions if needed +flatpak override --user --filesystem=~/.config/GIMP org.gimp.GIMP +``` + +## GIMP 3.0 Changes + +For GIMP 3.0+ (when released): +- Plugin directory: `~/.config/GIMP/3.0/plug-ins/` +- Python 3 required for all Python plugins +- Some API changes may require plugin updates + +## Recommended Plugin Collection + +Essential plugins for most users: +1. **G'MIC** - Hundreds of filters and effects +2. **Resynthesizer** - Content-aware fill (like Photoshop) +3. **BIMP** - Batch image manipulation +4. 
**gimp-plugin-registry** - Collection of useful plugins +5. **Liquid Rescale** - Content-aware scaling +6. **Layer via Copy/Cut** - Photoshop-like layer workflow + +## Uninstalling Plugins + +**Remove script:** +```bash +rm ~/.config/GIMP/2.10/scripts/plugin-name.scm +``` + +**Remove Python/binary plugin:** +```bash +rm -rf ~/.config/GIMP/2.10/plug-ins/plugin-name/ +``` + +**Remove system-installed plugin:** +```bash +sudo apt remove gimp-plugin-name +``` + +## Resources + +- **GIMP Plugin Registry**: https://www.gimphelp.org/ +- **GitHub**: Search for "GIMP plugin" +- **GIMP Forums**: https://www.gimp-forum.net/ +- **Package search**: `apt search gimp-plugin` + +## Best Practices + +- Install plugins one at a time and test +- Keep backups of working GIMP configurations +- Read plugin documentation for requirements +- Check GIMP version compatibility +- Prefer packaged versions when available +- Use GIMP's built-in plugin manager (if available) +- Test plugins on copy of image first + +Help users extend GIMP's capabilities with powerful plugins for advanced image editing. diff --git a/commands/sysadmin/linux-desktop/installation/guis/install-obs-plugin.md b/commands/sysadmin/linux-desktop/installation/guis/install-obs-plugin.md new file mode 100644 index 0000000000000000000000000000000000000000..45508dd6708932af7e7336f5271954bdd3f42aec --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/guis/install-obs-plugin.md @@ -0,0 +1,216 @@ +# Install OBS Studio Plugin + +You are a system administration assistant specialized in installing and managing OBS Studio plugins on Linux. + +## Your Task + +Help the user install OBS Studio plugins: + +1. First, verify OBS Studio is installed: + ```bash + obs --version + ``` + +2. Ask the user: + - Which plugin they want to install (provide popular suggestions if unsure) + - Installation method preference (package manager, manual, Flatpak) + - Whether OBS is installed via package manager or Flatpak + +3. 
Determine the correct plugin directory: + - **System install**: `~/.config/obs-studio/plugins/` or `/usr/lib/obs-plugins/` + - **Flatpak install**: `~/.var/app/com.obsproject.Studio/config/obs-studio/plugins/` + +4. Install the plugin and verify it loads correctly + +## Popular OBS Plugins + +### obs-gstreamer (GStreamer integration) +```bash +sudo apt install obs-gstreamer +``` + +### obs-websocket (Remote control) +**Included in OBS 28+ by default, but for older versions:** +```bash +# Download from releases +wget https://github.com/obsproject/obs-websocket/releases/download/4.9.1/obs-websocket-4.9.1-1_amd64.deb +sudo dpkg -i obs-websocket-4.9.1-1_amd64.deb +``` + +### obs-v4l2sink (Virtual camera) +```bash +sudo apt install v4l2loopback-dkms obs-v4l2sink +``` + +### StreamFX (Advanced effects and filters) +```bash +# Download latest release from GitHub +wget https://github.com/Xaymar/obs-StreamFX/releases/download/0.12.0/StreamFX-ubuntu-22.04.deb +sudo dpkg -i StreamFX-ubuntu-22.04.deb +``` + +### obs-backgroundremoval (AI background removal) +```bash +# Download from releases +wget https://github.com/royshil/obs-backgroundremoval/releases/download/v1.1.13/obs-backgroundremoval-v1.1.13-ubuntu-22.04-x86_64.deb +sudo dpkg -i obs-backgroundremoval-v1.1.13-ubuntu-22.04-x86_64.deb +``` + +### wlrobs (Wayland screen capture) +```bash +sudo apt install obs-wlrobs +``` + +### Advanced Scene Switcher +```bash +# Download from releases +wget https://github.com/WarmUpTill/SceneSwitcher/releases/download/1.26.2/SceneSwitcher.so +mkdir -p ~/.config/obs-studio/plugins/SceneSwitcher/bin/64bit/ +mv SceneSwitcher.so ~/.config/obs-studio/plugins/SceneSwitcher/bin/64bit/ +``` + +### obs-teleport (NDI alternative, low-latency streaming) +```bash +# Download from releases +wget https://github.com/fzwoch/obs-teleport/releases/download/0.7.2/obs-teleport_0.7.2_amd64.deb +sudo dpkg -i obs-teleport_0.7.2_amd64.deb +``` + +## Manual Plugin Installation + +### For system OBS installation: 
+ +```bash +# Download plugin (usually a .so file or .deb package) +# If it's a .so file: +mkdir -p ~/.config/obs-studio/plugins/PLUGIN_NAME/bin/64bit/ +cp plugin-file.so ~/.config/obs-studio/plugins/PLUGIN_NAME/bin/64bit/ + +# If plugin has data files: +mkdir -p ~/.config/obs-studio/plugins/PLUGIN_NAME/data/ +cp -r data/* ~/.config/obs-studio/plugins/PLUGIN_NAME/data/ +``` + +### For Flatpak OBS installation: + +```bash +# Create plugin directory +mkdir -p ~/.var/app/com.obsproject.Studio/config/obs-studio/plugins/PLUGIN_NAME/bin/64bit/ + +# Copy plugin +cp plugin-file.so ~/.var/app/com.obsproject.Studio/config/obs-studio/plugins/PLUGIN_NAME/bin/64bit/ + +# If plugin has data files: +mkdir -p ~/.var/app/com.obsproject.Studio/config/obs-studio/plugins/PLUGIN_NAME/data/ +cp -r data/* ~/.var/app/com.obsproject.Studio/config/obs-studio/plugins/PLUGIN_NAME/data/ +``` + +## Building Plugin from Source + +For plugins that need to be built: + +```bash +# Install build dependencies +sudo apt install build-essential cmake git libobs-dev + +# Clone plugin repository +git clone https://github.com/AUTHOR/PLUGIN_NAME.git +cd PLUGIN_NAME + +# Build +mkdir build && cd build +cmake .. -DCMAKE_INSTALL_PREFIX=/usr +make -j$(nproc) + +# Install +sudo make install +``` + +## Verify Plugin Installation + +1. **Check plugin loads:** + ```bash + # Start OBS and check Tools menu or filters + obs + ``` + +2. **Check OBS logs:** + ```bash + tail -f ~/.config/obs-studio/logs/$(ls -t ~/.config/obs-studio/logs/ | head -1) + ``` + +3. 
**List loaded plugins:** + - Open OBS Studio + - Go to Tools β†’ Scripts or check Filters for new options + - Check Help β†’ Log Files β†’ View Current Log + +## Troubleshooting + +**Plugin not appearing:** +- Verify plugin is in correct directory +- Check OBS version compatibility +- Review OBS logs for loading errors +- Ensure .so file has execute permissions: `chmod +x plugin.so` + +**Permission issues:** +```bash +chmod -R 755 ~/.config/obs-studio/plugins/ +``` + +**Missing dependencies:** +```bash +# Check what libraries plugin needs +ldd ~/.config/obs-studio/plugins/PLUGIN_NAME/bin/64bit/plugin.so +``` + +**Flatpak-specific issues:** +```bash +# Give Flatpak OBS more permissions if needed +flatpak override --user --filesystem=~/.config/obs-studio/plugins com.obsproject.Studio +``` + +## Plugin Directory Structure + +``` +~/.config/obs-studio/plugins/ +β”œβ”€β”€ plugin-name/ +β”‚ β”œβ”€β”€ bin/ +β”‚ β”‚ └── 64bit/ +β”‚ β”‚ └── plugin.so +β”‚ └── data/ +β”‚ └── locale/ +β”‚ └── en-US.ini +``` + +## Recommended Plugin Collection + +For a well-rounded OBS setup, consider: +1. **StreamFX** - Advanced effects and encoding +2. **obs-backgroundremoval** - AI background removal +3. **Advanced Scene Switcher** - Automation +4. **obs-websocket** - Remote control (built-in OBS 28+) +5. **wlrobs** - Better Wayland capture +6. 
**obs-v4l2sink** - Virtual camera output + +## Best Practices + +- Install plugins one at a time and test each +- Keep OBS and plugins updated +- Back up OBS configuration before major plugin installations +- Read plugin documentation for specific requirements +- Check plugin compatibility with your OBS version +- Use package manager versions when available (easier updates) + +## Uninstalling Plugins + +**Remove from user directory:** +```bash +rm -rf ~/.config/obs-studio/plugins/PLUGIN_NAME/ +``` + +**Remove system-installed plugin:** +```bash +sudo apt remove obs-plugin-name +``` + +Help users enhance their OBS Studio setup with powerful plugins for better streaming and recording. diff --git a/commands/sysadmin/linux-desktop/installation/install-this.md b/commands/sysadmin/linux-desktop/installation/install-this.md new file mode 100644 index 0000000000000000000000000000000000000000..ded64674046f7c1e93b922169c3325ca9009c45e --- /dev/null +++ b/commands/sysadmin/linux-desktop/installation/install-this.md @@ -0,0 +1,3 @@ +Install the program at this directory. + +If it needs to be unzipped and unpacked, unzip, in the first instance to ~/programs and choose the most appropriate folder at that level (the folders divide programs into topics). \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/mcp/manage-mcp-servers.md b/commands/sysadmin/linux-desktop/mcp/manage-mcp-servers.md new file mode 100644 index 0000000000000000000000000000000000000000..4e393e14a9dc2aa70e0a0060ec4226e0c805ea71 --- /dev/null +++ b/commands/sysadmin/linux-desktop/mcp/manage-mcp-servers.md @@ -0,0 +1,104 @@ +--- +description: Review installed MCP servers and work with user to add new ones +tags: [mcp, ai, configuration, servers, project, gitignored] +--- + +You are helping the user manage their MCP (Model Context Protocol) servers. + +## Process + +1. 
**Check MCP configuration** + - Look for MCP config: `~/mcp/` or `~/.config/mcp/` + - Check Claude Code config: `~/.config/claude/` + - Identify MCP server config files + +2. **List currently installed MCP servers** + - Parse configuration files + - For each server, show: + - Server name + - Server type/purpose + - Status (running/stopped) + - Configuration details + +3. **Check running MCP servers** + - Look for running processes: + ```bash + ps aux | grep mcp + ``` + - Check if servers are accessible + +4. **Suggest useful MCP servers** + + **Common MCP servers:** + - Filesystem MCP (file operations) + - GitHub MCP (GitHub integration) + - Database MCP (PostgreSQL, SQLite, etc.) + - Browser MCP (web automation) + - Context7 MCP (documentation) + - Memory MCP (persistent memory) + - Search MCP (web search) + +5. **Install new MCP servers** + - For each server user wants: + - Check installation method (npm, pip, docker, etc.) + - Install dependencies + - Configure server + - Add to MCP config + + **Example: Installing filesystem MCP** + ```bash + npm install -g @anthropic/mcp-server-filesystem + ``` + + **Example: Installing custom MCP server** + ```bash + git clone + cd + npm install + ``` + +6. **Configure MCP servers** + - Add server to config file + - Example config: + ```json + { + "mcpServers": { + "filesystem": { + "command": "mcp-server-filesystem", + "args": ["/path/to/allowed/directory"] + }, + "github": { + "command": "mcp-server-github", + "env": { + "GITHUB_TOKEN": "your-token-here" + } + } + } + } + ``` + +7. **Test MCP server connectivity** + - Start servers + - Verify they're accessible by Claude Code + - Test basic operations + +8. **Document MCP setup** + - Offer to create `~/mcp/README.md` documenting: + - Installed servers + - Configuration + - Usage examples + - Troubleshooting + +9. 
**Suggest workflows** + - Recommend MCP server combinations for common tasks + - Show example use cases + +## Output + +Provide a summary showing: +- Currently installed MCP servers +- Server status and configuration +- New servers installed (if any) +- Configuration changes made +- Usage examples +- Next steps or recommendations diff --git a/commands/sysadmin/linux-desktop/media/check-codecs.md b/commands/sysadmin/linux-desktop/media/check-codecs.md new file mode 100644 index 0000000000000000000000000000000000000000..40e9fa41dbf028ffe8310fb1d6dd5f477150cd48 --- /dev/null +++ b/commands/sysadmin/linux-desktop/media/check-codecs.md @@ -0,0 +1,89 @@ +--- +description: Evaluate installed media codecs on the computer +tags: [media, codecs, audio, video, system, project, gitignored] +--- + +You are helping the user evaluate what media codecs are installed on their system. + +## Process + +1. **Check GStreamer plugins** + - List GStreamer plugins: `gst-inspect-1.0 | grep -i plugin` + - Check installed GStreamer packages: + ```bash + dpkg -l | grep -E "gstreamer.*plugin" + ``` + - Key packages: + - `gstreamer1.0-plugins-base` (essential) + - `gstreamer1.0-plugins-good` (common formats) + - `gstreamer1.0-plugins-bad` (additional) + - `gstreamer1.0-plugins-ugly` (patent-encumbered) + - `gstreamer1.0-libav` (FFmpeg integration) + +2. **Check FFmpeg codecs** + - List FFmpeg codecs: `ffmpeg -codecs 2>/dev/null | head -50` + - List encoders: `ffmpeg -encoders 2>/dev/null | head -20` + - List decoders: `ffmpeg -decoders 2>/dev/null | head -20` + - Check FFmpeg version: `ffmpeg -version` + +3. **Check VA-API support (hardware acceleration)** + - Check VA-API: `vainfo` + - For AMD: Should show ROCm/RADV support + - Verify hardware encoding/decoding support + +4. **Check for common codec packages** + ```bash + dpkg -l | grep -E "libavcodec|libavformat|libavutil|x264|x265|vp9|opus|aac|mp3" + ``` + +5. 
**Test codec support** + - Video codecs to verify: + - H.264/AVC (most common) + - H.265/HEVC (4K content) + - VP8/VP9 (WebM) + - AV1 (modern codec) + - Audio codecs to verify: + - MP3 + - AAC + - Opus + - FLAC + - Vorbis + +6. **Identify missing codecs** + - Common needs: + - DVD playback: `libdvd-pkg` + - Proprietary formats: `ubuntu-restricted-extras` + - H.265 encoding: `x265` + - AV1: `libaom3`, `libdav1d-dev` + +7. **Suggest installations** + + **For comprehensive codec support:** + ```bash + sudo apt install ubuntu-restricted-extras + sudo apt install ffmpeg + sudo apt install gstreamer1.0-plugins-{base,good,bad,ugly} + sudo apt install gstreamer1.0-libav + sudo apt install gstreamer1.0-vaapi # Hardware acceleration + ``` + + **For DVD:** + ```bash + sudo apt install libdvd-pkg + sudo dpkg-reconfigure libdvd-pkg + ``` + +8. **Check browser codec support** + - Visit: `https://www.youtube.com/html5` + - Shows which codecs browser supports + - Check hardware acceleration in browsers + +## Output + +Provide a report showing: +- Installed GStreamer plugins +- FFmpeg codec support +- Hardware acceleration status (VA-API) +- Missing common codecs +- Installation recommendations +- Browser codec support status diff --git a/commands/sysadmin/linux-desktop/mgmt/to-add-1.md new file mode 100644 index 0000000000000000000000000000000000000000..464f969f7ac28fbd17c60cd1f1dae793822d205d --- /dev/null +++ b/commands/sysadmin/linux-desktop/mgmt/to-add-1.md @@ -0,0 +1,79 @@ +Please create the following list of slash commands by writing them and then organising them into the correct subfolder(s) + +When done, add to git and push + +Do not ask for any confirmation - just complete the task by yourself + +a slash command to set up aws cli and configure it (or validate config) + +a slash command to set up b2 cli and configure it (or validate config) + +a slash command to run arp and scan the local network and produce a lan map + +a 
slash command to diagnose any lan connectivity issues by pinging the detected gateway + +a slash command to install pyenv and work with the user to set up various python versions + +a slash command to evaluate what software the user has installed and set up complementary clis or guis they may wish to use + +a slash command to work with the user to evaluate and optimise their pipewire setup + +a slash command to intelligently probe their system for security vulnerabilities that they may wish to remediate + +a slash command to try to identify packages (like flatpak debian) that the user has not used in a while and may wish to remove + +a slash command which evaluates which packages the user has installed through third party repos but may be able to move to official repos + +a slash command which evaluates whether the user has any software that is known to be spyware installed and suggest removal if so + +a slash command to diagnose what printers the user has installed and see if any can be removed + +a slash command to set up ollama on the machine + +a slash command to see which ollama models the user has installed and suggest others which would fit their hardware + +a slash command to set up comfyui + +a slash command to evaluate what packages the user has in place for local ai inference and suggest any they may wish to install + +a slash command to see which fonts the user has installed and install additional fonts if requested + +a slash command to help the user to install google fonts. 
the user will provide the names of google fonts and claude will install them + +slash command to evaluate what wake devices the user has configured and help remove them for better hibernation + +slash command to evaluate what codecs the user has installed on their computer + +slash command to evaluate whether the os is properly optimised to support the user's gpu + +slash command to evaluate any settings which may affect the operation of the gpu and suggest a compatible cli/gui for monitoring + +slash command which identifies which parts of the filesystem the user may wish to back up and suggest an inclusion pattern based upon that + +slash command which sets up rclone + +slash command which checks if the user has a global gitignore in place and if not creates one + +slash command which checks the user's basic git config and makes any desired edits + +slash command which sees which ssh pairs the user has installed and loaded and deletes old ones if the user desires + +slash command which evaluates which ssh connection names the user has + +slash command which evaluates which mcp servers (for ai) the user has installed and running and works with the user to add new ones + +slash commands which sees which speech to text apps the user has installed, suggests potential installation candidates, installs local whisper + +slash commands which sets up a conda environment for rocm and pytorch + +slash command which sets up a conda environment for llm fine tuning + +slash command which sets up a conda env for creating an stt fine tune + +slash command which sets up a conda env for data analysis + +slash command which suggests ides the user may wish to install + +slash command which evaluates how the user has installed vs code and suggests optimisations like moving to a new repo source + +slash command which evaluates which api keys the user has on path and adds them if requested - refraining from providing unsolicited security advice \ No newline at end of file diff --git 
a/commands/sysadmin/linux-desktop/network/diagnose-lan-connectivity.md b/commands/sysadmin/linux-desktop/network/diagnose-lan-connectivity.md new file mode 100644 index 0000000000000000000000000000000000000000..444abd88b43404cfb5939cb06a4a366816f7d073 --- /dev/null +++ b/commands/sysadmin/linux-desktop/network/diagnose-lan-connectivity.md @@ -0,0 +1,50 @@ +--- +description: Diagnose LAN connectivity issues by pinging gateway and testing network +tags: [network, diagnostics, connectivity, gateway, troubleshooting, project, gitignored] +--- + +You are helping the user diagnose LAN connectivity issues. + +## Process + +1. **Identify network configuration** + - Run `ip addr show` to check network interfaces + - Run `ip route show` to identify default gateway + - Check DNS servers: `cat /etc/resolv.conf` + +2. **Test gateway connectivity** + - Ping default gateway: `ping -c 4 ` + - If gateway is unreachable, check: + - Network interface status: `ip link show` + - NetworkManager status: `nmcli device status` + - Physical connection (if applicable) + +3. **Test DNS resolution** + - Test DNS lookup: `nslookup google.com` + - Try alternative DNS: `nslookup google.com 8.8.8.8` + - Check if DNS is the issue + +4. **Test external connectivity** + - Ping external IP: `ping -c 4 8.8.8.8` + - Ping domain name: `ping -c 4 google.com` + - Traceroute to identify where packets stop: `traceroute google.com` + +5. **Check for common issues** + - Firewall blocking: `sudo ufw status` + - IP conflicts: `arp -a` (look for duplicate IPs) + - DHCP issues: Check if IP is self-assigned (169.254.x.x) + +6. 
**Advanced diagnostics** + - Check routing table: `ip route show` + - Monitor network traffic: `sudo tcpdump -i -c 20` + - Check for packet loss: `mtr ` + +## Output + +Provide a diagnostic report showing: +- Network configuration summary +- Gateway reachability status +- DNS resolution status +- External connectivity status +- Identified issues (if any) +- Recommended fixes diff --git a/commands/sysadmin/linux-desktop/network/scan-lan.md b/commands/sysadmin/linux-desktop/network/scan-lan.md new file mode 100644 index 0000000000000000000000000000000000000000..edfca1cd6ef792581a5a5762616342e08ca9d3b9 --- /dev/null +++ b/commands/sysadmin/linux-desktop/network/scan-lan.md @@ -0,0 +1,47 @@ +--- +description: Scan local network using ARP and produce a LAN map +tags: [network, diagnostics, lan, arp, scanning, project, gitignored] +--- + +You are helping the user scan their local network and create a comprehensive LAN map. + +## Process + +1. **Identify network interface and subnet** + - Run `ip route | grep default` to find default gateway + - Run `ip addr show` to identify active network interface and IP + - Determine subnet (likely 10.0.0.0/24 based on Daniel's setup) + +2. **Perform ARP scan** + - Run `arp -a` to see current ARP cache + - For more comprehensive scan, use `sudo arp-scan --localnet` (install if needed: `sudo apt install arp-scan`) + - Alternative: `sudo nmap -sn 10.0.0.0/24` for network sweep + +3. **Gather detailed information** + - For each discovered host, attempt to: + - Get hostname: `nslookup ` + - Identify device type if possible (router, printer, etc.) + - Check if SSH is accessible: `timeout 2 nc -z 22` + +4. **Create LAN map** + - Organize discovered devices by: + - IP address + - MAC address + - Hostname (if available) + - Device type (if identifiable) + - Open ports/services (if detected) + +5. 
**Save results** + - Offer to save the LAN map to `~/ai-docs/network/lan-map-$(date +%Y%m%d).md` + - Include timestamp and subnet information + +## Output + +Present the LAN map in a clear table format showing: +- IP Address +- MAC Address +- Hostname +- Device Type/Notes +- Status (active/inactive) + +Include summary statistics (total devices, device type breakdown). diff --git a/commands/sysadmin/linux-desktop/node/node-version-check.md b/commands/sysadmin/linux-desktop/node/node-version-check.md new file mode 100644 index 0000000000000000000000000000000000000000..44b1815e98800e450930d35584437e072ed51af1 --- /dev/null +++ b/commands/sysadmin/linux-desktop/node/node-version-check.md @@ -0,0 +1 @@ +Check which version of node I have installed on this computer. \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/node/npm-install.md b/commands/sysadmin/linux-desktop/node/npm-install.md new file mode 100644 index 0000000000000000000000000000000000000000..e1ddca2cd0114cdfcd56fe4d4f24a3d4ae2523ca --- /dev/null +++ b/commands/sysadmin/linux-desktop/node/npm-install.md @@ -0,0 +1 @@ +Install Node Package Manager (npm). \ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/optimisation/optimize-boot-speed.md b/commands/sysadmin/linux-desktop/optimisation/optimize-boot-speed.md new file mode 100644 index 0000000000000000000000000000000000000000..bdb6ac659978bba82ba40e9ecb4e4fe12799c2c6 --- /dev/null +++ b/commands/sysadmin/linux-desktop/optimisation/optimize-boot-speed.md @@ -0,0 +1,73 @@ +You are optimizing system boot speed by identifying and remediating slow or hanging processes. + +## Your Task + +1. **Analyze boot performance** using systemd-analyze: + - `systemd-analyze` - Show total boot time + - `systemd-analyze blame` - List services by boot time impact + - `systemd-analyze critical-chain` - Show critical path bottlenecks + - `systemd-analyze plot > boot-analysis.svg` - Generate visual timeline (optional) + +2. 
**Identify slow services**: + - Services taking > 5 seconds to start + - Services in the critical boot path causing delays + - Services with timeout issues + - Parallel vs sequential loading issues + +3. **Detect hanging processes**: + - Check for services waiting on timeouts + - Identify dependency chain bottlenecks + - Look for failed network mounts or remote resources + - Find services that could be started later (after boot completes) + +4. **Categorize optimization opportunities**: + - **Disable**: Unnecessary services that can be completely disabled + - **Delay**: Services that can use `After=network-online.target` or similar + - **Parallel**: Services that could start in parallel instead of sequentially + - **Configure**: Services needing timeout or dependency adjustments + +5. **Propose specific optimizations**: + - Provide exact `systemctl` commands to implement changes + - Explain the impact and safety of each change + - Suggest configuration tweaks for slow services + - Recommend masking vs disabling where appropriate + +## Key Commands + +- `systemd-analyze time` - Overall boot time breakdown +- `systemd-analyze blame` - Time taken by each unit +- `systemd-analyze critical-chain` - Critical path analysis +- `systemctl list-dependencies --before` - What loads before a service +- `systemctl list-dependencies --after` - What loads after a service +- `journalctl -b | grep -i timeout` - Find timeout issues +- `systemctl show --property=TimeoutStartUSec` - Check timeout settings + +## Output Format + +1. **Boot Performance Summary**: + - Total boot time + - Kernel, userspace, and firmware times + - Comparison to typical boot times + +2. **Top Boot Time Offenders** (services > 3 seconds): + - Service name and time taken + - What the service does + - Whether it's essential + +3. **Hanging/Timeout Issues**: + - Services with timeout problems + - Root cause analysis + - Recommended fixes + +4. 
**Optimization Recommendations**: + - Prioritized list of changes (high to low impact) + - Specific commands to execute + - Expected time savings + - Risk assessment for each change + +5. **Implementation Plan**: + - Step-by-step instructions + - Backup/rollback procedures + - Testing recommendations + +Be specific and actionable. Always explain the safety and reversibility of proposed changes. diff --git a/commands/sysadmin/linux-desktop/package-management/check-apt-health.md b/commands/sysadmin/linux-desktop/package-management/check-apt-health.md new file mode 100644 index 0000000000000000000000000000000000000000..caf9d73a2b7091e2a82fb40804d8fc59489d9899 --- /dev/null +++ b/commands/sysadmin/linux-desktop/package-management/check-apt-health.md @@ -0,0 +1,183 @@ +# APT Package Manager Health Check + +You are helping the user ensure that the APT package manager on Ubuntu is in good working health and remove any broken third-party repositories or packages. + +## Your tasks: + +1. **Check basic APT functionality:** + - Update package lists: `sudo apt update` + - Check for errors in output + - Verify cache state: `apt-cache policy` + +2. **Check for broken packages:** + - List broken packages: `dpkg -l | grep "^..r"` + - Check for unconfigured packages: `dpkg -l | grep "^..c"` + - Check dpkg status: `sudo dpkg --configure -a` + - Check for broken dependencies: `sudo apt-get check` + +3. **Identify problematic repositories:** + - List all repositories: + ```bash + grep -r --include '*.list' '^deb ' /etc/apt/sources.list /etc/apt/sources.list.d/ + ``` + - Check for failing repositories during update: + ```bash + sudo apt update 2>&1 | grep -i "fail\|error\|warning" + ``` + - List third-party PPAs: + ```bash + ls /etc/apt/sources.list.d/ + ``` + +4. **Check APT cache integrity:** + - Check cache size: `du -sh /var/cache/apt/archives/` + - List problematic cache entries: + ```bash + sudo apt-get clean + sudo apt-get autoclean + ``` + +5. 
**Fix broken dependencies:** + - Attempt to fix broken packages: + ```bash + sudo apt --fix-broken install + ``` + - Force reconfiguration of all packages: + ```bash + sudo dpkg --configure -a + ``` + - Try to complete interrupted installations: + ```bash + sudo apt-get -f install + ``` + +6. **Identify and handle broken third-party repositories:** + For each failing repository found: + - Ask user if they still need it + - If not needed, disable or remove: + ```bash + sudo add-apt-repository --remove ppa: + ``` + - Or manually remove: `sudo rm /etc/apt/sources.list.d/.list` + - Or disable by commenting out: `sudo sed -i 's/^deb/#deb/' /etc/apt/sources.list.d/.list` + +7. **Check for GPG key issues:** + - Check for missing GPG keys: + ```bash + sudo apt update 2>&1 | grep "NO_PUBKEY" + ``` + - If missing keys found, attempt to import: + ```bash + sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys + ``` + - List all trusted keys: `apt-key list` + +8. **Check for duplicate repositories:** + - Find duplicates: + ```bash + grep -h "^deb " /etc/apt/sources.list /etc/apt/sources.list.d/* | sort | uniq -d + ``` + - Remove duplicates manually or ask user which to keep + +9. **Check disk space:** + - Disk space in /var: `df -h /var` + - If low on space: + ```bash + sudo apt-get clean + sudo apt-get autoclean + sudo apt-get autoremove + ``` + +10. **Check for held packages:** + - List held packages: `apt-mark showhold` + - These packages won't be upgraded - ask user if intentional + - To unhold: `sudo apt-mark unhold ` + +11. **Verify repository configurations:** + - Check main sources.list: `cat /etc/apt/sources.list` + - Ensure official Ubuntu repositories are present: + - main + - restricted + - universe + - multiverse + - security updates + - updates + - backports (optional) + +12. **Check for obsolete packages:** + - List locally installed packages not in any repository: + ```bash + aptitude search '~o' + ``` + - Or using apt: `apt list '~o'` + +13. 
**Verify package authentication:** + - Check if packages are being verified: + ```bash + grep -r "APT::Get::AllowUnauthenticated" /etc/apt/ + ``` + - Should be "false" or not present for security + +14. **Run full system check:** + - Check for consistency: `sudo apt-get check` + - Simulate upgrade to check for issues: `sudo apt-get -s upgrade` + - Simulate dist-upgrade: `sudo apt-get -s dist-upgrade` + +15. **Clean up:** + - Remove old packages: `sudo apt-get autoremove` + - Clean package cache: `sudo apt-get clean` + - Clean old cached packages: `sudo apt-get autoclean` + +16. **Reset APT if severely broken:** + If APT is severely corrupted, may need to: + ```bash + # Backup current sources + sudo cp -r /etc/apt /etc/apt.backup + + # Reset dpkg + sudo dpkg --clear-avail + sudo apt-get update + + # Reinstall base packages if needed + sudo apt-get install --reinstall apt dpkg + ``` + +17. **Check APT configuration files:** + - List all APT config: `apt-config dump` + - Check for problematic configurations in: + - `/etc/apt/apt.conf` + - `/etc/apt/apt.conf.d/` + - Look for unusual proxy settings, deprecated options + +18. **Report findings:** + Summarize: + - Number of broken packages (if any) + - Problematic repositories (outdated PPAs, failing repos) + - Missing GPG keys + - Dependency issues + - Disk space issues + - Held packages + - Overall APT health status (HEALTHY / NEEDS ATTENTION / BROKEN) + +19. 
**Provide recommendations:** + - List of repositories to remove + - Packages to fix or remove + - Whether full system upgrade is recommended + - Cleanup commands to run + - Any configuration changes needed + - If APT is healthy, suggest regular maintenance: + ```bash + sudo apt update && sudo apt upgrade + sudo apt autoremove + sudo apt clean + ``` + +## Important notes: +- Always backup before removing repositories or packages +- Don't remove dependencies of packages user needs +- Some third-party repos may be intentionally added - confirm before removing +- Be cautious with --fix-broken - it may remove packages +- Check if user is running unsupported Ubuntu version (EOL) +- PPAs may lag behind Ubuntu releases +- sudo is required for most operations +- After major fixes, suggest reboot to ensure clean state diff --git a/commands/sysadmin/linux-desktop/package-management/check-third-party-repos.md b/commands/sysadmin/linux-desktop/package-management/check-third-party-repos.md new file mode 100644 index 0000000000000000000000000000000000000000..9b312dcb378649d960855da4e79049111970387a --- /dev/null +++ b/commands/sysadmin/linux-desktop/package-management/check-third-party-repos.md @@ -0,0 +1,54 @@ +--- +description: Identify packages from third-party repos that may be available in official repos +tags: [system, packages, repositories, optimization, project, gitignored] +--- + +You are helping the user identify packages installed from third-party repositories that might now be available in official Ubuntu repos. + +## Process + +1. **List all configured repositories** + - Check `/etc/apt/sources.list` + - Check `/etc/apt/sources.list.d/*` + - Identify which are third-party (PPAs, custom repos) + +2. **Identify packages from third-party sources** + - Run: `apt list --installed | grep -v "ubuntu\|debian"` + - For each PPA, find packages: `apt-cache policy ` shows source + +3. 
**Check official repo availability** + - For each third-party package: + - Check if available in Ubuntu repos: `apt-cache policy ` + - Compare versions (official might be newer or older) + - Note if it's in `universe`, `multiverse`, or `main` + +4. **Common candidates for migration** + - Development tools (git, docker, etc.) + - Media codecs + - Drivers (graphics, etc.) + - Programming languages (Python, Node.js, etc.) + +5. **Evaluate risks and benefits** + - Official repos: More stable, better security updates + - PPAs: Often newer versions, specific features + - Suggest migration if: + - Official version is adequate + - PPA is unmaintained + - Security concerns + +6. **Create migration plan** + - For packages to migrate: + - Remove third-party package + - Remove PPA if no longer needed + - Install from official repo + - Test functionality + +## Output + +Provide a report showing: +- List of third-party repositories in use +- Packages installed from each third-party source +- Which packages are available in official repos +- Version comparison +- Migration recommendations with commands +- Warnings about potential breaking changes diff --git a/commands/sysadmin/linux-desktop/package-management/configure-auto-updates.md b/commands/sysadmin/linux-desktop/package-management/configure-auto-updates.md new file mode 100644 index 0000000000000000000000000000000000000000..71e8762b34143b0a78c0a7c8a0e167cdd6c35fdb --- /dev/null +++ b/commands/sysadmin/linux-desktop/package-management/configure-auto-updates.md @@ -0,0 +1,186 @@ +# Configure Ubuntu Auto-Updates + +You are helping the user configure automatic updates for Ubuntu. + +## Your tasks: + +1. 
**Check current update configuration:** + - Check if unattended-upgrades is installed: `dpkg -l | grep unattended-upgrades` + - Current configuration: `cat /etc/apt/apt.conf.d/50unattended-upgrades` + - Check if auto-updates are enabled: `cat /etc/apt/apt.conf.d/20auto-upgrades` + - Update check frequency: `cat /etc/apt/apt.conf.d/10periodic` + +2. **Install unattended-upgrades if not present:** + ```bash + sudo apt update + sudo apt install unattended-upgrades apt-listchanges + ``` + +3. **Ask user about their update preferences:** + Discuss with the user: + - **Security updates only** (recommended, safest) + - **Security + recommended updates** + - **All updates** (risky for production systems) + - **Update frequency**: daily, weekly + - **Auto-reboot preference**: never, only for security, scheduled time + - **Email notifications** (if configured) + +4. **Configure update types:** + Edit `/etc/apt/apt.conf.d/50unattended-upgrades`: + + For security updates only (recommended): + ``` + Unattended-Upgrade::Allowed-Origins { + "${distro_id}:${distro_codename}-security"; + }; + ``` + + For security + updates: + ``` + Unattended-Upgrade::Allowed-Origins { + "${distro_id}:${distro_codename}-security"; + "${distro_id}:${distro_codename}-updates"; + }; + ``` + +5. **Configure automatic reboot settings:** + In `/etc/apt/apt.conf.d/50unattended-upgrades`, configure: + + **Never auto-reboot (safest):** + ``` + Unattended-Upgrade::Automatic-Reboot "false"; + ``` + + **Auto-reboot when required:** + ``` + Unattended-Upgrade::Automatic-Reboot "true"; + Unattended-Upgrade::Automatic-Reboot-Time "02:00"; + ``` + + **Only reboot if no users logged in:** + ``` + Unattended-Upgrade::Automatic-Reboot-WithUsers "false"; + ``` + +6. 
**Configure email notifications (optional):** + If user wants email notifications: + ``` + Unattended-Upgrade::Mail "user@example.com"; + Unattended-Upgrade::MailReport "on-change"; // or "always" or "only-on-error" + ``` + + Note: Requires mail system configured (postfix, sendmail, etc.) + +7. **Enable automatic updates:** + Create/edit `/etc/apt/apt.conf.d/20auto-upgrades`: + ``` + APT::Periodic::Update-Package-Lists "1"; + APT::Periodic::Download-Upgradeable-Packages "1"; + APT::Periodic::AutocleanInterval "7"; + APT::Periodic::Unattended-Upgrade "1"; + ``` + + Explanation: + - `Update-Package-Lists`: Update package list (1=daily) + - `Download-Upgradeable-Packages`: Pre-download updates (1=daily) + - `AutocleanInterval`: Clean up old packages (7=weekly) + - `Unattended-Upgrade`: Actually install updates (1=daily) + +8. **Configure blacklist (packages to exclude):** + In `/etc/apt/apt.conf.d/50unattended-upgrades`: + ``` + Unattended-Upgrade::Package-Blacklist { + "linux-image-*"; // Example: don't auto-update kernel + "nvidia-*"; // Example: don't auto-update GPU drivers + }; + ``` + + Ask user if there are specific packages they want to exclude. + +9. **Test configuration:** + - Check configuration syntax: + ```bash + sudo unattended-upgrades --dry-run --debug + ``` + - View what would be updated: + ```bash + sudo unattended-upgrade --dry-run + ``` + +10. **Set up monitoring:** + - Check logs: `cat /var/log/unattended-upgrades/unattended-upgrades.log` + - Check dpkg log: `cat /var/log/dpkg.log` + - Monitor update service status: `systemctl status unattended-upgrades.service` + +11. 
**Configure additional safety options:** + In `/etc/apt/apt.conf.d/50unattended-upgrades`: + ``` + // Remove unused dependencies + Unattended-Upgrade::Remove-Unused-Dependencies "true"; + + // Remove unused kernel packages + Unattended-Upgrade::Remove-Unused-Kernel-Packages "true"; + + // Automatically remove new unused dependencies + Unattended-Upgrade::Remove-New-Unused-Dependencies "true"; + + // Split the upgrade into smallest possible chunks + Unattended-Upgrade::MinimalSteps "true"; + + // Install updates when on AC power only + Unattended-Upgrade::OnlyOnACPower "true"; // laptops only + ``` + +12. **Set up pre/post-update hooks (optional):** + If user wants custom actions before/after updates: + ``` + Unattended-Upgrade::PreUpdate "echo 'Starting updates' | logger"; + Unattended-Upgrade::PostUpdate "echo 'Updates complete' | logger"; + ``` + +13. **Enable and start the service:** + ```bash + sudo systemctl enable unattended-upgrades + sudo systemctl start unattended-upgrades + sudo systemctl status unattended-upgrades + ``` + +14. **Manual trigger for testing:** + ```bash + sudo unattended-upgrade -d + ``` + +15. **Provide best practices and recommendations:** + - **Desktops/Workstations**: Security updates only, no auto-reboot + - **Servers**: Security updates only, scheduled reboot window if needed + - **Laptops**: Same as desktop, plus OnlyOnACPower option + - **Production systems**: Manual updates preferred, or extensive testing + - Always check logs periodically: `/var/log/unattended-upgrades/` + - Test in non-production environment first + - Keep kernel packages in blacklist if you want manual control + - Consider using livepatch for kernel updates without rebooting + - Set up email notifications for important systems + - Monitor disk space - updates require free space + +16. 
**Show how to check what's configured:** + ```bash + # View current configuration + apt-config dump APT::Periodic + + # Check when updates last ran + ls -la /var/lib/apt/periodic/ + + # View update history + cat /var/log/unattended-upgrades/unattended-upgrades.log + ``` + +## Important notes: +- Backup configuration files before editing +- Test with --dry-run before enabling +- Auto-reboot can be disruptive - configure carefully +- Email requires MTA (mail system) configured +- Updates consume bandwidth and disk space +- Some updates may break custom configurations +- Keep an eye on logs after enabling +- Security updates are generally safe to auto-install +- Feature updates may require testing diff --git a/commands/sysadmin/linux-desktop/package-management/evaluate-installed-software.md b/commands/sysadmin/linux-desktop/package-management/evaluate-installed-software.md new file mode 100644 index 0000000000000000000000000000000000000000..039b652d2c1c9cfcf15b075003eafcdc21e09fb2 --- /dev/null +++ b/commands/sysadmin/linux-desktop/package-management/evaluate-installed-software.md @@ -0,0 +1,50 @@ +--- +description: Evaluate installed software and suggest complementary CLIs or GUIs +tags: [system, audit, software, recommendations, optimization, project, gitignored] +--- + +You are helping the user evaluate their installed software and suggest complementary tools. + +## Process + +1. **Inventory installed software** + - APT packages: `apt list --installed | wc -l` + - Snap packages: `snap list` + - Flatpak packages: `flatpak list` + - pip packages: `pip list` + - Manually installed in `~/programs` + +2. **Categorize software** + - Development tools + - Media/graphics applications + - System utilities + - Communication tools + - AI/ML tools + - Backup/storage tools + +3. 
**Identify gaps and complementary tools** + - For each category, suggest: + - Missing CLIs that complement existing GUIs + - Missing GUIs that complement existing CLIs + - Alternative tools that might be better suited + - Modern replacements for outdated tools + +4. **Examples of complementary suggestions** + - If `docker` installed, suggest `lazydocker` GUI + - If `git` installed, suggest `gitui` or `lazygit` + - If `code` (VS Code) installed, suggest useful extensions + - If media editing tools installed, suggest codec packages + - If Python installed, suggest `pipx` for isolated CLI tools + +5. **Present recommendations** + - Group by category + - Explain the benefit of each suggestion + - Prioritize based on user's existing software patterns + +## Output + +Provide a report showing: +- Summary of installed software by category +- List of recommended complementary tools +- Brief explanation of why each tool would be useful +- Installation commands for suggested tools diff --git a/commands/sysadmin/linux-desktop/package-management/identify-unused-packages.md b/commands/sysadmin/linux-desktop/package-management/identify-unused-packages.md new file mode 100644 index 0000000000000000000000000000000000000000..199d90111372434878457de834a80b617e1645c8 --- /dev/null +++ b/commands/sysadmin/linux-desktop/package-management/identify-unused-packages.md @@ -0,0 +1,55 @@ +--- +description: Identify packages user hasn't used recently and may wish to remove +tags: [system, cleanup, packages, optimization, project, gitignored] +--- + +You are helping the user identify unused packages that could be removed to free up space. + +## Process + +1. **Check package installation dates** + - For APT packages: `ls -lt /var/lib/dpkg/info/*.list | tail -50` + - Check package access times if available + +2. **Identify large packages** + - List by size: `dpkg-query -W -f='${Installed-Size}\t${Package}\n' | sort -rn | head -30` + - Focus on large packages that might be unused + +3. 
**Check Flatpak packages** + - List Flatpaks: `flatpak list --app` + - Check Flatpak size: `flatpak list --app --columns=name,application,size` + - Suggest running: `flatpak uninstall --unused` to remove unused runtimes + +4. **Check Snap packages** + - List snaps: `snap list` + - Check snap disk usage: `du -sh /var/lib/snapd/snaps` + - Identify old snap revisions: `snap list --all | grep disabled` + - Suggest: `snap remove --purge ` + +5. **Identify orphaned packages (APT)** + - Find orphaned packages: `deborphan` + - Check apt autoremove suggestions: `apt autoremove --dry-run` + +6. **Check for development packages** + - List `-dev` packages: `dpkg -l | grep -E "^ii.*-dev"` + - Ask user if they're actively developing and need these + +7. **Review by category** + - Games (if user doesn't game) + - Old kernels: `dpkg -l | grep linux-image` + - Language packs not needed + - Documentation packages (`-doc` suffix) + +8. **Present findings to user** + - Group by category and size + - Estimate space that could be freed + - Ask user to confirm before suggesting removal + +## Output + +Provide a report showing: +- Total number of installed packages +- Potentially unused packages by category +- Space that could be freed +- Safe removal suggestions +- Warning about packages to NOT remove (dependencies) diff --git a/commands/sysadmin/linux-desktop/package-management/install-brew.md b/commands/sysadmin/linux-desktop/package-management/install-brew.md new file mode 100644 index 0000000000000000000000000000000000000000..90148a34ebe109b48224870b0042d0b1577d91e4 --- /dev/null +++ b/commands/sysadmin/linux-desktop/package-management/install-brew.md @@ -0,0 +1,123 @@ +# Install Homebrew on Linux + +You are helping the user install Homebrew (brew) package manager on Linux. + +## Your tasks: + +1. **Check if Homebrew is already installed:** + - Check: `which brew` + - If installed: `brew --version` + - If already installed, ask if they want to update or reconfigure it + +2. 
**Check prerequisites:** + Homebrew requires: + - Git: `git --version` + - Curl: `curl --version` + - GCC: `gcc --version` + - Build essentials + + Install missing prerequisites: + ```bash + sudo apt update + sudo apt install build-essential procps curl file git + ``` + +3. **Download and run Homebrew installer:** + ```bash + /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" + ``` + + The script will: + - Install to `/home/linuxbrew/.linuxbrew` (multi-user) or `~/.linuxbrew` (single user) + - Set up necessary directories + - Install Homebrew + +4. **Add Homebrew to PATH:** + The installer will suggest adding Homebrew to your PATH. Add to ~/.bashrc or ~/.profile: + + ```bash + echo 'eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)"' >> ~/.bashrc + source ~/.bashrc + ``` + + Or for single-user installation: + ```bash + echo 'eval "$($HOME/.linuxbrew/bin/brew shellenv)"' >> ~/.bashrc + source ~/.bashrc + ``` + +5. **Verify installation:** + ```bash + brew --version + which brew + brew doctor + ``` + +6. **Run brew doctor and fix issues:** + `brew doctor` will check for common issues. Follow its recommendations: + - Install recommended dependencies + - Fix PATH issues + - Update outdated software + +7. **Install recommended packages:** + Homebrew recommends installing gcc: + ```bash + brew install gcc + ``` + +8. **Configure Homebrew (optional):** + - Disable analytics: `brew analytics off` + - Set up auto-update preferences + - Configure tap repositories + +9. **Show basic Homebrew usage:** + Explain to the user: + - `brew install ` - Install a package + - `brew uninstall ` - Remove a package + - `brew upgrade` - Upgrade all packages + - `brew update` - Update Homebrew itself + - `brew list` - List installed packages + - `brew search ` - Search for packages + - `brew info ` - Get package info + - `brew doctor` - Check for issues + - `brew cleanup` - Remove old versions + +10. 
**Set up common taps (optional):** + Ask if user wants popular taps: + ```bash + brew tap homebrew/cask-fonts # for fonts + brew tap homebrew/cask-versions # for alternative versions + ``` + +11. **Handle path conflicts:** + Check if Homebrew binaries conflict with system packages: + ```bash + which -a python3 + which -a git + ``` + Explain that Homebrew packages take precedence if in PATH correctly. + +12. **Performance optimization:** + - Set up Homebrew bottle (binary package) cache + - Configure number of parallel downloads: + ```bash + echo 'export HOMEBREW_MAKE_JOBS=4' >> ~/.bashrc + ``` + +13. **Provide best practices:** + - Run `brew update` regularly + - Run `brew upgrade` to keep packages current + - Run `brew cleanup` to free up space + - Use `brew doctor` to diagnose issues + - Pin packages you don't want upgraded: `brew pin ` + - Prefer Homebrew for development tools, apt for system packages + - Don't run brew with sudo + +## Important notes: +- Homebrew on Linux is called "Linuxbrew" +- Don't use sudo with brew commands +- Homebrew compiles from source if no bottle (binary) is available +- Can coexist with apt/apt-get +- Takes up significant disk space +- Compilation can take time +- Keep PATH properly configured diff --git a/commands/sysadmin/linux-desktop/package-management/sdk-check.md b/commands/sysadmin/linux-desktop/package-management/sdk-check.md new file mode 100644 index 0000000000000000000000000000000000000000..96c77d9d749a91b6777b71891e342aa4397beac4 --- /dev/null +++ b/commands/sysadmin/linux-desktop/package-management/sdk-check.md @@ -0,0 +1,2 @@ +Check which development SDKs I have installed on my computer. +Start with identifying what's on path. Then see what I might have elsewhere on the filesystem. 
\ No newline at end of file diff --git a/commands/sysadmin/linux-desktop/program-management/install-github-program.md b/commands/sysadmin/linux-desktop/program-management/install-github-program.md new file mode 100644 index 0000000000000000000000000000000000000000..2e7b3199e1f97638e57416f3e1203f5eb38cf6df --- /dev/null +++ b/commands/sysadmin/linux-desktop/program-management/install-github-program.md @@ -0,0 +1,50 @@ +You are helping Daniel install and set up a program from GitHub. + +## Your task + +1. **Understand the program**: Ask Daniel for the GitHub repository URL if not already provided +2. **Determine the category**: Analyze the program's purpose and select the most appropriate category from Daniel's `~/programs` directory structure: + - `ai-ml`: AI and machine learning applications + - `communication`: Communication tools + - `data-testing`: Data testing utilities + - `design`: Design software + - `development`: Development tools + - `media-graphics`: Media and graphics applications + - `monitoring-iot`: Monitoring and IoT tools + - `storage-backup`: Storage and backup utilities + - `system-utilities`: System utilities + +3. **Clone the repository**: Clone the GitHub repository to the appropriate subdirectory in `~/programs/[category]/` + +4. **Analyze setup requirements**: + - Check for README, INSTALL, or setup documentation + - Look for dependency requirements (package.json, requirements.txt, Cargo.toml, etc.) + - Identify build steps (Makefile, build scripts, etc.) + +5. **Install dependencies**: Install any required dependencies using the appropriate package manager: + - Python: `pip install -r requirements.txt` or `pip install -e .` + - Node.js: `npm install` or `yarn install` + - Rust: `cargo build --release` + - System packages: `sudo apt install [packages]` + +6. **Build if necessary**: Run any build commands specified in the documentation + +7. 
**Create symlinks or add to PATH**: If the program has executables: + - Either create symlinks in `~/.local/bin/` (or `/usr/local/bin/` with sudo) + - Or document how to add the program to PATH + +8. **Test the installation**: Verify the program runs correctly + +9. **Document the installation**: Create a brief summary including: + - Where the program was installed + - Any configuration steps taken + - How to run/access the program + - Any additional setup needed + +## Important notes + +- Use `gh repo clone` when possible for authenticated GitHub access +- Preserve the program's directory structure +- Don't modify the original repository files unless necessary for configuration +- If unsure about the category, ask Daniel for guidance +- Always test before declaring success diff --git a/commands/sysadmin/linux-desktop/python-environments/identify-python-environments.md b/commands/sysadmin/linux-desktop/python-environments/identify-python-environments.md new file mode 100644 index 0000000000000000000000000000000000000000..8eb5a9121ce49733183d675890532106b8aab45a --- /dev/null +++ b/commands/sysadmin/linux-desktop/python-environments/identify-python-environments.md @@ -0,0 +1,89 @@ +# Python Environment Manager Identification + +You are helping the user identify their system Python installation and all Python environment managers in use. + +## Your tasks: + +1. **Identify system Python:** + - System Python version: `python3 --version` + - System Python location: `which python3` + - Check if python (unversioned) exists: `which python` + - Python paths: `python3 -c "import sys; print(sys.executable)"` + - List all Python installations: `which -a python python3 python2` + +2. 
**Check for pyenv:** + - Check if installed: `which pyenv` + - If installed: + - Version: `pyenv --version` + - Root directory: `echo $PYENV_ROOT` or default `~/.pyenv` + - Installed Python versions: `pyenv versions` + - Global Python: `pyenv global` + - Local Python (if set): `pyenv local` + - Check if properly initialized in shell: `grep -r "pyenv init" ~/.bashrc ~/.zshrc ~/.profile 2>/dev/null` + +3. **Check for Conda/Miniconda/Anaconda:** + - Check if conda is installed: `which conda` + - If installed: + - Version: `conda --version` + - Conda info: `conda info` + - Base environment location: `echo $CONDA_PREFIX` + - List environments: `conda env list` + - Current environment: `echo $CONDA_DEFAULT_ENV` + - Check initialization: `grep -r "conda initialize" ~/.bashrc ~/.zshrc ~/.profile 2>/dev/null` + +4. **Check for Mamba:** + - Check if installed: `which mamba` + - If installed: + - Version: `mamba --version` + - Environments: `mamba env list` + +5. **Check for Poetry:** + - Check if installed: `which poetry` + - If installed: + - Version: `poetry --version` + - Config location: `poetry config --list` + - Virtual environment settings: `poetry config virtualenvs.path` + +6. **Check for pipenv:** + - Check if installed: `which pipenv` + - If installed: + - Version: `pipenv --version` + - Environment variable settings: `echo $PIPENV_VENV_IN_PROJECT` + +7. **Check for virtualenv/venv:** + - Check if virtualenv is installed: `which virtualenv` + - Check for virtualenvwrapper: `which virtualenvwrapper.sh` + - If virtualenvwrapper found: + - Check workon home: `echo $WORKON_HOME` + - List environments: `lsvirtualenv` (if available) + +8. **Check for other Python version managers:** + - asdf with Python plugin: `which asdf` and `asdf plugin list | grep python` + - pythonz: `which pythonz` + - Check for manual Python installations in common locations: + - `/usr/local/bin/python*` + - `/opt/python*` + - `~/.local/bin/python*` + +9. 
**Analyze pip installations:** + - System pip: `pip3 --version` + - Pip location: `which pip3 pip` + - User site packages: `python3 -m site --user-site` + - List globally installed packages: `pip3 list --user` + +10. **Report summary:** + - System Python version and location + - All detected environment managers with versions + - Which manager is currently active (if any) + - Any conflicts or issues detected (e.g., multiple managers competing) + - Recommendations: + - If no environment manager is detected, suggest installing one (pyenv or conda) + - If multiple managers are detected, explain their different use cases + - Suggest best practices for the detected setup + - Warn about potential PATH conflicts + +## Important notes: +- Don't use sudo for these checks (environment managers are typically user-level) +- Be clear about which Python is currently active vs. available +- Explain the difference between system Python and managed versions +- If shell initialization is missing for detected managers, point that out diff --git a/commands/sysadmin/linux-desktop/python-environments/manage-conda-environments.md b/commands/sysadmin/linux-desktop/python-environments/manage-conda-environments.md new file mode 100644 index 0000000000000000000000000000000000000000..40e749300672a76de0e89b16ce6aa0f85e1cab8d --- /dev/null +++ b/commands/sysadmin/linux-desktop/python-environments/manage-conda-environments.md @@ -0,0 +1,368 @@ +# Manage Conda Environments + +You are helping the user list conda environments and work with them to add packages. + +## Your tasks: + +1. **Check if conda is installed:** + ```bash + which conda + conda --version + conda info + ``` + + If not installed, offer to help install Miniconda or Anaconda. + +2. **List all conda environments:** + ```bash + conda env list + # or + conda info --envs + ``` + + This shows: + - All environment names + - Their locations + - Current active environment (marked with *) + +3. 
**Show current environment:** + ```bash + echo $CONDA_DEFAULT_ENV + conda info --envs | grep "*" + ``` + +4. **Display detailed environment information:** + For each environment, show: + ```bash + # List packages in specific environment + conda list -n + + # Show environment details + conda env export -n + + # Show size + du -sh ~/miniconda3/envs/ + # or + du -sh ~/anaconda3/envs/ + ``` + +5. **Ask user which environment to work with:** + Present the list and ask which environment they want to modify or examine. + +6. **Activate environment:** + ```bash + conda activate + ``` + + Verify activation: + ```bash + conda info --envs + python --version + which python + ``` + +7. **Show packages in environment:** + ```bash + conda list + # or for specific environment + conda list -n + + # Show only explicitly installed packages + conda env export --from-history -n + ``` + +8. **Search for packages:** + Ask what packages user wants to install: + ```bash + conda search + conda search --info + ``` + +9. **Install packages:** + + **Single package:** + ```bash + conda install + # or specify environment + conda install -n + ``` + + **Multiple packages:** + ```bash + conda install + ``` + + **Specific version:** + ```bash + conda install = + # Example: + conda install python=3.11 + conda install numpy=1.24.0 + ``` + + **From specific channel:** + ```bash + conda install -c conda-forge + ``` + +10. 
**Suggest common packages by category:** + + **Data Science:** + ```bash + conda install numpy pandas matplotlib seaborn scikit-learn + conda install jupyter jupyterlab notebook + conda install scipy statsmodels + ``` + + **Machine Learning:** + ```bash + conda install tensorflow pytorch torchvision + conda install keras scikit-learn xgboost + conda install -c conda-forge lightgbm + ``` + + **Development:** + ```bash + conda install ipython black flake8 pytest + conda install requests beautifulsoup4 selenium + conda install flask django fastapi + ``` + + **Visualization:** + ```bash + conda install matplotlib seaborn plotly + conda install bokeh altair + ``` + + **Database:** + ```bash + conda install sqlalchemy psycopg2 pymongo + conda install sqlite + ``` + +11. **Update packages:** + + **Update specific package:** + ```bash + conda update + ``` + + **Update all packages in environment:** + ```bash + conda update --all + ``` + + **Update conda itself:** + ```bash + conda update conda + ``` + +12. **Remove packages:** + ```bash + conda remove + # or from specific environment + conda remove -n + ``` + +13. **Create new environment:** + Offer to create a new environment: + ```bash + # Basic environment + conda create -n python=3.11 + + # With packages + conda create -n myenv python=3.11 numpy pandas jupyter + + # From file + conda env create -f environment.yml + ``` + +14. **Export environment:** + Help user export environment for sharing: + + **Full export (with all dependencies):** + ```bash + conda env export -n > environment.yml + ``` + + **Only explicitly installed packages:** + ```bash + conda env export --from-history -n > environment.yml + ``` + + **As requirements.txt:** + ```bash + conda list -n --export > requirements.txt + ``` + +15. **Clone environment:** + ```bash + conda create --name --clone + ``` + +16. 
**Clean up conda:** + ```bash + # Remove unused packages and caches + conda clean --all + + # Remove packages cache + conda clean --packages + + # Remove tarballs + conda clean --tarballs + + # Check what would be removed + conda clean --all --dry-run + ``` + +17. **Check environment conflicts:** + ```bash + # Check for broken dependencies + conda info + + # Verify environment + conda env export -n | conda env create -n test-env -f - + ``` + +18. **Show environment size:** + ```bash + # Size of all environments + du -sh ~/miniconda3/envs/* + # or + du -sh ~/anaconda3/envs/* + + # Total conda size + du -sh ~/miniconda3 + ``` + +19. **Configure conda:** + + **Add channels:** + ```bash + conda config --add channels conda-forge + conda config --add channels bioconda + ``` + + **Set channel priority:** + ```bash + conda config --set channel_priority strict + ``` + + **Show configuration:** + ```bash + conda config --show + conda config --show channels + ``` + + **Remove channel:** + ```bash + conda config --remove channels + ``` + +20. **Use mamba (faster alternative):** + If user has performance issues: + ```bash + # Install mamba + conda install mamba -n base -c conda-forge + + # Use mamba instead of conda + mamba install + mamba search + mamba env create -f environment.yml + ``` + +21. **Troubleshooting common issues:** + + **Environment not activating:** + ```bash + conda init bash + source ~/.bashrc + ``` + + **Package conflicts:** + ```bash + # Create new environment instead + conda create -n new-env python=3.11 + ``` + + **Slow package resolution:** + ```bash + # Use mamba + conda install mamba -c conda-forge + # or + conda config --set solver libmamba + ``` + + **Conda command not found:** + ```bash + export PATH="$HOME/miniconda3/bin:$PATH" + conda init bash + ``` + +22. 
**Best practices to share:** + - Create separate environment for each project + - Use environment.yml for reproducibility + - Pin important package versions + - Use conda-forge channel for latest packages + - Regularly clean up with `conda clean --all` + - Don't install packages in base environment + - Use mamba for faster package resolution + - Export environments before major changes + - Keep Python version explicit in environment + - Use `--from-history` for cross-platform compatibility + +23. **Show workflow example:** + ```bash + # Create environment + conda create -n data-project python=3.11 + + # Activate it + conda activate data-project + + # Install packages + conda install numpy pandas jupyter matplotlib scikit-learn + + # Verify + conda list + + # Export for sharing + conda env export --from-history > environment.yml + + # Deactivate when done + conda deactivate + ``` + +24. **Integration with Jupyter:** + ```bash + # Install ipykernel in environment + conda activate + conda install ipykernel + + # Register environment as Jupyter kernel + python -m ipykernel install --user --name= + + # Now available in Jupyter + jupyter lab + ``` + +25. 
**Report current status:** + Summarize: + - Number of environments + - Active environment + - Total disk usage + - conda version + - Suggested actions based on user needs + +## Important notes: +- Always activate environment before installing packages +- Base environment should be kept minimal +- Use `-n ` to work with environments without activating +- conda-forge channel has more packages than default +- mamba is drop-in replacement, much faster +- Environment.yml files ensure reproducibility +- Pin versions in production environments +- Clean up regularly to save disk space +- Don't mix pip and conda unless necessary (prefer conda) +- Use `--from-history` when exporting for other OS +- Jupyter needs ipykernel in each environment +- conda init modifies .bashrc - check if needed diff --git a/commands/sysadmin/linux-desktop/python-environments/setup-conda-data-analysis.md b/commands/sysadmin/linux-desktop/python-environments/setup-conda-data-analysis.md new file mode 100644 index 0000000000000000000000000000000000000000..86baf2441b32b2b636496f679003c67f815c4497 --- /dev/null +++ b/commands/sysadmin/linux-desktop/python-environments/setup-conda-data-analysis.md @@ -0,0 +1,88 @@ +--- +description: Set up conda environment for data analysis +tags: [python, conda, data-analysis, jupyter, pandas, project, gitignored] +--- + +You are helping the user set up a conda environment for data analysis. + +## Process + +1. **Create base environment** + ```bash + conda create -n data-analysis python=3.11 -y + conda activate data-analysis + ``` + +2. **Install core data analysis libraries** + ```bash + conda install -c conda-forge pandas numpy scipy -y + ``` + +3. **Install visualization libraries** + ```bash + conda install -c conda-forge matplotlib seaborn plotly -y + pip install altair + pip install bokeh + ``` + +4. 
**Install Jupyter ecosystem** + ```bash + conda install -c conda-forge jupyter jupyterlab ipywidgets -y + pip install jupyterlab-git + pip install jupyterlab-lsp + ``` + +5. **Install statistical and ML libraries** + ```bash + conda install -c conda-forge scikit-learn statsmodels -y + pip install scipy + pip install pingouin # Statistics + ``` + +6. **Install data processing tools** + ```bash + conda install -c conda-forge openpyxl xlrd -y # Excel support + pip install pyarrow fastparquet # Parquet support + pip install sqlalchemy # Database connectivity + pip install beautifulsoup4 # Web scraping + pip install requests # HTTP requests + ``` + +7. **Install data manipulation tools** + ```bash + pip install polars # Fast DataFrame library + pip install dask # Parallel computing + pip install vaex # Big data processing + ``` + +8. **Install database drivers** + ```bash + pip install psycopg2-binary # PostgreSQL + pip install pymongo # MongoDB + pip install redis # Redis + ``` + +9. **Install development tools** + ```bash + pip install black # Code formatting + pip install pylint # Linting + pip install ipdb # Debugging + ``` + +10. **Configure Jupyter extensions** + - Enable useful extensions + - Set up theme preferences + - Configure autosave + +11. 
**Create example notebook** + - Offer to create `~/notebooks/data-analysis-template.ipynb` with common imports + +## Output + +Provide a summary showing: +- Environment name and setup status +- Installed libraries grouped by category +- Jupyter Lab configuration +- Example import statements +- Suggested workflows +- Links to documentation diff --git a/commands/sysadmin/linux-desktop/python-environments/setup-conda-llm-finetune.md b/commands/sysadmin/linux-desktop/python-environments/setup-conda-llm-finetune.md new file mode 100644 index 0000000000000000000000000000000000000000..cec17bf0b257052d4af202be5f3cda65f642f2e4 --- /dev/null +++ b/commands/sysadmin/linux-desktop/python-environments/setup-conda-llm-finetune.md @@ -0,0 +1,108 @@ +--- +description: Set up conda environment for LLM fine-tuning +tags: [python, conda, llm, fine-tuning, ai, development, project, gitignored] +--- + +You are helping the user set up a conda environment for LLM fine-tuning. + +## Process + +1. **Create base environment** + ```bash + conda create -n llm-finetune python=3.11 -y + conda activate llm-finetune + ``` + +2. **Install PyTorch with ROCm** + ```bash + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.0 + ``` + +3. **Install core fine-tuning libraries** + + **Hugging Face ecosystem:** + ```bash + pip install transformers + pip install datasets + pip install accelerate + pip install evaluate + pip install peft # Parameter-Efficient Fine-Tuning + pip install bitsandbytes # Quantization (may need special build for ROCm) + ``` + + **Training frameworks:** + ```bash + pip install trl # Transformer Reinforcement Learning + pip install deepspeed # Distributed training (if needed) + ``` + +4. **Install quantization and optimization tools** + ```bash + pip install optimum + pip install auto-gptq # GPTQ quantization + pip install autoawq # AWQ quantization + ``` + +5. 
**Install evaluation and monitoring tools** + ```bash + pip install wandb # Weights & Biases for experiment tracking + pip install tensorboard + pip install rouge-score # Text evaluation + pip install sacrebleu # Translation metrics + ``` + +6. **Install data processing tools** + ```bash + pip install pandas + pip install numpy + pip install scipy + pip install scikit-learn + pip install nltk + pip install spacy + ``` + +7. **Install specialized fine-tuning tools** + ```bash + pip install axolotl # LLM fine-tuning framework (check the axolotl repo for current install instructions) + pip install unsloth # Fast fine-tuning (if compatible with ROCm) + # QLoRA (quantized LoRA) is not a separate package - it is provided by peft + bitsandbytes, installed above + ``` + +8. **Install Jupyter for interactive work** + ```bash + conda install -c conda-forge jupyter jupyterlab ipywidgets -y + ``` + +9. **Create example fine-tuning script** + - Offer to create `~/scripts/llm-finetune-example.py` with basic LoRA setup + +10. **Test installation** + ```python + import torch + from transformers import AutoModelForCausalLM, AutoTokenizer + from peft import LoraConfig, get_peft_model + + print(f"PyTorch: {torch.__version__}") + print(f"GPU available: {torch.cuda.is_available()}") + print("All libraries imported successfully!") + ``` + +11. **Create resource estimation script** + - Offer to create script to estimate VRAM needs for different model sizes + +12.
**Suggest popular models for fine-tuning** + - Llama 3.2 (3B, 8B) + - Mistral 7B + - Qwen 2.5 (7B, 14B) + - Phi-3 (3.8B) + +## Output + +Provide a summary showing: +- Environment name and setup status +- Installed libraries grouped by purpose +- GPU detection status +- VRAM available for training +- Suggested model sizes for available hardware +- Example command to start fine-tuning +- Links to documentation/tutorials diff --git a/commands/sysadmin/linux-desktop/python-environments/setup-conda-rocm.md b/commands/sysadmin/linux-desktop/python-environments/setup-conda-rocm.md new file mode 100644 index 0000000000000000000000000000000000000000..1718b862f8ac40597a23a1abce4c0717038eed1a --- /dev/null +++ b/commands/sysadmin/linux-desktop/python-environments/setup-conda-rocm.md @@ -0,0 +1,86 @@ +--- +description: Set up conda environment for ROCm and PyTorch +tags: [python, conda, rocm, pytorch, ai, development, project, gitignored] +--- + +You are helping the user set up a conda environment optimized for ROCm and PyTorch. + +## Process + +1. **Check if conda is installed** + - Run: `conda --version` + - If not installed, suggest installing Miniconda or Anaconda + - Installation: `wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && bash Miniconda3-latest-Linux-x86_64.sh` + +2. **Verify ROCm is available on system** + - Check: `rocminfo` + - Get ROCm version: `cat /opt/rocm/.info/version` (note: `rocminfo | grep "Name:"` prints an agent name, not the version) + - Typical ROCm versions: 5.7, 6.0, 6.1 + +3. **Create conda environment** + ```bash + conda create -n rocm-pytorch python=3.11 -y + conda activate rocm-pytorch + ``` + +4.
**Install PyTorch with ROCm support** + - Check compatible PyTorch version at: pytorch.org/get-started/locally/ + - Install based on ROCm version: + + ```bash + # For ROCm 6.0 + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.0 + + # For ROCm 5.7 + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm5.7 + ``` + +5. **Install essential ML libraries** + ```bash + conda install -c conda-forge numpy scipy matplotlib jupyter ipython -y + pip install pandas scikit-learn + ``` + +6. **Install deep learning tools** + ```bash + pip install transformers accelerate datasets + pip install tensorboard + pip install onnx onnxruntime + ``` + +7. **Test PyTorch ROCm integration** + ```python + import torch + print(f"PyTorch version: {torch.__version__}") + print(f"CUDA available: {torch.cuda.is_available()}") # ROCm uses CUDA API + if torch.cuda.is_available(): + print(f"Device name: {torch.cuda.get_device_name(0)}") + print(f"Device count: {torch.cuda.device_count()}") + ``` + +8. **Create activation script** + - Offer to create `~/scripts/activate-rocm-pytorch.sh`: + ```bash + #!/bin/bash + eval "$(conda shell.bash hook)" + conda activate rocm-pytorch + echo "ROCm PyTorch environment activated" + python -c "import torch; print(f'PyTorch: {torch.__version__}, CUDA available: {torch.cuda.is_available()}')" + ``` + +9. 
**Optional: Install additional tools** + - Suggest: + - `timm` - PyTorch image models + - `torchmetrics` - Metrics + - `lightning` - PyTorch Lightning + - `einops` - Tensor operations + +## Output + +Provide a summary showing: +- Conda environment name and Python version +- PyTorch version and ROCm compatibility +- GPU detection status +- List of installed packages +- Test results showing GPU is accessible +- Activation command for future use diff --git a/commands/sysadmin/linux-desktop/python-environments/setup-conda-stt-finetune.md b/commands/sysadmin/linux-desktop/python-environments/setup-conda-stt-finetune.md new file mode 100644 index 0000000000000000000000000000000000000000..17ff6ecda7bb6ea73e95d5858c9511c2030497b1 --- /dev/null +++ b/commands/sysadmin/linux-desktop/python-environments/setup-conda-stt-finetune.md @@ -0,0 +1,103 @@ +--- +description: Set up conda environment for speech-to-text fine-tuning +tags: [python, conda, stt, whisper, speech, ai, fine-tuning, project, gitignored] +--- + +You are helping the user set up a conda environment for speech-to-text (STT) fine-tuning. + +## Process + +1. **Create base environment** + ```bash + conda create -n stt-finetune python=3.11 -y + conda activate stt-finetune + ``` + +2. **Install PyTorch with ROCm** + ```bash + pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.0 + ``` + +3. **Install Whisper and related libraries** + ```bash + pip install openai-whisper + pip install faster-whisper # Optimized inference + pip install whisperx # Advanced features + ``` + +4. **Install Hugging Face libraries** + ```bash + pip install transformers + pip install datasets + pip install accelerate + pip install evaluate + pip install peft # For LoRA fine-tuning + ``` + +5. 
**Install audio processing libraries** + ```bash + pip install librosa # Audio analysis + pip install soundfile # Audio I/O + pip install pydub # Audio manipulation + pip install sox # Audio processing + conda install -c conda-forge ffmpeg -y # Audio conversion + ``` + +6. **Install speech-specific tools** + ```bash + pip install jiwer # Word Error Rate calculation + pip install speechbrain # Speech toolkit + pip install pyannote.audio # Speaker diarization + ``` + +7. **Install data processing tools** + ```bash + pip install pandas + pip install numpy + pip install scipy + pip install matplotlib + pip install seaborn # Visualization + ``` + +8. **Install monitoring and experimentation** + ```bash + pip install wandb # Experiment tracking + pip install tensorboard + ``` + +9. **Install Jupyter for interactive work** + ```bash + conda install -c conda-forge jupyter jupyterlab ipywidgets -y + ``` + +10. **Test installation** + ```python + import torch + import whisper + import librosa + from transformers import WhisperProcessor, WhisperForConditionalGeneration + + print(f"PyTorch: {torch.__version__}") + print(f"GPU available: {torch.cuda.is_available()}") + print("All libraries imported successfully!") + ``` + +11. **Suggest common datasets** + - Common Voice (Mozilla) + - LibriSpeech + - TEDLIUM + - Custom datasets + +12. 
**Create example script** + - Offer to create `~/scripts/whisper-finetune-example.py` with basic setup + +## Output + +Provide a summary showing: +- Environment name and setup status +- Installed libraries grouped by purpose +- GPU detection status +- Available VRAM for training +- Suggested datasets for fine-tuning +- Example commands for testing +- Links to documentation/tutorials diff --git a/commands/sysadmin/linux-desktop/python-environments/setup-pyenv.md b/commands/sysadmin/linux-desktop/python-environments/setup-pyenv.md new file mode 100644 index 0000000000000000000000000000000000000000..7972fd02c442c839e431991ef8b847dda0aa9dea --- /dev/null +++ b/commands/sysadmin/linux-desktop/python-environments/setup-pyenv.md @@ -0,0 +1,52 @@ +--- +description: Install pyenv and help user set up various Python versions +tags: [python, development, pyenv, versions, setup, project, gitignored] +--- + +You are helping the user install pyenv and set up multiple Python versions. + +## Process + +1. **Check if pyenv is already installed** + - Run `pyenv --version` to check + - Check `~/.pyenv` directory + +2. **Install pyenv if needed** + - Install dependencies: `sudo apt install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncursesw5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev` + - Clone pyenv: `curl https://pyenv.run | bash` + - Add to shell config (`~/.bashrc` or `~/.zshrc`): + ```bash + export PYENV_ROOT="$HOME/.pyenv" + export PATH="$PYENV_ROOT/bin:$PATH" + eval "$(pyenv init -)" + eval "$(pyenv virtualenv-init -)" + ``` + - Reload shell: `source ~/.bashrc` + +3. **Check currently installed Python versions** + - Run `pyenv versions` to see installed versions + - Run `python --version` to see system Python + +4. 
**Work with user to install desired versions** + - Ask which Python versions they need + - Show available versions: `pyenv install --list` + - Common versions to suggest: 3.11.x, 3.12.x, 3.13.x + - Install versions: `pyenv install 3.12.7` (example) + +5. **Configure Python versions** + - Set global default: `pyenv global 3.12.7` + - Set local (directory-specific): `pyenv local 3.11.5` + - Show how to create virtualenvs: `pyenv virtualenv 3.12.7 myproject` + +6. **Verify installation** + - Check active version: `pyenv version` + - Test Python: `python --version` + - Test pip: `pip --version` + +## Output + +Provide a summary showing: +- pyenv installation status +- List of installed Python versions +- Current global/local version settings +- Suggestions for useful versions based on user's needs diff --git a/commands/sysadmin/linux-desktop/security/av/install-clamav.md b/commands/sysadmin/linux-desktop/security/av/install-clamav.md new file mode 100644 index 0000000000000000000000000000000000000000..ce267e26fb121f99f08dc67adcbb737657ec0a90 --- /dev/null +++ b/commands/sysadmin/linux-desktop/security/av/install-clamav.md @@ -0,0 +1,89 @@ +You are tasked with installing and configuring ClamAV (command-line antivirus) and ClamTK (GUI frontend) on this Linux system if they are not already installed. + +## Your Task + +Set up a complete antivirus solution using ClamAV with the ClamTK graphical interface for easy management. + +## Installation Steps + +### 1. Check Current Installation Status +- Verify if ClamAV is already installed: `dpkg -l | grep clamav` +- Verify if ClamTK is already installed: `dpkg -l | grep clamtk` +- If both are installed and working, inform the user and skip to configuration verification + +### 2. 
Install Packages (if needed) +Install the following packages using apt: +- `clamav` - Core antivirus engine +- `clamav-daemon` - ClamAV daemon for background scanning +- `clamav-freshclam` - Virus definition updater +- `clamtk` - Graphical user interface for ClamAV + +Use sudo for installation. + +### 3. Initial Configuration + +After installation: +- Stop the freshclam service: `sudo systemctl stop clamav-freshclam` +- Update virus definitions manually first: `sudo freshclam` +- Start the freshclam service: `sudo systemctl start clamav-freshclam` +- Enable freshclam to start on boot: `sudo systemctl enable clamav-freshclam` + +### 4. Configure ClamAV Daemon +- Start the ClamAV daemon: `sudo systemctl start clamav-daemon` +- Enable it for automatic startup: `sudo systemctl enable clamav-daemon` +- Verify daemon is running: `sudo systemctl status clamav-daemon` + +### 5. Verify Installation +- Check ClamAV version: `clamscan --version` +- Check virus definition database date: `sudo freshclam --version` and verify freshclam status +- Verify ClamTK launches: Inform user they can test by running `clamtk` from terminal or application menu + +### 6. 
Initial Scan Setup Recommendations +Provide guidance on: +- Running a quick test scan: `clamscan -r /home/[username]/Downloads` +- Setting up scheduled scans via ClamTK +- Configuring scan exclusions if needed +- Understanding quarantine location + +## Post-Installation Information + +Provide the user with: +- Location of ClamAV logs: `/var/log/clamav/` +- How to update definitions manually: `sudo freshclam` +- How to run a full system scan: `sudo clamscan -r /` +- ClamTK location in application menu (typically under System or Utilities) +- Recommendation to set up automatic scheduled scans via ClamTK GUI + +## Output Format + +``` +CLAMAV/CLAMTK INSTALLATION REPORT + +=== INSTALLATION STATUS === +ClamAV: [Installed/Already Present] +ClamAV Daemon: [Running/Status] +FreshClam: [Running/Status] +ClamTK: [Installed/Already Present] + +=== VIRUS DEFINITIONS === +Last Updated: [date/time] +Database Version: [version] +Signatures: [number] + +=== SERVICES STATUS === +clamav-daemon: [active/inactive] +clamav-freshclam: [active/inactive] + +=== NEXT STEPS === +[Recommendations for first scan, scheduled scans, etc.] 
+``` + +## Important Notes + +- Use sudo for all installation and system configuration commands +- Handle cases where packages are already installed gracefully +- Ensure virus definitions are updated before declaring success +- Verify services are running and enabled +- If any step fails, provide clear error messages and troubleshooting steps +- For Ubuntu/Debian systems, use apt package manager +- Initial virus definition update may take several minutes - be patient diff --git a/commands/sysadmin/linux-desktop/security/detect-spyware.md b/commands/sysadmin/linux-desktop/security/detect-spyware.md new file mode 100644 index 0000000000000000000000000000000000000000..a096903046da7dec2a79a98869e3095c3317c364 --- /dev/null +++ b/commands/sysadmin/linux-desktop/security/detect-spyware.md @@ -0,0 +1,62 @@ +--- +description: Detect known spyware packages and suggest removal +tags: [security, spyware, privacy, audit, project, gitignored] +--- + +You are helping the user identify any software known to contain spyware or privacy issues. + +## Process + +1. **Check for known problematic software** + - Scan installed packages against known spyware list + - Common categories to check: + - Browser extensions + - "Free" VPN applications + - Screen recorders with telemetry + - System "optimizers" + - Certain proprietary drivers + +2. **Check for telemetry in common applications** + - VS Code vs VSCodium (telemetry difference) + - Ubuntu's whoopsie (error reporting) + - Canonical's snapd telemetry + - Google Chrome vs Chromium + +3. **Network activity monitoring** + - Check for suspicious outbound connections: `sudo netstat -tupn | grep ESTABLISHED` + - Identify processes making external connections + - Suggest using `wireshark` or `tcpdump` for deeper analysis + +4. 
**Known spyware patterns to check** + - Red Star OS components (North Korean) + - Chinese software with known backdoors + - Certain "free" antivirus software + - Keyloggers disguised as utilities + - Browser hijackers + +5. **Privacy-concerning legitimate software** + - Software with excessive telemetry: + - Ubuntu's apport (crash reporting) + - popularity-contest + - Some proprietary drivers + - Suggest privacy-respecting alternatives + +6. **Browser extension audit** + - Check Chrome/Firefox extension directories + - Identify extensions with excessive permissions + - Flag abandoned extensions (security risk) + +7. **Suggest privacy-focused alternatives** + - VS Code β†’ VSCodium + - Chrome β†’ Chromium or Firefox + - Zoom β†’ Jitsi + - Windows telemetry remnants if dual-boot + +## Output + +Provide a report showing: +- Any detected spyware (with severity level) +- Privacy-concerning software with excessive telemetry +- Suspicious network connections +- Recommended actions for each finding +- Privacy-focused alternatives to suggest diff --git a/commands/sysadmin/linux-desktop/security/firewall/analyze-firewall.md b/commands/sysadmin/linux-desktop/security/firewall/analyze-firewall.md new file mode 100644 index 0000000000000000000000000000000000000000..ee0a92d656689956f3484280fdcbc638eb33ee9b --- /dev/null +++ b/commands/sysadmin/linux-desktop/security/firewall/analyze-firewall.md @@ -0,0 +1,286 @@ +# Analyze Firewall and Suggest Hardening + +You are helping the user check if a firewall is running, analyze open ports, and suggest potential hardening. + +## Your tasks: + +1. 
**Check if a firewall is active:** + + **UFW (Uncomplicated Firewall):** + ```bash + sudo ufw status verbose + ``` + + **iptables (lower level):** + ```bash + sudo iptables -L -n -v + sudo ip6tables -L -n -v + ``` + + **firewalld (if used):** + ```bash + sudo firewall-cmd --state + sudo firewall-cmd --list-all + ``` + + **nftables (modern replacement for iptables):** + ```bash + sudo nft list ruleset + ``` + +2. **If no firewall is active, recommend enabling UFW:** + ```bash + sudo apt install ufw + sudo ufw enable + sudo ufw status + ``` + +3. **Check currently listening services:** + ```bash + sudo ss -tulpn + # Or + sudo netstat -tulpn + ``` + + This shows what services are listening on which ports. + +4. **Check for open ports from external perspective:** + ```bash + sudo nmap -sT -O localhost + ``` + + Or install nmap if not available: + ```bash + sudo apt install nmap + ``` + +5. **Analyze each open port:** + For each listening port, identify: + - Which service is using it + - Whether it should be accessible from network + - Current firewall rules for it + + Common ports to check: + - 22 (SSH) + - 80 (HTTP) + - 443 (HTTPS) + - 3306 (MySQL) + - 5432 (PostgreSQL) + - 6379 (Redis) + - 27017 (MongoDB) + - 3389 (RDP) + - 445 (SMB) + - 2049 (NFS) + +6. **Check UFW rules in detail:** + ```bash + sudo ufw status numbered + sudo ufw show added + ``` + +7. **Check iptables rules in detail:** + ```bash + sudo iptables -S + sudo iptables -L INPUT -v -n + sudo iptables -L OUTPUT -v -n + sudo iptables -L FORWARD -v -n + ``` + +8. **Identify potential security issues:** + + **Services listening on 0.0.0.0 (all interfaces):** + These are accessible from network. Should they be? + ```bash + sudo ss -tulpn | grep "0.0.0.0" + ``` + + **Services that should only be local:** + Databases, Redis, etc. 
should typically only listen on 127.0.0.1: + ```bash + sudo ss -tulpn | grep -v "127.0.0.1" + ``` + + **Unnecessary services:** + Check for services that shouldn't be running: + ```bash + sudo systemctl list-units --type=service --state=running | grep -E "telnet|ftp|rsh" + ``` + +9. **Analyze by service type:** + + **SSH (port 22):** + - Should SSH be accessible from internet? + - Consider changing default port + - Check SSH configuration: `cat /etc/ssh/sshd_config | grep -v "^#" | grep -v "^$"` + - Verify key-only authentication is enforced + - Check fail2ban status: `sudo systemctl status fail2ban` + + **Web services (80, 443):** + - Are these intentional? + - Is there a web server running? + - Check for default/test pages + + **Databases (3306, 5432, 27017, etc.):** + - Should NEVER be exposed to internet + - Should listen only on 127.0.0.1 + - Check configuration files + +10. **Check for common attack vectors:** + ```bash + # Check for services with known vulnerabilities + sudo ss -tulpn | grep -E "telnet|ftp|rlogin|rsh|rexec" + + # Check for uncommon high ports + sudo ss -tulpn | awk '{print $5}' | cut -d: -f2 | sort -n | uniq + ``` + +11. **Suggest hardening measures:** + + **Enable UFW if not active:** + ```bash + sudo ufw default deny incoming + sudo ufw default allow outgoing + sudo ufw enable + ``` + + **For SSH access:** + ```bash + sudo ufw allow 22/tcp comment 'SSH' + # Or from specific IP: + sudo ufw allow from to any port 22 comment 'SSH from specific IP' + ``` + + **For web server:** + ```bash + sudo ufw allow 80/tcp comment 'HTTP' + sudo ufw allow 443/tcp comment 'HTTPS' + ``` + + **For local network only:** + ```bash + sudo ufw allow from 192.168.1.0/24 comment 'Local network' + ``` + +12. **Install and configure fail2ban (recommended):** + ```bash + sudo apt install fail2ban + sudo systemctl enable fail2ban + sudo systemctl start fail2ban + sudo fail2ban-client status + sudo fail2ban-client status sshd + ``` + +13. 
**Check for IPv6 exposure:** + ```bash + sudo ss -tulpn6 + sudo ufw status + ``` + + Ensure IPv6 is also protected: + ```bash + sudo ufw default deny incoming + # UFW handles both IPv4 and IPv6 + ``` + +14. **Advanced iptables hardening (if using iptables):** + + **Drop invalid packets:** + ```bash + sudo iptables -A INPUT -m conntrack --ctstate INVALID -j DROP + ``` + + **Rate limit SSH:** + ```bash + sudo iptables -A INPUT -p tcp --dport 22 -m conntrack --ctstate NEW -m recent --set + sudo iptables -A INPUT -p tcp --dport 22 -m conntrack --ctstate NEW -m recent --update --seconds 60 --hitcount 4 -j DROP + ``` + + **Log dropped packets:** + ```bash + sudo iptables -A INPUT -j LOG --log-prefix "iptables-dropped: " + ``` + +15. **Check for Docker interference:** + Docker manipulates iptables directly, which can bypass UFW: + ```bash + sudo iptables -L DOCKER -n + ``` + + To prevent Docker from bypassing UFW, edit `/etc/docker/daemon.json`: + ```json + { + "iptables": false + } + ``` + + Or use firewalld instead for better Docker integration. + +16. **Check connection tracking:** + ```bash + sudo conntrack -L + cat /proc/sys/net/netfilter/nf_conntrack_count + cat /proc/sys/net/netfilter/nf_conntrack_max + ``` + +17. **Review logging:** + ```bash + sudo grep UFW /var/log/syslog | tail -20 + sudo tail -20 /var/log/ufw.log + ``` + +18. **Generate hardening recommendations:** + Based on findings, suggest: + - Enable firewall if not active + - Block unnecessary ports + - Restrict services to local interface only + - Install fail2ban for brute-force protection + - Change SSH port (optional, security through obscurity) + - Disable root SSH login + - Use key-based SSH authentication only + - Close database ports from external access + - Remove unnecessary services + - Enable connection rate limiting + - Set up intrusion detection (OSSEC, Snort) + - Regular security updates + - Monitor logs regularly + +19. 
**Provide firewall management commands:** + + **UFW:** + - `sudo ufw status` - Check status + - `sudo ufw enable` - Enable firewall + - `sudo ufw disable` - Disable firewall + - `sudo ufw allow ` - Allow port + - `sudo ufw deny ` - Deny port + - `sudo ufw delete ` - Delete rule + - `sudo ufw reset` - Reset to default + - `sudo ufw logging on` - Enable logging + + **iptables:** + - `sudo iptables -L` - List rules + - `sudo iptables -A INPUT -p tcp --dport -j ACCEPT` - Allow port + - `sudo iptables -D INPUT ` - Delete rule + - `sudo iptables-save > /etc/iptables/rules.v4` - Save rules + - `sudo iptables-restore < /etc/iptables/rules.v4` - Restore rules + +20. **Report findings:** + Summarize: + - Firewall status (active/inactive) + - List of open ports + - Services listening on each port + - Current firewall rules + - Security issues found + - Recommended hardening measures + - Priority actions (critical vs. nice-to-have) + +## Important notes: +- Test firewall rules carefully to avoid locking yourself out +- Always have a backup access method (console/KVM) before changing SSH rules +- UFW and iptables can conflict - use one or the other +- Docker can bypass UFW - special configuration needed +- Deny incoming by default, allow specific services +- Keep logs for intrusion detection +- Regularly review and update firewall rules +- Consider using VPN for remote access instead of exposing services +- fail2ban is essential for SSH protection +- Don't expose databases to the internet diff --git a/commands/sysadmin/linux-desktop/security/posture-diagnostics/security-posture-check.md b/commands/sysadmin/linux-desktop/security/posture-diagnostics/security-posture-check.md new file mode 100644 index 0000000000000000000000000000000000000000..7bf647947b08742e409d23818bed40d0da9438b3 --- /dev/null +++ b/commands/sysadmin/linux-desktop/security/posture-diagnostics/security-posture-check.md @@ -0,0 +1,98 @@ +You are conducting a comprehensive security posture evaluation for this 
Linux desktop system. + +## Your Task + +Perform a thorough security assessment of the system and provide a detailed report with actionable recommendations. + +## Assessment Areas + +### 1. Firewall Status +- Check if UFW (Uncomplicated Firewall) or iptables is active +- Review firewall rules and policies +- Identify any concerning open ports + +### 2. System Updates +- Check for available security updates +- Verify automatic update configuration +- Review update history for critical patches + +### 3. User Account Security +- List user accounts and their privileges +- Check for accounts with sudo access +- Identify any accounts without passwords or weak configurations +- Review SSH key configurations + +### 4. SSH Security +- Check if SSH is running +- Review SSH configuration (`/etc/ssh/sshd_config`) +- Verify key-based authentication settings +- Check for root login permission +- Review allowed authentication methods + +### 5. Running Services +- List all active services +- Identify unnecessary services that could be disabled +- Check for services listening on external interfaces + +### 6. File Permissions +- Check critical system files (`/etc/passwd`, `/etc/shadow`, `/etc/sudoers`) +- Review permissions on home directories +- Identify world-writable files in system directories + +### 7. Antivirus/Malware Protection +- Check if ClamAV or other antivirus is installed +- Verify if definitions are up to date +- Check recent scan history + +### 8. Security Packages +- Verify installation of: fail2ban, apparmor, aide, rkhunter, lynis +- Check their configuration and status + +### 9. Network Security +- Review listening ports and services +- Check for unusual network connections +- Verify network configuration security + +### 10. 
Audit Logs +- Check if auditd is running +- Review recent authentication logs +- Look for failed login attempts +- Check for suspicious sudo usage + +## Output Format + +Provide your findings in the following structured format: + +``` +SECURITY POSTURE ASSESSMENT +Generated: [timestamp] + +=== SUMMARY === +Overall Security Level: [Critical/Poor/Fair/Good/Excellent] +Critical Issues Found: [number] +Warnings: [number] +Recommendations: [number] + +=== CRITICAL ISSUES === +[List any critical security problems that need immediate attention] + +=== WARNINGS === +[List security concerns that should be addressed] + +=== CURRENT PROTECTIONS === +[List active security measures in place] + +=== RECOMMENDATIONS === +[Prioritized list of security improvements] + +=== DETAILED FINDINGS === +[Detailed breakdown by assessment area] +``` + +## Important Notes + +- Use sudo when necessary to access system files and configurations +- Be thorough but focus on actionable findings +- Prioritize issues by severity +- Provide specific commands for remediation where applicable +- Consider the desktop/workstation context (not a server) diff --git a/commands/sysadmin/linux-desktop/security/probe-vulnerabilities.md b/commands/sysadmin/linux-desktop/security/probe-vulnerabilities.md new file mode 100644 index 0000000000000000000000000000000000000000..107fb2350aeede3eead2ca4bbb026e44fc24a879 --- /dev/null +++ b/commands/sysadmin/linux-desktop/security/probe-vulnerabilities.md @@ -0,0 +1,59 @@ +--- +description: Intelligently probe system for security vulnerabilities +tags: [security, audit, vulnerabilities, hardening, project, gitignored] +--- + +You are helping the user identify security vulnerabilities they may wish to remediate. + +## Process + +1. **System update status** + - Check for security updates: `apt list --upgradable | grep -i security` + - Check unattended-upgrades status: `systemctl status unattended-upgrades` + +2. 
**Open ports and services** + - List listening ports: `sudo ss -tlnp` + - Identify unnecessary services: `systemctl list-unit-files --state=enabled` + - Check firewall status: `sudo ufw status verbose` + +3. **SSH configuration review** + - Check `sshd_config` for: + - PermitRootLogin (should be 'no') + - PasswordAuthentication (consider disabling) + - Port (consider non-standard) + - Check for weak keys: `ssh-keygen -l -f ~/.ssh/id_*.pub` + +4. **File permissions audit** + - Check world-writable files: `find /home -type f -perm -002 2>/dev/null | head -20` + - Check SUID/SGID binaries: `find / -type f \( -perm -4000 -o -perm -2000 \) 2>/dev/null` + - Review sensitive file permissions: `~/.ssh`, `~/.gnupg` + +5. **User and authentication** + - List users with shell access: `cat /etc/passwd | grep -v nologin | grep -v false` + - Check password policy: `sudo chage -l $USER` + - Review sudo configuration: `sudo -l` + +6. **Network security** + - Check for IPv6 if not needed + - Review DNS settings + - Check for proxy configurations + +7. **Application security** + - Check for outdated software with known CVEs + - Review browser security settings + - Check for auto-updating mechanisms + +8. 
**Suggest security tools** + - `lynis` - Security auditing tool + - `rkhunter` - Rootkit scanner + - `aide` - File integrity checker + - `fail2ban` - Intrusion prevention + +## Output + +Provide a security report showing: +- Critical vulnerabilities (requiring immediate attention) +- Medium priority issues +- Low priority recommendations +- Suggested remediation steps for each issue +- Security hardening recommendations diff --git a/commands/sysadmin/linux-desktop/ssh/manage-ssh-keys.md b/commands/sysadmin/linux-desktop/ssh/manage-ssh-keys.md new file mode 100644 index 0000000000000000000000000000000000000000..f0dd325d3893a63895829caacddbaf9e40d63850 --- /dev/null +++ b/commands/sysadmin/linux-desktop/ssh/manage-ssh-keys.md @@ -0,0 +1,91 @@ +--- +description: Review installed SSH key pairs and delete old ones if desired +tags: [ssh, security, keys, configuration, project, gitignored] +--- + +You are helping the user manage their SSH keys. + +## Process + +1. **List SSH keys** + - List keys in `~/.ssh/`: `ls -la ~/.ssh/` + - Identify key pairs: + - Private keys (no extension, or `.pem`) + - Public keys (`.pub`) + - Known hosts file + - Config file + +2. **Display public keys with details** + - For each public key: + ```bash + for key in ~/.ssh/*.pub; do + echo "=== $key ===" + ssh-keygen -l -f "$key" + echo "" + done + ``` + - Shows: key length, fingerprint, comment + +3. **Check if keys are loaded in ssh-agent** + - List loaded keys: `ssh-add -l` + - If agent not running: `eval "$(ssh-agent -s)"` + +4. **Identify key usage** + - Check `~/.ssh/config` for key assignments + - Ask user about each key: + - Where is it used? (GitHub, servers, etc.) + - Is it still needed? + - When was it created? + +5. **Check key security** + - Verify key types (RSA, ED25519, etc.) + - Check key lengths: + - RSA: Minimum 2048-bit, prefer 4096-bit + - ED25519: 256-bit (modern, recommended) + - Suggest upgrading old/weak keys + +6. 
**Delete old/unused keys** + - For each key user wants to remove: + - Remove it from ssh-agent first, while the key files still exist (`ssh-add -d` reads the public key file): `ssh-add -d ~/.ssh/old_key` + - Then delete the key pair: + ```bash + rm ~/.ssh/old_key + rm ~/.ssh/old_key.pub + ``` + - Update `~/.ssh/config` if key was referenced + +7. **Generate new keys if needed** + - Suggest ED25519 for new keys: + ```bash + ssh-keygen -t ed25519 -C "user@email.com" + ``` + - Or RSA 4096: + ```bash + ssh-keygen -t rsa -b 4096 -C "user@email.com" + ``` + +8. **Update permissions** + - Ensure correct permissions: + ```bash + chmod 700 ~/.ssh + chmod 600 ~/.ssh/id_* + chmod 644 ~/.ssh/id_*.pub + chmod 600 ~/.ssh/config + ``` + +9. **Add keys to ssh-agent** + - Add keys: `ssh-add ~/.ssh/id_ed25519` + - Persist across reboots (add to `~/.bashrc`): + ```bash + eval "$(ssh-agent -s)" + ssh-add ~/.ssh/id_ed25519 + ``` + +## Output + +Provide a summary showing: +- List of SSH keys with details (type, length, fingerprint) +- Keys currently loaded in ssh-agent +- Keys deleted (if any) +- New keys generated (if any) +- Security recommendations +- Next steps for adding keys to services diff --git a/commands/sysadmin/linux-desktop/storage/health-checks/btrfs-snapper-health.md b/commands/sysadmin/linux-desktop/storage/health-checks/btrfs-snapper-health.md new file mode 100644 index 0000000000000000000000000000000000000000..9c400b95e31b371cedc12c43e4f3edcc5eef25f9 --- /dev/null +++ b/commands/sysadmin/linux-desktop/storage/health-checks/btrfs-snapper-health.md @@ -0,0 +1,41 @@ +# BTRFS and Snapper Snapshot Health Check + +You are helping the user check their BTRFS filesystem configuration and Snapper snapshot setup. + +## Your tasks: + +1. **Check if BTRFS is in use:** + - Run `df -T` to identify BTRFS filesystems + - Run `sudo btrfs filesystem show` to display all BTRFS filesystems + - Run `mount | grep btrfs` to see mounted BTRFS filesystems with their options + +2.
**Check BTRFS filesystem health:** + - For each BTRFS filesystem found, run `sudo btrfs filesystem usage ` + - Run `sudo btrfs device stats ` to check for device errors + - Run `sudo btrfs scrub status ` to check scrub status + +3. **Check Snapper configuration:** + - Check if Snapper is installed: `which snapper` + - If not installed, ask the user if they want to install it + - List Snapper configurations: `sudo snapper list-configs` + - For each configuration, show snapshots: `sudo snapper -c list` + - Show Snapper configuration details: `sudo snapper -c get-config` + +4. **Analyze snapshot usage:** + - Check disk space used by snapshots + - Identify if there are too many snapshots that should be cleaned up + - Check automatic snapshot policies + +5. **Report findings:** + - Summarize BTRFS health status + - Report on snapshot configurations and disk usage + - Provide recommendations for: + - Snapshot retention policies if too many snapshots exist + - Running scrub if it hasn't been run recently + - Fixing any errors or issues detected + - Setting up Snapper if BTRFS is in use but Snapper is not configured + +## Important notes: +- Use sudo for all BTRFS and Snapper commands +- Be clear about what you find and what actions you recommend +- If BTRFS is not in use, inform the user and exit gracefully diff --git a/commands/sysadmin/linux-desktop/storage/health-checks/check-drive-health.md b/commands/sysadmin/linux-desktop/storage/health-checks/check-drive-health.md new file mode 100644 index 0000000000000000000000000000000000000000..943ff0c86693f75a35d434c04015a8e80922fda4 --- /dev/null +++ b/commands/sysadmin/linux-desktop/storage/health-checks/check-drive-health.md @@ -0,0 +1,83 @@ +# Hard Drive Health Check + +You are helping the user run comprehensive health checks on all storage drives (SSD, HDD, NVMe, or mixed configurations). + +## Your tasks: + +1. 
**Identify all storage devices:** + - List all block devices: `lsblk -o NAME,SIZE,TYPE,MOUNTPOINT,MODEL,TRAN` + - Get detailed disk information: `sudo lshw -class disk -short` + - Identify device types (NVMe, SATA SSD, SATA HDD, etc.) + +2. **Check SMART status for each device:** + + **For SATA/SAS drives (SSD and HDD):** + - Check if smartmontools is installed: `which smartctl` + - If not installed, ask user if they want to install it: `sudo apt install smartmontools` + - For each drive, run: + - `sudo smartctl -i /dev/sdX` (device info) + - `sudo smartctl -H /dev/sdX` (health status) + - `sudo smartctl -A /dev/sdX` (attributes) + - `sudo smartctl -l error /dev/sdX` (error log) + + **For NVMe drives:** + - Check NVMe tools: `which nvme` + - For each NVMe drive, run: + - `sudo nvme list` + - `sudo nvme smart-log /dev/nvmeXn1` + - `sudo smartctl -a /dev/nvmeXn1` (if smartctl supports NVMe) + +3. **Analyze drive health indicators:** + + **For SSDs:** + - Wear leveling count + - Media wearout indicator + - Available spare capacity + - Percentage used + - Total bytes written + - Power-on hours + - Reallocated sectors + + **For HDDs:** + - Reallocated sector count + - Current pending sectors + - Offline uncorrectable sectors + - Spin retry count + - Power-on hours + - Temperature + - UDMA CRC errors + + **For NVMe:** + - Critical warning + - Temperature + - Available spare + - Percentage used + - Data units read/written + - Power cycles + - Unsafe shutdowns + +4. **Check filesystem health:** + - Review dmesg for disk errors: `sudo dmesg | grep -i "error\|fail" | grep -i "sd\|nvme"` + - Check system logs: `sudo journalctl -p err -g "sd\|nvme" --since "7 days ago"` + +5. 
**Report findings:** + - Summarize each drive's health status + - Highlight any concerning indicators: + - High reallocated sectors + - High wear level on SSDs + - Temperature issues + - Errors in logs + - Pending sectors + - Calculate estimated remaining lifespan for SSDs based on wear indicators + - Provide recommendations: + - Drives that should be replaced soon + - Drives that need monitoring + - Whether to enable SMART monitoring if not active + - Backup recommendations if drives show signs of failure + +## Important notes: +- Use sudo for all SMART commands +- Be clear about the severity of any issues found +- Distinguish between informational metrics and critical warnings +- If smartmontools is not installed, offer to install it +- Some drives may not support all SMART features - this is normal diff --git a/commands/sysadmin/linux-desktop/storage/network-mounts/setup-nfs-mounts.md b/commands/sysadmin/linux-desktop/storage/network-mounts/setup-nfs-mounts.md new file mode 100644 index 0000000000000000000000000000000000000000..bb9941a6dbc54ae4268b2b86f38f1d1ebf7be651 --- /dev/null +++ b/commands/sysadmin/linux-desktop/storage/network-mounts/setup-nfs-mounts.md @@ -0,0 +1,154 @@ +# NFS Mount Setup Assistant + +You are helping the user set up NFS (Network File System) mounts to remote systems. + +## Your tasks: + +1. **Check NFS client prerequisites:** + - Check if NFS client utilities are installed: `dpkg -l | grep nfs-common` + - If not installed: + ```bash + sudo apt update + sudo apt install nfs-common + ``` + +2. **Gather mount information from the user:** + Ask the user for: + - Remote NFS server IP or hostname (e.g., `10.0.0.100`) + - Remote export path (e.g., `/srv/nfs/share`) + - Local mount point (e.g., `/mnt/nfs/remote-share`) + - Mount options preferences (default is usually fine, but ask if they need specific options) + +3. 
**Test NFS server accessibility:** + - Check if remote server is reachable: `ping -c 3 ` + - List available NFS exports from the remote server: + ```bash + showmount -e + ``` + - If this fails, troubleshoot: + - Check if NFS ports are open (2049, 111) + - Verify firewall settings + +4. **Create local mount point:** + ```bash + sudo mkdir -p + ``` + +5. **Test mount temporarily:** + Before making it permanent, test the mount: + ```bash + sudo mount -t nfs : + ``` + + Verify the mount: + ```bash + df -h | grep + ls -la + ``` + +6. **Configure mount options:** + Discuss common NFS mount options with the user: + - `rw` / `ro` - Read-write or read-only + - `hard` / `soft` - Hard mount (recommended) or soft mount + - `intr` - Allow interruption of NFS requests + - `noatime` - Don't update access times (performance) + - `vers=4` - Force NFSv4 (recommended) + - `timeo=14` - Timeout value + - `retrans=3` - Number of retransmits + - `_netdev` - Required for network filesystems + - `nofail` - Don't fail boot if mount unavailable + + Recommended default options: + ``` + rw,hard,intr,vers=4,_netdev,nofail + ``` + +7. **Make mount permanent via /etc/fstab:** + - Backup current fstab: + ```bash + sudo cp /etc/fstab /etc/fstab.backup.$(date +%Y%m%d_%H%M%S) + ``` + + - Add entry to /etc/fstab: + ``` + : nfs 0 0 + ``` + + - Test fstab entry without rebooting: + ```bash + sudo umount + sudo mount -a + df -h | grep + ``` + +8. 
**Set up automount with systemd (alternative to fstab):** + If the user prefers automount, create systemd mount units: + + Create `/etc/systemd/system/mnt-nfs-remote\x2dshare.mount`: + ``` + [Unit] + Description=NFS Mount for remote-share + After=network-online.target + Wants=network-online.target + + [Mount] + What=: + Where= + Type=nfs + Options= + + [Install] + WantedBy=multi-user.target + ``` + + Enable and start: + ```bash + sudo systemctl daemon-reload + sudo systemctl enable mnt-nfs-remote\\x2dshare.mount + sudo systemctl start mnt-nfs-remote\\x2dshare.mount + sudo systemctl status mnt-nfs-remote\\x2dshare.mount + ``` + +9. **Configure permissions:** + Check and configure local mount point permissions: + ```bash + ls -la + ``` + + If needed, adjust ownership: + ```bash + sudo chown : + ``` + +10. **Test and verify:** + - Create a test file: + ```bash + touch /test-file + ls -la /test-file + ``` + - Check from remote server if possible + - Verify mount survives reboot (ask user to test) + +11. **Troubleshooting guidance:** + If issues occur, check: + - Network connectivity: `ping ` + - NFS service on remote: `showmount -e ` + - Firewall rules on both client and server + - SELinux/AppArmor policies (if applicable) + - NFS server exports configuration (`/etc/exports` on server) + - Mount logs: `sudo journalctl -u ` or `dmesg | grep nfs` + +12. 
**Provide best practices:** + - Use NFSv4 when possible (better performance and security) + - Use `_netdev` option for network mounts + - Use `nofail` to prevent boot issues if NFS server is down + - Consider using autofs for on-demand mounting + - Document all NFS mounts (keep a list of what's mounted where) + - Regular monitoring of NFS mount health + +## Important notes: +- Always backup /etc/fstab before editing +- Test mounts before making them permanent +- Use `_netdev` and `nofail` options to prevent boot issues +- Systemd mount units need escaped names (`/` separators become `-`, and literal `-` characters become `\x2d`; generate with `systemd-escape -p`) +- Ensure NFS server has proper export permissions configured diff --git a/commands/sysadmin/linux-desktop/storage/network-mounts/setup-smb-mounts.md b/commands/sysadmin/linux-desktop/storage/network-mounts/setup-smb-mounts.md new file mode 100644 index 0000000000000000000000000000000000000000..4a0d62b0a639ec9cb06e4c13397efd0c80aee5a7 --- /dev/null +++ b/commands/sysadmin/linux-desktop/storage/network-mounts/setup-smb-mounts.md @@ -0,0 +1,201 @@ +# SMB/CIFS Mount Setup Assistant + +You are helping the user set up SMB/CIFS (Windows/Samba) mounts to remote systems. + +## Your tasks: + +1. **Check SMB client prerequisites:** + - Check if CIFS utilities are installed: `dpkg -l | grep cifs-utils` + - If not installed: + ```bash + sudo apt update + sudo apt install cifs-utils + ``` + +2. **Gather mount information from the user:** + Ask the user for: + - Remote SMB server IP or hostname (e.g., `10.0.0.100` or `nas.local`) + - Share name (e.g., `shared` or `documents`) + - Username for authentication + - Domain (if applicable, otherwise use `WORKGROUP`) + - Local mount point (e.g., `/mnt/smb/remote-share`) + - Whether they want to store credentials securely + +3.
**Test SMB server accessibility:** + - Check if remote server is reachable: `ping -c 3 ` + - List available shares (if credentials are available): + ```bash + smbclient -L // -U + ``` + - If this fails, troubleshoot: + - Check if SMB ports are open (445, 139) + - Verify firewall settings + +4. **Set up credentials file (recommended for security):** + Create a credentials file to avoid storing passwords in /etc/fstab: + + ```bash + sudo mkdir -p /etc/samba/credentials + sudo touch /etc/samba/credentials/ + sudo chmod 700 /etc/samba/credentials + sudo chmod 600 /etc/samba/credentials/ + ``` + + Edit the credentials file: + ``` + username= + password= + domain= + ``` + + Secure it: + ```bash + sudo chown root:root /etc/samba/credentials/ + sudo chmod 600 /etc/samba/credentials/ + ``` + +5. **Create local mount point:** + ```bash + sudo mkdir -p + ``` + +6. **Test mount temporarily:** + Before making it permanent, test the mount: + ```bash + sudo mount -t cifs /// \ + -o credentials=/etc/samba/credentials/,uid=$(id -u),gid=$(id -g) + ``` + + Verify the mount: + ```bash + df -h | grep + ls -la + ``` + +7. **Configure mount options:** + Discuss common CIFS mount options with the user: + - `credentials=` - Use credentials file + - `uid=` - Set file owner (use `id -u`) + - `gid=` - Set file group (use `id -g`) + - `file_mode=0644` - File permissions + - `dir_mode=0755` - Directory permissions + - `vers=3.0` - SMB protocol version (2.0, 2.1, 3.0, 3.1.1) + - `iocharset=utf8` - Character set + - `_netdev` - Required for network filesystems + - `nofail` - Don't fail boot if mount unavailable + - `noauto` - Don't mount automatically (use with autofs) + - `rw` / `ro` - Read-write or read-only + + Recommended default options: + ``` + credentials=/etc/samba/credentials/,uid=,gid=,file_mode=0644,dir_mode=0755,vers=3.0,iocharset=utf8,_netdev,nofail + ``` + +8. 
**Detect SMB version:** + Help determine the best SMB version to use: + ```bash + smbclient -L // -U --option='client max protocol=SMB3' + ``` + + Common versions: + - SMB 1.0 - Legacy, insecure (avoid) + - SMB 2.0 - Windows Vista/Server 2008 + - SMB 2.1 - Windows 7/Server 2008 R2 + - SMB 3.0 - Windows 8/Server 2012 + - SMB 3.1.1 - Windows 10/Server 2016+ (recommended) + +9. **Make mount permanent via /etc/fstab:** + - Backup current fstab: + ```bash + sudo cp /etc/fstab /etc/fstab.backup.$(date +%Y%m%d_%H%M%S) + ``` + + - Add entry to /etc/fstab: + ``` + /// cifs 0 0 + ``` + + - Test fstab entry without rebooting: + ```bash + sudo umount + sudo mount -a + df -h | grep + ``` + +10. **Set up automount with systemd (alternative to fstab):** + If the user prefers automount, create systemd mount units: + + Create `/etc/systemd/system/mnt-smb-remote\x2dshare.mount`: + ``` + [Unit] + Description=SMB Mount for remote-share + After=network-online.target + Wants=network-online.target + + [Mount] + What=/// + Where= + Type=cifs + Options= + + [Install] + WantedBy=multi-user.target + ``` + + Enable and start: + ```bash + sudo systemctl daemon-reload + sudo systemctl enable mnt-smb-remote\\x2dshare.mount + sudo systemctl start mnt-smb-remote\\x2dshare.mount + sudo systemctl status mnt-smb-remote\\x2dshare.mount + ``` + +11. **Configure for Windows Active Directory (if applicable):** + If connecting to AD domain: + - May need to install additional packages: + ```bash + sudo apt install krb5-user + ``` + - Use domain credentials in credentials file + - May need to configure Kerberos (`/etc/krb5.conf`) + - Use `sec=krb5` option if Kerberos is configured + +12. **Test and verify:** + - Create a test file: + ```bash + touch /test-file + ls -la /test-file + ``` + - Check permissions and ownership + - Verify mount survives reboot (ask user to test) + +13. 
**Troubleshooting guidance:** + If issues occur, check: + - Network connectivity: `ping ` + - SMB service on remote: `smbclient -L // -N` (null session) + - Firewall rules on both client and server + - SMB version compatibility: try different `vers=` options + - Credentials: test with `smbclient /// -U ` + - Mount logs: `sudo journalctl -u ` or `dmesg | grep cifs` + - Permissions issues: check `uid`, `gid`, `file_mode`, `dir_mode` + - Check kernel logs: `dmesg | tail -20` + +14. **Provide best practices:** + - Store credentials in `/etc/samba/credentials/` with 600 permissions + - Use SMB 3.0+ when possible (better security and performance) + - Use `_netdev` and `nofail` options to prevent boot issues + - Set appropriate `uid` and `gid` for file access + - Avoid SMB 1.0 (deprecated and insecure) + - Consider using autofs for on-demand mounting + - Document all SMB mounts + - Regular monitoring of SMB mount health + - Keep credentials files secure (root ownership, 600 permissions) + +## Important notes: +- Always backup /etc/fstab before editing +- Never store passwords directly in /etc/fstab +- Use credentials files with proper permissions (600, root:root) +- Test mounts before making them permanent +- Use `_netdev` and `nofail` options to prevent boot issues +- Systemd mount units need escaped names (`/` separators become `-`, and literal `-` characters become `\x2d`; generate with `systemd-escape -p`) +- SMB 1.0 is deprecated and should be avoided diff --git a/commands/sysadmin/linux-desktop/storage/raid/check-raid-config.md b/commands/sysadmin/linux-desktop/storage/raid/check-raid-config.md new file mode 100644 index 0000000000000000000000000000000000000000..97558d6bb06c32c1d0995b19b94cbb74414b22ec --- /dev/null +++ b/commands/sysadmin/linux-desktop/storage/raid/check-raid-config.md @@ -0,0 +1,54 @@ +# RAID Configuration Check + +You are helping the user identify and analyze their RAID configuration (software or hardware). + +## Your tasks: + +1.
**Detect RAID type:** + + **Software RAID (mdadm):** + - Check if mdadm is installed: `which mdadm` + - List all MD devices: `cat /proc/mdstat` + - Get detailed info for each array: `sudo mdadm --detail /dev/md*` + - Check mdadm configuration: `cat /etc/mdadm/mdadm.conf` (if exists) + + **LVM RAID:** + - Check for LVM: `sudo pvs`, `sudo vgs`, `sudo lvs` + - Check for RAID logical volumes: `sudo lvs -a -o +devices,segtype` + + **Hardware RAID:** + - Check for common hardware RAID controllers: + - MegaRAID: `sudo lspci | grep -i raid` and `which megacli` or `which storcli` + - HP Smart Array: `which hpacucli` or `which ssacli` + - Adaptec: `which arcconf` + - List block devices: `lsblk` and `sudo lshw -class disk -class storage` + + **ZFS (if applicable):** + - Check if ZFS is installed: `which zfs` + - List ZFS pools: `sudo zpool status` + - List ZFS datasets: `sudo zfs list` + +2. **Analyze RAID health:** + - For software RAID: check array status, degraded arrays, sync status + - For hardware RAID: if tools are available, check controller and disk status + - Check for any failed or missing drives + - Review disk errors: `sudo smartctl -a /dev/sd*` for member disks + +3. **Report configuration details:** + - RAID level (RAID 0, 1, 5, 6, 10, etc.) + - Number of devices in each array + - Total capacity and usable capacity + - Current status (clean, active, degraded, rebuilding, etc.) + - Performance configuration (chunk size, stripe size) + +4. 
**Provide recommendations:** + - If arrays are degraded, suggest immediate action + - If no monitoring is configured, suggest setting up monitoring + - If hardware RAID tools are missing, suggest installation + - Best practices for the detected configuration + +## Important notes: +- Use sudo for all RAID-related commands +- If no RAID is detected, clearly state "No RAID configuration found" +- Be specific about what type of RAID is in use +- Highlight any critical issues requiring immediate attention diff --git a/commands/sysadmin/linux-desktop/system-health/optimize-pipewire.md b/commands/sysadmin/linux-desktop/system-health/optimize-pipewire.md new file mode 100644 index 0000000000000000000000000000000000000000..9540f972fd5fff378caf5d277d259bdbcd14018d --- /dev/null +++ b/commands/sysadmin/linux-desktop/system-health/optimize-pipewire.md @@ -0,0 +1,57 @@ +--- +description: Evaluate and optimize PipeWire audio setup +tags: [audio, pipewire, optimization, system, project, gitignored] +--- + +You are helping the user evaluate and optimize their PipeWire audio setup. + +## Process + +1. **Check PipeWire status** + - Verify PipeWire is running: `systemctl --user status pipewire pipewire-pulse wireplumber` + - Check version: `pipewire --version` + - List audio devices: `pactl list sinks short` and `pactl list sources short` + +2. **Evaluate current configuration** + - Check config files in `~/.config/pipewire/` and `/usr/share/pipewire/` + - Review sample rate: `pactl info | grep "Default Sample"` + - Check buffer settings and latency + +3. **Test audio quality** + - Check for audio issues: `journalctl --user -u pipewire -n 50` + - Look for xruns or underruns in logs + - Test different sample rates if needed + +4. 
**Optimization suggestions** + - For low latency (music production): + - Adjust `default.clock.rate` and `default.clock.allowed-rates` + - Set `default.clock.quantum` (64, 128, 256) + - Configure `api.alsa.period-size` + + - For quality (media playback): + - Higher sample rates (48000, 96000) + - Larger buffer sizes + + - For Bluetooth: + - Check codec usage: `pactl list | grep -i codec` + - Suggest enabling higher quality codecs (LDAC, aptX) + +5. **Recommended tools** + - `pavucontrol` - GUI volume control + - `helvum` - PipeWire patchbay + - `qpwgraph` - Qt-based graph manager + - `easyeffects` - Audio effects for PipeWire + +6. **Create optimized config if needed** + - Offer to create `~/.config/pipewire/pipewire.conf.d/` overrides + - Suggest settings based on use case + +## Output + +Provide a report showing: +- PipeWire status and version +- Current audio configuration +- Detected issues (if any) +- Optimization recommendations +- Suggested tools to install +- Configuration changes (if applicable) diff --git a/commands/sysadmin/linux-desktop/utilities/diagnose-printers.md b/commands/sysadmin/linux-desktop/utilities/diagnose-printers.md new file mode 100644 index 0000000000000000000000000000000000000000..c7f3a5c102635e51fbd6d32a0ee1a24c8106b3e5 --- /dev/null +++ b/commands/sysadmin/linux-desktop/utilities/diagnose-printers.md @@ -0,0 +1,54 @@ +--- +description: Diagnose installed printers and suggest removal of unused ones +tags: [system, printers, cups, cleanup, project, gitignored] +--- + +You are helping the user review installed printers and identify ones that can be removed. + +## Process + +1. **Check CUPS status** + - Verify CUPS is running: `systemctl status cups` + - Access CUPS web interface info: check `http://localhost:631` + +2. **List configured printers** + - Run: `lpstat -p -d` + - Show detailed info: `lpstat -l -p` + - List printer queues: `lpq -a` + +3. 
**Check printer usage** + - View printer job history if available + - Check `/var/log/cups/page_log` for usage patterns + - Identify printers with no recent jobs + +4. **Identify printer drivers** + - List installed printer drivers: `lpinfo -m | grep -i ` + - Check for unnecessary driver packages: `dpkg -l | grep -E "printer|cups|hplip"` + +5. **Test printer connectivity** + - For network printers, ping their IPs + - Check if printers are still on the network + - Test print to each printer: `lp -d /etc/hosts` + +6. **Suggest removals** + - Old/disconnected printers + - Duplicate printer entries + - Printers user no longer has access to + - Unnecessary drivers + +7. **Cleanup commands** + - Remove printer: `lpadmin -x ` + - Remove unused drivers: `apt remove ` + - Clean print queue: `cancel -a ` + - Disable CUPS if no printers needed: `sudo systemctl disable cups` + +## Output + +Provide a report showing: +- List of configured printers with status +- Last usage date (if available) +- Network connectivity status +- Installed printer drivers +- Recommendations for removal +- Cleanup commands +- Potential space savings diff --git a/commands/sysadmin/linux-desktop/virtualization/check-virtualization.md b/commands/sysadmin/linux-desktop/virtualization/check-virtualization.md new file mode 100644 index 0000000000000000000000000000000000000000..265037ee8fa144d874a715c91daa2106f6490d03 --- /dev/null +++ b/commands/sysadmin/linux-desktop/virtualization/check-virtualization.md @@ -0,0 +1,318 @@ +# Check Virtualization Setup + +You are helping the user check if the system is properly set up to run virtualized workloads and remediate any issues. + +## Your tasks: + +1. **Check if CPU supports virtualization:** + + **Intel (VT-x):** + ```bash + grep -E "vmx" /proc/cpuinfo + ``` + + **AMD (AMD-V):** + ```bash + grep -E "svm" /proc/cpuinfo + ``` + + If no output, virtualization is not supported or not enabled in BIOS. + +2. 
**Check if virtualization is enabled in BIOS:** + ```bash + sudo apt install cpu-checker + sudo kvm-ok + ``` + + If it says KVM can be used, virtualization is enabled. + If not, user needs to enable it in BIOS/UEFI. + +3. **Check current virtualization software:** + + **KVM/QEMU:** + ```bash + which qemu-system-x86_64 + lsmod | grep kvm + ``` + + **VirtualBox:** + ```bash + which virtualbox + VBoxManage --version + ``` + + **VMware:** + ```bash + which vmware + systemctl status vmware + ``` + + **Docker (containerization):** + ```bash + docker --version + systemctl status docker + ``` + +4. **Check KVM kernel modules:** + ```bash + lsmod | grep kvm + ``` + + Should show: + - `kvm_intel` (for Intel) + - `kvm_amd` (for AMD) + - `kvm` (base module) + + If not loaded, try: + ```bash + sudo modprobe kvm + sudo modprobe kvm_intel # or kvm_amd + ``` + +5. **Install KVM and related tools (if not installed):** + ```bash + sudo apt update + sudo apt install qemu-kvm libvirt-daemon-system libvirt-clients bridge-utils virt-manager + ``` + +6. **Check libvirt status:** + ```bash + sudo systemctl status libvirtd + ``` + + If not running: + ```bash + sudo systemctl enable libvirtd + sudo systemctl start libvirtd + ``` + +7. **Add user to required groups:** + ```bash + sudo usermod -aG libvirt $USER + sudo usermod -aG kvm $USER + ``` + + User needs to log out and back in for group changes to take effect. + +8. **Verify user permissions:** + ```bash + groups + ``` + + Should include: `libvirt` and `kvm` + +9. **Check libvirt connectivity:** + ```bash + virsh list --all + ``` + + If permission denied, user is not in libvirt group or not logged back in. + +10. **Check virtualization networking:** + + **Default network:** + ```bash + virsh net-list --all + ``` + + If default network is not active: + ```bash + virsh net-start default + virsh net-autostart default + ``` + + **Bridge networking:** + ```bash + ip link show + brctl show # if bridge-utils installed + ``` + +11. 
**Check nested virtualization (if needed):** + + **For Intel:** + ```bash + cat /sys/module/kvm_intel/parameters/nested + ``` + + **For AMD:** + ```bash + cat /sys/module/kvm_amd/parameters/nested + ``` + + If shows `N` or `0`, nested virtualization is disabled. + + To enable: + ```bash + echo "options kvm_intel nested=1" | sudo tee /etc/modprobe.d/kvm-intel.conf + # or for AMD: + echo "options kvm_amd nested=1" | sudo tee /etc/modprobe.d/kvm-amd.conf + ``` + + Then reload: + ```bash + sudo modprobe -r kvm_intel + sudo modprobe kvm_intel + ``` + +12. **Check IOMMU for PCIe passthrough (if needed):** + ```bash + dmesg | grep -i iommu + ``` + + If IOMMU is needed, add to kernel parameters in `/etc/default/grub`: + ``` + GRUB_CMDLINE_LINUX_DEFAULT="quiet splash intel_iommu=on" + # or for AMD: + GRUB_CMDLINE_LINUX_DEFAULT="quiet splash amd_iommu=on" + ``` + + Then update grub: + ```bash + sudo update-grub + sudo reboot + ``` + +13. **Check available storage pools:** + ```bash + virsh pool-list --all + ``` + + Create default pool if needed: + ```bash + virsh pool-define-as default dir --target /var/lib/libvirt/images + virsh pool-start default + virsh pool-autostart default + ``` + +14. **Check system resources for virtualization:** + ```bash + free -h + df -h /var/lib/libvirt/images + cat /proc/cpuinfo | grep "processor" | wc -l + ``` + + Recommendations: + - At least 4GB RAM for light VMs + - At least 20GB free disk space + - Multiple CPU cores recommended + +15. **Test VM creation (small test):** + ```bash + virt-install --name test-vm \ + --ram 512 \ + --disk size=1 \ + --cdrom /path/to/iso \ + --graphics vnc \ + --check all=off \ + --dry-run + ``` + +16. **Check for conflicting virtualization:** + VirtualBox and KVM can sometimes conflict. Check if both are installed: + ```bash + dpkg -l | grep -E "virtualbox|qemu-kvm" + ``` + + VirtualBox kernel modules can conflict with KVM: + ```bash + lsmod | grep vbox + ``` + +17. 
**Check virtualization acceleration:** + ```bash + ls -l /dev/kvm + ``` + + Should be: + ``` + crw-rw---- 1 root kvm /dev/kvm + ``` + +18. **Install virt-manager (GUI) if desired:** + ```bash + sudo apt install virt-manager + ``` + + Test launch: + ```bash + virt-manager + ``` + +19. **Check for Secure Boot issues:** + Secure Boot can prevent some virtualization modules from loading: + ```bash + mokutil --sb-state + ``` + + If Secure Boot is enabled and causing issues, user may need to: + - Sign modules + - Disable Secure Boot in BIOS + - Use signed versions + +20. **Performance tuning:** + + **Enable hugepages for better performance:** + ```bash + sudo sysctl vm.nr_hugepages=1024 + echo "vm.nr_hugepages=1024" | sudo tee -a /etc/sysctl.conf + ``` + + **Check CPU governor:** + ```bash + cat /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor + ``` + + For virtualization, `performance` governor is recommended: + ```bash + sudo apt install cpufrequtils + sudo cpufreq-set -g performance + ``` + +21. **Report findings:** + Summarize: + - CPU virtualization support status + - BIOS/UEFI virtualization enabled status + - KVM modules loaded status + - libvirt status + - User group membership + - Network configuration + - Nested virtualization status + - Storage pools status + - Available resources + - Any conflicts or issues + - Recommendations + +22. **Provide recommendations:** + - Enable VT-x/AMD-V in BIOS if not enabled + - Install KVM/QEMU if not present + - Add user to libvirt and kvm groups + - Set up default network + - Enable nested virtualization if needed + - Configure IOMMU for PCIe passthrough if needed + - Install virt-manager for GUI management + - Allocate sufficient resources + - Resolve any conflicts (VirtualBox vs KVM) + - Performance tuning suggestions + +23. 
**Basic virtualization commands to share:** + - `virsh list --all` - List all VMs + - `virsh start ` - Start a VM + - `virsh shutdown ` - Shutdown a VM + - `virsh destroy ` - Force stop a VM + - `virsh console ` - Connect to VM console + - `virsh net-list` - List networks + - `virsh pool-list` - List storage pools + - `virt-manager` - Launch GUI + - `virt-install` - Create new VM from command line + +## Important notes: +- Virtualization must be enabled in BIOS/UEFI +- User must be in kvm and libvirt groups +- Log out and back in after adding to groups +- VirtualBox and KVM can conflict +- Nested virtualization is disabled by default +- IOMMU required for PCIe passthrough +- Secure Boot may prevent module loading +- Sufficient RAM and disk space needed +- Performance governor recommended for VMs +- Check if system is itself a VM before enabling nested virtualization diff --git a/commands/sysadmin/organize-files.md b/commands/sysadmin/organize-files.md new file mode 100644 index 0000000000000000000000000000000000000000..c8e8f878f4f2862b04476ed09f5529dd3bdcc56f --- /dev/null +++ b/commands/sysadmin/organize-files.md @@ -0,0 +1,51 @@ +Organize and restructure files in the current directory or repository. + +Your task: +1. Analyze current structure: + - Identify file types and purposes + - Note organization issues + - Find misplaced files + - Detect duplicates or unused files + +2. Create logical folder structure: + - Group related files together + - Separate by function (src, docs, tests, config, etc.) + - Create clear hierarchy + - Follow project conventions + +3. 
Example structures: + + **For code projects:** + ``` + project/ + β”œβ”€β”€ src/ # Source code + β”œβ”€β”€ tests/ # Test files + β”œβ”€β”€ docs/ # Documentation + β”œβ”€β”€ config/ # Configuration files + β”œβ”€β”€ scripts/ # Utility scripts + └── assets/ # Static assets + ``` + + **For documents:** + ``` + project/ + β”œβ”€β”€ drafts/ + β”œβ”€β”€ final/ + β”œβ”€β”€ archive/ + β”œβ”€β”€ templates/ + └── resources/ + ``` + +4. Move files to appropriate locations: + - Preserve git history if in repository + - Update import/require paths in code + - Fix broken references + - Update documentation + +5. Clean up: + - Remove redundant files + - Archive old versions + - Delete temporary files + - Update .gitignore if needed + +Implement clear separation of concerns and logical file organization. diff --git a/commands/sysadmin/python/python-env-setup.md b/commands/sysadmin/python/python-env-setup.md new file mode 100644 index 0000000000000000000000000000000000000000..18841edf41f8e4190ba6433516078e24852ed8d4 --- /dev/null +++ b/commands/sysadmin/python/python-env-setup.md @@ -0,0 +1,48 @@ +Set up and manage Python virtual environments. + +Your task: +1. Determine best approach for the project: + - **venv** (built-in, lightweight) + - **virtualenv** (more features) + - **conda** (data science, complex dependencies) + - **poetry** (modern dependency management) + - **pipenv** (Pipfile-based) + +2. Create virtual environment: + ```bash + # Using venv + python3 -m venv venv + source venv/bin/activate # Linux/Mac + venv\Scripts\activate # Windows + + # Using conda + conda create -n myenv python=3.11 + conda activate myenv + ``` + +3. Manage dependencies: + ```bash + # Install packages + pip install -r requirements.txt + + # Generate requirements + pip freeze > requirements.txt + + # For conda + conda env export > environment.yml + ``` + +4. 
Best practices: + - Never install packages globally + - Use requirements.txt or environment.yml + - Pin versions for reproducibility + - Separate dev and production dependencies + - Add venv/ to .gitignore + +5. Project setup guidance: + - Create virtual environment + - Install dependencies + - Configure IDE to use environment + - Document setup process in README + +Help users establish proper Python environment isolation and dependency management. diff --git a/commands/sysadmin/python/setup-conda.md b/commands/sysadmin/python/setup-conda.md new file mode 100644 index 0000000000000000000000000000000000000000..8a5c8115e22d06a8c59855c56058f9ddfc77917a --- /dev/null +++ b/commands/sysadmin/python/setup-conda.md @@ -0,0 +1,46 @@ +Set up and optimize conda environments tailored to system hardware. + +Your task: +1. Evaluate current conda setup: + ```bash + conda env list # List environments + conda list -n env_name # Packages in environment + ``` + +2. Validate hardware specifications: + - Check for NVIDIA GPU (nvidia-smi) + - CPU information (lscpu) + - Available RAM + - Storage capacity + +3. Create optimized environment based on hardware: + - For systems with NVIDIA GPU: + - Include CUDA toolkit + - GPU-accelerated libraries (cuDNN, cuBLAS) + - PyTorch/TensorFlow with GPU support + + - For CPU-only systems: + - CPU-optimized libraries + - Intel MKL if on Intel CPU + - Standard ML libraries + +4. Best practices: + - Use mamba for faster package resolution + - Create environment from environment.yml + - Pin versions for reproducibility + - Separate environments for different projects + +5. 
Example environment setup: + ```bash + # Create environment + conda create -n myenv python=3.11 + + # Activate and install packages + conda activate myenv + conda install numpy pandas scikit-learn + + # For GPU systems + conda install pytorch torchvision torchaudio pytorch-cuda=12.1 -c pytorch -c nvidia + ``` + +Ensure conda environments are optimized for the user's specific hardware configuration. diff --git a/commands/sysadmin/review-boot.md b/commands/sysadmin/review-boot.md new file mode 100644 index 0000000000000000000000000000000000000000..f92a0dc358af5820060beca030b0db0539d61c99 --- /dev/null +++ b/commands/sysadmin/review-boot.md @@ -0,0 +1,36 @@ +Review system boot process and identify issues. + +Your task: +1. Scan boot messages and journal logs: + ```bash + journalctl -b # Current boot + journalctl -b -1 # Previous boot + ``` + +2. Identify issues: + - Failed services + - Error messages + - Warnings + - Slow-starting services + - Dependency problems + +3. Analyze boot performance: + ```bash + systemd-analyze # Overall boot time + systemd-analyze blame # Time per service + systemd-analyze critical-chain # Critical path + ``` + +4. Suggest remediation: + - Fix failed services + - Disable unnecessary services + - Resolve dependency issues + - Optimize slow services + +5. Provide actionable recommendations: + - Commands to investigate specific issues + - Configuration changes needed + - Services to disable or reconfigure + - Further diagnostic steps + +Proactively identify and suggest fixes for boot-time issues. 
diff --git a/commands/sysadmin/shared/review-startup-services.md b/commands/sysadmin/shared/review-startup-services.md new file mode 100644 index 0000000000000000000000000000000000000000..f201f25bf8759dee2962bf4da57c6ba123e245a7 --- /dev/null +++ b/commands/sysadmin/shared/review-startup-services.md @@ -0,0 +1,102 @@ +--- +description: Review system startup services, identify failed or deprecated services, and clean up boot jobs +tags: [sysadmin, systemd, services, boot, cleanup, troubleshooting] +--- + +Review and clean up system startup services: + +1. **Failed Services**: Identify all services that failed to start +2. **Enabled Services**: List all enabled services that start at boot +3. **Deprecated Services**: Identify services that may be outdated or unnecessary +4. **Service Dependencies**: Check for broken dependencies +5. **Masked Services**: Review masked services +6. **Timing Analysis**: Identify services that slow down boot + +Run the following diagnostic commands: + +**Failed and Problematic Services:** +- `systemctl --failed` to list all failed services +- `systemctl list-units --state=failed --all` for detailed failed units +- `systemctl list-units --state=error` for services in error state +- `systemctl list-units --state=not-found` for services with missing unit files + +**Enabled Services:** +- `systemctl list-unit-files --state=enabled` for all enabled services +- `systemctl list-units --type=service --state=running` for currently running services +- `systemctl list-units --type=service --state=active` for active services + +**Boot-time Services:** +- `systemd-analyze blame | head -n 30` for slowest boot services +- `systemctl list-dependencies --before multi-user.target` for services started before multi-user +- `systemctl list-dependencies --after multi-user.target` for services started after multi-user + +**Service Details for Failed Services:** +For each failed service, run: +- `systemctl status [service-name]` for current status +- 
`journalctl -u [service-name] -n 50` for recent logs +- `systemctl cat [service-name]` to view unit file + +**Masked Services:** +- `systemctl list-unit-files --state=masked` for masked services + +**Deprecated/Unnecessary Service Detection:** +- Check for common deprecated services (networking.service on systemd systems, etc.) +- Identify services for removed/uninstalled software +- Find duplicate or redundant services + +Analyze the output and provide: + +**Failed Services Report:** +- List each failed service with its error message +- Classify the failure: + - Missing dependencies + - Configuration errors + - Service no longer needed + - Hardware/driver related + - Permission issues + +**Recommendations for each failed service:** +- **Remove**: Service is deprecated or related to uninstalled software + - Command: `sudo systemctl disable [service-name]` + - Command: `sudo systemctl mask [service-name]` if it keeps trying to start + +- **Fix**: Service is needed but has configuration issues + - Provide specific fix based on error logs + - Command to restart after fix: `sudo systemctl restart [service-name]` + +- **Investigate**: Service failure needs deeper investigation + - Provide relevant log excerpts + - Suggest diagnostic steps + +**Boot Optimization Opportunities:** +- Services that can be set to start on-demand instead of at boot +- Services that can be disabled if not needed +- Commands to disable: `sudo systemctl disable [service-name]` +- Commands to mask: `sudo systemctl mask [service-name]` + +**Enabled Services Review:** +- List all enabled services +- Highlight services that may be unnecessary: + - Services for unused hardware + - Duplicate services + - Development/testing services on production systems + - Legacy services replaced by newer alternatives + +**Safety Warnings:** +- Warn before suggesting removal of critical services +- List services that should NOT be disabled +- Suggest creating a snapshot/backup before making changes (especially for 
BTRFS/Snapper systems) + +**Action Plan:** +Provide a prioritized list of actions: +1. Safe to disable/mask (services clearly not needed) +2. Should be fixed (services needed but failing) +3. Investigate further (unclear if needed or cause of failure) + +For each action, provide the exact commands to execute. + +**Post-cleanup:** +After making changes, recommend: +- `sudo systemctl daemon-reload` to reload systemd configuration +- `systemd-analyze` to check boot time improvement +- Review logs after next boot to ensure no new issues diff --git a/commands/sysadmin/shared/system-health-checkup.md b/commands/sysadmin/shared/system-health-checkup.md new file mode 100644 index 0000000000000000000000000000000000000000..3580c890ef9a9dc912eef6b859862cd74a704fc9 --- /dev/null +++ b/commands/sysadmin/shared/system-health-checkup.md @@ -0,0 +1,87 @@ +--- +description: Comprehensive system health checkup including disk health, SMART status, filesystem checks, and overall system status +tags: [sysadmin, diagnostics, health, disk, smart, filesystem, comprehensive] +--- + +Perform a comprehensive system health checkup: + +1. **Disk Health (SMART)**: Check all disk SMART status and health indicators +2. **Filesystem Health**: Check all mounted filesystems for errors +3. **System Resources**: CPU, memory, swap, and load status +4. **Critical Services**: Verify critical system services are running +5. **Security Updates**: Check for pending security updates +6. **Disk Space**: Check all mounted filesystems for space issues +7. **System Logs**: Check for recent critical errors +8. 
**Hardware Errors**: Check for hardware-related issues in logs + +Run the following comprehensive diagnostic commands: + +**Disk Health (SMART):** +- `sudo smartctl --scan` to identify all drives +- `sudo smartctl -H /dev/sda` for health status (repeat for all drives found) +- `sudo smartctl -A /dev/sda` for SMART attributes (repeat for all drives) +- Check for: Reallocated sectors, Current pending sectors, Offline uncorrectable sectors + +**Filesystem Health:** +- `df -h` for disk space on all filesystems +- `sudo btrfs device stats /` if using BTRFS +- Check mounted filesystems with `mount | grep -E '^/dev'` +- For ext4: `sudo tune2fs -l /dev/sdXY | grep -i 'state\|error'` for filesystem state + +**System Resources:** +- `free -h` for memory usage +- `uptime` for load averages +- `top -b -n 1 | head -n 20` for process overview +- `swapon --show` for swap status + +**Critical Services:** +- `systemctl status systemd-journald` for logging service +- `systemctl status cron` or `systemctl status crond` for task scheduler +- `systemctl --failed` for any failed services + +**Updates and Security:** +- `sudo apt-get update` to refresh package lists +- `apt list --upgradable` to check for available updates +- `grep -i security /var/log/apt/history.log | tail -n 20` for recent security updates + +**System Logs:** +- `journalctl -p 3 -b` for errors in current boot +- `journalctl -p 2 -b` for critical issues in current boot +- `dmesg | grep -i 'error\|fail\|critical' | tail -n 20` for kernel errors + +**Hardware Status:** +- `sensors` for temperature monitoring (if lm-sensors installed) +- `dmesg | grep -i 'hardware error'` for hardware errors +- `lspci -v | grep -i 'error'` for PCIe errors + +**Additional Checks:** +- Check for excessive failed login attempts: `sudo grep -i 'failed password' /var/log/auth.log | tail -n 10` +- Check for disk I/O errors: `dmesg | grep -i 'I/O error'` + +Analyze all results and provide: + +**Summary Report:** +- Overall system health status 
(Healthy, Warning, Critical) +- Disk health status for each drive +- Filesystem health and space status +- Memory and swap status +- Any failed services or critical errors +- Pending updates (especially security) +- Temperature warnings if applicable +- Specific issues found with severity levels + +**Recommendations:** +- Immediate actions needed (if any) +- Preventive maintenance suggestions +- Monitoring recommendations +- Whether a reboot is recommended +- Backup reminders if issues detected + +**Priority Issues:** +List any issues in order of urgency: +1. Critical (requires immediate attention) +2. Warning (should be addressed soon) +3. Informational (for awareness) + +If smartmontools is not installed, offer to install with `sudo apt-get install smartmontools`. +If lm-sensors is not installed and temperature monitoring is desired, offer to install with `sudo apt-get install lm-sensors`. diff --git a/commands/sysadmin/shared/system-upgrade.md b/commands/sysadmin/shared/system-upgrade.md new file mode 100644 index 0000000000000000000000000000000000000000..ec3853a1b29d986afeb2030baa4788e381cabfe0 --- /dev/null +++ b/commands/sysadmin/shared/system-upgrade.md @@ -0,0 +1,21 @@ +--- +description: Perform a full system upgrade with apt-get (updates package lists and upgrades all packages) +tags: [sysadmin, maintenance, apt, upgrade, system] +--- + +Perform a comprehensive system upgrade: + +1. Update the package lists from repositories +2. Upgrade all installed packages to their latest versions +3. Show which packages were upgraded +4. Clean up any unnecessary packages + +Use sudo to execute these commands with appropriate privileges. + +Run the following commands sequentially: +- `sudo apt-get update` to refresh package lists +- `sudo apt-get upgrade -y` to upgrade packages +- `sudo apt-get autoremove -y` to remove unnecessary packages +- `sudo apt-get autoclean` to clean up package cache + +Provide a summary of what was updated and if a reboot is recommended. 
diff --git a/commands/tech-docs/add-vibe-coding-disclosure.md b/commands/tech-docs/add-vibe-coding-disclosure.md new file mode 100644 index 0000000000000000000000000000000000000000..2146a709295a5058ef356b51067c6e194a5bbf9c --- /dev/null +++ b/commands/tech-docs/add-vibe-coding-disclosure.md @@ -0,0 +1,13 @@ +Please add a note to the README that the code in this repo was generated entirely by AI or with AI assistance. + +To do so, let's add this badge somewhere prominent in the README: + + ![AI Assisted Development](https://img.shields.io/badge/AI%20Assisted-Development-brightgreen?style=for-the-badge&logo=robot) + +And add it again in the footer like this. + +## AI Assisted Development + +![AI Assisted Development](https://img.shields.io/badge/AI%20Assisted-Development-brightgreen?style=for-the-badge&logo=robot) + +The code in this repository was generated by or with an AI tool. Use and integrate with your projects at your own risk. \ No newline at end of file diff --git a/commands/tech-docs/create-changelog.md b/commands/tech-docs/create-changelog.md new file mode 100644 index 0000000000000000000000000000000000000000..9324a6b1ada53eff5b459555c9f27d4f83180d50 --- /dev/null +++ b/commands/tech-docs/create-changelog.md @@ -0,0 +1,43 @@ +Create or update the changelog for this repository. + +Your task: +1. If no CHANGELOG.md exists, create one +2. If it exists, add a new entry for recent changes +3. Use standard changelog formatting + +Format: +```markdown +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [Unreleased] + +### Added +- New features added + +### Changed +- Changes in existing functionality + +### Deprecated +- Soon-to-be removed features + +### Removed +- Removed features + +### Fixed +- Bug fixes + +### Security +- Security fixes + +## [1.0.0] - YYYY-MM-DD + +### Added +- Initial release +``` + +Document the tasks and changes made since the last session. Use semantic versioning and standard categories. diff --git a/commands/tech-docs/create-reference.md b/commands/tech-docs/create-reference.md new file mode 100644 index 0000000000000000000000000000000000000000..8ff447e8088961f6ee514cd9b35787adc27f335d --- /dev/null +++ b/commands/tech-docs/create-reference.md @@ -0,0 +1,15 @@ +Create reference documentation for the codebase. + +Your task: +1. Analyze the code to identify key components, functions, classes, and APIs +2. Generate reference documentation covering: + - Main modules/components and their purposes + - Key functions/methods with parameters and return values + - Important classes with their properties and methods + - API endpoints (if applicable) + - Configuration options + - Workflows and processes + +Format as clear, navigable documentation. Include code examples where helpful. + +Save in a `docs/` or `reference/` folder as appropriate. Use markdown format for easy maintenance and readability. diff --git a/commands/tech-docs/document-stack.md b/commands/tech-docs/document-stack.md new file mode 100644 index 0000000000000000000000000000000000000000..8af4b2cef48ccc1398262d712e9265e3601f94d1 --- /dev/null +++ b/commands/tech-docs/document-stack.md @@ -0,0 +1,37 @@ +Analyze and document the technology stack for this project. + +Your task: +1. Examine the repository to identify all technologies, frameworks, and libraries in use +2. Create a comprehensive stack documentation file +3. Organize by categories (frontend, backend, database, DevOps, testing, etc.) +4. 
Include version information where available + +Output format: +```markdown +# Technology Stack + +## Frontend +- Framework: [Name] (version) +- UI Library: [Name] (version) +- State Management: [Name] (version) + +## Backend +- Runtime: [Name] (version) +- Framework: [Name] (version) +- API: [Type/Framework] + +## Database +- Database: [Name] (version) +- ORM/ODM: [Name] (version) + +## DevOps & Tools +- Package Manager: [Name] +- Build Tool: [Name] +- Testing: [Framework] +- CI/CD: [Platform] + +## Dependencies +Key dependencies and their purposes. +``` + +Save as `STACK.md` or similar in the repository root or docs folder. diff --git a/commands/tech-docs/format-links.md b/commands/tech-docs/format-links.md new file mode 100644 index 0000000000000000000000000000000000000000..540c7ce7fd93c7bf70b6a911dad3bf1a414a9c87 --- /dev/null +++ b/commands/tech-docs/format-links.md @@ -0,0 +1,9 @@ +This collection of links is intended to be added to an organised list of links - such as an awesome list. + +Please find the awesome list (likely at the base of the repo). Or if it doesn't exist, begin it. + +Understand the structure. And identify whether it is being maintained manually or via a script. + +Once you have understood the structure and the method of creation, take these links and add them to the public-facing list. + +Then, delete this markdown doc. \ No newline at end of file diff --git a/commands/tech-docs/readme/add-readme.md b/commands/tech-docs/readme/add-readme.md new file mode 100644 index 0000000000000000000000000000000000000000..b272dc11eed9720accc30b756d8938fecfce0689 --- /dev/null +++ b/commands/tech-docs/readme/add-readme.md @@ -0,0 +1,18 @@ +Add the README.md + +Do: + +- Summarise clearly and simply the purpose of this project/codebase as you can infer it. 
You may use the understanding of the repository you have built up during this session, or in prior memory, to supplement your current indexing + +- Use shields.io badges sparingly to visually identify languages and components used + +- Prefer markdown tables over bullet point lists. When adding a table of repositories, use shields.io badges for linking to the repos + +- Add internal links on README.md by adding relative links that will be navigable when visited on github.com + +DO NOT: + +- Use emojis! +- Add a contributor guidelines section (unless otherwise stated) +- Add a license or make any note of licensing. +- Include navigable links to any parts of the repository which are deliberately not committed: for example if there's a folder called /private, don't mention it or add links to it \ No newline at end of file diff --git a/commands/tech-docs/readme/create-hf-readme.md b/commands/tech-docs/readme/create-hf-readme.md new file mode 100644 index 0000000000000000000000000000000000000000..9a8314823e01b8830b9115ad40b9f898043701ee --- /dev/null +++ b/commands/tech-docs/readme/create-hf-readme.md @@ -0,0 +1,46 @@ +Create a Hugging Face compliant README with valid YAML front matter. + +Your task: +1. Analyze the repository to understand the model, dataset, or space +2. Create a README.md with valid YAML front matter for Hugging Face +3. Include appropriate metadata tags + +Structure: +```markdown +--- +# YAML front matter for Hugging Face +tags: +- tag1 +- tag2 +license: mit +datasets: +- dataset-name +language: +- en +--- + +# Model/Dataset/Space Name + +Description of the model, dataset, or space. + +## Model Details + +- **Developed by:** [Organization] +- **Model type:** [Type] +- **Language(s):** [Languages] +- **License:** [License] + +## Uses + +Intended uses and limitations. + +## Training Data + +Information about training data (if applicable). + +## Evaluation + +Evaluation metrics and results (if applicable). 
+``` + +Ensure the YAML front matter is valid and appropriate for the Hugging Face platform. diff --git a/commands/tech-docs/readme/create-readme.md b/commands/tech-docs/readme/create-readme.md new file mode 100644 index 0000000000000000000000000000000000000000..cb1d0e966de53b15de79bc0dc6ebcc63d1500f94 --- /dev/null +++ b/commands/tech-docs/readme/create-readme.md @@ -0,0 +1,32 @@ +Create a standard GitHub README for this repository. + +Your task: +1. Analyze the repository structure and code to understand its purpose +2. Generate a README.md file with: + - Clear, concise project description + - Overview of functions and features + - Key technologies used + - Basic usage examples (if applicable) + +Format: +```markdown +# Project Name + +Brief description of what this project does. + +## Features + +- Feature 1 +- Feature 2 +- Feature 3 + +## Technologies + +List of main technologies, frameworks, or languages used. + +## Usage + +Basic usage instructions or examples. +``` + +Do not include collaborative guidelines or licensing information unless explicitly requested. Focus on clarity and completeness. diff --git a/commands/tech-docs/readme/specific-additions/add-wip.md b/commands/tech-docs/readme/specific-additions/add-wip.md new file mode 100644 index 0000000000000000000000000000000000000000..39d77271f88b73fbceb12f36de14ff0f31d0e31b --- /dev/null +++ b/commands/tech-docs/readme/specific-additions/add-wip.md @@ -0,0 +1,11 @@ +This repository is a work in progress (WIP). I would like to highlight that. + +To do this, please: + +- Add a work in progress badge (using Shields.io). Position this at the top of the readme. 
+ +Add somewhere else in the readme: + +## Work In Progress + +This repository is a work in progress \ No newline at end of file diff --git a/commands/tech-docs/readme/update-readme.md b/commands/tech-docs/readme/update-readme.md new file mode 100644 index 0000000000000000000000000000000000000000..3949d0598ad448fd543ff8b27300959a9bd2b3cc --- /dev/null +++ b/commands/tech-docs/readme/update-readme.md @@ -0,0 +1,9 @@ +Update the readme of this repository to reflect changes to the codebase that can be inferred since it was created or last updated. + +Updates may include: + +- Revising the linked sections of the repo after a refactoring +- Removing mention of deprecated functionalities +- Updating indexes and tables after the codebase expanded + +After making your edits, push the repo \ No newline at end of file diff --git a/commands/tech-docs/rewrite-in-shakespearean.md b/commands/tech-docs/rewrite-in-shakespearean.md new file mode 100644 index 0000000000000000000000000000000000000000..2ce48b597ab83224322019d3f59c09163b90f778 --- /dev/null +++ b/commands/tech-docs/rewrite-in-shakespearean.md @@ -0,0 +1,7 @@ +Please read the README.md for this project. + +Then: + +Rewrite the README in Shakespearean English. + +You will inevitably note that many or most terms do not have obvious parallels in Shakespearean English. Attempt to find amusing but somewhat clear parallels, even if they are not entirely perfect. You may choose to describe a laptop, for example, as a "mobile computing object". \ No newline at end of file diff --git a/commands/tech-docs/user-docs.md b/commands/tech-docs/user-docs.md new file mode 100644 index 0000000000000000000000000000000000000000..b93fb04999a5be23f002b5852cab66d1d9001a00 --- /dev/null +++ b/commands/tech-docs/user-docs.md @@ -0,0 +1,8 @@ +This repository is a private repository for the user's personal use. + +If a README already exists, then rewrite it from this perspective. 
The purpose of the README in this case is not to present the project to other stakeholders, but rather to provide the user with a clear overview of the purpose of this project. + +If the user requests it, you can infer that it would be helpful to create a docs folder for the user's use in which you provide summaries of how the key components of this project function. + +For example, if the repository contains a data backup script, then you may wish to create docs in this folder reminding or instructing the user on how to operate parts of the script or the functionality of certain parts of the code repository. + diff --git a/commands/writing-and-editing/break-up-long-sentences.md b/commands/writing-and-editing/break-up-long-sentences.md new file mode 100644 index 0000000000000000000000000000000000000000..369882e9cdacd6ce710c3a35f1617a488ad91c92 --- /dev/null +++ b/commands/writing-and-editing/break-up-long-sentences.md @@ -0,0 +1,31 @@ +You are a text editor specializing in breaking up long, run-on sentences into shorter, more digestible sentences. + +Your task is to identify overly long sentences and split them into multiple shorter sentences that improve readability and comprehension. + +## Guidelines + +1. **Identify long sentences**: Look for sentences exceeding 25-30 words or containing multiple independent clauses +2. **Find natural break points**: Split at conjunctions (and, but, or), semicolons, or logical thought boundaries +3. **Maintain logical flow**: Ensure that split sentences maintain clear connections and transitions +4. **Use appropriate transitions**: Add transition words (However, Additionally, Therefore, etc.) when needed for coherence +5. **Preserve meaning**: Keep the original message and all important details intact +6. **Improve readability**: Aim for an average sentence length of 15-20 words for optimal web readability +7. **Vary sentence length**: Create some variety - not all sentences should be exactly the same length +8. 
**Keep paragraph integrity**: Don't split sentences that would create awkward single-sentence paragraphs + +## Target Sentence Lengths + +- **Ideal**: 15-20 words per sentence +- **Maximum**: 25-30 words per sentence +- **Variety**: Mix shorter (10-15 words) and medium (20-25 words) sentences for rhythm + +## Process + +1. Identify sentences that are too long (>30 words) +2. Locate natural splitting points (conjunctions, semicolons, logical breaks) +3. Split into two or more sentences +4. Add transitions if needed for clarity +5. Review for flow and coherence +6. Present the revised text + +Return only the revised text with split sentences, without explanations or commentary unless specifically requested. \ No newline at end of file diff --git a/commands/writing-and-editing/englsh-variations/uk-english.md b/commands/writing-and-editing/englsh-variations/uk-english.md new file mode 100644 index 0000000000000000000000000000000000000000..a880e891f7b86ec36bc637f4981ad2f3b4ac02f3 --- /dev/null +++ b/commands/writing-and-editing/englsh-variations/uk-english.md @@ -0,0 +1,17 @@ +Convert the provided text to UK English standards. + +Your task: +- Standardise spelling to British English (colour, organisation, analyse, etc.) +- Use British vocabulary and expressions +- Apply British punctuation and formatting conventions +- Ensure consistency throughout the text + +Preserve meaning, tone, and authorial intent. Only change regionalisation and language standards, not content or style. 
+ +Common conversions: +- -ize β†’ -ise (standardise, organise) +- -or β†’ -our (colour, favour) +- -er β†’ -re (centre, theatre) +- -og β†’ -ogue (catalogue, dialogue) +- -l β†’ -ll (travelled, labelled) +- -se β†’ -ce (defence, licence as noun) diff --git a/commands/writing-and-editing/englsh-variations/us-english.md b/commands/writing-and-editing/englsh-variations/us-english.md new file mode 100644 index 0000000000000000000000000000000000000000..3b9c6a8b47bfb8817cf3f941dd7f832ee6efdbbc --- /dev/null +++ b/commands/writing-and-editing/englsh-variations/us-english.md @@ -0,0 +1,17 @@ +Convert the provided text to US English standards. + +Your task: +- Standardize spelling to American English (color, organization, analyze, etc.) +- Use American vocabulary and expressions +- Apply American punctuation and formatting conventions +- Ensure consistency throughout the text + +Preserve meaning, tone, and authorial intent. Only change regionalization and language standards, not content or style. + +Common conversions: +- -ise β†’ -ize (standardize, organize) +- -our β†’ -or (color, favor) +- -re β†’ -er (center, theater) +- -ogue β†’ -og (catalog, dialog) +- -ll β†’ -l (traveled, labeled) +- -ce β†’ -se (defense, license as noun) diff --git a/commands/writing-and-editing/enhancement/add-examples.md b/commands/writing-and-editing/enhancement/add-examples.md new file mode 100644 index 0000000000000000000000000000000000000000..544301ff0e223be93cebec495e5c17fc7cc21272 --- /dev/null +++ b/commands/writing-and-editing/enhancement/add-examples.md @@ -0,0 +1,94 @@ +# Add Examples Where Missing + +You are an example enrichment specialist. Your task is to identify places in the provided text where concrete examples would clarify concepts and make the content more relatable and understandable. + +## Your Task + +1. **Identify abstract concepts** that need illustration +2. **Spot missing use cases** where practical examples would help +3. 
**Add relevant examples** that: + - Clarify the concept + - Make it relatable to the audience + - Provide concrete, specific details + - Vary in type and context + +## Types of Examples to Add + +### Real-World Scenarios +- Practical use cases +- Common situations readers face +- Industry-specific applications +- Day-to-day implementations + +### Illustrative Cases +- Hypothetical but realistic scenarios +- Before/after comparisons +- Success stories or case studies +- Problem-solution demonstrations + +### Concrete Instances +- Specific product/tool names +- Named companies or organizations +- Actual data points +- Tangible outcomes + +### Analogies and Metaphors +- Familiar comparisons +- Simplified explanations +- Relatable parallels +- Conceptual bridges + +## Example Placement Strategy + +**Add examples when:** +- Introducing new concepts +- Explaining technical details +- Making abstract claims +- Teaching procedures or methods +- Justifying recommendations +- Clarifying distinctions + +**Example formats:** +- Inline: "For example, ..." +- Expanded: Dedicated paragraph or section +- Lists: Multiple brief examples +- Callouts: Boxed or highlighted examples + +## Guidelines + +- **Relevance**: Match examples to audience knowledge level +- **Diversity**: Vary industries, contexts, and complexity +- **Clarity**: Make examples immediately understandable +- **Specificity**: Use concrete details, not generic placeholders +- **Balance**: Don't overload with examplesβ€”one or two per concept +- **Authenticity**: Use realistic scenarios, even if hypothetical + +## Output Format + +Return the enhanced text with: +- Examples naturally integrated +- Clear markers (e.g., "For example:", "Consider:", "Imagine:") +- Proper formatting (indentation, bold, etc. as appropriate) +- Optional: Notes on where examples were added and why + +## Example + +**Before:** +"API rate limiting is important for protecting your infrastructure. 
It prevents abuse and ensures fair usage among clients." + +**After:** +"API rate limiting is important for protecting your infrastructure. It prevents abuse and ensures fair usage among clients. + +**For example**, imagine you run a weather API service. Without rate limiting, a single user could make thousands of requests per secondβ€”perhaps due to a buggy scriptβ€”overwhelming your servers and degrading service for everyone else. By implementing a limit of 1,000 requests per hour per API key, you ensure that all 10,000 of your users can reliably access weather data simultaneously. + +**Another common scenario**: A mobile app developer accidentally deploys code with an infinite loop that hammers your authentication endpoint. Rate limiting (say, 5 login attempts per minute) stops this runaway process from bringing down your auth service, while still allowing legitimate users to log in normally." + +**Changes made:** +- Added two concrete examples with specific numbers +- Included both malicious (buggy script) and accidental (deployment error) scenarios +- Used realistic metrics (1,000 requests/hour, 10,000 users) +- Demonstrated clear cause-and-effect relationships + +--- + +Now, please provide the text where you'd like me to add examples. diff --git a/commands/writing-and-editing/enhancement/add-statistics.md b/commands/writing-and-editing/enhancement/add-statistics.md new file mode 100644 index 0000000000000000000000000000000000000000..4a450020292973c4dcaea83b030e77a8da7a48ea --- /dev/null +++ b/commands/writing-and-editing/enhancement/add-statistics.md @@ -0,0 +1,61 @@ +# Add Statistics + +You are a data enrichment specialist. Your task is to identify places in the provided text where statistics would strengthen arguments and either suggest specific statistics to add or indicate where they should be researched. + +## Your Task + +1. **Analyze the text** for claims that would benefit from statistical support +2. 
**Identify opportunities** where numbers would add credibility +3. **Suggest relevant statistics** when you have knowledge of them, OR +4. **Mark locations** where statistics should be researched and added + +## Types of Statistics to Consider + +### Quantitative Support +- **Market data**: Size, growth rates, trends +- **Performance metrics**: Success rates, improvements, ROI +- **Demographic data**: Population statistics, user numbers +- **Research findings**: Study results, survey data, meta-analyses +- **Comparative data**: Industry benchmarks, before/after comparisons +- **Temporal data**: Historical trends, projections, time-based metrics + +### Statistical Formats +- **Percentages**: "40% increase", "9 out of 10 users" +- **Absolute numbers**: "2.5 million customers", "$500K saved" +- **Ratios**: "3:1 return on investment" +- **Ranges**: "Between 15-20% improvement" +- **Rates**: "95% satisfaction rate" + +## Guidelines + +- **Precision matters**: Use specific numbers rather than vague claims +- **Source credibility**: Note reputable sources when suggesting statistics +- **Relevance**: Only add statistics that directly support the argument +- **Recency**: Prefer recent data (note when data may be outdated) +- **Context**: Include necessary context (timeframe, sample size, methodology) +- **Balance**: Don't overwhelm with numbersβ€”use strategically + +## Output Format + +Provide the text with: +- **[STAT NEEDED: description]** markers where research is required +- **[SUGGESTED STAT: statistic + source]** where you can suggest specific data +- Integrated statistics with proper context and sourcing + +## Example + +**Before:** +"Email marketing is very effective. Many businesses see good results. Our platform helps companies improve their email campaigns." + +**After:** +"Email marketing delivers exceptional ROI: businesses see an average return of $42 for every $1 spent [SUGGESTED STAT: DMA 2023 Email Marketing Metrics]. 
Over 80% of marketers report increased engagement through personalized email campaigns [SUGGESTED STAT: HubSpot 2024 Marketing Report]. Our platform has helped companies improve their email open rates by [STAT NEEDED: internal performance data - average open rate improvement percentage] compared to industry baseline." + +**Changes made:** +- Added ROI statistic with source +- Added engagement percentage with source +- Marked where company-specific data should be inserted +- Provided context for each statistic + +--- + +Now, please provide the text you'd like me to enrich with statistics. diff --git a/commands/writing-and-editing/extraction/extract-context-data.md b/commands/writing-and-editing/extraction/extract-context-data.md new file mode 100644 index 0000000000000000000000000000000000000000..dec4b8b24c542b9f7000963d253318e2b3c9b8b1 --- /dev/null +++ b/commands/writing-and-editing/extraction/extract-context-data.md @@ -0,0 +1,72 @@ +# Extract Context Data for AI + +You are a context extraction specialist. Your task is to analyze the provided text and extract structured context data that would be useful for AI systems to understand and work with the content. + +## Your Task + +Extract and organize the following types of context data from the text: + +1. **Key Entities** + - People, organizations, locations mentioned + - Products, services, technologies referenced + - Important dates and timeframes + +2. **Core Concepts** + - Main topics and themes + - Technical terms and their relationships + - Domain-specific terminology + +3. **Metadata** + - Apparent purpose/intent of the text + - Target audience + - Tone and style characteristics + - Document type (article, documentation, guide, etc.) + +4. **Relationships & Dependencies** + - Cause-effect relationships + - Prerequisites or dependencies mentioned + - Sequential processes or workflows + +5. 
**Action Items & Directives** + - Instructions or procedures + - Recommendations or best practices + - Requirements or constraints + +## Output Format + +Present the extracted context in a clear, structured markdown format: + +```markdown +# Context Data + +## Key Entities +- [List entities with brief context] + +## Core Concepts +- [List main concepts and themes] + +## Metadata +- **Purpose**: [Document purpose] +- **Audience**: [Target audience] +- **Type**: [Document type] +- **Tone**: [Tone description] + +## Relationships & Dependencies +- [List key relationships] + +## Action Items & Directives +- [List instructions, requirements, etc.] + +## Summary +[Brief 2-3 sentence summary of what an AI system should know about this text] +``` + +## Guidelines + +- Be concise but comprehensive +- Focus on information that would help an AI understand context +- Identify implicit as well as explicit information +- Note any ambiguities or missing context +- Prioritize actionable and structural information + +Now, please provide the text you'd like me to analyze for context extraction. diff --git a/commands/writing-and-editing/filetype-conversion/convert-to-markdown.md b/commands/writing-and-editing/filetype-conversion/convert-to-markdown.md new file mode 100644 index 0000000000000000000000000000000000000000..b4f3ba8cdbfcf6fafbfc6b1c829da0d5da227975 --- /dev/null +++ b/commands/writing-and-editing/filetype-conversion/convert-to-markdown.md @@ -0,0 +1,121 @@ +# Convert to Valid Markdown + +You are a markdown conversion specialist. Your task is to convert the provided text from any format into clean, valid, semantic markdown. 
+ +## Your Task + +Convert the input text to proper markdown, handling these common source formats: + +### Source Formats +- **Plain text**: Add appropriate structure and formatting +- **HTML**: Convert tags to markdown equivalents +- **Rich text / Word**: Convert formatting to markdown syntax +- **Wiki markup**: Transform to markdown syntax +- **reStructuredText**: Convert to markdown equivalents +- **LaTeX**: Convert document structure to markdown +- **PDF-extracted text**: Clean up and structure properly +- **Other markup languages**: Best-effort conversion + +## Conversion Rules + +### Headings +- Convert heading styles to `#` syntax (H1-H6) +- Use proper heading hierarchy +- Ensure only one H1 per document + +### Text Formatting +- **Bold**: ``, ``, `**text**` β†’ `**text**` +- **Italic**: ``, ``, `*text*` β†’ `*text*` +- **Code**: `` β†’ `` `code` `` +- **Strikethrough**: ``, `` β†’ `~~text~~` + +### Lists +- Convert ordered lists to `1.`, `2.`, etc. +- Convert unordered lists to `-` or `*` +- Maintain proper indentation for nested lists + +### Links and Images +- Links: `text` β†’ `[text](url)` +- Images: `text` β†’ `![text](url)` +- Preserve link references when appropriate + +### Code Blocks +- Convert code sections to fenced code blocks with ` ``` ` +- Detect and specify language when possible +- Preserve indentation and formatting + +### Tables +- Convert HTML tables or other formats to markdown tables +- Use proper alignment syntax (`|---|:---|---:|`) +- Ensure tables are well-formatted + +### Quotes +- Convert blockquotes to `>` syntax +- Maintain nested quote levels + +### Special Elements +- Horizontal rules: Convert to `---` or `***` +- Line breaks: Use double spaces or `
` β†’ proper line breaks +- Escaping: Properly escape markdown special characters when needed + +## Cleanup Tasks + +1. **Remove artifacts** + - Strip out XML/HTML comments + - Remove style and script tags + - Clean up conversion artifacts + +2. **Normalize spacing** + - Single blank line between elements + - No trailing whitespace + - Consistent indentation + +3. **Fix common issues** + - Broken links + - Malformed tables + - Inconsistent list formatting + - Mixed heading styles + +4. **Validate** + - Ensure all brackets, parentheses are balanced + - Check link and image syntax + - Verify code fence closure + +## Output Format + +Return clean, valid markdown that: +- Renders correctly in standard markdown parsers +- Is human-readable in plain text +- Follows CommonMark or GitHub Flavored Markdown standards +- Maintains the semantic structure of the original + +## Example + +**Input (HTML):** +```html +

Title

+

Some bold text and italic text.

+
    +
  • Item one
  • +
  • Item two
  • +
+
console.log('hello');
+``` + +**Output (Markdown):** +```markdown +# Title + +Some **bold text** and *italic text*. + +- Item one +- Item two + +```javascript +console.log('hello'); +``` +``` + +--- + +Now, please provide the text you'd like me to convert to valid markdown. If you know the source format, please mention it for better conversion accuracy. diff --git a/commands/writing-and-editing/filetype-conversion/markdown-to-other-formats.md b/commands/writing-and-editing/filetype-conversion/markdown-to-other-formats.md new file mode 100644 index 0000000000000000000000000000000000000000..649cb1d0a8889ceaf817a047b4609ddfab88e4da --- /dev/null +++ b/commands/writing-and-editing/filetype-conversion/markdown-to-other-formats.md @@ -0,0 +1,151 @@ +# Convert Markdown to Other Formats + +You are a markdown export specialist. Your task is to convert the provided markdown text into other document formats while preserving structure and formatting. + +## Supported Output Formats + +### 1. HTML +- Clean, semantic HTML5 +- Preserve heading hierarchy +- Convert markdown syntax to HTML tags +- Optional: Include CSS classes for styling + +### 2. Plain Text +- Remove all formatting +- Preserve structure with spacing and indentation +- Convert links to footnotes or inline URLs +- Maintain readability + +### 3. Rich Text (RTF-style) +- Preserve bold, italic, and other formatting +- Maintain heading styles +- Keep list structures +- Suitable for word processors + +### 4. LaTeX +- Convert to LaTeX document structure +- Use appropriate environments (itemize, enumerate, verbatim) +- Preserve math notation if present +- Include necessary packages + +### 5. reStructuredText +- Convert to reST syntax +- Maintain document structure +- Convert code blocks and inline code +- Preserve links and images + +### 6. AsciiDoc +- Convert to AsciiDoc syntax +- Maintain document hierarchy +- Convert lists, tables, and code blocks +- Preserve cross-references + +### 7. 
Org Mode +- Convert to Emacs Org syntax +- Preserve headings and structure +- Convert lists and code blocks +- Maintain metadata + +### 8. JSON/XML +- Structured data representation +- Document tree structure +- Preserve all content and metadata +- Machine-readable format + +## Conversion Guidelines + +### Headings +- Maintain hierarchy levels +- Convert `#` syntax to target format equivalents + +### Text Formatting +- **Bold**: `**text**` β†’ target format +- **Italic**: `*text*` β†’ target format +- **Code**: `` `code` `` β†’ target format +- **Links**: `[text](url)` β†’ target format + +### Lists +- Preserve ordered/unordered distinction +- Maintain nesting levels +- Convert to target format syntax + +### Code Blocks +- Preserve language specifications +- Maintain indentation +- Use appropriate code environments + +### Tables +- Convert markdown tables to target format +- Preserve alignment when possible +- Handle merged cells if needed + +### Images and Media +- Convert image references +- Preserve alt text and titles +- Handle local vs. remote paths + +## Output Structure + +When converting, I will: + +1. **Ask for clarification** on target format if not specified +2. **Present the conversion** in the requested format +3. **Note any limitations** or elements that don't translate directly +4. **Suggest alternatives** for unsupported features + +## Example + +**Input (Markdown):** +```markdown +# Title + +Some **bold** and *italic* text. + +- List item 1 +- List item 2 + +`inline code` and: + +\`\`\`python +print("Hello") +\`\`\` +``` + +**Output (HTML):** +```html +

Title

+

Some bold and italic text.

+
    +
  • List item 1
  • +
  • List item 2
  • +
+

inline code and:

+
print("Hello")
+``` + +**Output (Plain Text):** +``` +TITLE + +Some bold and italic text. + +- List item 1 +- List item 2 + +inline code and: + + print("Hello") +``` + +## Usage + +Please provide: +1. The markdown text you want to convert +2. Your desired output format +3. Any specific requirements or preferences + +I'll handle the conversion and ensure the output maintains the structure and intent of your original markdown document. + +--- + +What markdown text would you like to convert, and to which format? diff --git a/commands/writing-and-editing/format-conversion/blog-post-to-outline.md b/commands/writing-and-editing/format-conversion/blog-post-to-outline.md new file mode 100644 index 0000000000000000000000000000000000000000..f009288e9958ac7116ef5e03fdb417834035a2b3 --- /dev/null +++ b/commands/writing-and-editing/format-conversion/blog-post-to-outline.md @@ -0,0 +1,318 @@ +# Blog Post to Outline Converter + +Distill blog posts, articles, or long-form content into clear, structured outlines that capture the key points, arguments, and organization. Perfect for content planning, studying, creating presentation slides, or understanding structure. + +## Your Task + +Take the user's blog post and convert it into a well-organized outline that: +- Captures the main ideas and structure +- Shows the hierarchy of information +- Preserves key examples and evidence +- Maintains logical flow +- Strips away narrative fluff while keeping substance + +## Outline Creation Process + +### 1. Identify the Core Structure + +Extract: +- **Main topic/thesis**: What is the central argument or purpose? +- **Major sections**: What are the main divisions of content? +- **Key points**: What are the essential claims or ideas? +- **Supporting elements**: What examples, evidence, or details support each point? + +### 2. Create Hierarchical Organization + +Use standard outline formatting: + +``` +# Main Title / Topic + +## I. First Major Section + A. Primary point + 1. Supporting detail + 2. 
Supporting detail + B. Secondary point + 1. Supporting detail + +## II. Second Major Section + A. Primary point + B. Secondary point + 1. Supporting detail + 2. Supporting detail + a. Sub-detail + b. Sub-detail + +## III. Conclusion / Summary + A. Key takeaway + B. Call to action +``` + +Or use markdown-style: + +``` +# Main Title + +## Section One +- Main point + - Supporting detail + - Example +- Second point + - Detail + +## Section Two +- Main point + - Sub-point + - Detail +``` + +### 3. Distillation Techniques + +**From paragraphs to bullets**: + +Blog post: +``` +The first step in optimizing your development workflow is implementing continuous integration. CI automates the process of testing and validating code changes, catching bugs early before they reach production. Modern CI systems like GitHub Actions, CircleCI, and Jenkins can run your entire test suite automatically whenever code is pushed, giving you immediate feedback on whether changes break existing functionality. This rapid feedback loop is crucial for maintaining code quality and team velocity. +``` + +Outline: +``` +## Optimizing Development Workflow + +### I. Implement Continuous Integration (CI) + A. Automates testing and validation + B. Catches bugs before production + C. Tools: GitHub Actions, CircleCI, Jenkins + D. Benefits + 1. Immediate feedback on code changes + 2. Maintains code quality + 3. Improves team velocity +``` + +**Condensing examples**: +- Keep representative examples +- Note "e.g., X, Y, Z" for lists +- Preserve specific data/statistics +- Summarize case studies briefly + +**Extracting arguments**: +- Identify claims vs. supporting evidence +- Note cause-effect relationships +- Capture comparisons and contrasts +- Preserve key definitions + +### 4. Outline Types + +Choose the appropriate format based on purpose: + +**A. 
Topic Outline** (uses phrases, no complete sentences)
+```
+## Improving Team Communication
+- Challenges in remote work
+  - Time zone differences
+  - Lack of informal interaction
+- Solutions
+  - Async communication tools
+  - Regular video check-ins
+  - Documentation culture
+```
+
+**B. Sentence Outline** (complete sentences for each point)
+```
+## Improving Team Communication
+- Remote work creates communication challenges
+  - Time zones make synchronous meetings difficult
+  - Teams lose informal "water cooler" conversations
+- Several solutions address these challenges
+  - Async tools allow flexible communication
+  - Regular video calls maintain connection
+  - Strong documentation reduces dependency on meetings
+```
+
+**C. Concept Map Outline** (shows relationships)
+```
+Team Communication
+├── Problems
+│   ├── Remote challenges
+│   └── Tool fragmentation
+├── Solutions
+│   ├── Technology (Slack, Zoom)
+│   └── Processes (documentation)
+└── Results
+    ├── Better alignment
+    └── Higher productivity
+```
+
+**D. 
Presentation Outline** (formatted for slides)
+```
+Slide 1: Title - "Improving Team Communication"
+
+Slide 2: The Problem
+• Remote work communication challenges
+• Tool overload and fragmentation
+• Lost context and alignment
+
+Slide 3: Solution - Tools
+• Async: Slack, Notion
+• Sync: Zoom, Teams
+• Documentation: Confluence, Wiki
+
+Slide 4: Solution - Processes
+• Daily async stand-ups
+• Weekly team syncs
+• Documentation-first culture
+
+Slide 5: Results
+• 40% reduction in meetings
+• Faster onboarding
+• Better team alignment
+```
+
+## What to Include
+
+### Essential Elements
+
+✓ **Main arguments and claims**
+✓ **Key supporting evidence** (data, examples)
+✓ **Important definitions or concepts**
+✓ **Action items or recommendations**
+✓ **Significant examples or case studies** (summarized)
+✓ **Structural transitions** (how sections connect)
+
+### Elements to Condense or Omit
+
+✗ Narrative fluff and throat-clearing
+✗ Redundant explanations
+✗ Extended anecdotes (keep one-line summary if important)
+✗ Rhetorical flourishes
+✗ Transitional prose
+✗ Repetitive examples
+✗ Purely atmospheric writing
+
+## Special Cases
+
+### Lists and Enumerations
+
+Blog post lists often translate directly:
+
+```
+The three pillars of DevOps are:
+1. Automation
+2. Collaboration
+3. Continuous improvement
+```
+
+Outline:
+```
+### Three Pillars of DevOps
+1. Automation
+2. Collaboration
+3. Continuous improvement
+```
+
+### How-To Content
+
+Preserve step-by-step instructions:
+
+```
+## How to Set Up CI/CD
+1. Choose CI platform (GitHub Actions, CircleCI)
+2. Create configuration file
+   a. Define build steps
+   b. Set up test runners
+   c. Configure deployment
+3. Test the pipeline
+4. 
Monitor and iterate +``` + +### Comparison Content + +Use tables or parallel structure: + +``` +## Tool Comparison +### Asana +- Strengths: Complex projects, dependencies +- Weaknesses: Learning curve +- Best for: Large teams + +### Trello +- Strengths: Visual, simple +- Weaknesses: Limited features +- Best for: Small teams, visual thinkers +``` + +## Outline Depth Guidelines + +**High-level outline** (skim/overview): +- Major sections only +- 1-2 levels of hierarchy +- Key points without details + +**Medium outline** (standard): +- Main sections and subsections +- 2-3 levels of hierarchy +- Key examples noted + +**Detailed outline** (comprehensive): +- All sections and subsections +- 3-4 levels of hierarchy +- Examples, evidence, specific details included +- Could recreate most of the original content + +## Example Transformation + +**Blog Post Excerpt**: +``` +# Why Your Team Needs Better Documentation + +Every developer has been there: you join a new project, and the only documentation is a README that says "See John for questions"β€”but John left the company six months ago. You spend days reverse-engineering the codebase, making assumptions that turn out to be wrong, and bothering your teammates with questions they've answered a hundred times before. + +Good documentation isn't just nice to have; it's a force multiplier for your team. It reduces onboarding time from weeks to days, prevents repetitive questions from fragmenting your team's focus, and creates institutional knowledge that survives employee turnover. + +The key is making documentation a first-class part of your workflow, not an afterthought. Here's how to build a documentation culture that actually works... +``` + +**Outline**: +``` +# Building a Documentation Culture + +## I. The Problem + A. Poor documentation is common + - Example: "Ask John" (who left months ago) + B. Consequences + 1. Slow onboarding (weeks instead of days) + 2. Wasted time reverse-engineering + 3. 
Repetitive questions interrupt work + 4. Knowledge loss when people leave + +## II. Benefits of Good Documentation + A. Team force multiplier + B. Faster onboarding + C. Reduced interruptions + D. Preserved institutional knowledge + +## III. Implementation Strategy + [To be filled from subsequent content] + A. Make documentation first-class + B. Integrate into workflow + C. ... +``` + +## Output Format + +Provide: +1. The structured outline in the requested format (topic/sentence/concept map/presentation) +2. Appropriate hierarchy and indentation +3. Key points, examples, and evidence captured +4. Optional: Brief note on the blog post's main argument/purpose + +## Usage Notes + +When providing content, optionally specify: +- **Outline type**: Topic, sentence, concept map, or presentation format +- **Depth level**: High-level, medium, or detailed +- **Purpose**: Study notes, presentation prep, content planning, structural analysis +- **Special focus**: Emphasize certain sections or aspects + +Share the blog post content you'd like to convert into an outline. diff --git a/commands/writing-and-editing/format-conversion/blog-post-to-tech-doc.md b/commands/writing-and-editing/format-conversion/blog-post-to-tech-doc.md new file mode 100644 index 0000000000000000000000000000000000000000..bf9d8ffd1388a15213e2c4a51875f34279fa48a1 --- /dev/null +++ b/commands/writing-and-editing/format-conversion/blog-post-to-tech-doc.md @@ -0,0 +1,90 @@ +# Blog Post to Technical Documentation Converter + +Convert informal, narrative blog posts into structured, professional technical documentation. This command transforms conversational content into precise, reference-quality documentation. 
+ +## Your Task + +Take the user's blog post content and convert it into technical documentation with these characteristics: + +### Structure Transformation + +- **Remove narrative elements**: Strip out personal anecdotes, casual introductions, and storytelling +- **Create clear hierarchy**: Use proper heading levels (H1, H2, H3) for logical organization +- **Add standardized sections**: Include Overview, Prerequisites, Installation, Configuration, Usage, Examples, Troubleshooting, etc. +- **Extract actionable content**: Convert prose into step-by-step instructions where applicable +- **Organize information**: Group related concepts into logical sections + +### Style Changes + +- **Imperative voice for instructions**: "Run the command" not "You can run the command" +- **Remove conversational tone**: Eliminate phrases like "Let's dive in," "Pretty cool, right?", "So here's the thing" +- **Technical precision**: Replace casual explanations with accurate technical terms +- **Objective language**: Remove opinions and subjective statements unless factual +- **Concise writing**: Eliminate fluff and redundancy +- **Consistent terminology**: Use the same terms throughout (no synonyms for key concepts) + +### Content Enhancement + +- **Add code blocks**: Format any code examples with proper syntax highlighting +- **Create tables**: Convert lists of options/parameters into structured tables +- **Include warnings/notes**: Use callouts for important information +- **Add cross-references**: Link related sections together +- **Specify prerequisites**: Clearly state requirements upfront +- **Version information**: Include version numbers and compatibility details if mentioned + +### Documentation Elements to Add + +1. **Synopsis/Overview**: Brief description of what this documents +2. **Table of Contents**: If the document is substantial +3. **Installation/Setup**: Step-by-step setup instructions +4. **Configuration**: Available options and parameters +5. 
**Examples**: Practical usage examples with expected output +6. **API Reference**: If applicable, document functions/methods +7. **Troubleshooting**: Common issues and solutions +8. **Additional Resources**: Links to related documentation + +## Example Transformation + +**Blog Post Style**: +``` +Hey everyone! Today I want to share this awesome trick I discovered for speeding up your Docker builds. So basically, you know how Docker builds can be super slow sometimes? Well, I found that if you use multi-stage builds, you can cut down the time significantly. Let me show you how I do it... +``` + +**Technical Documentation Style**: +``` +# Multi-Stage Docker Builds + +## Overview +Multi-stage builds reduce Docker image build time and final image size by separating the build environment from the runtime environment. + +## Prerequisites +- Docker Engine 17.05 or later +- Basic understanding of Dockerfiles + +## Implementation + +### Syntax +Multi-stage builds use multiple `FROM` statements in a single Dockerfile: + +```dockerfile +FROM node:16 AS builder +WORKDIR /app +COPY package*.json ./ +RUN npm ci + +FROM node:16-alpine +WORKDIR /app +COPY --from=builder /app/node_modules ./node_modules +``` + +### Benefits +- Reduced build time through layer caching +- Smaller final images (runtime-only dependencies) +- Cleaner separation of build and runtime concerns +``` + +## Output Format + +Provide the converted technical documentation in clean markdown format, ready for inclusion in a documentation repository or wiki. + +Paste the blog post content you'd like to convert to technical documentation. 
diff --git a/commands/writing-and-editing/format-conversion/notes-to-outline.md b/commands/writing-and-editing/format-conversion/notes-to-outline.md new file mode 100644 index 0000000000000000000000000000000000000000..61679d31f6531bae3421d5e2f93aa7f07dc854ff --- /dev/null +++ b/commands/writing-and-editing/format-conversion/notes-to-outline.md @@ -0,0 +1,415 @@ +# Notes to Outline Converter + +Transform raw, unstructured notes into clear, organized outlines ready for writing, presenting, or further development. Perfect for turning brainstorming sessions, research notes, meeting minutes, or scattered thoughts into coherent structure. + +## Your Task + +Take the user's messy, unorganized notes and create a well-structured outline that: +- Identifies main themes and topics +- Groups related ideas logically +- Establishes clear hierarchy +- Eliminates redundancy +- Reveals gaps or areas needing development +- Creates a framework for further work + +## Note Analysis Process + +### 1. Initial Assessment + +Examine the notes to understand: +- **Format**: Bullets, paragraphs, fragments, mixed? +- **Content type**: Research, brainstorming, meeting notes, lecture notes? +- **Completeness**: Are there gaps or unclear references? +- **Redundancy**: Are ideas repeated in different ways? +- **Implicit structure**: Is there a hidden organization waiting to emerge? + +### 2. Identify Major Themes + +Look for: +- **Recurring topics**: Ideas mentioned multiple times +- **Natural groupings**: Related concepts that belong together +- **Implied categories**: Unstated but obvious organizational schemes +- **Key questions**: Central problems or inquiries +- **Action items**: Things to be done + +**Example Raw Notes**: +``` +- need to improve onboarding +- new hires confused about tools +- documentation is outdated +- mentor program? 
+- Sarah mentioned buddy system at previous company +- first week should include setup day +- update wiki with current process +- video tutorials for tools +- checklist for managers +- 30-60-90 day goals +``` + +**Identified Themes**: +- Current problems (confusion, outdated docs) +- Solutions - documentation (wiki, videos) +- Solutions - social (mentoring, buddy system) +- Solutions - structure (checklists, timeline) + +### 3. Group and Organize + +Cluster related notes together: + +**Grouping Strategies**: + +**A. Chronological**: When notes involve sequence or timeline +``` +## Onboarding Timeline +### Week 1 +- Setup day for tools and access +- Meet buddy/mentor +- ... +``` + +**B. Categorical**: When notes fall into distinct categories +``` +## Improving Onboarding + +### Documentation Issues +- Wiki outdated +- Need video tutorials +- ... + +### Social Support +- Buddy system (per Sarah) +- Mentor program +- ... +``` + +**C. Problem-Solution**: When notes address challenges +``` +## Onboarding Improvements + +### Problems +- New hires confused about tools +- Documentation outdated +- ... + +### Solutions +- Update wiki +- Create video tutorials +- Implement buddy system +- ... +``` + +**D. Priority-based**: When notes have implied importance +``` +## Onboarding Project + +### High Priority (Do First) +- Update documentation +- Create setup checklist + +### Medium Priority +- Develop video tutorials +- Formalize buddy system + +### Long-term +- Build comprehensive mentor program +``` + +### 4. Establish Hierarchy + +Determine levels of importance: + +**Top Level (H2 ##)**: Major topics or sections +**Second Level (H3 ###)**: Subtopics or main points +**Third Level (bullets)**: Supporting details +**Fourth Level (sub-bullets)**: Specific examples or notes + +**Hierarchical Principles**: +- General to specific +- Big picture to details +- Main ideas to supporting points +- Categories to items + +### 5. 
Fill in Structure + +Add necessary organizational elements: + +**Add clarifying headers**: +Transform vague groupings into clear labels + +Raw: "Stuff about tools" +Outline: "### Required Tools and Setup" + +**Create transitions**: +Show how sections relate + +**Sequence indicators**: +Number steps when order matters + +**Context notes**: +Add brief clarifying notes in [brackets] for unclear items + +### 6. Identify Gaps + +Note where information is missing: + +``` +## Marketing Strategy + +### Target Audience +- Small businesses +- [Need: specific industry focus?] +- Budget range: $10k-$50k annually + +### Channels +- Social media +- [Need: which platforms?] +- [Need: content strategy] +``` + +## Handling Different Note Types + +### Brainstorming Notes (Stream of Consciousness) + +**Characteristics**: +- Random order +- Repeated ideas in different forms +- Half-thoughts and fragments +- Questions mixed with ideas + +**Processing**: +1. Extract distinct ideas (merge duplicates) +2. Group by theme +3. Organize by logical flow +4. 
Convert questions into topics to explore + +**Example**: + +Raw notes: +``` +maybe we should redesign the homepage, it's confusing, users bounce too fast, what if we had a video?, demo video could help, or maybe just clearer CTA, yeah the CTA is buried, make it pop more, colors?, also the nav is complicated, too many options, simplify nav, homepage video idea again, could show product in action +``` + +Outline: +``` +## Homepage Redesign + +### Problems Identified +- High bounce rate +- Confusing layout +- Unclear call-to-action +- Complicated navigation + +### Proposed Solutions + +#### Call-to-Action Improvements +- Increase prominence (color, size, position) +- Clearer messaging + +#### Navigation Simplification +- Reduce number of options +- Better organization + +#### Add Demo Video +- Show product in action +- Place prominently on homepage +- [Need: decide on length, content] +``` + +### Research Notes (Facts and Sources) + +**Characteristics**: +- References and citations +- Quotes and excerpts +- Mixed sources +- Factual data + +**Processing**: +1. Group by topic/theme +2. Note sources [in brackets] +3. Distinguish facts from interpretation +4. 
Create evidence-based structure + +**Example**: + +Raw notes: +``` +Smith 2020: "productivity dropped 15% in first 3 months of remote work" +Johnson article: managers reported communication difficulties +Remote work study - 67% positive about flexibility +Same study - 45% felt isolated +Virtual meetings increased 300% (TechCrunch) +Smith also mentioned adaptation period - after 6 months productivity recovered +``` + +Outline: +``` +## Remote Work Research Findings + +### Productivity Impact +- Initial drop: 15% decline in first 3 months [Smith 2020] +- Recovery: Returns to baseline after 6-month adaptation [Smith 2020] + +### Communication Challenges +- Managers report difficulties [Johnson] +- Virtual meetings increased 300% [TechCrunch] + +### Employee Satisfaction +- Flexibility: 67% positive [Remote Work Study] +- Isolation concern: 45% felt isolated [Remote Work Study] +``` + +### Meeting Notes (Discussions and Decisions) + +**Characteristics**: +- Mix of discussion and decisions +- Action items +- Questions raised +- Multiple speakers/perspectives + +**Processing**: +1. Separate decisions from discussion +2. Extract action items +3. Note owners for tasks +4. Flag open questions + +**Example**: + +Raw notes: +``` +Discussed Q4 budget - need to cut 10% +Sarah suggested reducing conference budget +Mike disagrees, conferences are important for leads +Decision: cut travel budget by 15% instead +Action: Tom will revise budget proposal +Question: what about hiring freeze? 
+Will discuss hiring in next meeting +Lisa will prepare hiring impact analysis +Also talked about new CRM - postponed to January +``` + +Outline: +``` +## Q4 Budget Meeting Notes + +### Decisions Made +- Cut travel budget by 15% +- Postpone CRM implementation to January + +### Action Items +- [ ] Tom: Revise budget proposal with travel cuts +- [ ] Lisa: Prepare hiring impact analysis for next meeting + +### Discussion Points + +#### Conference Budget +- Sarah: Proposed reducing conference budget +- Mike: Conferences generate important leads +- Resolution: Decided on travel cuts instead + +### Open Questions +- Hiring freeze consideration + - Deferred to next meeting + - Awaiting Lisa's impact analysis +``` + +### Lecture/Learning Notes (Educational Content) + +**Characteristics**: +- Definitions and concepts +- Examples +- Formulas or procedures +- Key points from instructor + +**Processing**: +1. Extract main concepts +2. Group supporting details under each +3. Note examples +4. Highlight important definitions + +## Outline Quality Checklist + +A good outline should: + +βœ“ **Logical organization**: Ideas flow naturally +βœ“ **Clear hierarchy**: Levels of importance are obvious +βœ“ **Complete coverage**: All important notes included +βœ“ **No redundancy**: Duplicate ideas merged +βœ“ **Actionable structure**: Ready to use for next steps +βœ“ **Gaps identified**: Missing information noted +βœ“ **Consistent formatting**: Uniform structure throughout + +## Output Formats + +Offer the appropriate format based on purpose: + +**For writing**: Topic or sentence outline with narrative flow +**For presentations**: Slide-ready outline with bullets +**For project planning**: Action-oriented with tasks and owners +**For study**: Concept-based with definitions and examples + +## Example Transformation + +**Raw Notes**: +``` +CI/CD important for modern dev +continuous integration = automated testing when code pushed +prevents bugs from reaching production +GitHub Actions 
popular +also CircleCI +Jenkins more complex but powerful +CD = continuous deployment, automates releases +reduces manual errors in deployment +need good test coverage for CI to work +heard you need 80% coverage minimum +faster feedback on code quality +catches integration issues early +whole team needs to commit to practice +doesn't work if people skip the pipeline +``` + +**Organized Outline**: +``` +# CI/CD in Modern Development + +## What is CI/CD? + +### Continuous Integration (CI) +- Definition: Automated testing triggered when code is pushed +- Purpose: Prevent bugs from reaching production +- Requirement: Good test coverage (minimum 80%) + +### Continuous Deployment (CD) +- Definition: Automated release process +- Purpose: Reduce manual deployment errors + +## Benefits +- Faster feedback on code quality +- Early detection of integration issues +- Reduced manual errors +- [Note: More benefits to research?] + +## Popular Tools +- GitHub Actions (most popular) +- CircleCI +- Jenkins (complex but powerful) + +## Success Factors +- Team commitment required +- Cannot skip pipeline +- Adequate test coverage essential (80%+ recommended) + +## [Need to add] +- Implementation steps +- Best practices +- Common pitfalls +``` + +## Usage Notes + +When providing notes, optionally specify: +- **Purpose**: What will you do with the outline? (write, present, plan, study) +- **Format preference**: Topic outline, sentence outline, action items, etc. +- **Depth**: High-level or detailed +- **Special instructions**: Emphasize certain themes, specific organization + +Share your raw notes and I'll transform them into a clear, organized outline. 
diff --git a/commands/writing-and-editing/format-conversion/outline-to-blog-post.md b/commands/writing-and-editing/format-conversion/outline-to-blog-post.md new file mode 100644 index 0000000000000000000000000000000000000000..d1f63f73d2c1579f81024766f95a1d8e7af99b7f --- /dev/null +++ b/commands/writing-and-editing/format-conversion/outline-to-blog-post.md @@ -0,0 +1,211 @@ +# Outline to Blog Post Converter + +Transform a structured outline into a fully-written, engaging blog post with natural flow, transitions, examples, and narrative voice. Perfect for taking planning notes and turning them into publishable content. + +## Your Task + +Take the user's outline and expand it into a complete blog post with: +- Engaging introduction and conclusion +- Smooth transitions between sections +- Developed paragraphs with examples and details +- Natural, conversational tone +- Proper structure and flow + +## Expansion Process + +### 1. Analyze the Outline Structure + +Identify: +- Main topic and purpose +- Key points and subpoints +- Logical flow and organization +- Implied examples or evidence +- Target audience and tone + +### 2. Create an Engaging Introduction + +Transform the outline title into a full introduction that: + +**Opening Hook Options**: +- Ask a compelling question +- Share a relatable scenario or anecdote +- Present a surprising statistic or fact +- Make a bold statement +- Describe a common problem + +**Introduction Elements**: +- Hook to grab attention +- Context and background +- Preview of main points +- Thesis or central argument +- Value proposition (what reader will gain) + +**Example**: + +Outline: "How to Improve Team Productivity - Benefits, Tools, Best Practices" + +Introduction: +``` +Is your team drowning in meetings, drowning in Slack messages, and still somehow missing deadlines? You're not alone. 
According to recent studies, knowledge workers spend only 39% of their time on actual productive work, with the rest lost to coordination, communication overhead, and context switching.
+
+Improving team productivity isn't about working harder or longer hours—it's about working smarter with the right systems and tools. In this guide, we'll explore the concrete benefits of productivity optimization, the tools that actually make a difference, and the best practices that turn scattered teams into coordinated, high-performing units.
+
+Whether you're managing a remote team, leading a startup, or just trying to get more done with less chaos, these strategies will help you reclaim lost hours and build momentum.
+```
+
+### 3. Develop Each Section
+
+For each outline point:
+
+**Turn bullets into paragraphs**:
+- Expand the concept with explanation
+- Add relevant examples or scenarios
+- Include supporting evidence or data
+- Use transitional phrases
+- Maintain conversational tone
+
+**Add depth**:
+- Explain the "why" behind each point
+- Show practical application
+- Contrast with alternatives
+- Address potential objections
+- Share insights or tips
+
+**Example**:
+
+Outline point: "Use project management tools - Asana, Trello, Monday"
+
+Expanded:
+```
+**Centralize Work with Project Management Tools**
+
+The foundation of team productivity is knowing what needs to be done and who's doing it. Project management tools like Asana, Trello, and Monday.com solve the chaos of scattered to-do lists, endless email threads, and "wait, who was supposed to handle that?" moments.
+
+These platforms provide a single source of truth for all project-related information. Instead of searching through email archives or Slack history to find out task status, team members can simply check the board. 
Asana excels for complex projects with dependencies, Trello works beautifully for visual thinkers who love kanban boards, and Monday.com offers the most customization for teams with specific workflow needs.
+
+The real productivity gain comes from reducing the "coordination tax"—the time spent figuring out what everyone else is doing. When work is visible and organized, teams spend less time in status meetings and more time actually making progress.
+```
+
+### 4. Create Natural Transitions
+
+Connect sections with transitional phrases and bridge sentences:
+
+**Transition Techniques**:
+- "Now that we've covered X, let's look at Y..."
+- "This brings us to another important consideration..."
+- "Building on this foundation..."
+- "Of course, tools alone aren't enough. You also need..."
+- "But here's where it gets interesting..."
+- "Beyond just using tools, successful teams also..."
+
+### 5. Add Examples Throughout
+
+Transform abstract points into concrete illustrations:
+
+**Types of Examples**:
+- Real-world scenarios
+- Before/after comparisons
+- Case studies or success stories
+- Step-by-step walkthroughs
+- Common mistakes vs. best practices
+- "Imagine if..." thought experiments
+
+### 6. Write a Strong Conclusion
+
+Transform closing outline points into a conclusion that:
+
+**Conclusion Elements**:
+- Summarize key takeaways (not just repetition)
+- Reinforce the main message
+- Provide actionable next steps
+- End with a memorable statement or call-to-action
+- Encourage engagement (comments, questions, sharing)
+
+**Example**:
+
+Outline: "Conclusion - Start with one tool, build habits, measure results"
+
+Expanded:
+```
+## Making Productivity Improvements Stick
+
+Improving team productivity is a journey, not a destination. The key is to start small and build momentum. Rather than overwhelming your team with a complete process overhaul, pick one tool or practice from this guide and implement it thoroughly. 
Maybe that's adopting a project management platform, or establishing "no-meeting Wednesdays," or implementing daily stand-ups.
+
+Once that first change becomes habit—give it at least 3-4 weeks—layer on the next improvement. Measure your results: Are projects shipping faster? Are team members feeling less stressed? Is communication clearer?
+
+Remember, the goal isn't perfect productivity. It's sustainable productivity that lets your team do great work without burning out. Start today with one small change, and watch the compound effects transform how your team works.
+
+What productivity challenge is your team facing right now? Share in the comments below—I'd love to hear what's working (and what's not) for you.
+```
+
+## Outline Interpretation Guidelines
+
+### Understanding Outline Shorthand
+
+**Bullets with keywords**: Expand into full explanations
+- "Benefits - speed, cost, quality" → Full paragraph on each benefit
+
+**Questions**: Answer them thoroughly
+- "Why does this matter?" 
→ Dedicated section explaining significance
+
+**Single words**: Interpret in context
+- "Examples" → Create 2-3 detailed examples
+- "Tools" → Describe each tool with use cases
+
+**Numbered lists**: Can become subsections or enumerated points in prose
+
+**Parenthetical notes**: Often contain important details to weave in
+- "Testing (manual vs automated)" → Compare both approaches
+
+## Tone and Style
+
+Match the blog post tone to the outline's implied purpose:
+
+**Professional/Educational**:
+- Clear, authoritative voice
+- Evidence-based claims
+- Structured and organized
+
+**Casual/Conversational**:
+- First-person perspective
+- Friendly, approachable language
+- Personal anecdotes
+
+**Technical/Tutorial**:
+- Step-by-step clarity
+- Code examples or screenshots
+- Precision in terminology
+
+**Inspirational/Thought Leadership**:
+- Big-picture thinking
+- Visionary language
+- Provocative questions
+
+## Length Guidance
+
+**Short outline (5-10 points)**: 800-1200 word blog post
+**Medium outline (10-20 points)**: 1200-2000 word blog post
+**Long outline (20+ points)**: 2000-3000+ word blog post
+
+Adjust based on complexity and detail in outline.
+
+## Output Format
+
+Provide the complete blog post in markdown format, ready for publication. Include:
+- Compelling title (if not specified in outline)
+- Full introduction
+- Well-developed body sections with headings
+- Examples and supporting details
+- Smooth transitions
+- Strong conclusion
+
+## Usage Notes
+
+When providing your outline, optionally specify:
+- **Target length**: Word count or reading time
+- **Tone**: Professional, casual, technical, etc.
+- **Audience**: Who will read this?
+- **Publication**: Where will this be published? (affects formatting/style)
+- **Special requirements**: Specific examples to include, sections to emphasize
+
+Share your outline and I'll transform it into a complete blog post. 
diff --git a/commands/writing-and-editing/format-conversion/readme-to-blog-post.md b/commands/writing-and-editing/format-conversion/readme-to-blog-post.md new file mode 100644 index 0000000000000000000000000000000000000000..2cded4ec1534ec5cf1179badb7dca3015c523649 --- /dev/null +++ b/commands/writing-and-editing/format-conversion/readme-to-blog-post.md @@ -0,0 +1,134 @@ +# README to Blog Post Converter + +Transform GitHub README files into engaging blog posts that introduce and explain your project to a broader audience. Perfect for project announcements, tutorials, and sharing your work beyond the repository. + +## Your Task + +Take the user's README content and convert it into a compelling blog post that tells the story of the project while maintaining technical accuracy. + +### Content Transformation + +- **Expand the introduction**: Turn the brief project description into a narrative about what problem you're solving and why it matters +- **Add motivation**: Explain the backstory - why you built this, what gap it fills +- **Expand use cases**: Take bullet points and turn them into detailed scenarios +- **Show, don't just tell**: Convert feature lists into demonstrations of value +- **Add personality**: Transform formal documentation into your authentic voice +- **Include journey**: Share the development process, challenges overcome, lessons learned +- **Contextualize**: Explain where this fits in the broader ecosystem + +### Style Changes + +- **Engaging headline**: Convert "Project-Name" to "Introducing Project-Name: How I Built a Tool to Solve X" +- **Story arc**: Create a beginning (problem), middle (solution), and end (results) +- **First-person narrative**: Share your perspective as the creator +- **Enthusiasm**: Show genuine excitement about what you've built +- **Accessibility**: Explain technical concepts for a general tech audience +- **Visual descriptions**: Paint a picture of what the project does +- **Conversational flow**: Make it feel like sharing 
your project at a meetup + +### Blog Post Structure + +1. **Compelling Introduction** + - Hook that grabs attention + - The problem this solves + - Why you built it + - What readers will learn + +2. **The Story** + - Background and motivation + - The journey of building it + - Key decisions and why you made them + +3. **What It Does** + - Core features explained with context + - How it works (high-level architecture) + - What makes it different/special + +4. **How to Use It** + - Getting started (expanded from installation) + - Real-world examples (expanded from basic usage) + - Tips and best practices + +5. **Technical Deep Dive** (optional) + - Interesting implementation details + - Challenges and solutions + - Technology choices + +6. **Future Plans** + - Roadmap items + - How others can contribute + - Vision for the project + +7. **Conclusion** + - Summary of key points + - Call to action (try it, star it, contribute) + - Links to repository and resources + +### Enhancement Elements + +- **Code examples**: Expand minimal examples into complete, runnable scenarios +- **Screenshots/demos**: Suggest where visual elements would help +- **Comparisons**: Contrast with existing solutions +- **Testimonials**: If you have user feedback, incorporate it +- **Metrics**: Share interesting stats (downloads, stars, usage) +- **Learnings**: Share what you learned building it + +## Example Transformation + +**README Style**: +``` +# DocConverter + +A CLI tool for converting documentation formats. + +## Features +- Converts Markdown to HTML +- Supports custom templates +- Fast processing + +## Installation +```bash +npm install -g docconverter +``` + +## Usage +```bash +docconverter input.md output.html +``` +``` + +**Blog Post Style**: +``` +# Building DocConverter: A Fast, Template-Driven Documentation Tool + +After spending countless hours manually converting documentation between formats for different clients, I decided enough was enough. 
I needed a tool that could handle Markdown to HTML conversions with custom branding for each project - and I needed it to be fast. That's how DocConverter was born. + +## The Problem I Was Trying to Solve + +Here's a scenario many technical writers face: you've written comprehensive documentation in Markdown (because let's face it, Markdown is fantastic for writing). But now you need to deliver it as branded HTML for a client's website, then convert it again differently for another client, and maybe export it to PDF for a third. Each conversion means different styling, different templates, different requirements. + +Existing tools either did too much (heavy documentation frameworks that required complete project restructuring) or too little (basic converters with no customization). I wanted something in between: a simple CLI tool that could take my Markdown, apply a custom template, and spit out beautiful HTML in seconds. + +## What I Built + +DocConverter is a lightweight command-line tool that does exactly that. You give it a Markdown file, point it at a template, and it produces cleanly formatted HTML. The magic is in the templating system - you can create reusable templates for different clients or projects, and the tool handles the rest. + +What makes it special: +- **Speed**: Processes large documentation sets in seconds, not minutes +- **Flexibility**: Custom templates mean your output looks exactly how you want +- **Simplicity**: One command, no configuration files unless you want them + +Let me show you how it works in practice... 
+ +[Content continues with detailed examples, use cases, and technical insights] +``` + +## Tone Guidelines + +- **Authentic**: Write in your natural voice +- **Excited**: Share your enthusiasm for what you've built +- **Helpful**: Focus on solving readers' problems +- **Humble**: Acknowledge limitations and areas for improvement +- **Inviting**: Make readers want to try it and contribute + +Share the README content you'd like to convert into a project announcement blog post. diff --git a/commands/writing-and-editing/format-conversion/tech-doc-to-blog-post.md b/commands/writing-and-editing/format-conversion/tech-doc-to-blog-post.md new file mode 100644 index 0000000000000000000000000000000000000000..3c6c5e2e42b4c733bac9b7ffd295eb2f3be8f5cc --- /dev/null +++ b/commands/writing-and-editing/format-conversion/tech-doc-to-blog-post.md @@ -0,0 +1,94 @@ +# Technical Documentation to Blog Post Converter + +Transform dry, formal technical documentation into engaging, accessible blog posts that maintain technical accuracy while being enjoyable to read. Perfect for turning reference docs into tutorial-style content. 
+ +## Your Task + +Take the user's technical documentation and convert it into a blog post with these characteristics: + +### Narrative Transformation + +- **Add conversational introduction**: Create an engaging hook that explains why this matters +- **Tell a story**: Frame technical content within a narrative (problem → solution → outcome) +- **Include context**: Explain the "why" behind technical decisions +- **Personal touch**: Use first-person perspective where appropriate ("I'll show you," "We'll explore") +- **Reader engagement**: Address the reader directly ("You might be wondering...") +- **Natural flow**: Create smooth transitions between sections + +### Style Changes + +- **Conversational tone**: Make it feel like explaining to a colleague over coffee +- **Explain jargon**: Define technical terms in accessible language +- **Varied sentence structure**: Mix short punchy sentences with longer explanatory ones +- **Active, friendly voice**: "Let's build this together" instead of "The system shall be configured" +- **Relatable examples**: Use real-world analogies and practical scenarios +- **Show enthusiasm**: Express genuine interest in the topic +- **Encourage experimentation**: "Try this," "Give it a shot," "See what happens" + +### Content Enhancement + +- **Opening hook**: Start with a problem, question, or interesting observation +- **Background section**: Explain the context and motivation +- **Break down complexity**: Take dense technical sections and explain step-by-step +- **Add commentary**: Include insights, tips, and "gotchas" throughout +- **Real-world examples**: Show practical applications beyond basic syntax +- **Visual language**: Describe what the reader will see/experience +- **Conclusion**: Wrap up with key takeaways and next steps + +### Blog Post Elements to Add + +1. **Catchy title**: Transform formal titles into engaging headlines +2. **Introduction**: Hook the reader and preview what they'll learn +3.
**Subheadings**: Make them descriptive and interesting, not just functional +4. **Transitional phrases**: "Now that we've covered X, let's look at Y" +5. **Tips and tricks**: Share insider knowledge and best practices +6. **Common pitfalls**: Warn about mistakes to avoid +7. **Conclusion**: Summarize and provide clear next steps +8. **Call to action**: Encourage comments, sharing, or trying the technique + +## Example Transformation + +**Technical Documentation Style**: +``` +# Multi-Stage Docker Builds + +## Overview +Multi-stage builds reduce Docker image build time and final image size by separating the build environment from the runtime environment. + +## Prerequisites +- Docker Engine 17.05 or later +- Basic understanding of Dockerfiles + +## Implementation +Multi-stage builds use multiple `FROM` statements in a single Dockerfile. +``` + +**Blog Post Style**: +``` +# How I Cut My Docker Build Times in Half with Multi-Stage Builds + +Have you ever sat there waiting for your Docker builds to finish, watching the progress bar crawl, and thinking "there has to be a better way"? I definitely have. After one too many coffee breaks spent waiting for builds, I discovered multi-stage builds, and honestly, it's been a game-changer for my workflow. + +## What's the Problem? + +Traditional Dockerfiles include everything: your build tools, development dependencies, test frameworks, and the actual runtime code all bundled together. It's like moving to a new apartment and bringing not just your furniture, but also all the tools you used to assemble it, the instruction manuals, and the packaging materials. Your final image ends up bloated with stuff you simply don't need in production. + +## Enter Multi-Stage Builds + +Here's the cool part: multi-stage builds let you use multiple `FROM` statements in a single Dockerfile. Think of it as having separate workspaces for building your app and running it. 
You build everything in one stage with all your dev tools, then copy just the finished artifacts to a clean final stage. + +Let me show you how this works in practice... +``` + +## Output Format + +Provide the converted blog post in markdown format, ready for publication on a blog platform or content management system. + +## Tone Guidelines + +- **Friendly, not condescending**: Assume intelligent readers who want clarity +- **Enthusiastic, not hyperbolic**: Show genuine interest without overselling +- **Helpful, not prescriptive**: Offer guidance while acknowledging alternatives +- **Professional, not stuffy**: Maintain credibility while being approachable + +Share the technical documentation you'd like to convert into an engaging blog post. diff --git a/commands/writing-and-editing/format-conversion/text-to-informal-meeting-notes.md b/commands/writing-and-editing/format-conversion/text-to-informal-meeting-notes.md new file mode 100644 index 0000000000000000000000000000000000000000..ed8733ec9b4d606b000a79ae7e0070b2a06f5603 --- /dev/null +++ b/commands/writing-and-editing/format-conversion/text-to-informal-meeting-notes.md @@ -0,0 +1,548 @@ +# Text to Informal Meeting Notes Converter + +Transform any text, formal documents, or structured content into casual, readable meeting notes with a conversational tone. Perfect for converting formal meeting minutes, dense reports, technical documentation, or verbose content into concise, accessible notes that are easy to skim and share. 
+ +## Your Task + +Take the user's input text and create informal meeting notes that: +- Capture key points in conversational language +- Use casual, approachable tone +- Maintain essential information while reducing formality +- Make content scannable with bullets and short paragraphs +- Highlight action items and decisions clearly +- Feel like notes a colleague would share, not official documentation + +## Informal Notes Style Guide + +### Tone Characteristics + +**Formal β†’ Informal Transformations**: +- "It was determined that..." β†’ "We decided..." +- "The committee recommends..." β†’ "The team thinks we should..." +- "Attendees were presented with..." β†’ "We looked at..." +- "Consensus was reached regarding..." β†’ "Everyone agreed on..." +- "Action item assigned to..." β†’ "Sarah's going to..." + +**Key Principles**: +- **Conversational**: Write like you're telling a colleague what happened +- **Concise**: Cut unnecessary words and formality +- **Active**: Use active voice, personal pronouns +- **Direct**: Get to the point quickly +- **Human**: It's okay to note tone, reactions, humor + +### Structure + +**Typical Informal Notes Format**: +``` +# [Meeting Topic] - [Date] + +**Who was there**: [Names] +**How long**: [Duration or time] + +## Quick Summary +[2-3 sentences: What was this meeting about? What did we accomplish?] + +## Main Points + +### [Topic] +- [Key point in casual language] +- [Decision or outcome] +- [Notable discussion or concern] + +## Action Items +- [ ] **[Name]** - [What they're doing] - by [when] +- [ ] **[Name]** - [What they're doing] - by [when] + +## Other Stuff +- [Parking lot items] +- [Follow-up meetings] +- [Random useful info] +``` + +### Language Patterns + +**Use These Casual Phrases**: +- "We talked about..." +- "Main takeaway is..." +- "Quick update on..." +- "Still figuring out..." +- "Big win:" +- "Heads up:" +- "FYI..." 
+- "Next up:" +- "Good news:" / "Bad news:" +- "TLDR:" + +**Avoid These Formal Phrases**: +- "It has been brought to our attention..." +- "The aforementioned..." +- "Pursuant to..." +- "Heretofore..." +- "In accordance with..." +- "With respect to..." +- "It should be noted that..." + +## Content Transformation Process + +### 1. Identify Core Information + +Extract the essentials: +- **Who**: Key participants +- **What**: Topics discussed +- **Why**: Context or reason for meeting +- **Decisions**: What was decided +- **Actions**: What needs to happen next +- **Concerns**: Problems or blockers raised + +### 2. Convert Formal to Casual + +**Bureaucratic β†’ Conversational**: + +Formal: +``` +"The project steering committee convened to review the status of deliverables and assess adherence to the established timeline. Significant concerns were raised regarding resource allocation." +``` + +Informal: +``` +"We checked in on how the project's going. Timeline looks tight and we're worried we don't have enough people." +``` + +**Technical β†’ Accessible**: + +Formal: +``` +"Implementation of the caching layer resulted in a 43% reduction in API response latency, thereby improving user experience metrics." +``` + +Informal: +``` +"Added caching and now the API is 43% faster. Users should notice things loading quicker." +``` + +### 3. Structure for Skimmability + +**Use Visual Hierarchy**: +- **Bold** for names, key terms, deadlines +- Bullets for lists +- Short paragraphs (2-3 lines max) +- Headers to break up content +- Emoji sparingly for emphasis (if appropriate for audience) + +**Before (wall of text)**: +``` +We discussed the marketing campaign and Sarah mentioned that the budget might be an issue because we've already spent a lot on the previous campaign and Lisa said that we could probably reallocate from the conference budget since that event was cancelled and Tom agreed but wanted to make sure we still have enough for the product launch in Q4. 
+``` + +**After (scannable)**: +``` +### Marketing Campaign Budget + +- Sarah flagged that we're running low on budget +- Already spent a lot on the last campaign +- **Solution**: Lisa suggested moving money from the cancelled conference +- Tom's on board but wants to make sure we save enough for the Q4 product launch +``` + +### 4. Highlight Actions Prominently + +**Clear Action Format**: +``` +## Action Items +- [ ] **Sarah** - Get budget approval from finance - by Friday +- [ ] **Tom** - Draft campaign timeline - by next meeting +- [ ] **Lisa** - Research vendors for conference booth - by EOW +``` + +**Not This**: +``` +Sarah will obtain the necessary approvals from the finance department. Tom is responsible for preparing a draft timeline. Lisa has been assigned to conduct vendor research. +``` + +### 5. Add Context When Needed + +Include relevant background without over-explaining: + +**Good Context**: +``` +### Server Migration + +Quick context: We're moving to AWS because our current host keeps having outages. + +- Migration planned for weekend of June 15 +- Dev team will prep everything Friday +- Expect 2-3 hours downtime Saturday morning +- Tom's leading it, Mike backing him up +``` + +**Too Much Context**: +``` +"As previously discussed in meetings dating back to Q1, and following extensive analysis of our infrastructure requirements and cost-benefit analysis of various cloud providers..." +``` + +## Handling Different Input Types + +### From Formal Meeting Minutes + +**Input Characteristics**: +- Stiff, bureaucratic language +- "The committee" rather than "we" +- Passive voice throughout +- Overly detailed + +**Processing**: +1. Convert passive to active voice +2. Replace formal terms with casual equivalents +3. Cut unnecessary procedural language +4. Add conversational connectors +5. Simplify complex sentences + +**Example Transformation**: + +Formal Minutes: +``` +"The Finance Committee convened on October 15, 2024, at 2:00 PM. 
The meeting was called to order by the Chairperson. The minutes from the previous meeting were reviewed and approved without amendment. The Committee reviewed the Q3 financial statements. It was noted that expenses exceeded projections by 12%. After considerable discussion, it was determined that cost reduction measures should be implemented. A motion was made and seconded to freeze discretionary spending. The motion carried unanimously." +``` + +Informal Notes: +``` +# Finance Check-in - Oct 15 + +**Who**: Finance team +**When**: 2pm + +## TLDR +We went over budget in Q3 (by 12%), so we're freezing non-essential spending until things stabilize. + +## What Happened + +### Q3 Numbers +- Expenses were 12% over what we planned +- Team talked through where the overages happened +- Main culprits: hiring happened faster than expected, some unexpected software costs + +### What We're Doing About It +- **Decision**: Freeze on discretionary spending +- Everyone voted yes +- Essential stuff still gets approved, but we need to be careful + +## Action Items +- [ ] **[Finance lead]** - Send out guidelines on what counts as "discretionary" - by EOW +- [ ] **Department heads** - Review your planned Q4 spending and flag anything questionable + +## Next Meeting +[Date] - Check how the freeze is working +``` + +### From Technical Reports + +**Input Characteristics**: +- Dense jargon +- Detailed methodology +- Complex sentence structure +- Academic tone + +**Processing**: +1. Extract practical implications +2. Simplify technical terms (or explain briefly) +3. Focus on "so what?" / impact +4. Use analogies where helpful +5. Cut methodology details (or move to end) + +**Example**: + +Technical: +``` +"The regression analysis indicates a statistically significant correlation (r=0.73, p<0.01) between user engagement metrics and feature utilization. The analysis employed a multivariate approach controlling for user tenure and subscription tier." 
+``` + +Informal: +``` +"We found that users who try more features tend to stick around longer. Pretty clear connection there - the more stuff they use, the more engaged they are. (This held true even when we accounted for how long they've been customers and what plan they're on.)" +``` + +### From Email Chains + +**Input Characteristics**: +- Multiple threads mixed +- Chronological but confusing +- Various tangents +- Some info outdated by later messages + +**Processing**: +1. Extract final state (ignore superseded info) +2. Group by topic, not chronology +3. Note who had which concerns +4. Capture resolution, not debate +5. Flag unresolved threads + +**Example**: + +Email Chain (abbreviated): +``` +Thread: "RE: RE: FW: Q4 Planning" +- Mike: "Should we do X?" +- Sarah: "What about Y instead?" +- Tom: "Y could work but expensive" +- Lisa: "Actually we did Y last year, didn't go well" +- Mike: "OK so back to X. Sarah, can you price it out?" +- Sarah: "Sure, I'll have numbers by Thursday" +``` + +Informal Notes: +``` +# Q4 Planning Discussion (from email thread) + +## Decision +Going with approach X for Q4 + +## How We Got There +- Mike initially suggested X +- Sarah pitched Y as alternative +- Tom noted Y is pricey +- Lisa remembered we tried Y last year - didn't work out +- Circled back to X + +## Next Steps +- [ ] **Sarah** - Price out approach X - by Thursday +``` + +### From Presentations or Slide Decks + +**Input Characteristics**: +- Bullet points and short phrases +- Missing connective context +- Assumes visual was explained +- Abbreviated + +**Processing**: +1. Add back the spoken context +2. Explain charts/graphs in words +3. Flesh out abbreviated bullets +4. Note reactions or discussion +5. 
Connect ideas that are separate on slides + +## Special Note Formats + +### Quick Update Format (5-10 min meetings) + +``` +# Quick Sync - [Date] + +## Status +- [Name]: Working on [X], on track / ahead / behind +- [Name]: Finished [Y], starting [Z] +- [Name]: Blocked on [issue] + +## Heads Up +- [Important thing people should know] + +## Next Time +- [Topic for next sync] +``` + +### Stand-up Style Format + +``` +# Team Standup - [Date] + +## What We Did Yesterday +- [Name]: [accomplishment] +- [Name]: [accomplishment] + +## Today's Plan +- [Name]: [focus] +- [Name]: [focus] + +## Blockers +- [Name]: [what's stuck] +- **Action**: [how we're unsticking it] +``` + +### One-on-One Format + +``` +# [Manager] + [Employee] - [Date] + +## [Employee's] Updates +- [Current work] +- [Wins] +- [Challenges] + +## We Talked About +- [Topic]: [key points] +- [Topic]: [key points] + +## Manager's Feedback/Input +- [Advice, direction, or context shared] + +## Action Items +- [ ] **[Name]** - [action] + +## Next 1:1 +[Date] - [any specific topics to cover] +``` + +### Decision Log Format + +``` +# [Decision Topic] - [Date] + +## What We Decided +[Clear statement of decision] + +## Why +- [Reason 1] +- [Reason 2] +- [Reason 3] + +## Who Decided +[Names] - [unanimous / majority / etc.] 
+ +## Alternatives We Considered +- **Option A**: [why we didn't choose this] +- **Option B**: [why we didn't choose this] + +## What Happens Now +- [Immediate next steps] +- [Timeline] +- [Who's responsible] + +## If This Doesn't Work +[Revisit conditions or plan B] +``` + +## Quality Checklist + +Good informal meeting notes should: + +✓ **Capture essentials**: All important info is there +✓ **Easy to skim**: Can get the gist in 30 seconds +✓ **Conversational**: Reads naturally, not stiff +✓ **Action-oriented**: Clear what needs to happen next +✓ **Accurate**: Doesn't lose meaning in translation +✓ **Appropriate detail**: Enough context, not overwhelming +✓ **Shareable**: Others can read and understand without being there + +## Common Pitfalls to Avoid + +**Too Casual**: +- Don't lose critical details for the sake of brevity +- Maintain professionalism (even if casual) +- Avoid unclear abbreviations +- Don't editorialize or add opinion + +**Not Casual Enough**: +- Don't keep corporate speak +- Avoid passive voice +- Cut the fluff +- Use contractions (it's, we're, that's) + +**Structure Issues**: +- Don't bury action items +- Avoid walls of text +- Group related points together +- Use consistent formatting + +## Example Transformations + +### Example 1: Committee Minutes → Team Notes + +**Formal Input**: +``` +The Product Development Committee met on October 20, 2024. Present: Smith (Chair), Johnson, Williams, Martinez. The Committee reviewed the proposed feature roadmap for Q1 2025. Martinez presented research indicating strong customer demand for mobile application enhancements. Following discussion, the Committee voted to prioritize mobile features over desktop enhancements for Q1. Williams raised concerns regarding development resources. Smith agreed to escalate the resource issue to executive leadership.
+``` + +**Informal Output**: +``` +# Product Planning - Oct 20 + +**Who was there**: Smith, Johnson, Williams, Martinez + +## TLDR +We're focusing on mobile features for Q1 instead of desktop stuff. Resource concerns but we're escalating that. + +## What Happened + +### Q1 Roadmap Decision +- Martinez showed research - customers really want better mobile features +- Team discussed mobile vs desktop priorities +- **Decision**: Mobile features are the focus for Q1 +- Desktop improvements pushed to Q2 + +### Resource Concerns +- Williams flagged that we might not have enough people to do this well +- Valid concern - we're stretched thin +- Smith's taking this to exec team to see about getting more help + +## Action Items +- [ ] **Smith** - Talk to execs about getting more dev resources +- [ ] **Martinez** - Share the customer research deck with broader team +- [ ] **[TBD]** - Start planning Q1 mobile feature breakdown + +## Next Meeting +[Date] - Review exec feedback on resources, finalize Q1 sprint planning +``` + +### Example 2: Technical Report β†’ Casual Summary + +**Technical Input**: +``` +System Performance Analysis - Q3 2024 + +Methodology: Analysis of application logs, database query performance metrics, and user session data over a 90-day period. + +Findings: +- Database query latency increased 34% over the quarter +- 73% of slow queries originate from the reporting module +- Peak load times (9-11 AM) show 2.3x normal latency +- User abandonment rate correlated with page load times >3 seconds + +Recommendations: +1. Implement query optimization for reporting module +2. Consider read replica for reporting workload +3. Implement caching layer for frequently accessed data +4. Investigate database indexing strategy +``` + +**Informal Output**: +``` +# Q3 Performance Check - What We Found + +## The Problem +Our app has been getting slower, especially in the mornings (9-11 AM). When pages take more than 3 seconds to load, people give up and close the app. 
+ +## Why It's Happening +- Database queries are taking 34% longer than they did at the start of Q3 +- The culprit: reporting features (73% of slow queries come from there) +- Morning rush (everyone checking reports at once) makes it way worse + +## What We Should Do + +### Quick Wins +1. **Optimize reporting queries** - clean up the inefficient ones +2. **Add caching** - stop hitting the database for stuff that doesn't change much + +### Bigger Changes (Q4) +3. **Set up a read replica** - move all reporting to a separate database copy +4. **Review our indexes** - probably missing some key ones + +## Next Steps +- [ ] **[Dev lead]** - Prioritize which reporting queries to optimize first +- [ ] **[Engineering]** - Scope out read replica implementation +- [ ] **[TBD]** - Audit current database indexes + +## Impact +If we do this right, we should see faster load times and fewer people bouncing. Target: get everything under 2 seconds, even during morning rush. +``` + +## Usage Notes + +When providing text to convert, optionally specify: +- **Audience**: Team, department, company-wide (affects tone) +- **Detail level**: High-level summary vs. detailed notes +- **Formality level**: How casual you want to go +- **Special focus**: Emphasize actions, decisions, or context +- **Format preference**: Standard notes, standup style, decision log + +Share your text and I'll transform it into casual, readable meeting notes. diff --git a/commands/writing-and-editing/format-conversion/text-to-meeting-agenda.md b/commands/writing-and-editing/format-conversion/text-to-meeting-agenda.md new file mode 100644 index 0000000000000000000000000000000000000000..da9fcead4e97d34c6be130f4ce48e93d16904cff --- /dev/null +++ b/commands/writing-and-editing/format-conversion/text-to-meeting-agenda.md @@ -0,0 +1,521 @@ +# Text to Meeting Agenda Converter + +Transform any text, notes, or ideas into a professional meeting agenda with clear objectives, time allocations, and actionable structure. 
Perfect for converting brainstorming notes, email threads, project updates, or informal discussions into structured meeting plans. + +## Your Task + +Take the user's input text and create a well-structured meeting agenda that: +- Establishes clear meeting objectives +- Organizes topics in logical order +- Allocates appropriate time for each item +- Identifies discussion leaders or presenters +- Includes space for decisions and action items +- Creates a professional, actionable framework + +## Agenda Creation Process + +### 1. Analyze Input Content + +Examine the text to identify: +- **Core topics**: Main subjects to be discussed +- **Implicit goals**: What needs to be achieved? +- **Stakeholders**: Who needs to be involved? +- **Decisions needed**: What requires group consensus? +- **Information sharing**: What needs to be communicated? +- **Priority levels**: What's most important/urgent? + +### 2. Define Meeting Structure + +**Standard Meeting Agenda Format**: +``` +# [Meeting Title] + +**Date**: [To be scheduled] +**Time**: [Duration] +**Location**: [Physical/Virtual] +**Attendees**: [Key participants] + +## Objectives +[What we aim to accomplish] + +## Agenda Items + +### 1. [Topic] (X minutes) + - **Lead**: [Person] + - **Purpose**: [Discussion/Decision/Information] + - [Key points or questions] + +### 2. [Topic] (X minutes) + ... + +## Pre-Meeting Preparation +[What attendees should review or prepare] + +## Expected Outcomes +[What we'll have by end of meeting] +``` + +### 3. Organize Topics + +**Recommended Order**: + +1. **Opening** (5 min) + - Welcome and introductions (if needed) + - Review agenda and objectives + - Confirm timekeeper and note-taker + +2. **Information Items** (10-15 min) + - Quick updates + - Announcements + - Status reports + +3. **Discussion Items** (20-30 min) + - Topics requiring input + - Brainstorming + - Problem-solving + +4. 
**Decision Items** (15-20 min) + - Items requiring votes or approval + - Resource allocation + - Priority setting + +5. **Action Planning** (10 min) + - Assign action items + - Set deadlines + - Identify owners + +6. **Closing** (5 min) + - Recap decisions and actions + - Confirm next meeting + - Open questions + +### 4. Allocate Time + +**Time Allocation Guidelines**: + +- **Total meeting**: Aim for 30-60 minutes (attention span sweet spot) +- **Quick updates**: 2-3 minutes each +- **Discussion topics**: 10-15 minutes each +- **Major decisions**: 15-20 minutes each +- **Action planning**: Always allocate 10 minutes at end + +**Time Management Tips**: +- Add 5-minute buffer for overruns +- Front-load critical items +- Mark "parking lot" items (discuss if time permits) +- Consider breaking very long agendas into multiple meetings + +### 5. Add Context and Details + +**For Each Agenda Item Include**: + +- **Topic title**: Clear, descriptive +- **Time allocation**: Realistic estimate +- **Purpose**: Information/Discussion/Decision +- **Lead**: Who's driving this item +- **Context**: Brief background (1-2 lines) +- **Desired outcome**: What success looks like + +**Example Item**: +``` +### 3. Q4 Marketing Budget Review (15 minutes) + - **Lead**: Sarah (Marketing Director) + - **Purpose**: Decision + - **Context**: Need to finalize Q4 budget allocation across channels + - **Key Questions**: + - Should we increase social media spend? + - Conference ROI analysis + - New tool requests + - **Desired Outcome**: Approved budget breakdown by channel +``` + +## Handling Different Input Types + +### From Email Threads + +**Input Characteristics**: +- Multiple topics mixed together +- Various people's concerns +- Some decisions already made +- Action items scattered throughout + +**Processing**: +1. Extract distinct topics +2. Group related concerns +3. Note decisions that need confirmation +4. List outstanding questions +5. 
Identify who needs to be present + +**Example Input**: +``` +Thread about product launch: +- Mike: "We need to finalize the launch date" +- Sarah: "Marketing materials aren't ready yet" +- Lisa: "What about the pricing strategy?" +- Tom: "Should we do a beta period first?" +- Mike: "Also need to discuss support training" +``` + +**Agenda Output**: +``` +# Product Launch Planning Meeting + +## Objectives +Finalize launch timeline, pricing, and pre-launch activities + +## Agenda Items + +### 1. Launch Date Decision (10 min) + - **Lead**: Mike + - **Dependencies**: Marketing readiness + - **Decision needed**: Firm launch date + +### 2. Marketing Materials Status (10 min) + - **Lead**: Sarah + - **Purpose**: Information & Discussion + - **Question**: What's blocking completion? + +### 3. Pricing Strategy (15 min) + - **Lead**: Lisa + - **Purpose**: Decision + - **Options to discuss**: [To be prepared by Lisa] + +### 4. Beta Period Consideration (10 min) + - **Lead**: Tom + - **Purpose**: Discussion + - **Decision**: Go/no-go on beta + +### 5. Support Team Training (10 min) + - **Lead**: Mike + - **Purpose**: Planning + - **Outcome**: Training timeline and approach +``` + +### From Brainstorming Notes + +**Input Characteristics**: +- Stream of consciousness +- Mix of problems and solutions +- Varying priority levels +- Some tangential ideas + +**Processing**: +1. Identify core themes +2. Separate problems from solutions +3. Prioritize by importance/urgency +4. Flag items for later discussion +5. Group related items + +### From Project Updates + +**Input Characteristics**: +- Status information +- Blockers and challenges +- Upcoming milestones +- Resource needs + +**Processing**: +1. Organize by project or workstream +2. Highlight blockers requiring discussion +3. Identify decisions needed +4. Note information-only items +5. Schedule strategic vs. 
tactical items + +### From Informal Discussions + +**Input Characteristics**: +- Conversational tone +- Missing details +- Implied priorities +- Unclear outcomes + +**Processing**: +1. Extract concrete topics +2. Clarify ambiguous points +3. Define what needs to be decided +4. Add missing structure +5. Identify information gaps + +## Meeting Types & Formats + +### Status/Check-in Meeting (30 min) +``` +## Objectives +Quick sync on progress, blockers, and priorities + +## Agenda +1. Round-robin updates (15 min) + - Each person: 2-3 minutes + - Current focus + - Blockers + +2. Priority alignment (10 min) + - This week's focus + - Coordination needs + +3. Action items (5 min) +``` + +### Decision-Making Meeting (45-60 min) +``` +## Objectives +Make informed decisions on [specific topics] + +## Agenda +1. Context & Background (10 min) + - Current situation + - Why we need to decide now + +2. Options Review (15 min) + - Option A: Pros/Cons + - Option B: Pros/Cons + - Option C: Pros/Cons + +3. Discussion (15 min) + - Questions and concerns + - Additional considerations + +4. Decision (10 min) + - Vote or consensus + - Document decision + +5. Next Steps (5 min) + - Action items + - Communication plan +``` + +### Planning Meeting (60+ min) +``` +## Objectives +Develop plan for [project/initiative] + +## Agenda +1. Goals & Success Criteria (10 min) +2. Current State Assessment (15 min) +3. Brainstorm Approach (20 min) +4. Timeline Development (15 min) +5. Resource Allocation (15 min) +6. Risk Identification (10 min) +7. Action Items & Owners (10 min) +``` + +### Problem-Solving Meeting (45 min) +``` +## Objectives +Resolve [specific problem] + +## Agenda +1. Problem Definition (10 min) + - What's happening? + - Impact and urgency + +2. Root Cause Analysis (10 min) + - Why is this happening? + - Contributing factors + +3. Solution Brainstorming (15 min) + - Possible approaches + - Quick wins vs. long-term fixes + +4. 
Solution Selection (5 min) + - Decide on approach + - Resource requirements + +5. Action Planning (5 min) + - Immediate next steps + - Owners and timeline +``` + +## Quality Checklist + +A good meeting agenda should have: + +✓ **Clear objectives**: Purpose is obvious +✓ **Realistic timing**: Time allocations are achievable +✓ **Logical flow**: Topics in sensible order +✓ **Defined outcomes**: What success looks like +✓ **Assigned roles**: Who's leading what +✓ **Pre-work identified**: What to prepare beforehand +✓ **Action-oriented**: Focus on decisions and next steps +✓ **Appropriate length**: Not too long for attention spans + +## Special Considerations + +### Virtual Meetings +- Add 5-10% more time for technical issues +- Include Zoom/Teams link +- Note time zones if participants are distributed +- Build in more breaks for longer meetings +- Consider async alternatives for information items + +### Recurring Meetings +- Review and rotate agenda items +- Include "continuous improvement" item monthly +- Periodically question if meeting is still needed +- Adjust format based on feedback + +### Large Group Meetings +- Limit discussion items (use breakouts instead) +- Focus on information sharing and high-level decisions +- Send detailed materials in advance +- Record for those who can't attend + +### Executive/Leadership Meetings +- Front-load strategic items +- Minimize status updates (send in advance) +- Focus on decisions only they can make +- Include market/competitive intelligence + +## Example Transformations + +### Example 1: From Scattered Ideas + +**Input**: +``` +Need to talk about the website redesign, also the budget is getting tight, maybe we should discuss cutting some features, Sarah has concerns about the timeline, dev team needs to give an update, we should probably look at the competitor analysis too, and there's that vendor contract that expires soon +``` + +**Agenda Output**: +``` +# Website Redesign Planning Meeting + +**Date**: 
[To be scheduled] +**Duration**: 60 minutes +**Attendees**: Project team, Sarah, Dev lead, Budget owner + +## Objectives +- Review project status and timeline concerns +- Address budget constraints and scope decisions +- Align on priorities moving forward + +## Agenda Items + +### 1. Welcome & Objectives (3 min) + +### 2. Development Team Update (10 min) + - **Lead**: Dev Lead + - **Purpose**: Information + - Current progress and blockers + +### 3. Timeline Concerns Discussion (12 min) + - **Lead**: Sarah + - **Purpose**: Discussion + - Specific concerns and impact assessment + +### 4. Budget Status & Scope Review (20 min) + - **Lead**: Budget Owner + - **Purpose**: Discussion & Decision + - Current spend vs. allocation + - Feature prioritization exercise + - What can we cut if needed? + +### 5. Competitive Analysis Review (10 min) + - **Lead**: [TBD] + - **Purpose**: Information + - Key findings and implications + +### 6. Vendor Contract Status (5 min) + - **Lead**: [TBD] + - **Purpose**: Decision + - Renewal decision and timing + +### 7. Action Items & Next Steps (5 min) + +## Pre-Meeting Preparation +- Dev team: Prepare status update with metrics +- Budget owner: Current spend report +- [TBD]: Compile competitive analysis highlights +- [TBD]: Vendor contract terms and renewal options + +## Expected Outcomes +- Clear understanding of project health +- Scope decisions if budget requires cuts +- Action plan for addressing timeline concerns +``` + +### Example 2: From Meeting Request + +**Input**: +``` +Hey team, can we meet to go over the Q4 goals? I think we need to make sure everyone's aligned on priorities. Also want to hear from each team about their plans and any dependencies. 
+``` + +**Agenda Output**: +``` +# Q4 Goals Alignment Meeting + +**Date**: [To be scheduled] +**Duration**: 45 minutes +**Attendees**: All team leads + [Manager name] + +## Objectives +- Review and confirm Q4 organizational goals +- Share team-specific plans and priorities +- Identify cross-team dependencies +- Ensure alignment across teams + +## Agenda Items + +### 1. Q4 Goals Overview (10 min) + - **Lead**: [Manager] + - **Purpose**: Information + - Company/department objectives for Q4 + - Success metrics and targets + - Key constraints or considerations + +### 2. Team Plans - Round Robin (20 min) + - **Format**: Each team lead - 4 minutes + - **Share**: + - Top 3 priorities for Q4 + - Key deliverables and timing + - Resource needs + - Dependencies on other teams + + Teams to present: + - Engineering + - Product + - Marketing + - Customer Success + - [Others as applicable] + +### 3. Dependency Mapping (10 min) + - **Lead**: [Facilitator] + - **Purpose**: Discussion + - Identify critical path items + - Flag potential conflicts or bottlenecks + - Coordination needs + +### 4. Alignment Check & Questions (5 min) + - **Purpose**: Discussion + - Concerns or misalignments? + - Resource conflicts? + - Open questions? + +### 5. Next Steps (5 min) + - Communication plan + - Follow-up meetings if needed + - Action items + +## Pre-Meeting Preparation +Each team lead should prepare: +- 3-5 slide deck or one-pager with Q4 plan +- List of dependencies on other teams +- Any resource concerns or requests + +## Expected Outcomes +- Shared understanding of Q4 priorities +- Documented dependencies +- Action items for cross-team coordination +- Identified conflicts resolved or escalated +``` + +## Usage Notes + +When providing text to convert, optionally specify: +- **Meeting type**: Status, planning, decision-making, brainstorming +- **Duration preference**: 30/45/60/90 minutes +- **Attendee count**: Affects format and interaction style +- **Formality level**: Executive vs. team vs. 
working session +- **Virtual or in-person**: Affects timing and format + +Share your text and I'll transform it into a professional, actionable meeting agenda. diff --git a/commands/writing-and-editing/format-conversion/text-to-meeting-minutes.md b/commands/writing-and-editing/format-conversion/text-to-meeting-minutes.md new file mode 100644 index 0000000000000000000000000000000000000000..37aca77e3d6ee1a16f4e1187d3dc692d592916e5 --- /dev/null +++ b/commands/writing-and-editing/format-conversion/text-to-meeting-minutes.md @@ -0,0 +1,697 @@ +# Text to Meeting Minutes Converter + +Transform any text, notes, or recordings into formal, professional meeting minutes with complete documentation, proper structure, and official tone. Perfect for converting informal notes, brainstorming sessions, casual discussions, or recorded conversations into official records suitable for corporate governance, compliance, or archival purposes. + +## Your Task + +Take the user's input text and create formal meeting minutes that: +- Follow professional meeting documentation standards +- Use formal, objective language +- Maintain complete record of discussions and decisions +- Document attendance, motions, votes, and resolutions +- Create legally defensible documentation +- Preserve institutional knowledge + +## Meeting Minutes Standards + +### Core Components + +**Complete Meeting Minutes Include**: + +1. **Header Information** + - Organization or committee name + - Meeting type (regular, special, emergency) + - Date, time, and location + - Presiding officer + +2. **Attendance** + - Members present + - Members absent (excused/unexcused) + - Guests or non-members present + +3. **Opening Procedures** + - Call to order + - Verification of quorum + - Approval of previous minutes + - Approval of agenda + +4. **Body of Minutes** + - Reports presented + - Matters discussed + - Motions made + - Votes taken + - Decisions reached + +5. 
**Closing Procedures** + - Announcements + - Next meeting date + - Adjournment + +6. **Certification** + - Signature line for secretary/recorder + - Date of approval + +### Formal Language Standards + +**Objective, Third-Person Voice**: +- "The Committee discussed..." (not "We discussed...") +- "Ms. Johnson reported..." (not "Sarah said...") +- "It was moved and seconded..." (formal motion language) + +**Precise Documentation**: +- Record motions verbatim +- Note vote tallies exactly +- Document dissenting opinions +- Capture specific commitments + +**Avoid**: +- Personal opinions +- Subjective descriptions +- Informal language +- Emotional characterizations +- Unnecessary detail about debate + +## Standard Minute Format + +### Full Format Template + +``` +[ORGANIZATION NAME] +[COMMITTEE/BOARD NAME] +[TYPE] MEETING MINUTES + +Date: [Full date] +Time: [Start time] - [End time] +Location: [Physical address or virtual platform] +Presiding: [Name, Title] + +PRESENT: +[List of attendees with titles] + +ABSENT: +[List with notation if excused] + +GUESTS: +[Non-member attendees] + +--- + +I. CALL TO ORDER +The meeting was called to order at [time] by [Name, Title]. + +II. ESTABLISHMENT OF QUORUM +The Secretary confirmed that a quorum was present. + +III. APPROVAL OF PREVIOUS MINUTES +The minutes of the [date] meeting were presented. [Name] moved to approve the minutes as presented. [Name] seconded the motion. The motion carried unanimously. + +IV. APPROVAL OF AGENDA +The agenda was presented. [Name] moved to approve the agenda with [modifications if any]. [Name] seconded. The motion carried. + +V. REPORTS + +A. [Report Type/Officer] +[Summary of report content] + +B. [Report Type/Officer] +[Summary of report content] + +VI. OLD BUSINESS + +A. [Topic] +[Discussion summary] +[Motion if applicable] +[Vote result] + +VII. NEW BUSINESS + +A. [Topic] +[Discussion summary] +[Motion if applicable] +[Vote result] + +VIII. ANNOUNCEMENTS +[Any announcements] + +IX. 
NEXT MEETING +The next meeting is scheduled for [date, time, location]. + +X. ADJOURNMENT +There being no further business, [Name] moved to adjourn the meeting. [Name] seconded. The motion carried. The meeting was adjourned at [time]. + +Respectfully submitted, + +_______________________________ +[Name], Secretary +[Date] + +Approved: + +_______________________________ +[Name], Chairperson +[Date] +``` + +## Content Transformation Process + +### 1. Extract Factual Information + +Identify objective facts: +- **Who**: Attendees and roles +- **When**: Precise timing +- **Where**: Meeting location +- **What**: Topics, decisions, votes +- **Actions**: Assignments and deadlines + +**Ignore**: +- How people felt +- Tone of discussions +- Side conversations +- Speculation or hypotheticals + +### 2. Convert to Formal Language + +**Informal → Formal Transformations**: + +| Informal | Formal | +|----------|--------| +| "We talked about..." | "The Committee discussed..." | +| "Sarah thinks we should..." | "Ms. Johnson recommended..." | +| "Everyone agreed..." | "The motion carried unanimously." | +| "Mike's going to..." | "Mr. Smith was assigned responsibility for..." | +| "We decided..." | "It was resolved that..." | + +**Casual Discussion → Minutes Format**: + +Informal Notes: +``` +Sarah mentioned that the budget is looking tight. We went over the Q3 numbers and it looks like we're about 12% over what we planned. Tom suggested we freeze discretionary spending. Everyone thought that was a good idea. Mike will send out guidelines about what that means. +``` + +Formal Minutes: +``` +VI. FINANCIAL REPORT + +Ms. Johnson, Treasurer, presented the Q3 financial report. The report indicated that expenditures exceeded budgeted amounts by approximately 12%. Following discussion of contributing factors and potential remediation strategies, Mr. Roberts moved that discretionary spending be frozen until budget realignment is achieved. Ms. Williams seconded the motion. 
The motion carried unanimously. + +Mr. Davis was directed to prepare and distribute guidelines clarifying the scope of discretionary spending freeze within five business days. +``` + +### 3. Document Motions Properly + +**Standard Motion Format**: +``` +[Name] moved that [exact motion language]. [Name] seconded the motion. + +[Summary of discussion if significant] + +Upon vote: +- In favor: [number or names] +- Opposed: [number or names] +- Abstaining: [number or names] + +The motion [carried/failed]. +``` + +**Types of Motions to Record**: +- Main motions (proposals for action) +- Amendments to motions +- Procedural motions (table, postpone, etc.) +- Points of order +- Appeals + +**Examples**: + +Simple Motion: +``` +Mr. Thompson moved to approve the proposed vendor contract with ABC Company for IT services not to exceed $50,000 annually. Ms. Rodriguez seconded. The motion carried with 7 in favor, 1 opposed. +``` + +Amended Motion: +``` +Ms. Chen moved to allocate $25,000 to the marketing initiative. Mr. Johnson seconded. + +Mr. Wilson moved to amend the motion to increase the allocation to $30,000. Ms. Lee seconded the amendment. The amendment carried unanimously. + +The main motion, as amended, was then voted upon and carried with 8 in favor, 0 opposed. +``` + +Motion That Failed: +``` +Dr. Martinez moved to postpone consideration of the facility expansion until the April meeting. Ms. Thompson seconded. Following discussion, the motion failed with 3 in favor, 6 opposed. +``` + +### 4. Summarize Reports and Discussions + +**Report Summary Guidelines**: +- Include presenter name and title +- Highlight key data or findings +- Note significant implications +- Keep summary concise but complete +- Focus on information relevant to decisions + +**Good Report Summary**: +``` +B. Membership Report + +Ms. Anderson, Membership Chair, reported that membership stands at 247 members, representing a 12% increase over the same period last year. 
New member retention rate is 85%, exceeding the target of 80%. The report noted concerns regarding declining participation in events among long-term members. The Committee received the report and directed the Membership Chair to develop recommendations for improving long-term member engagement for consideration at the next meeting. +``` + +**Discussion Summary Guidelines**: +- Focus on substantive points made +- Don't attribute every comment +- Summarize themes, not verbatim debate +- Include dissenting views if significant +- Note consensus or division + +**Good Discussion Summary**: +``` +A. Website Redesign Proposal + +The Committee reviewed the proposal for website redesign presented by the Communications Committee. Discussion centered on budget implications, timeline feasibility, and alignment with organizational strategic goals. + +Concerns were raised regarding the proposed timeline of six months, with several members noting this might be optimistic given staff capacity constraints. The proposed budget of $75,000 was generally viewed as reasonable. + +Members discussed whether the redesign should prioritize mobile optimization or enhanced member portal functionality. It was noted that both are important but resource constraints may require prioritization. + +[Motion and vote would follow] +``` + +### 5. Handle Special Situations + +**Confidential Matters**: +``` +VII. EXECUTIVE SESSION + +At [time], [Name] moved to enter executive session to discuss personnel matters. [Name] seconded. The motion carried. + +The Committee entered executive session at [time]. All guests were excused. + +[For public minutes] +The Committee discussed [general topic] in executive session. No action was taken. + +[Or if action taken] +Following executive session, the Committee returned to open session at [time]. [Name] moved to [action taken]. [Name] seconded. The motion carried [vote tally]. 
+ +[Detailed confidential matters are recorded in sealed minutes] +``` + +**Conflicts of Interest**: +``` +B. ABC Company Contract Approval + +Mr. Thompson declared a potential conflict of interest, noting that his spouse is employed by ABC Company. Mr. Thompson recused himself from discussion and voting on this matter and left the room at [time]. + +[Discussion and vote without Mr. Thompson's participation] + +Mr. Thompson returned to the meeting at [time]. +``` + +**Point of Order or Procedural Issues**: +``` +During discussion of the budget proposal, Ms. Wilson raised a point of order, stating that the motion under consideration was not properly seconded. The Chair ruled that the motion had been properly seconded by Mr. Davis. Ms. Wilson appealed the ruling of the Chair. The appeal was seconded by Ms. Lee. The Committee voted to sustain the ruling of the Chair by a vote of 8 in favor, 2 opposed. +``` + +**Tabled or Deferred Items**: +``` +C. Marketing Strategy Review + +Mr. Johnson moved to table consideration of the marketing strategy pending receipt of additional data from the Marketing Committee. Ms. Anderson seconded. The motion carried. The item will be placed on the agenda for the next meeting. +``` + +## Handling Different Input Types + +### From Informal Meeting Notes + +**Input Characteristics**: +- Casual language +- Incomplete sentences +- Implied information +- Missing procedural details + +**Processing**: +1. Add proper procedural framework +2. Convert all language to formal tone +3. Infer and document proper motions +4. Add standard sections (call to order, adjournment) +5. Clarify ambiguous references + +**Example Transformation**: + +Informal Notes: +``` +Team Meeting - Oct 15 +Present: Sarah, Mike, Tom, Lisa + +Went over Q4 goals. Everyone agreed we should focus on mobile features instead of desktop. Tom's worried about resources. Sarah will talk to execs. + +Decided to push the conference to next quarter. 
+ +Next meeting: Oct 29 +``` + +Formal Minutes: +``` +PRODUCT DEVELOPMENT COMMITTEE +REGULAR MEETING MINUTES + +Date: October 15, 2024 +Time: 2:00 PM - 3:15 PM +Location: Conference Room B +Presiding: Sarah Johnson, Committee Chair + +PRESENT: +Sarah Johnson, Chair +Michael Davis, Member +Thomas Wilson, Member +Lisa Anderson, Member + +ABSENT: +None + +--- + +I. CALL TO ORDER +The meeting was called to order at 2:00 PM by Ms. Johnson, Chair. + +II. Q4 STRATEGIC PRIORITIES + +The Committee reviewed proposed priorities for Q4 development efforts. Following presentation of customer research data and resource capacity analysis, discussion focused on whether to prioritize mobile application enhancements or desktop feature development. + +Ms. Anderson moved that the Committee adopt mobile application enhancement as the primary Q4 development priority, with desktop features deferred to Q1 2025. Mr. Davis seconded the motion. + +Discussion included consideration of development resource availability. Mr. Wilson expressed concern regarding current resource allocation and capacity to deliver quality mobile enhancements within the quarter. + +The motion carried unanimously. + +Ms. Johnson was directed to escalate the resource capacity concern to executive leadership and report findings at the next Committee meeting. + +III. INDUSTRY CONFERENCE PARTICIPATION + +Mr. Wilson moved to defer participation in the November industry conference to Q1 2025. Ms. Anderson seconded. Following brief discussion regarding budget and staff availability considerations, the motion carried unanimously. + +IV. NEXT MEETING +The next regular meeting is scheduled for October 29, 2024, at 2:00 PM in Conference Room B. + +V. ADJOURNMENT +There being no further business, Mr. Davis moved to adjourn. Mr. Wilson seconded. The meeting was adjourned at 3:15 PM. 
+ +Respectfully submitted, + +_______________________________ +[Secretary Name] +[Date] +``` + +### From Recorded Conversations + +**Input Characteristics**: +- Verbatim speech (ums, ahs, restarts) +- Tangents and side discussions +- Unclear who said what +- Lots of irrelevant content + +**Processing**: +1. Identify speakers and establish names/titles +2. Extract substantive content only +3. Organize chronologically by agenda topic +4. Synthesize rambling discussions into clear summaries +5. Reconstruct formal motions from informal decisions + +**Example**: + +Transcript Excerpt: +``` +Sarah: "Okay so, um, I wanted to talk about the budget thing. We're kinda over." +Mike: "Yeah, I saw that. How much?" +Sarah: "Like 12 percent or something." +Tom: "We should probably, I don't know, stop spending on stuff we don't need?" +Lisa: "Yeah, good idea." +Mike: "I can write up like, what that means, like what we can't spend on." +Sarah: "That'd be great. Can you do that by end of week?" +Mike: "Sure." +``` + +Formal Minutes: +``` +VI. FINANCIAL STATUS REVIEW + +Ms. Johnson, Finance Director, reported that current expenditures exceed budgeted amounts by approximately 12 percent. Following discussion of appropriate remedial measures, Mr. Wilson moved that discretionary spending be suspended pending budget review and realignment. Ms. Anderson seconded the motion. The motion carried unanimously. + +Mr. Davis was directed to prepare and distribute guidance defining discretionary spending categories within five business days. +``` + +### From Email Threads or Async Discussions + +**Input Characteristics**: +- Non-chronological decision-making +- Various people contributing at different times +- Some obsolete information +- Final decision may be buried + +**Processing**: +1. Identify the final state of each decision +2. Summarize discussion without email-by-email detail +3. Construct timeline of decisions +4. Note dissent if significant +5. 
Clarify who made final decisions + +### From Board or Committee Recordings + +**Input Characteristics**: +- Formal but conversational +- Parliamentary procedure mixed with discussion +- Multiple agenda items +- Motions and votes clearly stated + +**Processing**: +1. Follow established order from recording +2. Capture motions verbatim +3. Summarize discussions succinctly +4. Record exact vote counts +5. Note procedural actions (recesses, executive sessions) + +## Minutes by Organization Type + +### Corporate Board Minutes + +**Special Considerations**: +- Fiduciary duty documentation +- Shareholder interest protection +- Legal compliance records +- More detailed financial discussions +- Officer reports formally recorded + +**Example Section**: +``` +V. FINANCIAL REPORT + +The Chief Financial Officer presented the financial statements for the quarter ended September 30, 2024. The Board reviewed the balance sheet, income statement, and cash flow statement. The CFO noted that revenue increased 8% compared to the same quarter in the prior year, while operating expenses increased 5%. + +The Board discussed the Company's debt-to-equity ratio and cash reserves. Questions were raised regarding capital allocation strategy for the upcoming fiscal year. + +Following discussion, Mr. Wilson moved to accept the financial report as presented. Ms. Chen seconded. The motion carried unanimously. + +The Board directed management to prepare a comprehensive capital allocation proposal for review at the December meeting. +``` + +### Nonprofit Board Minutes + +**Special Considerations**: +- Mission alignment emphasis +- Fundraising and development activities +- Program outcome reporting +- Volunteer and staff matters +- Regulatory compliance (IRS requirements) + +**Example Section**: +``` +VI. PROGRAM REPORT + +The Executive Director presented the Q3 Program Report, highlighting outcomes and impact metrics for the organization's three primary programs. 
+ +Youth Mentorship Program: Served 145 youth (32% increase over prior year), with 92% of participants meeting program goals. + +The Board discussed program sustainability and potential for expansion. Members noted strong outcomes support case for increased foundation grant applications. + +The report was received with commendation for program staff. +``` + +### Committee or Team Minutes + +**Special Considerations**: +- Less formal than board minutes +- Focus on recommendations to larger body +- Work assignments prominent +- May be more action-oriented + +**Example Section**: +``` +IV. RECOMMENDATIONS FOR BOARD CONSIDERATION + +Following review of the facility needs assessment, the Committee developed recommendations for Board consideration: + +The Committee recommends that the Board: +1. Approve engagement of XYZ Architects to develop preliminary facility expansion plans not to exceed $25,000 +2. Establish a Facility Planning Task Force to oversee the planning process +3. Allocate $500,000 in reserves for potential facility expansion + +Ms. Thompson will present these recommendations to the Board at the November meeting. 
+``` + +### HOA or Condo Board Minutes + +**Special Considerations**: +- Property owner rights documentation +- Maintenance and repair decisions +- Enforcement actions +- Financial assessments +- Open meeting law compliance + +## Quality Checklist + +Formal meeting minutes should have: + +✓ **Complete header information**: Date, time, place, attendees +✓ **Proper procedural documentation**: Call to order, quorum, adjournment +✓ **Accurate motion records**: Exact wording, seconders, vote counts +✓ **Objective language**: Third person, formal tone, no editorializing +✓ **Organized structure**: Follows agenda, clear sections +✓ **Action documentation**: Clear assignments with names and deadlines +✓ **Sufficient detail**: Enough to recreate what happened +✓ **Appropriate brevity**: Not verbatim transcript +✓ **Signature lines**: For official approval + +## Common Pitfalls to Avoid + +**Too Much Detail**: +- Don't transcribe entire discussions +- Avoid attributing every comment +- Don't include side conversations +- Skip "he said/she said" back-and-forth + +**Too Little Detail**: +- Don't write "discussed budget" without substance +- Include vote tallies, not just "passed" +- Note dissent when significant +- Document specific commitments made + +**Inappropriate Tone**: +- Avoid "everyone was excited about..." +- Don't editorialize +- Skip emotional descriptions +- Maintain objectivity + +**Procedural Errors**: +- Don't invent motions that weren't made +- Record actual vote results, not hoped-for results +- Note if quorum lost during meeting +- Document when procedures weren't followed + +## Example Transformation: Complete Meeting + +**Input (Casual Notes)**: +``` +Planning Committee - Oct 20, 3pm + +Present: John (chair), Mary, Steve, Jennifer +Not there: Bob (sick) + +1. Approved last meeting minutes - everyone voted yes + +2. 
Budget stuff +- Mary showed Q3 numbers, we're over budget by $15k +- Discussed why - mainly the new software we bought +- Steve suggested cutting the December event to save money +- Jennifer worried that members will be upset +- Voted to cancel December event: 3 yes, 1 no (Jennifer) + +3. Membership drive +- Need new members +- Jennifer will create a marketing plan +- Due in 2 weeks + +Next meeting Nov 17 +Done at 4:30pm +``` + +**Output (Formal Minutes)**: +``` +ANNUAL PLANNING COMMITTEE +REGULAR MEETING MINUTES + +Date: October 20, 2024 +Time: 3:00 PM - 4:30 PM +Location: Administrative Office Conference Room +Presiding: John Smith, Committee Chair + +PRESENT: +John Smith, Chair +Mary Williams, Treasurer +Steve Johnson, Member +Jennifer Davis, Member + +ABSENT: +Robert Thompson, Member (excused) + +--- + +I. CALL TO ORDER +The meeting was called to order at 3:00 PM by Mr. Smith, Chair. + +II. ESTABLISHMENT OF QUORUM +The Secretary confirmed that a quorum was present. + +III. APPROVAL OF PREVIOUS MINUTES +The minutes from the September 22, 2024 meeting were presented for approval. Ms. Williams moved to approve the minutes as presented. Mr. Johnson seconded the motion. The motion carried unanimously. + +IV. FINANCIAL REPORT + +Ms. Williams, Treasurer, presented the financial report for the third quarter of 2024. The report indicated expenditures of $15,000 in excess of budgeted amounts for the quarter. The overage was attributed primarily to unanticipated software licensing costs incurred in August. + +The Committee discussed various options for addressing the budget shortfall, including potential event cancellation, reduction of discretionary spending, and reallocation from other budget categories. + +Mr. Johnson moved to cancel the December member appreciation event in order to realign expenditures with the annual budget. Ms. Williams seconded the motion. + +Discussion included consideration of member satisfaction and potential impact on retention. Ms. 
Davis expressed concern that event cancellation could negatively affect member engagement during the holiday season. + +Upon vote, the motion carried with three in favor (Smith, Williams, Johnson) and one opposed (Davis). + +Mr. Smith directed staff to communicate the cancellation to membership and to develop alternative approaches to member appreciation within existing budget constraints. + +V. MEMBERSHIP DEVELOPMENT + +The Committee discussed the need for increased membership recruitment efforts. Current membership stands at 287, below the target of 325 for year-end. + +Following discussion of potential strategies, Ms. Davis was assigned responsibility for developing a comprehensive membership marketing plan. The plan is to be submitted for Committee review by November 3, 2024. + +VI. NEXT MEETING +The next regular meeting of the Committee is scheduled for November 17, 2024, at 3:00 PM in the Administrative Office Conference Room. + +VII. ADJOURNMENT +There being no further business, Ms. Williams moved to adjourn the meeting. Mr. Johnson seconded the motion. The motion carried unanimously. The meeting was adjourned at 4:30 PM. + +Respectfully submitted, + +_______________________________ +[Secretary Name], Secretary +Date: __________________ + +Approved: + +_______________________________ +John Smith, Chair +Date: __________________ +``` + +## Usage Notes + +When providing text to convert, optionally specify: +- **Organization type**: Corporate, nonprofit, HOA, government, committee +- **Meeting type**: Board, committee, special, annual, emergency +- **Formality level**: Highly formal (legal record) vs. standard professional +- **Governance requirements**: Any specific bylaws or regulations to follow +- **Detail level**: Comprehensive vs. summary format +- **Confidentiality**: Are there executive session or confidential matters? + +Share your text and I'll transform it into formal, professional meeting minutes. 
diff --git a/commands/writing-and-editing/formatting/24-hour-time.md b/commands/writing-and-editing/formatting/24-hour-time.md new file mode 100644 index 0000000000000000000000000000000000000000..1d521c3b772aacebd70915edc434ce6052662bd8 --- /dev/null +++ b/commands/writing-and-editing/formatting/24-hour-time.md @@ -0,0 +1,15 @@ +Convert all times in this text to 24-hour format. + +Your task: +- Convert 12-hour times (AM/PM) to 24-hour format +- Use HH:MM format (e.g., 14:30, 09:00) +- Remove AM/PM indicators +- Ensure all times are standardized consistently + +Examples: +- 1:00 PM → 13:00 +- 9:30 AM → 09:30 +- 12:00 AM (midnight) → 00:00 +- 12:00 PM (noon) → 12:00 + +Preserve all other content and formatting. Only change time representations. diff --git a/commands/writing-and-editing/formatting/add-missing-subheadings.md b/commands/writing-and-editing/formatting/add-missing-subheadings.md new file mode 100644 index 0000000000000000000000000000000000000000..3cccc9efab091a8f628db6354241d0fcaaf4d7b7 --- /dev/null +++ b/commands/writing-and-editing/formatting/add-missing-subheadings.md @@ -0,0 +1,74 @@ +# Add Missing Subheadings + +You are a document structure specialist. Your task is to analyze the provided text and add appropriate subheadings to improve readability and organization. + +## Your Task + +1. **Analyze the text structure** + - Identify major topic shifts and logical sections + - Look for paragraphs that introduce new concepts or themes + - Notice where the text transitions between ideas + +2. **Create appropriate subheadings** + - Use clear, descriptive titles that reflect the content + - Maintain consistent heading hierarchy (H2, H3, H4 as needed) + - Follow the document's existing tone and style + - Use parallel structure when possible (e.g., all verb phrases or all noun phrases) + +3. 
**Insert subheadings strategically** + - Break up long blocks of text + - Create logical sections (aim for 2-4 paragraphs per section) + - Don't over-segment short sections + - Preserve the original text without modification + +## Heading Hierarchy Guidelines + +- **H2 (##)**: Main sections representing major topic divisions +- **H3 (###)**: Subsections within a major topic +- **H4 (####)**: Sub-subsections for detailed breakdowns (use sparingly) + +## Style Guidelines + +- Keep headings concise (2-6 words typically) +- Use title case or sentence case consistently +- Avoid redundant words ("Section on..." or "Information about...") +- Make headings scannable and informative +- Consider SEO-friendly phrasing when appropriate + +## Example + +**Before:** +``` +Content about topic A spanning 3 paragraphs... + +Content about topic B spanning 2 paragraphs... + +Content about topic C spanning 4 paragraphs... +``` + +**After:** +``` +## Topic A + +Content about topic A spanning 3 paragraphs... + +## Topic B + +Content about topic B spanning 2 paragraphs... + +## Topic C + +### Subtopic C1 +Content about first aspect... + +### Subtopic C2 +Content about second aspect... +``` + +## Output Format + +Return the full text with subheadings inserted. Preserve all original content exactly as written, adding only markdown heading syntax. + +--- + +Now, please provide the text you'd like me to enhance with subheadings. diff --git a/commands/writing-and-editing/formatting/date-ddmmyyyy.md b/commands/writing-and-editing/formatting/date-ddmmyyyy.md new file mode 100644 index 0000000000000000000000000000000000000000..212f537a1c4f7c8978b901dd24e502bfd202721a --- /dev/null +++ b/commands/writing-and-editing/formatting/date-ddmmyyyy.md @@ -0,0 +1,15 @@ +Convert all dates in this text to DD/MM/YYYY format. + +Your task: +- Standardize all date formats to DD/MM/YYYY +- Convert various date formats (MM/DD/YYYY, YYYY-MM-DD, written dates, etc.) 
+- Use two digits for day and month (e.g., 05/03/2024, not 5/3/2024) +- Use four digits for year + +Examples: +- March 5, 2024 β†’ 05/03/2024 +- 3/5/24 β†’ 05/03/2024 +- 2024-03-05 β†’ 05/03/2024 +- 5th March 2024 β†’ 05/03/2024 + +Preserve all other content and formatting. Only change date representations. diff --git a/commands/writing-and-editing/formatting/remove-all-images.md b/commands/writing-and-editing/formatting/remove-all-images.md new file mode 100644 index 0000000000000000000000000000000000000000..b4e2dee422fdfe474d89da24f85e5768b96d8b13 --- /dev/null +++ b/commands/writing-and-editing/formatting/remove-all-images.md @@ -0,0 +1,81 @@ +# Remove All Images + +You are a document cleaning specialist. Your task is to remove all image references and related syntax from the provided text while preserving all other content. + +## Your Task + +Remove all instances of: + +1. **Markdown image syntax** + - `![alt text](image-url.jpg)` + - `![alt text](image-url.png "title")` + - `![](image.gif)` + +2. **HTML image tags** + - `description` + - `text` + - Self-closing variants: `` + +3. **Image figure elements** + - `
` elements (optional: preserve caption text without the figure wrapper) + +4. **Base64 embedded images** + - `![alt](data:image/png;base64,...)` + - `Process diagram + +## Conclusion + +Final thoughts here. +``` + +**After:** +```markdown +# Article Title + +Here's some introductory text. + +The sunset was captured in Hawaii. More details follow. + +## Conclusion + +Final thoughts here. +``` + +--- + +Now, please provide the text you'd like me to clean by removing all images. diff --git a/commands/writing-and-editing/length-editors/expand-word-count.md b/commands/writing-and-editing/length-editors/expand-word-count.md new file mode 100644 index 0000000000000000000000000000000000000000..837249262ba71584fdaf64acbde2601d91a6f782 --- /dev/null +++ b/commands/writing-and-editing/length-editors/expand-word-count.md @@ -0,0 +1,186 @@ +# Expand Word Count - Content Padding and Elaboration + +Expand content to meet minimum word count requirements by adding relevant detail, examples, explanations, and context while maintaining natural flow and readability. Perfect for meeting publication minimums, fleshing out thin content, or adding depth to brief writing. + +## Your Task + +Take the user's content and expand it to their specified word count (or make it significantly more detailed if no target is specified) while keeping the writing natural and adding genuine value. + +## Expansion Strategies + +### 1. Add Examples and Illustrations + +Transform general statements into detailed examples: + +**Before**: "The software improves productivity." + +**After**: "The software improves productivity in multiple ways. For instance, automated workflows eliminate the need for manual data entry, saving teams up to 10 hours per week. Real-time collaboration features allow team members to work simultaneously on documents, reducing the back-and-forth of email exchanges and version control issues." + +### 2. 
Provide Context and Background + +Add relevant backstory and context: + +**Before**: "Cloud computing has become popular." + +**After**: "Cloud computing has become increasingly popular over the past decade. Originally dominated by early adopters in the tech industry, cloud services have now expanded to virtually every sector, from healthcare to manufacturing. This shift was driven by several factors, including reduced infrastructure costs, improved accessibility, and the rise of remote work necessitating flexible access to resources." + +### 3. Explain the "Why" and "How" + +Add explanations and mechanisms: + +**Before**: "The algorithm processes data quickly." + +**After**: "The algorithm processes data quickly by leveraging parallel processing techniques. Rather than handling data sequentially, it divides large datasets into smaller chunks that can be processed simultaneously across multiple CPU cores. This approach significantly reduces processing time, especially for operations that can be executed independently of one another." + +### 4. Include Comparisons and Contrasts + +Compare with alternatives or previous states: + +**Before**: "This approach is more efficient." + +**After**: "This approach is more efficient compared to traditional methods. Where the conventional process required manual review of each item individually, the new system automates initial screening and flags only exceptions for human review. This reduces processing time from hours to minutes while maintaining the same accuracy levels." + +### 5. Add Supporting Details + +Expand with relevant statistics, quotes, or research: + +**Before**: "Many companies use this technology." + +**After**: "Many companies use this technology, with adoption rates climbing steadily year over year. According to industry research, 67% of Fortune 500 companies have integrated this technology into their core operations, and that number is expected to reach 85% by next year. 
Industry analysts attribute this rapid adoption to measurable ROI and competitive pressure to modernize." + +### 6. Elaborate on Implications + +Discuss consequences, benefits, or future impact: + +**Before**: "This will change the industry." + +**After**: "This will change the industry in fundamental ways. In the short term, we'll likely see rapid consolidation as companies race to adopt and integrate these capabilities. Over the longer term, it could reshape job roles, with certain repetitive tasks being automated while creating new positions focused on oversight and strategic decision-making. The economic implications are substantial, with some estimates suggesting productivity gains of 20-30% for early adopters." + +### 7. Break Down Complex Ideas + +Expand single sentences into multi-sentence explanations: + +**Before**: "The system uses machine learning." + +**After**: "The system uses machine learning to continuously improve its performance. When first deployed, it starts with baseline models trained on historical data. As it processes new information, it learns from patterns and outcomes, gradually refining its predictions. Over time, this iterative learning process makes the system more accurate and better adapted to the specific needs of each organization using it." + +### 8. Add Transitional and Connective Content + +Include transitions, signposting, and flow improvers: + +**Techniques**: +- "Let's explore this in more detail..." +- "To understand why this matters..." +- "This brings us to an important point..." +- "Building on this concept..." +- "It's worth noting that..." +- "This raises an interesting question..." + +### 9. Include Qualifications and Nuance + +Add appropriate caveats and considerations: + +**Before**: "The solution works well." + +**After**: "The solution works well in most typical use cases, though there are some scenarios where alternative approaches may be more appropriate. 
For organizations with very specific compliance requirements, for example, a more customized solution might be necessary. However, for the majority of small to medium-sized businesses, the standard implementation provides an excellent balance of functionality and ease of use." + +### 10. Expand Lists and Enumerations + +Turn brief lists into detailed descriptions: + +**Before**: +``` +Benefits include: +- Cost savings +- Efficiency +- Scalability +``` + +**After**: +``` +This approach delivers several key benefits that make it attractive for modern businesses: + +**Cost savings**: By reducing manual labor and minimizing errors that require expensive corrections, organizations typically see 20-30% reduction in operational costs within the first year of implementation. + +**Efficiency improvements**: Automated workflows process tasks in minutes rather than hours, freeing up team members to focus on higher-value activities that require human judgment and creativity. + +**Scalability**: The system easily accommodates growth, handling increased volume without requiring proportional increases in staff or infrastructure investment. +``` + +## What NOT to Do + +Avoid these padding mistakes: + +- **Don't add fluff**: "It is important to note that..." "As a matter of fact..." 
+- **Don't repeat yourself**: Saying the same thing in different words +- **Don't go off-topic**: Stay relevant to the core subject +- **Don't use meaningless qualifiers**: "very," "really," "quite" without purpose +- **Don't add redundant adjectives**: "absolutely essential," "completely finished" +- **Don't create run-on sentences**: Keep readability high + +## Expansion Levels + +**Light expansion (20-50% increase)**: +- Add examples to key points +- Include brief explanations +- Expand on benefits/features +- Minimal structural changes + +**Moderate expansion (50-100% increase)**: +- Multiple examples per section +- Detailed explanations +- Add context and background +- Include comparisons +- Some new subsections + +**Heavy expansion (100-200% increase)**: +- Comprehensive examples +- Extended background/context +- Multiple perspectives +- Detailed step-by-step processes +- Additional sections and subsections + +**Extreme expansion (200%+ increase)**: +- Deep dives on each topic +- Multiple detailed examples +- Extensive context and history +- Consider alternative viewpoints +- Substantial new content sections + +## Usage Instructions + +When providing content, specify: +1. **Target word count** (e.g., "expand to 2000 words") + OR +2. **Expansion percentage** (e.g., "increase by 150%") + OR +3. **General instruction** (e.g., "make it much more detailed") + +Optional: Indicate which areas need most expansion + +## Example Transformation + +**Before (50 words)**: +``` +Our project management tool helps teams collaborate effectively. It includes task tracking, file sharing, and communication features. Users can create projects, assign tasks, and monitor progress. The tool integrates with popular services and works on all devices. +``` + +**After - Expanded to 150 words (200% increase)**: +``` +Our project management tool helps teams collaborate effectively by providing a centralized platform for all project-related activities. 
Rather than juggling multiple disconnected tools, teams can manage everything in one place. + +The platform includes comprehensive task tracking capabilities that allow managers to create detailed task lists, set priorities, establish deadlines, and track completion status in real-time. File sharing functionality ensures all team members have access to the latest versions of documents, eliminating the confusion of email attachments and outdated files. Built-in communication features, including threaded discussions and @mentions, keep conversations organized and connected to relevant tasks and projects. + +Users can create unlimited projects, each with its own workspace, and easily assign tasks to team members with clear ownership and accountability. The intuitive dashboard provides at-a-glance visibility into project progress, helping identify bottlenecks before they become problems. + +The tool integrates seamlessly with popular services like Slack, Google Drive, and Dropbox, and works flawlessly across desktop, tablet, and mobile devices. +``` + +## Output Format + +Provide: +1. The expanded version of the content +2. Word count information (original count β†’ new count, % increase) +3. Brief note about expansion methods used if relevant + +Share your content and desired word count target (or expansion percentage) for elaboration. diff --git a/commands/writing-and-editing/length-editors/reduce-word-count.md b/commands/writing-and-editing/length-editors/reduce-word-count.md new file mode 100644 index 0000000000000000000000000000000000000000..646fb77065f6c2a420bfef9c9d68910b37b754fd --- /dev/null +++ b/commands/writing-and-editing/length-editors/reduce-word-count.md @@ -0,0 +1,136 @@ +# Reduce Word Count - Concise Content Editor + +Trim content to meet specific word count targets while preserving all essential information and maintaining readability. Perfect for meeting publication limits, creating executive summaries, or tightening verbose writing. 
+ +## Your Task + +Take the user's content and reduce it to their specified word count (or make it significantly more concise if no target is specified) while keeping all critical information intact. + +### Reduction Strategies + +#### 1. Eliminate Redundancy +- Remove repeated ideas or points +- Cut redundant adjectives and adverbs +- Eliminate "in other words" restatements +- Remove circular definitions + +**Example**: +- Before: "The software is fast and quick, providing rapid response times" +- After: "The software provides rapid response times" + +#### 2. Condense Wordy Phrases +- "in order to" β†’ "to" +- "due to the fact that" β†’ "because" +- "at this point in time" β†’ "now" +- "has the ability to" β†’ "can" +- "make a decision" β†’ "decide" +- "take into consideration" β†’ "consider" +- "prior to" β†’ "before" +- "in the event that" β†’ "if" +- "a number of" β†’ "several" or "many" +- "it is important to note that" β†’ [delete] + +#### 3. Simplify Sentence Structure +- Combine related sentences +- Remove unnecessary clauses +- Use active voice (usually shorter than passive) +- Cut throat-clearing phrases ("It should be noted that...") + +**Example**: +- Before: "There are many developers who prefer TypeScript. They choose it because it provides type safety." +- After: "Many developers prefer TypeScript for its type safety." + +#### 4. Remove Padding +- Delete filler words: really, very, quite, basically, actually, literally +- Remove unnecessary qualifiers: "It is worth noting that", "As a matter of fact" +- Cut hedge words when appropriate: somewhat, fairly, rather, pretty much +- Eliminate examples if the point is clear without them + +#### 5. Convert to More Concise Forms +- Replace long examples with brief ones +- Use lists instead of prose when appropriate +- Consolidate similar points +- Remove parenthetical asides +- Cut background information that's not essential + +#### 6. Prioritize Information +When making deep cuts: +1. 
Keep the main argument/thesis +2. Preserve key evidence and data +3. Retain critical examples +4. Cut: background context, tangential points, redundant examples, extended explanations + +### What to Preserve + +- **Core message**: Never sacrifice the main point +- **Key facts and data**: Essential evidence stays +- **Technical accuracy**: Don't oversimplify to the point of incorrectness +- **Logical flow**: Maintain coherent transitions +- **Critical examples**: Keep one strong example rather than three weaker ones + +### Reduction Levels + +**Light reduction (10-20% cut)**: +- Remove obvious fluff and redundancy +- Tighten wordy phrases +- Minimal content loss + +**Moderate reduction (20-40% cut)**: +- Consolidate similar points +- Remove secondary examples +- Simplify explanations +- Some context may be lost + +**Heavy reduction (40-60% cut)**: +- Focus on core message only +- One example per point maximum +- Remove most background/context +- Keep only essential information + +**Extreme reduction (60%+ cut)**: +- Executive summary style +- Absolute essentials only +- May approach outline form +- Significant detail loss + +## Usage Instructions + +When the user provides content, they should also specify: +1. **Target word count** (e.g., "reduce to 500 words") + OR +2. **Reduction percentage** (e.g., "cut by 30%") + OR +3. **General instruction** (e.g., "make it much more concise") + +## Example Transformation + +**Before (150 words)**: +``` +In today's modern business environment, it has become increasingly important for organizations to leverage technology in order to improve their operational efficiency. There are many different tools and platforms available on the market that can help companies streamline their workflows and processes. One particularly effective approach that has gained significant traction in recent years is the implementation of automation systems. 
These systems have the ability to handle repetitive tasks that would otherwise require manual intervention by human employees. By automating these routine processes, businesses can free up their valuable human resources to focus on more strategic, high-value activities that require creative thinking and problem-solving capabilities. Additionally, automation can also help to reduce the likelihood of human error, which can be costly for organizations. +``` + +**After - Reduced to 75 words (50% reduction)**: +``` +Organizations increasingly leverage technology to improve operational efficiency. Automation systems effectively handle repetitive tasks that would otherwise require manual intervention. By automating routine processes, businesses free human resources for strategic, high-value activities requiring creative thinking and problem-solving. Automation also reduces costly human errors. Various tools and platforms help companies streamline workflows, making automation one of the most effective approaches gaining traction in modern business. +``` + +**After - Reduced to 40 words (73% reduction)**: +``` +Automation systems handle repetitive tasks, freeing employees for strategic work requiring creativity and problem-solving. This reduces human error and improves operational efficiency. Multiple platforms enable workflow automation, making it an increasingly popular approach for modern businesses. +``` + +## Output Format + +Provide: +1. The reduced version of the content +2. Word count information (original count β†’ new count, % reduction) +3. 
Brief note about what was cut if the reduction was substantial + +## Tips for Best Results + +- **Specify your target**: Give a concrete word count or percentage +- **Note priorities**: If certain sections must be preserved, mention them +- **Accept trade-offs**: Heavy reductions mean less detail and nuance +- **Review carefully**: Ensure the condensed version still says what you need it to say + +Share your content and desired word count target (or reduction percentage) for trimming. diff --git a/commands/writing-and-editing/length-editors/shorten.md b/commands/writing-and-editing/length-editors/shorten.md new file mode 100644 index 0000000000000000000000000000000000000000..fceaacd875114fec60663ee6b4dafe94cfe1a03f --- /dev/null +++ b/commands/writing-and-editing/length-editors/shorten.md @@ -0,0 +1,11 @@ +Make this text more concise without losing key information. + +Apply conciseness techniques: +- Remove redundant phrases +- Eliminate unnecessary words +- Combine related sentences +- Cut filler content +- Keep only essential information +- Make every word count + +Preserve all important facts and main points. diff --git a/commands/writing-and-editing/optimize-paragraph-breaks.md b/commands/writing-and-editing/optimize-paragraph-breaks.md new file mode 100644 index 0000000000000000000000000000000000000000..4a77ed19e4af17bc9f70c856eb7cf0d52ebbed77 --- /dev/null +++ b/commands/writing-and-editing/optimize-paragraph-breaks.md @@ -0,0 +1,41 @@ +You are a text editor specializing in optimizing paragraph structure for web readability. + +Your task is to add appropriate paragraph breaks or shorten existing paragraphs to make text more scannable and easier to read on screens. + +## Guidelines + +1. **Ideal paragraph length for web**: 2-4 sentences or 40-70 words per paragraph +2. **Maximum paragraph length**: 5-6 sentences or ~100 words +3. **Use white space strategically**: Break up dense text blocks to improve scannability +4. 
**Group related ideas**: Keep sentences about the same topic together +5. **Create logical breaks**: Split at natural topic transitions or shifts in focus +6. **Maintain flow**: Ensure paragraph breaks don't disrupt the narrative or argument +7. **Consider visual balance**: Vary paragraph lengths slightly for visual interest +8. **Mobile-first thinking**: Shorter paragraphs work better on small screens + +## Web Readability Best Practices + +- **Dense paragraphs**: Split paragraphs longer than 100 words +- **Topic shifts**: Create new paragraphs when introducing new ideas or subtopics +- **Lists**: Consider breaking out enumerated items into separate short paragraphs or bullet points +- **Emphasis**: Use paragraph breaks to emphasize key points or important transitions +- **Breathing room**: Add breaks to give readers visual rest points + +## Process + +1. Identify long paragraphs (>100 words or >6 sentences) +2. Locate natural breaking points (topic shifts, transitions, logical groupings) +3. Split paragraphs at appropriate points +4. Review for logical flow and coherence +5. Ensure no orphaned single-sentence paragraphs unless used for emphasis +6. Verify improved scannability and readability +7. Present the revised text + +## What NOT to do + +- Don't create choppy, disjointed text +- Don't break up well-structured short paragraphs unnecessarily +- Don't separate tightly connected ideas +- Don't create too many single-sentence paragraphs (unless for deliberate emphasis) + +Return only the revised text with optimized paragraph breaks, without explanations or commentary unless specifically requested. 
\ No newline at end of file diff --git a/commands/writing-and-editing/proofreading/add-punctuation.md b/commands/writing-and-editing/proofreading/add-punctuation.md new file mode 100644 index 0000000000000000000000000000000000000000..ff8f4aabc864a4c93956149b1f60dcc2a372c313 --- /dev/null +++ b/commands/writing-and-editing/proofreading/add-punctuation.md @@ -0,0 +1,12 @@ +This text is missing basic punctuation. + +Add appropriate punctuation including: + +- Periods at the end of sentences +- Commas where needed for readability +- Question marks for questions +- Exclamation points where appropriate +- Apostrophes for contractions and possessives +- Quotation marks where needed + +Keep the original meaning and tone intact. Don't change the words themselves unless absolutely necessary for clarity. diff --git a/commands/writing-and-editing/proofreading/clean-stt.md b/commands/writing-and-editing/proofreading/clean-stt.md new file mode 100644 index 0000000000000000000000000000000000000000..11ff550245563cd83f4b86f121d2ac05616ffc9f --- /dev/null +++ b/commands/writing-and-editing/proofreading/clean-stt.md @@ -0,0 +1,12 @@ +This text was transcribed from speech-to-text and needs cleaning. + +Clean up the text by: + +- Removing filler words (um, uh, like, you know, etc.) +- Fixing obvious transcription errors +- Removing false starts and repetitions +- Adding proper punctuation +- Fixing capitalization +- Making sentence structure clearer where speech was fragmented + +Keep the original meaning and tone. Don't change the speaker's voice or substantive content. 
diff --git a/commands/writing-and-editing/proofreading/fix-typos.md b/commands/writing-and-editing/proofreading/fix-typos.md new file mode 100644 index 0000000000000000000000000000000000000000..c52ee1a15edc696db54464d489734f0f0ea1f455 --- /dev/null +++ b/commands/writing-and-editing/proofreading/fix-typos.md @@ -0,0 +1,10 @@ +Fix typos, spelling mistakes, punctuation, capitalization, and spacing in the provided text or files. + +Your task: +- Correct all typos and spelling errors +- Fix punctuation issues +- Correct capitalization errors +- Fix spacing problems +- Return the clean, corrected version + +Do not make stylistic changes or alter the voice/tone. Focus only on mechanical errors. diff --git a/commands/writing-and-editing/proofreading/proofread.md b/commands/writing-and-editing/proofreading/proofread.md new file mode 100644 index 0000000000000000000000000000000000000000..f4e3c296997a0b595d20fb70abb1ec902838ab8b --- /dev/null +++ b/commands/writing-and-editing/proofreading/proofread.md @@ -0,0 +1,21 @@ +Proofread and fact-check the provided text or files in this repository. + +Your task: +1. Verify all facts, statistics, dates, names, and claims +2. Correct any inaccuracies found +3. Insert precise inline Markdown links `[anchor text](URL)` for any claim that should be sourced +4. 
If a claim cannot be verified after diligent search, append `[source needed]` after it + +Source selection hierarchy: +- Primary/official sources (official websites, statutes, standards bodies, datasets, DOIs) +- Peer-reviewed literature and reputable preprints +- High-quality secondary sources (.gov, .edu, major journals, authoritative news) +- Reputable reports with transparent methods + +Guidelines: +- Preserve the original structure, headings, lists, tables, and code blocks +- Use concise, factual anchor text (e.g., `[ISO/IEC 27001]`, `[2024 WHO report]`) +- Never use "here" or "click here" as anchor text +- Link the first occurrence of a recurring fact; re-link only when context demands +- When sources conflict, choose the highest-tier, most recent, jurisdiction-relevant source +- Return only the revised text with corrections and links applied diff --git a/commands/writing-and-editing/readability/optimize-for-web.md b/commands/writing-and-editing/readability/optimize-for-web.md new file mode 100644 index 0000000000000000000000000000000000000000..137e8d72cb939a49d7a6587c0384c3c34a0af20a --- /dev/null +++ b/commands/writing-and-editing/readability/optimize-for-web.md @@ -0,0 +1,73 @@ +# Optimize for Web Readability + +You are a web content optimization specialist. Your task is to transform the provided text into highly scannable, digestible web content with shorter paragraphs and improved visual flow. + +## Your Task + +Restructure the text following web readability best practices: + +### 1. Paragraph Management +- **Break up long paragraphs**: Maximum 3-4 sentences per paragraph +- **One idea per paragraph**: Each paragraph should make a single clear point +- **Vary paragraph length**: Mix 1-sentence, 2-sentence, and 3-4 sentence paragraphs +- **White space**: Create visual breathing room + +### 2. 
Sentence Structure +- **Average 15-20 words** per sentence +- **Mix lengths**: Short punchy sentences + moderate compound sentences +- **Front-load information**: Put key points at sentence start +- **Active voice**: Prefer active constructions + +### 3. Visual Hierarchy +- **Compelling subheadings**: Add H2/H3 headers every 300-400 words +- **Bullet points**: Convert lists and multiple items to bullets +- **Bold key phrases**: Highlight important terms or takeaways +- **Numbered lists**: Use for sequential information or rankings + +### 4. Scannability Enhancements +- **Lead with value**: Start with the most important information +- **Use transitional subheadings**: Guide readers through content +- **Pull quotes or callouts**: Highlight key insights (mark with `> `) +- **Action-oriented language**: Clear, direct verbs + +### 5. Engagement Techniques +- **Conversational tone**: Write like you're talking to a friend +- **Address reader directly**: Use "you" and "your" +- **Questions as headers**: Engage reader curiosity +- **Examples and specifics**: Concrete details over abstractions + +## Web-Specific Guidelines + +- **F-pattern reading**: Front-load sentences and paragraphs +- **Mobile-first**: Ensure readability on small screens +- **Load time consideration**: Break very long content into sections +- **Link naturally**: Internal/external links in context (mark with `[text](url)`) +- **Avoid walls of text**: Never more than 5-6 lines per paragraph + +## Before/After Example + +**Before:** +"Our platform offers comprehensive project management capabilities that enable teams to collaborate effectively while maintaining visibility into project progress and resource allocation. The system integrates with existing tools and provides real-time updates to ensure everyone stays informed about project status and upcoming deadlines. Users can customize their dashboards to display the most relevant information for their role and responsibilities." 
+ +**After:** +"Our platform gives your team everything needed for seamless project management. + +**Real-time collaboration.** Work together effortlessly while tracking progress and resources at a glance. + +**Instant updates.** Everyone stays in the loop with real-time notifications about project status and deadlines. + +**Personalized dashboards.** Customize your view to show exactly what matters for your role. + +Plus, it integrates with the tools you're already using." + +--- + +## Output Format + +Return the web-optimized text with: +- Short, scannable paragraphs +- Strategic use of bold, bullets, and headers +- Clear visual hierarchy +- Engaging, conversational tone + +Now, please provide the text you'd like me to optimize for web readability. diff --git a/commands/writing-and-editing/readability/target-grade-level.md b/commands/writing-and-editing/readability/target-grade-level.md new file mode 100644 index 0000000000000000000000000000000000000000..af5da96ed34af4c98f2e2d6a40ff7f6592edc107 --- /dev/null +++ b/commands/writing-and-editing/readability/target-grade-level.md @@ -0,0 +1,87 @@ +# Target Specific Readability Level + +You are a readability optimization specialist. Your task is to rewrite the provided text to match a specific reading level while preserving the core message and key information. 
+ +## Available Target Levels + +### Grade 6-8 (Middle School) +- **Audience**: General public, broad accessibility +- **Sentence length**: 10-15 words average +- **Vocabulary**: Common words, minimal jargon +- **Complexity**: Simple sentence structures +- **Use for**: Public communications, consumer content, general instructions + +### Grade 9-10 (High School) +- **Audience**: Educated general public +- **Sentence length**: 15-20 words average +- **Vocabulary**: Everyday language with some technical terms (explained) +- **Complexity**: Mix of simple and compound sentences +- **Use for**: News articles, business communications, user guides + +### Grade 11-12 (Advanced High School) +- **Audience**: College-ready, professional readers +- **Sentence length**: 18-22 words average +- **Vocabulary**: Sophisticated vocabulary, explained technical terms +- **Complexity**: Varied sentence structures +- **Use for**: Business reports, marketing materials, detailed guides + +### College Level (13-16) +- **Audience**: Professionals, specialists +- **Sentence length**: 20-25 words average +- **Vocabulary**: Advanced vocabulary, technical terminology +- **Complexity**: Complex and compound-complex sentences +- **Use for**: Academic papers, technical documentation, professional analysis + +### Graduate Level (17+) +- **Audience**: Subject matter experts, academics +- **Sentence length**: 25+ words average +- **Vocabulary**: Specialized terminology, domain-specific jargon +- **Complexity**: Dense, sophisticated syntax +- **Use for**: Research papers, specialized journals, expert communications + +## Adjustment Techniques + +### To Simplify (Lower Grade Level) +- **Shorten sentences**: Break complex sentences into shorter ones +- **Replace vocabulary**: Swap advanced words for common alternatives +- **Simplify syntax**: Convert passive to active voice +- **Add definitions**: Explain necessary technical terms +- **Use examples**: Illustrate abstract concepts concretely +- **Remove 
redundancy**: Cut unnecessary complexity + +### To Elevate (Higher Grade Level) +- **Combine sentences**: Merge related ideas with conjunctions +- **Enhance vocabulary**: Use more precise, sophisticated terms +- **Add nuance**: Include qualifying clauses and details +- **Increase density**: Pack more information per sentence +- **Employ variety**: Use varied sentence structures and lengths + +## Readability Metrics + +The rewritten text should approximate these Flesch-Kincaid scores: +- **Grade 6-8**: Flesch Reading Ease 70-80, Grade Level 6-8 +- **Grade 9-10**: Flesch Reading Ease 60-70, Grade Level 9-10 +- **Grade 11-12**: Flesch Reading Ease 50-60, Grade Level 11-12 +- **College**: Flesch Reading Ease 30-50, Grade Level 13-16 +- **Graduate**: Flesch Reading Ease 0-30, Grade Level 17+ + +## Usage Instructions + +**To use this command**, please specify: +1. Your target readability level (Grade 6-8, 9-10, 11-12, College, or Graduate) +2. The text you want adjusted + +## Example + +**Original (College Level):** +"The implementation of sophisticated algorithmic trading strategies necessitates comprehensive risk management protocols to mitigate potential adverse market movements and ensure optimal capital allocation efficiency across diversified portfolios." + +**Grade 9-10 Target:** +"Automated trading systems need strong risk management rules. These rules help protect against unexpected market changes and make sure money is spread wisely across different investments." + +**Graduate Level Target:** +"The deployment of advanced algorithmic trading architectures requires robust, multi-layered risk management frameworks that proactively address systematic and idiosyncratic risks while optimizing capital efficiency through dynamic portfolio rebalancing mechanisms informed by real-time market microstructure analysis." + +--- + +Please specify your target readability level and provide the text you'd like me to adjust. 
diff --git a/commands/writing-and-editing/seo-optimize.md b/commands/writing-and-editing/seo-optimize.md new file mode 100644 index 0000000000000000000000000000000000000000..29155f4d2e5f7b289dc5e456b2403c059f2b8e65 --- /dev/null +++ b/commands/writing-and-editing/seo-optimize.md @@ -0,0 +1,16 @@ +Apply minimal, high-precision SEO edits to improve organic search performance. + +Your task: +1. Enhance discoverability and relevance with subtle, natural edits +2. Improve keyword placement without keyword stuffing +3. Optimize headings and subheadings for search intent +4. Ensure meta-friendly structure (clear hierarchy, scannable content) +5. Improve internal linking opportunities where natural + +Constraints: +- Do NOT alter material flow, voice, tone, or narrative structure +- Keep edits minimal and human-natural +- Preserve the original message and style +- Focus on discoverability, not dramatic rewrites + +Apply changes directly while maintaining the text's authenticity. diff --git a/commands/writing-and-editing/simplify-sentence-structure.md b/commands/writing-and-editing/simplify-sentence-structure.md new file mode 100644 index 0000000000000000000000000000000000000000..c6c13f96ecdab6f9693cdbb3094721d8d084b566 --- /dev/null +++ b/commands/writing-and-editing/simplify-sentence-structure.md @@ -0,0 +1,23 @@ +You are a text editor specializing in simplifying sentence structure for improved clarity and readability. + +Your task is to simplify complex sentence structures while preserving the original meaning and intent. + +## Guidelines + +1. **Break down complex sentences**: Identify sentences with multiple clauses, subordinate clauses, or nested structures +2. **Use simple subject-verb-object patterns**: Convert complex grammatical structures into straightforward constructions +3. **Reduce nested clauses**: Eliminate or separate nested subordinate clauses into standalone sentences +4. 
**Remove unnecessary complexity**: Simplify passive voice to active voice where appropriate +5. **Maintain meaning**: Ensure the simplified version conveys the same information and nuance +6. **Preserve tone**: Keep the original tone (formal, casual, technical, etc.) while simplifying structure +7. **Keep key terminology**: Don't oversimplify domain-specific terms or technical vocabulary + +## Process + +1. Analyze each sentence for structural complexity +2. Identify areas where simplification would improve clarity +3. Rewrite complex sentences using simpler grammatical structures +4. Verify that meaning and tone are preserved +5. Present the revised text + +Return only the simplified text without explanations or commentary unless specifically requested. \ No newline at end of file diff --git a/commands/writing-and-editing/structure/add-headings.md b/commands/writing-and-editing/structure/add-headings.md new file mode 100644 index 0000000000000000000000000000000000000000..498f886e190d38f960a3720c3a61027ad2ff409c --- /dev/null +++ b/commands/writing-and-editing/structure/add-headings.md @@ -0,0 +1,10 @@ +Structure the provided text by generating, reviewing, and refining subheadings. + +Your task: +1. **Add Subheadings** – Break the text into well-defined sections with concise, descriptive subheadings +2. **Logical Division** – Ensure every major shift in topic, argument, or focus is clearly marked +3. **Hierarchy** – Use appropriate heading levels (##, ###, etc.) to show information hierarchy +4. **Descriptive** – Make headings informative and aligned with the main ideas of each section +5. **Readability** – Improve navigation and comprehension through clear section breaks + +Apply changes directly. Preserve the original content while adding structure. 
diff --git a/commands/writing-and-editing/structure/add-sources.md b/commands/writing-and-editing/structure/add-sources.md new file mode 100644 index 0000000000000000000000000000000000000000..19c56ec87b521176b252524d484bcbb8fe1df6f4 --- /dev/null +++ b/commands/writing-and-editing/structure/add-sources.md @@ -0,0 +1,25 @@ +Add inline Markdown source links to factual claims in the provided text. + +Your task: +1. Identify claims that require sourcing (statistics, dates, names, study findings, quotations, events) +2. Find reliable sources for each claim +3. Insert inline Markdown links `[anchor text](URL)` at the point of claim +4. If a source cannot be found, append `[source needed]` + +What requires a source: +- Specific numbers and statistics +- Dates and timelines +- Names and roles of people/organizations +- Study findings and research results +- Legal/policy references +- Standards and specifications +- Quotations +- Notable events + +Source hierarchy: +1. Primary/official sources +2. Peer-reviewed literature +3. High-quality secondary sources (.gov, .edu, major journals) +4. Reputable reports with transparent methods + +Use concise, factual anchor text. Link the first occurrence of recurring facts. Preserve document structure and formatting. diff --git a/commands/writing-and-editing/structure/improve-flow.md b/commands/writing-and-editing/structure/improve-flow.md new file mode 100644 index 0000000000000000000000000000000000000000..407f89a062fe7870287e57820e0045329d558c11 --- /dev/null +++ b/commands/writing-and-editing/structure/improve-flow.md @@ -0,0 +1,10 @@ +Improve the flow and readability of the provided text by refining its structure and coherence. + +Your task: +1. **Structural Flow** – Ensure ideas unfold naturally and logically. Rearrange sentences, paragraphs, or sections so each part leads smoothly into the next +2. **Pacing** – Avoid abrupt jumps or redundant detours +3. 
**Transitions** – Add or improve transition words/phrases between sections +4. **Clarity** – Simplify complex sentences where needed without losing meaning +5. **Coherence** – Ensure the entire piece progresses in an orderly manner from start to finish + +Apply edits directly without seeking approval. Preserve the original voice and tone. diff --git a/commands/writing-and-editing/stylistic/add-metaphors.md b/commands/writing-and-editing/stylistic/add-metaphors.md new file mode 100644 index 0000000000000000000000000000000000000000..7b2797623e264aa0c2f3ae9dd51277b09c3ce7d8 --- /dev/null +++ b/commands/writing-and-editing/stylistic/add-metaphors.md @@ -0,0 +1,180 @@ +# Add Metaphors - Figurative Language Enhancer + +Inject vivid metaphors, analogies, and figurative language into content to make it more engaging, memorable, and accessible. Perfect for making technical concepts relatable, adding color to dry content, or creating more evocative writing. + +## Your Task + +Take the user's content and strategically add metaphors, similes, and analogies that illuminate concepts, create visual imagery, and make the writing more engaging while maintaining clarity and professionalism. + +## Types of Figurative Language to Add + +### 1. Conceptual Metaphors + +Explain abstract concepts through concrete comparisons: + +**Before**: "The database stores information efficiently." + +**After**: "The database is like a well-organized library, storing information efficiently with each piece of data catalogued and indexed so you can retrieve exactly what you need in seconds rather than searching through piles of unsorted papers." + +### 2. Visual Metaphors + +Create mental images: + +**Before**: "The network experienced high traffic." + +**After**: "The network experienced a rush-hour traffic jam, with data packets bumper-to-bumper, inching along congested digital highways as the infrastructure struggled to handle the volume." + +### 3. 
Process Analogies + +Compare processes to familiar activities: + +**Before**: "The compiler checks your code for errors." + +**After**: "The compiler acts like a meticulous editor reviewing your manuscript, catching typos, grammar mistakes, and logical inconsistencies before your code goes to 'publication.'" + +### 4. Comparative Metaphors + +Contrast before/after or different approaches: + +**Before**: "The old system was slow and the new one is fast." + +**After**: "Upgrading from the old system to the new one is like trading in a bicycle for a sports carβ€”you're still getting from A to B, but the experience and speed are in completely different leagues." + +### 5. Personification + +Give human qualities to abstract concepts or systems: + +**Before**: "The algorithm learns from data." + +**After**: "The algorithm is a diligent student, poring over data sets and learning from each example, gradually building expertise and improving its performance with every lesson." + +### 6. Extended Metaphors + +Develop a metaphor across multiple sentences: + +**Before**: "The architecture has multiple layers that work together." + +**After**: "Think of the architecture as a well-run kitchen brigade. The front-end is like the waitstaff, presenting beautifully plated experiences to customers. The middleware is the expediter, coordinating between front and back. The backend is the kitchen itselfβ€”the real powerhouse where the magic happens. And the database? That's the walk-in freezer and pantry, storing all the ingredients these layers need to work their magic." + +### 7. Everyday Analogies + +Use common experiences to explain technical concepts: + +**Before**: "Caching improves performance by storing frequently accessed data." + +**After**: "Caching is like keeping your coffee mug on your desk instead of walking to the kitchen cabinet every time you want a sip. 
By storing frequently accessed data close at hand, the system dramatically reduces the time spent fetching information." + +### 8. Nature and Physical World Metaphors + +Draw from natural phenomena: + +**Before**: "The system handles multiple requests simultaneously." + +**After**: "The system juggles multiple requests simultaneously, like a river branching into tributariesβ€”each stream flowing independently yet all part of the same watershed, eventually converging back together downstream." + +## Metaphor Placement Strategies + +### Where to Add Metaphors + +1. **Introducing new concepts**: Use metaphors to make first exposure memorable +2. **Explaining complex processes**: Break down complexity with familiar comparisons +3. **Transitions**: Create bridges between sections with thematic metaphors +4. **Opening hooks**: Start sections with engaging comparisons +5. **Summaries**: Reinforce key points with memorable imagery +6. **Abstract ideas**: Ground theoretical concepts in concrete terms + +### Density Guidelines + +**Light touch (1-2 metaphors per page)**: +- Professional/technical documents +- Subtle enhancement +- Preserve formal tone + +**Moderate use (3-5 metaphors per page)**: +- Blog posts and articles +- Educational content +- Balanced engagement + +**Heavy use (6+ metaphors per page)**: +- Creative writing +- Marketing content +- Maximum engagement and memorability + +## Metaphor Quality Guidelines + +### Good Metaphors + +βœ“ **Illuminating**: Actually clarifies the concept +βœ“ **Appropriate**: Matches the audience and context +βœ“ **Consistent**: Doesn't contradict itself when extended +βœ“ **Fresh**: Avoids clichΓ©s (unless intentionally used) +βœ“ **Cultural**: Accessible to your target audience +βœ“ **Scalable**: Works at the intended level of detail + +### Avoid + +βœ— **Mixed metaphors**: "We'll burn that bridge when we come to it" +βœ— **Overextension**: Pushing a metaphor too far +βœ— **Obscure references**: Metaphors the audience won't 
understand +βœ— **Inappropriate comparisons**: Context-insensitive or offensive analogies +βœ— **ClichΓ©d overuse**: Too many tired expressions +βœ— **Confusing metaphors**: More confusing than the original concept + +## Domain-Specific Metaphor Banks + +### Technology/Software +- Architecture as buildings/construction +- Networks as highways/roads +- Data as water/flow +- Security as locks/fortresses +- Code as recipes/blueprints +- Processes as assembly lines + +### Business/Management +- Growth as gardening/cultivation +- Strategy as chess/war games +- Teams as sports/orchestras +- Markets as weather/oceans +- Innovation as exploration/pioneering + +### Science/Research +- Discovery as treasure hunting +- Analysis as detective work +- Experimentation as cooking +- Theories as frameworks/scaffolding + +## Example Transformations + +**Before (Technical, no metaphors)**: +``` +Machine learning models require training data to develop predictive capabilities. The quality of the training data significantly impacts model performance. More diverse data typically produces more robust models that generalize well to new situations. +``` + +**After (Metaphor-enhanced)**: +``` +Machine learning models are like apprentices learning a craftβ€”they require training data to develop predictive capabilities. The quality of their education (the training data) significantly impacts their professional performance. Just as a chef who trains in multiple cuisines becomes more versatile than one who specializes narrowly, models fed diverse data become more robust, able to handle the unexpected ingredients of new situations with confidence and skill. +``` + +**Before (Plain business writing)**: +``` +Our company is transitioning to a new strategic direction. This requires all teams to adapt their processes and adopt new tools. The change will take time but will ultimately improve efficiency. +``` + +**After (Metaphor-rich)**: +``` +Our company is turning the ship toward new waters. 
Like a sailing vessel changing course, this requires all teams to adjust their sailsβ€”adapting processes and hauling in new tools to harness the winds of change. The turn won't happen overnight; ships of our size need time to change direction. But once we're sailing this new heading, we'll cut through the waves with greater speed and efficiency than ever before. +``` + +## Output Format + +Return the enhanced version of the content with metaphors naturally woven throughout. If the original content has a specific tone (technical, casual, formal), preserve it while adding figurative language that fits. + +## Usage Notes + +Specify if you want: +- **Metaphor density**: Light, moderate, or heavy +- **Tone**: Professional, casual, creative, educational +- **Audience**: Technical experts, general public, specific industry +- **Specific themes**: Prefer certain types of metaphors (nature, sports, cooking, etc.) + +Share the content you'd like to enhance with metaphors and analogies. diff --git a/commands/writing-and-editing/stylistic/business-to-casual.md b/commands/writing-and-editing/stylistic/business-to-casual.md new file mode 100644 index 0000000000000000000000000000000000000000..7ffccb54d9ef0d04192c63cf71d3f5b36416bef7 --- /dev/null +++ b/commands/writing-and-editing/stylistic/business-to-casual.md @@ -0,0 +1,31 @@ +--- +description: Convert business-appropriate text to casual, conversational style +tags: [writing, editing, style, tone, conversion] +--- + +You are a text style conversion specialist. Convert the provided business-appropriate text to a casual, conversational style while preserving the core message and key information. + +**Guidelines:** + +1. **Tone**: Transform formal language into friendly, relaxed communication +2. **Vocabulary**: Replace business jargon and formal terms with everyday language +3. **Structure**: Use shorter sentences, contractions, and natural flow +4. 
**Pronouns**: Switch from third person to first/second person where appropriate +5. **Formality markers**: Remove or soften overly formal phrases (e.g., "I am writing to inform you" β†’ "Just wanted to let you know") +6. **Preserve meaning**: Keep all important facts, dates, and commitments intact +7. **Context-appropriate**: Maintain professionalism where necessary (don't make it too informal for the situation) + +**Example transformations:** + +- "Please be advised that" β†’ "Just so you know" or "FYI" +- "We kindly request" β†’ "Could you" or "Would you mind" +- "At your earliest convenience" β†’ "When you get a chance" or "Whenever works for you" +- "Pursuant to our previous discussion" β†’ "Like we talked about" +- "I would like to express my gratitude" β†’ "Thanks!" or "Really appreciate it" + +**Output format:** +- Provide the converted casual text +- If multiple interpretations are possible, offer the most natural-sounding option +- Maintain paragraph structure unless combining short paragraphs improves flow + +Please convert the following business text to casual style: diff --git a/commands/writing-and-editing/stylistic/casual-to-business.md b/commands/writing-and-editing/stylistic/casual-to-business.md new file mode 100644 index 0000000000000000000000000000000000000000..5f8da1039cf46e54cb84cfffc566148e7cbd5ce7 --- /dev/null +++ b/commands/writing-and-editing/stylistic/casual-to-business.md @@ -0,0 +1,34 @@ +--- +description: Convert casual text to business-appropriate, professional style +tags: [writing, editing, style, tone, conversion, professional] +--- + +You are a text style conversion specialist. Convert the provided casual text to a business-appropriate, professional style while preserving the core message and key information. + +**Guidelines:** + +1. **Tone**: Transform informal language into polished, professional communication +2. **Vocabulary**: Replace slang, colloquialisms, and casual terms with business-appropriate language +3. 
**Structure**: Use complete sentences, proper grammar, and clear organization +4. **Contractions**: Expand contractions (e.g., "don't" β†’ "do not", "we're" β†’ "we are") +5. **Formality markers**: Add appropriate professional phrases while avoiding stuffiness +6. **Preserve meaning**: Keep all important facts, dates, and commitments intact +7. **Context-appropriate**: Match the level of formality to business context (email, report, memo, etc.) + +**Example transformations:** + +- "Just so you know" or "FYI" β†’ "Please be advised" or "For your information" +- "Could you" or "Would you mind" β†’ "We kindly request" or "I would appreciate if you could" +- "When you get a chance" β†’ "At your earliest convenience" or "When time permits" +- "Like we talked about" β†’ "As discussed" or "Pursuant to our previous conversation" +- "Thanks!" β†’ "Thank you for your assistance" or "I appreciate your cooperation" +- "ASAP" β†’ "as soon as possible" or "at your earliest convenience" +- "Heads up" β†’ "Please note" or "I wanted to inform you" + +**Output format:** +- Provide the converted business-appropriate text +- Maintain proper email/document structure if applicable (greeting, body, closing) +- Use appropriate paragraph breaks for readability +- If the casual text is very brief, expand it appropriately for professional context + +Please convert the following casual text to business-appropriate style: diff --git a/commands/writing-and-editing/stylistic/emotion/add-emotion.md b/commands/writing-and-editing/stylistic/emotion/add-emotion.md new file mode 100644 index 0000000000000000000000000000000000000000..64fe71e8427016dac0ed13382957a0bf5c819d84 --- /dev/null +++ b/commands/writing-and-editing/stylistic/emotion/add-emotion.md @@ -0,0 +1,7 @@ +This text is very "flat." 
+ +Try to make it a little bit more lively with: + +- Emotion +- Metaphors +- Statements of conviction \ No newline at end of file diff --git a/commands/writing-and-editing/stylistic/formality/make-casual.md new file mode 100644 index 0000000000000000000000000000000000000000..ecdcfe4eeeed4b4f72691442a98218282289fbe9 --- /dev/null +++ b/commands/writing-and-editing/stylistic/formality/make-casual.md @@ -0,0 +1,10 @@ +Make this text more casual and conversational. + +Apply casual writing style: +- Use contractions where natural +- Replace formal language with everyday equivalents +- Make sentences flow more naturally +- Use a friendly, approachable tone +- Break up overly complex sentences + +Keep the core message clear and maintain accuracy. diff --git a/commands/writing-and-editing/stylistic/formality/make-formal.md new file mode 100644 index 0000000000000000000000000000000000000000..d9851bf408dd22cd2d4f265e76dd2ed9cc99250a --- /dev/null +++ b/commands/writing-and-editing/stylistic/formality/make-formal.md @@ -0,0 +1,11 @@ +Make this text more formal and professional. + +Apply formal writing conventions: +- Replace casual language with formal equivalents +- Remove contractions (can't β†’ cannot) +- Use more sophisticated vocabulary where appropriate +- Eliminate slang and colloquialisms +- Structure sentences more formally +- Use objective tone + +Maintain the core message and meaning. diff --git a/commands/writing-and-editing/stylistic/hype/de-hype.md new file mode 100644 index 0000000000000000000000000000000000000000..67445367f915358308c689d2058dc74c703af2a3 --- /dev/null +++ b/commands/writing-and-editing/stylistic/hype/de-hype.md @@ -0,0 +1,5 @@ +This text contains language that is overly promotional. + +Edit it to achieve a more neutral and informative tone. 
+ +Remove hyperbole and unnecessary descriptors and replace them with more eye-level language. \ No newline at end of file diff --git a/commands/writing-and-editing/stylistic/hype/maximum-hype.md new file mode 100644 index 0000000000000000000000000000000000000000..73cd27be971355e037d86a947e0f5ce7aa3daa71 --- /dev/null +++ b/commands/writing-and-editing/stylistic/hype/maximum-hype.md @@ -0,0 +1,41 @@ +# Maximum Hype - Ultra-Promotional Style + +Transform any content into the most exciting, promotional, hype-filled version possible. This style is perfect for product launches, marketing campaigns, announcements, and anything that needs maximum energy and enthusiasm. + +## Your Task + +Take the user's input content and rewrite it with: + +- **Maximum enthusiasm and energy**: Use powerful, exciting language throughout +- **Superlatives everywhere**: "Revolutionary," "game-changing," "unprecedented," "industry-leading," "cutting-edge" +- **Action-oriented power words**: "Transform," "unleash," "skyrocket," "dominate," "revolutionize" +- **Urgency and FOMO**: Create excitement and fear of missing out +- **Bold claims**: Make big, impressive statements (while staying truthful to the core content) +- **Emotional appeal**: Connect with readers' aspirations and desires +- **Victory language**: Focus on success, winning, achievement, breakthroughs +- **Exclamation points**: Use them liberally (but don't overdo it) +- **Numbers and metrics**: Emphasize impressive statistics when available +- **Social proof indicators**: Reference widespread adoption, popularity, trust + +## Style Guidelines + +1. **Opening hook**: Start with a bang - make it impossible to look away +2. **Build momentum**: Each sentence should increase excitement +3. **Power phrases**: "Discover the secret," "Join thousands," "Experience the future," "Don't miss out" +4. **Short, punchy sentences**: Mix with longer, flowing ones for rhythm +5. 
**Active voice only**: Dynamic, energetic, never passive +6. **Future-focused**: Emphasize transformation and possibilities +7. **Exclusive language**: "Elite," "premium," "exclusive," "select" +8. **Call to action**: End with compelling next steps + +## Examples + +**Before**: "Our new software helps teams collaborate more efficiently." + +**After**: "πŸš€ REVOLUTIONIZE Your Team's Productivity! Introducing the game-changing collaboration platform that's transforming how elite teams work together. Experience unprecedented efficiency, unlock explosive growth, and dominate your industry like never before. Join thousands of forward-thinking leaders who've already made the switch. The future of teamwork is here - don't get left behind!" + +## Output Format + +Return the hyped-up version of the content, maintaining the core message while maximizing promotional energy. If the input is structured (like a blog post or product description), preserve the structure but inject maximum hype into every section. + +Ready to create some SERIOUS excitement? Share the content you want to supercharge! diff --git a/commands/writing-and-editing/stylistic/improve-argumentation-order.md b/commands/writing-and-editing/stylistic/improve-argumentation-order.md new file mode 100644 index 0000000000000000000000000000000000000000..7b8a6bddd880d4c1a94b45ec51e944af5d7c000e --- /dev/null +++ b/commands/writing-and-editing/stylistic/improve-argumentation-order.md @@ -0,0 +1,96 @@ +# Improve Argumentation Order + +You are an argumentation structure specialist. Your task is to analyze and reorganize the provided text to create a more logical, compelling sequence of arguments. + +## Your Task + +Restructure the text to optimize argumentative flow using these principles: + +### 1. Analyze Current Structure +- Identify all key arguments and supporting points +- Map relationships between ideas +- Note any logical gaps or weak transitions +- Assess the current persuasive trajectory + +### 2. 
Choose Optimal Ordering Strategy + +**Strongest-to-Weakest (Primacy)** +- Lead with most compelling argument +- Best for skeptical audiences +- Creates strong first impression + +**Weakest-to-Strongest (Recency)** +- Build momentum to most powerful point +- Best for engaged audiences +- Leaves lasting impression + +**Problem-Solution** +- Establish problem urgency first +- Present solution with supporting arguments +- Best for action-oriented goals + +**Chronological** +- Follow natural time sequence +- Best for historical or process-based arguments +- Clear causal relationships + +**Categorical** +- Group related arguments together +- Best for complex topics with multiple dimensions +- Clear organizational logic + +**Inductive (Specific to General)** +- Start with examples, data, cases +- Build to broader conclusion +- Best when reader needs convincing + +**Deductive (General to Specific)** +- State principle or thesis first +- Support with specific evidence +- Best when thesis is likely accepted + +### 3. Restructuring Principles + +- **Logical progression**: Each point should flow naturally to the next +- **Build complexity**: Start simple, add nuance progressively +- **Address objections strategically**: Refute counterarguments at optimal moments +- **Create narrative arc**: Beginning (context), middle (development), end (resolution) +- **Use signposting**: Clear transitions showing relationships between arguments + +### 4. Enhance Connections + +- Add transitional phrases showing logical relationships +- Create bridges between sections +- Eliminate repetition through strategic consolidation +- Ensure supporting evidence appears near relevant claims + +## Output Format + +Provide: +1. **Restructured text** with improved argument order +2. **Brief explanation** of the organizational strategy chosen and why +3. **Key changes** highlighting major reorderings + +## Example + +**Before:** +``` +Our product saves money. It's also environmentally friendly. 
+Many competitors have security issues. We use advanced encryption. +Our product is easy to use. Studies show 40% cost reduction. +``` + +**After (Strongest-to-Weakest):** +``` +Our product delivers proven results: studies show 40% cost reduction +for businesses like yours. Beyond financial benefits, we prioritize +security with advanced encryptionβ€”a critical advantage over competitors +facing ongoing security issues. As a bonus, our solution is +environmentally friendly and remarkably easy to use. +``` + +**Strategy:** Strongest-to-weakest, leading with quantified benefit (40% reduction), followed by critical differentiator (security), ending with additional benefits. Consolidated related points and improved transitions. + +--- + +Now, please provide the text whose argumentation order you'd like me to improve. diff --git a/commands/writing-and-editing/stylistic/jargon/jargon-removal.md b/commands/writing-and-editing/stylistic/jargon/jargon-removal.md new file mode 100644 index 0000000000000000000000000000000000000000..629bc37241fd7606f7671b56b7cb944d7dec0747 --- /dev/null +++ b/commands/writing-and-editing/stylistic/jargon/jargon-removal.md @@ -0,0 +1,110 @@ +# Jargon Removal - Plain Language Converter + +Transform technical jargon, buzzwords, and complex terminology into clear, accessible language that anyone can understand. Perfect for making content accessible to broader audiences, explaining technical concepts to non-technical stakeholders, or simplifying overly complex writing. + +## Your Task + +Take the user's content and systematically remove or replace jargon with plain language while preserving the core meaning and technical accuracy. 
+ +### What to Replace + +#### Technical Jargon +- **Industry-specific terms**: Replace with common language equivalents +- **Acronyms and abbreviations**: Spell out and explain on first use, or replace entirely +- **Complex terminology**: Use simpler, more familiar words +- **Specialized vocabulary**: Convert to everyday language + +#### Business Buzzwords +- "Synergy" β†’ "working together effectively" +- "Leverage" β†’ "use" +- "Paradigm shift" β†’ "major change" +- "Circle back" β†’ "return to" or "discuss later" +- "Move the needle" β†’ "make progress" or "have an impact" +- "Low-hanging fruit" β†’ "easy wins" or "simple opportunities" +- "Think outside the box" β†’ "be creative" or "try a new approach" +- "Best of breed" β†’ "best available" or "highest quality" +- "Going forward" β†’ "from now on" or "in the future" + +#### Technical Buzzwords +- "Cloud-native" β†’ "designed to run on remote servers" +- "AI-powered" β†’ "uses artificial intelligence" (or explain what it actually does) +- "Blockchain-based" β†’ explain the actual mechanism +- "Next-generation" β†’ "new" or "improved" +- "Enterprise-grade" β†’ "suitable for large organizations" or "reliable and scalable" +- "Cutting-edge" β†’ "modern" or "new" +- "Robust" β†’ "reliable" or "well-built" +- "Scalable" β†’ "can grow with your needs" + +### How to Simplify + +1. **Replace complex words with simple ones** + - "Utilize" β†’ "use" + - "Facilitate" β†’ "help" or "make easier" + - "Implement" β†’ "put in place" or "start using" + - "Optimize" β†’ "improve" or "make better" + - "Parameter" β†’ "setting" or "option" + +2. **Break down compound concepts** + - "API-driven microservices architecture" β†’ "the system is built from small, independent services that communicate with each other" + +3. 
**Explain rather than label**
+   - "Machine learning algorithm" → "a program that learns from data to make predictions"
+   - "Distributed system" → "a system that runs on multiple computers working together"
+
+4. **Remove unnecessary qualification**
+   - "Highly performant" → "fast"
+   - "Fully integrated" → "integrated" or "works together"
+   - "Seamlessly deployed" → "deployed" or "set up"
+
+5. **Clarify acronyms**
+   - "REST API" → "a way for programs to communicate over the internet"
+   - "CI/CD pipeline" → "automated testing and deployment process"
+   - "SLA" → "service agreement" or "guaranteed uptime"
+
+### Simplification Principles
+
+- **Active voice**: Replace passive constructions with active ones
+- **Short sentences**: Break up long, complex sentences
+- **Concrete examples**: Replace abstract concepts with specific examples
+- **Direct language**: Say what you mean without fancy vocabulary
+- **Human terms**: Use language people actually speak
+- **Everyday analogies**: Compare technical concepts to familiar things
+
+### What to Keep
+
+- **Essential technical terms**: If a term is necessary and has no good substitute, keep it but explain it
+- **Proper nouns**: Keep product names, company names, etc.
+- **Precision**: Don't sacrifice accuracy for simplicity
+- **Context-appropriate language**: Some audiences expect certain terminology
+
+## Example Transformations
+
+**Before**:
+```
+Our cutting-edge, cloud-native platform leverages AI-powered analytics to facilitate real-time insights, enabling stakeholders to synergize cross-functional workflows and optimize mission-critical operations at scale.
+```
+
+**After**:
+```
+Our modern web platform uses artificial intelligence to analyze your data instantly, helping your team work together more effectively and improve important business operations as your company grows. 
+``` + +**Before**: +``` +The API-first architecture ensures seamless integration with existing enterprise systems through our robust SDK, providing unparalleled extensibility for next-generation applications. +``` + +**After**: +``` +The system is designed to work easily with your current software through our developer toolkit, making it simple to add new features to your applications. +``` + +## Output Format + +Return the simplified version of the content with all jargon removed or replaced with plain language. If certain technical terms must be retained for accuracy, provide brief explanations in parentheses or footnotes. + +## Tone + +Maintain the original intent and professionalism while making the language accessible and clear. The goal is clarity without condescension. + +Share the jargon-heavy content you'd like to simplify. diff --git a/commands/writing-and-editing/stylistic/jargon/jargon-stuffing.md b/commands/writing-and-editing/stylistic/jargon/jargon-stuffing.md new file mode 100644 index 0000000000000000000000000000000000000000..3347f9add1246762e7e3958ecf4494ca19aa197c --- /dev/null +++ b/commands/writing-and-editing/stylistic/jargon/jargon-stuffing.md @@ -0,0 +1,159 @@ +# Jargon Stuffing - Corporate Buzzword Generator + +Transform plain, straightforward language into impressively dense corporate jargon and technical buzzwords. Perfect for satire, humor, or when you need to sound "enterprise-ready" for presentations, proposals, or impressing stakeholders who expect maximum buzzword density. + +## Your Task + +Take the user's content and systematically inject jargon, buzzwords, and complex terminology to make it sound as impressively corporate and technical as possible. 
+
+### Jargon to Add
+
+#### Corporate Buzzwords (Use Liberally)
+
+**Action Verbs**:
+- use → leverage, utilize, harness, operationalize
+- improve → optimize, enhance, augment, synergize
+- help → facilitate, enable, empower, drive
+- change → transform, revolutionize, disrupt, pivot
+- manage → orchestrate, streamline, coordinate
+- create → ideate, architect, engineer, cultivate
+- work together → synergize, collaborate, align, integrate
+- start → initiate, launch, implement, deploy
+- grow → scale, expand, amplify, proliferate
+
+**Qualifying Adjectives**:
+- good → best-in-class, industry-leading, world-class, premium
+- new → next-generation, cutting-edge, innovative, disruptive
+- reliable → robust, enterprise-grade, mission-critical, production-ready
+- fast → high-performance, real-time, low-latency, performant
+- complete → end-to-end, holistic, comprehensive, 360-degree
+- custom → bespoke, tailored, personalized, purpose-built
+- connected → integrated, unified, seamless, cohesive
+
+**Nouns and Concepts**:
+- idea → paradigm, framework, methodology, approach
+- plan → roadmap, strategy, initiative, blueprint
+- solution → ecosystem, platform, suite, offering
+- feature → capability, functionality, competency
+- benefit → value proposition, differentiator, synergy
+- method → best practice, framework, playbook
+- change → transformation, evolution, disruption
+
+#### Technical Buzzwords
+
+**Architecture Terms**:
+- cloud-native, microservices-based, API-first
+- distributed, decentralized, federated
+- event-driven, serverless, containerized
+- service-oriented, plugin-based, modular
+- scalable, elastic, self-healing
+- multi-tenant, zero-trust, immutable
+
+**Tech Qualifiers**:
+- AI-powered, ML-driven, blockchain-based
+- data-driven, analytics-enabled, insight-driven
+- automation-first, DevOps-enabled, GitOps-native
+- observability-focused, security-hardened
+- performance-optimized, cost-effective 
+ +**Emerging Tech**: +- quantum-ready, edge-computing, IoT-enabled +- 5G-optimized, AR/VR-compatible +- Web3-native, metaverse-ready + +#### Business Speak + +**Strategy Phrases**: +- "align stakeholders" +- "drive value creation" +- "maximize ROI" +- "unlock potential" +- "accelerate time-to-market" +- "gain competitive advantage" +- "capture market share" +- "future-proof the business" +- "enable digital transformation" + +**Process Phrases**: +- "move the needle" +- "shift the paradigm" +- "think outside the box" +- "circle back" +- "touch base" +- "run it up the flagpole" +- "take it offline" +- "drill down" +- "peel back the onion" +- "move forward" + +### Jargon-Stuffing Techniques + +1. **Compound Buzzwords**: Combine multiple buzzwords + - "our AI-powered, cloud-native, microservices-based platform" + - "a holistic, end-to-end, enterprise-grade solution" + +2. **Redundant Qualification**: Add multiple adjectives that mean the same thing + - "innovative and groundbreaking" + - "seamless and frictionless" + - "robust and resilient" + +3. **Acronym Injection**: Add relevant (or semi-relevant) acronyms + - API, SDK, REST, GraphQL, CI/CD, K8s, ML, AI, IoT, SaaS, PaaS + +4. **Passive Voice**: Make simple actions sound complex + - "the data is leveraged" instead of "we use the data" + - "optimization is achieved through" instead of "we optimize by" + +5. **Nominalization**: Turn verbs into nouns + - "achieve implementation" instead of "implement" + - "provide facilitation" instead of "facilitate" + +6. **Strategic Vagueness**: Replace specifics with impressive generalities + - "drive key performance indicators" instead of "improve sales" + - "optimize operational excellence" instead of "work more efficiently" + +## Example Transformations + +**Before**: +``` +Our new app helps teams work together better by making it easy to share files and chat in real-time. 
+``` + +**After**: +``` +Our next-generation, cloud-native collaboration platform leverages AI-powered insights to facilitate seamless cross-functional synergies, enabling enterprise teams to operationalize knowledge management through a unified, real-time communication and document-sharing ecosystem that drives organizational alignment and maximizes productivity ROI. +``` + +**Before**: +``` +The software is fast, reliable, and easy to use. It works on any device and connects to your existing tools. +``` + +**After**: +``` +Our industry-leading, enterprise-grade SaaS platform delivers unparalleled performance optimization with mission-critical reliability, offering an intuitive, frictionless user experience across a device-agnostic, omnichannel deployment model. The solution provides seamless, API-first integration capabilities with your existing technology stack, enabling holistic digital transformation while future-proofing your infrastructure investment. +``` + +**Before**: +``` +We help companies analyze their data to make better decisions. +``` + +**After**: +``` +We empower organizations to harness the full potential of their data assets through our cutting-edge, AI-driven analytics platform, unlocking actionable insights that catalyze data-informed decision-making, optimize strategic initiatives, and drive measurable business outcomes at scale. +``` + +## Output Format + +Return the jargon-stuffed version of the content with maximum buzzword density while maintaining grammatical correctness. The result should sound impressively corporate and technical, even if it's somewhat absurd. + +## Tone + +Maintain a serious, professional tone even as you pile on the jargon. The goal is to sound as "enterprise" as possible. Think corporate presentation deck meets vendor white paper. 
+ +## Usage Note + +While this command is great for satire and humor, it can also be genuinely useful when you need to match the linguistic expectations of certain corporate environments, RFP responses, or stakeholder presentations where plain language might seem insufficiently "strategic." + +Share the plain-language content you'd like to transform into maximum corporate jargon. diff --git a/commands/writing-and-editing/stylistic/make-more-persuasive.md b/commands/writing-and-editing/stylistic/make-more-persuasive.md new file mode 100644 index 0000000000000000000000000000000000000000..13dba7e7d4bed2f2e44d2569c4b65409b4f158e8 --- /dev/null +++ b/commands/writing-and-editing/stylistic/make-more-persuasive.md @@ -0,0 +1,77 @@ +# Make Text More Persuasive + +You are a persuasive writing specialist. Your task is to rewrite the provided text to make it more compelling and convincing while maintaining factual accuracy. + +## Your Task + +Transform the text using proven persuasive techniques: + +### 1. Strengthen the Core Message +- Lead with the strongest benefit or value proposition +- Make the main argument crystal clear +- Create a compelling hook in the opening +- End with a strong call to action or memorable conclusion + +### 2. Apply Persuasive Techniques + +**Ethos (Credibility)** +- Add authoritative sources or expert backing +- Include relevant credentials or experience +- Reference reputable organizations or studies + +**Pathos (Emotion)** +- Use vivid, sensory language +- Create relatable scenarios +- Appeal to values, aspirations, or concerns +- Use storytelling elements where appropriate + +**Logos (Logic)** +- Present clear reasoning +- Use data and evidence effectively +- Build logical progression of ideas +- Address potential objections preemptively + +### 3. 
Enhance Rhetoric + +- **Power words**: Incorporate impactful vocabulary (proven, essential, breakthrough, transform, guarantee) +- **Active voice**: Convert passive constructions to active +- **Concrete details**: Replace vague statements with specific claims +- **Social proof**: Add references to popularity, testimonials, or widespread adoption +- **Urgency**: Create appropriate sense of timeliness (when authentic) +- **Contrast**: Highlight before/after or problem/solution dynamics + +### 4. Structural Improvements + +- **Headlines**: Make subheadings compelling +- **Transitions**: Smooth logical flow between ideas +- **Emphasis**: Bold or italicize key points strategically +- **Lists**: Use bullet points for scannable benefits +- **Questions**: Engage reader with rhetorical questions + +## Guidelines + +- **Maintain truthfulness**: Never exaggerate or misrepresent facts +- **Preserve intent**: Keep the original purpose and message +- **Match audience**: Consider the target reader's sophistication and concerns +- **Avoid manipulation**: Use ethical persuasion, not deception +- **Respect tone limits**: Don't oversell if the original is measured + +## Output Format + +Provide the rewritten text with: +1. The persuasive revision +2. A brief note explaining the key persuasive techniques applied + +## Example + +**Before:** +"Our software can help with project management. It has features like task tracking and team collaboration." + +**After:** +"Transform your team's productivity with our proven project management solution. Join over 10,000 companies who've reduced project delays by 40% using our intuitive task tracking and seamless collaboration tools. Your team deserves software that actually delivers results." + +**Techniques applied:** Added social proof (10,000 companies), concrete benefit (40% reduction), emotional appeal (team deserves better), power words (transform, proven, seamless), and stronger opening. 
+ +--- + +Now, please provide the text you'd like me to make more persuasive. diff --git a/commands/writing-and-editing/stylistic/simplicity/simplify.md b/commands/writing-and-editing/stylistic/simplicity/simplify.md new file mode 100644 index 0000000000000000000000000000000000000000..2f3f2ed96f36cb4c0958c00a628eaab65145f78d --- /dev/null +++ b/commands/writing-and-editing/stylistic/simplicity/simplify.md @@ -0,0 +1,11 @@ +Simplify this text to make it easier to understand. + +Apply simplification techniques: +- Replace complex words with simpler alternatives +- Break long sentences into shorter ones +- Remove unnecessary jargon +- Use active voice instead of passive +- Make the structure more straightforward +- Clarify confusing phrases + +Maintain accuracy and all important information. diff --git a/commands/writing-and-editing/stylistic/technicality/add-technical-depth.md b/commands/writing-and-editing/stylistic/technicality/add-technical-depth.md new file mode 100644 index 0000000000000000000000000000000000000000..9d2b3558164a6f344eb5e5154657d7bca0c77437 --- /dev/null +++ b/commands/writing-and-editing/stylistic/technicality/add-technical-depth.md @@ -0,0 +1,10 @@ +Add more technical depth and specificity to this text. + +Enhance technical content: +- Replace general terms with specific technical terminology +- Add relevant technical details +- Include technical context where appropriate +- Use industry-standard nomenclature +- Make explanations more technically precise + +Assume the reader has technical background. Keep accuracy paramount. diff --git a/commands/writing-and-editing/summary/summarize-to-paragraph.md b/commands/writing-and-editing/summary/summarize-to-paragraph.md new file mode 100644 index 0000000000000000000000000000000000000000..c59e4a9ed6d29e0fefa694e456985b0309d6d430 --- /dev/null +++ b/commands/writing-and-editing/summary/summarize-to-paragraph.md @@ -0,0 +1,36 @@ +# Summarize Text to Paragraph + +You are a summarization specialist. 
Your task is to condense the provided text into a clear, comprehensive single paragraph summary. + +## Your Task + +Read the provided text and create a concise paragraph summary that: + +1. **Captures the main point** - What is the core message or purpose? +2. **Includes key details** - What are the most important facts, findings, or arguments? +3. **Maintains accuracy** - Represents the original text faithfully without distortion +4. **Flows naturally** - Reads as a cohesive paragraph, not a list of points +5. **Stands alone** - Can be understood without reading the original + +## Guidelines + +- Target length: 3-6 sentences (adjust based on source text complexity) +- Use clear, direct language +- Avoid starting with "This text..." or "The document..." - jump straight into the content +- Preserve critical terminology and concepts +- Omit minor details, examples, and redundancies +- Maintain the original tone (formal/informal, technical/accessible) + +## Output Format + +Simply provide the paragraph summary without additional formatting or headers. + +## Example + +**Original text** (500 words about machine learning benefits) + +**Summary**: Machine learning enables computers to learn from data and improve their performance without explicit programming, revolutionizing industries from healthcare to finance. The technology excels at pattern recognition, predictive analytics, and automation of complex decision-making processes. While offering significant advantages in efficiency and accuracy, machine learning systems require careful consideration of data quality, bias, and interpretability to ensure responsible deployment. + +--- + +Now, please provide the text you'd like me to summarize into a paragraph. 
diff --git a/commands/writing-and-editing/transformations/first-to-third.md b/commands/writing-and-editing/transformations/first-to-third.md new file mode 100644 index 0000000000000000000000000000000000000000..a908ec3cdf81d03669b121cb6e33efd3707b6394 --- /dev/null +++ b/commands/writing-and-editing/transformations/first-to-third.md @@ -0,0 +1,10 @@ +Convert this text from first person to third person. + +Change pronouns and perspective: +- "I" becomes "he/she/they" (use appropriate gender or ask if unclear) +- "me" becomes "him/her/them" +- "my" becomes "his/her/their" +- "we" becomes "they" +- Adjust verb forms accordingly + +Maintain the original meaning, tone, and all other content. Only change the person/perspective. diff --git a/commands/writing-and-editing/transformations/past-to-present.md b/commands/writing-and-editing/transformations/past-to-present.md new file mode 100644 index 0000000000000000000000000000000000000000..b9f3ced4636c791f13a97e69613e0a1d69c68678 --- /dev/null +++ b/commands/writing-and-editing/transformations/past-to-present.md @@ -0,0 +1,9 @@ +Convert this text from past tense to present tense. + +Transform all verbs and time references: +- Change past tense verbs to present tense +- Convert "was/were" to "is/are" +- Adjust time markers if they conflict with present tense +- Maintain verb agreement with subjects + +Keep the original meaning, tone, and style. Only change the tense. diff --git a/commands/writing-and-editing/transformations/present-to-past.md b/commands/writing-and-editing/transformations/present-to-past.md new file mode 100644 index 0000000000000000000000000000000000000000..a3c81b1b64b29fe70e9d8f65049d973cd207c00b --- /dev/null +++ b/commands/writing-and-editing/transformations/present-to-past.md @@ -0,0 +1,9 @@ +Convert this text from present tense to past tense. 
+ +Transform all verbs and time references: +- Change present tense verbs to past tense +- Convert "is/are" to "was/were" +- Adjust time markers if they conflict with past tense +- Maintain verb agreement with subjects + +Keep the original meaning, tone, and style. Only change the tense. diff --git a/commands/writing-and-editing/transformations/third-to-first.md b/commands/writing-and-editing/transformations/third-to-first.md new file mode 100644 index 0000000000000000000000000000000000000000..6be8987ca39c73a570017929ccfd883a11b1c68a --- /dev/null +++ b/commands/writing-and-editing/transformations/third-to-first.md @@ -0,0 +1,9 @@ +Convert this text from third person to first person. + +Change pronouns and perspective: +- "he/she/they" becomes "I" (or "we" if plural) +- "him/her/them" becomes "me" (or "us" if plural) +- "his/her/their" becomes "my" (or "our" if plural) +- Adjust verb forms accordingly + +Maintain the original meaning, tone, and all other content. Only change the person/perspective.