content
stringlengths
1
103k
path
stringlengths
8
216
filename
stringlengths
2
179
language
stringclasses
15 values
size_bytes
int64
2
189k
quality_score
float64
0.5
0.95
complexity
float64
0
1
documentation_ratio
float64
0
1
repository
stringclasses
5 values
stars
int64
0
1k
created_date
stringdate
2023-07-10 19:21:08
2025-07-09 19:11:45
license
stringclasses
4 values
is_test
bool
2 classes
file_hash
stringlengths
32
32
---\n:position: before\n:position_in_additional_file_patterns: before\n:position_in_class: before\n:position_in_factory: before\n:position_in_fixture: before\n:position_in_routes: before\n:position_in_serializer: before\n:position_in_test: before\n:classified_sort: true\n:exclude_controllers: true\n:exclude_factories: true\n:exclude_fixtures: true\n:exclude_helpers: true\n:exclude_scaffolds: true\n:exclude_serializers: true\n:exclude_sti_subclasses: true\n:exclude_tests: true\n:force: false\n:format_markdown: false\n:format_rdoc: false\n:format_yard: false\n:frozen: false\n:ignore_model_sub_dir: false\n:ignore_unknown_models: false\n:include_version: false\n:show_complete_foreign_keys: false\n:show_foreign_keys: false\n:show_indexes: false\n:simple_indexes: false\n:sort: false\n:timestamp: false\n:trace: false\n:with_comment: true\n:with_column_comments: true\n:with_table_comments: true\n:active_admin: false\n:command:\n:debug: false\n:hide_default_column_types: ''\n:hide_limit_column_types: 'integer,boolean'\n:ignore_columns:\n:ignore_routes:\n:models: true\n:routes: false\n:skip_on_db_migrate: false\n:target_action: :do_annotations\n:wrapper:\n:wrapper_close:\n:wrapper_open:\n:classes_default_to_s: []\n:additional_file_patterns: []\n:model_dir:\n - app/models\n:require: []\n:root_dir:\n - ''\n\n:show_check_constraints: false\n
dataset_sample\yaml\mastodon_mastodon\.annotaterb.yml
.annotaterb.yml
YAML
1,293
0.85
0
0
python-kit
713
2024-08-06T02:44:51.838778
GPL-3.0
false
f49688771487470ea35afe1812e20fc2
exclude:\n - 'vendor/**/*'\n\nrequire:\n - ./lib/linter/haml_middle_dot.rb\n\nlinters:\n AltText:\n enabled: true\n MiddleDot:\n enabled: true\n LineLength:\n max: 300\n ViewLength:\n max: 200 # Override default value of 100 inherited from rubocop\n
dataset_sample\yaml\mastodon_mastodon\.haml-lint.yml
.haml-lint.yml
YAML
252
0.95
0
0
react-lib
464
2024-10-07T06:14:08.198169
GPL-3.0
false
2b657376df12a1b4de3babf963ada22c
---\nAllCops:\n CacheRootDirectory: tmp\n DisplayStyleGuide: true\n Exclude:\n - Vagrantfile\n - config/initializers/json_ld*\n - lib/mastodon/migration_helpers.rb\n ExtraDetails: true\n NewCops: enable\n TargetRubyVersion: 3.2 # Oldest supported ruby version\n\ninherit_from:\n - .rubocop/layout.yml\n - .rubocop/metrics.yml\n - .rubocop/naming.yml\n - .rubocop/rails.yml\n - .rubocop/rspec_rails.yml\n - .rubocop/rspec.yml\n - .rubocop/style.yml\n - .rubocop/i18n.yml\n - .rubocop/custom.yml\n - .rubocop_todo.yml\n - .rubocop/strict.yml\n\ninherit_mode:\n merge:\n - Exclude\n\nplugins:\n - rubocop-capybara\n - rubocop-i18n\n - rubocop-performance\n - rubocop-rails\n - rubocop-rspec\n - rubocop-rspec_rails\n
dataset_sample\yaml\mastodon_mastodon\.rubocop.yml
.rubocop.yml
YAML
713
0.8
0
0
python-kit
725
2023-10-16T09:20:24.259554
Apache-2.0
false
47ce40b96d87ed454daa4193702e1d71
# This configuration was generated by\n# `rubocop --auto-gen-config --auto-gen-only-exclude --no-offense-counts --no-auto-gen-timestamp`\n# using RuboCop version 1.75.2.\n# The point is for the user to remove these configuration records\n# one by one as the offenses are removed from the code base.\n# Note that changes in the inspected code, or installation of new\n# versions of RuboCop, may require this file to be generated again.\n\nLint/NonLocalExitFromIterator:\n Exclude:\n - 'app/helpers/json_ld_helper.rb'\n\n# Configuration parameters: AllowedMethods, AllowedPatterns, CountRepeatedAttributes.\nMetrics/AbcSize:\n Max: 82\n\n# Configuration parameters: CountBlocks, CountModifierForms, Max.\nMetrics/BlockNesting:\n Exclude:\n - 'lib/tasks/mastodon.rake'\n\n# Configuration parameters: AllowedMethods, AllowedPatterns.\nMetrics/CyclomaticComplexity:\n Max: 25\n\n# Configuration parameters: AllowedMethods, AllowedPatterns.\nMetrics/PerceivedComplexity:\n Max: 27\n\nRails/OutputSafety:\n Exclude:\n - 'config/initializers/simple_form.rb'\n\n# This cop supports safe autocorrection (--autocorrect).\n# Configuration parameters: AllowedVars.\nStyle/FetchEnvVar:\n Exclude:\n - 'config/environments/production.rb'\n - 'config/initializers/2_limited_federation_mode.rb'\n - 'config/initializers/3_omniauth.rb'\n - 'config/initializers/cache_buster.rb'\n - 'config/initializers/devise.rb'\n - 'config/initializers/paperclip.rb'\n - 'config/initializers/vapid.rb'\n - 'lib/tasks/repo.rake'\n\n# This cop supports safe autocorrection (--autocorrect).\n# Configuration parameters: EnforcedStyle, MaxUnannotatedPlaceholdersAllowed, Mode, AllowedMethods, AllowedPatterns.\n# SupportedStyles: annotated, template, unannotated\n# AllowedMethods: redirect\nStyle/FormatStringToken:\n Exclude:\n - 'config/initializers/devise.rb'\n - 'lib/paperclip/color_extractor.rb'\n\n# This cop supports safe autocorrection (--autocorrect).\n# Configuration parameters: MinBodyLength, 
AllowConsecutiveConditionals.\nStyle/GuardClause:\n Enabled: false\n\n# Configuration parameters: AllowedMethods.\n# AllowedMethods: respond_to_missing?\nStyle/OptionalBooleanParameter:\n Exclude:\n - 'app/lib/admin/system_check/message.rb'\n - 'app/lib/request.rb'\n - 'app/lib/webfinger.rb'\n - 'app/services/block_domain_service.rb'\n - 'app/services/fetch_resource_service.rb'\n - 'app/workers/domain_block_worker.rb'\n - 'app/workers/unfollow_follow_worker.rb'\n\n# This cop supports unsafe autocorrection (--autocorrect-all).\n# Configuration parameters: EnforcedStyle.\n# SupportedStyles: short, verbose\nStyle/PreferredHashMethods:\n Exclude:\n - 'config/initializers/paperclip.rb'\n\n# This cop supports safe autocorrection (--autocorrect).\nStyle/RedundantConstantBase:\n Exclude:\n - 'config/environments/production.rb'\n - 'config/initializers/sidekiq.rb'\n
dataset_sample\yaml\mastodon_mastodon\.rubocop_todo.yml
.rubocop_todo.yml
YAML
2,837
0.95
0.011905
0.347222
react-lib
69
2024-04-06T19:01:41.561467
BSD-3-Clause
false
9b6f3a7ddffdb4780ec3fc72531d0745
# This is needed for the GitHub Action\nproject_id_env: CROWDIN_PROJECT_ID\napi_token_env: CROWDIN_PERSONAL_TOKEN\n\nskip_untranslated_strings: 1\ncommit_message: '[ci skip]'\npreserve_hierarchy: true\nfiles:\n - source: /app/javascript/mastodon/locales/en.json\n translation: /app/javascript/mastodon/locales/%two_letters_code%.json\n - source: /config/locales/en.yml\n translation: /config/locales/%two_letters_code%.yml\n - source: /config/locales/simple_form.en.yml\n translation: /config/locales/simple_form.%two_letters_code%.yml\n - source: /config/locales/activerecord.en.yml\n translation: /config/locales/activerecord.%two_letters_code%.yml\n - source: /config/locales/devise.en.yml\n translation: /config/locales/devise.%two_letters_code%.yml\n - source: /config/locales/doorkeeper.en.yml\n translation: /config/locales/doorkeeper.%two_letters_code%.yml\n
dataset_sample\yaml\mastodon_mastodon\crowdin.yml
crowdin.yml
YAML
870
0.8
0.05
0.052632
awesome-app
165
2024-10-18T12:36:21.126148
GPL-3.0
false
f058bd4bb7f464c78c7159ca304ae59c
# This file is designed for production server deployment, not local development work\n# For a containerized local dev environment, see: https://github.com/mastodon/mastodon/blob/main/docs/DEVELOPMENT.md#docker\n\nservices:\n db:\n restart: always\n image: postgres:14-alpine\n shm_size: 256mb\n networks:\n - internal_network\n healthcheck:\n test: ['CMD', 'pg_isready', '-U', 'postgres']\n volumes:\n - ./postgres14:/var/lib/postgresql/data\n environment:\n - 'POSTGRES_HOST_AUTH_METHOD=trust'\n\n redis:\n restart: always\n image: redis:7-alpine\n networks:\n - internal_network\n healthcheck:\n test: ['CMD', 'redis-cli', 'ping']\n volumes:\n - ./redis:/data\n\n # es:\n # restart: always\n # image: docker.elastic.co/elasticsearch/elasticsearch:7.17.4\n # environment:\n # - "ES_JAVA_OPTS=-Xms512m -Xmx512m -Des.enforce.bootstrap.checks=true"\n # - "xpack.license.self_generated.type=basic"\n # - "xpack.security.enabled=false"\n # - "xpack.watcher.enabled=false"\n # - "xpack.graph.enabled=false"\n # - "xpack.ml.enabled=false"\n # - "bootstrap.memory_lock=true"\n # - "cluster.name=es-mastodon"\n # - "discovery.type=single-node"\n # - "thread_pool.write.queue_size=1000"\n # networks:\n # - external_network\n # - internal_network\n # healthcheck:\n # test: ["CMD-SHELL", "curl --silent --fail localhost:9200/_cluster/health || exit 1"]\n # volumes:\n # - ./elasticsearch:/usr/share/elasticsearch/data\n # ulimits:\n # memlock:\n # soft: -1\n # hard: -1\n # nofile:\n # soft: 65536\n # hard: 65536\n # ports:\n # - '127.0.0.1:9200:9200'\n\n web:\n # You can uncomment the following line if you want to not use the prebuilt image, for example if you have local code changes\n # build: .\n image: ghcr.io/mastodon/mastodon:v4.3.7\n restart: always\n env_file: .env.production\n command: bundle exec puma -C config/puma.rb\n networks:\n - external_network\n - internal_network\n healthcheck:\n # prettier-ignore\n test: ['CMD-SHELL',"curl -s --noproxy localhost localhost:3000/health | grep -q 
'OK' || exit 1"]\n ports:\n - '127.0.0.1:3000:3000'\n depends_on:\n - db\n - redis\n # - es\n volumes:\n - ./public/system:/mastodon/public/system\n\n streaming:\n # You can uncomment the following lines if you want to not use the prebuilt image, for example if you have local code changes\n # build:\n # dockerfile: ./streaming/Dockerfile\n # context: .\n image: ghcr.io/mastodon/mastodon-streaming:v4.3.7\n restart: always\n env_file: .env.production\n command: node ./streaming/index.js\n networks:\n - external_network\n - internal_network\n healthcheck:\n # prettier-ignore\n test: ['CMD-SHELL', "curl -s --noproxy localhost localhost:4000/api/v1/streaming/health | grep -q 'OK' || exit 1"]\n ports:\n - '127.0.0.1:4000:4000'\n depends_on:\n - db\n - redis\n\n sidekiq:\n # You can uncomment the following line if you want to not use the prebuilt image, for example if you have local code changes\n # build: .\n image: ghcr.io/mastodon/mastodon:v4.3.7\n restart: always\n env_file: .env.production\n command: bundle exec sidekiq\n depends_on:\n - db\n - redis\n networks:\n - external_network\n - internal_network\n volumes:\n - ./public/system:/mastodon/public/system\n healthcheck:\n test: ['CMD-SHELL', "ps aux | grep '[s]idekiq\ 6' || false"]\n\n ## Uncomment to enable federation with tor instances along with adding the following ENV variables\n ## http_hidden_proxy=http://privoxy:8118\n ## ALLOW_ACCESS_TO_HIDDEN_SERVICE=true\n # tor:\n # image: sirboops/tor\n # networks:\n # - external_network\n # - internal_network\n #\n # privoxy:\n # image: sirboops/privoxy\n # volumes:\n # - ./priv-config:/opt/config\n # networks:\n # - external_network\n # - internal_network\n\nnetworks:\n external_network:\n internal_network:\n internal: true\n
dataset_sample\yaml\mastodon_mastodon\docker-compose.yml
docker-compose.yml
YAML
4,095
0.8
0.071429
0.44697
react-lib
394
2025-01-12T13:28:09.022099
MIT
false
c156f19920c0eda15e382b60e782893e
# To get started with Dependabot version updates, you'll need to specify which\n# package ecosystems to update and where the package manifests are located.\n# Please see the documentation for all configuration options:\n# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates\n\nversion: 2\nupdates:\n - package-ecosystem: "nuget" # See documentation for possible values\n directory: "/" # Location of package manifests\n schedule:\n interval: "weekly"\n\n - package-ecosystem: "github-actions" # See documentation for possible values\n directory: "/" # Location of package manifests\n schedule:\n interval: "weekly" \n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\dependabot.yml
dependabot.yml
YAML
676
0.8
0.25
0.285714
node-utils
369
2025-03-29T06:15:39.511392
Apache-2.0
false
d62c90856fd8ea9dbdb9b88a48acb9e0
# .github/release.yml\n\nchangelog:\n exclude:\n labels:\n - ignore-for-release\n authors:\n - MDIX-SA\n - github-actions[bot]\n categories:\n - title: Key Changes \n labels:\n - release notes\n - title: Breaking Changes \n labels:\n - breaking change\n - visual breaking change\n \n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\release.yml
release.yml
YAML
329
0.8
0.055556
0.0625
python-kit
638
2024-01-17T15:02:28.350575
Apache-2.0
false
d9ff8e386b5ecc415c467d15c8fa6d94
blank_issues_enabled: false\ncontact_links:\n - name: GitHub discussion tab\n url: https://github.com/MaterialDesignInXAML/MaterialDesignInXamlToolkit/discussions\n about: Please ask and answer questions here
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\ISSUE_TEMPLATE\config.yml
config.yml
YAML
211
0.8
0
0
node-utils
296
2023-11-06T13:39:47.126170
GPL-3.0
false
6d997d1c83013ca66641cc0b7a8a5132
name: Build Artifacts\n\non:\n workflow_call:\n inputs:\n build-configuration:\n default: "Release"\n required: false\n type: string\n mdix-version:\n required: true\n type: string\n mdix-colors-version:\n required: true\n type: string\n mdix-mahapps-version:\n required: true\n type: string\n\njobs:\n build:\n name: Build and Test\n runs-on: windows-latest\n\n env:\n solution: MaterialDesignToolkit.Full.sln\n\n steps:\n - uses: actions/checkout@v4\n\n - name: Setup .NET\n uses: actions/setup-dotnet@v4\n with:\n dotnet-version: |\n 6.x\n 8.x\n\n - name: Restore dependencies\n run: dotnet restore ${{ env.solution }}\n\n - name: Build\n run: dotnet build ${{ env.solution }} --configuration ${{ inputs.build-configuration }} --no-restore -p:Platform="Any CPU" -p:TreatWarningsAsErrors=True\n env:\n MDIXVersion: ${{ inputs.mdix-version }}\n MDIXColorsVersion: ${{ inputs.mdix-colors-version }}\n MDIXMahAppsVersion: ${{ inputs.mdix-mahapps-version }}\n\n - name: Test\n timeout-minutes: 20\n run: dotnet test ${{ env.solution }} --configuration ${{ inputs.build-configuration }} --no-build --blame-crash --logger GitHubActions\n\n - name: Upload Screenshots\n if: ${{ always() }}\n uses: actions/upload-artifact@v4\n with:\n name: Screenshots-${{ github.run_number }}\n path: ${{ github.workspace }}tests\MaterialDesignThemes.UITests\bin\${{ inputs.build-configuration }}\net8.0-windows7\Screenshots\n if-no-files-found: ignore\n\n - name: Build NuGets\n run: .\build\BuildNugets.ps1 -MDIXVersion ${{ inputs.mdix-version }} -MDIXColorsVersion ${{ inputs.mdix-colors-version }} -MDIXMahAppsVersion ${{ inputs.mdix-mahapps-version }}\n\n - name: Upload NuGets\n uses: actions/upload-artifact@v4\n with:\n name: NuGets\n path: "*.nupkg"\n\n - name: Upload Demo App\n uses: actions/upload-artifact@v4\n with:\n name: DemoApp\n path: "src/MainDemo.Wpf/bin/${{ env.buildConfiguration }}"\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\build_artifacts.yml
build_artifacts.yml
YAML
2,569
0.85
0.027397
0
node-utils
681
2025-06-29T18:18:42.770787
BSD-3-Clause
false
0be7a921e77fde4d4eb0fba11034a81f
name: Wiki - Update Control Styles\n\non:\n # push:\n # branches: [ master ]\n workflow_dispatch:\n\njobs:\n build:\n runs-on: ubuntu-latest\n\n steps:\n - uses: actions/checkout@v4\n - name: Generate Control Styles Markdown\n run: build/GenerateThemesWikiMarkdown.ps1\n shell: pwsh\n - name: Upload Control Styles to Wiki\n uses: docker://decathlon/wiki-page-creator-action:latest\n env:\n GH_PAT: ${{ secrets.SA_TOKEN }}\n ACTION_MAIL: mdixsa@outlook.com\n ACTION_NAME: Material Design Service Account\n OWNER: MaterialDesignInXAML\n REPO_NAME: MaterialDesignInXamlToolkit\n MD_FOLDER: build\n WIKI_PUSH_MESSAGE: Automatic update of ControlStyleList.md from GitHub Action\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\control_styles.yml
control_styles.yml
YAML
881
0.8
0
0.086957
react-lib
421
2025-06-01T10:12:53.855708
MIT
false
c90b7e45e9532be33bf9493592c5b79c
# This is a basic workflow to help you get started with Actions\nname: Generate Milestone Contributors\n\n# Controls when the workflow will run\non:\n # Allows you to run this workflow manually from the Actions tab\n workflow_dispatch:\n inputs:\n milestone:\n description: 'The milestone to generate contributors'\n required: true\n type: string\n\njobs:\n show_contributors:\n runs-on: ubuntu-latest\n name: A job to get contributors\n steps:\n - name: GetContribs\n id: get-contribs\n uses: Keboo/GitHubHelper@master\n with:\n milestone: '${{ inputs.milestone }}'\n repository: 'MaterialDesignInXamlToolkit'\n repository-owner: 'MaterialDesignInXAML'\n token: ${{ github.token }}\n - name: Get the output\n run: echo "${{ steps.get-contribs.outputs.contributors }}"\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\generate_contributors.yml
generate_contributors.yml
YAML
857
0.95
0
0.115385
awesome-app
598
2025-04-11T17:41:09.116063
Apache-2.0
false
336bbdd3bdf4f0502d01a11761bce9be
name: Setup Versions\n\non:\n workflow_call:\n inputs:\n is-full-release:\n required: false\n type: boolean\n default: false\n\n outputs:\n mdix-version:\n description: "The version for the MaterialDesignThemes library"\n value: ${{ jobs.getting_versions.outputs.mdix-version }}\n mdix-colors-version:\n description: "The version for the MaterialDesignColors library"\n value: ${{ jobs.getting_versions.outputs.mdix-colors-version }}\n mdix-mahapps-version:\n description: "The version for the MaterialDesignThemes.MahApps library"\n value: ${{ jobs.getting_versions.outputs.mdix-mahapps-version }}\n\njobs:\n getting_versions:\n env:\n #Update these base version numbers\n mdix-version: "5.2.2"\n mdix-colors-version: "5.2.2"\n mdix-mahapps-version: "5.2.2"\n name: Set version numbers\n runs-on: ubuntu-latest\n defaults:\n run:\n shell: pwsh\n # Map the job outputs to step outputs\n outputs:\n mdix-version: ${{ steps.output_versions.outputs.mdix-version }}\n mdix-colors-version: ${{ steps.output_versions.outputs.mdix-colors-version }}\n mdix-mahapps-version: ${{ steps.output_versions.outputs.mdix-mahapps-version }}\n steps:\n - name: Set preview version numbers\n if: ${{ !inputs.is-full-release }}\n run: |\n "mdix-version=${{ env.mdix-version }}-ci${{ github.run_number }}" >> $env:GITHUB_ENV\n "mdix-colors-version=${{ env.mdix-colors-version }}-ci${{ github.run_number }}" >> $env:GITHUB_ENV\n "mdix-mahapps-version=${{ env.mdix-mahapps-version }}-ci${{ github.run_number }}" >> $env:GITHUB_ENV\n - name: Output Versions\n id: output_versions\n run: |\n echo ${{ env.mdix-version }}\n "mdix-version=${{ env.mdix-version }}" >> $env:GITHUB_OUTPUT\n echo ${{ env.mdix-colors-version }}\n "mdix-colors-version=${{ env.mdix-colors-version }}" >> $env:GITHUB_OUTPUT\n echo ${{ env.mdix-mahapps-version }}\n "mdix-mahapps-version=${{ env.mdix-mahapps-version }}" >> $env:GITHUB_OUTPUT\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\get_versions.yml
get_versions.yml
YAML
2,117
0.95
0.074074
0.039216
vue-tools
595
2024-05-04T11:30:43.670766
GPL-3.0
false
254dacf4aa5b4d3a86df52b36dde517e
name: Icon Update\n\non:\n workflow_dispatch:\n schedule:\n - cron: "0 3 * * *"\n\nenv:\n GH_TOKEN: ${{ secrets.SA_PAT }}\n\ndefaults:\n run:\n shell: pwsh\n\njobs:\n build:\n #This check prevents this from running on forks\n if: ${{ github.repository == 'MaterialDesignInXAML/MaterialDesignInXamlToolkit' }}\n\n runs-on: windows-latest\n\n steps:\n - uses: actions/checkout@v4\n \n - name: Run Icon Generation\n run: dotnet run -c Release -- icons\n working-directory: ./src/MaterialDesignToolkit.ResourceGeneration\n\n - name: Check for changes\n id: check_for_changes\n run: |\n $hasChanges = $((git status --porcelain).Length -gt 0).ToString().ToLower()\n "has_changes=$hasChanges" >> $env:GITHUB_OUTPUT\n\n - name: Open Pull Request\n if: ${{ steps.check_for_changes.outputs.has_changes == 'true' }}\n run: |\n git config --local user.email "github-actions[bot]@users.noreply.github.com"\n git config --local user.name "github-actions[bot]"\n git checkout -b "automated/icon_update"\n git commit -m "[bot] Pack Icon update" --all\n git push -f --set-upstream origin automated/icon_update\n gh pr create --fill\n gh pr merge automated/icon_update --delete-branch --auto --squash\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\icon_update.yml
icon_update.yml
YAML
1,483
0.8
0.068182
0.028571
awesome-app
224
2024-06-17T16:13:19.784278
GPL-3.0
false
d7209ba04bb25fee08c308229236ade9
name: Nightly Release\n\non:\n workflow_dispatch:\n schedule:\n - cron: '0 9 * * *'\n\nenv:\n GH_TOKEN: ${{ github.token }}\n\ndefaults:\n run:\n shell: pwsh\n\njobs:\n check_for_changes:\n #This check prevents this from running on forks\n if: ${{ github.repository == 'MaterialDesignInXAML/MaterialDesignInXamlToolkit' }}\n runs-on: ubuntu-latest\n\n outputs:\n should_run: ${{ steps.check_for_changes.outputs.has_changed }}\n\n steps:\n - id: check_for_changes\n name: Check for changes\n run: |\n # Grab the last two run, since the latest run will be the current one executing\n $workflowList = gh run list --workflow "${{ github.workflow }}" --branch "${{ github.ref_name }}" --json databaseId --limit 2 --repo "${{ github.repository }}"\n $runId = ($workflowList | ConvertFrom-Json)[1].databaseId\n $lastRunHash = ((gh run view $runId --json headSha --repo "${{ github.repository }}") | ConvertFrom-Json).headSha\n\n echo "Last hash $lastRunHash"\n echo "Current hash ${{ github.sha }}"\n\n $hasChanged = ($lastRunHash -ne "${{ github.sha }}").ToString().ToLower()\n echo "Has updates $hasChanged"\n "has_changed=$hasChanged" >> $env:GITHUB_OUTPUT\n \n get_versions:\n needs: [check_for_changes]\n if: ${{ needs.check_for_changes.outputs.should_run == 'true' }}\n name: Get Versions\n uses: ./.github/workflows/get_versions.yml\n\n build_artifacts:\n name: Build artifacts\n needs: [get_versions, check_for_changes]\n if: ${{ needs.check_for_changes.outputs.should_run == 'true' }}\n uses: ./.github/workflows/build_artifacts.yml\n with:\n mdix-version: ${{ needs.get_versions.outputs.mdix-version }}\n mdix-colors-version: ${{ needs.get_versions.outputs.mdix-colors-version }}\n mdix-mahapps-version: ${{ needs.get_versions.outputs.mdix-mahapps-version }}\n\n push_nugets:\n needs: [build_artifacts]\n runs-on: ubuntu-latest\n name: Push NuGets\n\n steps:\n - name: Download NuGet Artifacts\n uses: actions/download-artifact@v4\n with:\n name: NuGets\n path: nugets\n\n - name: Push NuGets\n 
run: |\n dotnet nuget push nugets/*.nupkg --api-key ${{ secrets.PAT }} --source https://api.nuget.org/v3/index.json --skip-duplicate\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\nightly_release.yml
nightly_release.yml
YAML
2,292
0.8
0.057143
0.035088
awesome-app
776
2025-05-17T20:03:47.079506
GPL-3.0
false
53f61e9b365b752b0db3fb80eae941bc
name: Pull Request Verification\n\non:\n push:\n branches: [ master ]\n pull_request:\n branches: [ master ]\n workflow_dispatch:\n\ndefaults:\n run:\n shell: pwsh\n\njobs:\n build:\n name: Build artifacts\n uses: ./.github/workflows/build_artifacts.yml\n with:\n mdix-version: "0.0.0"\n mdix-colors-version: "0.0.0"\n mdix-mahapps-version: "0.0.0"\n\n automerge:\n needs: build\n runs-on: ubuntu-latest\n\n permissions:\n pull-requests: write\n contents: write\n\n steps:\n - uses: fastify/github-action-merge-dependabot@v3.11.1\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\pr_verification.yml
pr_verification.yml
YAML
565
0.7
0
0
node-utils
724
2025-06-17T09:37:03.291735
Apache-2.0
false
5161893151ff632e6c640e7c691ff25a
name: Production Release\n\non:\n workflow_dispatch:\n inputs:\n milestone:\n description: "The milestone to generate a release for"\n required: true\n type: string\n\nenv:\n solution: MaterialDesignToolkit.Full.sln\n GITHUB_TOKEN: ${{ secrets.SA_PAT }}\n\ndefaults:\n run:\n shell: pwsh\n\n\njobs:\n get_versions:\n name: Get Versions\n uses: ./.github/workflows/get_versions.yml\n with:\n is-full-release: true\n\n version_number_check:\n needs: [get_versions]\n runs-on: ubuntu-latest\n name: Version matches milestone\n\n steps:\n - name: Check versions\n run: |\n if ('${{ inputs.milestone }}' -ne '${{ needs.get_versions.outputs.mdix-version }}') {\n Write-Error "Milestone ${{ inputs.milestone }} does not match MDIX ${{ needs.get_versions.outputs.mdix-version }}"\n exit 1\n } else {\n Write-Host "Versions match"\n }\n\n build_release_notes:\n runs-on: ubuntu-latest\n name: Generate Release Notes\n\n steps:\n - name: Get Contributors\n id: get-contribs\n uses: Keboo/GitHubHelper@master\n with:\n milestone: "${{ inputs.milestone }}"\n repository: "MaterialDesignInXamlToolkit"\n repository-owner: "MaterialDesignInXAML"\n token: ${{ github.token }}\n\n - name: Generate Release Notes\n run: |\n $response = gh api --method POST -H "Accept: application/vnd.github+json" /repos/${{ github.repository }}/releases/generate-notes -f tag_name='v${{ inputs.milestone }}'\n $json = $response | ConvertFrom-Json\n $releaseNotes = $json.body\n $contributors = "${{ steps.get-contribs.outputs.contributors }}"\n $releaseNotes | Out-File -Append "ReleaseNotes.md"\n "`n" | Out-File -Append "ReleaseNotes.md"\n $contributors | Out-File -Append "ReleaseNotes.md"\n cat "ReleaseNotes.md"\n\n - name: Upload Release Notes\n uses: actions/upload-artifact@v4\n with:\n name: ReleaseNotes\n path: "ReleaseNotes.md"\n\n build_artifacts:\n name: Build artifacts\n needs: get_versions\n uses: ./.github/workflows/build_artifacts.yml\n with:\n mdix-version: ${{ needs.get_versions.outputs.mdix-version }}\n 
mdix-colors-version: ${{ needs.get_versions.outputs.mdix-colors-version }}\n mdix-mahapps-version: ${{ needs.get_versions.outputs.mdix-mahapps-version }}\n\n build_icon_changes:\n needs: [build_artifacts]\n runs-on: windows-latest\n name: Get Icon Changes\n\n steps:\n - uses: actions/checkout@v4\n with:\n fetch-depth: 0\n\n - name: Download NuGet Artifacts\n uses: actions/download-artifact@v4\n with:\n name: NuGets\n path: nugets\n\n - name: Get Previous NuGet release\n run: |\n $release = (gh api -H "Accept: application/vnd.github+json" /repos/${{ github.repository }}/releases/latest) | ConvertFrom-Json\n gh release download "$($release.tag_name)" --repo "${{ github.repository }}" --pattern *.nupkg\n working-directory: nugets\n\n - name: Create Icon Diff File\n run: |\n dotnet run --project ./src/MaterialDesignToolkit.ResourceGeneration/MaterialDesignToolkit.ResourceGeneration.csproj -c Release -- icon-diff\n\n - name: Upload Icon Changes\n uses: actions/upload-artifact@v4\n with:\n name: IconChanges\n path: "IconChanges*.md"\n\n create_release:\n needs:\n [\n build_release_notes,\n build_artifacts,\n build_icon_changes,\n version_number_check,\n ]\n runs-on: ubuntu-latest\n name: Create Draft Release\n\n steps:\n - name: Download NuGet Artifacts\n uses: actions/download-artifact@v4\n with:\n name: NuGets\n path: nugets\n\n - name: Download Demo App Artifacts\n uses: actions/download-artifact@v4\n with:\n name: DemoApp\n path: demo-app\n\n - name: Zip Demo App\n run: zip -r DemoApp.zip demo-app/*\n\n - name: Download Release Notes\n uses: actions/download-artifact@v4\n with:\n name: ReleaseNotes\n\n - name: Create Release\n run: |\n # We can't use glob pattern because of this bug https://github.com/cli/cli/issues/5099\n gh release create v${{ inputs.milestone }} --repo '${{ github.repository }}' --draft --latest --title "${{ inputs.milestone }}" --notes-file ReleaseNotes.md (Get-Item '${{ github.workspace }}/nugets/*.nupkg') '${{ github.workspace 
}}/DemoApp.zip'\n\n update_wiki:\n needs: [create_release]\n runs-on: ubuntu-latest\n environment: production\n name: Update Wiki\n\n steps:\n - name: Checkout Wiki Repo\n uses: actions/checkout@v4\n with:\n repository: ${{ github.repository }}.wiki\n\n - name: Download Icon Changes\n uses: actions/download-artifact@v4\n with:\n name: IconChanges\n path: icon-changes\n\n - name: Update Wiki\n run: |\n $iconUpdates = Get-ChildItem -Path "icon-changes/*.md"\n foreach($update in $iconUpdates){\n Write-Host "Updating from $update"\n if ($update.Name -match '-(?<PreviousVersion>\d+\.\d+\.\d+)--(?<TargetVersion>\d+\.\d+\.\d+).md$') {\n $previousVersion = $Matches.PreviousVersion\n $targetVersion = $Matches.TargetVersion\n \n $majorVersion = $targetVersion.Split(".") | Select-Object -First 1\n \n # Update the changes\n $changesFileName = "$majorVersion-x-icon-changes"\n $changesFile = "$changesFileName.md"\n \n if (!(Test-Path $changesFile)) {\n New-Item -ItemType File $changesFile | Out-Null\n }\n @(\n (Get-Content $update),\n "",\n (Get-Content $changesFile)\n ) | Set-Content $changesFile\n \n # Update the PackIcon Release notes\n [string[]] $releaseNotes = Get-Content "PackIcon-ReleaseNotes.md"\n \n $firstLine = $releaseNotes | Where-Object { $_.StartsWith("[Pack Icon Changes") } | Select-Object -First 1\n $index = $releaseNotes.IndexOf($firstLine)\n \n @(\n ($releaseNotes | Select-Object -First $index),\n "[Pack Icon Changes $previousVersion => $targetVersion]($changesFileName#pack-icon-changes-$($previousVersion -replace '\.', '')--$($targetVersion -replace '\.', ''))",\n "",\n ($releaseNotes | Select-Object -Skip $index)\n ) | Set-Content "PackIcon-ReleaseNotes.md"\n Remove-Item $update\n } else {\n Write-Warning "Did not parse version from $($update.Name)"\n }\n }\n\n - name: Push Wiki\n run: |\n git config --local user.email "github-actions[bot]@users.noreply.github.com"\n git config --local user.name "github-actions[bot]"\n git add .\n git commit -m "[automated] 
Update Wiki with icon changes for ${{ inputs.milestone }}" --all\n git push\n\n push_nugets:\n needs: [create_release]\n runs-on: ubuntu-latest\n name: Push NuGets\n environment: production\n\n steps:\n - name: Download NuGet Artifacts\n uses: actions/download-artifact@v4\n with:\n name: NuGets\n path: nugets\n\n - name: Push NuGets\n run: |\n dotnet nuget push nugets/*.nupkg --api-key ${{ secrets.PAT }} --source https://api.nuget.org/v3/index.json --skip-duplicate\n\n publish_release:\n needs: [create_release]\n runs-on: ubuntu-latest\n name: Publish Release\n environment: production\n\n steps:\n - name: Publish Release\n run: |\n gh release edit v${{ inputs.milestone }} --repo '${{ github.repository }}' --draft=false\n\n close_milestone:\n needs: [create_release]\n runs-on: ubuntu-latest\n name: Close Milestone\n environment: production\n\n steps:\n # Doing a checkout, until this issue is resolved.\n # https://github.com/valeriobelli/gh-milestone/issues/15\n # As of version 2.1.0 the -R doesn't appear to output valid json when filtering. 
So still using the checkout.\n - uses: actions/checkout@v4\n\n - name: Close Milestone\n run: |\n gh extension install valeriobelli/gh-milestone\n Write-Host "Using extension version $(gh milestone --version)"\n $milestones = $(gh milestone list --json number,title) | ConvertFrom-Json\n $milestoneNumber = ($milestones | Where-Object { $_.title -eq "${{ inputs.milestone }}" }).number\n gh milestone edit $milestoneNumber --state closed\n env:\n GITHUB_TOKEN: ${{ secrets.SA_PAT }}\n\n update_version_numbers:\n needs: [create_release]\n runs-on: ubuntu-latest\n name: Update Version Numbers\n environment: production\n\n steps:\n # Checkout is needed so that we can update the get_versions.yml file\n - uses: actions/checkout@v4\n\n - name: Increment Version Numbers\n run: |\n function Update-Version {\n param (\n [string]$Prefix\n )\n $workflowPath = "./.github/workflows/get_versions.yml"\n $workflowContent = Get-Content -Path $workflowPath\n\n $versionPattern = '"(\d+\.\d+\.)(\d+)"'\n $pattern = "$Prefix`: $versionPattern"\n $match = $workflowContent -match $pattern\n\n if ($match[0] -match $versionPattern) {\n $newVersion = $Matches[1] + ([int]$Matches[2] + 1)\n $workflowContent = $workflowContent -replace $pattern,"$Prefix`: `"$newVersion`""\n Write-Host "$Prefix updated to $newVersion"\n } else {\n Write-Error "Failed to update $Prefix version"\n }\n\n Set-Content -Path $workflowPath -Value $workflowContent\n }\n\n Update-Version -Prefix "mdix-version"\n Update-Version -Prefix "mdix-colors-version"\n Update-Version -Prefix "mdix-mahapps-version"\n\n - name: Open Pull Request\n run: |\n git config --local user.email "github-actions[bot]@users.noreply.github.com"\n git config --local user.name "github-actions[bot]"\n git checkout -b "automated/version_update"\n git commit -m "[bot] Release version update" --all\n git push -f --set-upstream origin automated/version_update\n gh pr create --fill\n gh pr merge automated/version_update --delete-branch --auto --squash\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\release.yml
release.yml
YAML
12,392
0.95
0.022222
0.026515
react-lib
888
2025-07-08T22:05:10.881674
MIT
false
67985864b360aa8ea3f8547f28bf6e4c
name: 'Close stale issues and PRs'\non:\n schedule:\n - cron: '30 1 * * *'\n workflow_dispatch:\n\njobs:\n stale:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/stale@v9\n with:\n stale-issue-message: 'This issue is marked stale because it has been open 30 days with no activity. Remove stale label or update the issue, otherwise it will be closed in 14 days.'\n stale-pr-message: 'This PR is marked stale because it has been open 60 days with no activity. Remove stale label or update the PR, otherwise it will be closed in 14 days.'\n close-issue-message: 'This issue was closed because it has been stalled for 14 days with no activity.'\n close-pr-message: 'This PR was closed because it has been stalled for 14 days with no activity.'\n only-issue-labels: Waiting on feedback # Only consider issues with 'Waiting on feedback' labels\n exempt-draft-pr: true # Do not consider a draft PR stale\n exempt-all-pr-milestones: true # Do not consider a PR associated with a Milestone stale\n days-before-stale: 30\n days-before-pr-stale: 60\n days-before-close: 14\n ascending: true # Start with the oldest issues/PRs first\n debug-only: false # Currently only doing "dry runs" until we're satisfied with the configuration\n operations-per-run: 30 # GitHub API calls are rate limited. When debug-only is "false", this value should be approx. 30 (default) or less
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\.github\workflows\stale_issues_and_prs.yml
stale_issues_and_prs.yml
YAML
1,631
0.8
0.083333
0
node-utils
651
2024-02-02T12:07:54.329742
GPL-3.0
false
0d39c115451d9fdadc35d2efb27d13d6
trigger: none\n\npr: none\n\npool:\n vmImage: "VS2017-Win2016"\n\nsteps:\n - task: PowerShell@2\n inputs:\n filePath: 'build\\GenerateThemesWikiMarkdown.ps1'\n\n - task: WikiUpdaterTask@1\n inputs:\n repo: "github.com/MaterialDesignInXAML/MaterialDesignInXamlToolkit.wiki.git"\n filename: "ControlStyleList.md"\n dataIsFile: true\n sourceFile: 'build\\ControlStyleList.md'\n message: "Automatic update of ControlStyleList.md from Azure pipeline"\n gitname: "Azure Dev Ops"\n gitemail: "azure@materialdesigninxaml.net"\n user: "$(GitHubUser)"\n password: "$(GitHubPat)"\n localpath: '$(System.DefaultWorkingDirectory)\\Scripts\\Wiki'\n
dataset_sample\yaml\MaterialDesignInXAML_MaterialDesignInXamlToolkit\build\azure-pipelines-wiki.yml
azure-pipelines-wiki.yml
YAML
728
0.7
0
0
python-kit
622
2024-01-27T00:45:00.817394
MIT
false
723e97a1554fc37f4541b87f7d3b186d
version: 2\nupdates:\n- package-ecosystem: cargo\n directory: /\n schedule:\n interval: weekly\n day: sunday\n time: "22:00"\n open-pull-requests-limit: 50\n labels: [A-dependencies]\n groups:\n simple1:\n applies-to: version-updates\n update-types:\n - patch\n patterns:\n - "a*"\n - "b*"\n - "c*"\n - "d*"\n - "e*"\n - "f*"\n - "g*"\n - "h*"\n - "i*"\n - "j*"\n - "k*"\n - "l*"\n - "m*"\n simple2:\n applies-to: version-updates\n update-types:\n - patch\n patterns:\n - "n*"\n - "o*"\n - "p*"\n - "q*"\n - "r*"\n - "s*"\n - "t*"\n - "u*"\n - "v*"\n - "w*"\n - "x*"\n - "y*"\n - "z*"\n- package-ecosystem: cargo\n directory: /misc/wasm\n schedule:\n interval: weekly\n day: sunday\n time: "22:00"\n open-pull-requests-limit: 50\n labels: [A-dependencies]\n groups:\n simple:\n applies-to: version-updates\n update-types:\n - minor\n - patch\n- package-ecosystem: pip\n directory: /misc/dbt-materialize\n schedule:\n # dbt hardly changes, so this isn't too onerous and cuts down the latency\n # of releasing a new version of dbt-materialize when a new version of\n # dbt drops.\n interval: daily\n labels: [A-dependencies]\n groups:\n simple:\n applies-to: version-updates\n update-types:\n - minor\n - patch\n- package-ecosystem: docker\n directory: /misc/images/ubuntu-base\n schedule:\n interval: weekly\n day: sunday\n time: "22:00"\n open-pull-requests-limit: 50\n labels: [A-dependencies]\n groups:\n simple:\n applies-to: version-updates\n update-types:\n - minor\n - patch\n- package-ecosystem: docker\n directory: /ci/builder\n schedule:\n interval: weekly\n day: sunday\n time: "22:00"\n open-pull-requests-limit: 50\n labels: [A-dependencies]\n groups:\n simple:\n applies-to: version-updates\n update-types:\n - minor\n - patch\n- package-ecosystem: pip\n directory: /ci/builder\n schedule:\n interval: weekly\n day: sunday\n time: "22:00"\n open-pull-requests-limit: 50\n labels: [A-dependencies]\n groups:\n simple:\n applies-to: version-updates\n update-types:\n - minor\n - patch\n
dataset_sample\yaml\MaterializeInc_materialize\.github\dependabot.yml
dependabot.yml
YAML
2,333
0.8
0
0.025641
node-utils
33
2023-10-12T08:26:04.892188
Apache-2.0
false
e08050558028a7b0f23b047f4b91c34b
# Copyright 2020 SAP SE\n# Modifications Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Portions of this file are derived from the README examples in the\n# Contributor Assistant project. The original source code was retrieved on\n# July 31, 2024 from:\n#\n# https://github.com/contributor-assistant/github-action/blob/master/README.md\n\n# Check if PR authors have signed the Materialize CLA, and if not have them sign\n# and record their signature. 
The CLA and signatures are stored in\n# https://github.com/MaterializeInc/cla\n\nname: CLA Assistant\non:\n issue_comment:\n types: [created]\n pull_request_target:\n types: [opened, closed, synchronize]\n\npermissions:\n actions: write\n contents: read\n pull-requests: write\n statuses: write\n\njobs:\n cla-assistant:\n runs-on: ubuntu-latest\n steps:\n - name: CLA Assistant\n if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'\n uses: contributor-assistant/github-action@v2.4.0\n env:\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n PERSONAL_ACCESS_TOKEN: ${{ secrets.CLA_TOKEN }}\n with:\n remote-organization-name: MaterializeInc\n remote-repository-name: cla\n branch: main\n path-to-signatures: signatures.v1.json\n path-to-document: https://github.com/MaterializeInc/cla/blob/main/cla.md\n allowlist: materialize-bot,materialize-bot-monitoring,dependabot[bot]\n custom-notsigned-prcomment: Thank you for your submission! We really appreciate it. Like many source-available projects, we require that you sign our [Contributor License Agreement](https://github.com/MaterializeInc/cla/blob/main/cla.md) (CLA) before we can accept your contribution.<br><br>You can sign the CLA by posting a comment with the message below.\n custom-pr-sign-comment: I have read the Contributor License Agreement (CLA) and I hereby sign the CLA.\n custom-allsigned-prcomment: All contributors have signed the CLA.\n lock-pullrequest-aftermerge: false\n
dataset_sample\yaml\MaterializeInc_materialize\.github\workflows\cla.yml
cla.yml
YAML
2,702
0.95
0.084746
0.407407
react-lib
890
2024-07-07T16:18:00.877151
MIT
false
1a0e9b92bbc8ec14767180044e968d11
# Copyright 2020 The Actions Ecosystem Authors\n# Modifications Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Portions of this file are derived from the README examples in the Action\n# Slack Notifier project. The original source code was retrieved on\n# January 5, 2022 from:\n#\n# https://github.com/actions-ecosystem/action-slack-notifier/blob/fc778468d09c43a6f4d1b8cccaca59766656996a/README.md\n\n# Send a notification to the #rnd-design-docs Slack channel when a new\n# design doc is added.\n#\n# A notification is sent when all of these conditions are true:\n# * A ready-to-review PR is (re-)opened, or a PR is moved from draft\n# to ready-to-review.\n# * The PR adds an '.md' document under 'doc/developer/design/'.\n\nname: Slack Design Doc Notifications\n\non:\n pull_request_target:\n types:\n - opened\n - reopened\n - ready_for_review\n paths:\n - "doc/developer/design/*.md"\n\njobs:\n notify:\n name: "Notify about new design docs"\n runs-on: ubuntu-latest\n if: ${{ !github.event.pull_request.draft }}\n steps:\n - name: "Path filter"\n id: filter\n uses: dorny/paths-filter@v2\n with:\n filters: |\n new-design:\n - added: "doc/developer/design/*.md"\n - name: "Push to Slack"\n if: steps.filter.outputs.new-design == 'true'\n uses: actions-ecosystem/action-slack-notifier@fc778468d09c43a6f4d1b8cccaca59766656996a\n with:\n slack_token: ${{ secrets.SLACK_TOKEN }}\n channel: rnd-design-docs\n 
custom_payload: |\n {\n "blocks": [\n {\n "type": "section",\n "text": {\n "type": "mrkdwn",\n "text": "A new design doc is ready for review!"\n }\n },\n {\n "type": "section",\n "text": {\n "type": "mrkdwn",\n "text": "• *PR:* <${{ github.event.pull_request.html_url }}|${{ github.event.pull_request.title }}>"\n }\n },\n {\n "type": "section",\n "text": {\n "type": "mrkdwn",\n "text": "• *Author:* <${{ github.event.pull_request.user.html_url }}|${{ github.event.pull_request.user.login }}>"\n }\n }\n ]\n }\n
dataset_sample\yaml\MaterializeInc_materialize\.github\workflows\slack_notify_design_doc.yml
slack_notify_design_doc.yml
YAML
2,989
0.95
0.047059
0.325
vue-tools
77
2024-04-05T11:00:26.655602
MIT
false
958940a9e1718fda5a72379279a90f77
# Copyright 2020 The Actions Ecosystem Authors\n# Modifications Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Portions of this file are derived from the README examples in the Action\n# Slack Notifier project. The original source code was retrieved on\n# January 5, 2022 from:\n\n# https://github.com/actions-ecosystem/action-slack-notifier/blob/fc778468d09c43a6f4d1b8cccaca59766656996a/README.md\n\nname: Slack Label Notifications\n\non:\n pull_request:\n types:\n - labeled\n\njobs:\n notify:\n runs-on: ubuntu-latest\n steps:\n - uses: actions-ecosystem/action-slack-notifier@fc778468d09c43a6f4d1b8cccaca59766656996a\n if: ${{ github.event.label.name == 'release-blocker' }}\n with:\n slack_token: ${{ secrets.SLACK_TOKEN }}\n channel: release\n message: |\n `${{ github.event.label.name }}` label has been added to "${{ github.event.pull_request.title }}" (${{ github.event.pull_request.html_url }}) (assigned to: ${{ github.event.pull_request.assignee.login || 'unassigned' }}).\n color: red\n verbose: false\n
dataset_sample\yaml\MaterializeInc_materialize\.github\workflows\slack_notify_labeled.yml
slack_notify_labeled.yml
YAML
1,647
0.95
0.04878
0.470588
node-utils
652
2024-01-04T20:17:49.893433
MIT
false
2c6802cd31ba3021187e7ce3458aadaf
# Copyright 2020 The Actions Ecosystem Authors\n# Modifications Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Portions of this file are derived from the README examples in the Action\n# Slack Notifier project. The original source code was retrieved on\n# January 5, 2022 from:\n#\n# https://github.com/actions-ecosystem/action-slack-notifier/blob/fc778468d09c43a6f4d1b8cccaca59766656996a/README.md\n\n# Send a notification to the #team-testing-risky Slack channel when a risky change is made.\n\nname: Slack QA Risky Notifications\n\non:\n pull_request_target:\n types:\n - opened\n - reopened\n - ready_for_review\n paths:\n - src/pgwire/src/message.rs\n - src/sql/src/plan/statement.rs\n - src/catalog/src/builtin.rs\n - src/sql/src/rbac.rs\n - src/sqllogictest/src/runner.rs\n - src/adapter/src/coord/command_handler.rs\n - src/expr/src/relation/mod.rs\n - src/adapter/src/coord.rs\n - src/sql-parser/src/ast/defs/statement.rs\n - src/adapter/src/catalog.rs\n - src/sql/src/catalog.rs\n - src/pgwire/src/protocol.rs\n - src/expr/src/scalar/mod.rs\n - src/adapter/src/coord/sequencer.rs\n - src/sql/src/func.rs\n - src/sql/src/plan.rs\n - src/adapter/src/coord/sequencer/inner.rs\n - src/expr/src/scalar/func.rs\n - src/sql/src/plan/statement/ddl.rs\n - src/adapter/src/catalog.rs\n - src/adapter/src/coord.rs\n - src/sql/src/plan/query.rs\n - src/sql-parser/src/parser.rs\n\njobs:\n notify:\n name: "Notify about 
risky PRs"\n runs-on: ubuntu-latest\n if: ${{ !github.event.pull_request.draft }}\n steps:\n - name: Checkout\n uses: actions/checkout@v4\n with:\n fetch-depth: 1\n - name: "Long change filter"\n id: long\n run: |\n num_lines=$(git diff --diff-filter=AM ${{ github.event.pull_request.base.sha }} -- '*.rs' | grep "^+" | grep -v "^+++" | wc -l)\n if [[ $num_lines -gt 300 ]]; then\n echo "long=true" >> $GITHUB_OUTPUT\n fi\n - name: "Push to Slack"\n if: steps.long.outputs.long == 'true'\n uses: actions-ecosystem/action-slack-notifier@fc778468d09c43a6f4d1b8cccaca59766656996a\n with:\n slack_token: ${{ secrets.SLACK_TOKEN }}\n channel: team-testing-risky\n custom_payload: |\n {\n "blocks": [\n {\n "type": "section",\n "text": {\n "type": "mrkdwn",\n "text": "• <${{ github.event.pull_request.html_url }}|${{ github.event.pull_request.title }}>"\n }\n }\n ]\n }\n
dataset_sample\yaml\MaterializeInc_materialize\.github\workflows\slack_notify_qa_risky.yml
slack_notify_qa_risky.yml
YAML
3,232
0.95
0.043956
0.232558
node-utils
888
2025-03-08T21:21:44.734946
Apache-2.0
false
5a54f1884fc1806e534e4496f5bdb610
# Copyright 2020 The Actions Ecosystem Authors\n# Modifications Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Portions of this file are derived from the README examples in the Action\n# Slack Notifier project. The original source code was retrieved on\n# January 5, 2022 from:\n#\n# https://github.com/actions-ecosystem/action-slack-notifier/blob/fc778468d09c43a6f4d1b8cccaca59766656996a/README.md\n\n# Send a notification to the #rnd-sql-council Slack channel when a change\n# to the SQL parser or the system catalog schema is made.\n#\n# A notification is sent when all of these conditions are true:\n# * A ready-to-review PR is (re-)opened, or a PR is moved from draft\n# to ready-to-review.\n# * The PR modifies a file in 'src/sql-parser/','src/sql-lexer/', or 'src/catalog/src/builtin.rs'.\n\nname: Slack SQL Council Notifications\n\non:\n pull_request_target:\n types:\n - opened\n - reopened\n - ready_for_review\n paths:\n - "src/sql-parser/**"\n - "src/sql-lexer/**"\n - "src/catalog/src/builtin.rs"\n - test/sqllogictest/mz_catalog_server_index_accounting.slt\n\njobs:\n notify:\n name: "Notify about changes to the SQL parser"\n runs-on: ubuntu-latest\n if: ${{ !github.event.pull_request.draft }}\n steps:\n - name: "Path filter"\n id: filter\n uses: dorny/paths-filter@v2\n with:\n filters: |\n sql-parser:\n - 'src/sql-parser/**'\n - 'src/sql-lexer/**'\n - '!**/Cargo.toml'\n - '!**/BUILD.bazel'\n 
system-catalog:\n - 'src/catalog/src/builtin.rs'\n - '!**/Cargo.toml'\n - '!**/BUILD.bazel'\n index-slt:\n - 'test/sqllogictest/mz_catalog_server_index_accounting.slt'\n - name: Checkout\n uses: actions/checkout@v4\n - name: "Check Retained Metric Changes"\n id: check-retain-metrics\n if: steps.filter.outputs.builtin-rs == 'true'\n run: |\n # Check for the text "is_retained_metrics" modified in builtin.rs in the pull request\n if git diff ${{ github.event.pull_request.base.sha }} -- 'src/catalog/src/builtin.rs' | grep -i 'is_retained_metrics'; then\n echo "changed=true" >> $GITHUB_OUTPUT\n fi\n - name: "Push to Slack"\n if: steps.filter.outputs.sql-parser == 'true' || steps.filter.outputs.system-catalog == 'true' || steps.filter.outputs.index-slt == 'true'\n uses: actions-ecosystem/action-slack-notifier@fc778468d09c43a6f4d1b8cccaca59766656996a\n with:\n slack_token: ${{ secrets.SLACK_TOKEN }}\n channel: rnd-sql-council\n custom_payload: |\n {\n "blocks": [\n {\n "type": "section",\n "text": {\n "type": "mrkdwn",\n "text": "A new ${{ steps.filter.outputs.sql-parser == 'true' && 'SQL parser' || 'system catalog' }} change is ready for review!"\n }\n },\n {\n "type": "section",\n "text": {\n "type": "mrkdwn",\n "text": ${{ toJSON(format('• *PR:* <{0}|{1}>', github.event.pull_request.html_url, github.event.pull_request.title)) }}\n }\n },\n {\n "type": "section",\n "text": {\n "type": "mrkdwn",\n "text": "• *Author:* <${{ github.event.pull_request.user.html_url }}|${{ github.event.pull_request.user.login }}>"\n }\n }\n ]\n }\n
dataset_sample\yaml\MaterializeInc_materialize\.github\workflows\slack_notify_sql_parser.yml
slack_notify_sql_parser.yml
YAML
4,209
0.95
0.065421
0.264706
react-lib
902
2023-08-04T18:34:39.441539
Apache-2.0
false
68b910bc4b87d80b5cfe89560a88a7c0
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: cloudtest\ndescription: Runs cloudtest pytest-based tests\nauthor: "Materialize, Inc."\nrequirements: []\nconfiguration:\n properties:\n args:\n type: array\n additionalProperties: false\n
dataset_sample\yaml\MaterializeInc_materialize\ci\plugins\cloudtest\plugin.yml
plugin.yml
YAML
572
0.95
0
0.470588
node-utils
117
2024-03-15T11:01:01.980351
BSD-3-Clause
true
f9ea6601ef16d065c116c7b9e7575578
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: mzcompose\ndescription: Runs an mzcompose file\nauthor: "Materialize, Inc."\nrequirements: []\nconfiguration:\n properties:\n args:\n type: array\n run:\n type: string\n composition:\n type: string\n required: ["composition"]\n additionalProperties: false\n
dataset_sample\yaml\MaterializeInc_materialize\ci\plugins\mzcompose\plugin.yml
plugin.yml
YAML
653
0.95
0
0.363636
react-lib
506
2024-11-22T13:23:50.728571
MIT
false
d487cbb6378fc1ba5123b79011fa6d39
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: mzcompose\ndescription: Runs an mzcompose file\nauthor: "Materialize, Inc."\nrequirements: []\nconfiguration:\n additionalProperties: false\n
dataset_sample\yaml\MaterializeInc_materialize\ci\plugins\scratch-aws-access\plugin.yml
plugin.yml
YAML
519
0.95
0
0.571429
node-utils
933
2024-09-24T03:44:45.808872
BSD-3-Clause
false
80c9f5ad5c3cba03492bf63e9b1ddae7
operators:\n - operator: Constant\n plan_types: "optimized,raw"\n description: |\n Always produces the same collection of rows.\n uses_memory: False\n memory_details: ""\n expansive: False\n\n example: |\n ```mzsql\n Constant\n - ((1, 2) x 2)\n - (3, 4)\n ```\n\n - operator: Constant\n plan_types: "LIR"\n description: |\n Always produces the same collection of rows.\n uses_memory: False\n memory_details: ""\n expansive: False\n\n example: |\n ```mzsql\n Constant 2 rows\n ```\n\n - operator: Get\n plan_types: "optimized,raw"\n description: |\n Produces rows from either an existing relation (source/view/materialized view/table) or from a previous\n CTE in the same plan.\n uses_memory: False\n memory_details: ""\n expansive: False\n\n example: "`Get materialize.public.ordered`"\n\n - operator: Get::~\n plan_types: "LIR"\n description: |\n Produces rows from either an existing relation (source/view/materialized view/table) or from a previous\n CTE in the same plan.\n There may be a `MapFilterProject` included in the lookup.\n\n There are three types of `Get`.\n\n 1. `Get::PassArrangements`, which means the plan will use an\n existing [arrangement](/get-started/arrangements/#arrangements).\n\n 2. `Get::Arrangement`, which means that the results will be\n _looked up_ in an existing arrangement.\n\n 3. `Get::Collection`, which means that the results are\n unarranged, and will be processed as they arrive.\n\n uses_memory: False\n memory_details: ""\n expansive: False\n example: "`Get::PassArrangements materialize.public.ordered`"\n\n - operator: Project\n plan_types: "optimized,raw"\n description: |\n Produces a subset of the [columns](#explain-plan-columns) in the input\n rows. 
See also [column numbering](#explain-plan-columns).\n uses_memory: False\n memory_details: ""\n expansive: False\n expansive_details: |\n Each row has _less_ data (i.e., shorter rows, but same number of rows).\n example: "`Project (#2, #3)`"\n\n - operator: Map\n plan_types: "optimized,raw"\n description: |\n Appends the results of some scalar expressions to each row in the input.\n uses_memory: False\n memory_details: ""\n expansive: True\n expansive_details: |\n Each row has more data (i.e., longer rows but same number of rows).\n example: "`Map (((#1 * 10000000dec) / #2) * 1000dec)`"\n\n - operator: MapFilterProject\n plan_types: "LIR"\n description: |\n The number after the operator is the input operator's `lir_id`.\n\n Computes new columns, filters columns, and projects away columns. Works row-by-row.\n uses_memory: False\n memory_details: ""\n expansive: True\n expansive_details: |\n Each row may have more data, from the `Map`.\n Each row may also have less data, from the `Project`.\n There may be fewer rows, from the `Filter`.\n\n example: "`MapFilterProject 5`"\n\n - operator: FlatMap\n plan_types: "optimized"\n description: |\n Appends the result of some (one-to-many) [table function](/sql/functions/#table-functions) to each row in the input.\n uses_memory: False\n memory_details: ""\n expansive: True\n expansive_details: |\n Depends on the [table function](/sql/functions/#table-functions) used.\n example: "`FlatMap jsonb_foreach(#3)`"\n\n - operator: FlatMap\n plan_types: "LIR"\n description: |\n The number after the operator is the input operator's `lir_id`.\n\n Appends the result of some (one-to-many) [table function](/sql/functions/#table-functions) to each row in the input.\n uses_memory: False\n memory_details: ""\n expansive: True\n expansive_details: |\n Depends on the [table function](/sql/functions/#table-functions) used.\n example: "`FlatMap 3 (jsonb_foreach)`"\n\n - operator: CallTable\n plan_types: "raw"\n description: |\n Appends the result 
of some (one-to-many) [table function](/sql/functions/#table-functions) to each row in the input.\n uses_memory: False\n memory_details: ""\n expansive: True\n expansive_details: |\n Depends on the [table function](/sql/functions/#table-functions) used.\n example: "`CallTable generate_series(1, 7, 1)`"\n\n - operator: Filter\n plan_types: "optimized,raw"\n description: |\n Removes rows of the input for which some scalar predicates return `false`.\n uses_memory: False\n memory_details: ""\n expansive: False\n example: "`Filter (#20 < #21)`"\n expansive_details: |\n May reduce the number of rows.\n\n - operator: ~Join\n plan_types: "raw"\n description: |\n Performs one of `INNER` / `LEFT` / `RIGHT` / `FULL OUTER` / `CROSS` join on the two inputs, using the given predicate.\n uses_memory: True\n memory_details: |\n Uses memory proportional to the input sizes, unless [the inputs have appropriate indexes](/transform-data/optimization/#join). Certain joins with more than 2 inputs use additional memory, see details in the optimized plan.\n expansive: True\n expansive_details: |\n For `CrossJoin`s, Cartesian product of the inputs (|N| x |M|). Note that, in many cases, a join that shows up as a cross join in the RAW PLAN will actually be turned into an inner join in the OPTIMIZED PLAN, by making use of an equality WHERE condition.\n For other join types, depends on the join order and facts about the joined collections.\n example: "`InnerJoin (#0 = #2)`"\n\n - operator: Join\n plan_types: "optimized"\n description: |\n Returns combinations of rows from each input whenever some equality predicates are `true`.\n uses_memory: True\n memory_details: |\n The `Join` operator itself uses memory only for `type=differential` with more than 2 inputs.\n However, `Join` operators need [arrangements](/get-started/arrangements/#arrangements) on their inputs (shown by the `ArrangeBy` operator).\n These arrangements use memory proportional to the input sizes. 
If an input has an [appropriate index](/transform-data/optimization/#join), then the arrangement of the index will be reused.\n expansive: True\n expansive_details: |\n Depends on the join order and facts about the joined collections.\n example: "`Join on=(#1 = #2) type=delta`"\n\n - operator: Join::~\n plan_types: "LIR"\n description: |\n The input operators are listed in the order performed by the join.\n\n Returns combinations of rows from each input whenever some equality predicates are `true`.\n\n There are two types of `Join`: `Join::Differential` (also called linear join) and `Join::Delta`, with [documented differences](/transform-data/optimization/#join).\n uses_memory: True\n memory_details: |\n Uses memory for 3-way or more differential joins.\n expansive: True\n expansive_details: |\n Depends on the join order and facts about the joined collections.\n example: "`Join::Differential 6 » 7`"\n\n\n - operator: CrossJoin\n plan_types: "optimized"\n description: |\n An alias for a `Join` with an empty predicate (emits all combinations). 
Note that not all cross joins are marked\n as `CrossJoin`: In a join with more than 2 inputs, it can happen that there is a cross join between some of the inputs.\n You can recognize this case by `ArrangeBy` operators having empty keys, i.e., `ArrangeBy keys=[[]]`.\n uses_memory: True\n memory_details: |\n Uses memory for 3-way or more differential joins.\n expansive: True\n expansive_details: |\n Cartesian product of the inputs (|N| x |M|).\n example: "`CrossJoin type=differential`"\n\n - operator: Reduce\n plan_types: "optimized"\n description: |\n Groups the input rows by some scalar expressions, reduces each group using some aggregate functions, and produces rows containing the group key and aggregate outputs.\n uses_memory: True\n memory_details: |\n `SUM`, `COUNT`, and most other aggregations use a moderate amount of memory (proportional either to twice the output size or to input size + output size).\n `MIN` and `MAX` aggregates can use significantly more memory. This can be improved by including group size hints in the query, see\n [`mz_introspection.mz_expected_group_size_advice`](/sql/system-catalog/mz_introspection/#mz_expected_group_size_advice).\n expansive: False\n example: "`Reduce group_by=[#0] aggregates=[max((#0 * #1))]`"\n\n - operator: Reduce::~\n plan_types: "LIR"\n description: |\n The number after the operator is the input operator's `lir_id`.\n\n Groups the input rows by some scalar expressions, reduces each group using some aggregate functions, and produces rows containing the group key and aggregate outputs.\n\n There are five types of `Reduce`, ordered by increasing complexity:\n\n 1. `Reduce::Distinct` corresponds to the SQL `DISTINCT` operator.\n\n 2. `Reduce::Accumulable` (e.g., `SUM`, `COUNT`) corresponds to several easy to implement aggregations that can be executed simultaneously and efficiently.\n\n 3. `Reduce::Hierarchical` (e.g., `MIN`, `MAX`) corresponds to an aggregation requiring a tower of arrangements. 
These can be either monotonic (more efficient) or bucketed. These may benefit from a hint; [see `mz_introspection.mz_expected_group_size_advice`](/sql/system-catalog/mz_introspection/#mz_expected_group_size_advice).\n\n 4. `Reduce::Collation` corresponds to an arbitrary mix of reductions of different types, which will be performed separately and then joined together.\n\n 5. `Reduce::Basic` (e.g., window functions, `list_agg`) corresponds to a single non-incremental aggregation.\n\n uses_memory: True\n memory_details: |\n `Distinct` and `Accumulable` aggregates use a moderate amount of memory (proportional to twice the output size).\n `MIN` and `MAX` aggregates can use significantly more memory. This can be improved by including group size hints in the query, see\n [`mz_introspection.mz_expected_group_size_advice`](/sql/system-catalog/mz_introspection/#mz_expected_group_size_advice).\n `Basic` aggregates use memory proportional to the input + output size.\n `Collation` aggregates use memory that is the sum of their constituents, plus some memory for the join at the end.\n expansive: False\n example: "`Reduce::Accumulable 8`"\n\n - operator: Reduce\n plan_types: "raw"\n description: |\n Groups the input rows by some scalar expressions, reduces each group using\n some aggregate functions, and produces rows containing the group key and\n aggregate outputs. In the case where the group key is empty and the input\n is empty, returns a single row with the aggregate functions applied to the\n empty input collection.\n uses_memory: True\n memory_details: |\n `SUM`, `COUNT`, and most other aggregations use a moderate amount of memory (proportional either to twice the output size or to input size + output size).\n `MIN` and `MAX` aggregates can use significantly more memory. 
This can be improved by including group size hints in the query, see\n [`mz_introspection.mz_expected_group_size_advice`](/sql/system-catalog/mz_introspection/#mz_expected_group_size_advice).\n expansive: False\n example: "`Reduce group_by=[#0] aggregates=[max((#0 * #1))]`"\n\n - operator: Distinct\n plan_types: "optimized"\n description: |\n Alias for a `Reduce` with an empty aggregate list.\n uses_memory: True\n memory_details: |\n Uses memory proportional to twice the output size.\n expansive: False\n example: "`Distinct`"\n\n - operator: Distinct\n plan_types: "raw"\n description: |\n Removes duplicate copies of input rows.\n uses_memory: True\n memory_details: |\n Uses memory proportional to twice the output size.\n expansive: False\n example: "`Distinct`"\n\n - operator: TopK\n plan_types: "optimized,raw"\n description: |\n Groups the input rows by some scalar expressions, sorts each group using the group key, removes the top `offset` rows in each group, and returns the next `limit` rows.\n uses_memory: True\n memory_details: |\n Can use significant amount as the operator can significantly overestimate\n the group sizes. Consult\n [`mz_introspection.mz_expected_group_size_advice`](/sql/system-catalog/mz_introspection/#mz_expected_group_size_advice).\n expansive: False\n example: "`TopK order_by=[#1 asc nulls_last, #0 desc nulls_first] limit=5`"\n\n - operator: TopK::~\n plan_types: "LIR"\n description: |\n The number after the operator is the input operator's `lir_id`.\n\n Groups the input rows, sorts them according to some ordering, and returns at most `K` rows at some offset from the top of the list, where `K` is some (possibly computed) limit.\n\n There are three types of `TopK`. Two are special cased for monotonic inputs (i.e., inputs which never retract data).\n\n 1. `TopK::MonotonicTop1`.\n 2. `TopK::MonotonicTopK`, which may give an expression indicating the limit.\n 3. 
`TopK::Basic`, a generic `TopK` plan.\n uses_memory: True\n memory_details: |\n `MonotonicTop1` or `MonotonicTopK` uses a moderate amount of memory. Other TopKs use significantly more memory as the operator can significantly overestimate\n the group sizes. Consult\n [`mz_introspection.mz_expected_group_size_advice`](/sql/system-catalog/mz_introspection/#mz_expected_group_size_advice).\n expansive: False\n example: "`TopK::Basic 10`"\n\n\n - operator: Negate\n plan_types: "optimized,raw"\n description: |\n Negates the row counts of the input. This is usually used in combination with union to remove rows from the other union input.\n uses_memory: False\n memory_details: ""\n expansive: False\n example: "`Negate`"\n\n - operator: Negate\n plan_types: "LIR"\n description: |\n Negates the row counts of the input. This is usually used in combination with union to remove rows from the other union input.\n uses_memory: False\n memory_details: ""\n expansive: False\n example: "`Negate 17`"\n\n - operator: Threshold\n plan_types: "optimized,raw"\n description: |\n Removes any rows with negative counts.\n uses_memory: True\n memory_details: |\n Uses memory proportional to the input and output size, twice.\n expansive: False\n example: "`Threshold`"\n\n - operator: Threshold\n plan_types: "LIR"\n description: |\n Removes any rows with negative counts.\n uses_memory: True\n memory_details: |\n Uses memory proportional to the input and output size, twice.\n expansive: False\n example: "`Threshold 47`"\n\n - operator: Union\n plan_types: "optimized,raw"\n description: |\n Sums the counts of each row of all inputs. (Corresponds to `UNION ALL` rather than `UNION`/`UNION DISTINCT`.)\n uses_memory: True\n memory_details: |\n Moderate use of memory. 
Some union operators force consolidation, which results in a memory spike, largely at hydration time.\n expansive: False\n example: "`Union`"\n\n - operator: Union\n plan_types: "LIR"\n description: |\n Combines its inputs into a unified output, emitting one row for each row on any input. (Corresponds to `UNION ALL` rather than `UNION`/`UNION DISTINCT`.)\n uses_memory: True\n memory_details: |\n If the union "consolidates output", it will make moderate use of memory, particularly at hydration time. If the union is not marked with "consolidates output", it will not consume memory.\n expansive: False\n example: "`Union 7 10 11 14 (consolidates output)`"\n\n - operator: ArrangeBy\n plan_types: "optimized"\n description: |\n Indicates a point that will become an [arrangement](/get-started/arrangements/#arrangements) in the dataflow engine (each `keys` element will be a different arrangement). Note that if an appropriate index already exists on the input or the output of the previous operator is already arranged with a key that is also requested here, then this operator will just pass on that existing arrangement instead of creating a new one.\n uses_memory: True\n memory_details: |\n Depends. If arrangements need to be created, they use memory proportional to the input size.\n expansive: False\n example: "`ArrangeBy keys=[[#0]]`"\n\n - operator: Arrange\n plan_types: "LIR"\n description: |\n Indicates a point that will become an [arrangement](/get-started/arrangements/#arrangements) in the dataflow engine, i.e., it will consume memory to cache results.\n uses_memory: True\n memory_details: |\n Uses memory proportional to the input size. Note that in the LIR / physical plan, `Arrange`/`ArrangeBy` almost always means that an arrangement will actually be created. 
(This is in contrast to the "optimized" plan, where an `ArrangeBy` being present in the plan often does not mean that an arrangement will actually be created.)\n expansive: False\n example: "`Arrange 12`"\n\n - operator: With ... Return ...\n plan_types: "optimized,raw"\n description: |\n Introduces CTEs, i.e., makes it possible for sub-plans to be consumed multiple times by downstream operators.\n uses_memory: False\n memory_details: ""\n expansive: False\n example: "[See above](#reading-decorrelated-and-optimized-plans)"\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\explain_plan_operators.yml
explain_plan_operators.yml
YAML
17,304
0.95
0.042289
0
python-kit
269
2024-02-10T02:19:56.245146
Apache-2.0
false
1011113f0a169bddeca21756b6e96448
# CREATE INDEX idx_orders_view_qty on orders_view (quantity);\nqueries:\n - query: |\n ```mzsql\n SELECT * FROM orders_view;\n ```\n index_usage: Index scan.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity = 10;\n ```\n index_usage: Point lookup.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity IN (10, 20);\n ```\n index_usage: Point lookup.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity = 10 OR quantity = 20;\n ```\n index_usage: |\n Point lookup. Query uses `OR` to combine conditions on the **same** field.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity = 10 AND price = 5.00;\n ```\n index_usage: |\n Point lookup on `quantity`, then filter on `price`.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE (quantity, price) = (10, 5.00);\n ```\n index_usage: |\n Point lookup on `quantity`, then filter on `price`.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity = 10 OR price = 5.00;\n ```\n index_usage: |\n Index scan. Query uses `OR` to combine conditions on **different** fields.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity <= 10;\n ```\n index_usage: Index scan.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE round(quantity) = 20;\n ```\n index_usage: Index scan.\n\n\n - query: |\n ```mzsql\n -- Assume quantity is an integer\n SELECT * FROM orders_view WHERE quantity = 'hello';\n SELECT * FROM orders_view WHERE quantity::TEXT = 'hello';\n ```\n index_usage: |\n Index scan, assuming `quantity` field in `orders_view` is an integer.\n In the first query, the quantity is implicitly cast to text.\n In the second query, the quantity is explicitly cast to text.\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\index_usage_key_quantity.yml
index_usage_key_quantity.yml
YAML
1,894
0.8
0
0.016393
awesome-app
969
2024-02-01T21:12:51.825342
Apache-2.0
false
d70f9d6cd67cdcc9791c2013890a3ce8
# CREATE INDEX idx_orders_view_qty on orders_view (quantity);\nqueries:\n - query: |\n ```mzsql\n SELECT * FROM orders_view;\n ```\n index_usage: Index scan.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity = 10;\n ```\n index_usage: |\n Index scan. Query does not include equality conditions on **all** indexed\n fields.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity = 10 AND price = 2.50;\n ```\n index_usage: Point lookup.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view WHERE quantity = 10 OR price = 2.50;\n ```\n index_usage: |\n Index scan. Query uses `OR` to combine conditions on **different** fields.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view\n WHERE quantity = 10 AND (price = 2.50 OR price = 3.00);\n ```\n index_usage: |\n Point lookup. Query uses `OR` to combine conditions on **same** field and `AND` to combine conditions on **different** fields.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view\n WHERE quantity = 10 AND price = 2.50 AND item = 'cupcake';\n ```\n index_usage: |\n Point lookup on the index keys `quantity` and `price`, then filter on\n `item`.\n\n - query: |\n ```mzsql\n SELECT * FROM orders_view\n WHERE quantity = 10 AND price = 2.50 OR item = 'cupcake';\n ```\n index_usage: |\n Index scan. Query uses `OR` to combine conditions on **different** fields.\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\index_usage_key_quantity_price.yml
index_usage_key_quantity_price.yml
YAML
1,489
0.8
0
0.021277
awesome-app
96
2025-07-09T18:27:45.998985
GPL-3.0
false
5cd70d741a4187539b53e4a89ff0a3d1
columns:\n - column: name\n - column: global_id\n - column: lir_id\n - column: parent_lir_id\n - column: operator\n - column: duration\n - column: count\n\nrows:\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`6`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n duration: "`00:00:00.74516`"\n count: "`108650`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`5`"\n parent_lir_id: "`6`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n duration: "`00:00:00.017005`"\n count: "`19099`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`4`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 3\n ```\n duration: "`00:00:00.058835`"\n count: "`11506`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`3`"\n parent_lir_id: "`4`"\n operator: |\n ```mzsql\n Get::PassArrangements u145\n ```\n duration: "`null`"\n count: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`2`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n duration: "`00:00:00.013885`"\n count: "`7413`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`1`"\n parent_lir_id: "`2`"\n operator: |\n ```mzsql\n Get::Collection u144\n ```\n duration: "`null`"\n count: "`null`"\n\n - name: "`wins_by_item`"\n global_id: "`u149`"\n lir_id: "`8`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n duration: "`00:00:00.013887`"\n count: "`9347`"\n\n - name: "`wins_by_item`"\n global_id: "`u149`"\n lir_id: "`7`"\n parent_lir_id: "`8`"\n operator: |\n ```mzsql\n Get::PassArrangements u148\n ```\n duration: "`null`"\n count: "`null`"\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\query_attribution_computation_time_output.yml
query_attribution_computation_time_output.yml
YAML
1,872
0.7
0
0
vue-tools
926
2023-12-03T01:16:14.731377
GPL-3.0
false
ca6a3e1fcae0695b3211b4461554a769
columns:\n - column: name\n - column: global_id\n - column: lir_id\n - column: parent_lir_id\n - column: operator\n - column: size\n\nrows:\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`6`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n size: "`38 MB`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`5`"\n parent_lir_id: "`6`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n size: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`4`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 3\n ```\n size: "`2008 kB`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`3`"\n parent_lir_id: "`4`"\n operator: |\n ```mzsql\n Get::PassArrangements u145\n ```\n size: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`2`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n size: "`900 kB`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`1`"\n parent_lir_id: "`2`"\n operator: |\n ```mzsql\n Get::Collection u144\n ```\n size: "`null`"\n\n - name: "`wins_by_item`"\n global_id: "`u149`"\n lir_id: "`8`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n size: "`707 kB`"\n\n - name: "`wins_by_item`"\n global_id: "`u149`"\n lir_id: "`7`"\n parent_lir_id: "`8`"\n operator: |\n ```mzsql\n Get::PassArrangements u148\n ```\n size: "`null`"\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\query_attribution_memory_usage_output.yml
query_attribution_memory_usage_output.yml
YAML
1,608
0.7
0
0
react-lib
160
2023-08-28T17:32:42.002177
Apache-2.0
false
63bf29f867724282a31af281011b09dc
columns:\n - column: name\n - column: global_id\n - column: lir_id\n - column: parent_lir_id\n - column: operator\n - column: size\n\nrows:\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`6`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n size: "`11 MB`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`5`"\n parent_lir_id: "`6`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n size: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`4`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 3\n ```\n size: "`1996 kB`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`3`"\n parent_lir_id: "`4`"\n operator: |\n ```mzsql\n Get::PassArrangements u145\n ```\n size: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`2`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n size: "`575 kB`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`1`"\n parent_lir_id: "`2`"\n operator: |\n ```mzsql\n Get::Collection u144\n ```\n size: "`null`"\n\n - name: "`wins_by_item`"\n global_id: "`u187`"\n lir_id: "`8`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n size: "`402 kB`"\n\n - name: "`wins_by_item`"\n global_id: "`u187`"\n lir_id: "`7`"\n parent_lir_id: "`8`"\n operator: |\n ```mzsql\n Get::PassArrangements u186\n ```\n size: "`null`"\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\query_attribution_memory_usage_w_hint_output.yml
query_attribution_memory_usage_w_hint_output.yml
YAML
1,608
0.7
0
0
react-lib
75
2023-10-18T15:07:34.301610
MIT
false
9c9548a768a59eb80ded82d4114be340
columns:\n - column: name\n - column: global_id\n - column: lir_id\n - column: parent_lir_id\n - column: operator\n - column: levels\n - column: to_cut\n - column: hint\n - column: savings\n\n\nrows:\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`6`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n levels: "`8`"\n to_cut: "`6`"\n savings: "`27 MB`"\n hint: "`255.0`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`5`"\n parent_lir_id: "`6`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n levels: "`null`"\n to_cut: "`null`"\n savings: "`null`"\n hint: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`4`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 3\n ```\n levels: "`null`"\n to_cut: "`null`"\n savings: "`null`"\n hint: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`3`"\n parent_lir_id: "`4`"\n operator: |\n ```mzsql\n Get::PassArrangements u145\n ```\n levels: "`null`"\n to_cut: "`null`"\n savings: "`null`"\n hint: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`2`"\n parent_lir_id: "`5`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n levels: "`null`"\n to_cut: "`null`"\n savings: "`null`"\n hint: "`null`"\n\n - name: "`winning_bids`"\n global_id: "`u148`"\n lir_id: "`1`"\n parent_lir_id: "`2`"\n operator: |\n ```mzsql\n Get::Collection u144\n ```\n levels: "`null`"\n to_cut: "`null`"\n savings: "`null`"\n hint: "`null`"\n\n - name: "`wins_by_item`"\n global_id: "`u149`"\n lir_id: "`8`"\n parent_lir_id: "`null`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n levels: "`null`"\n to_cut: "`null`"\n savings: "`null`"\n hint: "`null`"\n\n - name: "`wins_by_item`"\n global_id: "`u149`"\n lir_id: "`7`"\n parent_lir_id: "`8`"\n operator: |\n ```mzsql\n Get::PassArrangements u148\n ```\n levels: "`null`"\n to_cut: "`null`"\n savings: "`null`"\n hint: "`null`"\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\query_attribution_topk_hints_output.yml
query_attribution_topk_hints_output.yml
YAML
2,167
0.7
0
0
react-lib
374
2025-03-12T17:54:40.043821
Apache-2.0
false
b8980eb934175aec7a320b1f03a9d3af
columns:\n - column: name\n - column: global_id\n - column: lir_id\n - column: operator\n - column: worker_id\n - column: ratio\n - column: elapsed_ns\n - column: avg_ns\n\nrows:\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`6`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n worker_id: "`0`"\n ratio: "`1`"\n elapsed_ns: "`00:00:03.172611`"\n avg_ns: "`00:00:03.177245`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`6`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n worker_id: "`1`"\n ratio: "`1`"\n elapsed_ns: "`00:00:03.175515`"\n avg_ns: "`00:00:03.177245`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`6`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n worker_id: "`2`"\n ratio: "`1`"\n elapsed_ns: "`00:00:03.174291`"\n avg_ns: "`00:00:03.177245`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`6`"\n operator: |\n ```mzsql\n TopK::Basic 5\n ```\n worker_id: "`3`"\n ratio: "`1`"\n elapsed_ns: "`00:00:03.186564`"\n avg_ns: "`00:00:03.177245`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`5`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n worker_id: "`0`"\n ratio: "`0.97`"\n elapsed_ns: "`00:00:00.157787`"\n avg_ns: "`00:00:00.162148`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`5`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n worker_id: "`1`"\n ratio: "`1.05`"\n elapsed_ns: "`00:00:00.170231`"\n avg_ns: "`00:00:00.162148`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`5`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n worker_id: "`2`"\n ratio: "`1`"\n elapsed_ns: "`00:00:00.162352`"\n avg_ns: "`00:00:00.162148`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`5`"\n operator: |\n ```mzsql\n Join::Differential 2 » 4\n ```\n worker_id: "`3`"\n ratio: "`0.98`"\n elapsed_ns: "`00:00:00.158224`"\n avg_ns: "`00:00:00.162148`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`4`"\n 
operator: |\n ```mzsql\n Arrange 3\n ```\n worker_id: "`0`"\n ratio: "`0.67`"\n elapsed_ns: "`00:00:00.059754`"\n avg_ns: "`00:00:00.088972`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`4`"\n operator: |\n ```mzsql\n Arrange 3\n ```\n worker_id: "`1`"\n ratio: "`0.64`"\n elapsed_ns: "`00:00:00.057283`"\n avg_ns: "`00:00:00.088972`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`4`"\n operator: |\n ```mzsql\n Arrange 3\n ```\n worker_id: "`2`"\n ratio: "`2.02`"\n elapsed_ns: "`00:00:00.179739`"\n avg_ns: "`00:00:00.088972`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`4`"\n operator: |\n ```mzsql\n Arrange 3\n ```\n worker_id: "`3`"\n ratio: "`0.66`"\n elapsed_ns: "`00:00:00.059112`"\n avg_ns: "`00:00:00.088972`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`2`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n worker_id: "`0`"\n ratio: "`0.82`"\n elapsed_ns: "`00:00:00.023081`"\n avg_ns: "`00:00:00.028271`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`2`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n worker_id: "`1`"\n ratio: "`1.61`"\n elapsed_ns: "`00:00:00.045394`"\n avg_ns: "`00:00:00.028271`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`2`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n worker_id: "`2`"\n ratio: "`0.77`"\n elapsed_ns: "`00:00:00.021894`"\n avg_ns: "`00:00:00.028271`"\n\n - name: "`winning_bids`"\n global_id: "`u186`"\n lir_id: "`2`"\n operator: |\n ```mzsql\n Arrange 1\n ```\n worker_id: "`3`"\n ratio: "`0.8`"\n elapsed_ns: "`00:00:00.022717`"\n avg_ns: "`00:00:00.028271`"\n\n - name: "`wins_by_item`"\n global_id: "`u187`"\n lir_id: "`8`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n worker_id: "`0`"\n ratio: "`0.85`"\n elapsed_ns: "`00:00:00.02085`"\n avg_ns: "`00:00:00.024526`"\n\n - name: "`wins_by_item`"\n global_id: "`u187`"\n lir_id: "`8`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n worker_id: "`1`"\n ratio: "`1.27`"\n elapsed_ns: 
"`00:00:00.031028`"\n avg_ns: "`00:00:00.024526`"\n\n - name: "`wins_by_item`"\n global_id: "`u187`"\n lir_id: "`8`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n worker_id: "`2`"\n ratio: "`1.44`"\n elapsed_ns: "`00:00:00.035279`"\n avg_ns: "`00:00:00.024526`"\n\n - name: "`wins_by_item`"\n global_id: "`u187`"\n lir_id: "`8`"\n operator: |\n ```mzsql\n Arrange 7\n ```\n worker_id: "`3`"\n ratio: "`0.45`"\n elapsed_ns: "`00:00:00.010946`"\n avg_ns: "`00:00:00.024526`"\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\query_attribution_worker_skew_output.yml
query_attribution_worker_skew_output.yml
YAML
4,999
0.7
0
0
node-utils
105
2024-03-13T09:08:16.342516
GPL-3.0
false
fb0fa999f417c939c941fe467a18c280
columns:\n - column: Option\n - column: Description\n\nrows:\n - Option: "**REDACTED**"\n Description: "If specified, literals will be redacted."\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\show_create_redacted_option.yml
show_create_redacted_option.yml
YAML
146
0.7
0
0
react-lib
691
2024-10-02T21:52:20.740396
GPL-3.0
false
6915766be408a70876757df1d58a9aae
- type: Generic\n description: Generic functions can typically take arguments of any type.\n functions:\n - signature: CAST (cast_expr) -> T\n description: Value as type `T`\n url: /sql/functions/cast\n\n - signature: 'coalesce(x: T...) -> T?'\n description: First non-_NULL_ arg, or _NULL_ if all are _NULL_.\n\n - signature: 'greatest(x: T...) -> T?'\n description: The maximum argument, or _NULL_ if all are _NULL_.\n\n - signature: 'least(x: T...) -> T?'\n description: The minimum argument, or _NULL_ if all are _NULL_.\n\n - signature: 'nullif(x: T, y: T) -> T?'\n description: _NULL_ if `x == y`, else `x`.\n\n- type: Aggregate\n description: Aggregate functions take one or more of the same element type as arguments.\n functions:\n - signature: 'array_agg(x: T) -> T[]'\n description: Aggregate values (including nulls) as an array\n url: /sql/functions/array_agg\n\n - signature: 'avg(x: T) -> U'\n description: |\n Average of `T`'s values.\n\n Returns `numeric` if `x` is `int`, `double` if `x` is `real`, else returns\n same type as `x`.\n\n - signature: 'bool_and(x: T) -> T'\n description: _NULL_ if all values of `x` are _NULL_, otherwise true if all values of `x` are true, otherwise false.\n\n - signature: 'bool_or(x: T) -> T'\n description: _NULL_ if all values of `x` are _NULL_, otherwise true if any values of `x` are true, otherwise false.\n\n - signature: 'count(x: T) -> bigint'\n description: Number of non-_NULL_ inputs.\n\n - signature: jsonb_agg(expression) -> jsonb\n description: Aggregate values (including nulls) as a jsonb array\n url: /sql/functions/jsonb_agg\n\n - signature: jsonb_object_agg(keys, values) -> jsonb\n description: Aggregate keys and values (including nulls) as a jsonb object\n url: /sql/functions/jsonb_object_agg\n\n - signature: 'max(x: T) -> T'\n description: Maximum value among `T`.\n\n - signature: 'min(x: T) -> T'\n description: Minimum value among `T`.\n\n - signature: 'stddev(x: T) -> U'\n description: |\n Historical alias for 
`stddev_samp`. *(imprecise)*\n\n Returns `numeric` if `x` is `int`, `double` if `x` is `real`, else returns\n same type as `x`.\n\n - signature: 'stddev_pop(x: T) -> U'\n description: |\n Population standard deviation of `T`'s values. *(imprecise)*\n\n Returns `numeric` if `x` is `int`, `double` if `x` is `real`, else returns\n same type as `x`.\n\n - signature: 'stddev_samp(x: T) -> U'\n description: |\n Sample standard deviation of `T`'s values. *(imprecise)*\n\n Returns `numeric` if `x` is `int`, `double` if `x` is `real`, else returns\n same type as `x`.\n\n - signature: 'string_agg(value: text, delimiter: text) -> text'\n description: Concatenates the non-null input values into text. Each value after the first is preceded by the corresponding delimiter\n url: /sql/functions/string_agg\n\n - signature: 'sum(x: T) -> U'\n description: |\n Sum of `T`'s values\n\n Returns `bigint` if `x` is `int` or `smallint`, `numeric` if `x` is `bigint` or `uint8`,\n `uint8` if `x` is `uint4` or `uint2`, else returns same type as `x`.\n\n - signature: 'variance(x: T) -> U'\n description: |\n Historical alias for `var_samp`. *(imprecise)*\n\n Returns `numeric` if `x` is `int`, `double` if `x` is `real`, else returns\n same type as `x`.\n - signature: 'var_pop(x: T) -> U'\n description: |\n Population variance of `T`'s values. *(imprecise)*\n\n Returns `numeric` if `x` is `int`, `double` if `x` is `real`, else returns\n same type as `x`.\n - signature: 'var_samp(x: T) -> U'\n description: |\n Sample variance of `T`'s values. 
*(imprecise)*\n\n Returns `numeric` if `x` is `int`, `double` if `x` is `real`, else returns\n same type as `x`.\n\n- type: List\n description: List functions take [`list`](../types/list) arguments, and are [polymorphic](../types/list/#polymorphism).\n functions:\n - signature: 'list_agg(x: any) -> L'\n description: Aggregate values (including nulls) as a list\n url: /sql/functions/list_agg\n\n - signature: 'list_append(l: listany, e: listelementany) -> L'\n description: Appends `e` to `l`.\n\n - signature: 'list_cat(l1: listany, l2: listany) -> L'\n description: Concatenates `l1` and `l2`.\n\n - signature: 'list_length(l: listany) -> int'\n description: Return the number of elements in `l`.\n\n - signature: 'list_prepend(e: listelementany, l: listany) -> listany'\n description: Prepends `e` to `l`.\n\n- type: Map\n description: Map functions take [`map`](../types/map) arguments, and are [polymorphic](../types/#polymorphism).\n functions:\n - signature: 'map_length(m: mapany) -> int'\n description: Return the number of elements in `m`.\n - signature: 'map_build(kvs: list record(text, T)) -> map[text=>T]'\n description: |\n Builds a map from a list of records whose fields are two elements, the\n first of which is `text`. In the face of duplicate keys, `map_build` retains\n value from the record in the latest positition. This function is\n purpose-built to process [Kafka headers](/sql/create-source/kafka/#headers).\n version-added: v0.86\n - signature: 'map_agg(keys: text, values: T) -> map[text=>T]'\n description: Aggregate keys and values (including nulls) as a map\n url: /sql/functions/map_agg\n\n- type: Numbers\n description: Number functions take number-like arguments, e.g. 
[`int`](../types/int),\n [`float`](../types/float), [`numeric`](../types/numeric), unless otherwise specified.\n functions:\n - signature: 'abs(x: N) -> N'\n description: The absolute value of `x`.\n\n - signature: 'cbrt(x: double precision) -> double precision'\n description: The cube root of `x`.\n\n - signature: 'ceil(x: N) -> N'\n description: The smallest integer >= `x`.\n\n - signature: 'ceiling(x: N) -> N'\n description: "Alias of `ceil`."\n\n - signature: 'exp(x: N) -> N'\n description: Exponential of `x` (e raised to the given power)\n\n - signature: 'floor(x: N) -> N'\n description: The largest integer <= `x`.\n\n - signature: 'ln(x: double precision) -> double precision'\n description: Natural logarithm of `x`.\n\n - signature: 'ln(x: numeric) -> numeric'\n description: Natural logarithm of `x`.\n\n - signature: 'log(x: double precision) -> double precision'\n description: Base 10 logarithm of `x`.\n\n - signature: 'log(x: numeric) -> numeric'\n description: Base 10 logarithm of `x`.\n\n - signature: 'log10(x: double precision) -> double precision'\n description: Base 10 logarithm of `x`, same as `log`.\n\n - signature: 'log10(x: numeric) -> numeric'\n description: Base 10 logarithm of `x`, same as `log`.\n\n - signature: 'log(b: numeric, x: numeric) -> numeric'\n description: Base `b` logarithm of `x`.\n\n - signature: 'mod(x: N, y: N) -> N'\n description: "`x % y`"\n\n - signature: 'pow(x: double precision, y: double precision) -> double precision'\n description: "Alias of `power`."\n\n - signature: 'pow(x: numeric, y: numeric) -> numeric'\n description: "Alias of `power`."\n\n - signature: 'power(x: double precision, y: double precision) -> double precision'\n description: "`x` raised to the power of `y`."\n\n - signature: 'power(x: numeric, y: numeric) -> numeric'\n description: "`x` raised to the power of `y`."\n\n - signature: 'round(x: N) -> N'\n description: |\n `x` rounded to the nearest whole number.\n If `N` is `real` or `double precision`, 
rounds ties to the nearest even number.\n If `N` is `numeric`, rounds ties away from zero.\n\n - signature: 'round(x: numeric, y: int) -> numeric'\n description: "`x` rounded to `y` decimal places, while retaining the same\n [`numeric`](../types/numeric) scale; rounds ties away from zero."\n\n - signature: 'sqrt(x: numeric) -> numeric'\n description: The square root of `x`.\n\n - signature: 'sqrt(x: double precision) -> double precision'\n description: The square root of `x`.\n\n - signature: 'trunc(x: N) -> N'\n description: "`x` truncated toward zero to a whole number."\n\n- type: Trigonometric\n description: Trigonometric functions take and return `double precision` values.\n functions:\n - signature: 'cos(x: double precision) -> double precision'\n description: The cosine of `x`, with `x` in radians.\n\n - signature: 'acos(x: double precision) -> double precision'\n description: The inverse cosine of `x`, result in radians.\n\n - signature: 'cosh(x: double precision) -> double precision'\n description: The hyperbolic cosine of `x`, with `x` as a hyperbolic angle.\n\n - signature: 'acosh(x: double precision) -> double precision'\n description: The inverse hyperbolic cosine of `x`.\n\n - signature: 'cot(x: double precision) -> double precision'\n description: The cotangent of `x`, with `x` in radians.\n\n - signature: 'sin(x: double precision) -> double precision'\n description: The sine of `x`, with `x` in radians.\n\n - signature: 'asin(x: double precision) -> double precision'\n description: The inverse sine of `x`, result in radians.\n\n - signature: 'sinh(x: double precision) -> double precision'\n description: The hyperbolic sine of `x`, with `x` as a hyperbolic angle.\n\n - signature: 'asinh(x: double precision) -> double precision'\n description: The inverse hyperbolic sine of `x`.\n\n - signature: 'tan(x: double precision) -> double precision'\n description: The tangent of `x`, with `x` in radians.\n\n - signature: 'atan(x: double precision) -> double 
precision'\n description: The inverse tangent of `x`, result in radians.\n\n - signature: 'tanh(x: double precision) -> double precision'\n description: The hyperbolic tangent of `x`, with `x` as a hyperbolic angle.\n\n - signature: 'atanh(x: double precision) -> double precision'\n description: The inverse hyperbolic tangent of `x`.\n\n - signature: 'radians(x: double precision) -> double precision'\n description: Converts degrees to radians.\n\n - signature: 'degrees(x: double precision) -> double precision'\n description: Converts radians to degrees.\n\n- type: String\n functions:\n - signature: 'ascii(s: str) -> int'\n description: The ASCII value of `s`'s left-most character.\n\n - signature: 'btrim(s: str) -> str'\n description: Trim all spaces from both sides of `s`.\n\n - signature: 'btrim(s: str, c: str) -> str'\n description: Trim any character in `c` from both sides of `s`.\n\n - signature: 'bit_count(b: bytea) -> int'\n description: Returns the number of bits set in the bit string (aka _popcount_).\n\n - signature: 'bit_length(s: str) -> int'\n description: Number of bits in `s`.\n\n - signature: 'bit_length(b: bytea) -> int'\n description: Number of bits in `b`.\n\n - signature: 'char_length(s: str) -> int'\n description: Number of code points in `s`.\n\n - signature: 'chr(i: int) -> str'\n description: |\n Character with the given Unicode codepoint.\n Only supports codepoints that can be encoded in UTF-8.\n The NULL (0) character is not allowed.\n\n - signature: 'concat(f: any, r: any...) -> text'\n description: Concatenates the text representation of non-NULL arguments.\n\n - signature: 'concat_ws(sep: str, f: any, r: any...) 
-> text'\n description: Concatenates the text representation of non-NULL arguments from `f` and `r` separated by `sep`.\n\n - signature: 'convert_from(b: bytea, src_encoding: text) -> text'\n description: Convert data `b` from original encoding specified by `src_encoding` into `text`.\n\n - signature: 'decode(s: text, format: text) -> bytea'\n description: Decode `s` using the specified textual representation\n url: /sql/functions/encode\n\n - signature: 'encode(b: bytea, format: text) -> text'\n description: Encode `b` using the specified textual representation\n url: /sql/functions/encode\n\n - signature: 'get_bit(b: bytea, n: int) -> int'\n description: Return the `n`th bit from `b`, where the left-most bit in `b` is at the 0th position.\n\n - signature: 'get_byte(b: bytea, n: int) -> int'\n description: Return the `n`th byte from `b`, where the left-most byte in `b` is at the 0th position.\n\n - signature: 'constant_time_eq(a: bytea, b: bytea) -> bool'\n description: Returns `true` if the arrays are identical, otherwise returns `false`. The implementation mitigates timing attacks by making a best-effort attempt to execute in constant time if the arrays have the same length, regardless of their contents.\n\n - signature: 'constant_time_eq(a: text, b: text) -> bool'\n description: Returns `true` if the strings are identical, otherwise returns `false`. The implementation mitigates timing attacks by making a best-effort attempt to execute in constant time if the strings have the same length, regardless of their contents.\n\n - signature: 'initcap(a: text) -> text'\n description: |\n Returns `a` with the first character of every word in upper case and all\n other characters in lower case. Words are separated by non-alphanumeric\n characters.\n version-added: v0.97\n\n - signature: 'left(s: str, n: int) -> str'\n description: The first `n` characters of `s`. 
If `n` is negative, all but the last `|n|` characters of `s`.\n\n - signature: 'length(s: str) -> int'\n description: Number of code points in `s`.\n url: /sql/functions/length\n\n - signature: 'length(b: bytea) -> int'\n description: Number of bytes in `b`.\n url: /sql/functions/length\n\n - signature: 'length(s: bytea, encoding_name: str) -> int'\n description: Number of code points in `s` after encoding\n url: /sql/functions/length\n\n - signature: 'lower(s: str) -> str'\n description: Convert `s` to lowercase.\n\n - signature: 'lpad(s: str, len: int) -> str'\n description: "Prepend `s` with spaces up to length `len`,\n or right truncate if `len` is less than the length of `s`."\n\n - signature: 'lpad(s: str, len: int, p: str) -> str'\n description: "Prepend `s` with characters pulled from `p` up to length `len`,\n or right truncate if `len` is less than the length of `s`."\n\n - signature: 'ltrim(s: str) -> str'\n description: Trim all spaces from the left side of `s`.\n\n - signature: 'ltrim(s: str, c: str) -> str'\n description: Trim any character in `c` from the left side of `s`.\n\n - signature: 'octet_length(s: str) -> int'\n description: Number of bytes in `s`.\n\n - signature: 'octet_length(b: bytea) -> int'\n description: Number of bytes in `b`.\n\n - signature: 'parse_ident(ident: str[, strict_mode: bool]) -> str[]'\n description: |\n Given a qualified identifier like `a."b".c`, splits into an array of the\n constituent identifiers with quoting removed and escape sequences decoded.\n Extra characters after the last identifier are ignored unless the\n `strict_mode` parameter is `true` (defaults to `false`).\n\n - signature: 'position(sub: str IN s: str) -> int'\n description: The starting index of `sub` within `s` or `0` if `sub` is not a substring of `s`.\n\n - signature: 'regexp_match(haystack: str, needle: str [, flags: str]) -> str[]'\n description: |\n Matches the regular expression `needle` against haystack, returning a\n string array that 
contains the value of each capture group specified in\n `needle`, in order. If `flags` is set to the string `i` matches\n case-insensitively.\n\n - signature: 'regexp_replace(source: str, pattern: str, replacement: str [, flags: str]) -> str'\n description: |\n Replaces the first occurrence of `pattern` with `replacement` in `source`.\n No match will return `source` unchanged.\n\n If `flags` is set to `g`, all occurrences are replaced.\n If `flags` is set to `i`, matches case-insensitively.\n\n `$N` or `$name` in `replacement` can be used to match capture groups.\n `${N}` must be used to disambiguate capture group indexes from names if other characters follow `N`.\n A `$$` in `replacement` will write a literal `$`.\n\n See the [rust regex docs](https://docs.rs/regex/latest/regex/struct.Regex.html#method.replace) for more details about replacement.\n\n - signature: "regexp_matches(haystack: str, needle: str [, flags: str]) -> str[]"\n description: |\n Matches the regular expression `needle` against haystack, returning a\n string array that contains the value of each capture group specified in\n `needle`, in order. If `flags` is set to the string `i` matches\n case-insensitively. If `flags` is set to the string `g` all matches are\n returned, otherwise only the first match is returned. Without the `g`\n flag, the behavior is the same as `regexp_match`.\n\n - signature: 'regexp_split_to_array(text: str, pattern: str [, flags: str]) -> str[]'\n description: |\n Splits `text` by the regular expression `pattern` into an array.\n If `flags` is set to `i`, matches case-insensitively.\n\n - signature: 'repeat(s: str, n: int) -> str'\n description: Replicate the string `n` times.\n\n - signature: 'replace(s: str, f: str, r: str) -> str'\n description: "`s` with all instances of `f` replaced with `r`."\n\n - signature: 'right(s: str, n: int) -> str'\n description: The last `n` characters of `s`. 
If `n` is negative, all but the first `|n|` characters of `s`.\n\n - signature: 'rtrim(s: str) -> str'\n description: Trim all spaces from the right side of `s`.\n\n - signature: 'rtrim(s: str, c: str) -> str'\n description: Trim any character in `c` from the right side of `s`.\n\n - signature: 'split_part(s: str, d: str, i: int) -> str'\n description: Split `s` on delimiter `d`. Return the `str` at index `i`, counting from 1.\n\n - signature: 'starts_with(s: str, prefix: str) -> bool'\n description: Report whether `s` starts with `prefix`.\n\n - signature: 'substring(s: str, start_pos: int) -> str'\n description: Substring of `s` starting at `start_pos`\n url: /sql/functions/substring\n\n - signature: 'substring(s: str, start_pos: int, l: int) -> str'\n description: Substring starting at `start_pos` of length `l`\n url: /sql/functions/substring\n\n - signature: "substring('s' [FROM 'start_pos']? [FOR 'l']?) -> str"\n description: Substring starting at `start_pos` of length `l`\n url: /sql/functions/substring\n\n - signature: "translate(s: str, from: str, to: str) -> str"\n description: |\n Any character in `s` that matches a character in `from` is replaced by the corresponding character in `to`.\n If `from` is longer than `to`, occurrences of the extra characters in `from` are removed.\n\n - signature: "trim([BOTH | LEADING | TRAILING]? ['c'? FROM]? 
's') -> str"\n description: |\n Trims any character in `c` from `s` on the specified side.\n\n Defaults:\n\n &bull; Side: `BOTH`\n\n &bull; `'c'`: `' '` (space)\n\n - signature: 'try_parse_monotonic_iso8601_timestamp(s: str) -> timestamp'\n description: |\n Parses a specific subset of ISO8601 timestamps, returning `NULL` instead of\n error on failure: `YYYY-MM-DDThh:mm:ss.sssZ`\n url: /sql/functions/pushdown\n\n - signature: 'upper(s: str) -> str'\n description: Convert `s` to uppercase.\n\n - signature: 'reverse(s: str) -> str'\n description: Reverse the characters in `s`.\n\n - signature: 'string_to_array(s: str, delimiter: str [, null_string: str]) -> str[]'\n description: |\n Splits the string at occurrences of delimiter and returns a text array of\n the split segments.\n\n If `delimiter` is NULL, each character in the string will become a\n separate element in the array.\n\n If `delimiter` is an empty string, then the string is treated as a single\n field.\n\n If `null_string` is supplied and is not NULL, fields matching that string\n are replaced by NULL.\n\n For example: `string_to_array('xx~~yy~~zz', '~~', 'yy')` → `{xx,NULL,zz}`\n\n- type: Scalar\n description: Scalar functions take a list of scalar expressions\n functions:\n - signature: 'expression bool_op ALL(s: Scalars) -> bool'\n description: "`true` if applying [bool_op](#boolean-operators) to `expression` and every\n value of `s` evaluates to `true`."\n\n - signature: 'expression bool_op ANY(s: Scalars) -> bool'\n description: |\n\n `true` if applying [bool_op](#boolean-operators) to `expression` and any\n value of `s` evaluates to `true`. Avoid using in equi-join conditions as\n its use in the equi-join condition can lead to a significant increase in\n memory usage. 
See [idiomatic Materialize\n SQL](/transform-data/idiomatic-materialize-sql/any) for the alternative.\n\n - signature: 'expression IN(s: Scalars) -> bool'\n description: "`true` for each value in `expression` if it matches at least one\n element of `s`."\n\n - signature: 'expression NOT IN(s: Scalars) -> bool'\n description: "`true` for each value in `expression` if it does not match any elements\n of `s`."\n\n - signature: 'expression bool_op SOME(s: Scalars) -> bool'\n description: "`true` if applying [bool_op](#boolean-operators) to `expression` and any value\n of `s` evaluates to `true`."\n\n- type: Subquery\n description: Subquery functions take a query, e.g. [`SELECT`](/sql/select)\n functions:\n - signature: 'expression bool_op ALL(s: Query) -> bool'\n description: "`s` must return exactly one column; `true` if applying [bool_op](#boolean-operators)\n to `expression` and every value of `s` evaluates to `true`."\n\n - signature: 'expression bool_op ANY(s: Query) -> bool'\n description: "`s` must return exactly one column; `true` if applying [bool_op](#boolean-operators)\n to `expression` and any value of `s` evaluates to `true`."\n\n - signature: 'csv_extract(num_csv_col: int, col_name: string) -> col1: string, ... 
coln: string'\n description: Extracts separated values from a column containing a CSV file formatted as a string\n url: /sql/functions/csv_extract\n\n - signature: 'EXISTS(s: Query) -> bool'\n description: "`true` if `s` returns at least one row."\n\n - signature: 'expression IN(s: Query) -> bool'\n description: "`s` must return exactly one column; `true` for each value in `expression`\n if it matches at least one element of `s`."\n\n - signature: 'NOT EXISTS(s: Query) -> bool'\n description: "`true` if `s` returns zero rows."\n\n - signature: 'expression NOT IN(s: Query) -> bool'\n description: "`s` must return exactly one column; `true` for each value in `expression`\n if it does not match any elements of `s`."\n\n - signature: 'expression bool_op SOME(s: Query) -> bool'\n description: "`s` must return exactly one column; `true` if applying [bool_op](#boolean-operators)\n to `expression` and any value of `s` evaluates to `true`."\n\n- type: Date and time\n description: Time functions take or produce a time-like type, e.g. 
[`date`](../types/date),\n [`timestamp`](../types/timestamp), [`timestamp with time zone`](../types/timestamptz).\n functions:\n - signature: 'age(timestamp, timestamp) -> interval'\n description: 'Subtracts one timestamp from another, producing a "symbolic" result that uses years and months, rather than just days.'\n\n - signature: current_timestamp() -> timestamptz\n description: 'The `timestamp with time zone` representing when the query was executed.'\n unmaterializable: true\n\n - signature: 'date_bin(stride: interval, source: timestamp, origin: timestamp) -> timestamp'\n description: Align `source` with `origin` along `stride`\n url: /sql/functions/date-bin\n\n - signature: 'date_trunc(time_component: str, val: timestamp) -> timestamp'\n description: Largest `time_component` <= `val`\n url: /sql/functions/date-trunc\n\n - signature: 'date_trunc(time_component: str, val: interval) -> interval'\n description: Largest `time_component` <= `val`\n url: /sql/functions/date-trunc\n\n - signature: EXTRACT(extract_expr) -> numeric\n description: Specified time component from value\n url: /sql/functions/extract\n\n - signature: 'date_part(time_component: str, val: timestamp) -> float'\n description: Specified time component from value\n url: /sql/functions/date-part\n\n - signature: mz_now() -> mz_timestamp\n description: |\n The logical time at which a query executes. 
Used for temporal filters and query timestamp introspection\n url: /sql/functions/now_and_mz_now\n unmaterializable_unless_temporal_filter: true\n\n - signature: now() -> timestamptz\n description: 'The `timestamp with time zone` representing when the query was executed'\n url: /sql/functions/now_and_mz_now\n unmaterializable: true\n\n - signature: timestamp AT TIME ZONE zone -> timestamptz\n description: 'Converts `timestamp` to the specified time zone, expressed as an offset from UTC'\n url: /sql/functions/timezone-and-at-time-zone\n known_time_zone_limitation_cast: true\n\n - signature: timestamptz AT TIME ZONE zone -> timestamp\n description: 'Converts `timestamp with time zone` from UTC to the specified time zone, expressed as the local time'\n url: /sql/functions/timezone-and-at-time-zone\n known_time_zone_limitation_cast: true\n\n - signature: timezone(zone, timestamp) -> timestamptz\n description: 'Converts `timestamp` to specified time zone, expressed as an offset from UTC'\n url: /sql/functions/timezone-and-at-time-zone\n known_time_zone_limitation_cast: true\n\n - signature: timezone(zone, timestamptz) -> timestamp\n description: 'Converts `timestamp with time zone` from UTC to specified time zone, expressed as the local time'\n url: /sql/functions/timezone-and-at-time-zone\n known_time_zone_limitation_cast: true\n\n - signature: |-\n timezone_offset(zone: str, when: timestamptz) ->\n (abbrev: str, base_utc_offset: interval, dst_offset: interval)\n description: |\n Describes a time zone's offset from UTC at a specified moment.\n\n `zone` must be a valid IANA Time Zone Database identifier.\n\n `when` is a `timestamp with time zone` that specifies the moment at which to determine `zone`'s offset from UTC.\n\n `abbrev` is the abbreviation for `zone` that is in use at the specified moment (e.g., `EST` or `EDT`).\n\n `base_utc_offset` is the base offset from UTC at the specified moment (e.g., `-5 hours`). 
Positive offsets mean east of Greenwich; negative offsets mean west of Greenwich.\n\n `dst_offset` is the additional offset at the specified moment due to Daylight Saving Time rules (e.g., `1 hours`). If non-zero, Daylight Saving Time is in effect.\n\n - signature: 'to_timestamp(val: double precision) -> timestamptz'\n description: Converts Unix epoch (seconds since 00:00:00 UTC on January 1, 1970)\n to timestamp.\n\n - signature: 'to_char(val: timestamp, format: str)'\n description: Converts a timestamp into a string using the specified format\n url: /sql/functions/to_char\n\n - signature: 'justify_days(val: interval) -> interval'\n description: Adjust interval so 30-day time periods are represented as months\n url: /sql/functions/justify-days\n\n - signature: 'justify_hours(val: interval) -> interval'\n description: Adjust interval so 24-hour time periods are represented as days\n url: /sql/functions/justify-hours\n\n - signature: 'justify_interval(val: interval) -> interval'\n description: Adjust interval using justify_days and justify_hours, with additional sign adjustments\n url: /sql/functions/justify-interval\n\n- type: UUID\n functions:\n\n - signature: 'uuid_generate_v5(namespace: uuid, name: text) -> uuid'\n description: 'Generates a [version 5 UUID](https://www.rfc-editor.org/rfc/rfc4122#page-7) (SHA-1) in the given namespace using\n the specified input name.'\n\n- type: JSON\n functions:\n - signature: jsonb_agg(expression) -> jsonb\n description: Aggregate values (including nulls) as a jsonb array\n url: /sql/functions/jsonb_agg\n\n - signature: 'jsonb_array_elements(j: jsonb) -> Col<jsonb>'\n description: "`j`'s elements if `j` is an array"\n url: /sql/types/jsonb#jsonb_array_elements\n\n - signature: 'jsonb_array_elements_text(j: jsonb) -> Col<string>'\n description: "`j`'s elements if `j` is an array"\n url: /sql/types/jsonb#jsonb_array_elements_text\n\n - signature: 'jsonb_array_length(j: jsonb) -> int'\n description: Number of elements in `j`'s 
outermost array\n url: /sql/types/jsonb#jsonb_array_length\n\n - signature: 'jsonb_build_array(x: ...) -> jsonb'\n description: Output each element of `x` as a `jsonb` array. Elements can be of heterogeneous\n types\n url: /sql/types/jsonb#jsonb_build_array\n\n - signature: 'jsonb_build_object(x: ...) -> jsonb'\n description: The elements of x as a `jsonb` object. The argument list alternates\n between keys and values\n url: /sql/types/jsonb#jsonb_build_object\n\n - signature: 'jsonb_each(j: jsonb) -> Col<(key: string, value: jsonb)>'\n description: "`j`'s outermost elements if `j` is an object"\n url: /sql/types/jsonb#jsonb_each\n\n - signature: 'jsonb_each_text(j: jsonb) -> Col<(key: string, value: string)>'\n description: "`j`'s outermost elements if `j` is an object"\n url: /sql/types/jsonb#jsonb_each_text\n\n - signature: jsonb_object_agg(keys, values) -> jsonb\n description: Aggregate keys and values (including nulls) as a `jsonb` object\n url: /sql/functions/jsonb_object_agg\n\n - signature: 'jsonb_object_keys(j: jsonb) -> Col<string>'\n description: "`j`'s outermost keys if `j` is an object"\n url: /sql/types/jsonb#jsonb_object_keys\n\n - signature: 'jsonb_pretty(j: jsonb) -> string'\n description: Pretty printed (i.e. indented) `j`\n url: /sql/types/jsonb#jsonb_pretty\n\n - signature: 'jsonb_typeof(j: jsonb) -> string'\n description: Type of `j`'s outermost value. One of `object`, `array`, `string`,\n `number`, `boolean`, and `null`\n url: /sql/types/jsonb#jsonb_typeof\n\n - signature: 'jsonb_strip_nulls(j: jsonb) -> jsonb'\n description: "`j` with all object fields with a value of `null` removed. 
Other\n `null` values remain"\n url: /sql/types/jsonb#jsonb_strip_nulls\n\n - signature: 'to_jsonb(v: T) -> jsonb'\n description: "`v` as `jsonb`"\n url: /sql/types/jsonb#to_jsonb\n\n- type: Table\n description: Table functions evaluate to a set of rows, rather than a single expression.\n functions:\n - signature: 'generate_series(start: int, stop: int) -> Col<int>'\n description: Generate all integer values between `start` and `stop`, inclusive.\n - signature: 'generate_series(start: int, stop: int, step: int) -> Col<int>'\n description: Generate all integer values between `start` and `stop`, inclusive, incrementing\n by `step` each time.\n - signature: 'generate_series(start: timestamp, stop: timestamp, step: interval) -> Col<timestamp>'\n description: Generate all timestamp values between `start` and `stop`, inclusive, incrementing\n by `step` each time.\n - signature: 'generate_subscripts(a: anyarray, dim: int) -> Col<int>'\n description: Generates a series comprising the valid subscripts of the `dim`'th dimension of the given array `a`.\n - signature: 'regexp_extract(regex: str, haystack: str) -> Col<string>'\n description: Values of the capture groups of `regex` as matched in `haystack`.\n - signature: 'regexp_split_to_table(text: str, pattern: str [, flags: str]) -> Col<string>'\n description: |\n Splits `text` by the regular expression `pattern`.\n If `flags` is set to `i`, matches case-insensitively.\n - signature: 'unnest(a: anyarray)'\n description: Expands the array `a` into a set of rows.\n - signature: 'unnest(l: anylist)'\n description: Expands the list `l` into a set of rows.\n - signature: "unnest(m: anymap)"\n description: Expands the map `m` into a set of rows with the columns `key` and `value`.\n\n- type: Array\n functions:\n - signature: 'array_cat(a1: arrayany, a2: arrayany) -> arrayany'\n description: 'Concatenates `a1` and `a2`.'\n - signature: 'array_fill(anyelement, int[], [, int[]]) -> anyarray'\n description: 'Returns an array initialized 
with supplied value and dimensions, optionally with lower bounds other than 1.'\n - signature: 'array_length(a: arrayany, dim: bigint) -> int'\n description: 'Returns the length of the specified dimension of the array.'\n - signature: 'array_position(haystack: anycompatiblearray, needle: anycompatible) -> int'\n description: 'Returns the subscript of `needle` in `haystack`. Returns `null` if not found.'\n - signature: 'array_position(haystack: anycompatiblearray, needle: anycompatible, skip: int) -> int'\n description: 'Returns the subscript of `needle` in `haystack`, skipping the first `skip` elements. Returns `null` if not found.'\n - signature: 'array_to_string(a: anyarray, sep: text [, ifnull: text]) -> text'\n description: |\n Concatenates the elements of `array` together separated by `sep`.\n Null elements are omitted unless `ifnull` is non-null, in which case\n null elements are replaced with the value of `ifnull`.\n - signature: 'array_remove(a: anyarray, e: anyelement) -> anyarray'\n description: |\n Returns the array `a` without any elements equal to the given value `e`.\n The array must be one-dimensional. 
Comparisons are done using `IS NOT\n DISTINCT FROM` semantics, so it is possible to remove NULLs.\n\n- type: Hash\n functions:\n - signature: 'crc32(data: bytea) -> uint32'\n description: |\n Computes the 32-bit cyclic redundancy check of the given bytea `data` using the IEEE 802.3 polynomial.\n version-added: v0.114\n - signature: 'crc32(data: text) -> uint32'\n description: |\n Computes the 32-bit cyclic redundancy check of the given text `data` using the IEEE 802.3 polynomial.\n version-added: v0.114\n - signature: 'digest(data: text, type: text) -> bytea'\n description: |\n Computes a binary hash of the given text `data` using the specified `type` algorithm.\n Supported hash algorithms are: `md5`, `sha1`, `sha224`, `sha256`, `sha384`, and `sha512`.\n - signature: 'digest(data: bytea, type: text) -> bytea'\n description: |\n Computes a binary hash of the given bytea `data` using the specified `type` algorithm.\n The supported hash algorithms are the same as for the text variant of this function.\n - signature: 'hmac(data: text, key: text, type: text) -> bytea'\n description: |\n Computes a hashed MAC of the given text `data` using the specified `key` and\n `type` algorithm. Supported hash algorithms are the same as for `digest`.\n - signature: 'hmac(data: bytea, key: bytea, type: text) -> bytea'\n description: |\n Computes a hashed MAC of the given bytea `data` using the specified `key` and\n `type` algorithm. 
The supported hash algorithms are the same as for `digest`.\n - signature: 'kafka_murmur2(data: bytea) -> integer'\n description: |\n Computes the Murmur2 hash of the given bytea `data` using the seed used by Kafka's default partitioner and with the high bit cleared.\n version-added: v0.114\n - signature: 'kafka_murmur2(data: text) -> integer'\n description: |\n Computes the Murmur2 hash of the given text `data` using the seed used by Kafka's default partitioner and with the high bit cleared.\n version-added: v0.114\n - signature: 'md5(data: bytea) -> text'\n description: |\n Computes the MD5 hash of the given bytea `data`.\n For PostgreSQL compatibility, returns a hex-encoded value of type `text` rather than `bytea`.\n - signature: 'seahash(data: bytea) -> u64'\n description: |\n Computes the [SeaHash](https://docs.rs/seahash) hash of the given bytea `data`.\n version-added: v0.114\n - signature: 'seahash(data: text) -> u64'\n description: |\n Computes the [SeaHash](https://docs.rs/seahash) hash of the given text `data`.\n version-added: v0.114\n - signature: 'sha224(data: bytea) -> bytea'\n description: |\n Computes the SHA-224 hash of the given bytea `data`.\n - signature: 'sha256(data: bytea) -> bytea'\n description: |\n Computes the SHA-256 hash of the given bytea `data`.\n - signature: 'sha384(data: bytea) -> bytea'\n description: |\n Computes the SHA-384 hash of the given bytea `data`.\n - signature: 'sha512(data: bytea) -> bytea'\n description: |\n Computes the SHA-512 hash of the given bytea `data`.\n\n- type: Window\n description: |\n {{< tip >}}\n\n For some window function query patterns, rewriting your query to not use\n window functions can yield better performance. 
See [Idiomatic Materialize SQL](/transform-data/idiomatic-materialize-sql/) for details.\n\n {{</ tip >}}\n\n Window functions compute values across sets of rows related to the current row.\n For example, you can use a window aggregation to smooth measurement data by computing the average of the last 5\n measurements before every row as follows:\n\n ```\n SELECT\n avg(measurement) OVER (ORDER BY time ROWS BETWEEN 4 PRECEDING AND CURRENT ROW)\n FROM measurements;\n ```\n\n Window functions always need an `OVER` clause. For the `OVER` clause, Materialize supports the same\n [syntax as\n PostgreSQL](https://www.postgresql.org/docs/current/tutorial-window.html),\n but supports only the following frame modes:\n\n - the `ROWS` frame mode.\n\n - the default frame, which is `RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT\n ROW`.\n\n {{< note >}}\n {{% idiomatic-sql/materialize-window-functions %}}\n\n See [Idiomatic Materialize SQL](/transform-data/idiomatic-materialize-sql/)\n for examples of rewriting window functions.\n\n {{</ note >}}\n\n In addition to the below window functions, you can use the `OVER` clause with any [aggregation function](#aggregate-functions)\n (e.g., `sum`, `avg`) as well. Using an aggregation with an `OVER` clause is called a _window aggregation_. A\n window aggregation computes the aggregate not on the groups specified by the `GROUP BY` clause, but on the frames\n specified inside the `OVER` clause. (Note that a window aggregation produces exactly one output value _for each input\n row_. 
This is different from a standard aggregation, which produces one output value for each _group_ specified by\n the `GROUP BY` clause.)\n functions:\n - signature: 'dense_rank() -> int'\n description: |\n Returns the rank of the current row within its partition without gaps, counting from 1.\n Rows that compare equal will have the same rank.\n - signature: 'first_value(value anycompatible) -> anyelement'\n description: |\n Returns `value` evaluated at the first row of the window frame. The default window frame is\n `RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW`.\n\n See also [Idiomatic Materialize SQL: First value](/transform-data/idiomatic-materialize-sql/first-value/).\n\n - signature: 'lag(value anycompatible [, offset integer [, default anycompatible ]]) -> int'\n description: |\n Returns `value` evaluated at the row that is `offset` rows before the current row within the partition;\n if there is no such row, instead returns `default` (which must be of a type compatible with `value`).\n If `offset` is `NULL`, `NULL` is returned instead.\n Both `offset` and `default` are evaluated with respect to the current row.\n If omitted, `offset` defaults to 1 and `default` to `NULL`.\n\n See also [Idiomatic Materialize SQL: Lag over](/transform-data/idiomatic-materialize-sql/lag/).\n\n - signature: 'last_value(value anycompatible) -> anyelement'\n description: |\n Returns `value` evaluated at the last row of the window frame. 
The default window frame is\n `RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW`.\n\n See also [Idiomatic Materialize SQL: Last\n value](/transform-data/idiomatic-materialize-sql/last-value/).\n\n - signature: 'lead(value anycompatible [, offset integer [, default anycompatible ]]) -> int'\n description: |\n Returns `value` evaluated at the row that is `offset` rows after the current row within the partition;\n if there is no such row, instead returns `default` (which must be of a type compatible with `value`).\n If `offset` is `NULL`, `NULL` is returned instead.\n Both `offset` and `default` are evaluated with respect to the current row.\n If omitted, `offset` defaults to 1 and `default` to `NULL`.\n\n See also [Idiomatic Materialize SQL: Lead\n over](/transform-data/idiomatic-materialize-sql/lead/).\n\n - signature: 'rank() -> int'\n description: |\n Returns the rank of the current row within its partition with gaps (counting from 1):\n rows that compare equal will have the same rank, and then the rank is incremented by the number of rows that\n compared equal.\n - signature: 'row_number() -> int'\n description: |\n Returns the number of the current row within its partition, counting from 1.\n Rows that compare equal will be ordered in an unspecified way.\n\n See also [Idiomatic Materialize SQL: Top-K](/transform-data/idiomatic-materialize-sql/top-k/).\n\n\n- type: System information\n description: Functions that return information about the system.\n functions:\n - signature: 'mz_environment_id() -> text'\n description: Returns a string containing a `uuid` uniquely identifying the Materialize environment.\n unmaterializable: true\n - signature: 'mz_uptime() -> interval'\n description: Returns the length of time that the materialized process has been running.\n unmaterializable: true\n - signature: 'mz_version() -> text'\n description: Returns the server's version information as a human-readable string.\n unmaterializable: true\n - signature: 'mz_version_num() 
-> int'\n description: Returns the server's version as an integer having the format `XXYYYZZ`, where `XX` is the major version, `YYY` is the minor version and `ZZ` is the patch version.\n unmaterializable: true\n - signature: 'current_database() -> text'\n description: |\n Returns the name of the current database.\n unmaterializable: true\n - signature: 'current_catalog() -> text'\n description: |\n Alias for `current_database`.\n unmaterializable: true\n version-added: v0.102\n - signature: 'current_user() -> text'\n description: |\n Returns the name of the user who executed the containing query.\n unmaterializable: true\n - signature: 'current_role() -> text'\n description: |\n Alias for `current_user`.\n unmaterializable: true\n version-added: v0.102\n - signature: 'user() -> text'\n description: |\n Alias for `current_user`.\n unmaterializable: true\n version-added: v0.102\n - signature: 'session_user() -> text'\n description: |\n Returns the name of the user who initiated the database connection.\n unmaterializable: true\n - signature: 'mz_row_size(expr: Record) -> int'\n description: Returns the number of bytes used to store a row.\n\n- type: PostgreSQL compatibility\n description: |\n Functions whose primary purpose is to facilitate compatibility with PostgreSQL tools.\n These functions may have suboptimal performance characteristics.\n functions:\n - signature: 'format_type(oid: int, typemod: int) -> text'\n description: Returns the canonical SQL name for the type specified by `oid` with `typemod` applied.\n - signature: 'current_schema() -> text'\n description: |\n Returns the name of the first non-implicit schema on the search path, or\n `NULL` if the search path is empty.\n unmaterializable: true\n - signature: 'current_schemas(include_implicit: bool) -> text[]'\n description: |\n Returns the names of the schemas on the search path.\n The `include_implicit` parameter controls whether implicit schemas like\n `mz_catalog` and `pg_catalog` are included in 
the output.\n unmaterializable: true\n - signature: 'current_setting(setting_name: text[, missing_ok: bool]) -> text'\n description: |\n Returns the value of the named setting or error if it does not exist.\n If `missing_ok` is true, return NULL if it does not exist.\n unmaterializable: true\n - signature: 'obj_description(oid: oid, catalog: text) -> text'\n description: Returns the comment for a database object specified by its `oid` and the\n name of the containing system catalog.\n - signature: 'col_description(oid: oid, column: int) -> text'\n description: Returns the comment for a table column, which is specified by the `oid` of\n its table and its column number.\n - signature: 'pg_backend_pid() -> int'\n description: Returns the internal connection ID.\n unmaterializable: true\n - signature: 'pg_cancel_backend(connection_id: int) -> bool'\n description: |\n Cancels an in-progress query on the specified connection ID.\n Returns whether the connection ID existed (not if it cancelled a query).\n side_effecting: true\n - signature: 'pg_column_size(expr: any) -> int'\n description: Returns the number of bytes used to store any individual data value.\n - signature: 'pg_size_pretty(expr: numeric) -> text'\n description: Converts a size in bytes into a human-readable format.\n - signature: 'pg_get_constraintdef(oid: oid[, pretty: bool]) -> text'\n description: |\n Returns the constraint definition for the given `oid`. Currently always\n returns NULL since constraints aren't supported.\n - signature: 'pg_get_indexdef(index: oid[, column: integer, pretty: bool]) -> text'\n description: |\n Reconstructs the creating command for an index. (This is a decompiled\n reconstruction, not the original text of the command.) If column is\n supplied and is not zero, only the definition of that column is reconstructed.\n - signature: 'pg_get_ruledef(rule_oid: oid[, pretty bool]) -> text'\n description: |\n Reconstructs the creating command for a rule. 
This function\n always returns NULL because Materialize does not support rules.\n - signature: 'pg_get_userbyid(role: oid) -> text'\n description: |\n Returns the role (user) name for the given `oid`. If no role matches the\n specified OID, the string `unknown (OID=oid)` is returned.\n - signature: 'pg_get_viewdef(view_name: text[, pretty: bool]) -> text'\n description: Returns the underlying SELECT command for the given view.\n - signature: 'pg_get_viewdef(view_oid: oid[, pretty: bool]) -> text'\n description: Returns the underlying SELECT command for the given view.\n - signature: 'pg_get_viewdef(view_oid: oid[, wrap_column: integer]) -> text'\n description: Returns the underlying SELECT command for the given view.\n - signature: 'pg_has_role([user: name or oid,] role: text or oid, privilege: text) -> bool'\n description: Alias for `has_role` for PostgreSQL compatibility.\n - signature: 'pg_is_in_recovery() -> bool'\n description: Returns if the a recovery is still in progress.\n - signature: 'pg_table_is_visible(relation: oid) -> boolean'\n description: Reports whether the relation with the specified OID is visible in the search path.\n - signature: 'pg_tablespace_location(tablespace: oid) -> text'\n description: Returns the path in the file system that the provided tablespace is on.\n - signature: 'pg_type_is_visible(relation: oid) -> boolean'\n description: Reports whether the type with the specified OID is visible in the search path.\n - signature: 'pg_function_is_visible(relation: oid) -> boolean'\n description: Reports whether the function with the specified OID is visible in the search path.\n - signature: 'pg_typeof(expr: any) -> text'\n description: Returns the type of its input argument as a string.\n - signature: 'pg_encoding_to_char(encoding_id: integer) -> text'\n description: PostgreSQL compatibility shim. 
Not intended for direct use.\n - signature: 'pg_postmaster_start_time() -> timestamptz'\n description: Returns the time when the server started.\n unmaterializable: true\n - signature: 'pg_relation_size(relation: regclass[, fork: text]) -> bigint'\n description: |\n Disk space used by the specified fork ('main', 'fsm', 'vm', or 'init')\n of the specified table or index. If no fork is specified, it defaults\n to 'main'. This function always returns -1 because Materialize does\n not store tables and indexes on local disk.\n - signature: 'pg_stat_get_numscans(oid: oid) -> bigint'\n description: |\n Number of sequential scans done when argument is a table,\n or number of index scans done when argument is an index.\n This function always returns -1 because Materialize does\n not collect statistics.\n - signature: 'version() -> text'\n description: Returns a PostgreSQL-compatible version string.\n unmaterializable: true\n\n- type: Access privilege inquiry\n description: |\n Functions that allow querying object access privileges. None of the following functions consider\n whether the provided role is a _superuser_ or not.\n functions:\n - signature: 'has_cluster_privilege([role: text or oid,] cluster: text, privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the privilege on\n the cluster with the specified cluster name. If the role is omitted then\n the `current_role` is assumed.\n - signature: 'has_connection_privilege([role: text or oid,] connection: text or oid, privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the privilege on\n the connection with the specified connection name or OID. 
If the role is omitted then\n the `current_role` is assumed.\n - signature: 'has_database_privilege([role: text or oid,] database: text or oid, privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the privilege on\n the database with the specified database name or OID. If the role is omitted then\n the `current_role` is assumed.\n - signature: 'has_schema_privilege([role: text or oid,] schema: text or oid, privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the privilege on\n the schema with the specified schema name or OID. If the role is omitted then\n the `current_role` is assumed.\n - signature: 'has_role([user: name or oid,] role: text or oid, privilege: text) -> bool'\n description: |\n Reports whether the `user` has the privilege for `role`. `privilege` can either be `MEMBER`\n or `USAGE`, however currently this value is ignored. The `PUBLIC` pseudo-role cannot be used\n for the `user` nor the `role`. If the `user` is omitted then the `current_role` is assumed.\n - signature: 'has_secret_privilege([role: text or oid,] secret: text or oid, privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the privilege on\n the secret with the specified secret name or OID. If the role is omitted then\n the `current_role` is assumed.\n - signature: 'has_system_privilege([role: text or oid,] privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the system privilege.\n If the role is omitted then the `current_role` is assumed.\n - signature: 'has_table_privilege([role: text or oid,] relation: text or oid, privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the privilege on\n the relation with the specified relation name or OID. 
If the role is omitted then\n the `current_role` is assumed.\n - signature: 'has_type_privilege([role: text or oid,] type: text or oid, privilege: text) -> bool'\n description: |\n Reports whether the role with the specified role name or OID has the privilege on\n the type with the specified type name or OID. If the role is omitted then\n the `current_role` is assumed.\n - signature: 'mz_is_superuser() -> bool'\n description: Reports whether the `current_role` is a superuser.\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\sql_funcs.yml
sql_funcs.yml
YAML
51,979
0.75
0.091153
0
vue-tools
836
2024-04-13T06:23:31.631406
GPL-3.0
false
00f304df5618d369fb6cc6a88e8ede5d
columns:\n - column: "Resource Type"\n - column: "Files"\nrows:\n - "Resource Type": "Container Logs"\n "Files": |\n - `logs-stdout.txt`\n - `logs-stderr.txt`\n\n - "Resource Type": "Container Inspection"\n "Files": |\n - `inspect.txt`\n\n - "Resource Type": "Container Stats"\n "Files": |\n - `stats.txt`\n\n - "Resource Type": "Container Processes"\n "Files": |\n - `top.txt`\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\mz-debug\docker_resource_files.yml
docker_resource_files.yml
YAML
402
0.7
0
0
python-kit
898
2025-06-24T09:33:15.870870
GPL-3.0
false
ba8bc3cbd9efc90bd8f26cfc95f58f24
columns:\n - column: "Option"\n - column: "Description"\nrows:\n - Option: "`--dump-docker <boolean>`"\n Description: |\n\n <a name="dump-docker"></a> If `true`, dump debug information from the Docker container.\n\n Defaults to `true`.\n\n - Option: "`--docker-container-id <ID>`"\n Description: |\n\n <a name="docker-container-id"></a> The Docker container to dump.\n\n Required if [`--dump-docker`](#dump-docker) is true.\n\n - Option: "`--mz-connection-url <URL>`"\n Description: |\n\n <a name="mz-connection-url"></a>The URL of the Materialize's SQL\n connection.\n\n Defaults to `postgres://127.0.0.1:6875/materialize?sslmode=prefer`.\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\mz-debug\emulator_options.yml
emulator_options.yml
YAML
665
0.8
0.04
0
awesome-app
64
2024-08-20T10:44:18.107117
BSD-3-Clause
false
dded79c907035019d12b89008c0a3d19
columns:\n - column: "Resource Type"\n - column: "Files"\nrows:\n - "Resource Type": "Workloads"\n "Files": |\n - `pods/{namespace}/*.yaml`\n - `logs/{namespace}/{pod}.current.log`\n - `logs/{namespace}/{pod}.previous.log`\n - `deployments/{namespace}/*.yaml`\n - `statefulsets/{namespace}/*.yaml`\n - `replicasets/{namespace}/*.yaml`\n - `events/{namespace}/*.yaml`\n - `materializes/{namespace}/*.yaml`\n\n - "Resource Type": "Networking"\n "Files": |\n - `services/{namespace}/*.yaml`\n - `networkpolicies/{namespace}/*.yaml`\n - `certificates/{namespace}/*.yaml`\n\n - "Resource Type": "Storage"\n "Files": |\n - `persistentvolumes/*.yaml`\n - `persistentvolumeclaims/{namespace}/*.yaml`\n - `storageclasses/*.yaml`\n\n - "Resource Type": "Configuration"\n "Files": |\n - `roles/{namespace}/*.yaml`\n - `rolebinding/{namespace}/*.yaml`\n - `configmaps/{namespace}/*.yaml`\n - `secrets/{namespace}/*.yaml`\n - `serviceaccounts/{namespace}/*.yaml`\n\n - "Resource Type": "Cluster-level"\n "Files": |\n - `nodes/*.yaml`\n - `daemonsets/*.yaml`\n - `mutatingwebhookconfigurations/{namespace}/*.yaml`\n - `validatingwebhookconfigurations/{namespace}/*.yaml`\n - `customresourcedefinitions/*.yaml`\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\mz-debug\kubernetes_resource_files.yml
kubernetes_resource_files.yml
YAML
1,297
0.8
0
0
vue-tools
181
2024-12-19T23:57:17.195792
MIT
false
e82ea61cc08b2844a4b36b7a95a17cbf
columns:\n - column: "Option"\n - column: "Description"\nrows:\n - Option: "`--dump-system-catalog <boolean>`"\n Description: |\n\n <a name="dump-system-catalog"></a> If `true`, dump the system catalog from\n your Materialize instance.\n\n Defaults to `true`.\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\mz-debug\mz_debug_option.yml
mz_debug_option.yml
YAML
271
0.7
0
0
node-utils
710
2023-12-26T16:16:26.826182
BSD-3-Clause
false
74de1cdcd44dcec2cef3295396e5de78
columns:\n - column: "Option"\n - column: "Description"\nrows:\n - Option: "`--dump-k8s`"\n Description: |\n\n <a name="dump-k8s"></a>\n If `true`, dump debug information from the Kubernetes cluster.\n\n Defaults to `true`.\n\n - Option: "`--k8s-namespace <NAMESPACE>`"\n Description: |\n\n <a name="k8s-namespace"></a> The namespaces to dump. Specify multiple\n times to dump multiple namespaces.\n\n Required if [`--dump-k8s`](#dump-k8s) is true.\n\n - Option: "`--k8s-context <CONTEXT>`"\n Description: |\n\n <a name="k8s-context"></a> The Kubernetes context to use.\n\n Defaults to the `KUBERNETES_CONTEXT` environment variable.\n\n - Option: "`--k8s-dump-secret-values`"\n Description: |\n\n <a name="k8s-dump-secret-values"></a> If `true`, include unredacted secrets in the\n dump. Use with caution.\n\n Defaults to `false`.\n\n - Option: "`--auto-port-forward`"\n Description: |\n\n <a name="auto-port-forward"></a>If `true`, automatically port-forward the\n external SQL port.\n\n Defaults to `true`.\n\n - Option: "`--port-forward-local-address <IP address>`"\n Description: |\n\n <a name="port-forward-local-address"></a> The address to listen on for\n port-forwarding.\n\n Defaults to `127.0.0.1`.\n\n - Option: "`--port-forward-local-port <PORT>`"\n Description: |\n\n <a name="port-forward-local-port"></a> The port to listen on for\n port-forwarding.\n\n Defaults to `6875`.\n\n - Option: "`--mz-connection-url <URL>`"\n Description: |\n\n <a name="mz-connection-url"></a>The Materialize instance's [PostgreSQL\n connection\n URL](https://www.postgresql.org/docs/14/libpq-connect.html#LIBPQ-CONNSTRING).\n\n Defaults to a connection URL constructed from:\n\n [`--port-forward-local-address`](#port-forward-local-address) and\n [`--port-forward-local-port`](#port-forward-local-port) values.\n\n - Option: "`-h`, `--help`"\n Description: |\n\n <a name="help"></a> Print help information.\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\mz-debug\self_managed_options.yml
self_managed_options.yml
YAML
2,000
0.8
0.04
0
node-utils
789
2025-01-09T21:07:33.653955
MIT
false
4a5d9bfbb73d7cf26a3373802df0aee3
parameters:\n - parameter: clusterd.nodeSelector\n description: |\n Node selector to use for clusterd pods spawned by the operator\n default: "{}"\n - parameter: environmentd.nodeSelector\n description: |\n Node selector to use for environmentd pods spawned by the operator\n default: "{}"\n\n - parameter: networkPolicies.egress\n description: egress from Materialize pods to sources and sinks\n default: |\n {"cidrs":["0.0.0.0/0"],"enabled":false}\n\n - parameter: networkPolicies.enabled\n description: |\n Whether to enable network policies for securing communication between pods\n default: false\n\n - parameter: networkPolicies.ingress\n description: |\n Ingress to the SQL and HTTP interfaces on environmentd or balancerd\n default: |\n {"cidrs":["0.0.0.0/0"],"enabled":false}\n\n - parameter: networkPolicies.internal\n description: |\n Internal communication between Materialize pods\n default: |\n {"enabled":false}\n\n - parameter: observability.enabled\n description: ""\n default: true\n\n - parameter: observability.podMetrics.enabled\n description: |\n Whether to enable the pod metrics scraper which populates the Environment Overview Monitoring tab in the web console (requires metrics-server to be installed)\n default: false\n\n - parameter: observability.prometheus.scrapeAnnotations.enabled\n description: |\n Whether to annotate pods with common keys used for prometheus scraping.\n default: true\n\n - parameter: operator.args.enableInternalStatementLogging\n description: ""\n default: true\n\n - parameter: operator.args.startupLogFilter\n description: Log filtering settings for startup logs\n default: INFO,mz_orchestratord=TRACE\n\n - parameter: operator.cloudProvider.providers.aws.accountID\n description: When using AWS, accountID is required\n default: ""\n\n - parameter: operator.cloudProvider.providers.aws.enabled\n description: ""\n default: false\n\n - parameter: operator.cloudProvider.providers.aws.iam.roles.connection\n description: ARN for CREATE 
CONNECTION feature\n default: ""\n\n - parameter: operator.cloudProvider.providers.aws.iam.roles.environment\n description: ARN of the IAM role for environmentd\n default: ""\n\n - parameter: operator.cloudProvider.providers.gcp\n description: GCP Configuration (placeholder for future use)\n default: |\n {"enabled":false}\n\n - parameter: operator.cloudProvider.region\n description: Common cloud provider settings\n default: |\n kind\n\n - parameter: operator.cloudProvider.type\n description: ""\n default: local\n\n - parameter: operator.clusters.defaultSizes.analytics\n description: ""\n default: 25cc\n\n - parameter: operator.clusters.defaultSizes.catalogServer\n description: ""\n default: 50cc\n\n - parameter: operator.clusters.defaultSizes.default\n description: ""\n default: 25cc\n\n - parameter: operator.clusters.defaultSizes.probe\n description: ""\n default: mz_probe\n\n - parameter: operator.clusters.defaultSizes.support\n description: ""\n default: 25cc\n\n - parameter: operator.clusters.defaultSizes.system\n description: ""\n default: 25cc\n\n - parameter: operator.features.authentication\n description: |\n Whether to enable environmentd rbac checks.\n\n *Not yet supported in the helm chart*\n default: false\n\n - parameter: operator.features.consoleImageTagMapOverride\n description: |\n Override the mapping of environmentd versions to console versions\n default: |\n {}\n\n - parameter: operator.features.createBalancers\n description: |\n Flag to indicate whether to create balancerd pods for the environments\n default: true\n\n - parameter: operator.features.createConsole\n description: |\n Flag to indicate whether to create console pods for the environments\n default: true\n\n - parameter: operator.image.pullPolicy\n description: |\n Policy for pulling the image: "IfNotPresent" avoids unnecessary re-pulling of images\n default: IfNotPresent\n\n - parameter: operator.image.repository\n description: |\n The Docker repository for the operator image\n 
default: materialize/orchestratord\n\n - parameter: operator.image.tag\n description: |\n The tag/version of the operator image to be used\n default: v0.127.0\n\n - parameter: operator.nodeSelector\n description: ""\n default: |\n {}\n\n - parameter: operator.resources.limits\n description: |\n Resource limits for the operator's CPU and memory\n default: |\n {"memory":"512Mi"}\n\n - parameter: operator.resources.requests\n description: |\n Resources requested by the operator for CPU and memory\n default: |\n {"cpu":"100m","memory":"512Mi"}\n\n - parameter: rbac.create\n description: |\n Whether to create necessary RBAC roles and bindings\n default: true\n\n - parameter: serviceAccount.create\n description: |\n Whether to create a new service account for the operator\n default: true\n\n - parameter: serviceAccount.name\n description: |\n The name of the service account to be created\n default: orchestratord\n\n - parameter: storage.storageClass.allowVolumeExpansion\n description: ""\n default: false\n\n - parameter: storage.storageClass.create\n description: |\n Set to false to use an existing StorageClass instead\n default: false\n\n - parameter: storage.storageClass.name\n description: |\n Name of the StorageClass to create/use: e.g.,"openebs-lvm-instance-store-ext4"\n default: ""\n\n - parameter: storage.storageClass.parameters\n description: Parameters for the CSI driver\n default: |\n {"fsType":"ext4","storage":"lvm","volgroup":"instance-store-vg"}\n\n - parameter: storage.storageClass.provisioner\n description: |\n CSI driver to use, eg "local.csi.openebs.io"\n default: ""\n\n - parameter: storage.storageClass.reclaimPolicy\n description: ""\n default: Delete\n\n - parameter: storage.storageClass.volumeBindingMode\n description: ""\n default: WaitForFirstConsumer\n\n - parameter: telemetry.enabled\n description: ""\n default: true\n\n - parameter: telemetry.segmentApiKey\n description: ""\n default: hMWi3sZ17KFMjn2sPWo9UJGpOQqiba4A\n\n - parameter: 
telemetry.segmentClientSide\n description: ""\n default: true\n\n - parameter: tls.defaultCertificateSpecs\n description: ""\n default: |\n {}\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\data\self_managed\materialize_operator_chart_parameter.yml
materialize_operator_chart_parameter.yml
YAML
6,396
0.85
0.070796
0.005587
python-kit
144
2025-04-08T09:09:17.943722
MIT
false
f90dd7e40aa2549bf341c67453e57473
openapi: 3.0.3\ninfo:\n title: materialize\n version: 0.1.0\npaths:\n /api/sql:\n post:\n summary: execute sql statement(s)\n parameters:\n - in: query\n name: options\n schema:\n type: string\n description: |-\n A JSON object containing configuration parameters.\n\n A simple example:\n {"application_name":"example_app","cluster":"quickstart"}\n\n Refer to the documentation for all available configuration parameters:\n https://materialize.com/docs/sql/show/#other-configuration-parameters\n\n requestBody:\n content:\n application/json:\n schema:\n oneOf:\n - $ref: "#/components/schemas/SimpleRequest"\n - $ref: "#/components/schemas/ExtendedRequest"\n required: true\n responses:\n "200":\n description: Successful operation\n content:\n application/json:\n schema:\n $ref: "#/components/schemas/Response"\n "422":\n description: Invalid body or parameters\n content:\n text/plain:\n schema:\n type: string\n "400":\n description: Invalid sql supplied\n content:\n text/plain:\n schema:\n type: string\n security:\n - tokenAuth: []\ncomponents:\n schemas:\n Response:\n type: object\n properties:\n results:\n type: object\n oneOf:\n - $ref: "#/components/schemas/ErrorResponse"\n - $ref: "#/components/schemas/NoticeResponse"\n - $ref: "#/components/schemas/QueryResponse"\n QueryResponse:\n type: object\n properties:\n tag:\n type: string\n rows:\n type: array\n items:\n type: object\n desc:\n type: array\n items:\n $ref: "#/components/schemas/Column"\n NoticeResponse:\n type: object\n properties:\n ok:\n type: string\n notices:\n type: array\n items:\n $ref: "#/components/schemas/Notice"\n ErrorResponse:\n type: object\n properties:\n error:\n type: string\n notices:\n type: array\n items:\n $ref: "#/components/schemas/Notice"\n Column:\n type: object\n properties:\n name:\n type: string\n type_oid:\n type: number\n format: u32\n type_len:\n type: number\n format: i16\n type_mod:\n type: number\n format: i32\n Error:\n type: object\n properties:\n message:\n type: string\n 
code:\n type: string\n detail:\n type: string\n nullable: true\n hint:\n type: string\n nullable: true\n Notice:\n type: object\n properties:\n message:\n type: string\n severity:\n type: string\n detail:\n type: string\n nullable: true\n hint:\n type: string\n nullable: true\n SimpleRequest:\n type: object\n properties:\n query:\n type: string\n Query:\n type: object\n properties:\n query:\n type: string\n params:\n type: array\n nullable: true\n items:\n nullable: true\n type: string\n ExtendedRequest:\n type: object\n properties:\n queries:\n type: array\n items:\n $ref: "#/components/schemas/Query"\n securitySchemes:\n tokenAuth:\n type: http\n scheme: bearer\n
dataset_sample\yaml\MaterializeInc_materialize\doc\user\static\materialize-openapi.yml
materialize-openapi.yml
YAML
3,690
0.95
0.00625
0
python-kit
345
2024-04-02T02:46:01.132424
MIT
false
910b32f10f3433e5cb67d54cebb5a2d6
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License in the LICENSE file at the\n# root of this repository, or online at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nname: dbt-materialize\n
dataset_sample\yaml\MaterializeInc_materialize\misc\dbt-materialize\mzbuild.yml
mzbuild.yml
YAML
702
0.95
0.0625
0.933333
node-utils
109
2023-08-19T16:04:29.799289
Apache-2.0
false
60c998d9a21ccfd6b29107be4fd253ea
# Copyright 2020 Josh Wills. All rights reserved.\n# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License in the LICENSE file at the\n# root of this repository, or online at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nname: dbt_materialize\nversion: 1.0\nconfig-version: 2\n\nmacro-paths: ["macros"]\n
dataset_sample\yaml\MaterializeInc_materialize\misc\dbt-materialize\dbt\include\materialize\dbt_project.yml
dbt_project.yml
YAML
808
0.95
0.047619
0.789474
react-lib
204
2023-08-22T20:04:40.038118
BSD-3-Clause
false
04b26baa16ffd276037f75b4cae8a5e7
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License in the LICENSE file at the\n# root of this repository, or online at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nname: '{project_name}'\nversion: '1.0.0'\nconfig-version: 2\n\nprofile: '{profile_name}'\n\nmodel-paths: ["models"]\n\nclean-targets:\n - "target"\n - "dbt_packages"\n\ndata_tests:\n {project_name}:\n +store_failures: true\n +schema: 'etl_failure'\n
dataset_sample\yaml\MaterializeInc_materialize\misc\dbt-materialize\dbt\include\starter_project\dbt_project.yml
dbt_project.yml
YAML
922
0.95
0.032258
0.538462
node-utils
830
2024-03-09T14:32:23.251456
GPL-3.0
false
be7590dc766eac151e4049707484e59e
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License in the LICENSE file at the\n# root of this repository, or online at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nversion: 2\n\n# https://materialize.com/docs/get-started/quickstart/\n\nmodels:\n - name: fraud_activity\n description: "Detects when a user wins an auction as a bidder, and then is identified as a seller for an item at a higher price."\n columns:\n - name: seller\n description: "The seller for an auction"\n data_tests:\n - unique\n - not_null\n - name: seller_item\n description: "The name of the seller item"\n - name: seller_amount\n description: "The bid amount of the seller"\n - name: buyer_item\n description: "The name of the buyer item"\n - name: buyer_amount\n description: "The bid amount of the buyer"\n\n - name: funds_movement\n description: "Tracks the credits and debits of all winning bids."\n columns:\n - name: id\n description: "The id of the buyer or seller"\n data_tests:\n - not_null\n - name: credits\n description: "Credit from an auction"\n - name: debits\n description: "Debit from an auction"\n\n - name: winning_bids\n description: "joins data from `auctions` and `bids` to get the bid with the highest `amount` for each auction at its `end_time`."\n columns:\n - name: id\n description: "The primary key of the auction"\n data_tests:\n - unique\n - not_null\n - name: buyer\n description: "The id of the buyer"\n - name: auction_id\n description: "The id of the auction"\n - 
name: amount\n description: "The bid amount"\n - name: bid_time\n description: "The time the bid was executed"\n - name: item\n description: "The name of the item"\n - name: seller\n description: "The id of the seller"\n\n - name: my_first_dbt_model\n description: "A starter dbt model"\n columns:\n - name: id\n description: "The primary key for this table"\n data_tests:\n - unique\n - not_null\n\n - name: my_second_dbt_model\n description: "A starter dbt model"\n columns:\n - name: id\n description: "The primary key for this table"\n data_tests:\n - unique\n - not_null\n
dataset_sample\yaml\MaterializeInc_materialize\misc\dbt-materialize\dbt\include\starter_project\models\example\schema.yml
schema.yml
YAML
2,847
0.95
0.068966
0.1875
vue-tools
83
2024-07-21T10:17:22.370286
BSD-3-Clause
false
83af32956bf21ea5a8b10899fadbf8ca
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the "License");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License in the LICENSE file at the\n# root of this repository, or online at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an "AS IS" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nsources:\n - name: auction\n schema: "{{ target.schema }}"\n tables:\n - name: organizations\n - name: users\n - name: accounts\n - name: auctions\n - name: bids\n
dataset_sample\yaml\MaterializeInc_materialize\misc\dbt-materialize\dbt\include\starter_project\models\example\sources\sources.yml
sources.yml
YAML
866
0.95
0.041667
0.608696
node-utils
230
2024-11-01T05:14:39.108225
GPL-3.0
false
cefe7837b7459f8351e4e9d9625351d8
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: bazel\npublish: true\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\bazel\mzbuild.yml
mzbuild.yml
YAML
403
0.8
0
0.8
awesome-app
876
2023-07-21T06:47:01.565039
Apache-2.0
false
a737bb507a285f18a79fef2073fa082f
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: cli\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\cli\mzbuild.yml
mzbuild.yml
YAML
387
0.8
0
0.888889
vue-tools
847
2023-09-26T10:59:40.914912
BSD-3-Clause
false
94d20a609d34075c9aa17d31e71d191f
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: debezium\ndescription: Kafka Connect with Debezium and Confluent Avro converters.\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\debezium\mzbuild.yml
mzbuild.yml
YAML
464
0.8
0
0.8
node-utils
595
2025-01-25T01:47:20.056046
GPL-3.0
false
de98a684d4331b2ca0e1fcbe3f8e9d7c
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: fivetran-destination-tester\ndescription: Tester for the Fivetran Destination\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\fivetran-destination-tester\mzbuild.yml
mzbuild.yml
YAML
460
0.8
0.090909
0.8
vue-tools
641
2025-01-26T18:41:12.570008
Apache-2.0
true
130f5e8c52435effa3a118da865ddaad
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: frontegg-mock\ndescription: Frontegg mock server.\npre-image:\n - type: cargo-build\n bin: [mz-frontegg-mock]\n bazel-bin: "@//src/frontegg-mock:mz_frontegg_mock_bin"\n strip: false\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\frontegg-mock\mzbuild.yml
mzbuild.yml
YAML
569
0.8
0
0.533333
awesome-app
680
2025-05-31T15:08:25.265437
MIT
false
5bf874a0f993e7e9681c8d7a6f0f788e
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: jobs\npre-image:\n - type: cargo-build\n bin:\n - persistcli\n - mz-catalog-debug\n bazel-bin:\n persistcli: "@//src/persist-cli:persistcli"\n mz-catalog-debug: "@//src/catalog-debug:mz_catalog_debug"\n strip: false\npublish: true\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\jobs\mzbuild.yml
mzbuild.yml
YAML
634
0.8
0
0.421053
react-lib
219
2024-09-01T11:04:19.090010
BSD-3-Clause
false
c21cfc68cc96408b9f0c3ee385eaf550
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: materialized-base\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\materialized-base\mzbuild.yml
mzbuild.yml
YAML
401
0.8
0
0.888889
react-lib
144
2024-03-06T01:44:15.874074
MIT
false
5315359d17e5a158e906e448d60305d1
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: mysql-client\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\mysql-client\mzbuild.yml
mzbuild.yml
YAML
396
0.8
0
0.888889
vue-tools
688
2024-11-06T19:16:50.263949
Apache-2.0
false
64153d3c4dc290c9d3344ea8c83a0633
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: mz\ndescription: The CLI for Materialize.\nmainline: false\npre-image:\n - type: cargo-build\n bin: [mz]\n bazel-bin:\n mz: "@//src/mz:mz_bin"\n strip: false\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\mz\mzbuild.yml
mzbuild.yml
YAML
548
0.8
0.055556
0.470588
node-utils
499
2024-02-12T02:27:58.606223
GPL-3.0
false
ee3d09c94dbb4623685398f612cfa6d8
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: prod-base\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\prod-base\mzbuild.yml
mzbuild.yml
YAML
393
0.8
0
0.888889
python-kit
572
2024-06-02T06:35:32.926646
GPL-3.0
false
8231fa279980b5f6f377a2de47cc01d9
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: psql\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\psql\mzbuild.yml
mzbuild.yml
YAML
388
0.8
0
0.888889
awesome-app
375
2023-09-03T06:29:40.392835
Apache-2.0
false
e467df10d924bccb6060431f8df9c760
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: ubuntu-base\n
dataset_sample\yaml\MaterializeInc_materialize\misc\images\ubuntu-base\mzbuild.yml
mzbuild.yml
YAML
395
0.8
0
0.888889
vue-tools
127
2025-03-23T23:17:17.050311
Apache-2.0
false
ac58d125e2e76a485f2a5a70baa1c033
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nglobal:\n scrape_interval: 5s\nscrape_configs:\n - job_name: environmentd\n static_configs:\n - targets: [host.docker.internal:6878]\n labels:\n namespace: local\n pod: environmentd-0\n - job_name: services\n file_sd_configs:\n - files:\n - /mnt/services/*.json\n refresh_interval: 5s\n relabel_configs:\n # Rewrite references to 127.0.0.1 or 0.0.0.0 to host.docker.internal,\n # since the services are running on the host.\n - source_labels: [__address__]\n target_label: __address__\n regex: (127\.0\.0\.1|0\.0\.0\.0)(.*)\n replacement: host.docker.internal$2\n action: replace\n # The process orchestrator emits static configurations for all ports, but\n # only the "internal-http" port serves metrics. Filter out other configs,\n # to avoid scrape failures in the Prometheus UI.\n - source_labels: [mz_orchestrator_port]\n regex: internal-http\n action: keep\n # Construct namespace and pod labels that are similar to the label that\n # Kubernetes installs, so that production dashboards work without changes\n # when running locally.\n - target_label: namespace\n replacement: local\n - source_labels: [mz_orchestrator_namespace, mz_orchestrator_service_id]\n separator: "-"\n target_label: pod\n replacement: $1-0\n - job_name: prometheus\n static_configs:\n - targets: [ localhost:9090 ]\n - job_name: tempo\n static_configs:\n - targets: [ tempo:3200 ]\n
dataset_sample\yaml\MaterializeInc_materialize\misc\monitoring\prometheus.yml
prometheus.yml
YAML
1,891
0.8
0.019231
0.313725
python-kit
187
2024-07-04T00:02:35.025856
GPL-3.0
false
c95e03b383457c1a580a6bf59147aaa9
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\napiVersion: 1\ndatasources:\n - name: Prometheus\n type: prometheus\n url: http://prometheus:9090\n jsonData:\n timeInterval: 5s\n
dataset_sample\yaml\MaterializeInc_materialize\misc\monitoring\grafana\datasources\prometheus.yml
prometheus.yml
YAML
515
0.8
0
0.533333
python-kit
19
2024-12-30T14:48:52.605536
GPL-3.0
false
dac4678a1a4d32cf95623cd3b92e5631
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\napiVersion: 1\ndatasources:\n - name: Prometheus\n type: prometheus\n url: http://prometheus:9090\n
dataset_sample\yaml\MaterializeInc_materialize\misc\mzcompose\grafana\datasources\prometheus.yml
prometheus.yml
YAML
478
0.8
0
0.615385
vue-tools
592
2024-10-19T11:25:19.183968
BSD-3-Clause
false
2871b70690942fb5227e4817af1e0528
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nglobal:\n scrape_interval: 15s\nscrape_configs:\n - job_name: environmentd\n static_configs:\n - targets: [materialized:6878]\n labels:\n namespace: local\n pod: environmentd-0\n - job_name: services\n file_sd_configs:\n - files:\n - /mnt/mzdata/prometheus/*.json\n refresh_interval: 30s\n relabel_configs:\n # Rewrite references to 127.0.0.1 or 0.0.0.0 to materialized,\n # since the services are running in that container.\n - source_labels: [__address__]\n target_label: __address__\n regex: (127\.0\.0\.1|0\.0\.0\.0)(.*)\n replacement: materialized$2\n action: replace\n # The process orchestrator emits static configurations for all ports, but\n # only the "internal-http" port serves metrics. Filter out other configs,\n # to avoid scrape failures in the Prometheus UI.\n - source_labels: [mz_orchestrator_port]\n regex: internal-http\n action: keep\n # Construct namespace and pod labels that are similar to the label that\n # Kubernetes installs, so that production dashboards work without changes\n # when running locally.\n - target_label: namespace\n replacement: local\n - source_labels: [mz_orchestrator_namespace, mz_orchestrator_service_id]\n separator: "-"\n target_label: pod\n replacement: $1-0\n\n - job_name: 'cockroachdb'\n metrics_path: '/_status/vars'\n # Insecure mode:\n scheme: 'http'\n # Secure mode:\n # scheme: 'https'\n tls_config:\n insecure_skip_verify: true\n\n static_configs:\n - targets: ['cockroach:8080']\n labels:\n cluster: cockroach\n
dataset_sample\yaml\MaterializeInc_materialize\misc\mzcompose\prometheus\prometheus.yml
prometheus.yml
YAML
2,020
0.8
0.016667
0.333333
react-lib
356
2024-03-21T02:34:49.125642
GPL-3.0
false
3dab0aeb2232926b668526d11b4d8672
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: balancerd\ndescription: Ingress balancer.\npre-image:\n - type: cargo-build\n bin: [balancerd]\n bazel-bin: "@//src/balancerd:balancerd"\n split_debuginfo: true\n
dataset_sample\yaml\MaterializeInc_materialize\src\balancerd\ci\mzbuild.yml
mzbuild.yml
YAML
548
0.8
0
0.533333
python-kit
384
2025-01-24T21:20:05.089874
BSD-3-Clause
false
5c5c008b21a8a4b2c0bec68f7a35406e
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: clusterd\npre-image:\n - type: cargo-build\n bin: clusterd\n bazel-bin: "@//src/clusterd:clusterd"\n
dataset_sample\yaml\MaterializeInc_materialize\src\clusterd\ci\mzbuild.yml
mzbuild.yml
YAML
485
0.8
0
0.615385
awesome-app
897
2023-09-29T20:41:58.772097
MIT
false
57bc390a1ed915c2506d0a85a605b030
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: environmentd\npre-image:\n - type: cargo-build\n bin: environmentd\n bazel-bin: "@//src/environmentd:environmentd"\n
dataset_sample\yaml\MaterializeInc_materialize\src\environmentd\ci\mzbuild.yml
mzbuild.yml
YAML
501
0.8
0
0.615385
react-lib
339
2025-03-16T17:47:32.323657
MIT
false
0920af58280a64f72bb199dac268ddbc
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: fivetran-destination\npre-image:\n - type: cargo-build\n bin: mz-fivetran-destination\n bazel-bin: "@//src/fivetran-destination:mz_fivetran_destination_bin"\n
dataset_sample\yaml\MaterializeInc_materialize\src\fivetran-destination\ci\mzbuild.yml
mzbuild.yml
YAML
543
0.8
0
0.615385
vue-tools
727
2024-02-18T23:07:27.792489
GPL-3.0
false
224c0f64d9a380c0fe678210482adad8
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: kgen\npre-image:\n - type: cargo-build\n bin: kgen\n bazel-bin: "@//src/kafka-util:kgen"\n
dataset_sample\yaml\MaterializeInc_materialize\src\kafka-util\ci\mzbuild.yml
mzbuild.yml
YAML
475
0.8
0
0.615385
react-lib
75
2024-01-08T04:10:19.665693
GPL-3.0
false
58ce8f64e06e1266c1ac11fc9dc98f9c
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: materialized\ndescription: Materialize is a fast, distributed SQL database built on streaming internals.\npre-image:\n - type: cargo-build\n bin: [ materialized ]\n bazel-bin:\n materialized: "@//src/materialized:materialized"\n
dataset_sample\yaml\MaterializeInc_materialize\src\materialized\ci\mzbuild.yml
mzbuild.yml
YAML
616
0.8
0
0.533333
awesome-app
128
2023-10-27T20:42:37.817188
BSD-3-Clause
false
a2f7631303452a12535aaa3eefc9d711
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: orchestratord\npre-image:\n - type: cargo-build\n bin: orchestratord\n bazel-bin: "@//src/orchestratord:orchestratord"\n
dataset_sample\yaml\MaterializeInc_materialize\src\orchestratord\ci\mzbuild.yml
mzbuild.yml
YAML
505
0.8
0
0.615385
awesome-app
27
2025-01-30T10:23:49.711452
BSD-3-Clause
false
e2d0fbd268830fb85d2d9f4877eb8ac8
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: maelstrom-persist\npre-image:\n - type: cargo-build\n bin: persistcli\n bazel-bin: "@//src/persist-cli:persistcli"\n strip: false\n
dataset_sample\yaml\MaterializeInc_materialize\src\persist-cli\ci\mzbuild.yml
mzbuild.yml
YAML
518
0.8
0
0.571429
python-kit
701
2024-02-01T20:37:07.725429
GPL-3.0
false
186fc3a908ae86de55672664f15670ca
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: maelstrom-persist-base\n
dataset_sample\yaml\MaterializeInc_materialize\src\persist-cli\ci-base\mzbuild.yml
mzbuild.yml
YAML
406
0.8
0
0.888889
python-kit
826
2023-10-23T17:41:06.076534
GPL-3.0
false
63db65a03c6760f55fe1b6154af51eda
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: sqllogictest\npre-image:\n - type: cargo-build\n bin: [clusterd, sqllogictest]\n bazel-bin:\n clusterd: "@//src/clusterd:clusterd"\n sqllogictest: "@//src/sqllogictest:sqllogictest"\n
dataset_sample\yaml\MaterializeInc_materialize\src\sqllogictest\ci\mzbuild.yml
mzbuild.yml
YAML
576
0.8
0
0.533333
react-lib
885
2024-08-16T21:53:55.399373
Apache-2.0
true
0f02e4bcf1d933743faaa49538019278
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: testdrive\npre-image:\n - type: cargo-build\n bin: testdrive\n bazel-bin: "@//src/testdrive:testdrive"\n extract:\n protobuf-src:\n bin: protobuf-bin\n include: protobuf-include\n bazel-tar:\n "@//src/testdrive/ci:testdrive_image_extras": "."\n
dataset_sample\yaml\MaterializeInc_materialize\src\testdrive\ci\mzbuild.yml
mzbuild.yml
YAML
653
0.8
0
0.421053
node-utils
34
2023-12-25T14:12:02.418472
GPL-3.0
true
f8636772bcb96e91ff8d488c28393521
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: testdrive-base\n
dataset_sample\yaml\MaterializeInc_materialize\src\testdrive\ci-base\mzbuild.yml
mzbuild.yml
YAML
398
0.8
0
0.888889
node-utils
780
2024-08-18T14:50:01.267860
MIT
true
4b453c7c519f933cfd56e720d07a2130
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: 'canary_environment'\nversion: '1.0.0'\nconfig-version: 2\n\nprofile: 'canary_environment'\n\nmacro-paths: ["macros"]\nmodel-paths: ["models"]\ntest-paths: ["tests"]\n\ntarget-path: "target" # directory which will store compiled SQL files\nclean-targets: # directories to be removed by `dbt clean`\n - "target"\n - "dbt_packages"\n\nmodels:\n canary_environment:\n loadgen:\n schema: loadgen\n post-hook: "GRANT ALL PRIVILEGES ON TABLE {{ this }} TO \"infra+bot@materialize.com\", \"infra+qacanaryload@materialize.io\""\n tpch:\n schema: tpch\n post-hook: "GRANT ALL PRIVILEGES ON TABLE {{ this }} TO \"infra+bot@materialize.com\", \"infra+qacanaryload@materialize.io\""\n pg_cdc:\n schema: pg_cdc\n post-hook: "GRANT ALL PRIVILEGES ON TABLE {{ this }} TO \"infra+bot@materialize.com\", \"infra+qacanaryload@materialize.io\""\n mysql_cdc:\n schema: mysql_cdc\n post-hook: "GRANT ALL PRIVILEGES ON TABLE {{ this }} TO \"infra+bot@materialize.com\", \"infra+qacanaryload@materialize.io\""\n table:\n schema: table\n post-hook: "GRANT ALL PRIVILEGES ON TABLE {{ this }} TO \"infra+bot@materialize.com\", \"infra+qacanaryload@materialize.io\""\n\ntests:\n +cluster: qa_canary_environment_compute\n
dataset_sample\yaml\MaterializeInc_materialize\test\canary-environment\dbt_project.yml
dbt_project.yml
YAML
1,621
0.8
0
0.210526
react-lib
120
2024-12-21T08:22:43.958806
BSD-3-Clause
true
6ec6a223050673845936d45cb50a5cc3
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\ncanary_environment:\n outputs:\n\n prod:\n type: materialize\n threads: 1\n host: "{{ env_var('MATERIALIZE_PROD_SANDBOX_HOSTNAME') }}"\n port: 6875\n user: "{{ env_var('MATERIALIZE_PROD_SANDBOX_USERNAME') }}"\n password: "{{env_var('MATERIALIZE_PROD_SANDBOX_APP_PASSWORD')}}"\n dbname: qa_canary_environment\n schema: public\n autocommit: True\n\n dev:\n type: materialize\n threads: 1\n host: materialized\n port: 6875\n user: materialize\n pass: materialize\n dbname: materialize\n schema: public\n autocommit: True\n\n target: prod\n
dataset_sample\yaml\MaterializeInc_materialize\test\canary-environment\profiles.yml
profiles.yml
YAML
985
0.8
0
0.258065
react-lib
434
2025-03-27T03:13:10.750502
MIT
true
a630dde0dd76c368dc7b267204b211fb
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nversion: 2\n\nsources:\n - name: loadgen\n schema: public_loadgen\n tables:\n - name: sales\n data_tests:\n - makes_progress\n - name: sales_progress\n data_tests:\n - makes_progress\n - name: sales_large\n # TODO: Reenable once we have the index for sales_large again, currently fails to rehydrate\n #data_tests:\n # - makes_progress\n - name: sales_large_progress\n data_tests:\n - makes_progress\n - name: product\n - name: product_category\n - name: product_tbl\n - name: product_category_tbl\n - name: sales_tbl\n - name: sales_large_tbl\n\n\nmodels:\n - name: sales_product_product_category\n data_tests:\n - makes_progress\n\n - name: sales_large_product_product_category\n data_tests:\n - makes_progress\n
dataset_sample\yaml\MaterializeInc_materialize\test\canary-environment\models\loadgen\schema.yml
schema.yml
YAML
1,212
0.8
0.022727
0.282051
vue-tools
835
2024-10-03T01:43:05.315790
GPL-3.0
true
76d225424107f9a342a85f9001d65469
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nversion: 2\n\nsources:\n - name: mysql_cdc\n schema: public_mysql_cdc\n tables:\n - name: mysql_cdc_progress\n data_tests:\n - makes_progress\n - name: mysql_people\n data_tests:\n - makes_progress\n - name: mysql_relationships\n data_tests:\n - makes_progress\n\nmodels:\n - name: mysql_wmr\n data_tests:\n - makes_progress\n
dataset_sample\yaml\MaterializeInc_materialize\test\canary-environment\models\mysql_cdc\schema.yml
schema.yml
YAML
768
0.8
0
0.307692
vue-tools
897
2025-01-16T08:27:06.309655
MIT
true
f7c182a54d87622b9e6a7d149ccdf669
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nversion: 2\n\nsources:\n - name: pg_cdc\n schema: public_pg_cdc\n tables:\n - name: pg_cdc_progress\n data_tests:\n - makes_progress\n - name: pg_people\n data_tests:\n - makes_progress\n - name: pg_relationships\n data_tests:\n - makes_progress\n\nmodels:\n - name: pg_wmr\n data_tests:\n - makes_progress\n
dataset_sample\yaml\MaterializeInc_materialize\test\canary-environment\models\pg_cdc\schema.yml
schema.yml
YAML
750
0.8
0
0.307692
python-kit
999
2024-03-29T10:04:13.569579
GPL-3.0
true
8217a0fa3181630d14df77bd28dfe7ce
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nversion: 2\n\nsources:\n - name: table\n schema: public_table\n\nmodels:\n - name: table_mv\n
dataset_sample\yaml\MaterializeInc_materialize\test\canary-environment\models\table\schema.yml
schema.yml
YAML
467
0.8
0
0.571429
vue-tools
450
2025-02-06T05:02:08.778608
BSD-3-Clause
true
6c85aa596c1d2e5b0da18738a52700cd
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nversion: 2\n\nsources:\n - name: tpch\n schema: public_tpch\n tables:\n - name: tpch_progress\n data_tests:\n - makes_progress\n - name: tpch_customer\n - name: tpch_lineitem\n - name: tpch_orders\n data_tests:\n - makes_progress\n\nmodels:\n - name: tpch_q01\n data_tests:\n - makes_progress\n - name: tpch_q18\n data_tests:\n - makes_progress\n
dataset_sample\yaml\MaterializeInc_materialize\test\canary-environment\models\tpch\schema.yml
schema.yml
YAML
790
0.8
0
0.285714
react-lib
943
2024-11-11T00:21:59.982115
Apache-2.0
true
3e2a15f87305cd874cd22601d9ae77fc
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: ci-java-smoketest\n
dataset_sample\yaml\MaterializeInc_materialize\test\lang\smoketest\mzbuild.yml
mzbuild.yml
YAML
401
0.8
0
0.888889
node-utils
919
2024-01-31T08:50:23.776316
Apache-2.0
true
510c8d44f9e78f85b97171daf04f0b7f
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: ci-metabase-smoketest\npre-image:\n - type: cargo-build\n bin: metabase-smoketest\n bazel-bin: "@//test/metabase/smoketest:metabase_smoketest"\n
dataset_sample\yaml\MaterializeInc_materialize\test\metabase\smoketest\ci\mzbuild.yml
mzbuild.yml
YAML
529
0.8
0
0.615385
python-kit
423
2024-09-21T12:53:54.503357
BSD-3-Clause
true
e36b61c98db436df7a6c9947dfee6d37
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: mysql\n
dataset_sample\yaml\MaterializeInc_materialize\test\mysql\mzbuild.yml
mzbuild.yml
YAML
389
0.8
0
0.888889
awesome-app
408
2025-03-07T01:50:16.944906
MIT
true
429a66c25fd69bee696e86e1e3b21c71
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: postgres\n
dataset_sample\yaml\MaterializeInc_materialize\test\postgres\mzbuild.yml
mzbuild.yml
YAML
392
0.8
0
0.888889
vue-tools
71
2025-06-09T21:27:36.837807
BSD-3-Clause
true
2873857bc8b7e6f07ded7ee7d5746c69
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: rqg\npublish: false\n
dataset_sample\yaml\MaterializeInc_materialize\test\rqg\mzbuild.yml
mzbuild.yml
YAML
402
0.8
0
0.8
node-utils
728
2025-03-09T02:10:46.918700
GPL-3.0
true
401bfe9636e7e58ff5f4b5bfe391e87e
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: sqlancer\n
dataset_sample\yaml\MaterializeInc_materialize\test\sqlancer\mzbuild.yml
mzbuild.yml
YAML
392
0.8
0
0.888889
vue-tools
375
2024-04-26T13:23:47.719869
BSD-3-Clause
true
b3666161492384af3fb2a7bf96638492
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: sqlsmith\n
dataset_sample\yaml\MaterializeInc_materialize\test\sqlsmith\mzbuild.yml
mzbuild.yml
YAML
392
0.8
0
0.888889
react-lib
103
2024-12-03T09:05:34.483720
BSD-3-Clause
true
17de69983f6f5bc107d3dc979c538374
# Copyright Materialize, Inc. and contributors. All rights reserved.\n#\n# Use of this software is governed by the Business Source License\n# included in the LICENSE file at the root of this repository.\n#\n# As of the Change Date specified in that file, in accordance with\n# the Business Source License, use of this software will be governed\n# by the Apache License, Version 2.0.\n\nname: test-certs\n
dataset_sample\yaml\MaterializeInc_materialize\test\test-certs\mzbuild.yml
mzbuild.yml
YAML
394
0.8
0
0.888889
python-kit
182
2025-05-25T16:31:17.294114
GPL-3.0
true
b1411f32909ded39111b33e8a89c32b0
language: objective-c\nosx_image: xcode11.2\nxcode_project: App/BitBar.xcodeproj\nxcode_scheme: BitBar\nxcode_sdk: macosx10.11\n#before_install:\n#- brew update\n#- brew outdated xctool || brew upgrade xctool\nscript:\n- CERT_P12=Certificate.p12\n- echo "$CERT_BASE64" | base64 --decode > $CERT_P12\n- KEYCHAIN=build.keychain\n- security create-keychain -p travis $KEYCHAIN\n- security default-keychain -s $KEYCHAIN\n- security unlock-keychain -p travis $KEYCHAIN\n- security set-keychain-settings -t 3600 -u $KEYCHAIN\n- security import $CERT_P12 -k $KEYCHAIN -P "$CERT_PW" -T /usr/bin/codesign\n- 'IDENTITY="Developer ID Application: Code and That Ltd (B3T8QSC4HG)"'\n- if [ -n "$TRAVIS_TAG" ]; then xctool -project $TRAVIS_XCODE_PROJECT\n -scheme $TRAVIS_XCODE_SCHEME -sdk $TRAVIS_XCODE_SDK -configuration Release OBJROOT=$PWD/build\n SYMROOT=$PWD/build ONLY_ACTIVE_ARCH=NO CODE_SIGN_IDENTITY="$IDENTITY" build analyze; else xctool -project $TRAVIS_XCODE_PROJECT\n -scheme $TRAVIS_XCODE_SCHEME -sdk $TRAVIS_XCODE_SDK -configuration Release OBJROOT=$PWD/build\n SYMROOT=$PWD/build ONLY_ACTIVE_ARCH=NO build analyze -failOnWarnings; fi\n- security delete-keychain $KEYCHAIN\nbefore_deploy:\n- OUTPUTDIR="$PWD/build/Release"\n- cd $OUTPUTDIR\n- ditto -c -k --sequesterRsrc --keepParent "BitBar.app" "BitBar-$TRAVIS_TAG.zip"\n- ditto -c -k --sequesterRsrc --keepParent "BitBarDistro.app" "BitBarDistro-$TRAVIS_TAG.zip"\ndeploy:\n provider: releases\n api_key:\n secure: VB7wqPRAmwRxX1ugTss4lWdcCjMO4+9yYuvkSKIhRz5PcKFTdgIE5Ol29wssYSlEnk1D5ZqeCJBe3t2qowrxOKHWKJRxH5r4fbgYAYnbk9/nWsMLgWDn1mo4nYa0sD4GyMUDY9JqqmtBY3nZ2pYcJ0L1LmxUU+EHViwcBQz6G4Y=\n file:\n - $OUTPUTDIR/BitBar-$TRAVIS_TAG.zip\n - $OUTPUTDIR/BitBarDistro-$TRAVIS_TAG.zip\n skip_cleanup: true\n on:\n repo: matryer/bitbar\n tags: true\nafter_deploy:\n# Rebuild the Sparkle feed\n- curl -s -X POST -H "Authorization:token $GH_TOKEN" -H Accept:application/vnd.github.mister-fantastic-preview 
https://api.github.com/repos/matryer/bitbar/pages/builds\n# Update the Sparkle feed cache\n- brew install jq\n- PRERELEASE=$(curl -s -H "Authorization:token $GH_TOKEN" "https://api.github.com/repos/matryer/bitbar/releases/tags/$TRAVIS_TAG" | jq .prerelease)\n- echo "$PRERELEASE"\n- if [ "$PRERELEASE" = true ]; then FEED=(beta distro-beta); else FEED=(bitbar distro); fi\n- while :; do STATUS=$(curl -s -H "Authorization:token $GH_TOKEN" https://api.github.com/repos/matryer/bitbar/pages | jq .status); if [ "$STATUS" != '"queued"' ] && [ "$STATUS" != '"building"' ]; then echo "$STATUS"; break; fi; sleep 1; done\n- curl -s -H "X-RELOAD-KEY:$SPARKLE_UPDATE_KEY" -D - -o /dev/null "https://bitbarapp.com/feeds/${FEED[0]}/reload"\n- curl -s -H "X-RELOAD-KEY:$SPARKLE_UPDATE_KEY" -D - -o /dev/null "https://bitbarapp.com/feeds/${FEED[1]}/reload"\nnotifications:\n slack:\n secure: TpJVJf/NWxDvHxPjaQJnVg4vlW6JeLQ7eWadzFo7sUNSdB7Tui703AvYjG8tZTlk2lJ/4bV4jKgz8+rSElkeGsfPLTgDU33IDmG3V9o6MrAeV/ZQL37793bj+zJFxNuOkoIBaBQt1aNDTuIDUB97MNv02Vklb1M7Yd44NRVXdHk=\n
dataset_sample\yaml\matryer_xbar\archive\bitbar\.travis.yml
.travis.yml
YAML
2,974
0.95
0.074074
0.092593
python-kit
674
2023-12-23T21:01:44.815117
MIT
false
2fccd145a4c72d8fb6acbca0063ca66f
language: objective-c\n\nbefore_script: \n- gem install xcpretty\n\nscript: \n- xcodebuild -project Tests/DateToolsTests/DateToolsTests.xcodeproj -scheme DateToolsTests -sdk iphonesimulator test | xcpretty -c
dataset_sample\yaml\matryer_xbar\archive\bitbar\App\Vendor\DateTools\.travis.yml
.travis.yml
YAML
202
0.7
0
0
react-lib
580
2025-05-20T02:18:30.198222
MIT
false
1bf3495be2e569726e8c1ff64c02bc0c
language: objective-c\nosx_image: xcode7\nbefore_install:\n - brew update\n - brew install macmade/tap/xcode-coveralls\nscript: make ci\nafter_success: xcode-coveralls --exclude /Applications --exclude Tests --exclude Vendor build/Build/Intermediates/Sparkle.build/Coverage/Sparkle.build/Objects-normal/x86_64\n
dataset_sample\yaml\matryer_xbar\archive\bitbar\App\Vendor\Sparkle\.travis.yml
.travis.yml
YAML
306
0.7
0
0
python-kit
158
2025-02-15T15:20:55.662604
BSD-3-Clause
false
49533064417f46b60d599afaa69b9175
# Docker image\nimage:\n file: .gitpod.Dockerfile\n\n# ddev and composer are running as part of the prebuild\n# when starting a workspace all docker images are ready\ntasks:\n - name: Startup \n - command: |\n bash .ddev/gitpod-setup-ddev.sh\n gp ports await 8080 && gp preview $(gp url 8080) \n\n# VScode xdebug extension\nvscode:\n extensions:\n - felixfbecker.php-debug\n - bmewburn.vscode-intelephense-client\n - ziyasal.vscode-open-in-github\n\nports:\n # Ddev db port\n - port: 3306\n onOpen: ignore\n # phpmyadmin https port\n - port: 8027\n onOpen: ignore\n # mailhog https port\n - port: 8036\n onOpen: ignore\n # Main web port\n - port: 8080\n onOpen: ignore\n visibility: public\n # router https port that we're ignoring.\n - port: 8443\n onOpen: ignore\n # xdebug port\n - port: 9000\n onOpen: ignore\n\ngithub:\n prebuilds:\n # enable for the master/default branch (defaults to true)\n master: true\n # enable for all branches in this repo (defaults to false)\n branches: true\n # enable for pull requests coming from this repo (defaults to true)\n pullRequests: true\n # enable for pull requests coming from forks (defaults to false)\n pullRequestsFromForks: true\n # add a check to pull requests (defaults to true)\n addCheck: false\n # add a "Review in Gitpod" button as a comment to pull requests (defaults to false)\n addComment: false\n # add a "Review in Gitpod" button to the pull request's description (defaults to false)\n addBadge: true\n # add a label once the prebuild is ready to pull requests (defaults to false)\n addLabel: false\n
dataset_sample\yaml\mautic_mautic\.gitpod.yml
.gitpod.yml
YAML
1,607
0.8
0.068966
0.333333
python-kit
7
2025-04-08T11:13:49.326274
Apache-2.0
false
3e380fea5f875e4087feb3fb7cea222c
paths:\n tests: tests\n output: tests/_output\n data: tests/_data\n support: tests/_support\n envs: tests/_envs\nactor_suffix: Tester\nextensions:\n enabled:\n - Codeception\Extension\RunFailed\nparams:\n - .env.test.local\nsettings:\n error_level: "E_ALL & ~E_DEPRECATED & ~E_USER_DEPRECATED"\n
dataset_sample\yaml\mautic_mautic\codeception.yml
codeception.yml
YAML
294
0.7
0
0
python-kit
993
2025-07-09T00:08:18.818309
MIT
false
3aa760b171ad2efa3238ce7f7e6c99c8
comment:\n layout: "reach, diff, flags, files"\n behavior: default\n require_changes: true\n\ncoverage:\n status:\n patch:\n default:\n # PRs against files that have 0% coverage can remain\n # at 0% coverage, as long as the project's total\n # coverage (e.g. 36%) doesn't decrease.\n target: 0%\n
dataset_sample\yaml\mautic_mautic\codecov.yml
codecov.yml
YAML
329
0.95
0
0.25
vue-tools
709
2024-05-14T10:00:23.824733
MIT
false
9ff9448b68faa3c4f8083bf835f3cbdf
# Set update schedule for GitHub Actions only\n\nversion: 2\nupdates:\n - package-ecosystem: "github-actions"\n directory: "/"\n schedule:\n interval: "monthly"\n labels:\n - 'skip changelog'\n - 'dependencies'\n rebase-strategy: disabled\n
dataset_sample\yaml\meilisearch_meilisearch\.github\dependabot.yml
dependabot.yml
YAML
256
0.8
0.083333
0.090909
python-kit
770
2024-09-23T02:35:44.583476
Apache-2.0
false
64389146b44f4676ed8ec50bc0898210