Arvind2006 commited on
Commit
28d2b37
·
verified ·
1 Parent(s): e05c450

Upload 74 files

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +3 -0
  2. __pycache__/error_taxonomy.cpython-313.pyc +0 -0
  3. __pycache__/explain_error.cpython-313.pyc +0 -0
  4. __pycache__/extract_error_features.cpython-313.pyc +0 -0
  5. __pycache__/main.cpython-313.pyc +0 -0
  6. __pycache__/retrieve_docs.cpython-313.pyc +0 -0
  7. data/docs/docs.index +3 -0
  8. data/docs/docs_meta.json +2442 -0
  9. data/docs/raw/git_scm.txt +206 -0
  10. data/docs/raw/jenkins_credentials_text.txt +125 -0
  11. data/docs/raw/jenkins_git.txt +110 -0
  12. data/docs/raw/jenkins_nodes.txt +171 -0
  13. data/docs/raw/pipeline_steps.txt +0 -0
  14. data/docs/raw/pipeline_syntax.txt +1693 -0
  15. data/docs/raw/using_a_jenkinsfile.txt +810 -0
  16. data/errors/error_01.txt +29 -0
  17. data/errors/error_02.txt +29 -0
  18. data/errors/error_03.txt +11 -0
  19. data/errors/error_04.txt +48 -0
  20. data/errors/error_05.txt +16 -0
  21. data/errors/error_06.txt +10 -0
  22. data/errors/error_07.txt +25 -0
  23. data/errors/error_08.txt +24 -0
  24. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/.no_exist/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/adapter_config.json +0 -0
  25. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/.no_exist/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/added_tokens.json +0 -0
  26. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/.no_exist/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/chat_template.jinja +0 -0
  27. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/53aa51172d142c89d9012cce15ae4d6cc0ca6895895114379cacb4fab128d9db +3 -0
  28. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/58d4a9a45664eb9e12de9549c548c09b6134c17f +173 -0
  29. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/59d594003bf59880a884c574bf88ef7555bb0202 +4 -0
  30. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/72b987fd805cfa2b58c4c8c952b274a11bfd5a00 +24 -0
  31. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/952a9b81c0bfd99800fabf352f69c7ccd46c5e43 +20 -0
  32. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/c79f2b6a0cea6f4b564fed1938984bace9d30ff0 +1 -0
  33. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/cb202bfe2e3c98645018a6d12f182a434c9d3e02 +0 -0
  34. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/d1514c3162bbe87b343f565fadc62e6c06f04f03 +7 -0
  35. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/e7b0375001f109a6b8873d756ad4f7bbb15fbaa5 +1 -0
  36. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/fb140275c155a9c7c5a3b3e0e77a9e839594a938 +0 -0
  37. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/fd1b291129c607e5d49799f87cb219b27f98acdf +7 -0
  38. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/refs/main +1 -0
  39. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/1_Pooling/config.json +0 -0
  40. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/README.md +0 -0
  41. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/config.json +0 -0
  42. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/config_sentence_transformers.json +0 -0
  43. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/model.safetensors +3 -0
  44. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/modules.json +0 -0
  45. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/sentence_bert_config.json +0 -0
  46. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/special_tokens_map.json +0 -0
  47. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/tokenizer.json +0 -0
  48. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/tokenizer_config.json +0 -0
  49. model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/vocab.txt +0 -0
  50. model_cache/models--sentence-transformers--paraphrase-MiniLM-L3-v2/.no_exist/4ca70771034acceecb2e72475f72050fcdde4ddc/adapter_config.json +0 -0
.gitattributes CHANGED
@@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ data/docs/docs.index filter=lfs diff=lfs merge=lfs -text
37
+ model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/53aa51172d142c89d9012cce15ae4d6cc0ca6895895114379cacb4fab128d9db filter=lfs diff=lfs merge=lfs -text
38
+ model_cache/models--sentence-transformers--paraphrase-MiniLM-L3-v2/blobs/cf1e4e2d420c664973037c3c73125d7a8fc69952495093ef8f50596f8943a433 filter=lfs diff=lfs merge=lfs -text
__pycache__/error_taxonomy.cpython-313.pyc ADDED
Binary file (931 Bytes). View file
 
__pycache__/explain_error.cpython-313.pyc ADDED
Binary file (3.47 kB). View file
 
__pycache__/extract_error_features.cpython-313.pyc ADDED
Binary file (1.31 kB). View file
 
__pycache__/main.cpython-313.pyc ADDED
Binary file (1.3 kB). View file
 
__pycache__/retrieve_docs.cpython-313.pyc ADDED
Binary file (2.33 kB). View file
 
data/docs/docs.index ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:82147cb1af5ba65eeb65590c97483e0040900e4aa8bcdff006387b0b989e0cbe
3
+ size 937005
data/docs/docs_meta.json ADDED
@@ -0,0 +1,2442 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "source_file": "git_scm.txt",
4
+ "source": "https://www.jenkins.io/doc/"
5
+ },
6
+ {
7
+ "source_file": "git_scm.txt",
8
+ "source": "https://www.jenkins.io/doc/"
9
+ },
10
+ {
11
+ "source_file": "git_scm.txt",
12
+ "source": "https://www.jenkins.io/doc/"
13
+ },
14
+ {
15
+ "source_file": "git_scm.txt",
16
+ "source": "https://www.jenkins.io/doc/"
17
+ },
18
+ {
19
+ "source_file": "git_scm.txt",
20
+ "source": "https://www.jenkins.io/doc/"
21
+ },
22
+ {
23
+ "source_file": "git_scm.txt",
24
+ "source": "https://www.jenkins.io/doc/"
25
+ },
26
+ {
27
+ "source_file": "git_scm.txt",
28
+ "source": "https://www.jenkins.io/doc/"
29
+ },
30
+ {
31
+ "source_file": "git_scm.txt",
32
+ "source": "https://www.jenkins.io/doc/"
33
+ },
34
+ {
35
+ "source_file": "git_scm.txt",
36
+ "source": "https://www.jenkins.io/doc/"
37
+ },
38
+ {
39
+ "source_file": "git_scm.txt",
40
+ "source": "https://www.jenkins.io/doc/"
41
+ },
42
+ {
43
+ "source_file": "git_scm.txt",
44
+ "source": "https://www.jenkins.io/doc/"
45
+ },
46
+ {
47
+ "source_file": "git_scm.txt",
48
+ "source": "https://www.jenkins.io/doc/"
49
+ },
50
+ {
51
+ "source_file": "git_scm.txt",
52
+ "source": "https://www.jenkins.io/doc/"
53
+ },
54
+ {
55
+ "source_file": "git_scm.txt",
56
+ "source": "https://www.jenkins.io/doc/"
57
+ },
58
+ {
59
+ "source_file": "git_scm.txt",
60
+ "source": "https://www.jenkins.io/doc/"
61
+ },
62
+ {
63
+ "source_file": "git_scm.txt",
64
+ "source": "https://www.jenkins.io/doc/"
65
+ },
66
+ {
67
+ "source_file": "git_scm.txt",
68
+ "source": "https://www.jenkins.io/doc/"
69
+ },
70
+ {
71
+ "source_file": "git_scm.txt",
72
+ "source": "https://www.jenkins.io/doc/"
73
+ },
74
+ {
75
+ "source_file": "git_scm.txt",
76
+ "source": "https://www.jenkins.io/doc/"
77
+ },
78
+ {
79
+ "source_file": "git_scm.txt",
80
+ "source": "https://www.jenkins.io/doc/"
81
+ },
82
+ {
83
+ "source_file": "git_scm.txt",
84
+ "source": "https://www.jenkins.io/doc/"
85
+ },
86
+ {
87
+ "source_file": "git_scm.txt",
88
+ "source": "https://www.jenkins.io/doc/"
89
+ },
90
+ {
91
+ "source_file": "git_scm.txt",
92
+ "source": "https://www.jenkins.io/doc/"
93
+ },
94
+ {
95
+ "source_file": "git_scm.txt",
96
+ "source": "https://www.jenkins.io/doc/"
97
+ },
98
+ {
99
+ "source_file": "git_scm.txt",
100
+ "source": "https://www.jenkins.io/doc/"
101
+ },
102
+ {
103
+ "source_file": "git_scm.txt",
104
+ "source": "https://www.jenkins.io/doc/"
105
+ },
106
+ {
107
+ "source_file": "jenkins_credentials_text.txt",
108
+ "source": "https://www.jenkins.io/doc/"
109
+ },
110
+ {
111
+ "source_file": "jenkins_credentials_text.txt",
112
+ "source": "https://www.jenkins.io/doc/"
113
+ },
114
+ {
115
+ "source_file": "jenkins_credentials_text.txt",
116
+ "source": "https://www.jenkins.io/doc/"
117
+ },
118
+ {
119
+ "source_file": "jenkins_credentials_text.txt",
120
+ "source": "https://www.jenkins.io/doc/"
121
+ },
122
+ {
123
+ "source_file": "jenkins_credentials_text.txt",
124
+ "source": "https://www.jenkins.io/doc/"
125
+ },
126
+ {
127
+ "source_file": "jenkins_credentials_text.txt",
128
+ "source": "https://www.jenkins.io/doc/"
129
+ },
130
+ {
131
+ "source_file": "jenkins_credentials_text.txt",
132
+ "source": "https://www.jenkins.io/doc/"
133
+ },
134
+ {
135
+ "source_file": "jenkins_credentials_text.txt",
136
+ "source": "https://www.jenkins.io/doc/"
137
+ },
138
+ {
139
+ "source_file": "jenkins_credentials_text.txt",
140
+ "source": "https://www.jenkins.io/doc/"
141
+ },
142
+ {
143
+ "source_file": "jenkins_credentials_text.txt",
144
+ "source": "https://www.jenkins.io/doc/"
145
+ },
146
+ {
147
+ "source_file": "jenkins_credentials_text.txt",
148
+ "source": "https://www.jenkins.io/doc/"
149
+ },
150
+ {
151
+ "source_file": "jenkins_credentials_text.txt",
152
+ "source": "https://www.jenkins.io/doc/"
153
+ },
154
+ {
155
+ "source_file": "jenkins_credentials_text.txt",
156
+ "source": "https://www.jenkins.io/doc/"
157
+ },
158
+ {
159
+ "source_file": "jenkins_credentials_text.txt",
160
+ "source": "https://www.jenkins.io/doc/"
161
+ },
162
+ {
163
+ "source_file": "jenkins_git.txt",
164
+ "source": "https://www.jenkins.io/doc/"
165
+ },
166
+ {
167
+ "source_file": "jenkins_git.txt",
168
+ "source": "https://www.jenkins.io/doc/"
169
+ },
170
+ {
171
+ "source_file": "jenkins_git.txt",
172
+ "source": "https://www.jenkins.io/doc/"
173
+ },
174
+ {
175
+ "source_file": "jenkins_git.txt",
176
+ "source": "https://www.jenkins.io/doc/"
177
+ },
178
+ {
179
+ "source_file": "jenkins_git.txt",
180
+ "source": "https://www.jenkins.io/doc/"
181
+ },
182
+ {
183
+ "source_file": "jenkins_git.txt",
184
+ "source": "https://www.jenkins.io/doc/"
185
+ },
186
+ {
187
+ "source_file": "jenkins_git.txt",
188
+ "source": "https://www.jenkins.io/doc/"
189
+ },
190
+ {
191
+ "source_file": "jenkins_git.txt",
192
+ "source": "https://www.jenkins.io/doc/"
193
+ },
194
+ {
195
+ "source_file": "jenkins_git.txt",
196
+ "source": "https://www.jenkins.io/doc/"
197
+ },
198
+ {
199
+ "source_file": "jenkins_git.txt",
200
+ "source": "https://www.jenkins.io/doc/"
201
+ },
202
+ {
203
+ "source_file": "jenkins_git.txt",
204
+ "source": "https://www.jenkins.io/doc/"
205
+ },
206
+ {
207
+ "source_file": "jenkins_git.txt",
208
+ "source": "https://www.jenkins.io/doc/"
209
+ },
210
+ {
211
+ "source_file": "jenkins_git.txt",
212
+ "source": "https://www.jenkins.io/doc/"
213
+ },
214
+ {
215
+ "source_file": "jenkins_git.txt",
216
+ "source": "https://www.jenkins.io/doc/"
217
+ },
218
+ {
219
+ "source_file": "jenkins_git.txt",
220
+ "source": "https://www.jenkins.io/doc/"
221
+ },
222
+ {
223
+ "source_file": "jenkins_git.txt",
224
+ "source": "https://www.jenkins.io/doc/"
225
+ },
226
+ {
227
+ "source_file": "jenkins_git.txt",
228
+ "source": "https://www.jenkins.io/doc/"
229
+ },
230
+ {
231
+ "source_file": "jenkins_nodes.txt",
232
+ "source": "https://www.jenkins.io/doc/"
233
+ },
234
+ {
235
+ "source_file": "jenkins_nodes.txt",
236
+ "source": "https://www.jenkins.io/doc/"
237
+ },
238
+ {
239
+ "source_file": "jenkins_nodes.txt",
240
+ "source": "https://www.jenkins.io/doc/"
241
+ },
242
+ {
243
+ "source_file": "jenkins_nodes.txt",
244
+ "source": "https://www.jenkins.io/doc/"
245
+ },
246
+ {
247
+ "source_file": "jenkins_nodes.txt",
248
+ "source": "https://www.jenkins.io/doc/"
249
+ },
250
+ {
251
+ "source_file": "jenkins_nodes.txt",
252
+ "source": "https://www.jenkins.io/doc/"
253
+ },
254
+ {
255
+ "source_file": "jenkins_nodes.txt",
256
+ "source": "https://www.jenkins.io/doc/"
257
+ },
258
+ {
259
+ "source_file": "jenkins_nodes.txt",
260
+ "source": "https://www.jenkins.io/doc/"
261
+ },
262
+ {
263
+ "source_file": "jenkins_nodes.txt",
264
+ "source": "https://www.jenkins.io/doc/"
265
+ },
266
+ {
267
+ "source_file": "jenkins_nodes.txt",
268
+ "source": "https://www.jenkins.io/doc/"
269
+ },
270
+ {
271
+ "source_file": "jenkins_nodes.txt",
272
+ "source": "https://www.jenkins.io/doc/"
273
+ },
274
+ {
275
+ "source_file": "jenkins_nodes.txt",
276
+ "source": "https://www.jenkins.io/doc/"
277
+ },
278
+ {
279
+ "source_file": "jenkins_nodes.txt",
280
+ "source": "https://www.jenkins.io/doc/"
281
+ },
282
+ {
283
+ "source_file": "jenkins_nodes.txt",
284
+ "source": "https://www.jenkins.io/doc/"
285
+ },
286
+ {
287
+ "source_file": "jenkins_nodes.txt",
288
+ "source": "https://www.jenkins.io/doc/"
289
+ },
290
+ {
291
+ "source_file": "jenkins_nodes.txt",
292
+ "source": "https://www.jenkins.io/doc/"
293
+ },
294
+ {
295
+ "source_file": "jenkins_nodes.txt",
296
+ "source": "https://www.jenkins.io/doc/"
297
+ },
298
+ {
299
+ "source_file": "jenkins_nodes.txt",
300
+ "source": "https://www.jenkins.io/doc/"
301
+ },
302
+ {
303
+ "source_file": "jenkins_nodes.txt",
304
+ "source": "https://www.jenkins.io/doc/"
305
+ },
306
+ {
307
+ "source_file": "pipeline_steps.txt",
308
+ "source": "https://www.jenkins.io/doc/"
309
+ },
310
+ {
311
+ "source_file": "pipeline_steps.txt",
312
+ "source": "https://www.jenkins.io/doc/"
313
+ },
314
+ {
315
+ "source_file": "pipeline_steps.txt",
316
+ "source": "https://www.jenkins.io/doc/"
317
+ },
318
+ {
319
+ "source_file": "pipeline_steps.txt",
320
+ "source": "https://www.jenkins.io/doc/"
321
+ },
322
+ {
323
+ "source_file": "pipeline_steps.txt",
324
+ "source": "https://www.jenkins.io/doc/"
325
+ },
326
+ {
327
+ "source_file": "pipeline_steps.txt",
328
+ "source": "https://www.jenkins.io/doc/"
329
+ },
330
+ {
331
+ "source_file": "pipeline_steps.txt",
332
+ "source": "https://www.jenkins.io/doc/"
333
+ },
334
+ {
335
+ "source_file": "pipeline_steps.txt",
336
+ "source": "https://www.jenkins.io/doc/"
337
+ },
338
+ {
339
+ "source_file": "pipeline_steps.txt",
340
+ "source": "https://www.jenkins.io/doc/"
341
+ },
342
+ {
343
+ "source_file": "pipeline_steps.txt",
344
+ "source": "https://www.jenkins.io/doc/"
345
+ },
346
+ {
347
+ "source_file": "pipeline_steps.txt",
348
+ "source": "https://www.jenkins.io/doc/"
349
+ },
350
+ {
351
+ "source_file": "pipeline_steps.txt",
352
+ "source": "https://www.jenkins.io/doc/"
353
+ },
354
+ {
355
+ "source_file": "pipeline_steps.txt",
356
+ "source": "https://www.jenkins.io/doc/"
357
+ },
358
+ {
359
+ "source_file": "pipeline_steps.txt",
360
+ "source": "https://www.jenkins.io/doc/"
361
+ },
362
+ {
363
+ "source_file": "pipeline_steps.txt",
364
+ "source": "https://www.jenkins.io/doc/"
365
+ },
366
+ {
367
+ "source_file": "pipeline_steps.txt",
368
+ "source": "https://www.jenkins.io/doc/"
369
+ },
370
+ {
371
+ "source_file": "pipeline_steps.txt",
372
+ "source": "https://www.jenkins.io/doc/"
373
+ },
374
+ {
375
+ "source_file": "pipeline_steps.txt",
376
+ "source": "https://www.jenkins.io/doc/"
377
+ },
378
+ {
379
+ "source_file": "pipeline_steps.txt",
380
+ "source": "https://www.jenkins.io/doc/"
381
+ },
382
+ {
383
+ "source_file": "pipeline_steps.txt",
384
+ "source": "https://www.jenkins.io/doc/"
385
+ },
386
+ {
387
+ "source_file": "pipeline_steps.txt",
388
+ "source": "https://www.jenkins.io/doc/"
389
+ },
390
+ {
391
+ "source_file": "pipeline_steps.txt",
392
+ "source": "https://www.jenkins.io/doc/"
393
+ },
394
+ {
395
+ "source_file": "pipeline_steps.txt",
396
+ "source": "https://www.jenkins.io/doc/"
397
+ },
398
+ {
399
+ "source_file": "pipeline_steps.txt",
400
+ "source": "https://www.jenkins.io/doc/"
401
+ },
402
+ {
403
+ "source_file": "pipeline_steps.txt",
404
+ "source": "https://www.jenkins.io/doc/"
405
+ },
406
+ {
407
+ "source_file": "pipeline_steps.txt",
408
+ "source": "https://www.jenkins.io/doc/"
409
+ },
410
+ {
411
+ "source_file": "pipeline_steps.txt",
412
+ "source": "https://www.jenkins.io/doc/"
413
+ },
414
+ {
415
+ "source_file": "pipeline_steps.txt",
416
+ "source": "https://www.jenkins.io/doc/"
417
+ },
418
+ {
419
+ "source_file": "pipeline_steps.txt",
420
+ "source": "https://www.jenkins.io/doc/"
421
+ },
422
+ {
423
+ "source_file": "pipeline_steps.txt",
424
+ "source": "https://www.jenkins.io/doc/"
425
+ },
426
+ {
427
+ "source_file": "pipeline_steps.txt",
428
+ "source": "https://www.jenkins.io/doc/"
429
+ },
430
+ {
431
+ "source_file": "pipeline_steps.txt",
432
+ "source": "https://www.jenkins.io/doc/"
433
+ },
434
+ {
435
+ "source_file": "pipeline_steps.txt",
436
+ "source": "https://www.jenkins.io/doc/"
437
+ },
438
+ {
439
+ "source_file": "pipeline_steps.txt",
440
+ "source": "https://www.jenkins.io/doc/"
441
+ },
442
+ {
443
+ "source_file": "pipeline_steps.txt",
444
+ "source": "https://www.jenkins.io/doc/"
445
+ },
446
+ {
447
+ "source_file": "pipeline_steps.txt",
448
+ "source": "https://www.jenkins.io/doc/"
449
+ },
450
+ {
451
+ "source_file": "pipeline_steps.txt",
452
+ "source": "https://www.jenkins.io/doc/"
453
+ },
454
+ {
455
+ "source_file": "pipeline_steps.txt",
456
+ "source": "https://www.jenkins.io/doc/"
457
+ },
458
+ {
459
+ "source_file": "pipeline_steps.txt",
460
+ "source": "https://www.jenkins.io/doc/"
461
+ },
462
+ {
463
+ "source_file": "pipeline_steps.txt",
464
+ "source": "https://www.jenkins.io/doc/"
465
+ },
466
+ {
467
+ "source_file": "pipeline_steps.txt",
468
+ "source": "https://www.jenkins.io/doc/"
469
+ },
470
+ {
471
+ "source_file": "pipeline_steps.txt",
472
+ "source": "https://www.jenkins.io/doc/"
473
+ },
474
+ {
475
+ "source_file": "pipeline_steps.txt",
476
+ "source": "https://www.jenkins.io/doc/"
477
+ },
478
+ {
479
+ "source_file": "pipeline_steps.txt",
480
+ "source": "https://www.jenkins.io/doc/"
481
+ },
482
+ {
483
+ "source_file": "pipeline_steps.txt",
484
+ "source": "https://www.jenkins.io/doc/"
485
+ },
486
+ {
487
+ "source_file": "pipeline_steps.txt",
488
+ "source": "https://www.jenkins.io/doc/"
489
+ },
490
+ {
491
+ "source_file": "pipeline_steps.txt",
492
+ "source": "https://www.jenkins.io/doc/"
493
+ },
494
+ {
495
+ "source_file": "pipeline_steps.txt",
496
+ "source": "https://www.jenkins.io/doc/"
497
+ },
498
+ {
499
+ "source_file": "pipeline_steps.txt",
500
+ "source": "https://www.jenkins.io/doc/"
501
+ },
502
+ {
503
+ "source_file": "pipeline_steps.txt",
504
+ "source": "https://www.jenkins.io/doc/"
505
+ },
506
+ {
507
+ "source_file": "pipeline_steps.txt",
508
+ "source": "https://www.jenkins.io/doc/"
509
+ },
510
+ {
511
+ "source_file": "pipeline_steps.txt",
512
+ "source": "https://www.jenkins.io/doc/"
513
+ },
514
+ {
515
+ "source_file": "pipeline_steps.txt",
516
+ "source": "https://www.jenkins.io/doc/"
517
+ },
518
+ {
519
+ "source_file": "pipeline_steps.txt",
520
+ "source": "https://www.jenkins.io/doc/"
521
+ },
522
+ {
523
+ "source_file": "pipeline_steps.txt",
524
+ "source": "https://www.jenkins.io/doc/"
525
+ },
526
+ {
527
+ "source_file": "pipeline_steps.txt",
528
+ "source": "https://www.jenkins.io/doc/"
529
+ },
530
+ {
531
+ "source_file": "pipeline_steps.txt",
532
+ "source": "https://www.jenkins.io/doc/"
533
+ },
534
+ {
535
+ "source_file": "pipeline_steps.txt",
536
+ "source": "https://www.jenkins.io/doc/"
537
+ },
538
+ {
539
+ "source_file": "pipeline_steps.txt",
540
+ "source": "https://www.jenkins.io/doc/"
541
+ },
542
+ {
543
+ "source_file": "pipeline_steps.txt",
544
+ "source": "https://www.jenkins.io/doc/"
545
+ },
546
+ {
547
+ "source_file": "pipeline_steps.txt",
548
+ "source": "https://www.jenkins.io/doc/"
549
+ },
550
+ {
551
+ "source_file": "pipeline_steps.txt",
552
+ "source": "https://www.jenkins.io/doc/"
553
+ },
554
+ {
555
+ "source_file": "pipeline_steps.txt",
556
+ "source": "https://www.jenkins.io/doc/"
557
+ },
558
+ {
559
+ "source_file": "pipeline_steps.txt",
560
+ "source": "https://www.jenkins.io/doc/"
561
+ },
562
+ {
563
+ "source_file": "pipeline_steps.txt",
564
+ "source": "https://www.jenkins.io/doc/"
565
+ },
566
+ {
567
+ "source_file": "pipeline_steps.txt",
568
+ "source": "https://www.jenkins.io/doc/"
569
+ },
570
+ {
571
+ "source_file": "pipeline_steps.txt",
572
+ "source": "https://www.jenkins.io/doc/"
573
+ },
574
+ {
575
+ "source_file": "pipeline_steps.txt",
576
+ "source": "https://www.jenkins.io/doc/"
577
+ },
578
+ {
579
+ "source_file": "pipeline_steps.txt",
580
+ "source": "https://www.jenkins.io/doc/"
581
+ },
582
+ {
583
+ "source_file": "pipeline_steps.txt",
584
+ "source": "https://www.jenkins.io/doc/"
585
+ },
586
+ {
587
+ "source_file": "pipeline_steps.txt",
588
+ "source": "https://www.jenkins.io/doc/"
589
+ },
590
+ {
591
+ "source_file": "pipeline_steps.txt",
592
+ "source": "https://www.jenkins.io/doc/"
593
+ },
594
+ {
595
+ "source_file": "pipeline_steps.txt",
596
+ "source": "https://www.jenkins.io/doc/"
597
+ },
598
+ {
599
+ "source_file": "pipeline_steps.txt",
600
+ "source": "https://www.jenkins.io/doc/"
601
+ },
602
+ {
603
+ "source_file": "pipeline_steps.txt",
604
+ "source": "https://www.jenkins.io/doc/"
605
+ },
606
+ {
607
+ "source_file": "pipeline_steps.txt",
608
+ "source": "https://www.jenkins.io/doc/"
609
+ },
610
+ {
611
+ "source_file": "pipeline_steps.txt",
612
+ "source": "https://www.jenkins.io/doc/"
613
+ },
614
+ {
615
+ "source_file": "pipeline_steps.txt",
616
+ "source": "https://www.jenkins.io/doc/"
617
+ },
618
+ {
619
+ "source_file": "pipeline_steps.txt",
620
+ "source": "https://www.jenkins.io/doc/"
621
+ },
622
+ {
623
+ "source_file": "pipeline_steps.txt",
624
+ "source": "https://www.jenkins.io/doc/"
625
+ },
626
+ {
627
+ "source_file": "pipeline_steps.txt",
628
+ "source": "https://www.jenkins.io/doc/"
629
+ },
630
+ {
631
+ "source_file": "pipeline_steps.txt",
632
+ "source": "https://www.jenkins.io/doc/"
633
+ },
634
+ {
635
+ "source_file": "pipeline_steps.txt",
636
+ "source": "https://www.jenkins.io/doc/"
637
+ },
638
+ {
639
+ "source_file": "pipeline_steps.txt",
640
+ "source": "https://www.jenkins.io/doc/"
641
+ },
642
+ {
643
+ "source_file": "pipeline_steps.txt",
644
+ "source": "https://www.jenkins.io/doc/"
645
+ },
646
+ {
647
+ "source_file": "pipeline_steps.txt",
648
+ "source": "https://www.jenkins.io/doc/"
649
+ },
650
+ {
651
+ "source_file": "pipeline_steps.txt",
652
+ "source": "https://www.jenkins.io/doc/"
653
+ },
654
+ {
655
+ "source_file": "pipeline_steps.txt",
656
+ "source": "https://www.jenkins.io/doc/"
657
+ },
658
+ {
659
+ "source_file": "pipeline_steps.txt",
660
+ "source": "https://www.jenkins.io/doc/"
661
+ },
662
+ {
663
+ "source_file": "pipeline_steps.txt",
664
+ "source": "https://www.jenkins.io/doc/"
665
+ },
666
+ {
667
+ "source_file": "pipeline_steps.txt",
668
+ "source": "https://www.jenkins.io/doc/"
669
+ },
670
+ {
671
+ "source_file": "pipeline_steps.txt",
672
+ "source": "https://www.jenkins.io/doc/"
673
+ },
674
+ {
675
+ "source_file": "pipeline_steps.txt",
676
+ "source": "https://www.jenkins.io/doc/"
677
+ },
678
+ {
679
+ "source_file": "pipeline_steps.txt",
680
+ "source": "https://www.jenkins.io/doc/"
681
+ },
682
+ {
683
+ "source_file": "pipeline_steps.txt",
684
+ "source": "https://www.jenkins.io/doc/"
685
+ },
686
+ {
687
+ "source_file": "pipeline_steps.txt",
688
+ "source": "https://www.jenkins.io/doc/"
689
+ },
690
+ {
691
+ "source_file": "pipeline_steps.txt",
692
+ "source": "https://www.jenkins.io/doc/"
693
+ },
694
+ {
695
+ "source_file": "pipeline_steps.txt",
696
+ "source": "https://www.jenkins.io/doc/"
697
+ },
698
+ {
699
+ "source_file": "pipeline_steps.txt",
700
+ "source": "https://www.jenkins.io/doc/"
701
+ },
702
+ {
703
+ "source_file": "pipeline_steps.txt",
704
+ "source": "https://www.jenkins.io/doc/"
705
+ },
706
+ {
707
+ "source_file": "pipeline_steps.txt",
708
+ "source": "https://www.jenkins.io/doc/"
709
+ },
710
+ {
711
+ "source_file": "pipeline_steps.txt",
712
+ "source": "https://www.jenkins.io/doc/"
713
+ },
714
+ {
715
+ "source_file": "pipeline_steps.txt",
716
+ "source": "https://www.jenkins.io/doc/"
717
+ },
718
+ {
719
+ "source_file": "pipeline_steps.txt",
720
+ "source": "https://www.jenkins.io/doc/"
721
+ },
722
+ {
723
+ "source_file": "pipeline_steps.txt",
724
+ "source": "https://www.jenkins.io/doc/"
725
+ },
726
+ {
727
+ "source_file": "pipeline_steps.txt",
728
+ "source": "https://www.jenkins.io/doc/"
729
+ },
730
+ {
731
+ "source_file": "pipeline_steps.txt",
732
+ "source": "https://www.jenkins.io/doc/"
733
+ },
734
+ {
735
+ "source_file": "pipeline_steps.txt",
736
+ "source": "https://www.jenkins.io/doc/"
737
+ },
738
+ {
739
+ "source_file": "pipeline_steps.txt",
740
+ "source": "https://www.jenkins.io/doc/"
741
+ },
742
+ {
743
+ "source_file": "pipeline_steps.txt",
744
+ "source": "https://www.jenkins.io/doc/"
745
+ },
746
+ {
747
+ "source_file": "pipeline_steps.txt",
748
+ "source": "https://www.jenkins.io/doc/"
749
+ },
750
+ {
751
+ "source_file": "pipeline_steps.txt",
752
+ "source": "https://www.jenkins.io/doc/"
753
+ },
754
+ {
755
+ "source_file": "pipeline_steps.txt",
756
+ "source": "https://www.jenkins.io/doc/"
757
+ },
758
+ {
759
+ "source_file": "pipeline_steps.txt",
760
+ "source": "https://www.jenkins.io/doc/"
761
+ },
762
+ {
763
+ "source_file": "pipeline_steps.txt",
764
+ "source": "https://www.jenkins.io/doc/"
765
+ },
766
+ {
767
+ "source_file": "pipeline_steps.txt",
768
+ "source": "https://www.jenkins.io/doc/"
769
+ },
770
+ {
771
+ "source_file": "pipeline_steps.txt",
772
+ "source": "https://www.jenkins.io/doc/"
773
+ },
774
+ {
775
+ "source_file": "pipeline_steps.txt",
776
+ "source": "https://www.jenkins.io/doc/"
777
+ },
778
+ {
779
+ "source_file": "pipeline_steps.txt",
780
+ "source": "https://www.jenkins.io/doc/"
781
+ },
782
+ {
783
+ "source_file": "pipeline_steps.txt",
784
+ "source": "https://www.jenkins.io/doc/"
785
+ },
786
+ {
787
+ "source_file": "pipeline_steps.txt",
788
+ "source": "https://www.jenkins.io/doc/"
789
+ },
790
+ {
791
+ "source_file": "pipeline_steps.txt",
792
+ "source": "https://www.jenkins.io/doc/"
793
+ },
794
+ {
795
+ "source_file": "pipeline_steps.txt",
796
+ "source": "https://www.jenkins.io/doc/"
797
+ },
798
+ {
799
+ "source_file": "pipeline_steps.txt",
800
+ "source": "https://www.jenkins.io/doc/"
801
+ },
802
+ {
803
+ "source_file": "pipeline_steps.txt",
804
+ "source": "https://www.jenkins.io/doc/"
805
+ },
806
+ {
807
+ "source_file": "pipeline_steps.txt",
808
+ "source": "https://www.jenkins.io/doc/"
809
+ },
810
+ {
811
+ "source_file": "pipeline_steps.txt",
812
+ "source": "https://www.jenkins.io/doc/"
813
+ },
814
+ {
815
+ "source_file": "pipeline_steps.txt",
816
+ "source": "https://www.jenkins.io/doc/"
817
+ },
818
+ {
819
+ "source_file": "pipeline_steps.txt",
820
+ "source": "https://www.jenkins.io/doc/"
821
+ },
822
+ {
823
+ "source_file": "pipeline_steps.txt",
824
+ "source": "https://www.jenkins.io/doc/"
825
+ },
826
+ {
827
+ "source_file": "pipeline_steps.txt",
828
+ "source": "https://www.jenkins.io/doc/"
829
+ },
830
+ {
831
+ "source_file": "pipeline_steps.txt",
832
+ "source": "https://www.jenkins.io/doc/"
833
+ },
834
+ {
835
+ "source_file": "pipeline_steps.txt",
836
+ "source": "https://www.jenkins.io/doc/"
837
+ },
838
+ {
839
+ "source_file": "pipeline_steps.txt",
840
+ "source": "https://www.jenkins.io/doc/"
841
+ },
842
+ {
843
+ "source_file": "pipeline_steps.txt",
844
+ "source": "https://www.jenkins.io/doc/"
845
+ },
846
+ {
847
+ "source_file": "pipeline_steps.txt",
848
+ "source": "https://www.jenkins.io/doc/"
849
+ },
850
+ {
851
+ "source_file": "pipeline_steps.txt",
852
+ "source": "https://www.jenkins.io/doc/"
853
+ },
854
+ {
855
+ "source_file": "pipeline_steps.txt",
856
+ "source": "https://www.jenkins.io/doc/"
857
+ },
858
+ {
859
+ "source_file": "pipeline_steps.txt",
860
+ "source": "https://www.jenkins.io/doc/"
861
+ },
862
+ {
863
+ "source_file": "pipeline_steps.txt",
864
+ "source": "https://www.jenkins.io/doc/"
865
+ },
866
+ {
867
+ "source_file": "pipeline_steps.txt",
868
+ "source": "https://www.jenkins.io/doc/"
869
+ },
870
+ {
871
+ "source_file": "pipeline_steps.txt",
872
+ "source": "https://www.jenkins.io/doc/"
873
+ },
874
+ {
875
+ "source_file": "pipeline_steps.txt",
876
+ "source": "https://www.jenkins.io/doc/"
877
+ },
878
+ {
879
+ "source_file": "pipeline_steps.txt",
880
+ "source": "https://www.jenkins.io/doc/"
881
+ },
882
+ {
883
+ "source_file": "pipeline_steps.txt",
884
+ "source": "https://www.jenkins.io/doc/"
885
+ },
886
+ {
887
+ "source_file": "pipeline_steps.txt",
888
+ "source": "https://www.jenkins.io/doc/"
889
+ },
890
+ {
891
+ "source_file": "pipeline_steps.txt",
892
+ "source": "https://www.jenkins.io/doc/"
893
+ },
894
+ {
895
+ "source_file": "pipeline_steps.txt",
896
+ "source": "https://www.jenkins.io/doc/"
897
+ },
898
+ {
899
+ "source_file": "pipeline_steps.txt",
900
+ "source": "https://www.jenkins.io/doc/"
901
+ },
902
+ {
903
+ "source_file": "pipeline_steps.txt",
904
+ "source": "https://www.jenkins.io/doc/"
905
+ },
906
+ {
907
+ "source_file": "pipeline_steps.txt",
908
+ "source": "https://www.jenkins.io/doc/"
909
+ },
910
+ {
911
+ "source_file": "pipeline_steps.txt",
912
+ "source": "https://www.jenkins.io/doc/"
913
+ },
914
+ {
915
+ "source_file": "pipeline_steps.txt",
916
+ "source": "https://www.jenkins.io/doc/"
917
+ },
918
+ {
919
+ "source_file": "pipeline_steps.txt",
920
+ "source": "https://www.jenkins.io/doc/"
921
+ },
922
+ {
923
+ "source_file": "pipeline_steps.txt",
924
+ "source": "https://www.jenkins.io/doc/"
925
+ },
926
+ {
927
+ "source_file": "pipeline_steps.txt",
928
+ "source": "https://www.jenkins.io/doc/"
929
+ },
930
+ {
931
+ "source_file": "pipeline_steps.txt",
932
+ "source": "https://www.jenkins.io/doc/"
933
+ },
934
+ {
935
+ "source_file": "pipeline_steps.txt",
936
+ "source": "https://www.jenkins.io/doc/"
937
+ },
938
+ {
939
+ "source_file": "pipeline_steps.txt",
940
+ "source": "https://www.jenkins.io/doc/"
941
+ },
942
+ {
943
+ "source_file": "pipeline_steps.txt",
944
+ "source": "https://www.jenkins.io/doc/"
945
+ },
946
+ {
947
+ "source_file": "pipeline_steps.txt",
948
+ "source": "https://www.jenkins.io/doc/"
949
+ },
950
+ {
951
+ "source_file": "pipeline_steps.txt",
952
+ "source": "https://www.jenkins.io/doc/"
953
+ },
954
+ {
955
+ "source_file": "pipeline_steps.txt",
956
+ "source": "https://www.jenkins.io/doc/"
957
+ },
958
+ {
959
+ "source_file": "pipeline_steps.txt",
960
+ "source": "https://www.jenkins.io/doc/"
961
+ },
962
+ {
963
+ "source_file": "pipeline_steps.txt",
964
+ "source": "https://www.jenkins.io/doc/"
965
+ },
966
+ {
967
+ "source_file": "pipeline_steps.txt",
968
+ "source": "https://www.jenkins.io/doc/"
969
+ },
970
+ {
971
+ "source_file": "pipeline_steps.txt",
972
+ "source": "https://www.jenkins.io/doc/"
973
+ },
974
+ {
975
+ "source_file": "pipeline_steps.txt",
976
+ "source": "https://www.jenkins.io/doc/"
977
+ },
978
+ {
979
+ "source_file": "pipeline_steps.txt",
980
+ "source": "https://www.jenkins.io/doc/"
981
+ },
982
+ {
983
+ "source_file": "pipeline_steps.txt",
984
+ "source": "https://www.jenkins.io/doc/"
985
+ },
986
+ {
987
+ "source_file": "pipeline_steps.txt",
988
+ "source": "https://www.jenkins.io/doc/"
989
+ },
990
+ {
991
+ "source_file": "pipeline_steps.txt",
992
+ "source": "https://www.jenkins.io/doc/"
993
+ },
994
+ {
995
+ "source_file": "pipeline_steps.txt",
996
+ "source": "https://www.jenkins.io/doc/"
997
+ },
998
+ {
999
+ "source_file": "pipeline_steps.txt",
1000
+ "source": "https://www.jenkins.io/doc/"
1001
+ },
1002
+ {
1003
+ "source_file": "pipeline_steps.txt",
1004
+ "source": "https://www.jenkins.io/doc/"
1005
+ },
1006
+ {
1007
+ "source_file": "pipeline_steps.txt",
1008
+ "source": "https://www.jenkins.io/doc/"
1009
+ },
1010
+ {
1011
+ "source_file": "pipeline_steps.txt",
1012
+ "source": "https://www.jenkins.io/doc/"
1013
+ },
1014
+ {
1015
+ "source_file": "pipeline_steps.txt",
1016
+ "source": "https://www.jenkins.io/doc/"
1017
+ },
1018
+ {
1019
+ "source_file": "pipeline_steps.txt",
1020
+ "source": "https://www.jenkins.io/doc/"
1021
+ },
1022
+ {
1023
+ "source_file": "pipeline_steps.txt",
1024
+ "source": "https://www.jenkins.io/doc/"
1025
+ },
1026
+ {
1027
+ "source_file": "pipeline_steps.txt",
1028
+ "source": "https://www.jenkins.io/doc/"
1029
+ },
1030
+ {
1031
+ "source_file": "pipeline_steps.txt",
1032
+ "source": "https://www.jenkins.io/doc/"
1033
+ },
1034
+ {
1035
+ "source_file": "pipeline_steps.txt",
1036
+ "source": "https://www.jenkins.io/doc/"
1037
+ },
1038
+ {
1039
+ "source_file": "pipeline_steps.txt",
1040
+ "source": "https://www.jenkins.io/doc/"
1041
+ },
1042
+ {
1043
+ "source_file": "pipeline_steps.txt",
1044
+ "source": "https://www.jenkins.io/doc/"
1045
+ },
1046
+ {
1047
+ "source_file": "pipeline_steps.txt",
1048
+ "source": "https://www.jenkins.io/doc/"
1049
+ },
1050
+ {
1051
+ "source_file": "pipeline_steps.txt",
1052
+ "source": "https://www.jenkins.io/doc/"
1053
+ },
1054
+ {
1055
+ "source_file": "pipeline_steps.txt",
1056
+ "source": "https://www.jenkins.io/doc/"
1057
+ },
1058
+ {
1059
+ "source_file": "pipeline_steps.txt",
1060
+ "source": "https://www.jenkins.io/doc/"
1061
+ },
1062
+ {
1063
+ "source_file": "pipeline_steps.txt",
1064
+ "source": "https://www.jenkins.io/doc/"
1065
+ },
1066
+ {
1067
+ "source_file": "pipeline_steps.txt",
1068
+ "source": "https://www.jenkins.io/doc/"
1069
+ },
1070
+ {
1071
+ "source_file": "pipeline_steps.txt",
1072
+ "source": "https://www.jenkins.io/doc/"
1073
+ },
1074
+ {
1075
+ "source_file": "pipeline_steps.txt",
1076
+ "source": "https://www.jenkins.io/doc/"
1077
+ },
1078
+ {
1079
+ "source_file": "pipeline_steps.txt",
1080
+ "source": "https://www.jenkins.io/doc/"
1081
+ },
1082
+ {
1083
+ "source_file": "pipeline_steps.txt",
1084
+ "source": "https://www.jenkins.io/doc/"
1085
+ },
1086
+ {
1087
+ "source_file": "pipeline_steps.txt",
1088
+ "source": "https://www.jenkins.io/doc/"
1089
+ },
1090
+ {
1091
+ "source_file": "pipeline_steps.txt",
1092
+ "source": "https://www.jenkins.io/doc/"
1093
+ },
1094
+ {
1095
+ "source_file": "pipeline_steps.txt",
1096
+ "source": "https://www.jenkins.io/doc/"
1097
+ },
1098
+ {
1099
+ "source_file": "pipeline_steps.txt",
1100
+ "source": "https://www.jenkins.io/doc/"
1101
+ },
1102
+ {
1103
+ "source_file": "pipeline_steps.txt",
1104
+ "source": "https://www.jenkins.io/doc/"
1105
+ },
1106
+ {
1107
+ "source_file": "pipeline_steps.txt",
1108
+ "source": "https://www.jenkins.io/doc/"
1109
+ },
1110
+ {
1111
+ "source_file": "pipeline_steps.txt",
1112
+ "source": "https://www.jenkins.io/doc/"
1113
+ },
1114
+ {
1115
+ "source_file": "pipeline_steps.txt",
1116
+ "source": "https://www.jenkins.io/doc/"
1117
+ },
1118
+ {
1119
+ "source_file": "pipeline_steps.txt",
1120
+ "source": "https://www.jenkins.io/doc/"
1121
+ },
1122
+ {
1123
+ "source_file": "pipeline_steps.txt",
1124
+ "source": "https://www.jenkins.io/doc/"
1125
+ },
1126
+ {
1127
+ "source_file": "pipeline_steps.txt",
1128
+ "source": "https://www.jenkins.io/doc/"
1129
+ },
1130
+ {
1131
+ "source_file": "pipeline_steps.txt",
1132
+ "source": "https://www.jenkins.io/doc/"
1133
+ },
1134
+ {
1135
+ "source_file": "pipeline_steps.txt",
1136
+ "source": "https://www.jenkins.io/doc/"
1137
+ },
1138
+ {
1139
+ "source_file": "pipeline_steps.txt",
1140
+ "source": "https://www.jenkins.io/doc/"
1141
+ },
1142
+ {
1143
+ "source_file": "pipeline_steps.txt",
1144
+ "source": "https://www.jenkins.io/doc/"
1145
+ },
1146
+ {
1147
+ "source_file": "pipeline_steps.txt",
1148
+ "source": "https://www.jenkins.io/doc/"
1149
+ },
1150
+ {
1151
+ "source_file": "pipeline_steps.txt",
1152
+ "source": "https://www.jenkins.io/doc/"
1153
+ },
1154
+ {
1155
+ "source_file": "pipeline_steps.txt",
1156
+ "source": "https://www.jenkins.io/doc/"
1157
+ },
1158
+ {
1159
+ "source_file": "pipeline_steps.txt",
1160
+ "source": "https://www.jenkins.io/doc/"
1161
+ },
1162
+ {
1163
+ "source_file": "pipeline_steps.txt",
1164
+ "source": "https://www.jenkins.io/doc/"
1165
+ },
1166
+ {
1167
+ "source_file": "pipeline_steps.txt",
1168
+ "source": "https://www.jenkins.io/doc/"
1169
+ },
1170
+ {
1171
+ "source_file": "pipeline_steps.txt",
1172
+ "source": "https://www.jenkins.io/doc/"
1173
+ },
1174
+ {
1175
+ "source_file": "pipeline_steps.txt",
1176
+ "source": "https://www.jenkins.io/doc/"
1177
+ },
1178
+ {
1179
+ "source_file": "pipeline_steps.txt",
1180
+ "source": "https://www.jenkins.io/doc/"
1181
+ },
1182
+ {
1183
+ "source_file": "pipeline_steps.txt",
1184
+ "source": "https://www.jenkins.io/doc/"
1185
+ },
1186
+ {
1187
+ "source_file": "pipeline_steps.txt",
1188
+ "source": "https://www.jenkins.io/doc/"
1189
+ },
1190
+ {
1191
+ "source_file": "pipeline_steps.txt",
1192
+ "source": "https://www.jenkins.io/doc/"
1193
+ },
1194
+ {
1195
+ "source_file": "pipeline_steps.txt",
1196
+ "source": "https://www.jenkins.io/doc/"
1197
+ },
1198
+ {
1199
+ "source_file": "pipeline_steps.txt",
1200
+ "source": "https://www.jenkins.io/doc/"
1201
+ },
1202
+ {
1203
+ "source_file": "pipeline_steps.txt",
1204
+ "source": "https://www.jenkins.io/doc/"
1205
+ },
1206
+ {
1207
+ "source_file": "pipeline_steps.txt",
1208
+ "source": "https://www.jenkins.io/doc/"
1209
+ },
1210
+ {
1211
+ "source_file": "pipeline_steps.txt",
1212
+ "source": "https://www.jenkins.io/doc/"
1213
+ },
1214
+ {
1215
+ "source_file": "pipeline_steps.txt",
1216
+ "source": "https://www.jenkins.io/doc/"
1217
+ },
1218
+ {
1219
+ "source_file": "pipeline_steps.txt",
1220
+ "source": "https://www.jenkins.io/doc/"
1221
+ },
1222
+ {
1223
+ "source_file": "pipeline_steps.txt",
1224
+ "source": "https://www.jenkins.io/doc/"
1225
+ },
1226
+ {
1227
+ "source_file": "pipeline_steps.txt",
1228
+ "source": "https://www.jenkins.io/doc/"
1229
+ },
1230
+ {
1231
+ "source_file": "pipeline_steps.txt",
1232
+ "source": "https://www.jenkins.io/doc/"
1233
+ },
1234
+ {
1235
+ "source_file": "pipeline_steps.txt",
1236
+ "source": "https://www.jenkins.io/doc/"
1237
+ },
1238
+ {
1239
+ "source_file": "pipeline_steps.txt",
1240
+ "source": "https://www.jenkins.io/doc/"
1241
+ },
1242
+ {
1243
+ "source_file": "pipeline_steps.txt",
1244
+ "source": "https://www.jenkins.io/doc/"
1245
+ },
1246
+ {
1247
+ "source_file": "pipeline_steps.txt",
1248
+ "source": "https://www.jenkins.io/doc/"
1249
+ },
1250
+ {
1251
+ "source_file": "pipeline_steps.txt",
1252
+ "source": "https://www.jenkins.io/doc/"
1253
+ },
1254
+ {
1255
+ "source_file": "pipeline_steps.txt",
1256
+ "source": "https://www.jenkins.io/doc/"
1257
+ },
1258
+ {
1259
+ "source_file": "pipeline_steps.txt",
1260
+ "source": "https://www.jenkins.io/doc/"
1261
+ },
1262
+ {
1263
+ "source_file": "pipeline_steps.txt",
1264
+ "source": "https://www.jenkins.io/doc/"
1265
+ },
1266
+ {
1267
+ "source_file": "pipeline_steps.txt",
1268
+ "source": "https://www.jenkins.io/doc/"
1269
+ },
1270
+ {
1271
+ "source_file": "pipeline_steps.txt",
1272
+ "source": "https://www.jenkins.io/doc/"
1273
+ },
1274
+ {
1275
+ "source_file": "pipeline_steps.txt",
1276
+ "source": "https://www.jenkins.io/doc/"
1277
+ },
1278
+ {
1279
+ "source_file": "pipeline_steps.txt",
1280
+ "source": "https://www.jenkins.io/doc/"
1281
+ },
1282
+ {
1283
+ "source_file": "pipeline_steps.txt",
1284
+ "source": "https://www.jenkins.io/doc/"
1285
+ },
1286
+ {
1287
+ "source_file": "pipeline_steps.txt",
1288
+ "source": "https://www.jenkins.io/doc/"
1289
+ },
1290
+ {
1291
+ "source_file": "pipeline_steps.txt",
1292
+ "source": "https://www.jenkins.io/doc/"
1293
+ },
1294
+ {
1295
+ "source_file": "pipeline_steps.txt",
1296
+ "source": "https://www.jenkins.io/doc/"
1297
+ },
1298
+ {
1299
+ "source_file": "pipeline_steps.txt",
1300
+ "source": "https://www.jenkins.io/doc/"
1301
+ },
1302
+ {
1303
+ "source_file": "pipeline_steps.txt",
1304
+ "source": "https://www.jenkins.io/doc/"
1305
+ },
1306
+ {
1307
+ "source_file": "pipeline_steps.txt",
1308
+ "source": "https://www.jenkins.io/doc/"
1309
+ },
1310
+ {
1311
+ "source_file": "pipeline_steps.txt",
1312
+ "source": "https://www.jenkins.io/doc/"
1313
+ },
1314
+ {
1315
+ "source_file": "pipeline_steps.txt",
1316
+ "source": "https://www.jenkins.io/doc/"
1317
+ },
1318
+ {
1319
+ "source_file": "pipeline_steps.txt",
1320
+ "source": "https://www.jenkins.io/doc/"
1321
+ },
1322
+ {
1323
+ "source_file": "pipeline_steps.txt",
1324
+ "source": "https://www.jenkins.io/doc/"
1325
+ },
1326
+ {
1327
+ "source_file": "pipeline_steps.txt",
1328
+ "source": "https://www.jenkins.io/doc/"
1329
+ },
1330
+ {
1331
+ "source_file": "pipeline_steps.txt",
1332
+ "source": "https://www.jenkins.io/doc/"
1333
+ },
1334
+ {
1335
+ "source_file": "pipeline_steps.txt",
1336
+ "source": "https://www.jenkins.io/doc/"
1337
+ },
1338
+ {
1339
+ "source_file": "pipeline_steps.txt",
1340
+ "source": "https://www.jenkins.io/doc/"
1341
+ },
1342
+ {
1343
+ "source_file": "pipeline_steps.txt",
1344
+ "source": "https://www.jenkins.io/doc/"
1345
+ },
1346
+ {
1347
+ "source_file": "pipeline_steps.txt",
1348
+ "source": "https://www.jenkins.io/doc/"
1349
+ },
1350
+ {
1351
+ "source_file": "pipeline_steps.txt",
1352
+ "source": "https://www.jenkins.io/doc/"
1353
+ },
1354
+ {
1355
+ "source_file": "pipeline_steps.txt",
1356
+ "source": "https://www.jenkins.io/doc/"
1357
+ },
1358
+ {
1359
+ "source_file": "pipeline_steps.txt",
1360
+ "source": "https://www.jenkins.io/doc/"
1361
+ },
1362
+ {
1363
+ "source_file": "pipeline_steps.txt",
1364
+ "source": "https://www.jenkins.io/doc/"
1365
+ },
1366
+ {
1367
+ "source_file": "pipeline_steps.txt",
1368
+ "source": "https://www.jenkins.io/doc/"
1369
+ },
1370
+ {
1371
+ "source_file": "pipeline_steps.txt",
1372
+ "source": "https://www.jenkins.io/doc/"
1373
+ },
1374
+ {
1375
+ "source_file": "pipeline_syntax.txt",
1376
+ "source": "https://www.jenkins.io/doc/"
1377
+ },
1378
+ {
1379
+ "source_file": "pipeline_syntax.txt",
1380
+ "source": "https://www.jenkins.io/doc/"
1381
+ },
1382
+ {
1383
+ "source_file": "pipeline_syntax.txt",
1384
+ "source": "https://www.jenkins.io/doc/"
1385
+ },
1386
+ {
1387
+ "source_file": "pipeline_syntax.txt",
1388
+ "source": "https://www.jenkins.io/doc/"
1389
+ },
1390
+ {
1391
+ "source_file": "pipeline_syntax.txt",
1392
+ "source": "https://www.jenkins.io/doc/"
1393
+ },
1394
+ {
1395
+ "source_file": "pipeline_syntax.txt",
1396
+ "source": "https://www.jenkins.io/doc/"
1397
+ },
1398
+ {
1399
+ "source_file": "pipeline_syntax.txt",
1400
+ "source": "https://www.jenkins.io/doc/"
1401
+ },
1402
+ {
1403
+ "source_file": "pipeline_syntax.txt",
1404
+ "source": "https://www.jenkins.io/doc/"
1405
+ },
1406
+ {
1407
+ "source_file": "pipeline_syntax.txt",
1408
+ "source": "https://www.jenkins.io/doc/"
1409
+ },
1410
+ {
1411
+ "source_file": "pipeline_syntax.txt",
1412
+ "source": "https://www.jenkins.io/doc/"
1413
+ },
1414
+ {
1415
+ "source_file": "pipeline_syntax.txt",
1416
+ "source": "https://www.jenkins.io/doc/"
1417
+ },
1418
+ {
1419
+ "source_file": "pipeline_syntax.txt",
1420
+ "source": "https://www.jenkins.io/doc/"
1421
+ },
1422
+ {
1423
+ "source_file": "pipeline_syntax.txt",
1424
+ "source": "https://www.jenkins.io/doc/"
1425
+ },
1426
+ {
1427
+ "source_file": "pipeline_syntax.txt",
1428
+ "source": "https://www.jenkins.io/doc/"
1429
+ },
1430
+ {
1431
+ "source_file": "pipeline_syntax.txt",
1432
+ "source": "https://www.jenkins.io/doc/"
1433
+ },
1434
+ {
1435
+ "source_file": "pipeline_syntax.txt",
1436
+ "source": "https://www.jenkins.io/doc/"
1437
+ },
1438
+ {
1439
+ "source_file": "pipeline_syntax.txt",
1440
+ "source": "https://www.jenkins.io/doc/"
1441
+ },
1442
+ {
1443
+ "source_file": "pipeline_syntax.txt",
1444
+ "source": "https://www.jenkins.io/doc/"
1445
+ },
1446
+ {
1447
+ "source_file": "pipeline_syntax.txt",
1448
+ "source": "https://www.jenkins.io/doc/"
1449
+ },
1450
+ {
1451
+ "source_file": "pipeline_syntax.txt",
1452
+ "source": "https://www.jenkins.io/doc/"
1453
+ },
1454
+ {
1455
+ "source_file": "pipeline_syntax.txt",
1456
+ "source": "https://www.jenkins.io/doc/"
1457
+ },
1458
+ {
1459
+ "source_file": "pipeline_syntax.txt",
1460
+ "source": "https://www.jenkins.io/doc/"
1461
+ },
1462
+ {
1463
+ "source_file": "pipeline_syntax.txt",
1464
+ "source": "https://www.jenkins.io/doc/"
1465
+ },
1466
+ {
1467
+ "source_file": "pipeline_syntax.txt",
1468
+ "source": "https://www.jenkins.io/doc/"
1469
+ },
1470
+ {
1471
+ "source_file": "pipeline_syntax.txt",
1472
+ "source": "https://www.jenkins.io/doc/"
1473
+ },
1474
+ {
1475
+ "source_file": "pipeline_syntax.txt",
1476
+ "source": "https://www.jenkins.io/doc/"
1477
+ },
1478
+ {
1479
+ "source_file": "pipeline_syntax.txt",
1480
+ "source": "https://www.jenkins.io/doc/"
1481
+ },
1482
+ {
1483
+ "source_file": "pipeline_syntax.txt",
1484
+ "source": "https://www.jenkins.io/doc/"
1485
+ },
1486
+ {
1487
+ "source_file": "pipeline_syntax.txt",
1488
+ "source": "https://www.jenkins.io/doc/"
1489
+ },
1490
+ {
1491
+ "source_file": "pipeline_syntax.txt",
1492
+ "source": "https://www.jenkins.io/doc/"
1493
+ },
1494
+ {
1495
+ "source_file": "pipeline_syntax.txt",
1496
+ "source": "https://www.jenkins.io/doc/"
1497
+ },
1498
+ {
1499
+ "source_file": "pipeline_syntax.txt",
1500
+ "source": "https://www.jenkins.io/doc/"
1501
+ },
1502
+ {
1503
+ "source_file": "pipeline_syntax.txt",
1504
+ "source": "https://www.jenkins.io/doc/"
1505
+ },
1506
+ {
1507
+ "source_file": "pipeline_syntax.txt",
1508
+ "source": "https://www.jenkins.io/doc/"
1509
+ },
1510
+ {
1511
+ "source_file": "pipeline_syntax.txt",
1512
+ "source": "https://www.jenkins.io/doc/"
1513
+ },
1514
+ {
1515
+ "source_file": "pipeline_syntax.txt",
1516
+ "source": "https://www.jenkins.io/doc/"
1517
+ },
1518
+ {
1519
+ "source_file": "pipeline_syntax.txt",
1520
+ "source": "https://www.jenkins.io/doc/"
1521
+ },
1522
+ {
1523
+ "source_file": "pipeline_syntax.txt",
1524
+ "source": "https://www.jenkins.io/doc/"
1525
+ },
1526
+ {
1527
+ "source_file": "pipeline_syntax.txt",
1528
+ "source": "https://www.jenkins.io/doc/"
1529
+ },
1530
+ {
1531
+ "source_file": "pipeline_syntax.txt",
1532
+ "source": "https://www.jenkins.io/doc/"
1533
+ },
1534
+ {
1535
+ "source_file": "pipeline_syntax.txt",
1536
+ "source": "https://www.jenkins.io/doc/"
1537
+ },
1538
+ {
1539
+ "source_file": "pipeline_syntax.txt",
1540
+ "source": "https://www.jenkins.io/doc/"
1541
+ },
1542
+ {
1543
+ "source_file": "pipeline_syntax.txt",
1544
+ "source": "https://www.jenkins.io/doc/"
1545
+ },
1546
+ {
1547
+ "source_file": "pipeline_syntax.txt",
1548
+ "source": "https://www.jenkins.io/doc/"
1549
+ },
1550
+ {
1551
+ "source_file": "pipeline_syntax.txt",
1552
+ "source": "https://www.jenkins.io/doc/"
1553
+ },
1554
+ {
1555
+ "source_file": "pipeline_syntax.txt",
1556
+ "source": "https://www.jenkins.io/doc/"
1557
+ },
1558
+ {
1559
+ "source_file": "pipeline_syntax.txt",
1560
+ "source": "https://www.jenkins.io/doc/"
1561
+ },
1562
+ {
1563
+ "source_file": "pipeline_syntax.txt",
1564
+ "source": "https://www.jenkins.io/doc/"
1565
+ },
1566
+ {
1567
+ "source_file": "pipeline_syntax.txt",
1568
+ "source": "https://www.jenkins.io/doc/"
1569
+ },
1570
+ {
1571
+ "source_file": "pipeline_syntax.txt",
1572
+ "source": "https://www.jenkins.io/doc/"
1573
+ },
1574
+ {
1575
+ "source_file": "pipeline_syntax.txt",
1576
+ "source": "https://www.jenkins.io/doc/"
1577
+ },
1578
+ {
1579
+ "source_file": "pipeline_syntax.txt",
1580
+ "source": "https://www.jenkins.io/doc/"
1581
+ },
1582
+ {
1583
+ "source_file": "pipeline_syntax.txt",
1584
+ "source": "https://www.jenkins.io/doc/"
1585
+ },
1586
+ {
1587
+ "source_file": "pipeline_syntax.txt",
1588
+ "source": "https://www.jenkins.io/doc/"
1589
+ },
1590
+ {
1591
+ "source_file": "pipeline_syntax.txt",
1592
+ "source": "https://www.jenkins.io/doc/"
1593
+ },
1594
+ {
1595
+ "source_file": "pipeline_syntax.txt",
1596
+ "source": "https://www.jenkins.io/doc/"
1597
+ },
1598
+ {
1599
+ "source_file": "pipeline_syntax.txt",
1600
+ "source": "https://www.jenkins.io/doc/"
1601
+ },
1602
+ {
1603
+ "source_file": "pipeline_syntax.txt",
1604
+ "source": "https://www.jenkins.io/doc/"
1605
+ },
1606
+ {
1607
+ "source_file": "pipeline_syntax.txt",
1608
+ "source": "https://www.jenkins.io/doc/"
1609
+ },
1610
+ {
1611
+ "source_file": "pipeline_syntax.txt",
1612
+ "source": "https://www.jenkins.io/doc/"
1613
+ },
1614
+ {
1615
+ "source_file": "pipeline_syntax.txt",
1616
+ "source": "https://www.jenkins.io/doc/"
1617
+ },
1618
+ {
1619
+ "source_file": "pipeline_syntax.txt",
1620
+ "source": "https://www.jenkins.io/doc/"
1621
+ },
1622
+ {
1623
+ "source_file": "pipeline_syntax.txt",
1624
+ "source": "https://www.jenkins.io/doc/"
1625
+ },
1626
+ {
1627
+ "source_file": "pipeline_syntax.txt",
1628
+ "source": "https://www.jenkins.io/doc/"
1629
+ },
1630
+ {
1631
+ "source_file": "pipeline_syntax.txt",
1632
+ "source": "https://www.jenkins.io/doc/"
1633
+ },
1634
+ {
1635
+ "source_file": "pipeline_syntax.txt",
1636
+ "source": "https://www.jenkins.io/doc/"
1637
+ },
1638
+ {
1639
+ "source_file": "pipeline_syntax.txt",
1640
+ "source": "https://www.jenkins.io/doc/"
1641
+ },
1642
+ {
1643
+ "source_file": "pipeline_syntax.txt",
1644
+ "source": "https://www.jenkins.io/doc/"
1645
+ },
1646
+ {
1647
+ "source_file": "pipeline_syntax.txt",
1648
+ "source": "https://www.jenkins.io/doc/"
1649
+ },
1650
+ {
1651
+ "source_file": "pipeline_syntax.txt",
1652
+ "source": "https://www.jenkins.io/doc/"
1653
+ },
1654
+ {
1655
+ "source_file": "pipeline_syntax.txt",
1656
+ "source": "https://www.jenkins.io/doc/"
1657
+ },
1658
+ {
1659
+ "source_file": "pipeline_syntax.txt",
1660
+ "source": "https://www.jenkins.io/doc/"
1661
+ },
1662
+ {
1663
+ "source_file": "pipeline_syntax.txt",
1664
+ "source": "https://www.jenkins.io/doc/"
1665
+ },
1666
+ {
1667
+ "source_file": "pipeline_syntax.txt",
1668
+ "source": "https://www.jenkins.io/doc/"
1669
+ },
1670
+ {
1671
+ "source_file": "pipeline_syntax.txt",
1672
+ "source": "https://www.jenkins.io/doc/"
1673
+ },
1674
+ {
1675
+ "source_file": "pipeline_syntax.txt",
1676
+ "source": "https://www.jenkins.io/doc/"
1677
+ },
1678
+ {
1679
+ "source_file": "pipeline_syntax.txt",
1680
+ "source": "https://www.jenkins.io/doc/"
1681
+ },
1682
+ {
1683
+ "source_file": "pipeline_syntax.txt",
1684
+ "source": "https://www.jenkins.io/doc/"
1685
+ },
1686
+ {
1687
+ "source_file": "pipeline_syntax.txt",
1688
+ "source": "https://www.jenkins.io/doc/"
1689
+ },
1690
+ {
1691
+ "source_file": "pipeline_syntax.txt",
1692
+ "source": "https://www.jenkins.io/doc/"
1693
+ },
1694
+ {
1695
+ "source_file": "pipeline_syntax.txt",
1696
+ "source": "https://www.jenkins.io/doc/"
1697
+ },
1698
+ {
1699
+ "source_file": "pipeline_syntax.txt",
1700
+ "source": "https://www.jenkins.io/doc/"
1701
+ },
1702
+ {
1703
+ "source_file": "pipeline_syntax.txt",
1704
+ "source": "https://www.jenkins.io/doc/"
1705
+ },
1706
+ {
1707
+ "source_file": "pipeline_syntax.txt",
1708
+ "source": "https://www.jenkins.io/doc/"
1709
+ },
1710
+ {
1711
+ "source_file": "pipeline_syntax.txt",
1712
+ "source": "https://www.jenkins.io/doc/"
1713
+ },
1714
+ {
1715
+ "source_file": "pipeline_syntax.txt",
1716
+ "source": "https://www.jenkins.io/doc/"
1717
+ },
1718
+ {
1719
+ "source_file": "pipeline_syntax.txt",
1720
+ "source": "https://www.jenkins.io/doc/"
1721
+ },
1722
+ {
1723
+ "source_file": "pipeline_syntax.txt",
1724
+ "source": "https://www.jenkins.io/doc/"
1725
+ },
1726
+ {
1727
+ "source_file": "pipeline_syntax.txt",
1728
+ "source": "https://www.jenkins.io/doc/"
1729
+ },
1730
+ {
1731
+ "source_file": "pipeline_syntax.txt",
1732
+ "source": "https://www.jenkins.io/doc/"
1733
+ },
1734
+ {
1735
+ "source_file": "pipeline_syntax.txt",
1736
+ "source": "https://www.jenkins.io/doc/"
1737
+ },
1738
+ {
1739
+ "source_file": "pipeline_syntax.txt",
1740
+ "source": "https://www.jenkins.io/doc/"
1741
+ },
1742
+ {
1743
+ "source_file": "pipeline_syntax.txt",
1744
+ "source": "https://www.jenkins.io/doc/"
1745
+ },
1746
+ {
1747
+ "source_file": "pipeline_syntax.txt",
1748
+ "source": "https://www.jenkins.io/doc/"
1749
+ },
1750
+ {
1751
+ "source_file": "pipeline_syntax.txt",
1752
+ "source": "https://www.jenkins.io/doc/"
1753
+ },
1754
+ {
1755
+ "source_file": "pipeline_syntax.txt",
1756
+ "source": "https://www.jenkins.io/doc/"
1757
+ },
1758
+ {
1759
+ "source_file": "pipeline_syntax.txt",
1760
+ "source": "https://www.jenkins.io/doc/"
1761
+ },
1762
+ {
1763
+ "source_file": "pipeline_syntax.txt",
1764
+ "source": "https://www.jenkins.io/doc/"
1765
+ },
1766
+ {
1767
+ "source_file": "pipeline_syntax.txt",
1768
+ "source": "https://www.jenkins.io/doc/"
1769
+ },
1770
+ {
1771
+ "source_file": "pipeline_syntax.txt",
1772
+ "source": "https://www.jenkins.io/doc/"
1773
+ },
1774
+ {
1775
+ "source_file": "pipeline_syntax.txt",
1776
+ "source": "https://www.jenkins.io/doc/"
1777
+ },
1778
+ {
1779
+ "source_file": "pipeline_syntax.txt",
1780
+ "source": "https://www.jenkins.io/doc/"
1781
+ },
1782
+ {
1783
+ "source_file": "pipeline_syntax.txt",
1784
+ "source": "https://www.jenkins.io/doc/"
1785
+ },
1786
+ {
1787
+ "source_file": "pipeline_syntax.txt",
1788
+ "source": "https://www.jenkins.io/doc/"
1789
+ },
1790
+ {
1791
+ "source_file": "pipeline_syntax.txt",
1792
+ "source": "https://www.jenkins.io/doc/"
1793
+ },
1794
+ {
1795
+ "source_file": "pipeline_syntax.txt",
1796
+ "source": "https://www.jenkins.io/doc/"
1797
+ },
1798
+ {
1799
+ "source_file": "pipeline_syntax.txt",
1800
+ "source": "https://www.jenkins.io/doc/"
1801
+ },
1802
+ {
1803
+ "source_file": "pipeline_syntax.txt",
1804
+ "source": "https://www.jenkins.io/doc/"
1805
+ },
1806
+ {
1807
+ "source_file": "pipeline_syntax.txt",
1808
+ "source": "https://www.jenkins.io/doc/"
1809
+ },
1810
+ {
1811
+ "source_file": "pipeline_syntax.txt",
1812
+ "source": "https://www.jenkins.io/doc/"
1813
+ },
1814
+ {
1815
+ "source_file": "pipeline_syntax.txt",
1816
+ "source": "https://www.jenkins.io/doc/"
1817
+ },
1818
+ {
1819
+ "source_file": "pipeline_syntax.txt",
1820
+ "source": "https://www.jenkins.io/doc/"
1821
+ },
1822
+ {
1823
+ "source_file": "pipeline_syntax.txt",
1824
+ "source": "https://www.jenkins.io/doc/"
1825
+ },
1826
+ {
1827
+ "source_file": "pipeline_syntax.txt",
1828
+ "source": "https://www.jenkins.io/doc/"
1829
+ },
1830
+ {
1831
+ "source_file": "pipeline_syntax.txt",
1832
+ "source": "https://www.jenkins.io/doc/"
1833
+ },
1834
+ {
1835
+ "source_file": "pipeline_syntax.txt",
1836
+ "source": "https://www.jenkins.io/doc/"
1837
+ },
1838
+ {
1839
+ "source_file": "pipeline_syntax.txt",
1840
+ "source": "https://www.jenkins.io/doc/"
1841
+ },
1842
+ {
1843
+ "source_file": "pipeline_syntax.txt",
1844
+ "source": "https://www.jenkins.io/doc/"
1845
+ },
1846
+ {
1847
+ "source_file": "pipeline_syntax.txt",
1848
+ "source": "https://www.jenkins.io/doc/"
1849
+ },
1850
+ {
1851
+ "source_file": "pipeline_syntax.txt",
1852
+ "source": "https://www.jenkins.io/doc/"
1853
+ },
1854
+ {
1855
+ "source_file": "pipeline_syntax.txt",
1856
+ "source": "https://www.jenkins.io/doc/"
1857
+ },
1858
+ {
1859
+ "source_file": "pipeline_syntax.txt",
1860
+ "source": "https://www.jenkins.io/doc/"
1861
+ },
1862
+ {
1863
+ "source_file": "pipeline_syntax.txt",
1864
+ "source": "https://www.jenkins.io/doc/"
1865
+ },
1866
+ {
1867
+ "source_file": "pipeline_syntax.txt",
1868
+ "source": "https://www.jenkins.io/doc/"
1869
+ },
1870
+ {
1871
+ "source_file": "pipeline_syntax.txt",
1872
+ "source": "https://www.jenkins.io/doc/"
1873
+ },
1874
+ {
1875
+ "source_file": "pipeline_syntax.txt",
1876
+ "source": "https://www.jenkins.io/doc/"
1877
+ },
1878
+ {
1879
+ "source_file": "pipeline_syntax.txt",
1880
+ "source": "https://www.jenkins.io/doc/"
1881
+ },
1882
+ {
1883
+ "source_file": "pipeline_syntax.txt",
1884
+ "source": "https://www.jenkins.io/doc/"
1885
+ },
1886
+ {
1887
+ "source_file": "pipeline_syntax.txt",
1888
+ "source": "https://www.jenkins.io/doc/"
1889
+ },
1890
+ {
1891
+ "source_file": "pipeline_syntax.txt",
1892
+ "source": "https://www.jenkins.io/doc/"
1893
+ },
1894
+ {
1895
+ "source_file": "pipeline_syntax.txt",
1896
+ "source": "https://www.jenkins.io/doc/"
1897
+ },
1898
+ {
1899
+ "source_file": "pipeline_syntax.txt",
1900
+ "source": "https://www.jenkins.io/doc/"
1901
+ },
1902
+ {
1903
+ "source_file": "pipeline_syntax.txt",
1904
+ "source": "https://www.jenkins.io/doc/"
1905
+ },
1906
+ {
1907
+ "source_file": "pipeline_syntax.txt",
1908
+ "source": "https://www.jenkins.io/doc/"
1909
+ },
1910
+ {
1911
+ "source_file": "pipeline_syntax.txt",
1912
+ "source": "https://www.jenkins.io/doc/"
1913
+ },
1914
+ {
1915
+ "source_file": "pipeline_syntax.txt",
1916
+ "source": "https://www.jenkins.io/doc/"
1917
+ },
1918
+ {
1919
+ "source_file": "pipeline_syntax.txt",
1920
+ "source": "https://www.jenkins.io/doc/"
1921
+ },
1922
+ {
1923
+ "source_file": "pipeline_syntax.txt",
1924
+ "source": "https://www.jenkins.io/doc/"
1925
+ },
1926
+ {
1927
+ "source_file": "pipeline_syntax.txt",
1928
+ "source": "https://www.jenkins.io/doc/"
1929
+ },
1930
+ {
1931
+ "source_file": "pipeline_syntax.txt",
1932
+ "source": "https://www.jenkins.io/doc/"
1933
+ },
1934
+ {
1935
+ "source_file": "pipeline_syntax.txt",
1936
+ "source": "https://www.jenkins.io/doc/"
1937
+ },
1938
+ {
1939
+ "source_file": "pipeline_syntax.txt",
1940
+ "source": "https://www.jenkins.io/doc/"
1941
+ },
1942
+ {
1943
+ "source_file": "pipeline_syntax.txt",
1944
+ "source": "https://www.jenkins.io/doc/"
1945
+ },
1946
+ {
1947
+ "source_file": "pipeline_syntax.txt",
1948
+ "source": "https://www.jenkins.io/doc/"
1949
+ },
1950
+ {
1951
+ "source_file": "pipeline_syntax.txt",
1952
+ "source": "https://www.jenkins.io/doc/"
1953
+ },
1954
+ {
1955
+ "source_file": "pipeline_syntax.txt",
1956
+ "source": "https://www.jenkins.io/doc/"
1957
+ },
1958
+ {
1959
+ "source_file": "pipeline_syntax.txt",
1960
+ "source": "https://www.jenkins.io/doc/"
1961
+ },
1962
+ {
1963
+ "source_file": "pipeline_syntax.txt",
1964
+ "source": "https://www.jenkins.io/doc/"
1965
+ },
1966
+ {
1967
+ "source_file": "pipeline_syntax.txt",
1968
+ "source": "https://www.jenkins.io/doc/"
1969
+ },
1970
+ {
1971
+ "source_file": "pipeline_syntax.txt",
1972
+ "source": "https://www.jenkins.io/doc/"
1973
+ },
1974
+ {
1975
+ "source_file": "pipeline_syntax.txt",
1976
+ "source": "https://www.jenkins.io/doc/"
1977
+ },
1978
+ {
1979
+ "source_file": "pipeline_syntax.txt",
1980
+ "source": "https://www.jenkins.io/doc/"
1981
+ },
1982
+ {
1983
+ "source_file": "pipeline_syntax.txt",
1984
+ "source": "https://www.jenkins.io/doc/"
1985
+ },
1986
+ {
1987
+ "source_file": "pipeline_syntax.txt",
1988
+ "source": "https://www.jenkins.io/doc/"
1989
+ },
1990
+ {
1991
+ "source_file": "pipeline_syntax.txt",
1992
+ "source": "https://www.jenkins.io/doc/"
1993
+ },
1994
+ {
1995
+ "source_file": "pipeline_syntax.txt",
1996
+ "source": "https://www.jenkins.io/doc/"
1997
+ },
1998
+ {
1999
+ "source_file": "pipeline_syntax.txt",
2000
+ "source": "https://www.jenkins.io/doc/"
2001
+ },
2002
+ {
2003
+ "source_file": "pipeline_syntax.txt",
2004
+ "source": "https://www.jenkins.io/doc/"
2005
+ },
2006
+ {
2007
+ "source_file": "pipeline_syntax.txt",
2008
+ "source": "https://www.jenkins.io/doc/"
2009
+ },
2010
+ {
2011
+ "source_file": "pipeline_syntax.txt",
2012
+ "source": "https://www.jenkins.io/doc/"
2013
+ },
2014
+ {
2015
+ "source_file": "pipeline_syntax.txt",
2016
+ "source": "https://www.jenkins.io/doc/"
2017
+ },
2018
+ {
2019
+ "source_file": "pipeline_syntax.txt",
2020
+ "source": "https://www.jenkins.io/doc/"
2021
+ },
2022
+ {
2023
+ "source_file": "using_a_jenkinsfile.txt",
2024
+ "source": "https://www.jenkins.io/doc/"
2025
+ },
2026
+ {
2027
+ "source_file": "using_a_jenkinsfile.txt",
2028
+ "source": "https://www.jenkins.io/doc/"
2029
+ },
2030
+ {
2031
+ "source_file": "using_a_jenkinsfile.txt",
2032
+ "source": "https://www.jenkins.io/doc/"
2033
+ },
2034
+ {
2035
+ "source_file": "using_a_jenkinsfile.txt",
2036
+ "source": "https://www.jenkins.io/doc/"
2037
+ },
2038
+ {
2039
+ "source_file": "using_a_jenkinsfile.txt",
2040
+ "source": "https://www.jenkins.io/doc/"
2041
+ },
2042
+ {
2043
+ "source_file": "using_a_jenkinsfile.txt",
2044
+ "source": "https://www.jenkins.io/doc/"
2045
+ },
2046
+ {
2047
+ "source_file": "using_a_jenkinsfile.txt",
2048
+ "source": "https://www.jenkins.io/doc/"
2049
+ },
2050
+ {
2051
+ "source_file": "using_a_jenkinsfile.txt",
2052
+ "source": "https://www.jenkins.io/doc/"
2053
+ },
2054
+ {
2055
+ "source_file": "using_a_jenkinsfile.txt",
2056
+ "source": "https://www.jenkins.io/doc/"
2057
+ },
2058
+ {
2059
+ "source_file": "using_a_jenkinsfile.txt",
2060
+ "source": "https://www.jenkins.io/doc/"
2061
+ },
2062
+ {
2063
+ "source_file": "using_a_jenkinsfile.txt",
2064
+ "source": "https://www.jenkins.io/doc/"
2065
+ },
2066
+ {
2067
+ "source_file": "using_a_jenkinsfile.txt",
2068
+ "source": "https://www.jenkins.io/doc/"
2069
+ },
2070
+ {
2071
+ "source_file": "using_a_jenkinsfile.txt",
2072
+ "source": "https://www.jenkins.io/doc/"
2073
+ },
2074
+ {
2075
+ "source_file": "using_a_jenkinsfile.txt",
2076
+ "source": "https://www.jenkins.io/doc/"
2077
+ },
2078
+ {
2079
+ "source_file": "using_a_jenkinsfile.txt",
2080
+ "source": "https://www.jenkins.io/doc/"
2081
+ },
2082
+ {
2083
+ "source_file": "using_a_jenkinsfile.txt",
2084
+ "source": "https://www.jenkins.io/doc/"
2085
+ },
2086
+ {
2087
+ "source_file": "using_a_jenkinsfile.txt",
2088
+ "source": "https://www.jenkins.io/doc/"
2089
+ },
2090
+ {
2091
+ "source_file": "using_a_jenkinsfile.txt",
2092
+ "source": "https://www.jenkins.io/doc/"
2093
+ },
2094
+ {
2095
+ "source_file": "using_a_jenkinsfile.txt",
2096
+ "source": "https://www.jenkins.io/doc/"
2097
+ },
2098
+ {
2099
+ "source_file": "using_a_jenkinsfile.txt",
2100
+ "source": "https://www.jenkins.io/doc/"
2101
+ },
2102
+ {
2103
+ "source_file": "using_a_jenkinsfile.txt",
2104
+ "source": "https://www.jenkins.io/doc/"
2105
+ },
2106
+ {
2107
+ "source_file": "using_a_jenkinsfile.txt",
2108
+ "source": "https://www.jenkins.io/doc/"
2109
+ },
2110
+ {
2111
+ "source_file": "using_a_jenkinsfile.txt",
2112
+ "source": "https://www.jenkins.io/doc/"
2113
+ },
2114
+ {
2115
+ "source_file": "using_a_jenkinsfile.txt",
2116
+ "source": "https://www.jenkins.io/doc/"
2117
+ },
2118
+ {
2119
+ "source_file": "using_a_jenkinsfile.txt",
2120
+ "source": "https://www.jenkins.io/doc/"
2121
+ },
2122
+ {
2123
+ "source_file": "using_a_jenkinsfile.txt",
2124
+ "source": "https://www.jenkins.io/doc/"
2125
+ },
2126
+ {
2127
+ "source_file": "using_a_jenkinsfile.txt",
2128
+ "source": "https://www.jenkins.io/doc/"
2129
+ },
2130
+ {
2131
+ "source_file": "using_a_jenkinsfile.txt",
2132
+ "source": "https://www.jenkins.io/doc/"
2133
+ },
2134
+ {
2135
+ "source_file": "using_a_jenkinsfile.txt",
2136
+ "source": "https://www.jenkins.io/doc/"
2137
+ },
2138
+ {
2139
+ "source_file": "using_a_jenkinsfile.txt",
2140
+ "source": "https://www.jenkins.io/doc/"
2141
+ },
2142
+ {
2143
+ "source_file": "using_a_jenkinsfile.txt",
2144
+ "source": "https://www.jenkins.io/doc/"
2145
+ },
2146
+ {
2147
+ "source_file": "using_a_jenkinsfile.txt",
2148
+ "source": "https://www.jenkins.io/doc/"
2149
+ },
2150
+ {
2151
+ "source_file": "using_a_jenkinsfile.txt",
2152
+ "source": "https://www.jenkins.io/doc/"
2153
+ },
2154
+ {
2155
+ "source_file": "using_a_jenkinsfile.txt",
2156
+ "source": "https://www.jenkins.io/doc/"
2157
+ },
2158
+ {
2159
+ "source_file": "using_a_jenkinsfile.txt",
2160
+ "source": "https://www.jenkins.io/doc/"
2161
+ },
2162
+ {
2163
+ "source_file": "using_a_jenkinsfile.txt",
2164
+ "source": "https://www.jenkins.io/doc/"
2165
+ },
2166
+ {
2167
+ "source_file": "using_a_jenkinsfile.txt",
2168
+ "source": "https://www.jenkins.io/doc/"
2169
+ },
2170
+ {
2171
+ "source_file": "using_a_jenkinsfile.txt",
2172
+ "source": "https://www.jenkins.io/doc/"
2173
+ },
2174
+ {
2175
+ "source_file": "using_a_jenkinsfile.txt",
2176
+ "source": "https://www.jenkins.io/doc/"
2177
+ },
2178
+ {
2179
+ "source_file": "using_a_jenkinsfile.txt",
2180
+ "source": "https://www.jenkins.io/doc/"
2181
+ },
2182
+ {
2183
+ "source_file": "using_a_jenkinsfile.txt",
2184
+ "source": "https://www.jenkins.io/doc/"
2185
+ },
2186
+ {
2187
+ "source_file": "using_a_jenkinsfile.txt",
2188
+ "source": "https://www.jenkins.io/doc/"
2189
+ },
2190
+ {
2191
+ "source_file": "using_a_jenkinsfile.txt",
2192
+ "source": "https://www.jenkins.io/doc/"
2193
+ },
2194
+ {
2195
+ "source_file": "using_a_jenkinsfile.txt",
2196
+ "source": "https://www.jenkins.io/doc/"
2197
+ },
2198
+ {
2199
+ "source_file": "using_a_jenkinsfile.txt",
2200
+ "source": "https://www.jenkins.io/doc/"
2201
+ },
2202
+ {
2203
+ "source_file": "using_a_jenkinsfile.txt",
2204
+ "source": "https://www.jenkins.io/doc/"
2205
+ },
2206
+ {
2207
+ "source_file": "using_a_jenkinsfile.txt",
2208
+ "source": "https://www.jenkins.io/doc/"
2209
+ },
2210
+ {
2211
+ "source_file": "using_a_jenkinsfile.txt",
2212
+ "source": "https://www.jenkins.io/doc/"
2213
+ },
2214
+ {
2215
+ "source_file": "using_a_jenkinsfile.txt",
2216
+ "source": "https://www.jenkins.io/doc/"
2217
+ },
2218
+ {
2219
+ "source_file": "using_a_jenkinsfile.txt",
2220
+ "source": "https://www.jenkins.io/doc/"
2221
+ },
2222
+ {
2223
+ "source_file": "using_a_jenkinsfile.txt",
2224
+ "source": "https://www.jenkins.io/doc/"
2225
+ },
2226
+ {
2227
+ "source_file": "using_a_jenkinsfile.txt",
2228
+ "source": "https://www.jenkins.io/doc/"
2229
+ },
2230
+ {
2231
+ "source_file": "using_a_jenkinsfile.txt",
2232
+ "source": "https://www.jenkins.io/doc/"
2233
+ },
2234
+ {
2235
+ "source_file": "using_a_jenkinsfile.txt",
2236
+ "source": "https://www.jenkins.io/doc/"
2237
+ },
2238
+ {
2239
+ "source_file": "using_a_jenkinsfile.txt",
2240
+ "source": "https://www.jenkins.io/doc/"
2241
+ },
2242
+ {
2243
+ "source_file": "using_a_jenkinsfile.txt",
2244
+ "source": "https://www.jenkins.io/doc/"
2245
+ },
2246
+ {
2247
+ "source_file": "using_a_jenkinsfile.txt",
2248
+ "source": "https://www.jenkins.io/doc/"
2249
+ },
2250
+ {
2251
+ "source_file": "using_a_jenkinsfile.txt",
2252
+ "source": "https://www.jenkins.io/doc/"
2253
+ },
2254
+ {
2255
+ "source_file": "using_a_jenkinsfile.txt",
2256
+ "source": "https://www.jenkins.io/doc/"
2257
+ },
2258
+ {
2259
+ "source_file": "using_a_jenkinsfile.txt",
2260
+ "source": "https://www.jenkins.io/doc/"
2261
+ },
2262
+ {
2263
+ "source_file": "using_a_jenkinsfile.txt",
2264
+ "source": "https://www.jenkins.io/doc/"
2265
+ },
2266
+ {
2267
+ "source_file": "using_a_jenkinsfile.txt",
2268
+ "source": "https://www.jenkins.io/doc/"
2269
+ },
2270
+ {
2271
+ "source_file": "using_a_jenkinsfile.txt",
2272
+ "source": "https://www.jenkins.io/doc/"
2273
+ },
2274
+ {
2275
+ "source_file": "using_a_jenkinsfile.txt",
2276
+ "source": "https://www.jenkins.io/doc/"
2277
+ },
2278
+ {
2279
+ "source_file": "using_a_jenkinsfile.txt",
2280
+ "source": "https://www.jenkins.io/doc/"
2281
+ },
2282
+ {
2283
+ "source_file": "using_a_jenkinsfile.txt",
2284
+ "source": "https://www.jenkins.io/doc/"
2285
+ },
2286
+ {
2287
+ "source_file": "using_a_jenkinsfile.txt",
2288
+ "source": "https://www.jenkins.io/doc/"
2289
+ },
2290
+ {
2291
+ "source_file": "using_a_jenkinsfile.txt",
2292
+ "source": "https://www.jenkins.io/doc/"
2293
+ },
2294
+ {
2295
+ "source_file": "using_a_jenkinsfile.txt",
2296
+ "source": "https://www.jenkins.io/doc/"
2297
+ },
2298
+ {
2299
+ "source_file": "using_a_jenkinsfile.txt",
2300
+ "source": "https://www.jenkins.io/doc/"
2301
+ },
2302
+ {
2303
+ "source_file": "using_a_jenkinsfile.txt",
2304
+ "source": "https://www.jenkins.io/doc/"
2305
+ },
2306
+ {
2307
+ "source_file": "using_a_jenkinsfile.txt",
2308
+ "source": "https://www.jenkins.io/doc/"
2309
+ },
2310
+ {
2311
+ "source_file": "using_a_jenkinsfile.txt",
2312
+ "source": "https://www.jenkins.io/doc/"
2313
+ },
2314
+ {
2315
+ "source_file": "using_a_jenkinsfile.txt",
2316
+ "source": "https://www.jenkins.io/doc/"
2317
+ },
2318
+ {
2319
+ "source_file": "using_a_jenkinsfile.txt",
2320
+ "source": "https://www.jenkins.io/doc/"
2321
+ },
2322
+ {
2323
+ "source_file": "using_a_jenkinsfile.txt",
2324
+ "source": "https://www.jenkins.io/doc/"
2325
+ },
2326
+ {
2327
+ "source_file": "using_a_jenkinsfile.txt",
2328
+ "source": "https://www.jenkins.io/doc/"
2329
+ },
2330
+ {
2331
+ "source_file": "using_a_jenkinsfile.txt",
2332
+ "source": "https://www.jenkins.io/doc/"
2333
+ },
2334
+ {
2335
+ "source_file": "using_a_jenkinsfile.txt",
2336
+ "source": "https://www.jenkins.io/doc/"
2337
+ },
2338
+ {
2339
+ "source_file": "using_a_jenkinsfile.txt",
2340
+ "source": "https://www.jenkins.io/doc/"
2341
+ },
2342
+ {
2343
+ "source_file": "using_a_jenkinsfile.txt",
2344
+ "source": "https://www.jenkins.io/doc/"
2345
+ },
2346
+ {
2347
+ "source_file": "using_a_jenkinsfile.txt",
2348
+ "source": "https://www.jenkins.io/doc/"
2349
+ },
2350
+ {
2351
+ "source_file": "using_a_jenkinsfile.txt",
2352
+ "source": "https://www.jenkins.io/doc/"
2353
+ },
2354
+ {
2355
+ "source_file": "using_a_jenkinsfile.txt",
2356
+ "source": "https://www.jenkins.io/doc/"
2357
+ },
2358
+ {
2359
+ "source_file": "using_a_jenkinsfile.txt",
2360
+ "source": "https://www.jenkins.io/doc/"
2361
+ },
2362
+ {
2363
+ "source_file": "using_a_jenkinsfile.txt",
2364
+ "source": "https://www.jenkins.io/doc/"
2365
+ },
2366
+ {
2367
+ "source_file": "using_a_jenkinsfile.txt",
2368
+ "source": "https://www.jenkins.io/doc/"
2369
+ },
2370
+ {
2371
+ "source_file": "using_a_jenkinsfile.txt",
2372
+ "source": "https://www.jenkins.io/doc/"
2373
+ },
2374
+ {
2375
+ "source_file": "using_a_jenkinsfile.txt",
2376
+ "source": "https://www.jenkins.io/doc/"
2377
+ },
2378
+ {
2379
+ "source_file": "using_a_jenkinsfile.txt",
2380
+ "source": "https://www.jenkins.io/doc/"
2381
+ },
2382
+ {
2383
+ "source_file": "using_a_jenkinsfile.txt",
2384
+ "source": "https://www.jenkins.io/doc/"
2385
+ },
2386
+ {
2387
+ "source_file": "using_a_jenkinsfile.txt",
2388
+ "source": "https://www.jenkins.io/doc/"
2389
+ },
2390
+ {
2391
+ "source_file": "using_a_jenkinsfile.txt",
2392
+ "source": "https://www.jenkins.io/doc/"
2393
+ },
2394
+ {
2395
+ "source_file": "using_a_jenkinsfile.txt",
2396
+ "source": "https://www.jenkins.io/doc/"
2397
+ },
2398
+ {
2399
+ "source_file": "using_a_jenkinsfile.txt",
2400
+ "source": "https://www.jenkins.io/doc/"
2401
+ },
2402
+ {
2403
+ "source_file": "using_a_jenkinsfile.txt",
2404
+ "source": "https://www.jenkins.io/doc/"
2405
+ },
2406
+ {
2407
+ "source_file": "using_a_jenkinsfile.txt",
2408
+ "source": "https://www.jenkins.io/doc/"
2409
+ },
2410
+ {
2411
+ "source_file": "using_a_jenkinsfile.txt",
2412
+ "source": "https://www.jenkins.io/doc/"
2413
+ },
2414
+ {
2415
+ "source_file": "using_a_jenkinsfile.txt",
2416
+ "source": "https://www.jenkins.io/doc/"
2417
+ },
2418
+ {
2419
+ "source_file": "using_a_jenkinsfile.txt",
2420
+ "source": "https://www.jenkins.io/doc/"
2421
+ },
2422
+ {
2423
+ "source_file": "using_a_jenkinsfile.txt",
2424
+ "source": "https://www.jenkins.io/doc/"
2425
+ },
2426
+ {
2427
+ "source_file": "using_a_jenkinsfile.txt",
2428
+ "source": "https://www.jenkins.io/doc/"
2429
+ },
2430
+ {
2431
+ "source_file": "using_a_jenkinsfile.txt",
2432
+ "source": "https://www.jenkins.io/doc/"
2433
+ },
2434
+ {
2435
+ "source_file": "using_a_jenkinsfile.txt",
2436
+ "source": "https://www.jenkins.io/doc/"
2437
+ },
2438
+ {
2439
+ "source_file": "using_a_jenkinsfile.txt",
2440
+ "source": "https://www.jenkins.io/doc/"
2441
+ }
2442
+ ]
data/docs/raw/git_scm.txt ADDED
@@ -0,0 +1,206 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
+
3
+ For a list of other such plugins, see the Pipeline Steps Reference page.
4
+
5
+ scmGit
6
+ scmGit
7
+ The git plugin provides fundamental git operations for Jenkins projects. It can poll, fetch, checkout, and merge contents of git repositories.
8
+
9
+ The scmGit parameter of the git plugin is used with the Pipeline SCM checkout step to checkout git repositories into Pipeline workspaces. The Pipeline Syntax Snippet Generator guides the user to select git plugin checkout options and provides online help for each of the options.
10
+
11
+ Use the Pipeline Snippet Generator to generate a sample pipeline script for the checkout step. Examples of the checkout step include:
12
+
13
+ Checkout step with defaults
14
+ Checkout step with https and a specific branch
15
+ Checkout step with ssh and a private key credential
16
+ Checkout step with https and changelog disabled
17
+ Checkout step with git protocol and polling disabled
18
+ See the argument descriptions for more details.
19
+ The scmGit parameter of the checkout step provides access to all the Pipeline capabilities provided by the git plugin:
20
+
21
+ checkout scmGit(userRemoteConfigs: [
22
+ [ url: 'https://github.com/jenkinsci/git-plugin' ]
23
+ ])
24
+ NOTE: The checkout step with the scmGit parameter is the preferred SCM checkout method. For simpler cases that do not require all the capabilities of the git plugin, the git step can also be used.
25
+
26
+ Use the Pipeline Snippet Generator to generate a sample pipeline script for the checkout step.
27
+
28
+ The checkout step with the scmGit parameter can be used in many cases where the git step cannot be used. Refer to the git plugin documentation for detailed descriptions of options available to the checkout step. For example, the checkout step supports:
29
+
30
+ SHA-1 checkout
31
+ Tag checkout
32
+ Submodule checkout
33
+ Sparse checkout
34
+ Large file checkout (LFS)
35
+ Reference repositories
36
+ Branch merges
37
+ Repository tagging
38
+ Custom refspecs
39
+ Timeout configuration
40
+ Changelog calculation against a non-default reference
41
+ Stale branch pruning
42
+ Example: Checkout step with defaults
43
+ Checkout from the git plugin source repository using https protocol, no credentials, and the master branch.
44
+
45
+ The Pipeline Snippet Generator generates this example:
46
+
47
+ checkout scmGit(userRemoteConfigs: [
48
+ [ url: 'https://github.com/jenkinsci/git-plugin' ]
49
+ ])
50
+ Example: Checkout step with https and a specific branch
51
+ Checkout from the Jenkins source repository using https protocol, no credentials, and a specific branch (stable-2.289).
52
+
53
+ The Pipeline Snippet Generator generates this example:
54
+
55
+ checkout scmGit(branches: [[name: 'stable-2.289']],
56
+ userRemoteConfigs: [
57
+ [ url: 'https://github.com/jenkinsci/jenkins.git' ]
58
+ ])
59
+ Example: Checkout step with ssh and a private key credential
60
+ Checkout from the git client plugin source repository using ssh protocol, private key credentials, and the master branch. The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
61
+
62
+ The Pipeline Snippet Generator generates this example:
63
+
64
+ checkout changelog: false,
65
+ scm: scmGit(userRemoteConfigs: [
66
+ [ credentialsId: 'my-private-key-credential-id',
67
+ url: 'git@github.com:jenkinsci/git-client-plugin.git' ]
68
+ ])
69
+ Example: Checkout step with https and changelog disabled
70
+ Checkout from the Jenkins source repository using https protocol, no credentials, the master branch, and changelog calculation disabled. If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed. See the workflow scm step documentation for more changelog details.
71
+
72
+ The Pipeline Snippet Generator generates this example:
73
+
74
+ checkout changelog: false,
75
+ scm: scmGit(userRemoteConfigs: [
76
+ [ url: 'https://github.com/jenkinsci/credentials-plugin' ]
77
+ ])
78
+ Example: Checkout step with git protocol and polling disabled
79
+ Checkout from the command line git repository using git protocol, no credentials, the master branch, and no polling for changes. If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes. See the workflow scm step documentation for more polling details.
80
+
81
+ The Pipeline Snippet Generator generates this example:
82
+
83
+ checkout poll: false,
84
+ scm: scmGit(userRemoteConfigs: [
85
+ [ url: 'git://git.kernel.org/pub/scm/git/git.git' ]
86
+ ])
87
+ Argument Descriptions
88
+ userRemoteConfigs
89
+ Specify the repository to track. This can be a URL or a local file path. Note that for super-projects (repositories with submodules), only a local file path or a complete URL is valid. The following are examples of valid git URLs.
90
+ ssh://git@github.com/github/git.git
91
+ git@github.com:github/git.git (short notation for ssh protocol)
92
+ ssh://user@other.host.com/~/repos/R.git (to access the repos/R.git repository in the user's home directory)
93
+ https://github.com/github/git.git
94
+
95
+ If the repository is a super-project, the location from which to clone submodules is dependent on whether the repository is bare or non-bare (i.e. has a working directory).
96
+ If the super-project is bare, the location of the submodules will be taken from .gitmodules.
97
+ If the super-project is not bare, it is assumed that the repository has each of its submodules cloned and checked out appropriately. Thus, the submodules will be taken directly from a path like ${SUPER_PROJECT_URL}/${SUBMODULE}, rather than relying on information from .gitmodules.
98
+ For a local URL/path to a super-project, git rev-parse --is-bare-repository is used to detect whether the super-project is bare or not.
99
+ For a remote URL to a super-project, the ending of the URL determines whether a bare or non-bare repository is assumed:
100
+ If the remote URL ends with .git, a non-bare repository is assumed.
101
+ If the remote URL does NOT end with .git, a bare repository is assumed.
102
+ Array / List of Nested Object
103
+ url : String
104
+ name : String
105
+ refspec : String
106
+ credentialsId : String
107
+ branches
108
+ List of branches to build. Jenkins jobs are most effective when each job builds only a single branch. When a single job builds multiple branches, the changelog comparisons between branches often show no changes or incorrect changes.
109
+ Array / List of Nested Object
110
+ name : String
111
+ browser
112
+ Defines the repository browser that displays changes detected by the git plugin.
113
+ Nested Choice of Objects
114
+ assembla
115
+ $class: 'BacklogGitRepositoryBrowser'
116
+ bitbucketServer
117
+ bitbucket
118
+ cgit
119
+ fisheye
120
+ gitblit
121
+ $class: 'GitBucketBrowser'
122
+ gitLab
123
+ gitLabBrowser
124
+ gitList
125
+ gitWeb
126
+ $class: 'GiteaBrowser'
127
+ github
128
+ gitiles
129
+ $class: 'GitoriousWeb'
130
+ gogs
131
+ kiln
132
+ phabricator
133
+ redmine
134
+ rhodeCode
135
+ $class: 'ScmManagerGitRepositoryBrowser'
136
+ jbSpace
137
+ $class: 'Stash'
138
+ teamFoundation
139
+ $class: 'TracGitRepositoryBrowser'
140
+ $class: 'TuleapBrowser'
141
+ viewgit
142
+ gitTool : String
143
+ Name of the git tool to be used for this job. Git tool names are defined in "Global Tool Configuration".
144
+
145
+ extensions
146
+ Extensions add new behavior or modify existing plugin behavior for different uses. Extensions help users more precisely tune plugin behavior to meet their needs.
147
+
148
+ Extensions include:
149
+
150
+ Clone extensions modify the git operations that retrieve remote changes into the agent workspace. The extensions can adjust the amount of history retrieved, how long the retrieval is allowed to run, and other retrieval details.
151
+ Checkout extensions modify the git operations that place files in the workspace from the git repository on the agent. The extensions can adjust the maximum duration of the checkout operation, the use and behavior of git submodules, the location of the workspace on the disc, and more.
152
+ Changelog extensions adapt the source code difference calculations for different cases.
153
+ Tagging extensions allow the plugin to apply tags in the current workspace.
154
+ Build initiation extensions control the conditions that start a build. They can ignore notifications of a change or force a deeper evaluation of the commits when polling.
155
+ Merge extensions can optionally merge changes from other branches into the current branch of the agent workspace. They control the source branch for the merge and the options applied to the merge.
156
+ Array / List of Nested Choice of Objects
157
+ authorInChangelog
158
+ $class: 'BitbucketEnvVarExtension'
159
+ $class: 'BuildChooserSetting'
160
+ buildSingleRevisionOnly
161
+ changelogToBranch
162
+ checkoutOption
163
+ cleanBeforeCheckout
164
+ cleanAfterCheckout
165
+ cloneOption
166
+ $class: 'CodeCommitURLHelper'
167
+ $class: 'DisableRemotePoll'
168
+ $class: 'ExcludeFromChangeSet'
169
+ $class: 'ExcludeFromPoll'
170
+ $class: 'FallbackToOtherRepositoryGitSCMExtension'
171
+ firstBuildChangelog
172
+ $class: 'GitClientAuthenticatorExtension'
173
+ lfs
174
+ $class: 'GitSCMChecksExtension'
175
+ $class: 'GitSCMStatusChecksExtension'
176
+ $class: 'GitTagMessageExtension'
177
+ $class: 'IgnoreNotifyCommit'
178
+ localBranch
179
+ $class: 'MessageExclusion'
180
+ $class: 'PathRestriction'
181
+ perBuildTag
182
+ $class: 'PreBuildMerge'
183
+ pretestedIntegration
184
+ pruneStaleBranch
185
+ pruneTags
186
+ $class: 'RelativeTargetDirectory'
187
+ $class: 'ScmName'
188
+ sparseCheckout
189
+ submodule
190
+ $class: 'UserExclusion'
191
+ $class: 'UserIdentity'
192
+ $class: 'WipeWorkspace'
193
+ doGenerateSubmoduleConfigurations : boolean (optional)
194
+ Removed facility that was intended to test combinations of git submodule versions. Removed in git plugin 4.6.0. Ignores the user provided value and always uses false as its value.
195
+
196
+ submoduleCfg (optional)
197
+ Removed facility that was intended to test combinations of git submodule versions. Removed in git plugin 4.6.0. Ignores the user provided value(s) and always uses empty values.
198
+
199
+ Array / List of Nested Object
200
+ submoduleName : String
201
+ Removed in git plugin 4.6.0.
202
+
203
+ branches : Array / List of String
204
+ Removed in git plugin 4.6.0.
205
+
206
+ Was this page helpful?
data/docs/raw/jenkins_credentials_text.txt ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
+
3
+ For a list of other such plugins, see the Pipeline Steps Reference page.
4
+
5
+ Table of Contents
6
+ Credentials Binding Plugin
7
+ withCredentials: Bind credentials to variables
8
+ Credentials Binding Plugin
9
+ View this plugin on the Plugins site
10
+
11
+ withCredentials: Bind credentials to variables
12
+ Allows various kinds of credentials (secrets) to be used in idiosyncratic ways. (Some steps explicitly ask for credentials of a particular kind, usually as a credentialsId parameter, in which case this step is unnecessary.) Each binding will define an environment variable active within the scope of the step. You can then use them directly from any other steps that expect environment variables to be set:
13
+
14
+ node {
15
+ withCredentials([usernameColonPassword(credentialsId: 'mylogin', variable: 'USERPASS')]) {
16
+ sh '''
17
+ set +x
18
+ curl -u "$USERPASS" https://private.server/ > output
19
+ '''
20
+ }
21
+ }
22
+ As another example (use Snippet Generator to see all options):
23
+
24
+ node {
25
+ withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
26
+ sh '''
27
+ set +x
28
+ curl -H "Token: $TOKEN" https://some.api/
29
+ '''
30
+ }
31
+ }
32
+ Note the use of single quotes to define the script (implicit parameter to sh) in Groovy above. You want the secret to be expanded by the shell as an environment variable. The following idiom is potentially less secure, as the secret is interpolated by Groovy and so (for example) typical operating system process listings will accidentally disclose it:
33
+
34
+ node {
35
+ withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
36
+ sh /* WRONG! */ """
37
+ set +x
38
+ curl -H 'Token: $TOKEN' https://some.api/
39
+ """
40
+ }
41
+ }
42
+ At least on Linux, environment variables can be obtained by other processes running in the same account, so you should not run a job which uses secrets on the same node as a job controlled by untrusted parties. In any event, you should always prefer expansion as environment variables to inclusion in the command, since Jenkins visualizations such as Blue Ocean will attempt to detect step parameters containing secrets and refuse to display them.
43
+
44
+ The secret(s) will be masked (****) in case they are printed to the build log. This prevents you from accidentally disclosing passwords and the like via the log. (Bourne shell set +x, or Windows batch @echo off, blocks secrets from being displayed in echoed commands; but build tools in debug mode might dump all environment variables to standard output/error, or poorly designed network clients might display authentication, etc.) The masking could of course be trivially circumvented; anyone permitted to configure a job or define Pipeline steps is assumed to be trusted to use any credentials in scope however they like.
45
+
46
+ Beware that certain tools mangle secrets when displaying them. As one example, Bash (as opposed to Ubuntu’s plainer Dash) does so with text containing ' in echo mode:
47
+
48
+ $ export PASS=foo"'"bar
49
+ $ env|fgrep PASS
50
+ PASS=foo'bar
51
+ $ sh -xc 'echo $PASS'
52
+ + echo foo'bar
53
+ foo'bar
54
+ $ bash -xc 'echo $PASS'
55
+ + echo 'foo'\''bar'
56
+ foo'bar
57
+ Mangled secrets can only be detected on a best-effort basis. By default, Jenkins will attempt to mask mangled secrets as they would appear in output of Bourne shell, Bash, Almquist shell and Windows batch. Without these strategies in place, mangled secrets would appear in plain text in log files. In the example above, this would result in:
58
+
59
+ + echo 'foo'\''bar'
60
+ ****
61
+ This particular issue can be more safely prevented by turning off echo with set +x or avoiding the use of shell metacharacters in secrets.
62
+
63
+ For bindings which store a secret file, beware that
64
+
65
+ node {
66
+ dir('subdir') {
67
+ withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
68
+ sh 'use $FILE'
69
+ }
70
+ }
71
+ }
72
+ is not safe, as $FILE might be inside the workspace (in subdir@tmp/secretFiles/), and thus visible to anyone able to browse the job’s workspace. If you need to run steps in a different directory than the usual workspace, you should instead use
73
+
74
+ node {
75
+ withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
76
+ dir('subdir') {
77
+ sh 'use $FILE'
78
+ }
79
+ }
80
+ }
81
+ to ensure that the secrets are outside the workspace; or choose a different workspace entirely:
82
+
83
+ node {
84
+ ws {
85
+ withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
86
+ sh 'use $FILE'
87
+ }
88
+ }
89
+ }
90
+ Also see the Limitations of Credentials Masking blog post for more background.
91
+
92
+ bindings
93
+ Array / List of Nested Choice of Objects
94
+ aws
95
+ token
96
+ $class: 'AwsBucketCredentialsBinding'
97
+ ociCredentials
98
+ certificate
99
+ ConjurSecretApplianceCredentials
100
+ conjurSecretCredential
101
+ conjurSecretDockerClientCert
102
+ conjurSecretFile
103
+ conjurSecretString
104
+ conjurSecretUsername
105
+ conjurSecretUsernameSSHKey
106
+ dockerCert
107
+ file
108
+ gitlabApiToken
109
+ gitUsernamePassword
110
+ $class: 'KeychainPasswordAndPathBinding'
111
+ OSFBuilderSuiteOpenCommerceAPICredentials
112
+ sshUserPrivateKey
113
+ string
114
+ OSFBuilderSuiteTwoFactorAuthCredentials
115
+ usernameColonPassword
116
+ usernamePassword
117
+ $class: 'VaultCertificateCredentialsBinding'
118
+ vaultFile
119
+ $class: 'VaultSSHUserPrivateKeyBinding'
120
+ vaultString
121
+ $class: 'VaultTokenCredentialBinding'
122
+ $class: 'VaultUsernamePasswordCredentialBinding'
123
+ zip
124
+ azureServicePrincipal
125
+ azureStorage
data/docs/raw/jenkins_git.txt ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
+
3
+ For a list of other such plugins, see the Pipeline Steps Reference page.
4
+
5
+ Table of Contents
6
+ Git plugin
7
+ git: Git
8
+ Git plugin
9
+ View this plugin on the Plugins site
10
+
11
+ git: Git
12
+ The git step performs a clone from the specified repository into a Pipeline workspace.
13
+
14
+ Use the Pipeline Syntax Snippet Generator to generate a sample pipeline script for the git step. More advanced checkout operations require the checkout step with the scmGit parameter rather than the git step. Examples of the git step include:
15
+
16
+ Git step with defaults
17
+ Git step with https and a specific branch
18
+ Git step with ssh and a private key credential
19
+ Git step with https and changelog disabled
20
+ Git step with git protocol and polling disabled
21
+ See the argument descriptions for more details.
22
+ The git step is a simplified shorthand for a subset of the more powerful checkout step with the scmGit parameter:
23
+
24
+ checkout scmGit(branches: [[name: 'main']],
25
+ userRemoteConfigs: [[url: 'https://git-server/user/repository.git']])
26
+ NOTE: The checkout step with the scmGit parameter is the preferred SCM checkout method. It provides significantly more functionality than the git step.
27
+
28
+ Use the Pipeline Syntax Snippet Generator to generate a sample pipeline script for the checkout step.
29
+
30
+ The checkout step with the scmGit parameter can be used in many cases where the git step cannot be used. Refer to the git plugin documentation for detailed descriptions of options available to the scmGit parameter of the checkout step. For example, the git step does not support:
31
+
32
+ SHA-1 checkout
33
+ Tag checkout
34
+ Submodule checkout
35
+ Sparse checkout
36
+ Large file checkout (LFS)
37
+ Reference repositories
38
+ Branch merges
39
+ Repository tagging
40
+ Custom refspecs
41
+ Timeout configuration
42
+ Changelog calculation against a non-default reference
43
+ Stale branch pruning
44
+ Example: Git step with defaults
45
+ Checkout from the git plugin source repository using https protocol, no credentials, and the master branch.
46
+
47
+ The Pipeline Syntax Snippet Generator generates this example:
48
+
49
+ git 'https://github.com/jenkinsci/git-plugin.git'
50
+ Example: Git step with https and a specific branch
51
+ Checkout from the Jenkins source repository using https protocol, no credentials, and a specific branch (stable-2.492). Note that this must be a local branch name like 'master' or 'develop'.
52
+
53
+ Branch names that are not supported by the git step
54
+
55
+ Remote branch names like 'origin/master' and 'origin/develop' are not supported as the branch argument
56
+ SHA-1 hashes are not supported as the branch argument
57
+ Tag names are not supported as the branch argument
58
+ Remote branch names, SHA-1 hashes, and tag names are supported by the general purpose scmGit parameter of the checkout step.
59
+
60
+ The Pipeline Syntax Snippet Generator generates this example:
61
+
62
+ git branch: 'stable-2.492',
63
+ url: 'https://github.com/jenkinsci/jenkins.git'
64
+ Example: Git step with ssh and a private key credential
65
+ Checkout from the git client plugin source repository using ssh protocol, private key credentials, and the master branch. The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
66
+
67
+ The Pipeline Syntax Snippet Generator generates this example:
68
+
69
+ git credentialsId: 'my-private-key-credential-id',
70
+ url: 'git@github.com:jenkinsci/git-client-plugin.git'
71
+ Example: Git step with https and changelog disabled
72
+ Checkout from the Jenkins source repository using https protocol, no credentials, the master branch, and changelog calculation disabled. If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed. See the workflow scm step documentation for more changelog details.
73
+
74
+ The Pipeline Syntax Snippet Generator generates this example:
75
+
76
+ git changelog: false,
77
+ url: 'https://github.com/jenkinsci/credentials-plugin.git'
78
+ Example: Git step with https protocol and polling disabled
79
+ Checkout from the Jenkins platform labeler repository using https protocol, no credentials, the master branch, and no polling for changes. If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes. See the workflow scm step documentation for more polling details.
80
+
81
+ The Pipeline Syntax Snippet Generator generates this example:
82
+
83
+ git poll: false,
84
+ url: 'https://github.com/jenkinsci/platformlabeler-plugin.git'
85
+ Argument Descriptions
86
+ url : String
87
+ URL of the repository to be checked out in the workspace. Required parameter.
88
+
89
+ Repository URL's should follow the git URL guidelines. Git steps to access a secured repository should provide a Jenkins credential with the credentialsId argument rather than embedding credentials in the URL. Credentials embedded in a repository URL may be visible in console logs or in other log files.
90
+
91
+ branch : String (optional)
92
+ Branch to be checked out in the workspace. Default is 'master'.
93
+
94
+ Note that this must be a local branch name like 'master' or 'develop'. Remote branch names like 'origin/master' and 'origin/develop' are not supported as the branch argument. Tag names are not supported as the branch argument. SHA-1 hashes are not supported as the branch argument. Remote branch names, tag names, and SHA-1 hashes are supported by the general purpose checkout step with the scmGit parameter.
95
+
96
+ changelog : boolean (optional)
97
+ Compute changelog for this job. Default is 'true'.
98
+
99
+ If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed.
100
+
101
+ credentialsId : String (optional)
102
+ Identifier of the credential used to access the remote git repository. Default is '<empty>'.
103
+
104
+ The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
105
+
106
+ poll : boolean (optional)
107
+ Poll remote repository for changes. Default is 'true'.
108
+
109
+ If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes.
110
+
data/docs/raw/jenkins_nodes.txt ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Managing Nodes
2
+ Table of Contents
3
+ Components of Distributed Builds
4
+ Creating Agents
5
+ Launch inbound agent via Windows Scheduler
6
+ Installing a Jenkins agent on Windows
7
+ Creating a macOS agent for Jenkins
8
+ Components of Distributed Builds
9
+ Builds in a distributed builds architecture use nodes, agents, and executors, which are distinct from the Jenkins controller itself. Understanding what each of these components are is useful when managing nodes:
10
+
11
+ Jenkins controller
12
+ The Jenkins controller is the Jenkins service itself and where Jenkins is installed. It is also a web server that also acts as a "brain" for deciding how, when, and where to run tasks. Management tasks such as configuration, authorization, and authentication are executed on the controller, which serves HTTP requests. Files written when a Pipeline executes are written to the filesystem on the controller, unless they are off-loaded to an artifact repository such as Nexus or Artifactory.
13
+
14
+ Nodes
15
+ Nodes are the "machines" on which build agents run. Jenkins monitors each attached node for disk space, free temp space, free swap, clock time/sync, and response time. A node is taken offline if any of these values go outside the configured threshold. Jenkins supports two types of nodes:
16
+
17
+ agents (described below)
18
+
19
+ built-in node
20
+
21
+ The built-in node is a node that exists within the controller process. It is possible to use agents and the build-in node to run tasks. However, running tasks on the built-in node is discouraged for security, performance, and scalability reasons. The number of executors configured for the node determines the node’s ability to run tasks. Set the number of executors to 0 to disable running tasks on the built-in node.
22
+
23
+ Agents
24
+ Agents manage the task execution on behalf of the Jenkins controller by using executors. An agent is a small (170KB single jar) Java client process that connects to a Jenkins controller and is assumed to be unreliable. An agent can use any operating system that supports Java. Any tools required for building and testing get installed on the node where the agent runs. Because these tools are a part of the node, they can be installed directly or in a container, such as Docker or Kubernetes. Each agent is effectively a process with its own Process Identifier (PID) on the host machine. In practice, nodes and agents are essentially the same but it is good to remember that they are conceptually distinct.
25
+
26
+ Executors
27
+ An executor is a slot for the execution of tasks. Effectively, it is a thread in the agent. The number of executors on a node defines the number of concurrent tasks that can run. In other words, this determines the number of concurrent Pipeline stages that can execute at the same time. Determine the correct number of executors per build node must be determined based on the resources available on the node and the resources required for the workload. When determining how many executors to run on a node, consider CPU and memory requirements, as well as the amount of I/O and network activity:
28
+
29
+ One executor per node is the safest configuration.
30
+
31
+ One executor per CPU core can work well, if the tasks running are small.
32
+
33
+ Monitor I/O performance, CPU load, memory usage, and I/O throughput carefully when running multiple executors on a node.
34
+
35
+ Creating Agents
36
+ Jenkins agents are the "workers" that perform operations requested by the Jenkins controller. The Jenkins controller administers the agents and can manage the tooling on the agents. Jenkins agents may be statically allocated or they can be dynamically allocated through systems like Kubernetes, OpenShift, Amazon EC2, Azure, Google Cloud, IBM Cloud, Oracle Cloud, and other cloud providers.
37
+
38
+ This 30 minute tutorial from Darin Pope creates a Jenkins agent and connects it to a controller.
39
+
40
+ How to create an agent node in Jenkins
41
+
42
+ Launch inbound agent via Windows Scheduler
43
+ If you are having trouble getting the inbound agent installed as a Windows service (i.e., you followed the instructions on installing the agent as a service here but it didn’t work), an alternative method of starting the service automatically when Windows starts is to use the Windows Scheduler.
44
+
45
+ We take advantage of the Windows Scheduler’s ability to run command at system startup
46
+
47
+ Configure your node to use the "Launch agents by connecting it to the master" launch method
48
+
49
+ Click Save
50
+
51
+ Note the command required to launch the agent
52
+
53
+ On the new agent node’s Jenkins page, note the agent command line shown.
54
+
55
+ It will be like:
56
+
57
+ java \
58
+ -jar agent.jar \
59
+ -url <Jenkins URL> \
60
+ -secret <secret key> \
61
+ -name <agent name>
62
+ Obtain the agent.jar file and copy it to your new Windows agent node
63
+
64
+ In the command line noted in the last step, the "agent.jar" is a hyperlink. Click it to download the agent.jar file.
65
+
66
+ Copy the agent.jar file to a permanent location on your agent machine
67
+
68
+ Ensure that you have a java version available on your agent machine
69
+
70
+ If not, obtain and install a supported version of Java
71
+
72
+ Run the command manually from a CMD window on your agent to confirm that it works
73
+
74
+ Open the CMD window
75
+
76
+ Run the command the one like
77
+
78
+ java \
79
+ -jar agent.jar \
80
+ -url <Jenkins URL> \
81
+ -secret <secret key> \
82
+ -name <agent name>
83
+ Go back to the node’s web page in Jenkins. If everything works then page should say "Agent is connected"
84
+
85
+ Stop the command (control-c)
86
+
87
+ Register a new scheduled job to run the same command
88
+
89
+ Open "Task Scheduler" on your windows machine
90
+
91
+ Start → Run: task Scheduler
92
+
93
+ Create a basic task (Menu: Action → Create Basic Task)
94
+
95
+ First page of the wizard:
96
+
97
+ Name: Jenkins Agent
98
+
99
+ Description (optional)
100
+
101
+ Click Next
102
+
103
+ Next page of the wizard
104
+
105
+ When do you want the task to start: select "When the computer starts"
106
+
107
+ Click Next
108
+
109
+ Next page of the wizard
110
+
111
+ What action do you want the task to perform: select "Start a program"
112
+
113
+ Click Next
114
+
115
+ Next page of the wizard
116
+
117
+ Program/Script: enter "java.exe" (or the full path to your java.exe)
118
+
119
+ Add arguments: enter the rest of the command, like
120
+
121
+ java
122
+ -jar agent.jar \
123
+ -url <Jenkins URL> \
124
+ -secret <secret key> \
125
+ -name <agent name>
126
+ eg:
127
+
128
+ java \
129
+ -jar D:\Scripts\jenkins\agent.jar \
130
+ -url http://jenkinshost.example.com \
131
+ -secret d6a84df1fc4f45ddc9c6ab34b08f13391983ffffffffffb3488b7d5ac77fbc7 \
132
+ -name buildNode1
133
+ Click Next
134
+
135
+ Next page of the wizard
136
+
137
+ Click the check box "Open the Properties dialog for this task when I click Finish
138
+
139
+ Click Finish
140
+
141
+ Update the task’s properties
142
+
143
+ On the General tab
144
+
145
+ Select the user to run the task as
146
+
147
+ Select "Run whether user is logged on or not"
148
+
149
+ On the settings tab
150
+
151
+ Uncheck "Stop the task if it runs longer than"
152
+
153
+ Check "Run the task as soon as possible after a scheduled start is missed"
154
+
155
+ Check "If the task failed, restart every: 10 minutes", and "Attempt to restart up to: 3 times"
156
+
157
+ Click OK
158
+
159
+ Start the scheduled task and again check that the agent is connected
160
+
161
+ Go back to the node’s web page in Jenkins. If everything works then page should say "Agent is connected"
162
+
163
+ Installing a Jenkins agent on Windows
164
+ You can install a Jenkins agent on Windows using the command line. In this video, Darin reviews setting up and installing the Jenkins agent, including how to create any necessary files.
165
+
166
+ How to install a Jenkins agent on Windows
167
+
168
+ Creating a macOS agent for Jenkins
169
+ This video reviews the process of creating a macOS agent for Jenkins using Java 11.
170
+
171
+
data/docs/raw/pipeline_steps.txt ADDED
The diff for this file is too large to render. See raw diff
 
data/docs/raw/pipeline_syntax.txt ADDED
@@ -0,0 +1,1693 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Pipeline Syntax
2
+ Table of Contents
3
+ Declarative Pipeline
4
+ Limitations
5
+ Sections
6
+ agent
7
+ post
8
+ stages
9
+ steps
10
+ Directives
11
+ environment
12
+ options
13
+ parameters
14
+ triggers
15
+ Jenkins cron syntax
16
+ stage
17
+ tools
18
+ input
19
+ when
20
+ Sequential Stages
21
+ Parallel
22
+ Matrix
23
+ axes
24
+ stages
25
+ excludes (optional)
26
+ Matrix cell-level directives (optional)
27
+ Steps
28
+ script
29
+ Scripted Pipeline
30
+ Flow Control
31
+ Steps
32
+ Differences from plain Groovy
33
+ Syntax Comparison
34
+ This section builds on the information introduced in Getting started with Pipeline and should be treated solely as a reference. For more information on how to use Pipeline syntax in practical examples, refer to:
35
+
36
+ Using a Jenkinsfile
37
+
38
+ Pipeline-as-Code
39
+
40
+ As of version 2.5 of the Pipeline plugin, Pipeline supports two discrete syntaxes - Declarative and Scripted. For the pros and cons of each, refer to the comparison.
41
+
42
+ As discussed at the start of this chapter, the most fundamental part of a Pipeline is the "step". Basically, steps tell Jenkins what to do and serve as the basic building block for both Declarative and Scripted Pipeline syntax.
43
+
44
+ For an overview of available steps, please refer to the Pipeline Steps reference which contains a comprehensive list of steps built into Pipeline as well as steps provided by plugins.
45
+
46
+ Declarative Pipeline
47
+ Declarative Pipeline presents a more simplified and opinionated syntax on top of the Pipeline sub-systems. In order to use them, install the Pipeline: Declarative Plugin.
48
+
49
+ All valid Declarative Pipelines must be enclosed within a pipeline block, for example:
50
+
51
+ pipeline {
52
+ /* insert Declarative Pipeline here */
53
+ }
54
+ The basic statements and expressions which are valid in Declarative Pipeline follow the same rules as Groovy’s syntax with the following exceptions:
55
+
56
+ The top-level of the Pipeline must be a block, specifically: pipeline { }.
57
+
58
+ No semicolons as statement separators. Each statement has to be on its own line.
59
+
60
+ Blocks must only consist of Sections, Directives, Steps, or assignment statements.
61
+
62
+ A property reference statement is treated as a no-argument method invocation. So, for example, input is treated as input().
63
+
64
+ You can use the Declarative Directive Generator to help you get started with configuring the directives and sections in your Declarative Pipeline.
65
+
66
+ Limitations
67
+ There is currently an open issue which limits the maximum size of the code within the pipeline{} block. This limitation does not apply to Scripted Pipelines.
68
+
69
+ Sections
70
+ Sections in Declarative Pipeline typically contain one or more Directives or Steps.
71
+
72
+ agent
73
+ The agent section specifies where the entire Pipeline, or a specific stage, will execute in the Jenkins environment depending on where the agent section is placed. The section must be defined at the top-level inside the pipeline block, but stage-level usage is optional.
74
+
75
+ Required
76
+
77
+ Yes
78
+
79
+ Parameters
80
+
81
+ Described below
82
+
83
+ Allowed
84
+
85
+ In the top-level pipeline block and each stage block.
86
+
87
+ Differences between top level agents and stage level agents
88
+ There are some nuances when adding an agent to the top level or a stage level when the options directive is applied. Check the section options for more information.
89
+
90
+ Top Level Agents
91
+ In agents declared at the top level of a Pipeline, an agent is allocated and then the timeout option is applied. The time to allocate the agent is not included in the limit set by the timeout option.
92
+
93
+ pipeline {
94
+ agent any
95
+ options {
96
+ // Timeout counter starts AFTER agent is allocated
97
+ timeout(time: 1, unit: 'SECONDS')
98
+ }
99
+ stages {
100
+ stage('Example') {
101
+ steps {
102
+ echo 'Hello World'
103
+ }
104
+ }
105
+ }
106
+ }
107
+ Stage Agents
108
+ In agents declared within a stage, the options are invoked before allocating the agent and before checking any when conditions. In this case, when using timeout, it is applied before the agent is allocated. The time to allocate the agent is included in the limit set by the timeout option.
109
+
110
+ pipeline {
111
+ agent none
112
+ stages {
113
+ stage('Example') {
114
+ agent any
115
+ options {
116
+ // Timeout counter starts BEFORE agent is allocated
117
+ timeout(time: 1, unit: 'SECONDS')
118
+ }
119
+ steps {
120
+ echo 'Hello World'
121
+ }
122
+ }
123
+ }
124
+ }
125
+ This timeout will include the agent provisioning time. Because the timeout includes the agent provisioning time, the Pipeline may fail in cases where agent allocation is delayed.
126
+
127
+ Parameters
128
+ In order to support the wide variety of use-cases Pipeline authors may have, the agent section supports a few different types of parameters. These parameters can be applied at the top-level of the pipeline block, or within each stage directive.
129
+
130
+ any
131
+ Execute the Pipeline, or stage, on any available agent. For example: agent any
132
+
133
+ none
134
+ When applied at the top-level of the pipeline block no global agent will be allocated for the entire Pipeline run and each stage section will need to contain its own agent section. For example: agent none
135
+
136
+ label
137
+ Execute the Pipeline, or stage, on an agent available in the Jenkins environment with the provided label. For example: agent { label 'my-defined-label' }
138
+
139
+ Label conditions can also be used: For example: agent { label 'my-label1 && my-label2' } or agent { label 'my-label1 || my-label2' }
140
+
141
+ node
142
+ agent { node { label 'labelName' } } behaves the same as agent { label 'labelName' }, but node allows for additional options (such as customWorkspace).
143
+
144
+ docker
145
+ Execute the Pipeline, or stage, with the given container which will be dynamically provisioned on a node pre-configured to accept Docker-based Pipelines, or on a node matching the optionally defined label parameter. docker also optionally accepts an args parameter which may contain arguments to pass directly to a docker run invocation, and an alwaysPull option, which will force a docker pull even if the image name is already present. For example: agent { docker 'maven:3.9.3-eclipse-temurin-17' } or
146
+
147
+ agent {
148
+ docker {
149
+ image 'maven:3.9.3-eclipse-temurin-17'
150
+ label 'my-defined-label'
151
+ args '-v /tmp:/tmp'
152
+ }
153
+ }
154
+ docker also optionally accepts a registryUrl and registryCredentialsId parameters which will help to specify the Docker Registry to use and its credentials. The parameter registryCredentialsId could be used alone for private repositories within the docker hub. For example:
155
+
156
+ agent {
157
+ docker {
158
+ image 'myregistry.com/node'
159
+ label 'my-defined-label'
160
+ registryUrl 'https://myregistry.com/'
161
+ registryCredentialsId 'myPredefinedCredentialsInJenkins'
162
+ }
163
+ }
164
+ dockerfile
165
+ Execute the Pipeline, or stage, with a container built from a Dockerfile contained in the source repository. In order to use this option, the Jenkinsfile must be loaded from either a Multibranch Pipeline or a Pipeline from SCM. Conventionally this is the Dockerfile in the root of the source repository: agent { dockerfile true }. If building a Dockerfile in another directory, use the dir option: agent { dockerfile { dir 'someSubDir' } }. If your Dockerfile has another name, you can specify the file name with the filename option. You can pass additional arguments to the docker build …​ command with the additionalBuildArgs option, like agent { dockerfile { additionalBuildArgs '--build-arg foo=bar' } }. For example, a repository with the file build/Dockerfile.build, expecting a build argument version:
166
+
167
+ agent {
168
+ // Equivalent to "docker build -f Dockerfile.build --build-arg version=1.0.2 ./build/
169
+ dockerfile {
170
+ filename 'Dockerfile.build'
171
+ dir 'build'
172
+ label 'my-defined-label'
173
+ additionalBuildArgs '--build-arg version=1.0.2'
174
+ args '-v /tmp:/tmp'
175
+ }
176
+ }
177
+ dockerfile also optionally accepts a registryUrl and registryCredentialsId parameters which will help to specify the Docker Registry to use and its credentials. For example:
178
+
179
+ agent {
180
+ dockerfile {
181
+ filename 'Dockerfile.build'
182
+ dir 'build'
183
+ label 'my-defined-label'
184
+ registryUrl 'https://myregistry.com/'
185
+ registryCredentialsId 'myPredefinedCredentialsInJenkins'
186
+ }
187
+ }
188
+ kubernetes
189
+ Execute the Pipeline, or stage, inside a pod deployed on a Kubernetes cluster. In order to use this option, the Jenkinsfile must be loaded from either a Multibranch Pipeline or a Pipeline from SCM. The Pod template is defined inside the kubernetes { } block. For example, if you want a pod with a Kaniko container inside it, you would define it as follows:
190
+
191
+ agent {
192
+ kubernetes {
193
+ defaultContainer 'kaniko'
194
+ yaml '''
195
+ kind: Pod
196
+ spec:
197
+ containers:
198
+ - name: kaniko
199
+ image: gcr.io/kaniko-project/executor:debug
200
+ imagePullPolicy: Always
201
+ command:
202
+ - sleep
203
+ args:
204
+ - 99d
205
+ volumeMounts:
206
+ - name: aws-secret
207
+ mountPath: /root/.aws/
208
+ - name: docker-registry-config
209
+ mountPath: /kaniko/.docker
210
+ volumes:
211
+ - name: aws-secret
212
+ secret:
213
+ secretName: aws-secret
214
+ - name: docker-registry-config
215
+ configMap:
216
+ name: docker-registry-config
217
+ '''
218
+ }
219
+ You will need to create a secret aws-secret for Kaniko to be able to authenticate with ECR. This secret should contain the contents of ~/.aws/credentials. The other volume is a ConfigMap which should contain the endpoint of your ECR registry. For example:
220
+
221
+ {
222
+ "credHelpers": {
223
+ "<your-aws-account-id>.dkr.ecr.eu-central-1.amazonaws.com": "ecr-login"
224
+ }
225
+ }
226
+ Refer to the following example for reference: https://github.com/jenkinsci/kubernetes-plugin/blob/master/examples/kaniko.groovy
227
+
228
+ Common Options
229
+ These are a few options that can be applied to two or more agent implementations. They are not required unless explicitly stated.
230
+
231
+ label
232
+ A string. The label or label condition on which to run the Pipeline or individual stage.
233
+
234
+ This option is valid for node, docker, and dockerfile, and is required for node.
235
+
236
+ customWorkspace
237
+ A string. Run the Pipeline or individual stage this agent is applied to within this custom workspace, rather than the default. It can be either a relative path, in which case the custom workspace will be under the workspace root on the node, or an absolute path. For example:
238
+
239
+ agent {
240
+ node {
241
+ label 'my-defined-label'
242
+ customWorkspace '/some/other/path'
243
+ }
244
+ }
245
+ This option is valid for node, docker, and dockerfile.
246
+
247
+ reuseNode
248
+ A boolean, false by default. If true, run the container on the node specified at the top-level of the Pipeline, in the same workspace, rather than on a new node entirely.
249
+
250
+ This option is valid for docker and dockerfile, and only has an effect when used on an agent for an individual stage.
251
+
252
+ args
253
+ A string. Runtime arguments to pass to docker run.
254
+
255
+ This option is valid for docker and dockerfile.
256
+
257
+ Example 1. Docker Agent, Declarative Pipeline
258
+ pipeline {
259
+ agent { docker 'maven:3.9.3-eclipse-temurin-17' }
260
+ stages {
261
+ stage('Example Build') {
262
+ steps {
263
+ sh 'mvn -B clean verify'
264
+ }
265
+ }
266
+ }
267
+ }
268
+ Execute all the steps defined in this Pipeline within a newly created container of the given name and tag (maven:3.9.3-eclipse-temurin-17).
269
+ Example 2. Stage-level Agent Section
270
+ pipeline {
271
+ agent none
272
+ stages {
273
+ stage('Example Build') {
274
+ agent { docker 'maven:3.9.9-eclipse-temurin-21' }
275
+ steps {
276
+ echo 'Hello, Maven'
277
+ sh 'mvn --version'
278
+ }
279
+ }
280
+ stage('Example Test') {
281
+ agent { docker 'openjdk:21-jre' }
282
+ steps {
283
+ echo 'Hello, JDK'
284
+ sh 'java -version'
285
+ }
286
+ }
287
+ }
288
+ }
289
+ Defining agent none at the top-level of the Pipeline ensures that an Executor will not be assigned unnecessarily. Using agent none also forces each stage section to contain its own agent section.
290
+ Execute the steps in this stage in a newly created container using this image.
291
+ Execute the steps in this stage in a newly created container using a different image from the previous stage.
292
+ post
293
+ The post section defines one or more additional steps that are run upon the completion of a Pipeline’s or stage’s run (depending on the location of the post section within the Pipeline). post can support any of the following post-condition blocks: always, changed, fixed, regression, aborted, failure, success, unstable, unsuccessful, and cleanup. These condition blocks allow the execution of steps inside each condition depending on the completion status of the Pipeline or stage. The condition blocks are executed in the order shown below.
294
+
295
+ Required
296
+
297
+ No
298
+
299
+ Parameters
300
+
301
+ None
302
+
303
+ Allowed
304
+
305
+ In the top-level pipeline block and each stage block.
306
+
307
+ Conditions
308
+ always
309
+ Run the steps in the post section regardless of the completion status of the Pipeline’s or stage’s run.
310
+
311
+ changed
312
+ Only run the steps in post if the current Pipeline’s run has a different completion status from its previous run.
313
+
314
+ fixed
315
+ Only run the steps in post if the current Pipeline’s run is successful and the previous run failed or was unstable.
316
+
317
+ regression
318
+ Only run the steps in post if the current Pipeline’s or stage’s status is failure, unstable, or aborted and the previous run was successful.
319
+
320
+ aborted
321
+ Only run the steps in post if the current Pipeline’s run has an "aborted" status, usually due to the Pipeline being manually aborted. This is typically denoted by gray in the web UI.
322
+
323
+ failure
324
+ Only run the steps in post if the current Pipeline’s or stage’s run has a "failed" status, typically denoted by red in the web UI.
325
+
326
+ success
327
+ Only run the steps in post if the current Pipeline’s or stage’s run has a "success" status, typically denoted by blue or green in the web UI.
328
+
329
+ unstable
330
+ Only run the steps in post if the current Pipeline’s run has an "unstable" status, usually caused by test failures, code violations, etc. This is typically denoted by yellow in the web UI.
331
+
332
+ unsuccessful
333
+ Only run the steps in post if the current Pipeline’s or stage’s run does not have a "success" status. This is typically denoted in the web UI depending on the status previously mentioned (for stages this may fire if the build itself is unstable).
334
+
335
+ cleanup
336
+ Run the steps in this post condition after every other post condition has been evaluated, regardless of the Pipeline or stage’s status.
337
+
338
+ Example 3. Post Section, Declarative Pipeline
339
+ pipeline {
340
+ agent any
341
+ stages {
342
+ stage('Example') {
343
+ steps {
344
+ echo 'Hello World'
345
+ }
346
+ }
347
+ }
348
+ post {
349
+ always {
350
+ echo 'I will always say Hello again!'
351
+ }
352
+ }
353
+ }
354
+ Conventionally, the post section should be placed at the end of the Pipeline.
355
+ Post-condition blocks contain steps the same as the steps section.
356
+ stages
357
+ Containing a sequence of one or more stage directives, the stages section is where the bulk of the "work" described by a Pipeline will be located. At a minimum, it is recommended that stages contain at least one stage directive for each discrete part of the continuous delivery process, such as Build, Test, and Deploy.
358
+
359
+ Required
360
+
361
+ Yes
362
+
363
+ Parameters
364
+
365
+ None
366
+
367
+ Allowed
368
+
369
+ Inside the pipeline block, or within a stage.
370
+
371
+ Example 4. Stages, Declarative Pipeline
372
+ pipeline {
373
+ agent any
374
+ stages {
375
+ stage('Example') {
376
+ steps {
377
+ echo 'Hello World'
378
+ }
379
+ }
380
+ }
381
+ }
382
+ The stages section will typically follow the directives such as agent, options, etc.
383
+ steps
384
+ The steps section defines a series of one or more steps to be executed in a given stage directive.
385
+
386
+ Required
387
+
388
+ Yes
389
+
390
+ Parameters
391
+
392
+ None
393
+
394
+ Allowed
395
+
396
+ Inside each stage block.
397
+
398
+ Example 5. Single Step, Declarative Pipeline
399
+ pipeline {
400
+ agent any
401
+ stages {
402
+ stage('Example') {
403
+ steps {
404
+ echo 'Hello World'
405
+ }
406
+ }
407
+ }
408
+ }
409
+ The steps section must contain one or more steps.
410
+ Directives
411
+ environment
412
+ The environment directive specifies a sequence of key-value pairs which will be defined as environment variables for all steps, or stage-specific steps, depending on where the environment directive is located within the Pipeline.
413
+
414
+ This directive supports a special helper method credentials() which can be used to access pre-defined Credentials by their identifier in the Jenkins environment.
415
+
416
+ Required
417
+
418
+ No
419
+
420
+ Parameters
421
+
422
+ None
423
+
424
+ Allowed
425
+
426
+ Inside the pipeline block, or within stage directives.
427
+
428
+ Supported Credentials Type
429
+ Secret Text
430
+ The environment variable specified will be set to the Secret Text content.
431
+
432
+ Secret File
433
+ The environment variable specified will be set to the location of the file that is temporarily created.
434
+
435
+ Username and password
436
+ The environment variable specified will be set to username:password and two additional environment variables will be automatically defined: MYVARNAME_USR and MYVARNAME_PSW respectively.
437
+
438
+ SSH with Private Key
439
+ The environment variable specified will be set to the location of the SSH key file that is temporarily created and two additional environment variables will be automatically defined: MYVARNAME_USR and MYVARNAME_PSW (holding the passphrase).
440
+
441
+ Unsupported credentials type causes the pipeline to fail with the message: org.jenkinsci.plugins.credentialsbinding.impl.CredentialNotFoundException: No suitable binding handler could be found for type <unsupportedType>.
442
+
443
+ Example 6. Secret Text Credentials, Declarative Pipeline
444
+ pipeline {
445
+ agent any
446
+ environment {
447
+ CC = 'clang'
448
+ }
449
+ stages {
450
+ stage('Example') {
451
+ environment {
452
+ AN_ACCESS_KEY = credentials('my-predefined-secret-text')
453
+ }
454
+ steps {
455
+ sh 'printenv'
456
+ }
457
+ }
458
+ }
459
+ }
460
+ An environment directive used in the top-level pipeline block will apply to all steps within the Pipeline.
461
+ An environment directive defined within a stage will only apply the given environment variables to steps within the stage.
462
+ The environment block has a helper method credentials() defined which can be used to access pre-defined Credentials by their identifier in the Jenkins environment.
463
+ Example 7. Username and Password Credentials
464
+ pipeline {
465
+ agent any
466
+ stages {
467
+ stage('Example Username/Password') {
468
+ environment {
469
+ SERVICE_CREDS = credentials('my-predefined-username-password')
470
+ }
471
+ steps {
472
+ sh 'echo "Service user is $SERVICE_CREDS_USR"'
473
+ sh 'echo "Service password is $SERVICE_CREDS_PSW"'
474
+ sh 'curl -u $SERVICE_CREDS https://myservice.example.com'
475
+ }
476
+ }
477
+ stage('Example SSH Username with private key') {
478
+ environment {
479
+ SSH_CREDS = credentials('my-predefined-ssh-creds')
480
+ }
481
+ steps {
482
+ sh 'echo "SSH private key is located at $SSH_CREDS"'
483
+ sh 'echo "SSH user is $SSH_CREDS_USR"'
484
+ sh 'echo "SSH passphrase is $SSH_CREDS_PSW"'
485
+ }
486
+ }
487
+ }
488
+ }
489
+ options
490
+ The options directive allows configuring Pipeline-specific options from within the Pipeline itself. Pipeline provides a number of these options, such as buildDiscarder, but they may also be provided by plugins, such as timestamps.
491
+
492
+ Required
493
+
494
+ No
495
+
496
+ Parameters
497
+
498
+ None
499
+
500
+ Allowed
501
+
502
+ Inside the pipeline block, or (with certain limitations) within stage directives.
503
+
504
+ Available Options
505
+ buildDiscarder
506
+ Persist artifacts and console output for the specific number of recent Pipeline runs. For example: options { buildDiscarder(logRotator(numToKeepStr: '1')) }
507
+
508
+ checkoutToSubdirectory
509
+ Perform the automatic source control checkout in a subdirectory of the workspace. For example: options { checkoutToSubdirectory('foo') }
510
+
511
+ disableConcurrentBuilds
512
+ Disallow concurrent executions of the Pipeline. Can be useful for preventing simultaneous accesses to shared resources, etc. For example: options { disableConcurrentBuilds() } to queue a build when there’s already an executing build of the Pipeline, or options { disableConcurrentBuilds(abortPrevious: true) } to abort the running one and start the new build.
513
+
514
+ disableResume
515
+ Do not allow the pipeline to resume if the controller restarts. For example: options { disableResume() }
516
+
517
+ newContainerPerStage
518
+ Used with docker or dockerfile top-level agent. When specified, each stage will run in a new container deployed on the same node, rather than all stages running in the same container deployment.
519
+
520
+ overrideIndexTriggers
521
+ Allows overriding default treatment of branch indexing triggers. If branch indexing triggers are disabled at the multibranch or organization level, options { overrideIndexTriggers(true) } will enable them for this job only. Otherwise, options { overrideIndexTriggers(false) } will disable branch indexing triggers for this job only.
522
+
523
+ preserveStashes
524
+ Preserve stashes from completed builds, for use with stage restarting. For example: options { preserveStashes() } to preserve the stashes from the most recent completed build, or options { preserveStashes(buildCount: 5) } to preserve the stashes from the five most recent completed builds.
525
+
526
+ quietPeriod
527
+ Set the quiet period, in seconds, for the Pipeline, overriding the global default. For example: options { quietPeriod(30) }
528
+
529
+ retry
530
+ On failure, retry the entire Pipeline the specified number of times. For example: options { retry(3) }
531
+
532
+ skipDefaultCheckout
533
+ Skip checking out code from source control by default in the agent directive. For example: options { skipDefaultCheckout() }
534
+
535
+ skipStagesAfterUnstable
536
+ Skip stages once the build status has gone to UNSTABLE. For example: options { skipStagesAfterUnstable() }
537
+
538
+ timeout
539
+ Set a timeout period for the Pipeline run, after which Jenkins should abort the Pipeline. For example: options { timeout(time: 1, unit: 'HOURS') }
540
+
541
+ Example 8. Global Timeout, Declarative Pipeline
542
+ pipeline {
543
+ agent any
544
+ options {
545
+ timeout(time: 1, unit: 'HOURS')
546
+ }
547
+ stages {
548
+ stage('Example') {
549
+ steps {
550
+ echo 'Hello World'
551
+ }
552
+ }
553
+ }
554
+ }
555
+ Specifying a global execution timeout of one hour, after which Jenkins will abort the Pipeline run.
556
+ timestamps
557
+ Prepend all console output generated by the Pipeline run with the time at which the line was emitted. For example: options { timestamps() }
558
+
559
+ parallelsAlwaysFailFast
560
+ Set failfast true for all subsequent parallel stages in the pipeline. For example: options { parallelsAlwaysFailFast() }
561
+
562
+ disableRestartFromStage
563
+ Completely disable option "Restart From Stage" visible in classic Jenkins UI and Blue Ocean as well. For example: options { disableRestartFromStage() }. This option can not be used inside of the stage.
564
+
565
+ A comprehensive list of available options is pending the completion of help desk ticket 820.
566
+
567
+ stage options
568
+ The options directive for a stage is similar to the options directive at the root of the Pipeline. However, the stage-level options can only contain steps like retry, timeout, or timestamps, or Declarative options that are relevant to a stage, like skipDefaultCheckout.
569
+
570
+ Inside a stage, the steps in the options directive are invoked before entering the agent or checking any when conditions.
571
+
572
+ Available Stage Options
573
+ skipDefaultCheckout
574
+ Skip checking out code from source control by default in the agent directive. For example: options { skipDefaultCheckout() }
575
+
576
+ timeout
577
+ Set a timeout period for this stage, after which Jenkins should abort the stage. For example: options { timeout(time: 1, unit: 'HOURS') }
578
+
579
+ Example 9. Stage Timeout, Declarative Pipeline
580
+ pipeline {
581
+ agent any
582
+ stages {
583
+ stage('Example') {
584
+ options {
585
+ timeout(time: 1, unit: 'HOURS')
586
+ }
587
+ steps {
588
+ echo 'Hello World'
589
+ }
590
+ }
591
+ }
592
+ }
593
+ Specifying an execution timeout of one hour for the Example stage, after which Jenkins will abort the Pipeline run.
594
+ retry
595
+ On failure, retry this stage the specified number of times. For example: options { retry(3) }
596
+
597
+ timestamps
598
+ Prepend all console output generated during this stage with the time at which the line was emitted. For example: options { timestamps() }
599
+
600
+ parameters
601
+ The parameters directive provides a list of parameters that a user should provide when triggering the Pipeline. The values for these user-specified parameters are made available to Pipeline steps via the params object, refer to the Parameters, Declarative Pipeline for its specific usage.
602
+
603
+ Each parameter has a Name and Value, depending on the parameter type. This information is exported as environment variables when the build starts, allowing subsequent parts of the build configuration to access those values. For example, use the ${PARAMETER_NAME} syntax with POSIX shells like bash and ksh, the ${Env:PARAMETER_NAME} syntax with PowerShell, or the %PARAMETER_NAME% syntax with Windows cmd.exe.
604
+
605
+ Required
606
+
607
+ No
608
+
609
+ Parameters
610
+
611
+ None
612
+
613
+ Allowed
614
+
615
+ Only once, inside the pipeline block.
616
+
617
+ Available Parameters
618
+ string
619
+ A parameter of a string type, for example: parameters { string(name: 'DEPLOY_ENV', defaultValue: 'staging', description: '') }.
620
+
621
+ text
622
+ A text parameter, which can contain multiple lines, for example: parameters { text(name: 'DEPLOY_TEXT', defaultValue: 'One\nTwo\nThree\n', description: '') }.
623
+
624
+ booleanParam
625
+ A boolean parameter, for example: parameters { booleanParam(name: 'DEBUG_BUILD', defaultValue: true, description: '') }.
626
+
627
+ choice
628
+ A choice parameter, for example: parameters { choice(name: 'CHOICES', choices: ['one', 'two', 'three'], description: '') }. The first value is the default.
629
+
630
+ password
631
+ A password parameter, for example: parameters { password(name: 'PASSWORD', defaultValue: 'SECRET', description: 'A secret password') }.
632
+
633
+ Example 10. Parameters, Declarative Pipeline
634
+ pipeline {
635
+ agent any
636
+ parameters {
637
+ string(name: 'PERSON', defaultValue: 'Mr Jenkins', description: 'Who should I say hello to?')
638
+
639
+ text(name: 'BIOGRAPHY', defaultValue: '', description: 'Enter some information about the person')
640
+
641
+ booleanParam(name: 'TOGGLE', defaultValue: true, description: 'Toggle this value')
642
+
643
+ choice(name: 'CHOICE', choices: ['One', 'Two', 'Three'], description: 'Pick something')
644
+
645
+ password(name: 'PASSWORD', defaultValue: 'SECRET', description: 'Enter a password')
646
+ }
647
+ stages {
648
+ stage('Example') {
649
+ steps {
650
+ echo "Hello ${params.PERSON}"
651
+
652
+ echo "Biography: ${params.BIOGRAPHY}"
653
+
654
+ echo "Toggle: ${params.TOGGLE}"
655
+
656
+ echo "Choice: ${params.CHOICE}"
657
+
658
+ echo "Password: ${params.PASSWORD}"
659
+ }
660
+ }
661
+ }
662
+ }
663
+ A comprehensive list of available parameters is pending the completion of help desk ticket 820.
664
+
665
+ triggers
666
+ The triggers directive defines the automated ways in which the Pipeline should be re-triggered. For Pipelines which are integrated with a source such as GitHub or BitBucket, triggers may not be necessary as webhooks-based integration will likely already be present. The triggers currently available are cron, pollSCM and upstream.
667
+
668
+ Required
669
+
670
+ No
671
+
672
+ Parameters
673
+
674
+ None
675
+
676
+ Allowed
677
+
678
+ Only once, inside the pipeline block.
679
+
680
+ cron
681
+ Accepts a cron-style string to define a regular interval at which the Pipeline should be re-triggered, for example: triggers { cron('H */4 * * 1-5') }.
682
+
683
+ pollSCM
684
+ Accepts a cron-style string to define a regular interval at which Jenkins should check for new source changes. If new changes exist, the Pipeline will be re-triggered. For example: triggers { pollSCM('H */4 * * 1-5') }
685
+
686
+ upstream
687
+ Accepts a comma-separated string of jobs and a threshold. When any job in the string finishes with the minimum threshold, the Pipeline will be re-triggered. For example: triggers { upstream(upstreamProjects: 'job1,job2', threshold: hudson.model.Result.SUCCESS) }
688
+
689
+ The pollSCM trigger is only available in Jenkins 2.22 or later.
690
+
691
+ Example 11. Triggers, Declarative Pipeline
692
+ // Declarative //
693
+ pipeline {
694
+ agent any
695
+ triggers {
696
+ cron('H */4 * * 1-5')
697
+ }
698
+ stages {
699
+ stage('Example') {
700
+ steps {
701
+ echo 'Hello World'
702
+ }
703
+ }
704
+ }
705
+ }
706
+ Jenkins cron syntax
707
+ The Jenkins cron syntax follows the syntax of the cron utility (with minor differences). Specifically, each line consists of 5 fields separated by TAB or whitespace:
708
+
709
+ MINUTE HOUR DOM MONTH DOW
710
+ Minutes within the hour (0–59)
711
+
712
+ The hour of the day (0–23)
713
+
714
+ The day of the month (1–31)
715
+
716
+ The month (1–12)
717
+
718
+ The day of the week (0–7) where 0 and 7 are Sunday.
719
+
720
+ To specify multiple values for one field, the following operators are available. In the order of precedence,
721
+
722
+ * specifies all valid values
723
+
724
+ M-N specifies a range of values
725
+
726
+ M-N/X or */X steps by intervals of X through the specified range or whole valid range
727
+
728
+ A,B,…​,Z enumerates multiple values
729
+
730
+ To allow periodically scheduled tasks to produce even load on the system, the symbol H (for “hash”) should be used wherever possible. For example, using 0 0 * * * for a dozen daily jobs will cause a large spike at midnight. In contrast, using H H * * * would still execute each job once a day, but not all at the same time, better using limited resources.
731
+
732
+ The H symbol can be used with a range. For example, H H(0-7) * * * means some time between 12:00 AM (midnight) to 7:59 AM. You can also use step intervals with H, with or without ranges.
733
+
734
+ The H symbol can be thought of as a random value over a range, but it actually is a hash of the job name, not a random function, so that the value remains stable for any given project.
735
+
736
+ Beware that for the day of month field, short cycles such as */3 or H/3 will not work consistently near the end of most months, due to variable month lengths. For example, */3 will run on the 1st, 4th, …31st days of a long month, then again the next day of the next month. Hashes are always chosen in the 1-28 range, so H/3 will produce a gap between runs of between 3 and 6 days at the end of a month. Longer cycles will also have inconsistent lengths, but the effect may be relatively less noticeable.
737
+
738
+ Empty lines and lines that start with # will be ignored as comments.
739
+
740
+ In addition, @yearly, @annually, @monthly, @weekly, @daily, @midnight, and @hourly are supported as convenient aliases. These use the hash system for automatic balancing. For example, @hourly is the same as H * * * * and could mean at any time during the hour. @midnight actually means some time between 12:00 AM and 2:59 AM.
741
+
742
+ Table 1. Jenkins cron syntax examples
743
+ every fifteen minutes (perhaps at :07, :22, :37, :52)
744
+
745
+ triggers{ cron('H/15 * * * *') }
746
+
747
+ every ten minutes in the first half of every hour (three times, perhaps at :04, :14, :24)
748
+
749
+ triggers{ cron('H(0-29)/10 * * * *') }
750
+
751
+ once every two hours at 45 minutes past the hour starting at 9:45 AM and finishing at 3:45 PM every weekday.
752
+
753
+ triggers{ cron('45 9-16/2 * * 1-5') }
754
+
755
+ once in every two hours slot between 9 AM and 5 PM every weekday (perhaps at 10:38 AM, 12:38 PM, 2:38 PM, 4:38 PM)
756
+
757
+ triggers{ cron('H H(9-16)/2 * * 1-5') }
758
+
759
+ once a day on the 1st and 15th of every month except December
760
+
761
+ triggers{ cron('H H 1,15 1-11 *') }
762
+
763
+ stage
764
+ The stage directive goes in the stages section and should contain a steps section, an optional agent section, or other stage-specific directives. Practically speaking, all of the real work done by a Pipeline will be wrapped in one or more stage directives.
765
+
766
+ Required
767
+
768
+ At least one
769
+
770
+ Parameters
771
+
772
+ One mandatory parameter, a string for the name of the stage.
773
+
774
+ Allowed
775
+
776
+ Inside the stages section.
777
+
778
+ Example 12. Stage, Declarative Pipeline
779
+ // Declarative //
780
+ pipeline {
781
+ agent any
782
+ stages {
783
+ stage('Example') {
784
+ steps {
785
+ echo 'Hello World'
786
+ }
787
+ }
788
+ }
789
+ }
790
+ tools
791
+ A section defining tools to auto-install and put on the PATH. This is ignored if agent none is specified.
792
+
793
+ Required
794
+
795
+ No
796
+
797
+ Parameters
798
+
799
+ None
800
+
801
+ Allowed
802
+
803
+ Inside the pipeline block or a stage block.
804
+
805
+ Supported Tools
806
+ maven
807
+ jdk
808
+ gradle
809
+ Example 13. Tools, Declarative Pipeline
810
+ pipeline {
811
+ agent any
812
+ tools {
813
+ maven 'apache-maven-3.0.1'
814
+ }
815
+ stages {
816
+ stage('Example') {
817
+ steps {
818
+ sh 'mvn --version'
819
+ }
820
+ }
821
+ }
822
+ }
823
+ The tool name must be pre-configured in Jenkins under Manage Jenkins → Tools.
824
+ input
825
+ The input directive on a stage allows you to prompt for input, using the input step. The stage will pause after any options have been applied, and before entering the agent block for that stage or evaluating the when condition of the stage. If the input is approved, the stage will then continue. Any parameters provided as part of the input submission will be available in the environment for the rest of the stage.
826
+
827
+ Configuration options
828
+ message
829
+ Required. This will be presented to the user when they go to submit the input.
830
+
831
+ id
832
+ An optional identifier for this input. The default value is based on the stage name.
833
+
834
+ ok
835
+ Optional text for the "ok" button on the input form.
836
+
837
+ submitter
838
+ An optional comma-separated list of users or external group names who are allowed to submit this input. Defaults to allowing any user.
839
+
840
+ submitterParameter
841
+ An optional name of an environment variable to set with the submitter name, if present.
842
+
843
+ parameters
844
+ An optional list of parameters to prompt the submitter to provide. Refer to parameters for more information.
845
+
846
+ Example 14. Input Step, Declarative Pipeline
847
+ pipeline {
848
+ agent any
849
+ stages {
850
+ stage('Example') {
851
+ input {
852
+ message "Should we continue?"
853
+ ok "Yes, we should."
854
+ submitter "alice,bob"
855
+ parameters {
856
+ string(name: 'PERSON', defaultValue: 'Mr Jenkins', description: 'Who should I say hello to?')
857
+ }
858
+ }
859
+ steps {
860
+ echo "Hello, ${PERSON}, nice to meet you."
861
+ }
862
+ }
863
+ }
864
+ }
865
+ when
866
+ The when directive allows the Pipeline to determine whether the stage should be executed depending on the given condition. The when directive must contain at least one condition. If the when directive contains more than one condition, all the child conditions must return true for the stage to execute. This is the same as if the child conditions were nested in an allOf condition (refer to the examples below). If an anyOf condition is used, note that the condition skips remaining tests as soon as the first "true" condition is found.
867
+
868
+ More complex conditional structures can be built using the nesting conditions: not, allOf, or anyOf. Nesting conditions may be nested to any arbitrary depth.
869
+
870
+ Required
871
+
872
+ No
873
+
874
+ Parameters
875
+
876
+ None
877
+
878
+ Allowed
879
+
880
+ Inside a stage directive
881
+
882
+ Built-in Conditions
883
+ branch
884
+ Execute the stage when the branch being built matches the branch pattern (ANT style path glob) given, for example: when { branch 'master' }. Note that this only works on a multibranch Pipeline.
885
+
886
+ The optional parameter comparator may be added after an attribute to specify how any patterns are evaluated for a match:
887
+
888
+ EQUALS for a simple string comparison
889
+
890
+ GLOB (the default) for an ANT style path glob (same as for example changeset)
891
+
892
+ REGEXP for regular expression matching
893
+
894
+ For example: when { branch pattern: "release-\\d+", comparator: "REGEXP"}
895
+
896
+ buildingTag
897
+ Execute the stage when the build is building a tag. For example: when { buildingTag() }
898
+
899
+ changelog
900
+ Execute the stage if the build’s SCM changelog contains a given regular expression pattern, for example: when { changelog '.*^\\[DEPENDENCY\\] .+$' }.
901
+
902
+ changeset
903
+ Execute the stage if the build’s SCM changeset contains one or more files matching the given pattern. Example: when { changeset "**/*.js" }
904
+
905
+ The optional parameter comparator may be added after an attribute to specify how any patterns are evaluated for a match:
906
+
907
+ EQUALS for a simple string comparison
908
+
909
+ GLOB (the default) for an ANT style path glob case insensitive (this can be turned off with the caseSensitive parameter).
910
+
911
+ REGEXP for regular expression matching
912
+
913
+ For example: when { changeset pattern: ".TEST\\.java", comparator: "REGEXP" } or when { changeset pattern: "*/*TEST.java", caseSensitive: true }
914
+
915
+ changeRequest
916
+ Executes the stage if the current build is for a "change request" (a.k.a. Pull Request on GitHub and Bitbucket, Merge Request on GitLab, Change in Gerrit, etc.). When no parameters are passed the stage runs on every change request, for example: when { changeRequest() }.
917
+
918
+ By adding a filter attribute with parameter to the change request, the stage can be made to run only on matching change requests. Possible attributes are id, target, branch, fork, url, title, author, authorDisplayName, and authorEmail. Each of these corresponds to a CHANGE_* environment variable, for example: when { changeRequest target: 'master' }.
919
+
920
+ The optional parameter comparator may be added after an attribute to specify how any patterns are evaluated for a match:
921
+
922
+ EQUALS for a simple string comparison (the default)
923
+
924
+ GLOB for an ANT style path glob (same as for example changeset)
925
+
926
+ REGEXP for regular expression matching
927
+
928
+ Example: when { changeRequest authorEmail: "[\\w_-.]+@example.com", comparator: 'REGEXP' }
929
+
930
+ environment
931
+ Execute the stage when the specified environment variable is set to the given value, for example: when { environment name: 'DEPLOY_TO', value: 'production' }.
932
+
933
+ equals
934
+ Execute the stage when the expected value is equal to the actual value, for example: when { equals expected: 2, actual: currentBuild.number }.
935
+
936
+ expression
937
+ Execute the stage when the specified Groovy expression evaluates to true, for example: when { expression { return params.DEBUG_BUILD } }.
938
+
939
+ When returning strings from your expressions they must be converted to booleans or return null to evaluate to false. Simply returning "0" or "false" will still evaluate to "true".
940
+ tag
941
+ Execute the stage if the TAG_NAME variable matches the given pattern. For example: when { tag "release-*" } If an empty pattern is provided the stage will execute if the TAG_NAME variable exists (same as buildingTag()).
942
+
943
+ The optional parameter comparator may be added after an attribute to specify how any patterns are evaluated for a match:
944
+
945
+ EQUALS for a simple string comparison,
946
+
947
+ GLOB (the default) for an ANT style path glob (same as for example changeset), or
948
+
949
+ REGEXP for regular expression matching.
950
+
951
+ For example: when { tag pattern: "release-\\d+", comparator: "REGEXP"}
952
+
953
+ not
954
+ Execute the stage when the nested condition is false. Must contain one condition. For example: when { not { branch 'master' } }
955
+
956
+ allOf
957
+ Execute the stage when all of the nested conditions are true. Must contain at least one condition. For example: when { allOf { branch 'master'; environment name: 'DEPLOY_TO', value: 'production' } }
958
+
959
+ anyOf
960
+ Execute the stage when at least one of the nested conditions is true. Must contain at least one condition. For example: when { anyOf { branch 'master'; branch 'staging' } }
961
+
962
+ triggeredBy
963
+ Execute the stage when the current build has been triggered by the param given. For example:
964
+
965
+ when { triggeredBy 'SCMTrigger' }
966
+
967
+ when { triggeredBy 'TimerTrigger' }
968
+
969
+ when { triggeredBy 'BuildUpstreamCause' }
970
+
971
+ when { triggeredBy cause: "UserIdCause", detail: "vlinde" }
972
+
973
+ Evaluating when before entering agent in a stage
974
+ By default, the when condition for a stage will be evaluated after entering the agent for that stage, if one is defined. However, this can be changed by specifying the beforeAgent option within the when block. If beforeAgent is set to true, the when condition will be evaluated first, and the agent will only be entered if the when condition evaluates to true.
975
+
976
+ Evaluating when before the input directive
977
+ By default, the when condition for a stage will not be evaluated before the input, if one is defined. However, this can be changed by specifying the beforeInput option within the when block. If beforeInput is set to true, the when condition will be evaluated first, and the input will only be entered if the when condition evaluates to true.
978
+
979
+ beforeInput true takes precedence over beforeAgent true.
980
+
981
+ Evaluating when before the options directive
982
+ By default, the when condition for a stage will be evaluated after entering the options for that stage, if any are defined. However, this can be changed by specifying the beforeOptions option within the when block. If beforeOptions is set to true, the when condition will be evaluated first, and the options will only be entered if the when condition evaluates to true.
983
+
984
+ beforeOptions true takes precedence over beforeInput true and beforeAgent true.
985
+
986
+ Example 15. Single Condition, Declarative Pipeline
987
+ pipeline {
988
+ agent any
989
+ stages {
990
+ stage('Example Build') {
991
+ steps {
992
+ echo 'Hello World'
993
+ }
994
+ }
995
+ stage('Example Deploy') {
996
+ when {
997
+ branch 'production'
998
+ }
999
+ steps {
1000
+ echo 'Deploying'
1001
+ }
1002
+ }
1003
+ }
1004
+ }
1005
+ Example 16. Multiple Condition, Declarative Pipeline
1006
+ pipeline {
1007
+ agent any
1008
+ stages {
1009
+ stage('Example Build') {
1010
+ steps {
1011
+ echo 'Hello World'
1012
+ }
1013
+ }
1014
+ stage('Example Deploy') {
1015
+ when {
1016
+ branch 'production'
1017
+ environment name: 'DEPLOY_TO', value: 'production'
1018
+ }
1019
+ steps {
1020
+ echo 'Deploying'
1021
+ }
1022
+ }
1023
+ }
1024
+ }
1025
+ Example 17. Nested condition (same behavior as previous example)
1026
+ pipeline {
1027
+ agent any
1028
+ stages {
1029
+ stage('Example Build') {
1030
+ steps {
1031
+ echo 'Hello World'
1032
+ }
1033
+ }
1034
+ stage('Example Deploy') {
1035
+ when {
1036
+ allOf {
1037
+ branch 'production'
1038
+ environment name: 'DEPLOY_TO', value: 'production'
1039
+ }
1040
+ }
1041
+ steps {
1042
+ echo 'Deploying'
1043
+ }
1044
+ }
1045
+ }
1046
+ }
1047
+ Example 18. Multiple condition and nested condition
1048
+ pipeline {
1049
+ agent any
1050
+ stages {
1051
+ stage('Example Build') {
1052
+ steps {
1053
+ echo 'Hello World'
1054
+ }
1055
+ }
1056
+ stage('Example Deploy') {
1057
+ when {
1058
+ branch 'production'
1059
+ anyOf {
1060
+ environment name: 'DEPLOY_TO', value: 'production'
1061
+ environment name: 'DEPLOY_TO', value: 'staging'
1062
+ }
1063
+ }
1064
+ steps {
1065
+ echo 'Deploying'
1066
+ }
1067
+ }
1068
+ }
1069
+ }
1070
+ Example 19. Expression condition and nested condition
1071
+ pipeline {
1072
+ agent any
1073
+ stages {
1074
+ stage('Example Build') {
1075
+ steps {
1076
+ echo 'Hello World'
1077
+ }
1078
+ }
1079
+ stage('Example Deploy') {
1080
+ when {
1081
+ expression { BRANCH_NAME ==~ /(production|staging)/ }
1082
+ anyOf {
1083
+ environment name: 'DEPLOY_TO', value: 'production'
1084
+ environment name: 'DEPLOY_TO', value: 'staging'
1085
+ }
1086
+ }
1087
+ steps {
1088
+ echo 'Deploying'
1089
+ }
1090
+ }
1091
+ }
1092
+ }
1093
+ Example 20. beforeAgent
1094
+ pipeline {
1095
+ agent none
1096
+ stages {
1097
+ stage('Example Build') {
1098
+ steps {
1099
+ echo 'Hello World'
1100
+ }
1101
+ }
1102
+ stage('Example Deploy') {
1103
+ agent {
1104
+ label "some-label"
1105
+ }
1106
+ when {
1107
+ beforeAgent true
1108
+ branch 'production'
1109
+ }
1110
+ steps {
1111
+ echo 'Deploying'
1112
+ }
1113
+ }
1114
+ }
1115
+ }
1116
+ Example 21. beforeInput
1117
+ pipeline {
1118
+ agent none
1119
+ stages {
1120
+ stage('Example Build') {
1121
+ steps {
1122
+ echo 'Hello World'
1123
+ }
1124
+ }
1125
+ stage('Example Deploy') {
1126
+ when {
1127
+ beforeInput true
1128
+ branch 'production'
1129
+ }
1130
+ input {
1131
+ message "Deploy to production?"
1132
+ id "simple-input"
1133
+ }
1134
+ steps {
1135
+ echo 'Deploying'
1136
+ }
1137
+ }
1138
+ }
1139
+ }
1140
+ Example 22. beforeOptions
1141
+ pipeline {
1142
+ agent none
1143
+ stages {
1144
+ stage('Example Build') {
1145
+ steps {
1146
+ echo 'Hello World'
1147
+ }
1148
+ }
1149
+ stage('Example Deploy') {
1150
+ when {
1151
+ beforeOptions true
1152
+ branch 'testing'
1153
+ }
1154
+ options {
1155
+ lock label: 'testing-deploy-envs', quantity: 1, variable: 'deployEnv'
1156
+ }
1157
+ steps {
1158
+ echo "Deploying to ${deployEnv}"
1159
+ }
1160
+ }
1161
+ }
1162
+ }
1163
+ Example 23. triggeredBy
1164
+ pipeline {
1165
+ agent none
1166
+ stages {
1167
+ stage('Example Build') {
1168
+ steps {
1169
+ echo 'Hello World'
1170
+ }
1171
+ }
1172
+ stage('Example Deploy') {
1173
+ when {
1174
+ triggeredBy "TimerTrigger"
1175
+ }
1176
+ steps {
1177
+ echo 'Deploying'
1178
+ }
1179
+ }
1180
+ }
1181
+ }
1182
+ Sequential Stages
1183
+ Stages in Declarative Pipeline may have a stages section containing a list of nested stages to be run in sequential order.
1184
+
1185
+ A stage must have one and only one of steps, stages, parallel, or matrix. It is not possible to nest a parallel or matrix block within a stage directive if that stage directive is nested within a parallel or matrix block itself. However, a stage directive within a parallel or matrix block can use all other functionality of a stage, including agent, tools, when, etc.
1186
+ Example 24. Sequential Stages, Declarative Pipeline
1187
+ pipeline {
1188
+ agent none
1189
+ stages {
1190
+ stage('Non-Sequential Stage') {
1191
+ agent {
1192
+ label 'for-non-sequential'
1193
+ }
1194
+ steps {
1195
+ echo "On Non-Sequential Stage"
1196
+ }
1197
+ }
1198
+ stage('Sequential') {
1199
+ agent {
1200
+ label 'for-sequential'
1201
+ }
1202
+ environment {
1203
+ FOR_SEQUENTIAL = "some-value"
1204
+ }
1205
+ stages {
1206
+ stage('In Sequential 1') {
1207
+ steps {
1208
+ echo "In Sequential 1"
1209
+ }
1210
+ }
1211
+ stage('In Sequential 2') {
1212
+ steps {
1213
+ echo "In Sequential 2"
1214
+ }
1215
+ }
1216
+ stage('Parallel In Sequential') {
1217
+ parallel {
1218
+ stage('In Parallel 1') {
1219
+ steps {
1220
+ echo "In Parallel 1"
1221
+ }
1222
+ }
1223
+ stage('In Parallel 2') {
1224
+ steps {
1225
+ echo "In Parallel 2"
1226
+ }
1227
+ }
1228
+ }
1229
+ }
1230
+ }
1231
+ }
1232
+ }
1233
+ }
1234
+ Parallel
1235
+ Stages in Declarative Pipeline may have a parallel section containing a list of nested stages to be run in parallel.
1236
+
1237
+ A stage must have one and only one of steps, stages, parallel, or matrix. It is not possible to nest a parallel or matrix block within a stage directive if that stage directive is nested within a parallel or matrix block itself. However, a stage directive within a parallel or matrix block can use all other functionality of a stage, including agent, tools, when, etc.
1238
+ In addition, you can force your parallel stages to all be aborted when any one of them fails, by adding failFast true to the stage containing the parallel. Another option for adding failfast is adding an option to the pipeline definition: parallelsAlwaysFailFast().
1239
+
1240
+ Example 25. Parallel Stages, Declarative Pipeline
1241
+ pipeline {
1242
+ agent any
1243
+ stages {
1244
+ stage('Non-Parallel Stage') {
1245
+ steps {
1246
+ echo 'This stage will be executed first.'
1247
+ }
1248
+ }
1249
+ stage('Parallel Stage') {
1250
+ when {
1251
+ branch 'master'
1252
+ }
1253
+ failFast true
1254
+ parallel {
1255
+ stage('Branch A') {
1256
+ agent {
1257
+ label "for-branch-a"
1258
+ }
1259
+ steps {
1260
+ echo "On Branch A"
1261
+ }
1262
+ }
1263
+ stage('Branch B') {
1264
+ agent {
1265
+ label "for-branch-b"
1266
+ }
1267
+ steps {
1268
+ echo "On Branch B"
1269
+ }
1270
+ }
1271
+ stage('Branch C') {
1272
+ agent {
1273
+ label "for-branch-c"
1274
+ }
1275
+ stages {
1276
+ stage('Nested 1') {
1277
+ steps {
1278
+ echo "In stage Nested 1 within Branch C"
1279
+ }
1280
+ }
1281
+ stage('Nested 2') {
1282
+ steps {
1283
+ echo "In stage Nested 2 within Branch C"
1284
+ }
1285
+ }
1286
+ }
1287
+ }
1288
+ }
1289
+ }
1290
+ }
1291
+ }
1292
+ Example 26. parallelsAlwaysFailFast
1293
+ pipeline {
1294
+ agent any
1295
+ options {
1296
+ parallelsAlwaysFailFast()
1297
+ }
1298
+ stages {
1299
+ stage('Non-Parallel Stage') {
1300
+ steps {
1301
+ echo 'This stage will be executed first.'
1302
+ }
1303
+ }
1304
+ stage('Parallel Stage') {
1305
+ when {
1306
+ branch 'master'
1307
+ }
1308
+ parallel {
1309
+ stage('Branch A') {
1310
+ agent {
1311
+ label "for-branch-a"
1312
+ }
1313
+ steps {
1314
+ echo "On Branch A"
1315
+ }
1316
+ }
1317
+ stage('Branch B') {
1318
+ agent {
1319
+ label "for-branch-b"
1320
+ }
1321
+ steps {
1322
+ echo "On Branch B"
1323
+ }
1324
+ }
1325
+ stage('Branch C') {
1326
+ agent {
1327
+ label "for-branch-c"
1328
+ }
1329
+ stages {
1330
+ stage('Nested 1') {
1331
+ steps {
1332
+ echo "In stage Nested 1 within Branch C"
1333
+ }
1334
+ }
1335
+ stage('Nested 2') {
1336
+ steps {
1337
+ echo "In stage Nested 2 within Branch C"
1338
+ }
1339
+ }
1340
+ }
1341
+ }
1342
+ }
1343
+ }
1344
+ }
1345
+ }
1346
+ Matrix
1347
+ Stages in Declarative Pipeline may have a matrix section defining a multi-dimensional matrix of name-value combinations to be run in parallel. We’ll refer to these combinations as "cells" in a matrix. Each cell in a matrix can include one or more stages to be run sequentially using the configuration for that cell.
1348
+
1349
+ A stage must have one and only one of steps, stages, parallel, or matrix. It is not possible to nest a parallel or matrix block within a stage directive if that stage directive is nested within a parallel or matrix block itself. However, a stage directive within a parallel or matrix block can use all other functionality of a stage, including agent, tools, when, etc.
1350
+ In addition, you can force your matrix cells to all be aborted when any one of them fails, by adding failFast true to the stage containing the matrix. Another option for adding failfast is adding an option to the pipeline definition: parallelsAlwaysFailFast().
1351
+
1352
+ The matrix section must include an axes section and a stages section. The axes section defines the values for each axis in the matrix. The stages section defines a list of stages to run sequentially in each cell. A matrix may have an excludes section to remove invalid cells from the matrix. Many of the directives available on stage, including agent, tools, when, etc., can also be added to matrix to control the behavior of each cell.
1353
+
1354
+ axes
1355
+ The axes section specifies one or more axis directives. Each axis consists of a name and a list of values. All the values from each axis are combined with the others to produce the cells.
1356
+
1357
+ Example 27. One-axis with 3 cells
1358
+ matrix {
1359
+ axes {
1360
+ axis {
1361
+ name 'PLATFORM'
1362
+ values 'linux', 'mac', 'windows'
1363
+ }
1364
+ }
1365
+ // ...
1366
+ }
1367
+ Example 28. Two-axis with 12 cells (three by four)
1368
+ matrix {
1369
+ axes {
1370
+ axis {
1371
+ name 'PLATFORM'
1372
+ values 'linux', 'mac', 'windows'
1373
+ }
1374
+ axis {
1375
+ name 'BROWSER'
1376
+ values 'chrome', 'edge', 'firefox', 'safari'
1377
+ }
1378
+ }
1379
+ // ...
1380
+ }
1381
+ Example 29. Three-axis matrix with 24 cells (three by four by two)
1382
+ matrix {
1383
+ axes {
1384
+ axis {
1385
+ name 'PLATFORM'
1386
+ values 'linux', 'mac', 'windows'
1387
+ }
1388
+ axis {
1389
+ name 'BROWSER'
1390
+ values 'chrome', 'edge', 'firefox', 'safari'
1391
+ }
1392
+ axis {
1393
+ name 'ARCHITECTURE'
1394
+ values '32-bit', '64-bit'
1395
+ }
1396
+ }
1397
+ // ...
1398
+ }
1399
+ stages
1400
+ The stages section specifies one or more stages to be executed sequentially in each cell. This section is identical to any other stages section.
1401
+
1402
+ Example 30. One-axis with 3 cells, each cell runs three stages - "build", "test", and "deploy"
1403
+ matrix {
1404
+ axes {
1405
+ axis {
1406
+ name 'PLATFORM'
1407
+ values 'linux', 'mac', 'windows'
1408
+ }
1409
+ }
1410
+ stages {
1411
+ stage('build') {
1412
+ // ...
1413
+ }
1414
+ stage('test') {
1415
+ // ...
1416
+ }
1417
+ stage('deploy') {
1418
+ // ...
1419
+ }
1420
+ }
1421
+ }
1422
+ Example 31. Two-axis with 12 cells (three by four)
1423
+ matrix {
1424
+ axes {
1425
+ axis {
1426
+ name 'PLATFORM'
1427
+ values 'linux', 'mac', 'windows'
1428
+ }
1429
+ axis {
1430
+ name 'BROWSER'
1431
+ values 'chrome', 'edge', 'firefox', 'safari'
1432
+ }
1433
+ }
1434
+ stages {
1435
+ stage('build-and-test') {
1436
+ // ...
1437
+ }
1438
+ }
1439
+ }
1440
+ excludes (optional)
1441
+ The optional excludes section lets authors specify one or more exclude filter expressions that select cells to be excluded from the expanded set of matrix cells (aka, sparsening). Filters are constructed using a basic directive structure of one or more of exclude axis directives each with a name and values list.
1442
+
1443
+ The axis directives inside an exclude generate a set of combinations (similar to generating the matrix cells). The matrix cells that match all the values from an exclude combination are removed from the matrix. If more than one exclude directive is supplied, each is evaluated separately to remove cells.
1444
+
1445
+ When dealing with a long list of values to exclude, exclude axis directives can use notValues instead of values. These will exclude cells that do not match one of the values passed to notValues.
1446
+
1447
+ Example 32. Three-axis matrix with 24 cells, exclude '32-bit, mac' (4 cells excluded)
1448
+ matrix {
1449
+ axes {
1450
+ axis {
1451
+ name 'PLATFORM'
1452
+ values 'linux', 'mac', 'windows'
1453
+ }
1454
+ axis {
1455
+ name 'BROWSER'
1456
+ values 'chrome', 'edge', 'firefox', 'safari'
1457
+ }
1458
+ axis {
1459
+ name 'ARCHITECTURE'
1460
+ values '32-bit', '64-bit'
1461
+ }
1462
+ }
1463
+ excludes {
1464
+ exclude {
1465
+ axis {
1466
+ name 'PLATFORM'
1467
+ values 'mac'
1468
+ }
1469
+ axis {
1470
+ name 'ARCHITECTURE'
1471
+ values '32-bit'
1472
+ }
1473
+ }
1474
+ }
1475
+ // ...
1476
+ }
1477
+ Exclude the linux, safari combination and exclude any platform that is not windows with the edge browser.
1478
+
1479
+ Example 33. Three-axis matrix with 24 cells, exclude '32-bit, mac' and invalid browser combinations (9 cells excluded)
1480
+ matrix {
1481
+ axes {
1482
+ axis {
1483
+ name 'PLATFORM'
1484
+ values 'linux', 'mac', 'windows'
1485
+ }
1486
+ axis {
1487
+ name 'BROWSER'
1488
+ values 'chrome', 'edge', 'firefox', 'safari'
1489
+ }
1490
+ axis {
1491
+ name 'ARCHITECTURE'
1492
+ values '32-bit', '64-bit'
1493
+ }
1494
+ }
1495
+ excludes {
1496
+ exclude {
1497
+ // 4 cells
1498
+ axis {
1499
+ name 'PLATFORM'
1500
+ values 'mac'
1501
+ }
1502
+ axis {
1503
+ name 'ARCHITECTURE'
1504
+ values '32-bit'
1505
+ }
1506
+ }
1507
+ exclude {
1508
+ // 2 cells
1509
+ axis {
1510
+ name 'PLATFORM'
1511
+ values 'linux'
1512
+ }
1513
+ axis {
1514
+ name 'BROWSER'
1515
+ values 'safari'
1516
+ }
1517
+ }
1518
+ exclude {
1519
+ // 3 more cells and '32-bit, mac' (already excluded)
1520
+ axis {
1521
+ name 'PLATFORM'
1522
+ notValues 'windows'
1523
+ }
1524
+ axis {
1525
+ name 'BROWSER'
1526
+ values 'edge'
1527
+ }
1528
+ }
1529
+ }
1530
+ // ...
1531
+ }
1532
+ Matrix cell-level directives (optional)
1533
+ Matrix lets users efficiently configure the overall environment for each cell, by adding stage-level directives under matrix itself. These directives behave the same as they would on a stage but they can also accept values provided by the matrix for each cell.
1534
+
1535
+ The axis and exclude directives define the static set of cells that make up the matrix. That set of combinations is generated before the start of the pipeline run. The "per-cell" directives, on the other hand, are evaluated at runtime.
1536
+
1537
+ These directives include:
1538
+
1539
+ agent
1540
+
1541
+ environment
1542
+
1543
+ input
1544
+
1545
+ options
1546
+
1547
+ post
1548
+
1549
+ tools
1550
+
1551
+ when
1552
+
1553
+ Example 34. Complete Matrix Example, Declarative Pipeline
1554
+ pipeline {
1555
+ parameters {
1556
+ choice(name: 'PLATFORM_FILTER', choices: ['all', 'linux', 'windows', 'mac'], description: 'Run on specific platform')
1557
+ }
1558
+ agent none
1559
+ stages {
1560
+ stage('BuildAndTest') {
1561
+ matrix {
1562
+ agent {
1563
+ label "${PLATFORM}-agent"
1564
+ }
1565
+ when { anyOf {
1566
+ expression { params.PLATFORM_FILTER == 'all' }
1567
+ expression { params.PLATFORM_FILTER == env.PLATFORM }
1568
+ } }
1569
+ axes {
1570
+ axis {
1571
+ name 'PLATFORM'
1572
+ values 'linux', 'windows', 'mac'
1573
+ }
1574
+ axis {
1575
+ name 'BROWSER'
1576
+ values 'firefox', 'chrome', 'safari', 'edge'
1577
+ }
1578
+ }
1579
+ excludes {
1580
+ exclude {
1581
+ axis {
1582
+ name 'PLATFORM'
1583
+ values 'linux'
1584
+ }
1585
+ axis {
1586
+ name 'BROWSER'
1587
+ values 'safari'
1588
+ }
1589
+ }
1590
+ exclude {
1591
+ axis {
1592
+ name 'PLATFORM'
1593
+ notValues 'windows'
1594
+ }
1595
+ axis {
1596
+ name 'BROWSER'
1597
+ values 'edge'
1598
+ }
1599
+ }
1600
+ }
1601
+ stages {
1602
+ stage('Build') {
1603
+ steps {
1604
+ echo "Do Build for ${PLATFORM} - ${BROWSER}"
1605
+ }
1606
+ }
1607
+ stage('Test') {
1608
+ steps {
1609
+ echo "Do Test for ${PLATFORM} - ${BROWSER}"
1610
+ }
1611
+ }
1612
+ }
1613
+ }
1614
+ }
1615
+ }
1616
+ }
1617
+ Steps
1618
+ Declarative Pipelines may use all the available steps documented in the Pipeline Steps reference, which contains a comprehensive list of steps, with the addition of the steps listed below which are only supported in Declarative Pipeline.
1619
+
1620
+ script
1621
+ The script step takes a block of Scripted Pipeline and executes that in the Declarative Pipeline. For most use-cases, the script step should be unnecessary in Declarative Pipelines, but it can provide a useful "escape hatch". script blocks of non-trivial size and/or complexity should be moved into Shared Libraries instead.
1622
+
1623
+ Example 35. Script Block in Declarative Pipeline
1624
+ pipeline {
1625
+ agent any
1626
+ stages {
1627
+ stage('Example') {
1628
+ steps {
1629
+ echo 'Hello World'
1630
+
1631
+ script {
1632
+ def browsers = ['chrome', 'firefox']
1633
+ for (int i = 0; i < browsers.size(); ++i) {
1634
+ echo "Testing the ${browsers[i]} browser"
1635
+ }
1636
+ }
1637
+ }
1638
+ }
1639
+ }
1640
+ }
1641
+ Scripted Pipeline
1642
+ Scripted Pipeline, like Declarative Pipeline, is built on top of the underlying Pipeline sub-system. Unlike Declarative, Scripted Pipeline is effectively a general-purpose DSL [1] built with Groovy. Most functionality provided by the Groovy language is made available to users of Scripted Pipeline, which means it can be a very expressive and flexible tool with which one can author continuous delivery pipelines.
1643
+
1644
+ Flow Control
1645
+ Scripted Pipeline is serially executed from the top of a Jenkinsfile downwards, like most traditional scripts in Groovy or other languages. Providing flow control, therefore, rests on Groovy expressions, such as the if/else conditionals, for example:
1646
+
1647
+ Example 36. Conditional Statement if, Scripted Pipeline
1648
+ node {
1649
+ stage('Example') {
1650
+ if (env.BRANCH_NAME == 'master') {
1651
+ echo 'I only execute on the master branch'
1652
+ } else {
1653
+ echo 'I execute elsewhere'
1654
+ }
1655
+ }
1656
+ }
1657
+ Another way Scripted Pipeline flow control can be managed is with Groovy’s exception handling support. When Steps fail for whatever reason they throw an exception. Handling behaviors on-error must make use of the try/catch/finally blocks in Groovy, for example:
1658
+
1659
+ Example 37. Try-Catch Block, Scripted Pipeline
1660
+ node {
1661
+ stage('Example') {
1662
+ try {
1663
+ sh 'exit 1'
1664
+ }
1665
+ catch (exc) {
1666
+ echo 'Something failed, I should sound the klaxons!'
1667
+ throw
1668
+ }
1669
+ }
1670
+ }
1671
+ Steps
1672
+ As discussed at the start of this chapter, the most fundamental part of a Pipeline is the "step". Fundamentally, steps tell Jenkins what to do and serve as the basic building block for both Declarative and Scripted Pipeline syntax.
1673
+
1674
+ Scripted Pipeline does not introduce any steps which are specific to its syntax; Pipeline Steps reference contains a comprehensive list of steps provided by Pipeline and plugins.
1675
+
1676
+ Differences from plain Groovy
1677
+ In order to provide durability, which means that running Pipelines can survive a restart of the Jenkins controller, Scripted Pipeline must serialize data back to the controller. Due to this design requirement, some Groovy idioms such as collection.each { item -> /* perform operation */ } are not fully supported. Refer to JENKINS-27421 and JENKINS-26481 for more information.
1678
+
1679
+ Syntax Comparison
1680
+
1681
+ This video shares some differences between Scripted and Declarative Pipeline syntax.
1682
+
1683
+ When Jenkins Pipeline was first created, Groovy was selected as the foundation. Jenkins has long shipped with an embedded Groovy engine to provide advanced scripting capabilities for admins and users alike. Additionally, the implementors of Jenkins Pipeline found Groovy to be a solid foundation upon which to build what is now referred to as the "Scripted Pipeline" DSL. [1].
1684
+
1685
+ As it is a fully-featured programming environment, Scripted Pipeline offers a tremendous amount of flexibility and extensibility to Jenkins users. The Groovy learning-curve isn’t typically desirable for all members of a given team, so Declarative Pipeline was created to offer a simpler and more opinionated syntax for authoring Jenkins Pipeline.
1686
+
1687
+ Both are fundamentally the same Pipeline sub-system underneath. They are both durable implementations of "Pipeline as code". They are both able to use steps built into Pipeline or provided by plugins. Both are able to utilize Shared Libraries
1688
+
1689
+ Where they differ however is in syntax and flexibility. Declarative limits what is available to the user with a more strict and pre-defined structure, making it an ideal choice for simpler continuous delivery pipelines. Scripted provides very few limits, insofar that the only limits on structure and syntax tend to be defined by Groovy itself, rather than any Pipeline-specific systems, making it an ideal choice for power-users and those with more complex requirements. As the name implies, Declarative Pipeline encourages a declarative programming model. [2] Whereas Scripted Pipelines follow a more imperative programming model. [3]
1690
+
1691
+ 1. Domain-specific language
1692
+ 2. Declarative Programming
1693
+ 3. Imperative Programming
data/docs/raw/using_a_jenkinsfile.txt ADDED
@@ -0,0 +1,810 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Using a Jenkinsfile
2
+ Table of Contents
3
+ Creating a Jenkinsfile
4
+ Build
5
+ Test
6
+ Deploy
7
+ Working with your Jenkinsfile
8
+ Using environment variables
9
+ Setting environment variables
10
+ Setting environment variables dynamically
11
+ Handling credentials
12
+ For secret text, usernames and passwords, and secret files
13
+ Secret text
14
+ Usernames and passwords
15
+ Secret files
16
+ For other credential types
17
+ Combining credentials in one step
18
+ String interpolation
19
+ Interpolation of sensitive environment variables
20
+ Injection via interpolation
21
+ Handling parameters
22
+ Handling failure
23
+ Error-handling steps
24
+ Using multiple agents
25
+ Optional step arguments
26
+ Advanced Scripted Pipeline
27
+ Parallel execution
28
+ This section builds on the information covered in Getting started with Pipeline and introduces more useful steps, common patterns, and demonstrates some non-trivial Jenkinsfile examples.
29
+
30
+ Creating a Jenkinsfile, which is checked into source control [1], provides a number of immediate benefits:
31
+
32
+ Code review/iteration on the Pipeline
33
+
34
+ Audit trail for the Pipeline
35
+
36
+ Single source of truth [2] for the Pipeline, which can be viewed and edited by multiple members of the project.
37
+
38
+ Pipeline supports two syntaxes, Declarative (introduced in Pipeline 2.5) and Scripted Pipeline. Both of which support building continuous delivery pipelines. Both may be used to define a Pipeline in either the web UI or with a Jenkinsfile, though it’s generally considered a best practice to create a Jenkinsfile and check the file into the source control repository.
39
+
40
+ Creating a Jenkinsfile
41
+ As discussed in the Defining a Pipeline in SCM, a Jenkinsfile is a text file that contains the definition of a Jenkins Pipeline and is checked into source control. Consider the following Pipeline which implements a basic three-stage continuous delivery pipeline.
42
+
43
+ Jenkinsfile (Declarative Pipeline)
44
+ pipeline {
45
+ agent any
46
+
47
+ stages {
48
+ stage('Build') {
49
+ steps {
50
+ echo 'Building..'
51
+ }
52
+ }
53
+ stage('Test') {
54
+ steps {
55
+ echo 'Testing..'
56
+ }
57
+ }
58
+ stage('Deploy') {
59
+ steps {
60
+ echo 'Deploying....'
61
+ }
62
+ }
63
+ }
64
+ }
65
+ Toggle Scripted Pipeline (Advanced)
66
+ Not all Pipelines will have these same three stages, but it is a good starting point to define them for most projects. The sections below will demonstrate the creation and execution of a simple Pipeline in a test installation of Jenkins.
67
+
68
+ It is assumed that there is already a source control repository set up for the project and a Pipeline has been defined in Jenkins following these instructions.
69
+
70
+ Using a text editor, ideally one which supports Groovy syntax highlighting, create a new Jenkinsfile in the root directory of the project.
71
+
72
+ The Declarative Pipeline example above contains the minimum necessary structure to implement a continuous delivery pipeline. The agent directive, which is required, instructs Jenkins to allocate an executor and workspace for the Pipeline. Without an agent directive, not only is the Declarative Pipeline not valid, it would not be capable of doing any work! By default the agent directive ensures that the source repository is checked out and made available for steps in the subsequent stages.
73
+
74
+ The stages directive and steps directives are also required for a valid Declarative Pipeline as they instruct Jenkins what to execute and in which stage it should be executed.
75
+
76
+ For more advanced usage with Scripted Pipeline, the example above node is a crucial first step as it allocates an executor and workspace for the Pipeline. In essence, without node, a Pipeline cannot do any work! From within node, the first order of business will be to checkout the source code for this project. Since the Jenkinsfile is being pulled directly from source control, Pipeline provides a quick and easy way to access the right revision of the source code.
77
+
78
+ Jenkinsfile (Scripted Pipeline)
79
+ node {
80
+ checkout scm
81
+ /* .. snip .. */
82
+ }
83
+ The checkout step will checkout code from source control; scm is a special variable which instructs the checkout step to clone the specific revision which triggered this Pipeline run.
84
+ Build
85
+ For many projects the beginning of "work" in the Pipeline would be the "build" stage. Typically this stage of the Pipeline will be where source code is assembled, compiled, or packaged. The Jenkinsfile is not a replacement for an existing build tool such as GNU/Make, Maven, Gradle, or others, but rather can be viewed as a glue layer to bind the multiple phases of a project’s development lifecycle (build, test, deploy) together.
86
+
87
+ Jenkins has a number of plugins for invoking practically any build tool in general use, but this example will simply invoke make from a shell step (sh). The sh step assumes the system is Unix/Linux-based, for Windows-based systems the bat could be used instead.
88
+
89
+ Jenkinsfile (Declarative Pipeline)
90
+ pipeline {
91
+ agent any
92
+
93
+ stages {
94
+ stage('Build') {
95
+ steps {
96
+ sh 'make'
97
+ archiveArtifacts artifacts: '**/target/*.jar', fingerprint: true
98
+ }
99
+ }
100
+ }
101
+ }
102
+ Toggle Scripted Pipeline (Advanced)
103
+ The sh step invokes the make command and will only continue if a zero exit code is returned by the command. Any non-zero exit code will fail the Pipeline.
104
+ archiveArtifacts captures the files built matching the include pattern (**/target/*.jar) and saves them to the Jenkins controller for later retrieval.
105
+ Archiving artifacts is not a substitute for using external artifact repositories such as Artifactory or Nexus and should be considered only for basic reporting and file archival.
106
+
107
+ Test
108
+ Running automated tests is a crucial component of any successful continuous delivery process. As such, Jenkins has a number of test recording, reporting, and visualization facilities provided by a number of plugins. At a fundamental level, when there are test failures, it is useful to have Jenkins record the failures for reporting and visualization in the web UI. The example below uses the junit step, provided by the JUnit plugin.
109
+
110
+ In the example below, if tests fail, the Pipeline is marked "unstable", as denoted by a yellow ball in the web UI. Based on the recorded test reports, Jenkins can also provide historical trend analysis and visualization.
111
+
112
+ Jenkinsfile (Declarative Pipeline)
113
+ pipeline {
114
+ agent any
115
+
116
+ stages {
117
+ stage('Test') {
118
+ steps {
119
+ /* `make check` returns non-zero on test failures,
120
+ * using `true` to allow the Pipeline to continue nonetheless
121
+ */
122
+ sh 'make check || true'
123
+ junit '**/target/*.xml'
124
+ }
125
+ }
126
+ }
127
+ }
128
+ Toggle Scripted Pipeline (Advanced)
129
+ Using an inline shell conditional (sh 'make check || true') ensures that the sh step always sees a zero exit code, giving the junit step the opportunity to capture and process the test reports. Alternative approaches to this are covered in more detail in the Handling failure section below.
130
+ junit captures and associates the JUnit XML files matching the inclusion pattern (**/target/*.xml).
131
+ Deploy
132
+ Deployment can imply a variety of steps, depending on the project or organization requirements, and may be anything from publishing built artifacts to an Artifactory server, to pushing code to a production system.
133
+
134
+ At this stage of the example Pipeline, both the "Build" and "Test" stages have successfully executed. In essence, the "Deploy" stage will only execute assuming previous stages completed successfully, otherwise the Pipeline would have exited early.
135
+
136
+ Jenkinsfile (Declarative Pipeline)
137
+ pipeline {
138
+ agent any
139
+
140
+ stages {
141
+ stage('Deploy') {
142
+ when {
143
+ expression {
144
+ currentBuild.result == null || currentBuild.result == 'SUCCESS'
145
+ }
146
+ }
147
+ steps {
148
+ sh 'make publish'
149
+ }
150
+ }
151
+ }
152
+ }
153
+ Toggle Scripted Pipeline (Advanced)
154
+ Accessing the currentBuild.result variable allows the Pipeline to determine if there were any test failures. In which case, the value would be UNSTABLE.
155
+ Assuming everything has executed successfully in the example Jenkins Pipeline, each successful Pipeline run will have associated build artifacts archived, test results reported upon and the full console output all in Jenkins.
156
+
157
+ A Scripted Pipeline can include conditional tests (shown above), loops, try/catch/finally blocks, and even functions. The next section will cover this advanced Scripted Pipeline syntax in more detail.
158
+
159
+ Working with your Jenkinsfile
160
+ The following sections provide details about handling:
161
+
162
+ specific Pipeline syntax in your Jenkinsfile and
163
+
164
+ features and functionality of Pipeline syntax which are essential in building your application or Pipeline project.
165
+
166
+ Using environment variables
167
+ Jenkins Pipeline exposes environment variables via the global variable env, which is available from anywhere within a Jenkinsfile. The full list of environment variables accessible from within Jenkins Pipeline is documented at ${YOUR_JENKINS_URL}/pipeline-syntax/globals#env and includes:
168
+
169
+ BUILD_ID
170
+ The current build ID, identical to BUILD_NUMBER for builds created in Jenkins versions 1.597+.
171
+
172
+ BUILD_NUMBER
173
+ The current build number, such as "153".
174
+
175
+ BUILD_TAG
176
+ String of jenkins-${JOB_NAME}-${BUILD_NUMBER}. Convenient to put into a resource file, a jar file, etc for easier identification.
177
+
178
+ BUILD_URL
179
+ The URL where the results of this build can be found (for example, http://buildserver/jenkins/job/MyJobName/17/).
180
+
181
+ EXECUTOR_NUMBER
182
+ The unique number that identifies the current executor (among executors of the same machine) performing this build. This is the number you see in the "build executor status", except that the number starts from 0, not 1.
183
+
184
+ JAVA_HOME
185
+ If your job is configured to use a specific JDK, this variable is set to the JAVA_HOME of the specified JDK. When this variable is set, PATH is also updated to include the bin subdirectory of JAVA_HOME.
186
+
187
+ JENKINS_URL
188
+ Full URL of Jenkins, such as https://example.com:port/jenkins/ (NOTE: only available if Jenkins URL set in "System Configuration").
189
+
190
+ JOB_NAME
191
+ Name of the project of this build, such as "foo" or "foo/bar".
192
+
193
+ NODE_NAME
194
+ The name of the node the current build is running on. Set to 'master' for the Jenkins controller.
195
+
196
+ WORKSPACE
197
+ The absolute path of the workspace.
198
+
199
+ Referencing or using these environment variables can be accomplished like accessing any key in a Groovy Map, for example:
200
+
201
+ Jenkinsfile (Declarative Pipeline)
202
+ pipeline {
203
+ agent any
204
+ stages {
205
+ stage('Example') {
206
+ steps {
207
+ echo "Running ${env.BUILD_ID} on ${env.JENKINS_URL}"
208
+ }
209
+ }
210
+ }
211
+ }
212
+ Toggle Scripted Pipeline (Advanced)
213
+ Setting environment variables
214
+ Setting an environment variable within a Jenkins Pipeline is accomplished differently depending on whether Declarative or Scripted Pipeline is used.
215
+
216
+ Declarative Pipeline supports an environment directive, whereas users of Scripted Pipeline must use the withEnv step.
217
+
218
+ Jenkinsfile (Declarative Pipeline)
219
+ pipeline {
220
+ agent any
221
+ environment {
222
+ CC = 'clang'
223
+ }
224
+ stages {
225
+ stage('Example') {
226
+ environment {
227
+ DEBUG_FLAGS = '-g'
228
+ }
229
+ steps {
230
+ sh 'printenv'
231
+ }
232
+ }
233
+ }
234
+ }
235
+ Toggle Scripted Pipeline (Advanced)
236
+ An environment directive used in the top-level pipeline block will apply to all steps within the Pipeline.
237
+ An environment directive defined within a stage will only apply the given environment variables to steps within the stage.
238
+ Setting environment variables dynamically
239
+ Environment variables can be set at run time and can be used by shell scripts (sh), Windows batch scripts (bat) and PowerShell scripts (powershell). Each script can either returnStatus or returnStdout. More information on scripts.
240
+
241
+ Below is an example in a declarative pipeline using sh (shell) with both returnStatus and returnStdout.
242
+
243
+ Jenkinsfile (Declarative Pipeline)
244
+ pipeline {
245
+ agent any
246
+ environment {
247
+ // Using returnStdout
248
+ CC = """${sh(
249
+ returnStdout: true,
250
+ script: 'echo "clang"'
251
+ )}"""
252
+ // Using returnStatus
253
+ EXIT_STATUS = """${sh(
254
+ returnStatus: true,
255
+ script: 'exit 1'
256
+ )}"""
257
+ }
258
+ stages {
259
+ stage('Example') {
260
+ environment {
261
+ DEBUG_FLAGS = '-g'
262
+ }
263
+ steps {
264
+ sh 'printenv'
265
+ }
266
+ }
267
+ }
268
+ }
269
+ An agent must be set at the top level of the pipeline. This will fail if agent is set as agent none.
270
+ When using returnStdout a trailing whitespace will be appended to the returned string. Use .trim() to remove this.
271
+ Handling credentials
272
+ Credentials configured in Jenkins can be handled in Pipelines for immediate use. Read more about using credentials in Jenkins on the Using credentials page.
273
+
274
+ The correct way to handle credentials in Jenkins
275
+
276
+ For secret text, usernames and passwords, and secret files
277
+ Jenkins' declarative Pipeline syntax has the credentials() helper method (used within the environment directive) which supports secret text, username and password, as well as secret file credentials. If you want to handle other types of credentials, refer to the For other credential types section.
278
+
279
+ Secret text
280
+ The following Pipeline code shows an example of how to create a Pipeline using environment variables for secret text credentials.
281
+
282
+ In this example, two secret text credentials are assigned to separate environment variables to access Amazon Web Services (AWS). These credentials would have been configured in Jenkins with their respective credential IDs jenkins-aws-secret-key-id and jenkins-aws-secret-access-key.
283
+
284
+ Jenkinsfile (Declarative Pipeline)
285
+ pipeline {
286
+ agent {
287
+ // Define agent details here
288
+ }
289
+ environment {
290
+ AWS_ACCESS_KEY_ID = credentials('jenkins-aws-secret-key-id')
291
+ AWS_SECRET_ACCESS_KEY = credentials('jenkins-aws-secret-access-key')
292
+ }
293
+ stages {
294
+ stage('Example stage 1') {
295
+ steps {
296
+ //
297
+ }
298
+ }
299
+ stage('Example stage 2') {
300
+ steps {
301
+ //
302
+ }
303
+ }
304
+ }
305
+ }
306
+ You can reference the two credential environment variables (defined in this Pipeline’s environment directive), within this stage’s steps using the syntax $AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY. For example, here you can authenticate to AWS using the secret text credentials assigned to these credential variables. To maintain the security and anonymity of these credentials, if the job displays the value of these credential variables from within the Pipeline (such as echo $AWS_SECRET_ACCESS_KEY), Jenkins only returns the value “****” to reduce the risk of secret information being disclosed to the console output and any logs. Any sensitive information in credential IDs themselves (such as usernames) are also returned as “****” in the Pipeline run’s output. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
307
+ In this Pipeline example, the credentials assigned to the two AWS_…​ environment variables are scoped globally for the entire Pipeline, so these credential variables could also be used in this stage’s steps. If, however, the environment directive in this Pipeline were moved to a specific stage (as is the case in the Usernames and passwords Pipeline example below), then these AWS_…​ environment variables would only be scoped to the steps in that stage.
308
+ Storing static AWS keys in Jenkins credentials is not very secure. If you can run Jenkins itself in AWS (at least the agent), it is preferable to use IAM roles for a computer or EKS service account. It is also possible to use web identity federation.
309
+ Usernames and passwords
310
+ The following Pipeline code snippets show an example of how to create a Pipeline using environment variables for username and password credentials.
311
+
312
+ In this example, username and password credentials are assigned to environment variables to access a Bitbucket repository in a common account or team for your organization; these credentials would have been configured in Jenkins with the credential ID jenkins-bitbucket-common-creds.
313
+
314
+ When setting the credential environment variable in the environment directive:
315
+
316
+ environment {
317
+ BITBUCKET_COMMON_CREDS = credentials('jenkins-bitbucket-common-creds')
318
+ }
319
+ this actually sets the following three environment variables:
320
+
321
+ BITBUCKET_COMMON_CREDS - contains a username and a password separated by a colon in the format username:password.
322
+
323
+ BITBUCKET_COMMON_CREDS_USR - an additional variable containing the username component only.
324
+
325
+ BITBUCKET_COMMON_CREDS_PSW - an additional variable containing the password component only.
326
+
327
+ By convention, variable names for environment variables are typically specified in capital case, with individual words separated by underscores. You can, however, specify any legitimate variable name using lower case characters. Bear in mind that the additional environment variables created by the credentials() method (above) will always be appended with _USR and _PSW (i.e. in the format of an underscore followed by three capital letters).
328
+
329
+ The following code snippet shows the example Pipeline in its entirety:
330
+
331
+ Jenkinsfile (Declarative Pipeline)
332
+ pipeline {
333
+ agent {
334
+ // Define agent details here
335
+ }
336
+ stages {
337
+ stage('Example stage 1') {
338
+ environment {
339
+ BITBUCKET_COMMON_CREDS = credentials('jenkins-bitbucket-common-creds')
340
+ }
341
+ steps {
342
+ //
343
+ }
344
+ }
345
+ stage('Example stage 2') {
346
+ steps {
347
+ //
348
+ }
349
+ }
350
+ }
351
+ }
352
+ The following credential environment variables (defined in this Pipeline’s environment directive) are available within this stage’s steps and can be referenced using the syntax:
353
+ $BITBUCKET_COMMON_CREDS
354
+
355
+ $BITBUCKET_COMMON_CREDS_USR
356
+
357
+ $BITBUCKET_COMMON_CREDS_PSW
358
+
359
+ For example, here you can authenticate to Bitbucket with the username and password assigned to these credential variables. To maintain the security and anonymity of these credentials, if the job displays the value of these credential variables from within the Pipeline the same behavior described in the Secret text example above applies to these username and password credential variable types too. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
360
+
361
+ In this Pipeline example, the credentials assigned to the three BITBUCKET_COMMON_CREDS…​ environment variables are scoped only to Example stage 1, so these credential variables are not available for use in this Example stage 2 stage’s steps. If, however, the environment directive in this Pipeline were moved immediately within the pipeline block (as is the case in the Secret text Pipeline example above), then these BITBUCKET_COMMON_CREDS…​ environment variables would be scoped globally and could be used in any stage’s steps.
362
+ Secret files
363
+ A secret file is a credential which is stored in a file and uploaded to Jenkins. Secret files are used for credentials that are:
364
+
365
+ too unwieldy to enter directly into Jenkins, and/or
366
+
367
+ in binary format, such as a GPG file.
368
+
369
+ In this example, we use a Kubernetes config file that has been configured as a secret file credential named my-kubeconfig.
370
+
371
+ Jenkinsfile (Declarative Pipeline)
372
+ pipeline {
373
+ agent {
374
+ // Define agent details here
375
+ }
376
+ environment {
377
+ // The MY_KUBECONFIG environment variable will be assigned the value of a temporary file.
378
+ // For example:
379
+ // /home/user/.jenkins/workspace/cred_test@tmp/secretFiles/546a5cf3-9b56-4165-a0fd-19e2afe6b31f/kubeconfig.txt
380
+ MY_KUBECONFIG = credentials('my-kubeconfig')
381
+ }
382
+ stages {
383
+ stage('Example stage 1') {
384
+ steps {
385
+ sh("kubectl --kubeconfig $MY_KUBECONFIG get pods")
386
+ }
387
+ }
388
+ }
389
+ }
390
+ For other credential types
391
+ If you need to set credentials in a Pipeline for anything other than secret text, usernames and passwords, or secret files like SSH keys or certificates, use Jenkins' Snippet Generator feature, which you can access through Jenkins' classic UI.
392
+
393
+ To access the Snippet Generator for your Pipeline project/item:
394
+
395
+ From the Jenkins Dashboard, select the name of your Pipeline project/item.
396
+
397
+ In the left navigation pane, select Pipeline Syntax and ensure that the Snippet Generator option is available at the top of the navigation pane.
398
+
399
+ From the Sample Step field, choose withCredentials: Bind credentials to variables.
400
+
401
+ Under Bindings, click Add and choose from the dropdown:
402
+
403
+ SSH User Private Key - to handle SSH public/private key pair credentials, from which you can specify:
404
+
405
+ Key File Variable - the name of the environment variable that will be bound to these credentials. Jenkins actually assigns this temporary variable to the secure location of the private key file required in the SSH public/private key pair authentication process.
406
+
407
+ Passphrase Variable ( Optional ) - the name of the environment variable that will be bound to the passphrase associated with the SSH public/private key pair.
408
+
409
+ Username Variable ( Optional ) - the name of the environment variable that will be bound to username associated with the SSH public/private key pair.
410
+
411
+ Credentials - choose the SSH public/private key credentials stored in Jenkins. The value of this field is the credential ID, which Jenkins writes out to the generated snippet.
412
+
413
+ Certificate - to handle PKCS#12 certificates, from which you can specify:
414
+
415
+ Keystore Variable - the name of the environment variable that will be bound to these credentials. Jenkins actually assigns this temporary variable to the secure location of the certificate’s keystore required in the certificate authentication process.
416
+
417
+ Password Variable ( Optional ) - the name of the environment variable that will be bound to the password associated with the certificate.
418
+
419
+ Alias Variable ( Optional ) - the name of the environment variable that will be bound to the unique alias associated with the certificate.
420
+
421
+ Credentials - choose the certificate credentials stored in Jenkins. The value of this field is the credential ID, which Jenkins writes out to the generated snippet.
422
+
423
+ Docker client certificate - to handle Docker Host Certificate Authentication.
424
+
425
+ Click Generate Pipeline Script and Jenkins generates a withCredentials(…​) { …​ } Pipeline step snippet for the credentials you specified, which you can then copy and paste into your Declarative or Scripted Pipeline code.
426
+ Notes:
427
+
428
+ The Credentials fields (above) show the names of credentials configured in Jenkins. However, these values are converted to credential IDs after clicking Generate Pipeline Script.
429
+
430
+ To combine more than one credential in a single withCredentials(…​) { …​ } Pipeline step, see Combining credentials in one step (below) for details.
431
+
432
+ SSH User Private Key example
433
+
434
+ withCredentials(bindings: [sshUserPrivateKey(credentialsId: 'jenkins-ssh-key-for-abc', \
435
+ keyFileVariable: 'SSH_KEY_FOR_ABC', \
436
+ passphraseVariable: '', \
437
+ usernameVariable: '')]) {
438
+ // some block
439
+ }
440
+ The optional passphraseVariable and usernameVariable definitions can be deleted in your final Pipeline code.
441
+
442
+ Certificate example
443
+
444
+ withCredentials(bindings: [certificate(aliasVariable: '', \
445
+ credentialsId: 'jenkins-certificate-for-xyz', \
446
+ keystoreVariable: 'CERTIFICATE_FOR_XYZ', \
447
+ passwordVariable: 'XYZ-CERTIFICATE-PASSWORD')]) {
448
+ // some block
449
+ }
450
+ The optional aliasVariable and passwordVariable variable definitions can be deleted in your final Pipeline code.
451
+
452
+ The following code snippet shows an example Pipeline in its entirety, which implements the SSH User Private Key and Certificate snippets above:
453
+
454
+ Jenkinsfile (Declarative Pipeline)
455
+ pipeline {
456
+ agent {
457
+ // define agent details
458
+ }
459
+ stages {
460
+ stage('Example stage 1') {
461
+ steps {
462
+ withCredentials(bindings: [sshUserPrivateKey(credentialsId: 'jenkins-ssh-key-for-abc', \
463
+ keyFileVariable: 'SSH_KEY_FOR_ABC')]) {
464
+ //
465
+ }
466
+ withCredentials(bindings: [certificate(credentialsId: 'jenkins-certificate-for-xyz', \
467
+ keystoreVariable: 'CERTIFICATE_FOR_XYZ', \
468
+ passwordVariable: 'XYZ-CERTIFICATE-PASSWORD')]) {
469
+ //
470
+ }
471
+ }
472
+ }
473
+ stage('Example stage 2') {
474
+ steps {
475
+ //
476
+ }
477
+ }
478
+ }
479
+ }
480
+ Within this step, you can reference the credential environment variable with the syntax $SSH_KEY_FOR_ABC. For example, here you can authenticate to the ABC application with its configured SSH public/private key pair credentials, whose SSH User Private Key file is assigned to $SSH_KEY_FOR_ABC.
481
+ Within this step, you can reference the credential environment variable with the syntax $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD. For example, here you can authenticate to the XYZ application with its configured certificate credentials, whose Certificate's keystore file and password are assigned to the variables $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD, respectively.
482
+ In this Pipeline example, the credentials assigned to the $SSH_KEY_FOR_ABC, $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD environment variables are scoped only within their respective withCredentials( …​ ) { …​ } steps, so these credential variables are not available for use in this Example stage 2 stage’s steps.
483
+ To maintain the security and anonymity of these credentials, if you attempt to retrieve the value of these credential variables from within these withCredentials( …​ ) { …​ } steps, the same behavior described in the Secret text example (above) applies to these SSH public/private key pair credential and certificate variable types too. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
484
+
485
+ When using the Sample Step field’s withCredentials: Bind credentials to variables option in the Snippet Generator, only credentials which your current Pipeline project/item has access to can be selected from any Credentials field’s list. While you can manually write a withCredentials( …​ ) { …​ } step for your Pipeline (like the examples above), using the Snippet Generator is recommended to avoid specifying credentials that are out of scope for this Pipeline project/item, which when run, will make the step fail.
486
+
487
+ You can also use the Snippet Generator to generate withCredentials( …​ ) { …​ } steps to handle secret text, usernames and passwords and secret files. However, if you only need to handle these types of credentials, it is recommended you use the relevant procedure described in the section above for improved Pipeline code readability.
488
+
489
+ Note the use of single-quotes instead of double-quotes to define the script (the implicit parameter to sh) in Groovy above. The single-quotes will cause the secret to be expanded by the shell as an environment variable. The double-quotes are potentially less secure as the secret is interpolated by Groovy, and so typical operating system process listings will accidentally disclose it:
490
+
491
+ node {
492
+ withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
493
+ sh /* WRONG! */ """
494
+ set +x
495
+ curl -H 'Token: $TOKEN' https://some.api/
496
+ """
497
+ sh /* CORRECT */ '''
498
+ set +x
499
+ curl -H 'Token: $TOKEN' https://some.api/
500
+ '''
501
+ }
502
+ }
503
+ Combining credentials in one step
504
+ Using the Snippet Generator, you can make multiple credentials available within a single withCredentials( …​ ) { …​ } step by doing the following:
505
+
506
+ From the Jenkins Dashboard, select the name of your Pipeline project/item.
507
+
508
+ In the left navigation pane, select Pipeline Syntax and ensure that the Snippet Generator option is available at the top of the navigation pane.
509
+
510
+ From the Sample Step field, choose withCredentials: Bind credentials to variables.
511
+
512
+ Click Add under Bindings.
513
+
514
+ Choose the credential type to add to the withCredentials( …​ ) { …​ } step from the dropdown list.
515
+
516
+ Specify the credential Bindings details. Read more about these in the procedure under For other credential types (above).
517
+
518
+ Repeat from "Click Add …​" (above) for each (set of) credential/s to add to the withCredentials( …​ ) { …​ } step.
519
+
520
+ Select Generate Pipeline Script to generate the final withCredentials( …​ ) { …​ } step snippet.
521
+
522
+ String interpolation
523
+ Jenkins Pipeline uses rules identical to Groovy for string interpolation. Groovy’s String interpolation support can be confusing to many newcomers to the language. While Groovy supports declaring a string with either single quotes, or double quotes, for example:
524
+
525
+ def singlyQuoted = 'Hello'
526
+ def doublyQuoted = "World"
527
+ Only the latter string will support the dollar-sign ($) based string interpolation, for example:
528
+
529
+ def username = 'Jenkins'
530
+ echo 'Hello Mr. ${username}'
531
+ echo "I said, Hello Mr. ${username}"
532
+ Would result in:
533
+
534
+ Hello Mr. ${username}
535
+ I said, Hello Mr. Jenkins
536
+ Understanding how to use string interpolation is vital for using some of Pipeline’s more advanced features.
537
+
538
+ Interpolation of sensitive environment variables
539
+ Groovy string interpolation should never be used with credentials.
540
+
541
+ Groovy GStrings (double-quoted or triple-double-quoted strings) expand ${TOKEN} before the command is sent to the agent. When that happens, the secret is copied into the process arguments (visible via tooling such as ps) and any shell metacharacters inside the secret or user-controlled value are executed immediately. Always send a literal command string to sh, bat, powershell, or pwsh and let the shell read secrets from its environment, which withCredentials or the environment block have already populated.
542
+
543
+ Jenkinsfile (Declarative Pipeline)
544
+ pipeline {
545
+ agent any
546
+ environment {
547
+ API_TOKEN = credentials('example-token-id')
548
+ }
549
+ stages {
550
+ stage('Example') {
551
+ steps {
552
+ /* WRONG */
553
+ sh "curl -H 'Authorization: Bearer ${API_TOKEN}' https://example.com"
554
+
555
+ }
556
+ }
557
+ }
558
+ }
559
+ Jenkinsfile (Declarative Pipeline)
560
+ pipeline {
561
+ agent any
562
+ environment {
563
+ API_TOKEN = credentials('example-token-id')
564
+ }
565
+ stages {
566
+ stage('Example') {
567
+ steps {
568
+ /* CORRECT */
569
+ sh 'curl -H "Authorization: Bearer ${API_TOKEN}" https://example.com'
570
+ }
571
+ }
572
+ }
573
+ }
574
+ Any Groovy construct that avoids interpolation (for example, sh(script: 'curl …​ $API_TOKEN', label: 'call API')) is safe; the key is keeping secrets out of GStrings so only the shell expands them.
575
+
576
+ Injection via interpolation
577
+ Groovy string interpolation can inject rogue commands into command interpreters via special characters.
578
+
579
+ Another note of caution. Using Groovy string interpolation for user-controlled variables with steps that pass their arguments to command interpreters such as the sh, bat, powershell, or pwsh steps can result in problems analogous to SQL injection. This occurs when a user-controlled variable (generally an environment variable, usually a parameter passed to the build) that contains special characters (e.g. / \ $ & % ^ > < | ;) is passed to the sh, bat, powershell, or pwsh steps using Groovy interpolation. For a simple example:
580
+
581
+ Jenkinsfile (Declarative Pipeline)
582
+ pipeline {
583
+ agent any
584
+ parameters {
585
+ string(name: 'STATEMENT', defaultValue: 'hello; ls /', description: 'What should I say?')
586
+ }
587
+ stages {
588
+ stage('Example') {
589
+ steps {
590
+ /* WRONG! */
591
+ sh("echo ${STATEMENT}")
592
+ }
593
+ }
594
+ }
595
+ }
596
+ In this example, the argument to the sh step is evaluated by Groovy, and STATEMENT is interpolated directly into the argument as if sh('echo hello; ls /') has been written in the Pipeline. When this is processed on the agent, rather than echoing the value hello; ls /, it will echo hello then proceed to list the entire root directory of the agent. Any user able to control a variable interpolated by such a step would be able to make the sh step run arbitrary code on the agent. To avoid this problem, make sure arguments to steps such as sh or bat that reference parameters or other user-controlled environment variables use single quotes to avoid Groovy interpolation.
597
+
598
+ Jenkinsfile (Declarative Pipeline)
599
+ pipeline {
600
+ agent any
601
+ parameters {
602
+ string(name: 'STATEMENT', defaultValue: 'hello; ls /', description: 'What should I say?')
603
+ }
604
+ stages {
605
+ stage('Example') {
606
+ steps {
607
+ /* CORRECT */
608
+ sh('echo ${STATEMENT}')
609
+ }
610
+ }
611
+ }
612
+ }
613
+ Credential mangling is another issue that can occur when credentials that contain special characters are passed to a step using Groovy interpolation. When the credential value is mangled, it is no longer valid and will no longer be masked in the console log.
614
+
615
+ Jenkinsfile (Declarative Pipeline)
616
+ pipeline {
617
+ agent any
618
+ environment {
619
+ EXAMPLE_KEY = credentials('example-credentials-id') // Secret value is 'sec%ret'
620
+ }
621
+ stages {
622
+ stage('Example') {
623
+ steps {
624
+ /* WRONG! */
625
+ bat "echo ${EXAMPLE_KEY}"
626
+ }
627
+ }
628
+ }
629
+ }
630
+ Here, the bat step receives echo sec%ret and the Windows batch shell will simply drop the % and print out the value secret. Because there is a single character difference, the value secret will not be masked. Though the value is not the same as the actual credential, this is still a significant exposure of sensitive information. Again, single-quotes avoids this issue.
631
+
632
+ Jenkinsfile (Declarative Pipeline)
633
+ pipeline {
634
+ agent any
635
+ environment {
636
+ EXAMPLE_KEY = credentials('example-credentials-id') // Secret value is 'sec%ret'
637
+ }
638
+ stages {
639
+ stage('Example') {
640
+ steps {
641
+ /* CORRECT */
642
+ bat 'echo %EXAMPLE_KEY%'
643
+ }
644
+ }
645
+ }
646
+ }
647
+ Handling parameters
648
+ Declarative Pipeline supports parameters out-of-the-box, allowing the Pipeline to accept user-specified parameters at runtime via the parameters directive. Configuring parameters with Scripted Pipeline is done with the properties step, which can be found in the Snippet Generator.
649
+
650
+ If you configured your pipeline to accept parameters using the Build with Parameters option, those parameters are accessible as members of the params variable.
651
+
652
+ Assuming that a String parameter named "Greeting" has been configured in the Jenkinsfile, it can access that parameter via ${params.Greeting}:
653
+
654
+ Jenkinsfile (Declarative Pipeline)
655
+ pipeline {
656
+ agent any
657
+ parameters {
658
+ string(name: 'Greeting', defaultValue: 'Hello', description: 'How should I greet the world?')
659
+ }
660
+ stages {
661
+ stage('Example') {
662
+ steps {
663
+ echo "${params.Greeting} World!"
664
+ }
665
+ }
666
+ }
667
+ }
668
+ Toggle Scripted Pipeline (Advanced)
669
+ Handling failure
670
+ Declarative Pipeline supports robust failure handling by default via its post section which allows declaring a number of different "post conditions" such as: always, unstable, success, failure, and changed. The Pipeline Syntax section provides more detail on how to use the various post conditions.
671
+
672
+ Jenkinsfile (Declarative Pipeline)
673
+ pipeline {
674
+ agent any
675
+ stages {
676
+ stage('Test') {
677
+ steps {
678
+ sh 'make check'
679
+ }
680
+ }
681
+ }
682
+ post {
683
+ always {
684
+ junit '**/target/*.xml'
685
+ }
686
+ failure {
687
+ mail to: 'team@example.com', subject: 'The Pipeline failed :('
688
+ }
689
+ }
690
+ }
691
+ Toggle Scripted Pipeline (Advanced)
692
+ Scripted Pipeline however relies on Groovy’s built-in try/catch/finally semantics for handling failures during execution of the Pipeline.
693
+
694
+ In the Test example above, the sh step was modified to never return a non-zero exit code (sh 'make check || true'). This approach, while valid, means the following stages need to check currentBuild.result to know if there has been a test failure or not.
695
+
696
+ An alternative way of handling this, which preserves the early-exit behavior of failures in Pipeline, while still giving junit the chance to capture test reports, is to use a series of try/finally blocks:
697
+
698
+ Error-handling steps
699
+ Jenkins Pipelines provide dedicated steps for flexible error handling, allowing you to control how your Pipeline responds to errors and warnings. These steps help you surface errors and warnings clearly in Jenkins, giving you control over whether the Pipeline fails, continues, or simply reports a warning. For more information, refer to:
700
+
701
+ catchError
702
+
703
+ error
704
+
705
+ unstable
706
+
707
+ warnError
708
+
709
+ Using multiple agents
710
+ In all the previous examples, only a single agent has been used. This means Jenkins will allocate an executor wherever one is available, regardless of how it is labeled or configured. Not only can this behavior be overridden, but Pipeline allows utilizing multiple agents in the Jenkins environment from within the same Jenkinsfile, which can be helpful for more advanced use-cases such as executing builds/tests across multiple platforms.
711
+
712
+ In the example below, the "Build" stage will be performed on one agent and the built results will be reused on two subsequent agents, labelled "linux" and "windows" respectively, during the "Test" stage.
713
+
714
+ Jenkinsfile (Declarative Pipeline)
715
+ pipeline {
716
+ agent none
717
+ stages {
718
+ stage('Build') {
719
+ agent any
720
+ steps {
721
+ checkout scm
722
+ sh 'make'
723
+ stash includes: '**/target/*.jar', name: 'app'
724
+ }
725
+ }
726
+ stage('Test on Linux') {
727
+ agent {
728
+ label 'linux'
729
+ }
730
+ steps {
731
+ unstash 'app'
732
+ sh 'make check'
733
+ }
734
+ post {
735
+ always {
736
+ junit '**/target/*.xml'
737
+ }
738
+ }
739
+ }
740
+ stage('Test on Windows') {
741
+ agent {
742
+ label 'windows'
743
+ }
744
+ steps {
745
+ unstash 'app'
746
+ bat 'make check'
747
+ }
748
+ post {
749
+ always {
750
+ junit '**/target/*.xml'
751
+ }
752
+ }
753
+ }
754
+ }
755
+ }
756
+ Toggle Scripted Pipeline (Advanced)
757
+ The stash step allows capturing files matching an inclusion pattern (**/target/*.jar) for reuse within the same Pipeline. Once the Pipeline has completed its execution, stashed files are deleted from the Jenkins controller.
758
+ The parameter in agent/node allows for any valid Jenkins label expression. Consult the Pipeline Syntax section for more details.
759
+ unstash will retrieve the named "stash" from the Jenkins controller into the Pipeline’s current workspace.
760
+ The bat script allows for executing batch scripts on Windows-based platforms.
761
+ Optional step arguments
762
+ Pipeline follows the Groovy language convention of allowing parentheses to be omitted around method arguments.
763
+
764
+ Many Pipeline steps also use the named-parameter syntax as a shorthand for creating a Map in Groovy, which uses the syntax [key1: value1, key2: value2]. Making statements like the following functionally equivalent:
765
+
766
+ git url: 'git://example.com/amazing-project.git', branch: 'master'
767
+ git([url: 'git://example.com/amazing-project.git', branch: 'master'])
768
+ For convenience, when calling steps taking only one parameter (or only one mandatory parameter), the parameter name may be omitted, for example:
769
+
770
+ sh 'echo hello' /* short form */
771
+ sh([script: 'echo hello']) /* long form */
772
+ Advanced Scripted Pipeline
773
+ Scripted Pipeline is a domain-specific language [3] based on Groovy, most Groovy syntax can be used in Scripted Pipeline without modification.
774
+
775
+ Parallel execution
776
+ The example in the section above runs tests across two different platforms in a linear series. In practice, if the make check execution takes 30 minutes to complete, the "Test" stage would now take 60 minutes to complete!
777
+
778
+ Fortunately, Pipeline has built-in functionality for executing portions of Scripted Pipeline in parallel, implemented in the aptly named parallel step.
779
+
780
+ Refactoring the example above to use the parallel step:
781
+
782
+ Jenkinsfile (Scripted Pipeline)
783
+ stage('Build') {
784
+ /* .. snip .. */
785
+ }
786
+
787
+ stage('Test') {
788
+ parallel linux: {
789
+ node('linux') {
790
+ checkout scm
791
+ try {
792
+ unstash 'app'
793
+ sh 'make check'
794
+ }
795
+ finally {
796
+ junit '**/target/*.xml'
797
+ }
798
+ }
799
+ },
800
+ windows: {
801
+ node('windows') {
802
+ /* .. snip .. */
803
+ }
804
+ }
805
+ }
806
+ Instead of executing the tests on the "linux" and "windows" labelled nodes in series, they will now execute in parallel assuming the requisite capacity exists in the Jenkins environment.
807
+
808
+ 1. en.wikipedia.org/wiki/Source_control_management
809
+ 2. en.wikipedia.org/wiki/Single_Source_of_Truth
810
+ 3. en.wikipedia.org/wiki/Domain-specific_language
data/errors/error_01.txt ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
+ WorkflowScript: 10: expecting '}', found '' @ line 10, column 1.
4
+ 1 error
5
+
6
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
7
+ at org.codehaus.groovy.control.ErrorCollector.addFatalError(ErrorCollector.java:149)
8
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:119)
9
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:131)
10
+ at org.codehaus.groovy.control.SourceUnit.addError(SourceUnit.java:349)
11
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.transformCSTIntoAST(AntlrParserPlugin.java:225)
12
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.parseCST(AntlrParserPlugin.java:191)
13
+ at org.codehaus.groovy.control.SourceUnit.parse(SourceUnit.java:233)
14
+ at org.codehaus.groovy.control.CompilationUnit$1.call(CompilationUnit.java:189)
15
+ at org.codehaus.groovy.control.CompilationUnit.applyToSourceUnits(CompilationUnit.java:966)
16
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:626)
17
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
18
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
19
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
20
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
21
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
22
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
23
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
24
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
25
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
26
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
27
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
28
+ at hudson.model.Executor.run(Executor.java:460)
29
+ Finished: FAILURE
data/errors/error_02.txt ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
+ WorkflowScript: 9: expecting '}', found '' @ line 9, column 1.
4
+ 1 error
5
+
6
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
7
+ at org.codehaus.groovy.control.ErrorCollector.addFatalError(ErrorCollector.java:149)
8
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:119)
9
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:131)
10
+ at org.codehaus.groovy.control.SourceUnit.addError(SourceUnit.java:349)
11
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.transformCSTIntoAST(AntlrParserPlugin.java:225)
12
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.parseCST(AntlrParserPlugin.java:191)
13
+ at org.codehaus.groovy.control.SourceUnit.parse(SourceUnit.java:233)
14
+ at org.codehaus.groovy.control.CompilationUnit$1.call(CompilationUnit.java:189)
15
+ at org.codehaus.groovy.control.CompilationUnit.applyToSourceUnits(CompilationUnit.java:966)
16
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:626)
17
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
18
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
19
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
20
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
21
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
22
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
23
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
24
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
25
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
26
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
27
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
28
+ at hudson.model.Executor.run(Executor.java:460)
29
+ Finished: FAILURE
data/errors/error_03.txt ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] stage
4
+ [Pipeline] { (Build)
5
+ [Pipeline] sh
6
+ [Pipeline] error
7
+ [Pipeline] }
8
+ [Pipeline] // stage
9
+ [Pipeline] End of Pipeline
10
+ ERROR: Attempted to execute a step that requires a node context while ‘agent none’ was specified. Be sure to specify your own ‘node { ... }’ blocks when using ‘agent none’.
11
+ Finished: FAILURE
data/errors/error_04.txt ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] node
4
+ Running on Jenkins in /var/jenkins_home/workspace/error_04
5
+ [Pipeline] {
6
+ [Pipeline] stage
7
+ [Pipeline] { (Checkout)
8
+ [Pipeline] git
9
+ The recommended git tool is: NONE
10
+ No credentials specified
11
+ Cloning the remote Git repository
12
+ Cloning repository https://github.com/example/repo.git
13
+ > git init /var/jenkins_home/workspace/error_04 # timeout=10
14
+ Fetching upstream changes from https://github.com/example/repo.git
15
+ > git --version # timeout=10
16
+ > git --version # 'git version 2.47.3'
17
+ > git fetch --tags --force --progress -- https://github.com/example/repo.git +refs/heads/*:refs/remotes/origin/* # timeout=10
18
+ ERROR: Error cloning remote repo 'origin'
19
+ hudson.plugins.git.GitException: Command "git fetch --tags --force --progress -- https://github.com/example/repo.git +refs/heads/*:refs/remotes/origin/*" returned status code 128:
20
+ stdout:
21
+ stderr: remote: Invalid username or token. Password authentication is not supported for Git operations.
22
+ fatal: Authentication failed for 'https://github.com/example/repo.git/'
23
+
24
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:2848)
25
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:2189)
26
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:638)
27
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl$2.execute(CliGitAPIImpl.java:880)
28
+ at PluginClassLoader for git//hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1220)
29
+ at PluginClassLoader for git//hudson.plugins.git.GitSCM._checkout(GitSCM.java:1310)
30
+ at PluginClassLoader for git//hudson.plugins.git.GitSCM.checkout(GitSCM.java:1277)
31
+ at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep.checkout(SCMStep.java:136)
32
+ at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep$StepExecutionImpl.run(SCMStep.java:101)
33
+ at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep$StepExecutionImpl.run(SCMStep.java:88)
34
+ at PluginClassLoader for workflow-step-api//org.jenkinsci.plugins.workflow.steps.SynchronousNonBlockingStepExecution.lambda$start$0(SynchronousNonBlockingStepExecution.java:49)
35
+ at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
36
+ at java.base/java.util.concurrent.FutureTask.run(Unknown Source)
37
+ at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
38
+ at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
39
+ at java.base/java.lang.Thread.run(Unknown Source)
40
+ ERROR: Error cloning remote repo 'origin'
41
+ ERROR: Maximum checkout retry attempts reached, aborting
42
+ [Pipeline] }
43
+ [Pipeline] // stage
44
+ [Pipeline] }
45
+ [Pipeline] // node
46
+ [Pipeline] End of Pipeline
47
+ ERROR: Error cloning remote repo 'origin'
48
+ Finished: FAILURE
data/errors/error_05.txt ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] node
4
+ Running on Jenkins in /var/jenkins_home/workspace/error_05
5
+ [Pipeline] {
6
+ [Pipeline] stage
7
+ [Pipeline] { (Auth)
8
+ [Pipeline] withCredentials
9
+ [Pipeline] // withCredentials
10
+ [Pipeline] }
11
+ [Pipeline] // stage
12
+ [Pipeline] }
13
+ [Pipeline] // node
14
+ [Pipeline] End of Pipeline
15
+ ERROR: Could not find credentials entry with ID 'does-not-exist'
16
+ Finished: FAILURE
data/errors/error_06.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] node
4
+ Still waiting to schedule task
5
+ ‘Jenkins’ doesn’t have label ‘no-such-node’
6
+ Aborted by Arvind Nandigam
7
+ [Pipeline] // node
8
+ [Pipeline] End of Pipeline
9
+ org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: db6e2408-0d72-4f17-a842-4d2ecce38130
10
+ Finished: ABORTED
data/errors/error_07.txt ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+
3
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
4
+ WorkflowScript: 6: Expected a step @ line 6, column 17.
5
+ def x = 1
6
+ ^
7
+
8
+ 1 error
9
+
10
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
11
+ at org.codehaus.groovy.control.CompilationUnit.applyToPrimaryClassNodes(CompilationUnit.java:1107)
12
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:624)
13
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
14
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
15
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
16
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
17
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
18
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
19
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
20
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
21
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
22
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
23
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
24
+ at hudson.model.Executor.run(Executor.java:460)
25
+ Finished: FAILURE
data/errors/error_08.txt ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Started by user Arvind Nandigam
2
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
+ WorkflowScript: 3: Invalid agent type "docker" specified. Must be one of [any, label, none] @ line 3, column 9.
4
+ docker {
5
+ ^
6
+
7
+ 1 error
8
+
9
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
10
+ at org.codehaus.groovy.control.CompilationUnit.applyToPrimaryClassNodes(CompilationUnit.java:1107)
11
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:624)
12
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
13
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
14
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
15
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
16
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
17
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
18
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
19
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
20
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
21
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
22
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
23
+ at hudson.model.Executor.run(Executor.java:460)
24
+ Finished: FAILURE
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/.no_exist/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/adapter_config.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/.no_exist/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/added_tokens.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/.no_exist/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/chat_template.jinja ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/53aa51172d142c89d9012cce15ae4d6cc0ca6895895114379cacb4fab128d9db ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:53aa51172d142c89d9012cce15ae4d6cc0ca6895895114379cacb4fab128d9db
3
+ size 90868376
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/58d4a9a45664eb9e12de9549c548c09b6134c17f ADDED
@@ -0,0 +1,173 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ language: en
3
+ license: apache-2.0
4
+ library_name: sentence-transformers
5
+ tags:
6
+ - sentence-transformers
7
+ - feature-extraction
8
+ - sentence-similarity
9
+ - transformers
10
+ datasets:
11
+ - s2orc
12
+ - flax-sentence-embeddings/stackexchange_xml
13
+ - ms_marco
14
+ - gooaq
15
+ - yahoo_answers_topics
16
+ - code_search_net
17
+ - search_qa
18
+ - eli5
19
+ - snli
20
+ - multi_nli
21
+ - wikihow
22
+ - natural_questions
23
+ - trivia_qa
24
+ - embedding-data/sentence-compression
25
+ - embedding-data/flickr30k-captions
26
+ - embedding-data/altlex
27
+ - embedding-data/simple-wiki
28
+ - embedding-data/QQP
29
+ - embedding-data/SPECTER
30
+ - embedding-data/PAQ_pairs
31
+ - embedding-data/WikiAnswers
32
+ pipeline_tag: sentence-similarity
33
+ ---
34
+
35
+
36
+ # all-MiniLM-L6-v2
37
+ This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 384 dimensional dense vector space and can be used for tasks like clustering or semantic search.
38
+
39
+ ## Usage (Sentence-Transformers)
40
+ Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
41
+
42
+ ```
43
+ pip install -U sentence-transformers
44
+ ```
45
+
46
+ Then you can use the model like this:
47
+ ```python
48
+ from sentence_transformers import SentenceTransformer
49
+ sentences = ["This is an example sentence", "Each sentence is converted"]
50
+
51
+ model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
52
+ embeddings = model.encode(sentences)
53
+ print(embeddings)
54
+ ```
55
+
56
+ ## Usage (HuggingFace Transformers)
57
+ Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
58
+
59
+ ```python
60
+ from transformers import AutoTokenizer, AutoModel
61
+ import torch
62
+ import torch.nn.functional as F
63
+
64
+ #Mean Pooling - Take attention mask into account for correct averaging
65
+ def mean_pooling(model_output, attention_mask):
66
+ token_embeddings = model_output[0] #First element of model_output contains all token embeddings
67
+ input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
68
+ return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
69
+
70
+
71
+ # Sentences we want sentence embeddings for
72
+ sentences = ['This is an example sentence', 'Each sentence is converted']
73
+
74
+ # Load model from HuggingFace Hub
75
+ tokenizer = AutoTokenizer.from_pretrained('sentence-transformers/all-MiniLM-L6-v2')
76
+ model = AutoModel.from_pretrained('sentence-transformers/all-MiniLM-L6-v2')
77
+
78
+ # Tokenize sentences
79
+ encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
80
+
81
+ # Compute token embeddings
82
+ with torch.no_grad():
83
+ model_output = model(**encoded_input)
84
+
85
+ # Perform pooling
86
+ sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
87
+
88
+ # Normalize embeddings
89
+ sentence_embeddings = F.normalize(sentence_embeddings, p=2, dim=1)
90
+
91
+ print("Sentence embeddings:")
92
+ print(sentence_embeddings)
93
+ ```
94
+
95
+ ------
96
+
97
+ ## Background
98
+
99
+ The project aims to train sentence embedding models on very large sentence level datasets using a self-supervised
100
+ contrastive learning objective. We used the pretrained [`nreimers/MiniLM-L6-H384-uncased`](https://huggingface.co/nreimers/MiniLM-L6-H384-uncased) model and fine-tuned in on a
101
+ 1B sentence pairs dataset. We use a contrastive learning objective: given a sentence from the pair, the model should predict which out of a set of randomly sampled other sentences, was actually paired with it in our dataset.
102
+
103
+ We developed this model during the
104
+ [Community week using JAX/Flax for NLP & CV](https://discuss.huggingface.co/t/open-to-the-community-community-week-using-jax-flax-for-nlp-cv/7104),
105
+ organized by Hugging Face. We developed this model as part of the project:
106
+ [Train the Best Sentence Embedding Model Ever with 1B Training Pairs](https://discuss.huggingface.co/t/train-the-best-sentence-embedding-model-ever-with-1b-training-pairs/7354). We benefited from efficient hardware infrastructure to run the project: 7 TPUs v3-8, as well as intervention from Googles Flax, JAX, and Cloud team member about efficient deep learning frameworks.
107
+
108
+ ## Intended uses
109
+
110
+ Our model is intended to be used as a sentence and short paragraph encoder. Given an input text, it outputs a vector which captures
111
+ the semantic information. The sentence vector may be used for information retrieval, clustering or sentence similarity tasks.
112
+
113
+ By default, input text longer than 256 word pieces is truncated.
114
+
115
+
116
+ ## Training procedure
117
+
118
+ ### Pre-training
119
+
120
+ We use the pretrained [`nreimers/MiniLM-L6-H384-uncased`](https://huggingface.co/nreimers/MiniLM-L6-H384-uncased) model. Please refer to the model card for more detailed information about the pre-training procedure.
121
+
122
+ ### Fine-tuning
123
+
124
+ We fine-tune the model using a contrastive objective. Formally, we compute the cosine similarity from each possible sentence pairs from the batch.
125
+ We then apply the cross entropy loss by comparing with true pairs.
126
+
127
+ #### Hyper parameters
128
+
129
+ We trained our model on a TPU v3-8. We train the model during 100k steps using a batch size of 1024 (128 per TPU core).
130
+ We use a learning rate warm up of 500. The sequence length was limited to 128 tokens. We used the AdamW optimizer with
131
+ a 2e-5 learning rate. The full training script is accessible in this current repository: `train_script.py`.
132
+
133
+ #### Training data
134
+
135
+ We use the concatenation from multiple datasets to fine-tune our model. The total number of sentence pairs is above 1 billion sentences.
136
+ We sampled each dataset given a weighted probability which configuration is detailed in the `data_config.json` file.
137
+
138
+
139
+ | Dataset | Paper | Number of training tuples |
140
+ |--------------------------------------------------------|:----------------------------------------:|:--------------------------:|
141
+ | [Reddit comments (2015-2018)](https://github.com/PolyAI-LDN/conversational-datasets/tree/master/reddit) | [paper](https://arxiv.org/abs/1904.06472) | 726,484,430 |
142
+ | [S2ORC](https://github.com/allenai/s2orc) Citation pairs (Abstracts) | [paper](https://aclanthology.org/2020.acl-main.447/) | 116,288,806 |
143
+ | [WikiAnswers](https://github.com/afader/oqa#wikianswers-corpus) Duplicate question pairs | [paper](https://doi.org/10.1145/2623330.2623677) | 77,427,422 |
144
+ | [PAQ](https://github.com/facebookresearch/PAQ) (Question, Answer) pairs | [paper](https://arxiv.org/abs/2102.07033) | 64,371,441 |
145
+ | [S2ORC](https://github.com/allenai/s2orc) Citation pairs (Titles) | [paper](https://aclanthology.org/2020.acl-main.447/) | 52,603,982 |
146
+ | [S2ORC](https://github.com/allenai/s2orc) (Title, Abstract) | [paper](https://aclanthology.org/2020.acl-main.447/) | 41,769,185 |
147
+ | [Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_xml) (Title, Body) pairs | - | 25,316,456 |
148
+ | [Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_xml) (Title+Body, Answer) pairs | - | 21,396,559 |
149
+ | [Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_xml) (Title, Answer) pairs | - | 21,396,559 |
150
+ | [MS MARCO](https://microsoft.github.io/msmarco/) triplets | [paper](https://doi.org/10.1145/3404835.3462804) | 9,144,553 |
151
+ | [GOOAQ: Open Question Answering with Diverse Answer Types](https://github.com/allenai/gooaq) | [paper](https://arxiv.org/pdf/2104.08727.pdf) | 3,012,496 |
152
+ | [Yahoo Answers](https://www.kaggle.com/soumikrakshit/yahoo-answers-dataset) (Title, Answer) | [paper](https://proceedings.neurips.cc/paper/2015/hash/250cf8b51c773f3f8dc8b4be867a9a02-Abstract.html) | 1,198,260 |
153
+ | [Code Search](https://huggingface.co/datasets/code_search_net) | - | 1,151,414 |
154
+ | [COCO](https://cocodataset.org/#home) Image captions | [paper](https://link.springer.com/chapter/10.1007%2F978-3-319-10602-1_48) | 828,395|
155
+ | [SPECTER](https://github.com/allenai/specter) citation triplets | [paper](https://doi.org/10.18653/v1/2020.acl-main.207) | 684,100 |
156
+ | [Yahoo Answers](https://www.kaggle.com/soumikrakshit/yahoo-answers-dataset) (Question, Answer) | [paper](https://proceedings.neurips.cc/paper/2015/hash/250cf8b51c773f3f8dc8b4be867a9a02-Abstract.html) | 681,164 |
157
+ | [Yahoo Answers](https://www.kaggle.com/soumikrakshit/yahoo-answers-dataset) (Title, Question) | [paper](https://proceedings.neurips.cc/paper/2015/hash/250cf8b51c773f3f8dc8b4be867a9a02-Abstract.html) | 659,896 |
158
+ | [SearchQA](https://huggingface.co/datasets/search_qa) | [paper](https://arxiv.org/abs/1704.05179) | 582,261 |
159
+ | [Eli5](https://huggingface.co/datasets/eli5) | [paper](https://doi.org/10.18653/v1/p19-1346) | 325,475 |
160
+ | [Flickr 30k](https://shannon.cs.illinois.edu/DenotationGraph/) | [paper](https://transacl.org/ojs/index.php/tacl/article/view/229/33) | 317,695 |
161
+ | [Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_xml) Duplicate questions (titles) | | 304,525 |
162
+ | AllNLI ([SNLI](https://nlp.stanford.edu/projects/snli/) and [MultiNLI](https://cims.nyu.edu/~sbowman/multinli/) | [paper SNLI](https://doi.org/10.18653/v1/d15-1075), [paper MultiNLI](https://doi.org/10.18653/v1/n18-1101) | 277,230 |
163
+ | [Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_xml) Duplicate questions (bodies) | | 250,519 |
164
+ | [Stack Exchange](https://huggingface.co/datasets/flax-sentence-embeddings/stackexchange_xml) Duplicate questions (titles+bodies) | | 250,460 |
165
+ | [Sentence Compression](https://github.com/google-research-datasets/sentence-compression) | [paper](https://www.aclweb.org/anthology/D13-1155/) | 180,000 |
166
+ | [Wikihow](https://github.com/pvl/wikihow_pairs_dataset) | [paper](https://arxiv.org/abs/1810.09305) | 128,542 |
167
+ | [Altlex](https://github.com/chridey/altlex/) | [paper](https://aclanthology.org/P16-1135.pdf) | 112,696 |
168
+ | [Quora Question Triplets](https://quoradata.quora.com/First-Quora-Dataset-Release-Question-Pairs) | - | 103,663 |
169
+ | [Simple Wikipedia](https://cs.pomona.edu/~dkauchak/simplification/) | [paper](https://www.aclweb.org/anthology/P11-2117/) | 102,225 |
170
+ | [Natural Questions (NQ)](https://ai.google.com/research/NaturalQuestions) | [paper](https://transacl.org/ojs/index.php/tacl/article/view/1455) | 100,231 |
171
+ | [SQuAD2.0](https://rajpurkar.github.io/SQuAD-explorer/) | [paper](https://aclanthology.org/P18-2124.pdf) | 87,599 |
172
+ | [TriviaQA](https://huggingface.co/datasets/trivia_qa) | - | 73,346 |
173
+ | **Total** | | **1,170,060,424** |
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/59d594003bf59880a884c574bf88ef7555bb0202 ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "max_seq_length": 256,
3
+ "do_lower_case": false
4
+ }
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/72b987fd805cfa2b58c4c8c952b274a11bfd5a00 ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "nreimers/MiniLM-L6-H384-uncased",
3
+ "architectures": [
4
+ "BertModel"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "gradient_checkpointing": false,
8
+ "hidden_act": "gelu",
9
+ "hidden_dropout_prob": 0.1,
10
+ "hidden_size": 384,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 1536,
13
+ "layer_norm_eps": 1e-12,
14
+ "max_position_embeddings": 512,
15
+ "model_type": "bert",
16
+ "num_attention_heads": 12,
17
+ "num_hidden_layers": 6,
18
+ "pad_token_id": 0,
19
+ "position_embedding_type": "absolute",
20
+ "transformers_version": "4.8.2",
21
+ "type_vocab_size": 2,
22
+ "use_cache": true,
23
+ "vocab_size": 30522
24
+ }
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/952a9b81c0bfd99800fabf352f69c7ccd46c5e43 ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "idx": 0,
4
+ "name": "0",
5
+ "path": "",
6
+ "type": "sentence_transformers.models.Transformer"
7
+ },
8
+ {
9
+ "idx": 1,
10
+ "name": "1",
11
+ "path": "1_Pooling",
12
+ "type": "sentence_transformers.models.Pooling"
13
+ },
14
+ {
15
+ "idx": 2,
16
+ "name": "2",
17
+ "path": "2_Normalize",
18
+ "type": "sentence_transformers.models.Normalize"
19
+ }
20
+ ]
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/c79f2b6a0cea6f4b564fed1938984bace9d30ff0 ADDED
@@ -0,0 +1 @@
 
 
1
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "name_or_path": "nreimers/MiniLM-L6-H384-uncased", "do_basic_tokenize": true, "never_split": null, "tokenizer_class": "BertTokenizer", "model_max_length": 512}
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/cb202bfe2e3c98645018a6d12f182a434c9d3e02 ADDED
The diff for this file is too large to render. See raw diff
 
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/d1514c3162bbe87b343f565fadc62e6c06f04f03 ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "word_embedding_dimension": 384,
3
+ "pooling_mode_cls_token": false,
4
+ "pooling_mode_mean_tokens": true,
5
+ "pooling_mode_max_tokens": false,
6
+ "pooling_mode_mean_sqrt_len_tokens": false
7
+ }
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/e7b0375001f109a6b8873d756ad4f7bbb15fbaa5 ADDED
@@ -0,0 +1 @@
 
 
1
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/fb140275c155a9c7c5a3b3e0e77a9e839594a938 ADDED
The diff for this file is too large to render. See raw diff
 
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/blobs/fd1b291129c607e5d49799f87cb219b27f98acdf ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "__version__": {
3
+ "sentence_transformers": "2.0.0",
4
+ "transformers": "4.6.1",
5
+ "pytorch": "1.8.1"
6
+ }
7
+ }
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/refs/main ADDED
@@ -0,0 +1 @@
 
 
1
+ c9745ed1d9f207416be6d2e6f8de32d1f16199bf
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/1_Pooling/config.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/README.md ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/config.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/config_sentence_transformers.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
3
+ size 0
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/modules.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/sentence_bert_config.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/special_tokens_map.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/tokenizer.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/tokenizer_config.json ADDED
File without changes
model_cache/models--sentence-transformers--all-MiniLM-L6-v2/snapshots/c9745ed1d9f207416be6d2e6f8de32d1f16199bf/vocab.txt ADDED
File without changes
model_cache/models--sentence-transformers--paraphrase-MiniLM-L3-v2/.no_exist/4ca70771034acceecb2e72475f72050fcdde4ddc/adapter_config.json ADDED
File without changes