Arvind2006 committed on
Commit
f18435c
·
verified ·
1 Parent(s): 259df19

Upload folder using huggingface_hub

Browse files
.gitignore CHANGED
@@ -1 +1 @@
1
- model_cache/
 
1
+ model_cache/
Dockerfile CHANGED
@@ -1,18 +1,10 @@
1
- FROM python:3.13-slim
2
 
3
  WORKDIR /app
4
 
5
- # System deps (important for faiss & torch)
6
- RUN apt-get update && apt-get install -y \
7
- build-essential \
8
- git \
9
- && rm -rf /var/lib/apt/lists/*
10
-
11
- COPY requirements.txt .
12
-
13
- RUN pip install --upgrade pip \
14
- && pip install --no-cache-dir -r requirements.txt
15
-
16
  COPY . .
17
 
 
 
 
18
  CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
 
1
FROM python:3.10-slim

WORKDIR /app

# Install dependencies BEFORE copying the application source so that the
# pip layer is cached: editing app code no longer re-installs every package.
COPY requirements.txt .
RUN pip install --upgrade pip \
 && pip install --no-cache-dir -r requirements.txt

# Copy the rest of the project (model_cache/ is excluded via .gitignore/.dockerignore).
COPY . .

# Port 7860 is the HuggingFace Spaces convention for the exposed app.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
README.md CHANGED
@@ -1,14 +1,3 @@
1
- ---
2
- title: Jenkins Error Explainer
3
- emoji: 🛠️
4
- colorFrom: blue
5
- colorTo: green
6
- sdk: docker
7
- sdk_version: "latest"
8
- app_file: main.py
9
- pinned: false
10
- ---
11
-
12
  # Jenkins Error Explainer
13
 
14
  A documentation-grounded system that explains Jenkins pipeline errors using official Jenkins documentation.
 
 
 
 
 
 
 
 
 
 
 
 
1
  # Jenkins Error Explainer
2
 
3
  A documentation-grounded system that explains Jenkins pipeline errors using official Jenkins documentation.
app.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
"""app.py — entry point for HuggingFace Spaces; re-exports the FastAPI app."""
import sys

# Ensure the project root is importable regardless of the working directory.
sys.path.insert(0, ".")

from main import app

if __name__ == "__main__":
    # Local convenience: `python app.py` serves the app directly.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)
cli.py CHANGED
@@ -1,47 +1,47 @@
1
- # cli.py
2
-
3
- import sys
4
- from explain_error import explain_error
5
-
6
- def main():
7
- if len(sys.argv) != 2:
8
- print("Usage: python cli.py <path_to_jenkins_error_log>")
9
- sys.exit(1)
10
-
11
- log_path = sys.argv[1]
12
-
13
- try:
14
- with open(log_path, "r", encoding="utf-8") as f:
15
- log_text = f.read()
16
- except FileNotFoundError:
17
- print(f"Error: File not found -> {log_path}")
18
- sys.exit(1)
19
-
20
- result = explain_error(log_text)
21
-
22
- if result["error_category"] == "unknown":
23
- print(
24
- "Warning: This error is not currently supported by the explainer. "
25
- "The explanation may be incomplete.\n"
26
- )
27
-
28
- print("\n=== Jenkins Error Explanation ===\n")
29
- print(f"Error Category:\n{result['error_category']}\n")
30
-
31
- print("Error Summary:")
32
- print(result["summary"], "\n")
33
-
34
- print("Likely Causes:")
35
- for cause in result["likely_causes"]:
36
- print(f"- {cause}")
37
- print()
38
-
39
- print("Relevant Documentation:")
40
- for ref in result["references"]:
41
- print(f"- {ref}")
42
-
43
- print("\n================================\n")
44
-
45
-
46
- if __name__ == "__main__":
47
- main()
 
1
+ # cli.py
2
+
3
+ import sys
4
+ from explain_error import explain_error
5
+
6
def _read_log(path):
    """Return the contents of *path*; exit with an error message if it is missing."""
    try:
        with open(path, "r", encoding="utf-8") as handle:
            return handle.read()
    except FileNotFoundError:
        print(f"Error: File not found -> {path}")
        sys.exit(1)


def main():
    """CLI entry point: explain the Jenkins error log given as the sole argument.

    Usage: python cli.py <path_to_jenkins_error_log>
    """
    if len(sys.argv) != 2:
        print("Usage: python cli.py <path_to_jenkins_error_log>")
        sys.exit(1)

    log_text = _read_log(sys.argv[1])
    explanation = explain_error(log_text)

    # Unknown categories still get a report, just with a caveat up front.
    if explanation["error_category"] == "unknown":
        print(
            "Warning: This error is not currently supported by the explainer. "
            "The explanation may be incomplete.\n"
        )

    print("\n=== Jenkins Error Explanation ===\n")
    print(f"Error Category:\n{explanation['error_category']}\n")

    print("Error Summary:")
    print(explanation["summary"], "\n")

    print("Likely Causes:")
    for item in explanation["likely_causes"]:
        print(f"- {item}")
    print()

    print("Relevant Documentation:")
    for item in explanation["references"]:
        print(f"- {item}")

    print("\n================================\n")


if __name__ == "__main__":
    main()
data/docs/docs_meta.json CHANGED
The diff for this file is too large to render. See raw diff
 
data/docs/raw/git_scm.txt CHANGED
@@ -1,206 +1,206 @@
1
- The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
-
3
- For a list of other such plugins, see the Pipeline Steps Reference page.
4
-
5
- scmGit
6
- scmGit
7
- The git plugin provides fundamental git operations for Jenkins projects. It can poll, fetch, checkout, and merge contents of git repositories.
8
-
9
- The scmGit parameter of the git plugin is used with the Pipeline SCM checkout step to checkout git repositories into Pipeline workspaces. The Pipeline Syntax Snippet Generator guides the user to select git plugin checkout options and provides online help for each of the options.
10
-
11
- Use the Pipeline Snippet Generator to generate a sample pipeline script for the checkout step. Examples of the checkout step include:
12
-
13
- Checkout step with defaults
14
- Checkout step with https and a specific branch
15
- Checkout step with ssh and a private key credential
16
- Checkout step with https and changelog disabled
17
- Checkout step with git protocol and polling disabled
18
- See the argument descriptions for more details.
19
- The scmGit parameter of the checkout step provides access to all the Pipeline capabilities provided by the git plugin:
20
-
21
- checkout scmGit(userRemoteConfigs: [
22
- [ url: 'https://github.com/jenkinsci/git-plugin' ]
23
- ])
24
- NOTE: The checkout step with the scmGit parameter is the preferred SCM checkout method. For simpler cases that do not require all the capabilities of the git plugin, the git step can also be used.
25
-
26
- Use the Pipeline Snippet Generator to generate a sample pipeline script for the checkout step.
27
-
28
- The checkout step with the scmGit parameter can be used in many cases where the git step cannot be used. Refer to the git plugin documentation for detailed descriptions of options available to the checkout step. For example, the checkout step supports:
29
-
30
- SHA-1 checkout
31
- Tag checkout
32
- Submodule checkout
33
- Sparse checkout
34
- Large file checkout (LFS)
35
- Reference repositories
36
- Branch merges
37
- Repository tagging
38
- Custom refspecs
39
- Timeout configuration
40
- Changelog calculation against a non-default reference
41
- Stale branch pruning
42
- Example: Checkout step with defaults
43
- Checkout from the git plugin source repository using https protocol, no credentials, and the master branch.
44
-
45
- The Pipeline Snippet Generator generates this example:
46
-
47
- checkout scmGit(userRemoteConfigs: [
48
- [ url: 'https://github.com/jenkinsci/git-plugin' ]
49
- ])
50
- Example: Checkout step with https and a specific branch
51
- Checkout from the Jenkins source repository using https protocol, no credentials, and a specific branch (stable-2.289).
52
-
53
- The Pipeline Snippet Generator generates this example:
54
-
55
- checkout scmGit(branches: [[name: 'stable-2.289']],
56
- userRemoteConfigs: [
57
- [ url: 'https://github.com/jenkinsci/jenkins.git' ]
58
- ])
59
- Example: Checkout step with ssh and a private key credential
60
- Checkout from the git client plugin source repository using ssh protocol, private key credentials, and the master branch. The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
61
-
62
- The Pipeline Snippet Generator generates this example:
63
-
64
- checkout changelog: false,
65
- scm: scmGit(userRemoteConfigs: [
66
- [ credentialsId: 'my-private-key-credential-id',
67
- url: 'git@github.com:jenkinsci/git-client-plugin.git' ]
68
- ])
69
- Example: Checkout step with https and changelog disabled
70
- Checkout from the Jenkins source repository using https protocol, no credentials, the master branch, and changelog calculation disabled. If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed. See the workflow scm step documentation for more changelog details.
71
-
72
- The Pipeline Snippet Generator generates this example:
73
-
74
- checkout changelog: false,
75
- scm: scmGit(userRemoteConfigs: [
76
- [ url: 'https://github.com/jenkinsci/credentials-plugin' ]
77
- ])
78
- Example: Checkout step with git protocol and polling disabled
79
- Checkout from the command line git repository using git protocol, no credentials, the master branch, and no polling for changes. If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes. See the workflow scm step documentation for more polling details.
80
-
81
- The Pipeline Snippet Generator generates this example:
82
-
83
- checkout poll: false,
84
- scm: scmGit(userRemoteConfigs: [
85
- [ url: 'git://git.kernel.org/pub/scm/git/git.git' ]
86
- ])
87
- Argument Descriptions
88
- userRemoteConfigs
89
- Specify the repository to track. This can be a URL or a local file path. Note that for super-projects (repositories with submodules), only a local file path or a complete URL is valid. The following are examples of valid git URLs.
90
- ssh://git@github.com/github/git.git
91
- git@github.com:github/git.git (short notation for ssh protocol)
92
- ssh://user@other.host.com/~/repos/R.git (to access the repos/R.git repository in the user's home directory)
93
- https://github.com/github/git.git
94
-
95
- If the repository is a super-project, the location from which to clone submodules is dependent on whether the repository is bare or non-bare (i.e. has a working directory).
96
- If the super-project is bare, the location of the submodules will be taken from .gitmodules.
97
- If the super-project is not bare, it is assumed that the repository has each of its submodules cloned and checked out appropriately. Thus, the submodules will be taken directly from a path like ${SUPER_PROJECT_URL}/${SUBMODULE}, rather than relying on information from .gitmodules.
98
- For a local URL/path to a super-project, git rev-parse --is-bare-repository is used to detect whether the super-project is bare or not.
99
- For a remote URL to a super-project, the ending of the URL determines whether a bare or non-bare repository is assumed:
100
- If the remote URL ends with .git, a non-bare repository is assumed.
101
- If the remote URL does NOT end with .git, a bare repository is assumed.
102
- Array / List of Nested Object
103
- url : String
104
- name : String
105
- refspec : String
106
- credentialsId : String
107
- branches
108
- List of branches to build. Jenkins jobs are most effective when each job builds only a single branch. When a single job builds multiple branches, the changelog comparisons between branches often show no changes or incorrect changes.
109
- Array / List of Nested Object
110
- name : String
111
- browser
112
- Defines the repository browser that displays changes detected by the git plugin.
113
- Nested Choice of Objects
114
- assembla
115
- $class: 'BacklogGitRepositoryBrowser'
116
- bitbucketServer
117
- bitbucket
118
- cgit
119
- fisheye
120
- gitblit
121
- $class: 'GitBucketBrowser'
122
- gitLab
123
- gitLabBrowser
124
- gitList
125
- gitWeb
126
- $class: 'GiteaBrowser'
127
- github
128
- gitiles
129
- $class: 'GitoriousWeb'
130
- gogs
131
- kiln
132
- phabricator
133
- redmine
134
- rhodeCode
135
- $class: 'ScmManagerGitRepositoryBrowser'
136
- jbSpace
137
- $class: 'Stash'
138
- teamFoundation
139
- $class: 'TracGitRepositoryBrowser'
140
- $class: 'TuleapBrowser'
141
- viewgit
142
- gitTool : String
143
- Name of the git tool to be used for this job. Git tool names are defined in "Global Tool Configuration".
144
-
145
- extensions
146
- Extensions add new behavior or modify existing plugin behavior for different uses. Extensions help users more precisely tune plugin behavior to meet their needs.
147
-
148
- Extensions include:
149
-
150
- Clone extensions modify the git operations that retrieve remote changes into the agent workspace. The extensions can adjust the amount of history retrieved, how long the retrieval is allowed to run, and other retrieval details.
151
- Checkout extensions modify the git operations that place files in the workspace from the git repository on the agent. The extensions can adjust the maximum duration of the checkout operation, the use and behavior of git submodules, the location of the workspace on the disc, and more.
152
- Changelog extensions adapt the source code difference calculations for different cases.
153
- Tagging extensions allow the plugin to apply tags in the current workspace.
154
- Build initiation extensions control the conditions that start a build. They can ignore notifications of a change or force a deeper evaluation of the commits when polling.
155
- Merge extensions can optionally merge changes from other branches into the current branch of the agent workspace. They control the source branch for the merge and the options applied to the merge.
156
- Array / List of Nested Choice of Objects
157
- authorInChangelog
158
- $class: 'BitbucketEnvVarExtension'
159
- $class: 'BuildChooserSetting'
160
- buildSingleRevisionOnly
161
- changelogToBranch
162
- checkoutOption
163
- cleanBeforeCheckout
164
- cleanAfterCheckout
165
- cloneOption
166
- $class: 'CodeCommitURLHelper'
167
- $class: 'DisableRemotePoll'
168
- $class: 'ExcludeFromChangeSet'
169
- $class: 'ExcludeFromPoll'
170
- $class: 'FallbackToOtherRepositoryGitSCMExtension'
171
- firstBuildChangelog
172
- $class: 'GitClientAuthenticatorExtension'
173
- lfs
174
- $class: 'GitSCMChecksExtension'
175
- $class: 'GitSCMStatusChecksExtension'
176
- $class: 'GitTagMessageExtension'
177
- $class: 'IgnoreNotifyCommit'
178
- localBranch
179
- $class: 'MessageExclusion'
180
- $class: 'PathRestriction'
181
- perBuildTag
182
- $class: 'PreBuildMerge'
183
- pretestedIntegration
184
- pruneStaleBranch
185
- pruneTags
186
- $class: 'RelativeTargetDirectory'
187
- $class: 'ScmName'
188
- sparseCheckout
189
- submodule
190
- $class: 'UserExclusion'
191
- $class: 'UserIdentity'
192
- $class: 'WipeWorkspace'
193
- doGenerateSubmoduleConfigurations : boolean (optional)
194
- Removed facility that was intended to test combinations of git submodule versions. Removed in git plugin 4.6.0. Ignores the user provided value and always uses false as its value.
195
-
196
- submoduleCfg (optional)
197
- Removed facility that was intended to test combinations of git submodule versions. Removed in git plugin 4.6.0. Ignores the user provided value(s) and always uses empty values.
198
-
199
- Array / List of Nested Object
200
- submoduleName : String
201
- Removed in git plugin 4.6.0.
202
-
203
- branches : Array / List of String
204
- Removed in git plugin 4.6.0.
205
-
206
  Was this page helpful?
 
1
+ The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
+
3
+ For a list of other such plugins, see the Pipeline Steps Reference page.
4
+
5
+ scmGit
6
+ scmGit
7
+ The git plugin provides fundamental git operations for Jenkins projects. It can poll, fetch, checkout, and merge contents of git repositories.
8
+
9
+ The scmGit parameter of the git plugin is used with the Pipeline SCM checkout step to checkout git repositories into Pipeline workspaces. The Pipeline Syntax Snippet Generator guides the user to select git plugin checkout options and provides online help for each of the options.
10
+
11
+ Use the Pipeline Snippet Generator to generate a sample pipeline script for the checkout step. Examples of the checkout step include:
12
+
13
+ Checkout step with defaults
14
+ Checkout step with https and a specific branch
15
+ Checkout step with ssh and a private key credential
16
+ Checkout step with https and changelog disabled
17
+ Checkout step with git protocol and polling disabled
18
+ See the argument descriptions for more details.
19
+ The scmGit parameter of the checkout step provides access to all the Pipeline capabilities provided by the git plugin:
20
+
21
+ checkout scmGit(userRemoteConfigs: [
22
+ [ url: 'https://github.com/jenkinsci/git-plugin' ]
23
+ ])
24
+ NOTE: The checkout step with the scmGit parameter is the preferred SCM checkout method. For simpler cases that do not require all the capabilities of the git plugin, the git step can also be used.
25
+
26
+ Use the Pipeline Snippet Generator to generate a sample pipeline script for the checkout step.
27
+
28
+ The checkout step with the scmGit parameter can be used in many cases where the git step cannot be used. Refer to the git plugin documentation for detailed descriptions of options available to the checkout step. For example, the checkout step supports:
29
+
30
+ SHA-1 checkout
31
+ Tag checkout
32
+ Submodule checkout
33
+ Sparse checkout
34
+ Large file checkout (LFS)
35
+ Reference repositories
36
+ Branch merges
37
+ Repository tagging
38
+ Custom refspecs
39
+ Timeout configuration
40
+ Changelog calculation against a non-default reference
41
+ Stale branch pruning
42
+ Example: Checkout step with defaults
43
+ Checkout from the git plugin source repository using https protocol, no credentials, and the master branch.
44
+
45
+ The Pipeline Snippet Generator generates this example:
46
+
47
+ checkout scmGit(userRemoteConfigs: [
48
+ [ url: 'https://github.com/jenkinsci/git-plugin' ]
49
+ ])
50
+ Example: Checkout step with https and a specific branch
51
+ Checkout from the Jenkins source repository using https protocol, no credentials, and a specific branch (stable-2.289).
52
+
53
+ The Pipeline Snippet Generator generates this example:
54
+
55
+ checkout scmGit(branches: [[name: 'stable-2.289']],
56
+ userRemoteConfigs: [
57
+ [ url: 'https://github.com/jenkinsci/jenkins.git' ]
58
+ ])
59
+ Example: Checkout step with ssh and a private key credential
60
+ Checkout from the git client plugin source repository using ssh protocol, private key credentials, and the master branch. The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
61
+
62
+ The Pipeline Snippet Generator generates this example:
63
+
64
+ checkout changelog: false,
65
+ scm: scmGit(userRemoteConfigs: [
66
+ [ credentialsId: 'my-private-key-credential-id',
67
+ url: 'git@github.com:jenkinsci/git-client-plugin.git' ]
68
+ ])
69
+ Example: Checkout step with https and changelog disabled
70
+ Checkout from the Jenkins source repository using https protocol, no credentials, the master branch, and changelog calculation disabled. If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed. See the workflow scm step documentation for more changelog details.
71
+
72
+ The Pipeline Snippet Generator generates this example:
73
+
74
+ checkout changelog: false,
75
+ scm: scmGit(userRemoteConfigs: [
76
+ [ url: 'https://github.com/jenkinsci/credentials-plugin' ]
77
+ ])
78
+ Example: Checkout step with git protocol and polling disabled
79
+ Checkout from the command line git repository using git protocol, no credentials, the master branch, and no polling for changes. If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes. See the workflow scm step documentation for more polling details.
80
+
81
+ The Pipeline Snippet Generator generates this example:
82
+
83
+ checkout poll: false,
84
+ scm: scmGit(userRemoteConfigs: [
85
+ [ url: 'git://git.kernel.org/pub/scm/git/git.git' ]
86
+ ])
87
+ Argument Descriptions
88
+ userRemoteConfigs
89
+ Specify the repository to track. This can be a URL or a local file path. Note that for super-projects (repositories with submodules), only a local file path or a complete URL is valid. The following are examples of valid git URLs.
90
+ ssh://git@github.com/github/git.git
91
+ git@github.com:github/git.git (short notation for ssh protocol)
92
+ ssh://user@other.host.com/~/repos/R.git (to access the repos/R.git repository in the user's home directory)
93
+ https://github.com/github/git.git
94
+
95
+ If the repository is a super-project, the location from which to clone submodules is dependent on whether the repository is bare or non-bare (i.e. has a working directory).
96
+ If the super-project is bare, the location of the submodules will be taken from .gitmodules.
97
+ If the super-project is not bare, it is assumed that the repository has each of its submodules cloned and checked out appropriately. Thus, the submodules will be taken directly from a path like ${SUPER_PROJECT_URL}/${SUBMODULE}, rather than relying on information from .gitmodules.
98
+ For a local URL/path to a super-project, git rev-parse --is-bare-repository is used to detect whether the super-project is bare or not.
99
+ For a remote URL to a super-project, the ending of the URL determines whether a bare or non-bare repository is assumed:
100
+ If the remote URL ends with .git, a non-bare repository is assumed.
101
+ If the remote URL does NOT end with .git, a bare repository is assumed.
102
+ Array / List of Nested Object
103
+ url : String
104
+ name : String
105
+ refspec : String
106
+ credentialsId : String
107
+ branches
108
+ List of branches to build. Jenkins jobs are most effective when each job builds only a single branch. When a single job builds multiple branches, the changelog comparisons between branches often show no changes or incorrect changes.
109
+ Array / List of Nested Object
110
+ name : String
111
+ browser
112
+ Defines the repository browser that displays changes detected by the git plugin.
113
+ Nested Choice of Objects
114
+ assembla
115
+ $class: 'BacklogGitRepositoryBrowser'
116
+ bitbucketServer
117
+ bitbucket
118
+ cgit
119
+ fisheye
120
+ gitblit
121
+ $class: 'GitBucketBrowser'
122
+ gitLab
123
+ gitLabBrowser
124
+ gitList
125
+ gitWeb
126
+ $class: 'GiteaBrowser'
127
+ github
128
+ gitiles
129
+ $class: 'GitoriousWeb'
130
+ gogs
131
+ kiln
132
+ phabricator
133
+ redmine
134
+ rhodeCode
135
+ $class: 'ScmManagerGitRepositoryBrowser'
136
+ jbSpace
137
+ $class: 'Stash'
138
+ teamFoundation
139
+ $class: 'TracGitRepositoryBrowser'
140
+ $class: 'TuleapBrowser'
141
+ viewgit
142
+ gitTool : String
143
+ Name of the git tool to be used for this job. Git tool names are defined in "Global Tool Configuration".
144
+
145
+ extensions
146
+ Extensions add new behavior or modify existing plugin behavior for different uses. Extensions help users more precisely tune plugin behavior to meet their needs.
147
+
148
+ Extensions include:
149
+
150
+ Clone extensions modify the git operations that retrieve remote changes into the agent workspace. The extensions can adjust the amount of history retrieved, how long the retrieval is allowed to run, and other retrieval details.
151
+ Checkout extensions modify the git operations that place files in the workspace from the git repository on the agent. The extensions can adjust the maximum duration of the checkout operation, the use and behavior of git submodules, the location of the workspace on the disc, and more.
152
+ Changelog extensions adapt the source code difference calculations for different cases.
153
+ Tagging extensions allow the plugin to apply tags in the current workspace.
154
+ Build initiation extensions control the conditions that start a build. They can ignore notifications of a change or force a deeper evaluation of the commits when polling.
155
+ Merge extensions can optionally merge changes from other branches into the current branch of the agent workspace. They control the source branch for the merge and the options applied to the merge.
156
+ Array / List of Nested Choice of Objects
157
+ authorInChangelog
158
+ $class: 'BitbucketEnvVarExtension'
159
+ $class: 'BuildChooserSetting'
160
+ buildSingleRevisionOnly
161
+ changelogToBranch
162
+ checkoutOption
163
+ cleanBeforeCheckout
164
+ cleanAfterCheckout
165
+ cloneOption
166
+ $class: 'CodeCommitURLHelper'
167
+ $class: 'DisableRemotePoll'
168
+ $class: 'ExcludeFromChangeSet'
169
+ $class: 'ExcludeFromPoll'
170
+ $class: 'FallbackToOtherRepositoryGitSCMExtension'
171
+ firstBuildChangelog
172
+ $class: 'GitClientAuthenticatorExtension'
173
+ lfs
174
+ $class: 'GitSCMChecksExtension'
175
+ $class: 'GitSCMStatusChecksExtension'
176
+ $class: 'GitTagMessageExtension'
177
+ $class: 'IgnoreNotifyCommit'
178
+ localBranch
179
+ $class: 'MessageExclusion'
180
+ $class: 'PathRestriction'
181
+ perBuildTag
182
+ $class: 'PreBuildMerge'
183
+ pretestedIntegration
184
+ pruneStaleBranch
185
+ pruneTags
186
+ $class: 'RelativeTargetDirectory'
187
+ $class: 'ScmName'
188
+ sparseCheckout
189
+ submodule
190
+ $class: 'UserExclusion'
191
+ $class: 'UserIdentity'
192
+ $class: 'WipeWorkspace'
193
+ doGenerateSubmoduleConfigurations : boolean (optional)
194
+ Removed facility that was intended to test combinations of git submodule versions. Removed in git plugin 4.6.0. Ignores the user provided value and always uses false as its value.
195
+
196
+ submoduleCfg (optional)
197
+ Removed facility that was intended to test combinations of git submodule versions. Removed in git plugin 4.6.0. Ignores the user provided value(s) and always uses empty values.
198
+
199
+ Array / List of Nested Object
200
+ submoduleName : String
201
+ Removed in git plugin 4.6.0.
202
+
203
+ branches : Array / List of String
204
+ Removed in git plugin 4.6.0.
205
+
206
  Was this page helpful?
data/docs/raw/jenkins_credentials_text.txt CHANGED
@@ -1,125 +1,125 @@
1
- The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
-
3
- For a list of other such plugins, see the Pipeline Steps Reference page.
4
-
5
- Table of Contents
6
- Credentials Binding Plugin
7
- withCredentials: Bind credentials to variables
8
- Credentials Binding Plugin
9
- View this plugin on the Plugins site
10
-
11
- withCredentials: Bind credentials to variables
12
- Allows various kinds of credentials (secrets) to be used in idiosyncratic ways. (Some steps explicitly ask for credentials of a particular kind, usually as a credentialsId parameter, in which case this step is unnecessary.) Each binding will define an environment variable active within the scope of the step. You can then use them directly from any other steps that expect environment variables to be set:
13
-
14
- node {
15
- withCredentials([usernameColonPassword(credentialsId: 'mylogin', variable: 'USERPASS')]) {
16
- sh '''
17
- set +x
18
- curl -u "$USERPASS" https://private.server/ > output
19
- '''
20
- }
21
- }
22
- As another example (use Snippet Generator to see all options):
23
-
24
- node {
25
- withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
26
- sh '''
27
- set +x
28
- curl -H "Token: $TOKEN" https://some.api/
29
- '''
30
- }
31
- }
32
- Note the use of single quotes to define the script (implicit parameter to sh) in Groovy above. You want the secret to be expanded by the shell as an environment variable. The following idiom is potentially less secure, as the secret is interpolated by Groovy and so (for example) typical operating system process listings will accidentally disclose it:
33
-
34
- node {
35
- withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
36
- sh /* WRONG! */ """
37
- set +x
38
- curl -H 'Token: $TOKEN' https://some.api/
39
- """
40
- }
41
- }
42
- At least on Linux, environment variables can be obtained by other processes running in the same account, so you should not run a job which uses secrets on the same node as a job controlled by untrusted parties. In any event, you should always prefer expansion as environment variables to inclusion in the command, since Jenkins visualizations such as Blue Ocean will attempt to detect step parameters containing secrets and refuse to display them.
43
-
44
- The secret(s) will be masked (****) in case they are printed to the build log. This prevents you from accidentally disclosing passwords and the like via the log. (Bourne shell set +x, or Windows batch @echo off, blocks secrets from being displayed in echoed commands; but build tools in debug mode might dump all environment variables to standard output/error, or poorly designed network clients might display authentication, etc.) The masking could of course be trivially circumvented; anyone permitted to configure a job or define Pipeline steps is assumed to be trusted to use any credentials in scope however they like.
45
-
46
- Beware that certain tools mangle secrets when displaying them. As one example, Bash (as opposed to Ubuntu’s plainer Dash) does so with text containing ' in echo mode:
47
-
48
- $ export PASS=foo"'"bar
49
- $ env|fgrep PASS
50
- PASS=foo'bar
51
- $ sh -xc 'echo $PASS'
52
- + echo foo'bar
53
- foo'bar
54
- $ bash -xc 'echo $PASS'
55
- + echo 'foo'\''bar'
56
- foo'bar
57
- Mangled secrets can only be detected on a best-effort basis. By default, Jenkins will attempt to mask mangled secrets as they would appear in output of Bourne shell, Bash, Almquist shell and Windows batch. Without these strategies in place, mangled secrets would appear in plain text in log files. In the example above, this would result in:
58
-
59
- + echo 'foo'\''bar'
60
- ****
61
- This particular issue can be more safely prevented by turning off echo with set +x or avoiding the use of shell metacharacters in secrets.
62
-
63
- For bindings which store a secret file, beware that
64
-
65
- node {
66
- dir('subdir') {
67
- withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
68
- sh 'use $FILE'
69
- }
70
- }
71
- }
72
- is not safe, as $FILE might be inside the workspace (in subdir@tmp/secretFiles/), and thus visible to anyone able to browse the job’s workspace. If you need to run steps in a different directory than the usual workspace, you should instead use
73
-
74
- node {
75
- withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
76
- dir('subdir') {
77
- sh 'use $FILE'
78
- }
79
- }
80
- }
81
- to ensure that the secrets are outside the workspace; or choose a different workspace entirely:
82
-
83
- node {
84
- ws {
85
- withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
86
- sh 'use $FILE'
87
- }
88
- }
89
- }
90
- Also see the Limitations of Credentials Masking blog post for more background.
91
-
92
- bindings
93
- Array / List of Nested Choice of Objects
94
- aws
95
- token
96
- $class: 'AwsBucketCredentialsBinding'
97
- ociCredentials
98
- certificate
99
- ConjurSecretApplianceCredentials
100
- conjurSecretCredential
101
- conjurSecretDockerClientCert
102
- conjurSecretFile
103
- conjurSecretString
104
- conjurSecretUsername
105
- conjurSecretUsernameSSHKey
106
- dockerCert
107
- file
108
- gitlabApiToken
109
- gitUsernamePassword
110
- $class: 'KeychainPasswordAndPathBinding'
111
- OSFBuilderSuiteOpenCommerceAPICredentials
112
- sshUserPrivateKey
113
- string
114
- OSFBuilderSuiteTwoFactorAuthCredentials
115
- usernameColonPassword
116
- usernamePassword
117
- $class: 'VaultCertificateCredentialsBinding'
118
- vaultFile
119
- $class: 'VaultSSHUserPrivateKeyBinding'
120
- vaultString
121
- $class: 'VaultTokenCredentialBinding'
122
- $class: 'VaultUsernamePasswordCredentialBinding'
123
- zip
124
- azureServicePrincipal
125
- azureStorage
 
1
+ The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
+
3
+ For a list of other such plugins, see the Pipeline Steps Reference page.
4
+
5
+ Table of Contents
6
+ Credentials Binding Plugin
7
+ withCredentials: Bind credentials to variables
8
+ Credentials Binding Plugin
9
+ View this plugin on the Plugins site
10
+
11
+ withCredentials: Bind credentials to variables
12
+ Allows various kinds of credentials (secrets) to be used in idiosyncratic ways. (Some steps explicitly ask for credentials of a particular kind, usually as a credentialsId parameter, in which case this step is unnecessary.) Each binding will define an environment variable active within the scope of the step. You can then use them directly from any other steps that expect environment variables to be set:
13
+
14
+ node {
15
+ withCredentials([usernameColonPassword(credentialsId: 'mylogin', variable: 'USERPASS')]) {
16
+ sh '''
17
+ set +x
18
+ curl -u "$USERPASS" https://private.server/ > output
19
+ '''
20
+ }
21
+ }
22
+ As another example (use Snippet Generator to see all options):
23
+
24
+ node {
25
+ withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
26
+ sh '''
27
+ set +x
28
+ curl -H "Token: $TOKEN" https://some.api/
29
+ '''
30
+ }
31
+ }
32
+ Note the use of single quotes to define the script (implicit parameter to sh) in Groovy above. You want the secret to be expanded by the shell as an environment variable. The following idiom is potentially less secure, as the secret is interpolated by Groovy and so (for example) typical operating system process listings will accidentally disclose it:
33
+
34
+ node {
35
+ withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
36
+ sh /* WRONG! */ """
37
+ set +x
38
+ curl -H 'Token: $TOKEN' https://some.api/
39
+ """
40
+ }
41
+ }
42
+ At least on Linux, environment variables can be obtained by other processes running in the same account, so you should not run a job which uses secrets on the same node as a job controlled by untrusted parties. In any event, you should always prefer expansion as environment variables to inclusion in the command, since Jenkins visualizations such as Blue Ocean will attempt to detect step parameters containing secrets and refuse to display them.
43
+
44
+ The secret(s) will be masked (****) in case they are printed to the build log. This prevents you from accidentally disclosing passwords and the like via the log. (Bourne shell set +x, or Windows batch @echo off, blocks secrets from being displayed in echoed commands; but build tools in debug mode might dump all environment variables to standard output/error, or poorly designed network clients might display authentication, etc.) The masking could of course be trivially circumvented; anyone permitted to configure a job or define Pipeline steps is assumed to be trusted to use any credentials in scope however they like.
45
+
46
+ Beware that certain tools mangle secrets when displaying them. As one example, Bash (as opposed to Ubuntu’s plainer Dash) does so with text containing ' in echo mode:
47
+
48
+ $ export PASS=foo"'"bar
49
+ $ env|fgrep PASS
50
+ PASS=foo'bar
51
+ $ sh -xc 'echo $PASS'
52
+ + echo foo'bar
53
+ foo'bar
54
+ $ bash -xc 'echo $PASS'
55
+ + echo 'foo'\''bar'
56
+ foo'bar
57
+ Mangled secrets can only be detected on a best-effort basis. By default, Jenkins will attempt to mask mangled secrets as they would appear in output of Bourne shell, Bash, Almquist shell and Windows batch. Without these strategies in place, mangled secrets would appear in plain text in log files. In the example above, this would result in:
58
+
59
+ + echo 'foo'\''bar'
60
+ ****
61
+ This particular issue can be more safely prevented by turning off echo with set +x or avoiding the use of shell metacharacters in secrets.
62
+
63
+ For bindings which store a secret file, beware that
64
+
65
+ node {
66
+ dir('subdir') {
67
+ withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
68
+ sh 'use $FILE'
69
+ }
70
+ }
71
+ }
72
+ is not safe, as $FILE might be inside the workspace (in subdir@tmp/secretFiles/), and thus visible to anyone able to browse the job’s workspace. If you need to run steps in a different directory than the usual workspace, you should instead use
73
+
74
+ node {
75
+ withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
76
+ dir('subdir') {
77
+ sh 'use $FILE'
78
+ }
79
+ }
80
+ }
81
+ to ensure that the secrets are outside the workspace; or choose a different workspace entirely:
82
+
83
+ node {
84
+ ws {
85
+ withCredentials([file(credentialsId: 'secret', variable: 'FILE')]) {
86
+ sh 'use $FILE'
87
+ }
88
+ }
89
+ }
90
+ Also see the Limitations of Credentials Masking blog post for more background.
91
+
92
+ bindings
93
+ Array / List of Nested Choice of Objects
94
+ aws
95
+ token
96
+ $class: 'AwsBucketCredentialsBinding'
97
+ ociCredentials
98
+ certificate
99
+ ConjurSecretApplianceCredentials
100
+ conjurSecretCredential
101
+ conjurSecretDockerClientCert
102
+ conjurSecretFile
103
+ conjurSecretString
104
+ conjurSecretUsername
105
+ conjurSecretUsernameSSHKey
106
+ dockerCert
107
+ file
108
+ gitlabApiToken
109
+ gitUsernamePassword
110
+ $class: 'KeychainPasswordAndPathBinding'
111
+ OSFBuilderSuiteOpenCommerceAPICredentials
112
+ sshUserPrivateKey
113
+ string
114
+ OSFBuilderSuiteTwoFactorAuthCredentials
115
+ usernameColonPassword
116
+ usernamePassword
117
+ $class: 'VaultCertificateCredentialsBinding'
118
+ vaultFile
119
+ $class: 'VaultSSHUserPrivateKeyBinding'
120
+ vaultString
121
+ $class: 'VaultTokenCredentialBinding'
122
+ $class: 'VaultUsernamePasswordCredentialBinding'
123
+ zip
124
+ azureServicePrincipal
125
+ azureStorage
data/docs/raw/jenkins_git.txt CHANGED
@@ -1,110 +1,110 @@
1
- The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
-
3
- For a list of other such plugins, see the Pipeline Steps Reference page.
4
-
5
- Table of Contents
6
- Git plugin
7
- git: Git
8
- Git plugin
9
- View this plugin on the Plugins site
10
-
11
- git: Git
12
- The git step performs a clone from the specified repository into a Pipeline workspace.
13
-
14
- Use the Pipeline Syntax Snippet Generator to generate a sample pipeline script for the git step. More advanced checkout operations require the checkout step with the scmGit parameter rather than the git step. Examples of the git step include:
15
-
16
- Git step with defaults
17
- Git step with https and a specific branch
18
- Git step with ssh and a private key credential
19
- Git step with https and changelog disabled
20
- Git step with git protocol and polling disabled
21
- See the argument descriptions for more details.
22
- The git step is a simplified shorthand for a subset of the more powerful checkout step with the scmGit parameter:
23
-
24
- checkout scmGit(branches: [[name: 'main']],
25
- userRemoteConfigs: [[url: 'https://git-server/user/repository.git']])
26
- NOTE: The checkout step with the scmGit parameter is the preferred SCM checkout method. It provides significantly more functionality than the git step.
27
-
28
- Use the Pipeline Syntax Snippet Generator to generate a sample pipeline script for the checkout step.
29
-
30
- The checkout step with the scmGit parameter can be used in many cases where the git step cannot be used. Refer to the git plugin documentation for detailed descriptions of options available to the scmGit parameter of the checkout step. For example, the git step does not support:
31
-
32
- SHA-1 checkout
33
- Tag checkout
34
- Submodule checkout
35
- Sparse checkout
36
- Large file checkout (LFS)
37
- Reference repositories
38
- Branch merges
39
- Repository tagging
40
- Custom refspecs
41
- Timeout configuration
42
- Changelog calculation against a non-default reference
43
- Stale branch pruning
44
- Example: Git step with defaults
45
- Checkout from the git plugin source repository using https protocol, no credentials, and the master branch.
46
-
47
- The Pipeline Syntax Snippet Generator generates this example:
48
-
49
- git 'https://github.com/jenkinsci/git-plugin.git'
50
- Example: Git step with https and a specific branch
51
- Checkout from the Jenkins source repository using https protocol, no credentials, and a specific branch (stable-2.492). Note that this must be a local branch name like 'master' or 'develop'.
52
-
53
- Branch names that are not supported by the git step
54
-
55
- Remote branch names like 'origin/master' and 'origin/develop' are not supported as the branch argument
56
- SHA-1 hashes are not supported as the branch argument
57
- Tag names are not supported as the branch argument
58
- Remote branch names, SHA-1 hashes, and tag names are supported by the general purpose scmGit parameter of the checkout step.
59
-
60
- The Pipeline Syntax Snippet Generator generates this example:
61
-
62
- git branch: 'stable-2.492',
63
- url: 'https://github.com/jenkinsci/jenkins.git'
64
- Example: Git step with ssh and a private key credential
65
- Checkout from the git client plugin source repository using ssh protocol, private key credentials, and the master branch. The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
66
-
67
- The Pipeline Syntax Snippet Generator generates this example:
68
-
69
- git credentialsId: 'my-private-key-credential-id',
70
- url: 'git@github.com:jenkinsci/git-client-plugin.git'
71
- Example: Git step with https and changelog disabled
72
- Checkout from the Jenkins source repository using https protocol, no credentials, the master branch, and changelog calculation disabled. If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed. See the workflow scm step documentation for more changelog details.
73
-
74
- The Pipeline Syntax Snippet Generator generates this example:
75
-
76
- git changelog: false,
77
- url: 'https://github.com/jenkinsci/credentials-plugin.git'
78
- Example: Git step with https protocol and polling disabled
79
- Checkout from the Jenkins platform labeler repository using https protocol, no credentials, the master branch, and no polling for changes. If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes. See the workflow scm step documentation for more polling details.
80
-
81
- The Pipeline Syntax Snippet Generator generates this example:
82
-
83
- git poll: false,
84
- url: 'https://github.com/jenkinsci/platformlabeler-plugin.git'
85
- Argument Descriptions
86
- url : String
87
- URL of the repository to be checked out in the workspace. Required parameter.
88
-
89
- Repository URL's should follow the git URL guidelines. Git steps to access a secured repository should provide a Jenkins credential with the credentialsId argument rather than embedding credentials in the URL. Credentials embedded in a repository URL may be visible in console logs or in other log files.
90
-
91
- branch : String (optional)
92
- Branch to be checked out in the workspace. Default is 'master'.
93
-
94
- Note that this must be a local branch name like 'master' or 'develop'. Remote branch names like 'origin/master' and 'origin/develop' are not supported as the branch argument. Tag names are not supported as the branch argument. SHA-1 hashes are not supported as the branch argument. Remote branch names, tag names, and SHA-1 hashes are supported by the general purpose checkout step with the scmGit parameter.
95
-
96
- changelog : boolean (optional)
97
- Compute changelog for this job. Default is 'true'.
98
-
99
- If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed.
100
-
101
- credentialsId : String (optional)
102
- Identifier of the credential used to access the remote git repository. Default is '<empty>'.
103
-
104
- The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
105
-
106
- poll : boolean (optional)
107
- Poll remote repository for changes. Default is 'true'.
108
-
109
- If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes.
110
-
 
1
+ The following plugin provides functionality available through Pipeline-compatible steps. Read more about how to integrate steps into your Pipeline in the Steps section of the Pipeline Syntax page.
2
+
3
+ For a list of other such plugins, see the Pipeline Steps Reference page.
4
+
5
+ Table of Contents
6
+ Git plugin
7
+ git: Git
8
+ Git plugin
9
+ View this plugin on the Plugins site
10
+
11
+ git: Git
12
+ The git step performs a clone from the specified repository into a Pipeline workspace.
13
+
14
+ Use the Pipeline Syntax Snippet Generator to generate a sample pipeline script for the git step. More advanced checkout operations require the checkout step with the scmGit parameter rather than the git step. Examples of the git step include:
15
+
16
+ Git step with defaults
17
+ Git step with https and a specific branch
18
+ Git step with ssh and a private key credential
19
+ Git step with https and changelog disabled
20
+ Git step with git protocol and polling disabled
21
+ See the argument descriptions for more details.
22
+ The git step is a simplified shorthand for a subset of the more powerful checkout step with the scmGit parameter:
23
+
24
+ checkout scmGit(branches: [[name: 'main']],
25
+ userRemoteConfigs: [[url: 'https://git-server/user/repository.git']])
26
+ NOTE: The checkout step with the scmGit parameter is the preferred SCM checkout method. It provides significantly more functionality than the git step.
27
+
28
+ Use the Pipeline Syntax Snippet Generator to generate a sample pipeline script for the checkout step.
29
+
30
+ The checkout step with the scmGit parameter can be used in many cases where the git step cannot be used. Refer to the git plugin documentation for detailed descriptions of options available to the scmGit parameter of the checkout step. For example, the git step does not support:
31
+
32
+ SHA-1 checkout
33
+ Tag checkout
34
+ Submodule checkout
35
+ Sparse checkout
36
+ Large file checkout (LFS)
37
+ Reference repositories
38
+ Branch merges
39
+ Repository tagging
40
+ Custom refspecs
41
+ Timeout configuration
42
+ Changelog calculation against a non-default reference
43
+ Stale branch pruning
44
+ Example: Git step with defaults
45
+ Checkout from the git plugin source repository using https protocol, no credentials, and the master branch.
46
+
47
+ The Pipeline Syntax Snippet Generator generates this example:
48
+
49
+ git 'https://github.com/jenkinsci/git-plugin.git'
50
+ Example: Git step with https and a specific branch
51
+ Checkout from the Jenkins source repository using https protocol, no credentials, and a specific branch (stable-2.492). Note that this must be a local branch name like 'master' or 'develop'.
52
+
53
+ Branch names that are not supported by the git step
54
+
55
+ Remote branch names like 'origin/master' and 'origin/develop' are not supported as the branch argument
56
+ SHA-1 hashes are not supported as the branch argument
57
+ Tag names are not supported as the branch argument
58
+ Remote branch names, SHA-1 hashes, and tag names are supported by the general purpose scmGit parameter of the checkout step.
59
+
60
+ The Pipeline Syntax Snippet Generator generates this example:
61
+
62
+ git branch: 'stable-2.492',
63
+ url: 'https://github.com/jenkinsci/jenkins.git'
64
+ Example: Git step with ssh and a private key credential
65
+ Checkout from the git client plugin source repository using ssh protocol, private key credentials, and the master branch. The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
66
+
67
+ The Pipeline Syntax Snippet Generator generates this example:
68
+
69
+ git credentialsId: 'my-private-key-credential-id',
70
+ url: 'git@github.com:jenkinsci/git-client-plugin.git'
71
+ Example: Git step with https and changelog disabled
72
+ Checkout from the Jenkins source repository using https protocol, no credentials, the master branch, and changelog calculation disabled. If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed. See the workflow scm step documentation for more changelog details.
73
+
74
+ The Pipeline Syntax Snippet Generator generates this example:
75
+
76
+ git changelog: false,
77
+ url: 'https://github.com/jenkinsci/credentials-plugin.git'
78
+ Example: Git step with https protocol and polling disabled
79
+ Checkout from the Jenkins platform labeler repository using https protocol, no credentials, the master branch, and no polling for changes. If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes. See the workflow scm step documentation for more polling details.
80
+
81
+ The Pipeline Syntax Snippet Generator generates this example:
82
+
83
+ git poll: false,
84
+ url: 'https://github.com/jenkinsci/platformlabeler-plugin.git'
85
+ Argument Descriptions
86
+ url : String
87
+ URL of the repository to be checked out in the workspace. Required parameter.
88
+
89
+ Repository URL's should follow the git URL guidelines. Git steps to access a secured repository should provide a Jenkins credential with the credentialsId argument rather than embedding credentials in the URL. Credentials embedded in a repository URL may be visible in console logs or in other log files.
90
+
91
+ branch : String (optional)
92
+ Branch to be checked out in the workspace. Default is 'master'.
93
+
94
+ Note that this must be a local branch name like 'master' or 'develop'. Remote branch names like 'origin/master' and 'origin/develop' are not supported as the branch argument. Tag names are not supported as the branch argument. SHA-1 hashes are not supported as the branch argument. Remote branch names, tag names, and SHA-1 hashes are supported by the general purpose checkout step with the scmGit parameter.
95
+
96
+ changelog : boolean (optional)
97
+ Compute changelog for this job. Default is 'true'.
98
+
99
+ If changelog is false, then the changelog will not be computed for this job. If changelog is true or is not set, then the changelog will be computed.
100
+
101
+ credentialsId : String (optional)
102
+ Identifier of the credential used to access the remote git repository. Default is '<empty>'.
103
+
104
+ The credential must be a private key credential if the remote git repository is accessed with the ssh protocol. The credential must be a username / password credential if the remote git repository is accessed with http or https protocol.
105
+
106
+ poll : boolean (optional)
107
+ Poll remote repository for changes. Default is 'true'.
108
+
109
+ If poll is false, then the remote repository will not be polled for changes. If poll is true or is not set, then the remote repository will be polled for changes.
110
+
data/docs/raw/jenkins_nodes.txt CHANGED
@@ -1,171 +1,171 @@
1
- Managing Nodes
2
- Table of Contents
3
- Components of Distributed Builds
4
- Creating Agents
5
- Launch inbound agent via Windows Scheduler
6
- Installing a Jenkins agent on Windows
7
- Creating a macOS agent for Jenkins
8
- Components of Distributed Builds
9
- Builds in a distributed builds architecture use nodes, agents, and executors, which are distinct from the Jenkins controller itself. Understanding what each of these components are is useful when managing nodes:
10
-
11
- Jenkins controller
12
- The Jenkins controller is the Jenkins service itself and where Jenkins is installed. It is also a web server that also acts as a "brain" for deciding how, when, and where to run tasks. Management tasks such as configuration, authorization, and authentication are executed on the controller, which serves HTTP requests. Files written when a Pipeline executes are written to the filesystem on the controller, unless they are off-loaded to an artifact repository such as Nexus or Artifactory.
13
-
14
- Nodes
15
- Nodes are the "machines" on which build agents run. Jenkins monitors each attached node for disk space, free temp space, free swap, clock time/sync, and response time. A node is taken offline if any of these values go outside the configured threshold. Jenkins supports two types of nodes:
16
-
17
- agents (described below)
18
-
19
- built-in node
20
-
21
- The built-in node is a node that exists within the controller process. It is possible to use agents and the build-in node to run tasks. However, running tasks on the built-in node is discouraged for security, performance, and scalability reasons. The number of executors configured for the node determines the node’s ability to run tasks. Set the number of executors to 0 to disable running tasks on the built-in node.
22
-
23
- Agents
24
- Agents manage the task execution on behalf of the Jenkins controller by using executors. An agent is a small (170KB single jar) Java client process that connects to a Jenkins controller and is assumed to be unreliable. An agent can use any operating system that supports Java. Any tools required for building and testing get installed on the node where the agent runs. Because these tools are a part of the node, they can be installed directly or in a container, such as Docker or Kubernetes. Each agent is effectively a process with its own Process Identifier (PID) on the host machine. In practice, nodes and agents are essentially the same but it is good to remember that they are conceptually distinct.
25
-
26
- Executors
27
- An executor is a slot for the execution of tasks. Effectively, it is a thread in the agent. The number of executors on a node defines the number of concurrent tasks that can run. In other words, this determines the number of concurrent Pipeline stages that can execute at the same time. Determine the correct number of executors per build node must be determined based on the resources available on the node and the resources required for the workload. When determining how many executors to run on a node, consider CPU and memory requirements, as well as the amount of I/O and network activity:
28
-
29
- One executor per node is the safest configuration.
30
-
31
- One executor per CPU core can work well, if the tasks running are small.
32
-
33
- Monitor I/O performance, CPU load, memory usage, and I/O throughput carefully when running multiple executors on a node.
34
-
35
- Creating Agents
36
- Jenkins agents are the "workers" that perform operations requested by the Jenkins controller. The Jenkins controller administers the agents and can manage the tooling on the agents. Jenkins agents may be statically allocated or they can be dynamically allocated through systems like Kubernetes, OpenShift, Amazon EC2, Azure, Google Cloud, IBM Cloud, Oracle Cloud, and other cloud providers.
37
-
38
- This 30 minute tutorial from Darin Pope creates a Jenkins agent and connects it to a controller.
39
-
40
- How to create an agent node in Jenkins
41
-
42
- Launch inbound agent via Windows Scheduler
43
- If you are having trouble getting the inbound agent installed as a Windows service (i.e., you followed the instructions on installing the agent as a service here but it didn’t work), an alternative method of starting the service automatically when Windows starts is to use the Windows Scheduler.
44
-
45
- We take advantage of the Windows Scheduler’s ability to run command at system startup
46
-
47
- Configure your node to use the "Launch agents by connecting it to the master" launch method
48
-
49
- Click Save
50
-
51
- Note the command required to launch the agent
52
-
53
- On the new agent node’s Jenkins page, note the agent command line shown.
54
-
55
- It will be like:
56
-
57
- java \
58
- -jar agent.jar \
59
- -url <Jenkins URL> \
60
- -secret <secret key> \
61
- -name <agent name>
62
- Obtain the agent.jar file and copy it to your new Windows agent node
63
-
64
- In the command line noted in the last step, the "agent.jar" is a hyperlink. Click it to download the agent.jar file.
65
-
66
- Copy the agent.jar file to a permanent location on your agent machine
67
-
68
- Ensure that you have a java version available on your agent machine
69
-
70
- If not, obtain and install a supported version of Java
71
-
72
- Run the command manually from a CMD window on your agent to confirm that it works
73
-
74
- Open the CMD window
75
-
76
- Run the command the one like
77
-
78
- java \
79
- -jar agent.jar \
80
- -url <Jenkins URL> \
81
- -secret <secret key> \
82
- -name <agent name>
83
- Go back to the node’s web page in Jenkins. If everything works then page should say "Agent is connected"
84
-
85
- Stop the command (control-c)
86
-
87
- Register a new scheduled job to run the same command
88
-
89
- Open "Task Scheduler" on your windows machine
90
-
91
- Start → Run: task Scheduler
92
-
93
- Create a basic task (Menu: Action → Create Basic Task)
94
-
95
- First page of the wizard:
96
-
97
- Name: Jenkins Agent
98
-
99
- Description (optional)
100
-
101
- Click Next
102
-
103
- Next page of the wizard
104
-
105
- When do you want the task to start: select "When the computer starts"
106
-
107
- Click Next
108
-
109
- Next page of the wizard
110
-
111
- What action do you want the task to perform: select "Start a program"
112
-
113
- Click Next
114
-
115
- Next page of the wizard
116
-
117
- Program/Script: enter "java.exe" (or the full path to your java.exe)
118
-
119
- Add arguments: enter the rest of the command, like
120
-
121
- java
122
- -jar agent.jar \
123
- -url <Jenkins URL> \
124
- -secret <secret key> \
125
- -name <agent name>
126
- eg:
127
-
128
- java \
129
- -jar D:\Scripts\jenkins\agent.jar \
130
- -url http://jenkinshost.example.com \
131
- -secret d6a84df1fc4f45ddc9c6ab34b08f13391983ffffffffffb3488b7d5ac77fbc7 \
132
- -name buildNode1
133
- Click Next
134
-
135
- Next page of the wizard
136
-
137
- Click the check box "Open the Properties dialog for this task when I click Finish
138
-
139
- Click Finish
140
-
141
- Update the task’s properties
142
-
143
- On the General tab
144
-
145
- Select the user to run the task as
146
-
147
- Select "Run whether user is logged on or not"
148
-
149
- On the settings tab
150
-
151
- Uncheck "Stop the task if it runs longer than"
152
-
153
- Check "Run the task as soon as possible after a scheduled start is missed"
154
-
155
- Check "If the task failed, restart every: 10 minutes", and "Attempt to restart up to: 3 times"
156
-
157
- Click OK
158
-
159
- Start the scheduled task and again check that the agent is connected
160
-
161
- Go back to the node’s web page in Jenkins. If everything works then page should say "Agent is connected"
162
-
163
- Installing a Jenkins agent on Windows
164
- You can install a Jenkins agent on Windows using the command line. In this video, Darin reviews setting up and installing the Jenkins agent, including how to create any necessary files.
165
-
166
- How to install a Jenkins agent on Windows
167
-
168
- Creating a macOS agent for Jenkins
169
- This video reviews the process of creating a macOS agent for Jenkins using Java 11.
170
-
171
-
 
1
+ Managing Nodes
2
+ Table of Contents
3
+ Components of Distributed Builds
4
+ Creating Agents
5
+ Launch inbound agent via Windows Scheduler
6
+ Installing a Jenkins agent on Windows
7
+ Creating a macOS agent for Jenkins
8
+ Components of Distributed Builds
9
+ Builds in a distributed builds architecture use nodes, agents, and executors, which are distinct from the Jenkins controller itself. Understanding what each of these components are is useful when managing nodes:
10
+
11
+ Jenkins controller
12
+ The Jenkins controller is the Jenkins service itself and where Jenkins is installed. It is also a web server that also acts as a "brain" for deciding how, when, and where to run tasks. Management tasks such as configuration, authorization, and authentication are executed on the controller, which serves HTTP requests. Files written when a Pipeline executes are written to the filesystem on the controller, unless they are off-loaded to an artifact repository such as Nexus or Artifactory.
13
+
14
+ Nodes
15
+ Nodes are the "machines" on which build agents run. Jenkins monitors each attached node for disk space, free temp space, free swap, clock time/sync, and response time. A node is taken offline if any of these values go outside the configured threshold. Jenkins supports two types of nodes:
16
+
17
+ agents (described below)
18
+
19
+ built-in node
20
+
21
+ The built-in node is a node that exists within the controller process. It is possible to use agents and the built-in node to run tasks. However, running tasks on the built-in node is discouraged for security, performance, and scalability reasons. The number of executors configured for the node determines the node’s ability to run tasks. Set the number of executors to 0 to disable running tasks on the built-in node.
22
+
23
+ Agents
24
+ Agents manage the task execution on behalf of the Jenkins controller by using executors. An agent is a small (170KB single jar) Java client process that connects to a Jenkins controller and is assumed to be unreliable. An agent can use any operating system that supports Java. Any tools required for building and testing get installed on the node where the agent runs. Because these tools are a part of the node, they can be installed directly or in a container, such as Docker or Kubernetes. Each agent is effectively a process with its own Process Identifier (PID) on the host machine. In practice, nodes and agents are essentially the same but it is good to remember that they are conceptually distinct.
25
+
26
+ Executors
27
+ An executor is a slot for the execution of tasks. Effectively, it is a thread in the agent. The number of executors on a node defines the number of concurrent tasks that can run. In other words, this determines the number of concurrent Pipeline stages that can execute at the same time. The correct number of executors per build node must be determined based on the resources available on the node and the resources required for the workload. When determining how many executors to run on a node, consider CPU and memory requirements, as well as the amount of I/O and network activity:
28
+
29
+ One executor per node is the safest configuration.
30
+
31
+ One executor per CPU core can work well, if the tasks running are small.
32
+
33
+ Monitor I/O performance, CPU load, memory usage, and I/O throughput carefully when running multiple executors on a node.
34
+
35
+ Creating Agents
36
+ Jenkins agents are the "workers" that perform operations requested by the Jenkins controller. The Jenkins controller administers the agents and can manage the tooling on the agents. Jenkins agents may be statically allocated or they can be dynamically allocated through systems like Kubernetes, OpenShift, Amazon EC2, Azure, Google Cloud, IBM Cloud, Oracle Cloud, and other cloud providers.
37
+
38
+ This 30 minute tutorial from Darin Pope creates a Jenkins agent and connects it to a controller.
39
+
40
+ How to create an agent node in Jenkins
41
+
42
+ Launch inbound agent via Windows Scheduler
43
+ If you are having trouble getting the inbound agent installed as a Windows service (i.e., you followed the instructions on installing the agent as a service here but it didn’t work), an alternative method of starting the service automatically when Windows starts is to use the Windows Scheduler.
44
+
45
+ We take advantage of the Windows Scheduler’s ability to run a command at system startup
46
+
47
+ Configure your node to use the "Launch agents by connecting it to the master" launch method
48
+
49
+ Click Save
50
+
51
+ Note the command required to launch the agent
52
+
53
+ On the new agent node’s Jenkins page, note the agent command line shown.
54
+
55
+ It will be like:
56
+
57
+ java \
58
+ -jar agent.jar \
59
+ -url <Jenkins URL> \
60
+ -secret <secret key> \
61
+ -name <agent name>
62
+ Obtain the agent.jar file and copy it to your new Windows agent node
63
+
64
+ In the command line noted in the last step, the "agent.jar" is a hyperlink. Click it to download the agent.jar file.
65
+
66
+ Copy the agent.jar file to a permanent location on your agent machine
67
+
68
+ Ensure that you have a java version available on your agent machine
69
+
70
+ If not, obtain and install a supported version of Java
71
+
72
+ Run the command manually from a CMD window on your agent to confirm that it works
73
+
74
+ Open the CMD window
75
+
76
+ Run the command, like the one below
77
+
78
+ java \
79
+ -jar agent.jar \
80
+ -url <Jenkins URL> \
81
+ -secret <secret key> \
82
+ -name <agent name>
83
+ Go back to the node’s web page in Jenkins. If everything works then page should say "Agent is connected"
84
+
85
+ Stop the command (control-c)
86
+
87
+ Register a new scheduled job to run the same command
88
+
89
+ Open "Task Scheduler" on your windows machine
90
+
91
+ Start → Run: task Scheduler
92
+
93
+ Create a basic task (Menu: Action → Create Basic Task)
94
+
95
+ First page of the wizard:
96
+
97
+ Name: Jenkins Agent
98
+
99
+ Description (optional)
100
+
101
+ Click Next
102
+
103
+ Next page of the wizard
104
+
105
+ When do you want the task to start: select "When the computer starts"
106
+
107
+ Click Next
108
+
109
+ Next page of the wizard
110
+
111
+ What action do you want the task to perform: select "Start a program"
112
+
113
+ Click Next
114
+
115
+ Next page of the wizard
116
+
117
+ Program/Script: enter "java.exe" (or the full path to your java.exe)
118
+
119
+ Add arguments: enter the rest of the command, like
120
+
121
+ java
122
+ -jar agent.jar \
123
+ -url <Jenkins URL> \
124
+ -secret <secret key> \
125
+ -name <agent name>
126
+ eg:
127
+
128
+ java \
129
+ -jar D:\Scripts\jenkins\agent.jar \
130
+ -url http://jenkinshost.example.com \
131
+ -secret d6a84df1fc4f45ddc9c6ab34b08f13391983ffffffffffb3488b7d5ac77fbc7 \
132
+ -name buildNode1
133
+ Click Next
134
+
135
+ Next page of the wizard
136
+
137
+ Click the check box "Open the Properties dialog for this task when I click Finish"
138
+
139
+ Click Finish
140
+
141
+ Update the task’s properties
142
+
143
+ On the General tab
144
+
145
+ Select the user to run the task as
146
+
147
+ Select "Run whether user is logged on or not"
148
+
149
+ On the settings tab
150
+
151
+ Uncheck "Stop the task if it runs longer than"
152
+
153
+ Check "Run the task as soon as possible after a scheduled start is missed"
154
+
155
+ Check "If the task failed, restart every: 10 minutes", and "Attempt to restart up to: 3 times"
156
+
157
+ Click OK
158
+
159
+ Start the scheduled task and again check that the agent is connected
160
+
161
+ Go back to the node’s web page in Jenkins. If everything works then page should say "Agent is connected"
162
+
163
+ Installing a Jenkins agent on Windows
164
+ You can install a Jenkins agent on Windows using the command line. In this video, Darin reviews setting up and installing the Jenkins agent, including how to create any necessary files.
165
+
166
+ How to install a Jenkins agent on Windows
167
+
168
+ Creating a macOS agent for Jenkins
169
+ This video reviews the process of creating a macOS agent for Jenkins using Java 11.
170
+
171
+
data/docs/raw/pipeline_steps.txt CHANGED
The diff for this file is too large to render. See raw diff
 
data/docs/raw/pipeline_syntax.txt CHANGED
The diff for this file is too large to render. See raw diff
 
data/docs/raw/using_a_jenkinsfile.txt CHANGED
@@ -1,810 +1,810 @@
1
- Using a Jenkinsfile
2
- Table of Contents
3
- Creating a Jenkinsfile
4
- Build
5
- Test
6
- Deploy
7
- Working with your Jenkinsfile
8
- Using environment variables
9
- Setting environment variables
10
- Setting environment variables dynamically
11
- Handling credentials
12
- For secret text, usernames and passwords, and secret files
13
- Secret text
14
- Usernames and passwords
15
- Secret files
16
- For other credential types
17
- Combining credentials in one step
18
- String interpolation
19
- Interpolation of sensitive environment variables
20
- Injection via interpolation
21
- Handling parameters
22
- Handling failure
23
- Error-handling steps
24
- Using multiple agents
25
- Optional step arguments
26
- Advanced Scripted Pipeline
27
- Parallel execution
28
- This section builds on the information covered in Getting started with Pipeline and introduces more useful steps, common patterns, and demonstrates some non-trivial Jenkinsfile examples.
29
-
30
- Creating a Jenkinsfile, which is checked into source control [1], provides a number of immediate benefits:
31
-
32
- Code review/iteration on the Pipeline
33
-
34
- Audit trail for the Pipeline
35
-
36
- Single source of truth [2] for the Pipeline, which can be viewed and edited by multiple members of the project.
37
-
38
- Pipeline supports two syntaxes, Declarative (introduced in Pipeline 2.5) and Scripted Pipeline. Both of which support building continuous delivery pipelines. Both may be used to define a Pipeline in either the web UI or with a Jenkinsfile, though it’s generally considered a best practice to create a Jenkinsfile and check the file into the source control repository.
39
-
40
- Creating a Jenkinsfile
41
- As discussed in the Defining a Pipeline in SCM, a Jenkinsfile is a text file that contains the definition of a Jenkins Pipeline and is checked into source control. Consider the following Pipeline which implements a basic three-stage continuous delivery pipeline.
42
-
43
- Jenkinsfile (Declarative Pipeline)
44
- pipeline {
45
- agent any
46
-
47
- stages {
48
- stage('Build') {
49
- steps {
50
- echo 'Building..'
51
- }
52
- }
53
- stage('Test') {
54
- steps {
55
- echo 'Testing..'
56
- }
57
- }
58
- stage('Deploy') {
59
- steps {
60
- echo 'Deploying....'
61
- }
62
- }
63
- }
64
- }
65
- Toggle Scripted Pipeline (Advanced)
66
- Not all Pipelines will have these same three stages, but it is a good starting point to define them for most projects. The sections below will demonstrate the creation and execution of a simple Pipeline in a test installation of Jenkins.
67
-
68
- It is assumed that there is already a source control repository set up for the project and a Pipeline has been defined in Jenkins following these instructions.
69
-
70
- Using a text editor, ideally one which supports Groovy syntax highlighting, create a new Jenkinsfile in the root directory of the project.
71
-
72
- The Declarative Pipeline example above contains the minimum necessary structure to implement a continuous delivery pipeline. The agent directive, which is required, instructs Jenkins to allocate an executor and workspace for the Pipeline. Without an agent directive, not only is the Declarative Pipeline not valid, it would not be capable of doing any work! By default the agent directive ensures that the source repository is checked out and made available for steps in the subsequent stages.
73
-
74
- The stages directive and steps directives are also required for a valid Declarative Pipeline as they instruct Jenkins what to execute and in which stage it should be executed.
75
-
76
- For more advanced usage with Scripted Pipeline, the example above node is a crucial first step as it allocates an executor and workspace for the Pipeline. In essence, without node, a Pipeline cannot do any work! From within node, the first order of business will be to checkout the source code for this project. Since the Jenkinsfile is being pulled directly from source control, Pipeline provides a quick and easy way to access the right revision of the source code.
77
-
78
- Jenkinsfile (Scripted Pipeline)
79
- node {
80
- checkout scm
81
- /* .. snip .. */
82
- }
83
- The checkout step will checkout code from source control; scm is a special variable which instructs the checkout step to clone the specific revision which triggered this Pipeline run.
84
- Build
85
- For many projects the beginning of "work" in the Pipeline would be the "build" stage. Typically this stage of the Pipeline will be where source code is assembled, compiled, or packaged. The Jenkinsfile is not a replacement for an existing build tool such as GNU/Make, Maven, Gradle, or others, but rather can be viewed as a glue layer to bind the multiple phases of a project’s development lifecycle (build, test, deploy) together.
86
-
87
- Jenkins has a number of plugins for invoking practically any build tool in general use, but this example will simply invoke make from a shell step (sh). The sh step assumes the system is Unix/Linux-based, for Windows-based systems the bat could be used instead.
88
-
89
- Jenkinsfile (Declarative Pipeline)
90
- pipeline {
91
- agent any
92
-
93
- stages {
94
- stage('Build') {
95
- steps {
96
- sh 'make'
97
- archiveArtifacts artifacts: '**/target/*.jar', fingerprint: true
98
- }
99
- }
100
- }
101
- }
102
- Toggle Scripted Pipeline (Advanced)
103
- The sh step invokes the make command and will only continue if a zero exit code is returned by the command. Any non-zero exit code will fail the Pipeline.
104
- archiveArtifacts captures the files built matching the include pattern (**/target/*.jar) and saves them to the Jenkins controller for later retrieval.
105
- Archiving artifacts is not a substitute for using external artifact repositories such as Artifactory or Nexus and should be considered only for basic reporting and file archival.
106
-
107
- Test
108
- Running automated tests is a crucial component of any successful continuous delivery process. As such, Jenkins has a number of test recording, reporting, and visualization facilities provided by a number of plugins. At a fundamental level, when there are test failures, it is useful to have Jenkins record the failures for reporting and visualization in the web UI. The example below uses the junit step, provided by the JUnit plugin.
109
-
110
- In the example below, if tests fail, the Pipeline is marked "unstable", as denoted by a yellow ball in the web UI. Based on the recorded test reports, Jenkins can also provide historical trend analysis and visualization.
111
-
112
- Jenkinsfile (Declarative Pipeline)
113
- pipeline {
114
- agent any
115
-
116
- stages {
117
- stage('Test') {
118
- steps {
119
- /* `make check` returns non-zero on test failures,
120
- * using `true` to allow the Pipeline to continue nonetheless
121
- */
122
- sh 'make check || true'
123
- junit '**/target/*.xml'
124
- }
125
- }
126
- }
127
- }
128
- Toggle Scripted Pipeline (Advanced)
129
- Using an inline shell conditional (sh 'make check || true') ensures that the sh step always sees a zero exit code, giving the junit step the opportunity to capture and process the test reports. Alternative approaches to this are covered in more detail in the Handling failure section below.
130
- junit captures and associates the JUnit XML files matching the inclusion pattern (**/target/*.xml).
131
- Deploy
132
- Deployment can imply a variety of steps, depending on the project or organization requirements, and may be anything from publishing built artifacts to an Artifactory server, to pushing code to a production system.
133
-
134
- At this stage of the example Pipeline, both the "Build" and "Test" stages have successfully executed. In essence, the "Deploy" stage will only execute assuming previous stages completed successfully, otherwise the Pipeline would have exited early.
135
-
136
- Jenkinsfile (Declarative Pipeline)
137
- pipeline {
138
- agent any
139
-
140
- stages {
141
- stage('Deploy') {
142
- when {
143
- expression {
144
- currentBuild.result == null || currentBuild.result == 'SUCCESS'
145
- }
146
- }
147
- steps {
148
- sh 'make publish'
149
- }
150
- }
151
- }
152
- }
153
- Toggle Scripted Pipeline (Advanced)
154
- Accessing the currentBuild.result variable allows the Pipeline to determine if there were any test failures. In which case, the value would be UNSTABLE.
155
- Assuming everything has executed successfully in the example Jenkins Pipeline, each successful Pipeline run will have associated build artifacts archived, test results reported upon and the full console output all in Jenkins.
156
-
157
- A Scripted Pipeline can include conditional tests (shown above), loops, try/catch/finally blocks, and even functions. The next section will cover this advanced Scripted Pipeline syntax in more detail.
158
-
159
- Working with your Jenkinsfile
160
- The following sections provide details about handling:
161
-
162
- specific Pipeline syntax in your Jenkinsfile and
163
-
164
- features and functionality of Pipeline syntax which are essential in building your application or Pipeline project.
165
-
166
- Using environment variables
167
- Jenkins Pipeline exposes environment variables via the global variable env, which is available from anywhere within a Jenkinsfile. The full list of environment variables accessible from within Jenkins Pipeline is documented at ${YOUR_JENKINS_URL}/pipeline-syntax/globals#env and includes:
168
-
169
- BUILD_ID
170
- The current build ID, identical to BUILD_NUMBER for builds created in Jenkins versions 1.597+.
171
-
172
- BUILD_NUMBER
173
- The current build number, such as "153".
174
-
175
- BUILD_TAG
176
- String of jenkins-${JOB_NAME}-${BUILD_NUMBER}. Convenient to put into a resource file, a jar file, etc for easier identification.
177
-
178
- BUILD_URL
179
- The URL where the results of this build can be found (for example, http://buildserver/jenkins/job/MyJobName/17/).
180
-
181
- EXECUTOR_NUMBER
182
- The unique number that identifies the current executor (among executors of the same machine) performing this build. This is the number you see in the "build executor status", except that the number starts from 0, not 1.
183
-
184
- JAVA_HOME
185
- If your job is configured to use a specific JDK, this variable is set to the JAVA_HOME of the specified JDK. When this variable is set, PATH is also updated to include the bin subdirectory of JAVA_HOME.
186
-
187
- JENKINS_URL
188
- Full URL of Jenkins, such as https://example.com:port/jenkins/ (NOTE: only available if Jenkins URL set in "System Configuration").
189
-
190
- JOB_NAME
191
- Name of the project of this build, such as "foo" or "foo/bar".
192
-
193
- NODE_NAME
194
- The name of the node the current build is running on. Set to 'master' for the Jenkins controller.
195
-
196
- WORKSPACE
197
- The absolute path of the workspace.
198
-
199
- Referencing or using these environment variables can be accomplished like accessing any key in a Groovy Map, for example:
200
-
201
- Jenkinsfile (Declarative Pipeline)
202
- pipeline {
203
- agent any
204
- stages {
205
- stage('Example') {
206
- steps {
207
- echo "Running ${env.BUILD_ID} on ${env.JENKINS_URL}"
208
- }
209
- }
210
- }
211
- }
212
- Toggle Scripted Pipeline (Advanced)
213
- Setting environment variables
214
- Setting an environment variable within a Jenkins Pipeline is accomplished differently depending on whether Declarative or Scripted Pipeline is used.
215
-
216
- Declarative Pipeline supports an environment directive, whereas users of Scripted Pipeline must use the withEnv step.
217
-
218
- Jenkinsfile (Declarative Pipeline)
219
- pipeline {
220
- agent any
221
- environment {
222
- CC = 'clang'
223
- }
224
- stages {
225
- stage('Example') {
226
- environment {
227
- DEBUG_FLAGS = '-g'
228
- }
229
- steps {
230
- sh 'printenv'
231
- }
232
- }
233
- }
234
- }
235
- Toggle Scripted Pipeline (Advanced)
236
- An environment directive used in the top-level pipeline block will apply to all steps within the Pipeline.
237
- An environment directive defined within a stage will only apply the given environment variables to steps within the stage.
238
- Setting environment variables dynamically
239
- Environment variables can be set at run time and can be used by shell scripts (sh), Windows batch scripts (bat) and PowerShell scripts (powershell). Each script can either returnStatus or returnStdout. More information on scripts.
240
-
241
- Below is an example in a declarative pipeline using sh (shell) with both returnStatus and returnStdout.
242
-
243
- Jenkinsfile (Declarative Pipeline)
244
- pipeline {
245
- agent any
246
- environment {
247
- // Using returnStdout
248
- CC = """${sh(
249
- returnStdout: true,
250
- script: 'echo "clang"'
251
- )}"""
252
- // Using returnStatus
253
- EXIT_STATUS = """${sh(
254
- returnStatus: true,
255
- script: 'exit 1'
256
- )}"""
257
- }
258
- stages {
259
- stage('Example') {
260
- environment {
261
- DEBUG_FLAGS = '-g'
262
- }
263
- steps {
264
- sh 'printenv'
265
- }
266
- }
267
- }
268
- }
269
- An agent must be set at the top level of the pipeline. This will fail if agent is set as agent none.
270
- When using returnStdout a trailing whitespace will be appended to the returned string. Use .trim() to remove this.
271
- Handling credentials
272
- Credentials configured in Jenkins can be handled in Pipelines for immediate use. Read more about using credentials in Jenkins on the Using credentials page.
273
-
274
- The correct way to handle credentials in Jenkins
275
-
276
- For secret text, usernames and passwords, and secret files
277
- Jenkins' declarative Pipeline syntax has the credentials() helper method (used within the environment directive) which supports secret text, username and password, as well as secret file credentials. If you want to handle other types of credentials, refer to the For other credential types section.
278
-
279
- Secret text
280
- The following Pipeline code shows an example of how to create a Pipeline using environment variables for secret text credentials.
281
-
282
- In this example, two secret text credentials are assigned to separate environment variables to access Amazon Web Services (AWS). These credentials would have been configured in Jenkins with their respective credential IDs jenkins-aws-secret-key-id and jenkins-aws-secret-access-key.
283
-
284
- Jenkinsfile (Declarative Pipeline)
285
- pipeline {
286
- agent {
287
- // Define agent details here
288
- }
289
- environment {
290
- AWS_ACCESS_KEY_ID = credentials('jenkins-aws-secret-key-id')
291
- AWS_SECRET_ACCESS_KEY = credentials('jenkins-aws-secret-access-key')
292
- }
293
- stages {
294
- stage('Example stage 1') {
295
- steps {
296
- //
297
- }
298
- }
299
- stage('Example stage 2') {
300
- steps {
301
- //
302
- }
303
- }
304
- }
305
- }
306
- You can reference the two credential environment variables (defined in this Pipeline’s environment directive), within this stage’s steps using the syntax $AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY. For example, here you can authenticate to AWS using the secret text credentials assigned to these credential variables. To maintain the security and anonymity of these credentials, if the job displays the value of these credential variables from within the Pipeline (such as echo $AWS_SECRET_ACCESS_KEY), Jenkins only returns the value “****” to reduce the risk of secret information being disclosed to the console output and any logs. Any sensitive information in credential IDs themselves (such as usernames) are also returned as “****” in the Pipeline run’s output. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
307
- In this Pipeline example, the credentials assigned to the two AWS_…​ environment variables are scoped globally for the entire Pipeline, so these credential variables could also be used in this stage’s steps. If, however, the environment directive in this Pipeline were moved to a specific stage (as is the case in the Usernames and passwords Pipeline example below), then these AWS_…​ environment variables would only be scoped to the steps in that stage.
308
- Storing static AWS keys in Jenkins credentials is not very secure. If you can run Jenkins itself in AWS (at least the agent), it is preferable to use IAM roles for a computer or EKS service account. It is also possible to use web identity federation.
309
- Usernames and passwords
310
- The following Pipeline code snippets show an example of how to create a Pipeline using environment variables for username and password credentials.
311
-
312
- In this example, username and password credentials are assigned to environment variables to access a Bitbucket repository in a common account or team for your organization; these credentials would have been configured in Jenkins with the credential ID jenkins-bitbucket-common-creds.
313
-
314
- When setting the credential environment variable in the environment directive:
315
-
316
- environment {
317
- BITBUCKET_COMMON_CREDS = credentials('jenkins-bitbucket-common-creds')
318
- }
319
- this actually sets the following three environment variables:
320
-
321
- BITBUCKET_COMMON_CREDS - contains a username and a password separated by a colon in the format username:password.
322
-
323
- BITBUCKET_COMMON_CREDS_USR - an additional variable containing the username component only.
324
-
325
- BITBUCKET_COMMON_CREDS_PSW - an additional variable containing the password component only.
326
-
327
- By convention, variable names for environment variables are typically specified in capital case, with individual words separated by underscores. You can, however, specify any legitimate variable name using lower case characters. Bear in mind that the additional environment variables created by the credentials() method (above) will always be appended with _USR and _PSW (i.e. in the format of an underscore followed by three capital letters).
328
-
329
- The following code snippet shows the example Pipeline in its entirety:
330
-
331
- Jenkinsfile (Declarative Pipeline)
332
- pipeline {
333
- agent {
334
- // Define agent details here
335
- }
336
- stages {
337
- stage('Example stage 1') {
338
- environment {
339
- BITBUCKET_COMMON_CREDS = credentials('jenkins-bitbucket-common-creds')
340
- }
341
- steps {
342
- //
343
- }
344
- }
345
- stage('Example stage 2') {
346
- steps {
347
- //
348
- }
349
- }
350
- }
351
- }
352
- The following credential environment variables (defined in this Pipeline’s environment directive) are available within this stage’s steps and can be referenced using the syntax:
353
- $BITBUCKET_COMMON_CREDS
354
-
355
- $BITBUCKET_COMMON_CREDS_USR
356
-
357
- $BITBUCKET_COMMON_CREDS_PSW
358
-
359
- For example, here you can authenticate to Bitbucket with the username and password assigned to these credential variables. To maintain the security and anonymity of these credentials, if the job displays the value of these credential variables from within the Pipeline the same behavior described in the Secret text example above applies to these username and password credential variable types too. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
360
-
361
- In this Pipeline example, the credentials assigned to the three BITBUCKET_COMMON_CREDS…​ environment variables are scoped only to Example stage 1, so these credential variables are not available for use in this Example stage 2 stage’s steps. If, however, the environment directive in this Pipeline were moved immediately within the pipeline block (as is the case in the Secret text Pipeline example above), then these BITBUCKET_COMMON_CREDS…​ environment variables would be scoped globally and could be used in any stage’s steps.
362
- Secret files
363
- A secret file is a credential which is stored in a file and uploaded to Jenkins. Secret files are used for credentials that are:
364
-
365
- too unwieldy to enter directly into Jenkins, and/or
366
-
367
- in binary format, such as a GPG file.
368
-
369
- In this example, we use a Kubernetes config file that has been configured as a secret file credential named my-kubeconfig.
370
-
371
- Jenkinsfile (Declarative Pipeline)
372
- pipeline {
373
- agent {
374
- // Define agent details here
375
- }
376
- environment {
377
- // The MY_KUBECONFIG environment variable will be assigned the value of a temporary file.
378
- // For example:
379
- // /home/user/.jenkins/workspace/cred_test@tmp/secretFiles/546a5cf3-9b56-4165-a0fd-19e2afe6b31f/kubeconfig.txt
380
- MY_KUBECONFIG = credentials('my-kubeconfig')
381
- }
382
- stages {
383
- stage('Example stage 1') {
384
- steps {
385
- sh("kubectl --kubeconfig $MY_KUBECONFIG get pods")
386
- }
387
- }
388
- }
389
- }
390
- For other credential types
391
- If you need to set credentials in a Pipeline for anything other than secret text, usernames and passwords, or secret files like SSH keys or certificates, use Jenkins' Snippet Generator feature, which you can access through Jenkins' classic UI.
392
-
393
- To access the Snippet Generator for your Pipeline project/item:
394
-
395
- From the Jenkins Dashboard, select the name of your Pipeline project/item.
396
-
397
- In the left navigation pane, select Pipeline Syntax and ensure that the Snippet Generator option is available at the top of the navigation pane.
398
-
399
- From the Sample Step field, choose withCredentials: Bind credentials to variables.
400
-
401
- Under Bindings, click Add and choose from the dropdown:
402
-
403
- SSH User Private Key - to handle SSH public/private key pair credentials, from which you can specify:
404
-
405
- Key File Variable - the name of the environment variable that will be bound to these credentials. Jenkins actually assigns this temporary variable to the secure location of the private key file required in the SSH public/private key pair authentication process.
406
-
407
- Passphrase Variable ( Optional ) - the name of the environment variable that will be bound to the passphrase associated with the SSH public/private key pair.
408
-
409
- Username Variable ( Optional ) - the name of the environment variable that will be bound to username associated with the SSH public/private key pair.
410
-
411
- Credentials - choose the SSH public/private key credentials stored in Jenkins. The value of this field is the credential ID, which Jenkins writes out to the generated snippet.
412
-
413
- Certificate - to handle PKCS#12 certificates, from which you can specify:
414
-
415
- Keystore Variable - the name of the environment variable that will be bound to these credentials. Jenkins actually assigns this temporary variable to the secure location of the certificate’s keystore required in the certificate authentication process.
416
-
417
- Password Variable ( Optional ) - the name of the environment variable that will be bound to the password associated with the certificate.
418
-
419
- Alias Variable ( Optional ) - the name of the environment variable that will be bound to the unique alias associated with the certificate.
420
-
421
- Credentials - choose the certificate credentials stored in Jenkins. The value of this field is the credential ID, which Jenkins writes out to the generated snippet.
422
-
423
- Docker client certificate - to handle Docker Host Certificate Authentication.
424
-
425
- Click Generate Pipeline Script and Jenkins generates a withCredentials(…​) { …​ } Pipeline step snippet for the credentials you specified, which you can then copy and paste into your Declarative or Scripted Pipeline code.
426
- Notes:
427
-
428
- The Credentials fields (above) show the names of credentials configured in Jenkins. However, these values are converted to credential IDs after clicking Generate Pipeline Script.
429
-
430
- To combine more than one credential in a single withCredentials(…​) { …​ } Pipeline step, see Combining credentials in one step (below) for details.
431
-
432
- SSH User Private Key example
433
-
434
- withCredentials(bindings: [sshUserPrivateKey(credentialsId: 'jenkins-ssh-key-for-abc', \
435
- keyFileVariable: 'SSH_KEY_FOR_ABC', \
436
- passphraseVariable: '', \
437
- usernameVariable: '')]) {
438
- // some block
439
- }
440
- The optional passphraseVariable and usernameVariable definitions can be deleted in your final Pipeline code.
441
-
442
- Certificate example
443
-
444
- withCredentials(bindings: [certificate(aliasVariable: '', \
445
- credentialsId: 'jenkins-certificate-for-xyz', \
446
- keystoreVariable: 'CERTIFICATE_FOR_XYZ', \
447
- passwordVariable: 'XYZ-CERTIFICATE-PASSWORD')]) {
448
- // some block
449
- }
450
- The optional aliasVariable and passwordVariable variable definitions can be deleted in your final Pipeline code.
451
-
452
- The following code snippet shows an example Pipeline in its entirety, which implements the SSH User Private Key and Certificate snippets above:
453
-
454
- Jenkinsfile (Declarative Pipeline)
455
- pipeline {
456
- agent {
457
- // define agent details
458
- }
459
- stages {
460
- stage('Example stage 1') {
461
- steps {
462
- withCredentials(bindings: [sshUserPrivateKey(credentialsId: 'jenkins-ssh-key-for-abc', \
463
- keyFileVariable: 'SSH_KEY_FOR_ABC')]) {
464
- //
465
- }
466
- withCredentials(bindings: [certificate(credentialsId: 'jenkins-certificate-for-xyz', \
467
- keystoreVariable: 'CERTIFICATE_FOR_XYZ', \
468
- passwordVariable: 'XYZ-CERTIFICATE-PASSWORD')]) {
469
- //
470
- }
471
- }
472
- }
473
- stage('Example stage 2') {
474
- steps {
475
- //
476
- }
477
- }
478
- }
479
- }
480
- Within this step, you can reference the credential environment variable with the syntax $SSH_KEY_FOR_ABC. For example, here you can authenticate to the ABC application with its configured SSH public/private key pair credentials, whose SSH User Private Key file is assigned to $SSH_KEY_FOR_ABC.
481
- Within this step, you can reference the credential environment variable with the syntax $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD. For example, here you can authenticate to the XYZ application with its configured certificate credentials, whose Certificate's keystore file and password are assigned to the variables $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD, respectively.
482
- In this Pipeline example, the credentials assigned to the $SSH_KEY_FOR_ABC, $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD environment variables are scoped only within their respective withCredentials( …​ ) { …​ } steps, so these credential variables are not available for use in this Example stage 2 stage’s steps.
483
- To maintain the security and anonymity of these credentials, if you attempt to retrieve the value of these credential variables from within these withCredentials( …​ ) { …​ } steps, the same behavior described in the Secret text example (above) applies to these SSH public/private key pair credential and certificate variable types too. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
484
-
485
- When using the Sample Step field’s withCredentials: Bind credentials to variables option in the Snippet Generator, only credentials which your current Pipeline project/item has access to can be selected from any Credentials field’s list. While you can manually write a withCredentials( …​ ) { …​ } step for your Pipeline (like the examples above), using the Snippet Generator is recommended to avoid specifying credentials that are out of scope for this Pipeline project/item, which when run, will make the step fail.
486
-
487
- You can also use the Snippet Generator to generate withCredentials( …​ ) { …​ } steps to handle secret text, usernames and passwords and secret files. However, if you only need to handle these types of credentials, it is recommended you use the relevant procedure described in the section above for improved Pipeline code readability.
488
-
489
- The use of single-quotes instead of double-quotes to define the script (the implicit parameter to sh) in Groovy above. The single-quotes will cause the secret to be expanded by the shell as an environment variable. The double-quotes are potentially less secure as the secret is interpolated by Groovy, and so typical operating system process listings will accidentally disclose it:
490
-
491
- node {
492
- withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
493
- sh /* WRONG! */ """
494
- set +x
495
- curl -H 'Token: $TOKEN' https://some.api/
496
- """
497
- sh /* CORRECT */ '''
498
- set +x
499
- curl -H 'Token: $TOKEN' https://some.api/
500
- '''
501
- }
502
- }
503
- Combining credentials in one step
504
- Using the Snippet Generator, you can make multiple credentials available within a single withCredentials( …​ ) { …​ } step by doing the following:
505
-
506
- From the Jenkins Dashboard, select the name of your Pipeline project/item.
507
-
508
- In the left navigation pane, select Pipeline Syntax and ensure that the Snippet Generator option is available at the top of the navigation pane.
509
-
510
- From the Sample Step field, choose withCredentials: Bind credentials to variables.
511
-
512
- Click Add under Bindings.
513
-
514
- Choose the credential type to add to the withCredentials( …​ ) { …​ } step from the dropdown list.
515
-
516
- Specify the credential Bindings details. Read more about these in the procedure under For other credential types (above).
517
-
518
- Repeat from "Click Add …​" (above) for each (set of) credential/s to add to the withCredentials( …​ ) { …​ } step.
519
-
520
- Select Generate Pipeline Script to generate the final withCredentials( …​ ) { …​ } step snippet.
521
-
522
- String interpolation
523
- Jenkins Pipeline uses rules identical to Groovy for string interpolation. Groovy’s String interpolation support can be confusing to many newcomers to the language. While Groovy supports declaring a string with either single quotes, or double quotes, for example:
524
-
525
- def singlyQuoted = 'Hello'
526
- def doublyQuoted = "World"
527
- Only the latter string will support the dollar-sign ($) based string interpolation, for example:
528
-
529
- def username = 'Jenkins'
530
- echo 'Hello Mr. ${username}'
531
- echo "I said, Hello Mr. ${username}"
532
- Would result in:
533
-
534
- Hello Mr. ${username}
535
- I said, Hello Mr. Jenkins
536
- Understanding how to use string interpolation is vital for using some of Pipeline’s more advanced features.
537
-
538
- Interpolation of sensitive environment variables
539
- Groovy string interpolation should never be used with credentials.
540
-
541
- Groovy GStrings (double-quoted or triple-double-quoted strings) expand ${TOKEN} before the command is sent to the agent. When that happens, the secret is copied into the process arguments (visible via tooling such as ps) and any shell metacharacters inside the secret or user-controlled value are executed immediately. Always send a literal command string to sh, bat, powershell, or pwsh and let the shell read secrets from its environment, which withCredentials or the environment block have already populated.
542
-
543
- Jenkinsfile (Declarative Pipeline)
544
- pipeline {
545
- agent any
546
- environment {
547
- API_TOKEN = credentials('example-token-id')
548
- }
549
- stages {
550
- stage('Example') {
551
- steps {
552
- /* WRONG */
553
- sh "curl -H 'Authorization: Bearer ${API_TOKEN}' https://example.com"
554
- """
555
- }
556
- }
557
- }
558
- }
559
- Jenkinsfile (Declarative Pipeline)
560
- pipeline {
561
- agent any
562
- environment {
563
- API_TOKEN = credentials('example-token-id')
564
- }
565
- stages {
566
- stage('Example') {
567
- steps {
568
- /* CORRECT */
569
- sh 'curl -H "Authorization: Bearer ${API_TOKEN}" https://example.com'
570
- }
571
- }
572
- }
573
- }
574
- Any Groovy construct that avoids interpolation (for example, sh(script: 'curl …​ $API_TOKEN', label: 'call API')) is safe; the key is keeping secrets out of GStrings so only the shell expands them.
575
-
576
- Injection via interpolation
577
- Groovy string interpolation can inject rogue commands into command interpreters via special characters.
578
-
579
- Another note of caution. Using Groovy string interpolation for user-controlled variables with steps that pass their arguments to command interpreters such as the sh, bat, powershell, or pwsh steps can result in problems analogous to SQL injection. This occurs when a user-controlled variable (generally an environment variable, usually a parameter passed to the build) that contains special characters (e.g. / \ $ & % ^ > < | ;) is passed to the sh, bat, powershell, or pwsh steps using Groovy interpolation. For a simple example:
580
-
581
- Jenkinsfile (Declarative Pipeline)
582
- pipeline {
583
- agent any
584
- parameters {
585
- string(name: 'STATEMENT', defaultValue: 'hello; ls /', description: 'What should I say?')
586
- }
587
- stages {
588
- stage('Example') {
589
- steps {
590
- /* WRONG! */
591
- sh("echo ${STATEMENT}")
592
- }
593
- }
594
- }
595
- }
596
- In this example, the argument to the sh step is evaluated by Groovy, and STATEMENT is interpolated directly into the argument as if sh('echo hello; ls /') has been written in the Pipeline. When this is processed on the agent, rather than echoing the value hello; ls /, it will echo hello then proceed to list the entire root directory of the agent. Any user able to control a variable interpolated by such a step would be able to make the sh step run arbitrary code on the agent. To avoid this problem, make sure arguments to steps such as sh or bat that reference parameters or other user-controlled environment variables use single quotes to avoid Groovy interpolation.
597
-
598
- Jenkinsfile (Declarative Pipeline)
599
- pipeline {
600
- agent any
601
- parameters {
602
- string(name: 'STATEMENT', defaultValue: 'hello; ls /', description: 'What should I say?')
603
- }
604
- stages {
605
- stage('Example') {
606
- steps {
607
- /* CORRECT */
608
- sh('echo ${STATEMENT}')
609
- }
610
- }
611
- }
612
- }
613
- Credential mangling is another issue that can occur when credentials that contain special characters are passed to a step using Groovy interpolation. When the credential value is mangled, it is no longer valid and will no longer be masked in the console log.
614
-
615
- Jenkinsfile (Declarative Pipeline)
616
- pipeline {
617
- agent any
618
- environment {
619
- EXAMPLE_KEY = credentials('example-credentials-id') // Secret value is 'sec%ret'
620
- }
621
- stages {
622
- stage('Example') {
623
- steps {
624
- /* WRONG! */
625
- bat "echo ${EXAMPLE_KEY}"
626
- }
627
- }
628
- }
629
- }
630
- Here, the bat step receives echo sec%ret and the Windows batch shell will simply drop the % and print out the value secret. Because there is a single character difference, the value secret will not be masked. Though the value is not the same as the actual credential, this is still a significant exposure of sensitive information. Again, single-quotes avoids this issue.
631
-
632
- Jenkinsfile (Declarative Pipeline)
633
- pipeline {
634
- agent any
635
- environment {
636
- EXAMPLE_KEY = credentials('example-credentials-id') // Secret value is 'sec%ret'
637
- }
638
- stages {
639
- stage('Example') {
640
- steps {
641
- /* CORRECT */
642
- bat 'echo %EXAMPLE_KEY%'
643
- }
644
- }
645
- }
646
- }
647
- Handling parameters
648
- Declarative Pipeline supports parameters out-of-the-box, allowing the Pipeline to accept user-specified parameters at runtime via the parameters directive. Configuring parameters with Scripted Pipeline is done with the properties step, which can be found in the Snippet Generator.
649
-
650
- If you configured your pipeline to accept parameters using the Build with Parameters option, those parameters are accessible as members of the params variable.
651
-
652
- Assuming that a String parameter named "Greeting" has been configured in the Jenkinsfile, it can access that parameter via ${params.Greeting}:
653
-
654
- Jenkinsfile (Declarative Pipeline)
655
- pipeline {
656
- agent any
657
- parameters {
658
- string(name: 'Greeting', defaultValue: 'Hello', description: 'How should I greet the world?')
659
- }
660
- stages {
661
- stage('Example') {
662
- steps {
663
- echo "${params.Greeting} World!"
664
- }
665
- }
666
- }
667
- }
668
- Toggle Scripted Pipeline (Advanced)
669
- Handling failure
670
- Declarative Pipeline supports robust failure handling by default via its post section which allows declaring a number of different "post conditions" such as: always, unstable, success, failure, and changed. The Pipeline Syntax section provides more detail on how to use the various post conditions.
671
-
672
- Jenkinsfile (Declarative Pipeline)
673
- pipeline {
674
- agent any
675
- stages {
676
- stage('Test') {
677
- steps {
678
- sh 'make check'
679
- }
680
- }
681
- }
682
- post {
683
- always {
684
- junit '**/target/*.xml'
685
- }
686
- failure {
687
- mail to: 'team@example.com', subject: 'The Pipeline failed :('
688
- }
689
- }
690
- }
691
- Toggle Scripted Pipeline (Advanced)
692
- Scripted Pipeline however relies on Groovy’s built-in try/catch/finally semantics for handling failures during execution of the Pipeline.
693
-
694
- In the Test example above, the sh step was modified to never return a non-zero exit code (sh 'make check || true'). This approach, while valid, means the following stages need to check currentBuild.result to know if there has been a test failure or not.
695
-
696
- An alternative way of handling this, which preserves the early-exit behavior of failures in Pipeline, while still giving junit the chance to capture test reports, is to use a series of try/finally blocks:
697
-
698
- Error-handling steps
699
- Jenkins Pipelines provide dedicated steps for flexible error handling, allowing you to control how your Pipeline responds to errors and warnings. These steps help you surface errors and warnings clearly in Jenkins, giving you control over whether the Pipeline fails, continues, or simply reports a warning. For more information, refer to:
700
-
701
- catchError
702
-
703
- error
704
-
705
- unstable
706
-
707
- warnError
708
-
709
- Using multiple agents
710
- In all the previous examples, only a single agent has been used. This means Jenkins will allocate an executor wherever one is available, regardless of how it is labeled or configured. Not only can this behavior be overridden, but Pipeline allows utilizing multiple agents in the Jenkins environment from within the same Jenkinsfile, which can be helpful for more advanced use-cases such as executing builds/tests across multiple platforms.
711
-
712
- In the example below, the "Build" stage will be performed on one agent and the built results will be reused on two subsequent agents, labelled "linux" and "windows" respectively, during the "Test" stage.
713
-
714
- Jenkinsfile (Declarative Pipeline)
715
- pipeline {
716
- agent none
717
- stages {
718
- stage('Build') {
719
- agent any
720
- steps {
721
- checkout scm
722
- sh 'make'
723
- stash includes: '**/target/*.jar', name: 'app'
724
- }
725
- }
726
- stage('Test on Linux') {
727
- agent {
728
- label 'linux'
729
- }
730
- steps {
731
- unstash 'app'
732
- sh 'make check'
733
- }
734
- post {
735
- always {
736
- junit '**/target/*.xml'
737
- }
738
- }
739
- }
740
- stage('Test on Windows') {
741
- agent {
742
- label 'windows'
743
- }
744
- steps {
745
- unstash 'app'
746
- bat 'make check'
747
- }
748
- post {
749
- always {
750
- junit '**/target/*.xml'
751
- }
752
- }
753
- }
754
- }
755
- }
756
- Toggle Scripted Pipeline (Advanced)
757
- The stash step allows capturing files matching an inclusion pattern (**/target/*.jar) for reuse within the same Pipeline. Once the Pipeline has completed its execution, stashed files are deleted from the Jenkins controller.
758
- The parameter in agent/node allows for any valid Jenkins label expression. Consult the Pipeline Syntax section for more details.
759
- unstash will retrieve the named "stash" from the Jenkins controller into the Pipeline’s current workspace.
760
- The bat script allows for executing batch scripts on Windows-based platforms.
761
- Optional step arguments
762
- Pipeline follows the Groovy language convention of allowing parentheses to be omitted around method arguments.
763
-
764
- Many Pipeline steps also use the named-parameter syntax as a shorthand for creating a Map in Groovy, which uses the syntax [key1: value1, key2: value2]. Making statements like the following functionally equivalent:
765
-
766
- git url: 'git://example.com/amazing-project.git', branch: 'master'
767
- git([url: 'git://example.com/amazing-project.git', branch: 'master'])
768
- For convenience, when calling steps taking only one parameter (or only one mandatory parameter), the parameter name may be omitted, for example:
769
-
770
- sh 'echo hello' /* short form */
771
- sh([script: 'echo hello']) /* long form */
772
- Advanced Scripted Pipeline
773
- Scripted Pipeline is a domain-specific language [3] based on Groovy, most Groovy syntax can be used in Scripted Pipeline without modification.
774
-
775
- Parallel execution
776
- The example in the section above runs tests across two different platforms in a linear series. In practice, if the make check execution takes 30 minutes to complete, the "Test" stage would now take 60 minutes to complete!
777
-
778
- Fortunately, Pipeline has built-in functionality for executing portions of Scripted Pipeline in parallel, implemented in the aptly named parallel step.
779
-
780
- Refactoring the example above to use the parallel step:
781
-
782
- Jenkinsfile (Scripted Pipeline)
783
- stage('Build') {
784
- /* .. snip .. */
785
- }
786
-
787
- stage('Test') {
788
- parallel linux: {
789
- node('linux') {
790
- checkout scm
791
- try {
792
- unstash 'app'
793
- sh 'make check'
794
- }
795
- finally {
796
- junit '**/target/*.xml'
797
- }
798
- }
799
- },
800
- windows: {
801
- node('windows') {
802
- /* .. snip .. */
803
- }
804
- }
805
- }
806
- Instead of executing the tests on the "linux" and "windows" labelled nodes in series, they will now execute in parallel assuming the requisite capacity exists in the Jenkins environment.
807
-
808
- 1. en.wikipedia.org/wiki/Source_control_management
809
- 2. en.wikipedia.org/wiki/Single_Source_of_Truth
810
- 3. en.wikipedia.org/wiki/Domain-specific_language
 
1
+ Using a Jenkinsfile
2
+ Table of Contents
3
+ Creating a Jenkinsfile
4
+ Build
5
+ Test
6
+ Deploy
7
+ Working with your Jenkinsfile
8
+ Using environment variables
9
+ Setting environment variables
10
+ Setting environment variables dynamically
11
+ Handling credentials
12
+ For secret text, usernames and passwords, and secret files
13
+ Secret text
14
+ Usernames and passwords
15
+ Secret files
16
+ For other credential types
17
+ Combining credentials in one step
18
+ String interpolation
19
+ Interpolation of sensitive environment variables
20
+ Injection via interpolation
21
+ Handling parameters
22
+ Handling failure
23
+ Error-handling steps
24
+ Using multiple agents
25
+ Optional step arguments
26
+ Advanced Scripted Pipeline
27
+ Parallel execution
28
+ This section builds on the information covered in Getting started with Pipeline and introduces more useful steps, common patterns, and demonstrates some non-trivial Jenkinsfile examples.
29
+
30
+ Creating a Jenkinsfile, which is checked into source control [1], provides a number of immediate benefits:
31
+
32
+ Code review/iteration on the Pipeline
33
+
34
+ Audit trail for the Pipeline
35
+
36
+ Single source of truth [2] for the Pipeline, which can be viewed and edited by multiple members of the project.
37
+
38
+ Pipeline supports two syntaxes, Declarative (introduced in Pipeline 2.5) and Scripted Pipeline. Both of which support building continuous delivery pipelines. Both may be used to define a Pipeline in either the web UI or with a Jenkinsfile, though it’s generally considered a best practice to create a Jenkinsfile and check the file into the source control repository.
39
+
40
+ Creating a Jenkinsfile
41
+ As discussed in the Defining a Pipeline in SCM, a Jenkinsfile is a text file that contains the definition of a Jenkins Pipeline and is checked into source control. Consider the following Pipeline which implements a basic three-stage continuous delivery pipeline.
42
+
43
+ Jenkinsfile (Declarative Pipeline)
44
+ pipeline {
45
+ agent any
46
+
47
+ stages {
48
+ stage('Build') {
49
+ steps {
50
+ echo 'Building..'
51
+ }
52
+ }
53
+ stage('Test') {
54
+ steps {
55
+ echo 'Testing..'
56
+ }
57
+ }
58
+ stage('Deploy') {
59
+ steps {
60
+ echo 'Deploying....'
61
+ }
62
+ }
63
+ }
64
+ }
65
+ Toggle Scripted Pipeline (Advanced)
66
+ Not all Pipelines will have these same three stages, but it is a good starting point to define them for most projects. The sections below will demonstrate the creation and execution of a simple Pipeline in a test installation of Jenkins.
67
+
68
+ It is assumed that there is already a source control repository set up for the project and a Pipeline has been defined in Jenkins following these instructions.
69
+
70
+ Using a text editor, ideally one which supports Groovy syntax highlighting, create a new Jenkinsfile in the root directory of the project.
71
+
72
+ The Declarative Pipeline example above contains the minimum necessary structure to implement a continuous delivery pipeline. The agent directive, which is required, instructs Jenkins to allocate an executor and workspace for the Pipeline. Without an agent directive, not only is the Declarative Pipeline not valid, it would not be capable of doing any work! By default the agent directive ensures that the source repository is checked out and made available for steps in the subsequent stages.
73
+
74
+ The stages directive and steps directives are also required for a valid Declarative Pipeline as they instruct Jenkins what to execute and in which stage it should be executed.
75
+
76
+ For more advanced usage with Scripted Pipeline, the example above node is a crucial first step as it allocates an executor and workspace for the Pipeline. In essence, without node, a Pipeline cannot do any work! From within node, the first order of business will be to checkout the source code for this project. Since the Jenkinsfile is being pulled directly from source control, Pipeline provides a quick and easy way to access the right revision of the source code.
77
+
78
+ Jenkinsfile (Scripted Pipeline)
79
+ node {
80
+ checkout scm
81
+ /* .. snip .. */
82
+ }
83
+ The checkout step will checkout code from source control; scm is a special variable which instructs the checkout step to clone the specific revision which triggered this Pipeline run.
84
+ Build
85
+ For many projects the beginning of "work" in the Pipeline would be the "build" stage. Typically this stage of the Pipeline will be where source code is assembled, compiled, or packaged. The Jenkinsfile is not a replacement for an existing build tool such as GNU/Make, Maven, Gradle, or others, but rather can be viewed as a glue layer to bind the multiple phases of a project’s development lifecycle (build, test, deploy) together.
86
+
87
+ Jenkins has a number of plugins for invoking practically any build tool in general use, but this example will simply invoke make from a shell step (sh). The sh step assumes the system is Unix/Linux-based, for Windows-based systems the bat could be used instead.
88
+
89
+ Jenkinsfile (Declarative Pipeline)
90
+ pipeline {
91
+ agent any
92
+
93
+ stages {
94
+ stage('Build') {
95
+ steps {
96
+ sh 'make'
97
+ archiveArtifacts artifacts: '**/target/*.jar', fingerprint: true
98
+ }
99
+ }
100
+ }
101
+ }
102
+ Toggle Scripted Pipeline (Advanced)
103
+ The sh step invokes the make command and will only continue if a zero exit code is returned by the command. Any non-zero exit code will fail the Pipeline.
104
+ archiveArtifacts captures the files built matching the include pattern (**/target/*.jar) and saves them to the Jenkins controller for later retrieval.
105
+ Archiving artifacts is not a substitute for using external artifact repositories such as Artifactory or Nexus and should be considered only for basic reporting and file archival.
106
+
107
+ Test
108
+ Running automated tests is a crucial component of any successful continuous delivery process. As such, Jenkins has a number of test recording, reporting, and visualization facilities provided by a number of plugins. At a fundamental level, when there are test failures, it is useful to have Jenkins record the failures for reporting and visualization in the web UI. The example below uses the junit step, provided by the JUnit plugin.
109
+
110
+ In the example below, if tests fail, the Pipeline is marked "unstable", as denoted by a yellow ball in the web UI. Based on the recorded test reports, Jenkins can also provide historical trend analysis and visualization.
111
+
112
+ Jenkinsfile (Declarative Pipeline)
113
+ pipeline {
114
+ agent any
115
+
116
+ stages {
117
+ stage('Test') {
118
+ steps {
119
+ /* `make check` returns non-zero on test failures,
120
+ * using `true` to allow the Pipeline to continue nonetheless
121
+ */
122
+ sh 'make check || true'
123
+ junit '**/target/*.xml'
124
+ }
125
+ }
126
+ }
127
+ }
128
+ Toggle Scripted Pipeline (Advanced)
129
+ Using an inline shell conditional (sh 'make check || true') ensures that the sh step always sees a zero exit code, giving the junit step the opportunity to capture and process the test reports. Alternative approaches to this are covered in more detail in the Handling failure section below.
130
+ junit captures and associates the JUnit XML files matching the inclusion pattern (**/target/*.xml).
131
+ Deploy
132
+ Deployment can imply a variety of steps, depending on the project or organization requirements, and may be anything from publishing built artifacts to an Artifactory server, to pushing code to a production system.
133
+
134
+ At this stage of the example Pipeline, both the "Build" and "Test" stages have successfully executed. In essence, the "Deploy" stage will only execute assuming previous stages completed successfully, otherwise the Pipeline would have exited early.
135
+
136
+ Jenkinsfile (Declarative Pipeline)
137
+ pipeline {
138
+ agent any
139
+
140
+ stages {
141
+ stage('Deploy') {
142
+ when {
143
+ expression {
144
+ currentBuild.result == null || currentBuild.result == 'SUCCESS'
145
+ }
146
+ }
147
+ steps {
148
+ sh 'make publish'
149
+ }
150
+ }
151
+ }
152
+ }
153
+ Toggle Scripted Pipeline (Advanced)
154
+ Accessing the currentBuild.result variable allows the Pipeline to determine if there were any test failures. In which case, the value would be UNSTABLE.
155
+ Assuming everything has executed successfully in the example Jenkins Pipeline, each successful Pipeline run will have associated build artifacts archived, test results reported upon and the full console output all in Jenkins.
156
+
157
+ A Scripted Pipeline can include conditional tests (shown above), loops, try/catch/finally blocks, and even functions. The next section will cover this advanced Scripted Pipeline syntax in more detail.
158
+
159
+ Working with your Jenkinsfile
160
+ The following sections provide details about handling:
161
+
162
+ specific Pipeline syntax in your Jenkinsfile and
163
+
164
+ features and functionality of Pipeline syntax which are essential in building your application or Pipeline project.
165
+
166
+ Using environment variables
167
+ Jenkins Pipeline exposes environment variables via the global variable env, which is available from anywhere within a Jenkinsfile. The full list of environment variables accessible from within Jenkins Pipeline is documented at ${YOUR_JENKINS_URL}/pipeline-syntax/globals#env and includes:
168
+
169
+ BUILD_ID
170
+ The current build ID, identical to BUILD_NUMBER for builds created in Jenkins versions 1.597+.
171
+
172
+ BUILD_NUMBER
173
+ The current build number, such as "153".
174
+
175
+ BUILD_TAG
176
+ String of jenkins-${JOB_NAME}-${BUILD_NUMBER}. Convenient to put into a resource file, a jar file, etc for easier identification.
177
+
178
+ BUILD_URL
179
+ The URL where the results of this build can be found (for example, http://buildserver/jenkins/job/MyJobName/17/).
180
+
181
+ EXECUTOR_NUMBER
182
+ The unique number that identifies the current executor (among executors of the same machine) performing this build. This is the number you see in the "build executor status", except that the number starts from 0, not 1.
183
+
184
+ JAVA_HOME
185
+ If your job is configured to use a specific JDK, this variable is set to the JAVA_HOME of the specified JDK. When this variable is set, PATH is also updated to include the bin subdirectory of JAVA_HOME.
186
+
187
+ JENKINS_URL
188
+ Full URL of Jenkins, such as https://example.com:port/jenkins/ (NOTE: only available if Jenkins URL set in "System Configuration").
189
+
190
+ JOB_NAME
191
+ Name of the project of this build, such as "foo" or "foo/bar".
192
+
193
+ NODE_NAME
194
+ The name of the node the current build is running on. Set to 'master' for the Jenkins controller.
195
+
196
+ WORKSPACE
197
+ The absolute path of the workspace.
198
+
199
+ Referencing or using these environment variables can be accomplished like accessing any key in a Groovy Map, for example:
200
+
201
+ Jenkinsfile (Declarative Pipeline)
202
+ pipeline {
203
+ agent any
204
+ stages {
205
+ stage('Example') {
206
+ steps {
207
+ echo "Running ${env.BUILD_ID} on ${env.JENKINS_URL}"
208
+ }
209
+ }
210
+ }
211
+ }
212
+ Toggle Scripted Pipeline (Advanced)
213
+ Setting environment variables
214
+ Setting an environment variable within a Jenkins Pipeline is accomplished differently depending on whether Declarative or Scripted Pipeline is used.
215
+
216
+ Declarative Pipeline supports an environment directive, whereas users of Scripted Pipeline must use the withEnv step.
217
+
218
+ Jenkinsfile (Declarative Pipeline)
219
+ pipeline {
220
+ agent any
221
+ environment {
222
+ CC = 'clang'
223
+ }
224
+ stages {
225
+ stage('Example') {
226
+ environment {
227
+ DEBUG_FLAGS = '-g'
228
+ }
229
+ steps {
230
+ sh 'printenv'
231
+ }
232
+ }
233
+ }
234
+ }
235
+ Toggle Scripted Pipeline (Advanced)
236
+ An environment directive used in the top-level pipeline block will apply to all steps within the Pipeline.
237
+ An environment directive defined within a stage will only apply the given environment variables to steps within the stage.
238
+ Setting environment variables dynamically
239
+ Environment variables can be set at run time and can be used by shell scripts (sh), Windows batch scripts (bat) and PowerShell scripts (powershell). Each script can either returnStatus or returnStdout. More information on scripts.
240
+
241
+ Below is an example in a declarative pipeline using sh (shell) with both returnStatus and returnStdout.
242
+
243
+ Jenkinsfile (Declarative Pipeline)
244
+ pipeline {
245
+ agent any
246
+ environment {
247
+ // Using returnStdout
248
+ CC = """${sh(
249
+ returnStdout: true,
250
+ script: 'echo "clang"'
251
+ )}"""
252
+ // Using returnStatus
253
+ EXIT_STATUS = """${sh(
254
+ returnStatus: true,
255
+ script: 'exit 1'
256
+ )}"""
257
+ }
258
+ stages {
259
+ stage('Example') {
260
+ environment {
261
+ DEBUG_FLAGS = '-g'
262
+ }
263
+ steps {
264
+ sh 'printenv'
265
+ }
266
+ }
267
+ }
268
+ }
269
+ An agent must be set at the top level of the pipeline. This will fail if agent is set as agent none.
270
+ When using returnStdout a trailing whitespace will be appended to the returned string. Use .trim() to remove this.
271
+ Handling credentials
272
+ Credentials configured in Jenkins can be handled in Pipelines for immediate use. Read more about using credentials in Jenkins on the Using credentials page.
273
+
274
+ The correct way to handle credentials in Jenkins
275
+
276
+ For secret text, usernames and passwords, and secret files
277
+ Jenkins' declarative Pipeline syntax has the credentials() helper method (used within the environment directive) which supports secret text, username and password, as well as secret file credentials. If you want to handle other types of credentials, refer to the For other credential types section.
278
+
279
+ Secret text
280
+ The following Pipeline code shows an example of how to create a Pipeline using environment variables for secret text credentials.
281
+
282
+ In this example, two secret text credentials are assigned to separate environment variables to access Amazon Web Services (AWS). These credentials would have been configured in Jenkins with their respective credential IDs jenkins-aws-secret-key-id and jenkins-aws-secret-access-key.
283
+
284
+ Jenkinsfile (Declarative Pipeline)
285
+ pipeline {
286
+ agent {
287
+ // Define agent details here
288
+ }
289
+ environment {
290
+ AWS_ACCESS_KEY_ID = credentials('jenkins-aws-secret-key-id')
291
+ AWS_SECRET_ACCESS_KEY = credentials('jenkins-aws-secret-access-key')
292
+ }
293
+ stages {
294
+ stage('Example stage 1') {
295
+ steps {
296
+ //
297
+ }
298
+ }
299
+ stage('Example stage 2') {
300
+ steps {
301
+ //
302
+ }
303
+ }
304
+ }
305
+ }
306
+ You can reference the two credential environment variables (defined in this Pipeline’s environment directive), within this stage’s steps using the syntax $AWS_ACCESS_KEY_ID and $AWS_SECRET_ACCESS_KEY. For example, here you can authenticate to AWS using the secret text credentials assigned to these credential variables. To maintain the security and anonymity of these credentials, if the job displays the value of these credential variables from within the Pipeline (such as echo $AWS_SECRET_ACCESS_KEY), Jenkins only returns the value “****” to reduce the risk of secret information being disclosed to the console output and any logs. Any sensitive information in credential IDs themselves (such as usernames) are also returned as “****” in the Pipeline run’s output. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
307
+ In this Pipeline example, the credentials assigned to the two AWS_…​ environment variables are scoped globally for the entire Pipeline, so these credential variables could also be used in this stage’s steps. If, however, the environment directive in this Pipeline were moved to a specific stage (as is the case in the Usernames and passwords Pipeline example below), then these AWS_…​ environment variables would only be scoped to the steps in that stage.
308
+ Storing static AWS keys in Jenkins credentials is not very secure. If you can run Jenkins itself in AWS (at least the agent), it is preferable to use IAM roles for a computer or EKS service account. It is also possible to use web identity federation.
309
+ Usernames and passwords
310
+ The following Pipeline code snippets show an example of how to create a Pipeline using environment variables for username and password credentials.
311
+
312
+ In this example, username and password credentials are assigned to environment variables to access a Bitbucket repository in a common account or team for your organization; these credentials would have been configured in Jenkins with the credential ID jenkins-bitbucket-common-creds.
313
+
314
+ When setting the credential environment variable in the environment directive:
315
+
316
+ environment {
317
+ BITBUCKET_COMMON_CREDS = credentials('jenkins-bitbucket-common-creds')
318
+ }
319
+ this actually sets the following three environment variables:
320
+
321
+ BITBUCKET_COMMON_CREDS - contains a username and a password separated by a colon in the format username:password.
322
+
323
+ BITBUCKET_COMMON_CREDS_USR - an additional variable containing the username component only.
324
+
325
+ BITBUCKET_COMMON_CREDS_PSW - an additional variable containing the password component only.
326
+
327
+ By convention, variable names for environment variables are typically specified in capital case, with individual words separated by underscores. You can, however, specify any legitimate variable name using lower case characters. Bear in mind that the additional environment variables created by the credentials() method (above) will always be appended with _USR and _PSW (i.e. in the format of an underscore followed by three capital letters).
328
+
329
+ The following code snippet shows the example Pipeline in its entirety:
330
+
331
+ Jenkinsfile (Declarative Pipeline)
332
+ pipeline {
333
+ agent {
334
+ // Define agent details here
335
+ }
336
+ stages {
337
+ stage('Example stage 1') {
338
+ environment {
339
+ BITBUCKET_COMMON_CREDS = credentials('jenkins-bitbucket-common-creds')
340
+ }
341
+ steps {
342
+ //
343
+ }
344
+ }
345
+ stage('Example stage 2') {
346
+ steps {
347
+ //
348
+ }
349
+ }
350
+ }
351
+ }
352
+ The following credential environment variables (defined in this Pipeline’s environment directive) are available within this stage’s steps and can be referenced using the syntax:
353
+ $BITBUCKET_COMMON_CREDS
354
+
355
+ $BITBUCKET_COMMON_CREDS_USR
356
+
357
+ $BITBUCKET_COMMON_CREDS_PSW
358
+
359
+ For example, here you can authenticate to Bitbucket with the username and password assigned to these credential variables. To maintain the security and anonymity of these credentials, if the job displays the value of these credential variables from within the Pipeline the same behavior described in the Secret text example above applies to these username and password credential variable types too. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
360
+
361
+ In this Pipeline example, the credentials assigned to the three BITBUCKET_COMMON_CREDS…​ environment variables are scoped only to Example stage 1, so these credential variables are not available for use in this Example stage 2 stage’s steps. If, however, the environment directive in this Pipeline were moved immediately within the pipeline block (as is the case in the Secret text Pipeline example above), then these BITBUCKET_COMMON_CREDS…​ environment variables would be scoped globally and could be used in any stage’s steps.
362
+ Secret files
363
+ A secret file is a credential which is stored in a file and uploaded to Jenkins. Secret files are used for credentials that are:
364
+
365
+ too unwieldy to enter directly into Jenkins, and/or
366
+
367
+ in binary format, such as a GPG file.
368
+
369
+ In this example, we use a Kubernetes config file that has been configured as a secret file credential named my-kubeconfig.
370
+
371
+ Jenkinsfile (Declarative Pipeline)
372
+ pipeline {
373
+ agent {
374
+ // Define agent details here
375
+ }
376
+ environment {
377
+ // The MY_KUBECONFIG environment variable will be assigned the value of a temporary file.
378
+ // For example:
379
+ // /home/user/.jenkins/workspace/cred_test@tmp/secretFiles/546a5cf3-9b56-4165-a0fd-19e2afe6b31f/kubeconfig.txt
380
+ MY_KUBECONFIG = credentials('my-kubeconfig')
381
+ }
382
+ stages {
383
+ stage('Example stage 1') {
384
+ steps {
385
+ sh("kubectl --kubeconfig $MY_KUBECONFIG get pods")
386
+ }
387
+ }
388
+ }
389
+ }
390
+ For other credential types
391
+ If you need to set credentials in a Pipeline for anything other than secret text, usernames and passwords, or secret files like SSH keys or certificates, use Jenkins' Snippet Generator feature, which you can access through Jenkins' classic UI.
392
+
393
+ To access the Snippet Generator for your Pipeline project/item:
394
+
395
+ From the Jenkins Dashboard, select the name of your Pipeline project/item.
396
+
397
+ In the left navigation pane, select Pipeline Syntax and ensure that the Snippet Generator option is available at the top of the navigation pane.
398
+
399
+ From the Sample Step field, choose withCredentials: Bind credentials to variables.
400
+
401
+ Under Bindings, click Add and choose from the dropdown:
402
+
403
+ SSH User Private Key - to handle SSH public/private key pair credentials, from which you can specify:
404
+
405
+ Key File Variable - the name of the environment variable that will be bound to these credentials. Jenkins actually assigns this temporary variable to the secure location of the private key file required in the SSH public/private key pair authentication process.
406
+
407
+ Passphrase Variable ( Optional ) - the name of the environment variable that will be bound to the passphrase associated with the SSH public/private key pair.
408
+
409
+ Username Variable ( Optional ) - the name of the environment variable that will be bound to username associated with the SSH public/private key pair.
410
+
411
+ Credentials - choose the SSH public/private key credentials stored in Jenkins. The value of this field is the credential ID, which Jenkins writes out to the generated snippet.
412
+
413
+ Certificate - to handle PKCS#12 certificates, from which you can specify:
414
+
415
+ Keystore Variable - the name of the environment variable that will be bound to these credentials. Jenkins actually assigns this temporary variable to the secure location of the certificate’s keystore required in the certificate authentication process.
416
+
417
+ Password Variable ( Optional ) - the name of the environment variable that will be bound to the password associated with the certificate.
418
+
419
+ Alias Variable ( Optional ) - the name of the environment variable that will be bound to the unique alias associated with the certificate.
420
+
421
+ Credentials - choose the certificate credentials stored in Jenkins. The value of this field is the credential ID, which Jenkins writes out to the generated snippet.
422
+
423
+ Docker client certificate - to handle Docker Host Certificate Authentication.
424
+
425
+ Click Generate Pipeline Script and Jenkins generates a withCredentials(…​) { …​ } Pipeline step snippet for the credentials you specified, which you can then copy and paste into your Declarative or Scripted Pipeline code.
426
+ Notes:
427
+
428
+ The Credentials fields (above) show the names of credentials configured in Jenkins. However, these values are converted to credential IDs after clicking Generate Pipeline Script.
429
+
430
+ To combine more than one credential in a single withCredentials(…​) { …​ } Pipeline step, see Combining credentials in one step (below) for details.
431
+
432
+ SSH User Private Key example
433
+
434
+ withCredentials(bindings: [sshUserPrivateKey(credentialsId: 'jenkins-ssh-key-for-abc', \
435
+ keyFileVariable: 'SSH_KEY_FOR_ABC', \
436
+ passphraseVariable: '', \
437
+ usernameVariable: '')]) {
438
+ // some block
439
+ }
440
+ The optional passphraseVariable and usernameVariable definitions can be deleted in your final Pipeline code.
441
+
442
+ Certificate example
443
+
444
+ withCredentials(bindings: [certificate(aliasVariable: '', \
445
+ credentialsId: 'jenkins-certificate-for-xyz', \
446
+ keystoreVariable: 'CERTIFICATE_FOR_XYZ', \
447
+ passwordVariable: 'XYZ-CERTIFICATE-PASSWORD')]) {
448
+ // some block
449
+ }
450
+ The optional aliasVariable and passwordVariable variable definitions can be deleted in your final Pipeline code.
451
+
452
+ The following code snippet shows an example Pipeline in its entirety, which implements the SSH User Private Key and Certificate snippets above:
453
+
454
+ Jenkinsfile (Declarative Pipeline)
455
+ pipeline {
456
+ agent {
457
+ // define agent details
458
+ }
459
+ stages {
460
+ stage('Example stage 1') {
461
+ steps {
462
+ withCredentials(bindings: [sshUserPrivateKey(credentialsId: 'jenkins-ssh-key-for-abc', \
463
+ keyFileVariable: 'SSH_KEY_FOR_ABC')]) {
464
+ //
465
+ }
466
+ withCredentials(bindings: [certificate(credentialsId: 'jenkins-certificate-for-xyz', \
467
+ keystoreVariable: 'CERTIFICATE_FOR_XYZ', \
468
+ passwordVariable: 'XYZ-CERTIFICATE-PASSWORD')]) {
469
+ //
470
+ }
471
+ }
472
+ }
473
+ stage('Example stage 2') {
474
+ steps {
475
+ //
476
+ }
477
+ }
478
+ }
479
+ }
480
+ Within this step, you can reference the credential environment variable with the syntax $SSH_KEY_FOR_ABC. For example, here you can authenticate to the ABC application with its configured SSH public/private key pair credentials, whose SSH User Private Key file is assigned to $SSH_KEY_FOR_ABC.
481
+ Within this step, you can reference the credential environment variable with the syntax $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD. For example, here you can authenticate to the XYZ application with its configured certificate credentials, whose Certificate's keystore file and password are assigned to the variables $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD, respectively.
482
+ In this Pipeline example, the credentials assigned to the $SSH_KEY_FOR_ABC, $CERTIFICATE_FOR_XYZ and $XYZ-CERTIFICATE-PASSWORD environment variables are scoped only within their respective withCredentials( …​ ) { …​ } steps, so these credential variables are not available for use in this Example stage 2 stage’s steps.
483
+ To maintain the security and anonymity of these credentials, if you attempt to retrieve the value of these credential variables from within these withCredentials( …​ ) { …​ } steps, the same behavior described in the Secret text example (above) applies to these SSH public/private key pair credential and certificate variable types too. This only reduces the risk of accidental exposure. It does not prevent a malicious user from capturing the credential value by other means. A Pipeline that uses credentials can also disclose those credentials. Don’t allow untrusted Pipeline jobs to use trusted credentials.
484
+
485
+ When using the Sample Step field’s withCredentials: Bind credentials to variables option in the Snippet Generator, only credentials which your current Pipeline project/item has access to can be selected from any Credentials field’s list. While you can manually write a withCredentials( …​ ) { …​ } step for your Pipeline (like the examples above), using the Snippet Generator is recommended to avoid specifying credentials that are out of scope for this Pipeline project/item, which when run, will make the step fail.
486
+
487
+ You can also use the Snippet Generator to generate withCredentials( …​ ) { …​ } steps to handle secret text, usernames and passwords and secret files. However, if you only need to handle these types of credentials, it is recommended you use the relevant procedure described in the section above for improved Pipeline code readability.
488
+
489
+ The use of single-quotes instead of double-quotes to define the script (the implicit parameter to sh) in Groovy above. The single-quotes will cause the secret to be expanded by the shell as an environment variable. The double-quotes are potentially less secure as the secret is interpolated by Groovy, and so typical operating system process listings will accidentally disclose it :
490
+
491
+ node {
492
+ withCredentials([string(credentialsId: 'mytoken', variable: 'TOKEN')]) {
493
+ sh /* WRONG! */ """
494
+ set +x
495
+ curl -H 'Token: $TOKEN' https://some.api/
496
+ """
497
+ sh /* CORRECT */ '''
498
+ set +x
499
+ curl -H 'Token: $TOKEN' https://some.api/
500
+ '''
501
+ }
502
+ }
503
+ Combining credentials in one step
504
+ Using the Snippet Generator, you can make multiple credentials available within a single withCredentials( …​ ) { …​ } step by doing the following:
505
+
506
+ From the Jenkins Dashboard, select the name of your Pipeline project/item.
507
+
508
+ In the left navigation pane, select Pipeline Syntax and ensure that the Snippet Generator option is available at the top of the navigation pane.
509
+
510
+ From the Sample Step field, choose withCredentials: Bind credentials to variables.
511
+
512
+ Click Add under Bindings.
513
+
514
+ Choose the credential type to add to the withCredentials( …​ ) { …​ } step from the dropdown list.
515
+
516
+ Specify the credential Bindings details. Read more about these in the procedure under For other credential types (above).
517
+
518
+ Repeat from "Click Add …​" (above) for each (set of) credential/s to add to the withCredentials( …​ ) { …​ } step.
519
+
520
+ Select Generate Pipeline Script to generate the final withCredentials( …​ ) { …​ } step snippet.
521
+
522
+ String interpolation
523
+ Jenkins Pipeline uses rules identical to Groovy for string interpolation. Groovy’s String interpolation support can be confusing to many newcomers to the language. While Groovy supports declaring a string with either single quotes, or double quotes, for example:
524
+
525
+ def singlyQuoted = 'Hello'
526
+ def doublyQuoted = "World"
527
+ Only the latter string will support the dollar-sign ($) based string interpolation, for example:
528
+
529
+ def username = 'Jenkins'
530
+ echo 'Hello Mr. ${username}'
531
+ echo "I said, Hello Mr. ${username}"
532
+ Would result in:
533
+
534
+ Hello Mr. ${username}
535
+ I said, Hello Mr. Jenkins
536
+ Understanding how to use string interpolation is vital for using some of Pipeline’s more advanced features.
537
+
538
+ Interpolation of sensitive environment variables
539
+ Groovy string interpolation should never be used with credentials.
540
+
541
+ Groovy GStrings (double-quoted or triple-double-quoted strings) expand ${TOKEN} before the command is sent to the agent. When that happens, the secret is copied into the process arguments (visible via tooling such as ps) and any shell metacharacters inside the secret or user-controlled value are executed immediately. Always send a literal command string to sh, bat, powershell, or pwsh and let the shell read secrets from its environment, which withCredentials or the environment block have already populated.
542
+
543
+ Jenkinsfile (Declarative Pipeline)
544
+ pipeline {
545
+ agent any
546
+ environment {
547
+ API_TOKEN = credentials('example-token-id')
548
+ }
549
+ stages {
550
+ stage('Example') {
551
+ steps {
552
+ /* WRONG */
553
+ sh "curl -H 'Authorization: Bearer ${API_TOKEN}' https://example.com"
554
+
555
+ }
556
+ }
557
+ }
558
+ }
559
+ Jenkinsfile (Declarative Pipeline)
560
+ pipeline {
561
+ agent any
562
+ environment {
563
+ API_TOKEN = credentials('example-token-id')
564
+ }
565
+ stages {
566
+ stage('Example') {
567
+ steps {
568
+ /* CORRECT */
569
+ sh 'curl -H "Authorization: Bearer ${API_TOKEN}" https://example.com'
570
+ }
571
+ }
572
+ }
573
+ }
574
+ Any Groovy construct that avoids interpolation (for example, sh(script: 'curl …​ $API_TOKEN', label: 'call API')) is safe; the key is keeping secrets out of GStrings so only the shell expands them.
575
+
576
+ Injection via interpolation
577
+ Groovy string interpolation can inject rogue commands into command interpreters via special characters.
578
+
579
+ Another note of caution. Using Groovy string interpolation for user-controlled variables with steps that pass their arguments to command interpreters such as the sh, bat, powershell, or pwsh steps can result in problems analogous to SQL injection. This occurs when a user-controlled variable (generally an environment variable, usually a parameter passed to the build) that contains special characters (e.g. / \ $ & % ^ > < | ;) is passed to the sh, bat, powershell, or pwsh steps using Groovy interpolation. For a simple example:
580
+
581
+ Jenkinsfile (Declarative Pipeline)
582
+ pipeline {
583
+ agent any
584
+ parameters {
585
+ string(name: 'STATEMENT', defaultValue: 'hello; ls /', description: 'What should I say?')
586
+ }
587
+ stages {
588
+ stage('Example') {
589
+ steps {
590
+ /* WRONG! */
591
+ sh("echo ${STATEMENT}")
592
+ }
593
+ }
594
+ }
595
+ }
596
+ In this example, the argument to the sh step is evaluated by Groovy, and STATEMENT is interpolated directly into the argument as if sh('echo hello; ls /') has been written in the Pipeline. When this is processed on the agent, rather than echoing the value hello; ls /, it will echo hello then proceed to list the entire root directory of the agent. Any user able to control a variable interpolated by such a step would be able to make the sh step run arbitrary code on the agent. To avoid this problem, make sure arguments to steps such as sh or bat that reference parameters or other user-controlled environment variables use single quotes to avoid Groovy interpolation.
597
+
598
+ Jenkinsfile (Declarative Pipeline)
599
+ pipeline {
600
+ agent any
601
+ parameters {
602
+ string(name: 'STATEMENT', defaultValue: 'hello; ls /', description: 'What should I say?')
603
+ }
604
+ stages {
605
+ stage('Example') {
606
+ steps {
607
+ /* CORRECT */
608
+ sh('echo ${STATEMENT}')
609
+ }
610
+ }
611
+ }
612
+ }
613
+ Credential mangling is another issue that can occur when credentials that contain special characters are passed to a step using Groovy interpolation. When the credential value is mangled, it is no longer valid and will no longer be masked in the console log.
614
+
615
+ Jenkinsfile (Declarative Pipeline)
616
+ pipeline {
617
+ agent any
618
+ environment {
619
+ EXAMPLE_KEY = credentials('example-credentials-id') // Secret value is 'sec%ret'
620
+ }
621
+ stages {
622
+ stage('Example') {
623
+ steps {
624
+ /* WRONG! */
625
+ bat "echo ${EXAMPLE_KEY}"
626
+ }
627
+ }
628
+ }
629
+ }
630
+ Here, the bat step receives echo sec%ret and the Windows batch shell will simply drop the % and print out the value secret. Because there is a single character difference, the value secret will not be masked. Though the value is not the same as the actual credential, this is still a significant exposure of sensitive information. Again, single-quotes avoids this issue.
631
+
632
+ Jenkinsfile (Declarative Pipeline)
633
+ pipeline {
634
+ agent any
635
+ environment {
636
+ EXAMPLE_KEY = credentials('example-credentials-id') // Secret value is 'sec%ret'
637
+ }
638
+ stages {
639
+ stage('Example') {
640
+ steps {
641
+ /* CORRECT */
642
+ bat 'echo %EXAMPLE_KEY%'
643
+ }
644
+ }
645
+ }
646
+ }
647
+ Handling parameters
648
+ Declarative Pipeline supports parameters out-of-the-box, allowing the Pipeline to accept user-specified parameters at runtime via the parameters directive. Configuring parameters with Scripted Pipeline is done with the properties step, which can be found in the Snippet Generator.
649
+
650
+ If you configured your pipeline to accept parameters using the Build with Parameters option, those parameters are accessible as members of the params variable.
651
+
652
+ Assuming that a String parameter named "Greeting" has been configured in the Jenkinsfile, it can access that parameter via ${params.Greeting}:
653
+
654
+ Jenkinsfile (Declarative Pipeline)
655
+ pipeline {
656
+ agent any
657
+ parameters {
658
+ string(name: 'Greeting', defaultValue: 'Hello', description: 'How should I greet the world?')
659
+ }
660
+ stages {
661
+ stage('Example') {
662
+ steps {
663
+ echo "${params.Greeting} World!"
664
+ }
665
+ }
666
+ }
667
+ }
668
+ Toggle Scripted Pipeline (Advanced)
669
+ Handling failure
670
+ Declarative Pipeline supports robust failure handling by default via its post section which allows declaring a number of different "post conditions" such as: always, unstable, success, failure, and changed. The Pipeline Syntax section provides more detail on how to use the various post conditions.
671
+
672
+ Jenkinsfile (Declarative Pipeline)
673
+ pipeline {
674
+ agent any
675
+ stages {
676
+ stage('Test') {
677
+ steps {
678
+ sh 'make check'
679
+ }
680
+ }
681
+ }
682
+ post {
683
+ always {
684
+ junit '**/target/*.xml'
685
+ }
686
+ failure {
687
+ mail to: team@example.com, subject: 'The Pipeline failed :('
688
+ }
689
+ }
690
+ }
691
+ Toggle Scripted Pipeline (Advanced)
692
+ Scripted Pipeline however relies on Groovy’s built-in try/catch/finally semantics for handling failures during execution of the Pipeline.
693
+
694
+ In the Test example above, the sh step was modified to never return a non-zero exit code (sh 'make check || true'). This approach, while valid, means the following stages need to check currentBuild.result to know if there has been a test failure or not.
695
+
696
+ An alternative way of handling this, which preserves the early-exit behavior of failures in Pipeline, while still giving junit the chance to capture test reports, is to use a series of try/finally blocks:
697
+
698
+ Error-handling steps
699
+ Jenkins Pipelines provide dedicated steps for flexible error handling, allowing you to control how your Pipeline responds to errors and warnings. These steps help you surface errors and warnings clearly in Jenkins, giving you control over whether the Pipeline fails, continues, or simply reports a warning. For more information, refer to:
700
+
701
+ catchError
702
+
703
+ error
704
+
705
+ unstable
706
+
707
+ warnError
708
+
709
+ Using multiple agents
710
+ In all the previous examples, only a single agent has been used. This means Jenkins will allocate an executor wherever one is available, regardless of how it is labeled or configured. Not only can this behavior be overridden, but Pipeline allows utilizing multiple agents in the Jenkins environment from within the same Jenkinsfile, which can be helpful for more advanced use-cases such as executing builds/tests across multiple platforms.
711
+
712
+ In the example below, the "Build" stage will be performed on one agent and the built results will be reused on two subsequent agents, labelled "linux" and "windows" respectively, during the "Test" stage.
713
+
714
+ Jenkinsfile (Declarative Pipeline)
715
+ pipeline {
716
+ agent none
717
+ stages {
718
+ stage('Build') {
719
+ agent any
720
+ steps {
721
+ checkout scm
722
+ sh 'make'
723
+ stash includes: '**/target/*.jar', name: 'app'
724
+ }
725
+ }
726
+ stage('Test on Linux') {
727
+ agent {
728
+ label 'linux'
729
+ }
730
+ steps {
731
+ unstash 'app'
732
+ sh 'make check'
733
+ }
734
+ post {
735
+ always {
736
+ junit '**/target/*.xml'
737
+ }
738
+ }
739
+ }
740
+ stage('Test on Windows') {
741
+ agent {
742
+ label 'windows'
743
+ }
744
+ steps {
745
+ unstash 'app'
746
+ bat 'make check'
747
+ }
748
+ post {
749
+ always {
750
+ junit '**/target/*.xml'
751
+ }
752
+ }
753
+ }
754
+ }
755
+ }
756
+ Toggle Scripted Pipeline (Advanced)
757
+ The stash step allows capturing files matching an inclusion pattern (**/target/*.jar) for reuse within the same Pipeline. Once the Pipeline has completed its execution, stashed files are deleted from the Jenkins controller.
758
+ The parameter in agent/node allows for any valid Jenkins label expression. Consult the Pipeline Syntax section for more details.
759
+ unstash will retrieve the named "stash" from the Jenkins controller into the Pipeline’s current workspace.
760
+ The bat script allows for executing batch scripts on Windows-based platforms.
761
+ Optional step arguments
762
+ Pipeline follows the Groovy language convention of allowing parentheses to be omitted around method arguments.
763
+
764
+ Many Pipeline steps also use the named-parameter syntax as a shorthand for creating a Map in Groovy, which uses the syntax [key1: value1, key2: value2]. Making statements like the following functionally equivalent:
765
+
766
+ git url: 'git://example.com/amazing-project.git', branch: 'master'
767
+ git([url: 'git://example.com/amazing-project.git', branch: 'master'])
768
+ For convenience, when calling steps taking only one parameter (or only one mandatory parameter), the parameter name may be omitted, for example:
769
+
770
+ sh 'echo hello' /* short form */
771
+ sh([script: 'echo hello']) /* long form */
772
+ Advanced Scripted Pipeline
773
+ Scripted Pipeline is a domain-specific language [3] based on Groovy, most Groovy syntax can be used in Scripted Pipeline without modification.
774
+
775
+ Parallel execution
776
+ The example in the section above runs tests across two different platforms in a linear series. In practice, if the make check execution takes 30 minutes to complete, the "Test" stage would now take 60 minutes to complete!
777
+
778
+ Fortunately, Pipeline has built-in functionality for executing portions of Scripted Pipeline in parallel, implemented in the aptly named parallel step.
779
+
780
+ Refactoring the example above to use the parallel step:
781
+
782
+ Jenkinsfile (Scripted Pipeline)
783
+ stage('Build') {
784
+ /* .. snip .. */
785
+ }
786
+
787
+ stage('Test') {
788
+ parallel linux: {
789
+ node('linux') {
790
+ checkout scm
791
+ try {
792
+ unstash 'app'
793
+ sh 'make check'
794
+ }
795
+ finally {
796
+ junit '**/target/*.xml'
797
+ }
798
+ }
799
+ },
800
+ windows: {
801
+ node('windows') {
802
+ /* .. snip .. */
803
+ }
804
+ }
805
+ }
806
+ Instead of executing the tests on the "linux" and "windows" labelled nodes in series, they will now execute in parallel assuming the requisite capacity exists in the Jenkins environment.
807
+
808
+ 1. en.wikipedia.org/wiki/Source_control_management
809
+ 2. en.wikipedia.org/wiki/Single_Source_of_Truth
810
+ 3. en.wikipedia.org/wiki/Domain-specific_language
data/errors/error_01.txt CHANGED
@@ -1,29 +1,29 @@
1
- Started by user Arvind Nandigam
2
- org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
- WorkflowScript: 10: expecting '}', found '' @ line 10, column 1.
4
- 1 error
5
-
6
- at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
7
- at org.codehaus.groovy.control.ErrorCollector.addFatalError(ErrorCollector.java:149)
8
- at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:119)
9
- at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:131)
10
- at org.codehaus.groovy.control.SourceUnit.addError(SourceUnit.java:349)
11
- at org.codehaus.groovy.antlr.AntlrParserPlugin.transformCSTIntoAST(AntlrParserPlugin.java:225)
12
- at org.codehaus.groovy.antlr.AntlrParserPlugin.parseCST(AntlrParserPlugin.java:191)
13
- at org.codehaus.groovy.control.SourceUnit.parse(SourceUnit.java:233)
14
- at org.codehaus.groovy.control.CompilationUnit$1.call(CompilationUnit.java:189)
15
- at org.codehaus.groovy.control.CompilationUnit.applyToSourceUnits(CompilationUnit.java:966)
16
- at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:626)
17
- at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
18
- at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
19
- at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
20
- at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
21
- at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
22
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
23
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
24
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
25
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
26
- at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
27
- at hudson.model.ResourceController.execute(ResourceController.java:101)
28
- at hudson.model.Executor.run(Executor.java:460)
29
- Finished: FAILURE
 
1
+ Started by user Arvind Nandigam
2
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
+ WorkflowScript: 10: expecting '}', found '' @ line 10, column 1.
4
+ 1 error
5
+
6
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
7
+ at org.codehaus.groovy.control.ErrorCollector.addFatalError(ErrorCollector.java:149)
8
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:119)
9
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:131)
10
+ at org.codehaus.groovy.control.SourceUnit.addError(SourceUnit.java:349)
11
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.transformCSTIntoAST(AntlrParserPlugin.java:225)
12
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.parseCST(AntlrParserPlugin.java:191)
13
+ at org.codehaus.groovy.control.SourceUnit.parse(SourceUnit.java:233)
14
+ at org.codehaus.groovy.control.CompilationUnit$1.call(CompilationUnit.java:189)
15
+ at org.codehaus.groovy.control.CompilationUnit.applyToSourceUnits(CompilationUnit.java:966)
16
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:626)
17
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
18
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
19
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
20
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
21
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
22
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
23
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
24
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
25
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
26
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
27
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
28
+ at hudson.model.Executor.run(Executor.java:460)
29
+ Finished: FAILURE
data/errors/error_02.txt CHANGED
@@ -1,29 +1,29 @@
1
- Started by user Arvind Nandigam
2
- org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
- WorkflowScript: 9: expecting '}', found '' @ line 9, column 1.
4
- 1 error
5
-
6
- at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
7
- at org.codehaus.groovy.control.ErrorCollector.addFatalError(ErrorCollector.java:149)
8
- at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:119)
9
- at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:131)
10
- at org.codehaus.groovy.control.SourceUnit.addError(SourceUnit.java:349)
11
- at org.codehaus.groovy.antlr.AntlrParserPlugin.transformCSTIntoAST(AntlrParserPlugin.java:225)
12
- at org.codehaus.groovy.antlr.AntlrParserPlugin.parseCST(AntlrParserPlugin.java:191)
13
- at org.codehaus.groovy.control.SourceUnit.parse(SourceUnit.java:233)
14
- at org.codehaus.groovy.control.CompilationUnit$1.call(CompilationUnit.java:189)
15
- at org.codehaus.groovy.control.CompilationUnit.applyToSourceUnits(CompilationUnit.java:966)
16
- at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:626)
17
- at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
18
- at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
19
- at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
20
- at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
21
- at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
22
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
23
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
24
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
25
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
26
- at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
27
- at hudson.model.ResourceController.execute(ResourceController.java:101)
28
- at hudson.model.Executor.run(Executor.java:460)
29
- Finished: FAILURE
 
1
+ Started by user Arvind Nandigam
2
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
+ WorkflowScript: 9: expecting '}', found '' @ line 9, column 1.
4
+ 1 error
5
+
6
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
7
+ at org.codehaus.groovy.control.ErrorCollector.addFatalError(ErrorCollector.java:149)
8
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:119)
9
+ at org.codehaus.groovy.control.ErrorCollector.addError(ErrorCollector.java:131)
10
+ at org.codehaus.groovy.control.SourceUnit.addError(SourceUnit.java:349)
11
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.transformCSTIntoAST(AntlrParserPlugin.java:225)
12
+ at org.codehaus.groovy.antlr.AntlrParserPlugin.parseCST(AntlrParserPlugin.java:191)
13
+ at org.codehaus.groovy.control.SourceUnit.parse(SourceUnit.java:233)
14
+ at org.codehaus.groovy.control.CompilationUnit$1.call(CompilationUnit.java:189)
15
+ at org.codehaus.groovy.control.CompilationUnit.applyToSourceUnits(CompilationUnit.java:966)
16
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:626)
17
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
18
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
19
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
20
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
21
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
22
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
23
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
24
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
25
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
26
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
27
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
28
+ at hudson.model.Executor.run(Executor.java:460)
29
+ Finished: FAILURE
data/errors/error_03.txt CHANGED
@@ -1,11 +1,11 @@
1
- Started by user Arvind Nandigam
2
- [Pipeline] Start of Pipeline
3
- [Pipeline] stage
4
- [Pipeline] { (Build)
5
- [Pipeline] sh
6
- [Pipeline] error
7
- [Pipeline] }
8
- [Pipeline] // stage
9
- [Pipeline] End of Pipeline
10
- ERROR: Attempted to execute a step that requires a node context while ‘agent none’ was specified. Be sure to specify your own ‘node { ... }’ blocks when using ‘agent none’.
11
  Finished: FAILURE
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] stage
4
+ [Pipeline] { (Build)
5
+ [Pipeline] sh
6
+ [Pipeline] error
7
+ [Pipeline] }
8
+ [Pipeline] // stage
9
+ [Pipeline] End of Pipeline
10
+ ERROR: Attempted to execute a step that requires a node context while ‘agent none’ was specified. Be sure to specify your own ‘node { ... }’ blocks when using ‘agent none’.
11
  Finished: FAILURE
data/errors/error_04.txt CHANGED
@@ -1,48 +1,48 @@
1
- Started by user Arvind Nandigam
2
- [Pipeline] Start of Pipeline
3
- [Pipeline] node
4
- Running on Jenkins in /var/jenkins_home/workspace/error_04
5
- [Pipeline] {
6
- [Pipeline] stage
7
- [Pipeline] { (Checkout)
8
- [Pipeline] git
9
- The recommended git tool is: NONE
10
- No credentials specified
11
- Cloning the remote Git repository
12
- Cloning repository https://github.com/example/repo.git
13
- > git init /var/jenkins_home/workspace/error_04 # timeout=10
14
- Fetching upstream changes from https://github.com/example/repo.git
15
- > git --version # timeout=10
16
- > git --version # 'git version 2.47.3'
17
- > git fetch --tags --force --progress -- https://github.com/example/repo.git +refs/heads/*:refs/remotes/origin/* # timeout=10
18
- ERROR: Error cloning remote repo 'origin'
19
- hudson.plugins.git.GitException: Command "git fetch --tags --force --progress -- https://github.com/example/repo.git +refs/heads/*:refs/remotes/origin/*" returned status code 128:
20
- stdout:
21
- stderr: remote: Invalid username or token. Password authentication is not supported for Git operations.
22
- fatal: Authentication failed for 'https://github.com/example/repo.git/'
23
-
24
- at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:2848)
25
- at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:2189)
26
- at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:638)
27
- at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl$2.execute(CliGitAPIImpl.java:880)
28
- at PluginClassLoader for git//hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1220)
29
- at PluginClassLoader for git//hudson.plugins.git.GitSCM._checkout(GitSCM.java:1310)
30
- at PluginClassLoader for git//hudson.plugins.git.GitSCM.checkout(GitSCM.java:1277)
31
- at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep.checkout(SCMStep.java:136)
32
- at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep$StepExecutionImpl.run(SCMStep.java:101)
33
- at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep$StepExecutionImpl.run(SCMStep.java:88)
34
- at PluginClassLoader for workflow-step-api//org.jenkinsci.plugins.workflow.steps.SynchronousNonBlockingStepExecution.lambda$start$0(SynchronousNonBlockingStepExecution.java:49)
35
- at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
36
- at java.base/java.util.concurrent.FutureTask.run(Unknown Source)
37
- at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
38
- at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
39
- at java.base/java.lang.Thread.run(Unknown Source)
40
- ERROR: Error cloning remote repo 'origin'
41
- ERROR: Maximum checkout retry attempts reached, aborting
42
- [Pipeline] }
43
- [Pipeline] // stage
44
- [Pipeline] }
45
- [Pipeline] // node
46
- [Pipeline] End of Pipeline
47
- ERROR: Error cloning remote repo 'origin'
48
- Finished: FAILURE
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] node
4
+ Running on Jenkins in /var/jenkins_home/workspace/error_04
5
+ [Pipeline] {
6
+ [Pipeline] stage
7
+ [Pipeline] { (Checkout)
8
+ [Pipeline] git
9
+ The recommended git tool is: NONE
10
+ No credentials specified
11
+ Cloning the remote Git repository
12
+ Cloning repository https://github.com/example/repo.git
13
+ > git init /var/jenkins_home/workspace/error_04 # timeout=10
14
+ Fetching upstream changes from https://github.com/example/repo.git
15
+ > git --version # timeout=10
16
+ > git --version # 'git version 2.47.3'
17
+ > git fetch --tags --force --progress -- https://github.com/example/repo.git +refs/heads/*:refs/remotes/origin/* # timeout=10
18
+ ERROR: Error cloning remote repo 'origin'
19
+ hudson.plugins.git.GitException: Command "git fetch --tags --force --progress -- https://github.com/example/repo.git +refs/heads/*:refs/remotes/origin/*" returned status code 128:
20
+ stdout:
21
+ stderr: remote: Invalid username or token. Password authentication is not supported for Git operations.
22
+ fatal: Authentication failed for 'https://github.com/example/repo.git/'
23
+
24
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:2848)
25
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:2189)
26
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:638)
27
+ at PluginClassLoader for git-client//org.jenkinsci.plugins.gitclient.CliGitAPIImpl$2.execute(CliGitAPIImpl.java:880)
28
+ at PluginClassLoader for git//hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1220)
29
+ at PluginClassLoader for git//hudson.plugins.git.GitSCM._checkout(GitSCM.java:1310)
30
+ at PluginClassLoader for git//hudson.plugins.git.GitSCM.checkout(GitSCM.java:1277)
31
+ at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep.checkout(SCMStep.java:136)
32
+ at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep$StepExecutionImpl.run(SCMStep.java:101)
33
+ at PluginClassLoader for workflow-scm-step//org.jenkinsci.plugins.workflow.steps.scm.SCMStep$StepExecutionImpl.run(SCMStep.java:88)
34
+ at PluginClassLoader for workflow-step-api//org.jenkinsci.plugins.workflow.steps.SynchronousNonBlockingStepExecution.lambda$start$0(SynchronousNonBlockingStepExecution.java:49)
35
+ at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
36
+ at java.base/java.util.concurrent.FutureTask.run(Unknown Source)
37
+ at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
38
+ at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
39
+ at java.base/java.lang.Thread.run(Unknown Source)
40
+ ERROR: Error cloning remote repo 'origin'
41
+ ERROR: Maximum checkout retry attempts reached, aborting
42
+ [Pipeline] }
43
+ [Pipeline] // stage
44
+ [Pipeline] }
45
+ [Pipeline] // node
46
+ [Pipeline] End of Pipeline
47
+ ERROR: Error cloning remote repo 'origin'
48
+ Finished: FAILURE
data/errors/error_05.txt CHANGED
@@ -1,16 +1,16 @@
1
- Started by user Arvind Nandigam
2
- [Pipeline] Start of Pipeline
3
- [Pipeline] node
4
- Running on Jenkins in /var/jenkins_home/workspace/error_05
5
- [Pipeline] {
6
- [Pipeline] stage
7
- [Pipeline] { (Auth)
8
- [Pipeline] withCredentials
9
- [Pipeline] // withCredentials
10
- [Pipeline] }
11
- [Pipeline] // stage
12
- [Pipeline] }
13
- [Pipeline] // node
14
- [Pipeline] End of Pipeline
15
- ERROR: Could not find credentials entry with ID 'does-not-exist'
16
  Finished: FAILURE
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] node
4
+ Running on Jenkins in /var/jenkins_home/workspace/error_05
5
+ [Pipeline] {
6
+ [Pipeline] stage
7
+ [Pipeline] { (Auth)
8
+ [Pipeline] withCredentials
9
+ [Pipeline] // withCredentials
10
+ [Pipeline] }
11
+ [Pipeline] // stage
12
+ [Pipeline] }
13
+ [Pipeline] // node
14
+ [Pipeline] End of Pipeline
15
+ ERROR: Could not find credentials entry with ID 'does-not-exist'
16
  Finished: FAILURE
data/errors/error_06.txt CHANGED
@@ -1,10 +1,10 @@
1
- Started by user Arvind Nandigam
2
- [Pipeline] Start of Pipeline
3
- [Pipeline] node
4
- Still waiting to schedule task
5
- ‘Jenkins’ doesn’t have label ‘no-such-node’
6
- Aborted by Arvind Nandigam
7
- [Pipeline] // node
8
- [Pipeline] End of Pipeline
9
- org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: db6e2408-0d72-4f17-a842-4d2ecce38130
10
  Finished: ABORTED
 
1
+ Started by user Arvind Nandigam
2
+ [Pipeline] Start of Pipeline
3
+ [Pipeline] node
4
+ Still waiting to schedule task
5
+ ‘Jenkins’ doesn’t have label ‘no-such-node’
6
+ Aborted by Arvind Nandigam
7
+ [Pipeline] // node
8
+ [Pipeline] End of Pipeline
9
+ org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: db6e2408-0d72-4f17-a842-4d2ecce38130
10
  Finished: ABORTED
data/errors/error_07.txt CHANGED
@@ -1,25 +1,25 @@
1
- Started by user Arvind Nandigam
2
-
3
- org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
4
- WorkflowScript: 6: Expected a step @ line 6, column 17.
5
- def x = 1
6
- ^
7
-
8
- 1 error
9
-
10
- at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
11
- at org.codehaus.groovy.control.CompilationUnit.applyToPrimaryClassNodes(CompilationUnit.java:1107)
12
- at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:624)
13
- at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
14
- at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
15
- at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
16
- at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
17
- at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
18
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
19
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
20
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
21
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
22
- at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
23
- at hudson.model.ResourceController.execute(ResourceController.java:101)
24
- at hudson.model.Executor.run(Executor.java:460)
25
- Finished: FAILURE
 
1
+ Started by user Arvind Nandigam
2
+
3
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
4
+ WorkflowScript: 6: Expected a step @ line 6, column 17.
5
+ def x = 1
6
+ ^
7
+
8
+ 1 error
9
+
10
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
11
+ at org.codehaus.groovy.control.CompilationUnit.applyToPrimaryClassNodes(CompilationUnit.java:1107)
12
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:624)
13
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
14
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
15
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
16
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
17
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
18
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
19
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
20
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
21
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
22
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
23
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
24
+ at hudson.model.Executor.run(Executor.java:460)
25
+ Finished: FAILURE
data/errors/error_08.txt CHANGED
@@ -1,24 +1,24 @@
1
- Started by user Arvind Nandigam
2
- org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
- WorkflowScript: 3: Invalid agent type "docker" specified. Must be one of [any, label, none] @ line 3, column 9.
4
- docker {
5
- ^
6
-
7
- 1 error
8
-
9
- at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
10
- at org.codehaus.groovy.control.CompilationUnit.applyToPrimaryClassNodes(CompilationUnit.java:1107)
11
- at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:624)
12
- at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
13
- at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
14
- at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
15
- at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
16
- at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
17
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
18
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
19
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
20
- at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
21
- at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
22
- at hudson.model.ResourceController.execute(ResourceController.java:101)
23
- at hudson.model.Executor.run(Executor.java:460)
24
- Finished: FAILURE
 
1
+ Started by user Arvind Nandigam
2
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
3
+ WorkflowScript: 3: Invalid agent type "docker" specified. Must be one of [any, label, none] @ line 3, column 9.
4
+ docker {
5
+ ^
6
+
7
+ 1 error
8
+
9
+ at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:309)
10
+ at org.codehaus.groovy.control.CompilationUnit.applyToPrimaryClassNodes(CompilationUnit.java:1107)
11
+ at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:624)
12
+ at org.codehaus.groovy.control.CompilationUnit.processPhaseOperations(CompilationUnit.java:602)
13
+ at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:579)
14
+ at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:323)
15
+ at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:293)
16
+ at PluginClassLoader for script-security//org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.GroovySandbox$Scope.parse(GroovySandbox.java:162)
17
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.doParse(CpsGroovyShell.java:202)
18
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsGroovyShell.reparse(CpsGroovyShell.java:186)
19
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.parseScript(CpsFlowExecution.java:669)
20
+ at PluginClassLoader for workflow-cps//org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.start(CpsFlowExecution.java:615)
21
+ at PluginClassLoader for workflow-job//org.jenkinsci.plugins.workflow.job.WorkflowRun.run(WorkflowRun.java:341)
22
+ at hudson.model.ResourceController.execute(ResourceController.java:101)
23
+ at hudson.model.Executor.run(Executor.java:460)
24
+ Finished: FAILURE
demo_langchain_rag.py ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # demo_langchain_rag.py
2
+ # Demo script showcasing LangChain and RAG for internship interview
3
+ # Run: python demo_langchain_rag.py
4
+
5
+ import json
6
+ from langchain_rag import JenkinsRAGChain, get_rag_chain
7
+
8
+ def print_section(title):
9
+ print("\n" + "=" * 60)
10
+ print(f" {title}")
11
+ print("=" * 60)
12
+
13
+ def main():
14
+ print_section("LangChain + RAG Demo for Textify.ai Internship")
15
+ print("\nThis demo showcases:")
16
+ print(" • LangChain framework for building LLM chains")
17
+ print(" • RAG (Retrieval-Augmented Generation) pattern")
18
+ print(" • Vector search with FAISS + HuggingFace embeddings")
19
+ print(" • Integration with Jenkins documentation")
20
+
21
+ print_section("1. Initializing LangChain RAG Chain")
22
+ print("Loading Jenkins documentation...")
23
+ print("Creating vector embeddings with paraphrase-MiniLM-L3-v2...")
24
+ print("Setting up RetrievalQA chain with FLAN-T5...")
25
+
26
+ rag = JenkinsRAGChain()
27
+ print("\n✓ RAG Chain initialized successfully!")
28
+ print(f" - Documents indexed: {len(rag.documents)}")
29
+ print(f" - Embedding model: {rag.embeddings.model_name}")
30
+ print(f" - LLM: {rag.llm.repo_id}")
31
+
32
+ test_errors = [
33
+ {
34
+ "name": "Groovy Syntax Error",
35
+ "log": """
36
+ Started by user admin
37
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
38
+ WorkflowScript: 10: expecting '}', found '' @ line 10, column 1.
39
+ 1 error
40
+ at org.codehaus.groovy.control.ErrorNode.accept(ErrorNode.java:36)
41
+ """
42
+ },
43
+ {
44
+ "name": "Missing Agent Error",
45
+ "log": """
46
+ [Pipeline] node
47
+ Running on in /var/jenkins/workspace/test
48
+ [Pipeline] {
49
+ java.lang.IllegalStateException: agent none is specified, but no stage has an agent assigned
50
+ at org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.initialize(CpsFlowExecution.java:123)
51
+ """
52
+ },
53
+ {
54
+ "name": "Missing Plugin Error",
55
+ "log": """
56
+ No such DSL method 'dockerBuild'
57
+ Available DSL methods:
58
+ archive
59
+ bat
60
+ build
61
+ checkout
62
+ deleteDir
63
+ dir
64
+ echo
65
+ emailext
66
+ fileExists
67
+ """
68
+ }
69
+ ]
70
+
71
+ for i, test in enumerate(test_errors, 1):
72
+ print_section(f"2.{i} Testing: {test['name']}")
73
+ print(f"\nInput Error Log:\n{test['log'][:200]}...")
74
+
75
+ result = rag.explain_error(test['log'])
76
+
77
+ print(f"\n--- Result ---")
78
+ print(f"Error Category: {result['error_category']}")
79
+ print(f"\nLLM Explanation (from RAG):")
80
+ print(result['llm_explanation'])
81
+ print(f"\n[Metadata]")
82
+ print(f" Retrieval: {result['retrieval_source']}")
83
+ print(f" LLM Model: {result['model_used']}")
84
+ print(f" Embeddings: {result['embedding_model']}")
85
+
86
+ print_section("3. LangChain Architecture Summary")
87
+ print("""
88
+ ┌─────────────────────────────────────────────────────────────┐
89
+ │ LangChain RAG Pipeline │
90
+ ├─────────────────────────────────────────────────────────────┤
91
+ │ │
92
+ │ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
93
+ │ │ User Error │───▶│ Retriever │───▶│ LLM │ │
94
+ │ │ Log │ │ (FAISS) │ │ (FLAN-T5) │ │
95
+ │ └──────────────┘ └──────────────┘ └──────────────┘ │
96
+ │ │ │ │ │
97
+ │ ▼ ▼ ▼ │
98
+ │ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
99
+ │ │ Feature │ │ Context │ │ Generated │ │
100
+ │ │ Extraction │ │ (Docs + │ │ Explanation│ │
101
+ │ │ │ │ Prompt) │ │ │ │
102
+ │ └──────────────┘ └──────────────┘ └──────────────┘ │
103
+ │ │
104
+ └─────────────────────────────────────────────────────────────┘
105
+
106
+ Key Components:
107
+ • LangChain Core: Document, Prompt, OutputParser
108
+ • LangChain Community: FAISS vectorstore
109
+ • LangChain HuggingFace: Embeddings & LLM integration
110
+ • RetrievalQA: End-to-end RAG chain
111
+ """)
112
+
113
+ print_section("4. API Endpoint Demo")
114
+ print("""
115
+ POST /explain-rag
116
+ {
117
+ "log_text": "org.codehaus.groovy.control.MultipleCompilationErrorsException..."
118
+ }
119
+
120
+ Response:
121
+ {
122
+ "error_category": "groovy_syntax_error",
123
+ "llm_explanation": "The error indicates...",
124
+ "retrieval_source": "LangChain RAG (FAISS + HuggingFace)",
125
+ "model_used": "google/flan-t5-base",
126
+ "embedding_model": "paraphrase-MiniLM-L3-v2"
127
+ }
128
+ """)
129
+
130
+ print("\n" + "=" * 60)
131
+ print(" Demo Complete!")
132
+ print("=" * 60)
133
+ print("""
134
+ To run the API server:
135
+ uvicorn main:app --reload
136
+
137
+ To test the /explain-rag endpoint:
138
+ curl -X POST http://localhost:8000/explain-rag \\
139
+ -H "Content-Type: application/json" \\
140
+ -d '{"log_text": "your error log here"}'
141
+ """)
142
+
143
+ if __name__ == "__main__":
144
+ main()
error_taxonomy.py CHANGED
@@ -1,49 +1,49 @@
1
- # error_taxonomy.py
2
-
3
- ERROR_CATEGORIES = {
4
- # Groovy / pipeline syntax
5
- "groovy_syntax_error": [
6
- r"MultipleCompilationErrorsException",
7
- r"expecting '\}'",
8
- r"WorkflowScript"
9
- ],
10
-
11
- # Agent / executor issues
12
- "missing_agent": [
13
- r"requires a node context",
14
- r"agent none"
15
- ],
16
-
17
- "no_node_available": [
18
- r"There are no nodes with the label",
19
- r"doesn’t have label",
20
- r"does not have label",
21
- r"Still waiting to schedule task"
22
- ],
23
-
24
- # SCM / Git related
25
- "git_authentication_error": [
26
- r"Authentication failed",
27
- r"Invalid username or token",
28
- r"Error cloning remote repo"
29
- ],
30
-
31
- # Jenkins credentials system
32
- "missing_credentials": [
33
- r"Credentials .* not found",
34
- r"Could not find credentials entry with ID"
35
- ],
36
-
37
- # Plugin / DSL
38
- "missing_plugin": [
39
- r"No such DSL method",
40
- r"No such step"
41
- ],
42
-
43
- # File system
44
- "file_not_found": [
45
- r"No such file or directory",
46
- r"cannot open",
47
- r"script returned exit code 1"
48
- ]
49
- }
 
1
# error_taxonomy.py
# Regex signatures used to classify Jenkins build-log failures.
# Keys are category names consumed by extract_error_features.py; a log is
# assigned a category when ANY one of its patterns matches. Dict order
# matters to consumers that treat earlier entries as more specific.

ERROR_CATEGORIES = {
    # Groovy / pipeline syntax
    "groovy_syntax_error": [
        r"MultipleCompilationErrorsException",
        r"expecting '\}'",
        r"WorkflowScript"
    ],

    # Agent / executor issues
    "missing_agent": [
        r"requires a node context",
        r"agent none"
    ],

    # Both the curly ("doesn’t") and plain ("does not") spellings appear in
    # real Jenkins output, so both are listed.
    "no_node_available": [
        r"There are no nodes with the label",
        r"doesn’t have label",
        r"does not have label",
        r"Still waiting to schedule task"
    ],

    # SCM / Git related
    "git_authentication_error": [
        r"Authentication failed",
        r"Invalid username or token",
        r"Error cloning remote repo"
    ],

    # Jenkins credentials system
    "missing_credentials": [
        r"Credentials .* not found",
        r"Could not find credentials entry with ID"
    ],

    # Plugin / DSL
    "missing_plugin": [
        r"No such DSL method",
        r"No such step"
    ],

    # File system
    # NOTE(review): "script returned exit code 1" matches ANY failing shell
    # step, not just missing files — consider narrowing this pattern.
    "file_not_found": [
        r"No such file or directory",
        r"cannot open",
        r"script returned exit code 1"
    ]
    }
explain_error.py CHANGED
@@ -1,126 +1,126 @@
1
- # explain_error.py
2
-
3
- from extract_error_features import extract_error_features
4
- from retrieve_docs import retrieve_docs
5
-
6
- SYSTEM_PROMPT = """
7
- You are a Jenkins CI/CD expert.
8
-
9
- Explain the Jenkins error using ONLY the provided documentation context.
10
- If the documentation does not clearly explain the error, say so explicitly.
11
-
12
- Output format:
13
-
14
- Error Summary:
15
- <short explanation>
16
-
17
- Likely Causes:
18
- - cause 1
19
- - cause 2
20
-
21
- Relevant Documentation:
22
- - link or source file
23
- """
24
-
25
- def explain_error(log_text: str):
26
- features = extract_error_features(log_text)
27
- category = features["category"]
28
-
29
- retrieved = retrieve_docs(category)
30
-
31
- explanation = {
32
- "error_category": category,
33
- "summary": "",
34
- "likely_causes": [],
35
- "references": []
36
- }
37
-
38
- # Heuristic explanation (v1, deterministic)
39
- if category == "groovy_syntax_error":
40
- explanation["summary"] = (
41
- "The pipeline failed due to a Groovy syntax error, "
42
- "most likely caused by an invalid or incomplete Jenkinsfile."
43
- )
44
- explanation["likely_causes"] = [
45
- "Missing or mismatched braces in the Jenkinsfile",
46
- "Invalid declarative pipeline structure"
47
- ]
48
-
49
- elif category == "missing_agent":
50
- explanation["summary"] = (
51
- "The pipeline attempted to execute a step that requires a node, "
52
- "but no agent was allocated."
53
- )
54
- explanation["likely_causes"] = [
55
- "Using 'agent none' without defining a stage-level agent",
56
- "Executing node-dependent steps without a workspace"
57
- ]
58
-
59
- elif category == "no_node_available":
60
- explanation["summary"] = (
61
- "The pipeline could not be scheduled because no Jenkins node "
62
- "matched the requested label."
63
- )
64
- explanation["likely_causes"] = [
65
- "The specified node label does not exist",
66
- "All matching nodes are offline or busy"
67
- ]
68
-
69
- elif category == "missing_plugin":
70
- explanation["summary"] = (
71
- "The pipeline referenced a step that is not available, "
72
- "indicating a missing or uninstalled plugin."
73
- )
74
- explanation["likely_causes"] = [
75
- "Required plugin is not installed",
76
- "Incorrect step name in the Jenkinsfile"
77
- ]
78
-
79
- elif category == "missing_credentials":
80
- explanation["summary"] = (
81
- "The pipeline referenced credentials that do not exist in Jenkins."
82
- )
83
- explanation["likely_causes"] = [
84
- "Credentials ID is incorrect or misspelled",
85
- "Credentials were not configured in Jenkins"
86
- ]
87
-
88
- elif category == "file_not_found":
89
- explanation["summary"] = (
90
- "The pipeline attempted to access a file that does not exist "
91
- "in the workspace."
92
- )
93
- explanation["likely_causes"] = [
94
- "File path is incorrect",
95
- "File was not generated or checked out"
96
- ]
97
-
98
- elif category == "git_authentication_error":
99
- explanation["summary"] = (
100
- "Jenkins failed to authenticate with the Git repository during checkout."
101
- )
102
- explanation["likely_causes"] = [
103
- "Invalid or missing Git credentials",
104
- "Repository requires token-based authentication"
105
- ]
106
-
107
- else:
108
- explanation["summary"] = (
109
- "The error could not be confidently explained using the available documentation."
110
- )
111
-
112
- for r in retrieved:
113
- explanation["references"].append(
114
- f"{r['meta']['source_file']} ({r['meta']['source']})"
115
- )
116
- if not retrieved:
117
- explanation["summary"] = (
118
- "No relevant Jenkins documentation was found for this error. "
119
- "The error may be plugin-specific or outside the current scope."
120
- )
121
- explanation["likely_causes"] = []
122
- explanation["references"] = []
123
-
124
- return explanation
125
-
126
-
 
1
+ # explain_error.py
2
+
3
+ from extract_error_features import extract_error_features
4
+ from retrieve_docs import retrieve_docs
5
+
6
# Prompt template for an LLM-backed explainer.
# NOTE(review): SYSTEM_PROMPT is not referenced anywhere in the visible code
# of this file — explain_error() below is purely heuristic. Confirm whether
# this constant is used elsewhere or is dead code.
SYSTEM_PROMPT = """
You are a Jenkins CI/CD expert.

Explain the Jenkins error using ONLY the provided documentation context.
If the documentation does not clearly explain the error, say so explicitly.

Output format:

Error Summary:
<short explanation>

Likely Causes:
- cause 1
- cause 2

Relevant Documentation:
- link or source file
"""
24
+
25
def explain_error(log_text: str):
    """Produce a deterministic, documentation-grounded explanation of a log.

    Classifies the log with extract_error_features, fetches matching doc
    chunks with retrieve_docs, and returns a dict with keys:
    error_category, summary, likely_causes, references.
    """
    category = extract_error_features(log_text)["category"]
    retrieved = retrieve_docs(category)

    # Canned (non-LLM) explanations, keyed by taxonomy category.
    _EXPLANATIONS = {
        "groovy_syntax_error": (
            "The pipeline failed due to a Groovy syntax error, "
            "most likely caused by an invalid or incomplete Jenkinsfile.",
            [
                "Missing or mismatched braces in the Jenkinsfile",
                "Invalid declarative pipeline structure",
            ],
        ),
        "missing_agent": (
            "The pipeline attempted to execute a step that requires a node, "
            "but no agent was allocated.",
            [
                "Using 'agent none' without defining a stage-level agent",
                "Executing node-dependent steps without a workspace",
            ],
        ),
        "no_node_available": (
            "The pipeline could not be scheduled because no Jenkins node "
            "matched the requested label.",
            [
                "The specified node label does not exist",
                "All matching nodes are offline or busy",
            ],
        ),
        "missing_plugin": (
            "The pipeline referenced a step that is not available, "
            "indicating a missing or uninstalled plugin.",
            [
                "Required plugin is not installed",
                "Incorrect step name in the Jenkinsfile",
            ],
        ),
        "missing_credentials": (
            "The pipeline referenced credentials that do not exist in Jenkins.",
            [
                "Credentials ID is incorrect or misspelled",
                "Credentials were not configured in Jenkins",
            ],
        ),
        "file_not_found": (
            "The pipeline attempted to access a file that does not exist "
            "in the workspace.",
            [
                "File path is incorrect",
                "File was not generated or checked out",
            ],
        ),
        "git_authentication_error": (
            "Jenkins failed to authenticate with the Git repository during checkout.",
            [
                "Invalid or missing Git credentials",
                "Repository requires token-based authentication",
            ],
        ),
    }

    summary, causes = _EXPLANATIONS.get(
        category,
        (
            "The error could not be confidently explained using the available documentation.",
            [],
        ),
    )

    explanation = {
        "error_category": category,
        "summary": summary,
        "likely_causes": list(causes),
        "references": [
            f"{r['meta']['source_file']} ({r['meta']['source']})" for r in retrieved
        ],
    }

    # Without any documentation hits we refuse to guess.
    if not retrieved:
        explanation["summary"] = (
            "No relevant Jenkins documentation was found for this error. "
            "The error may be plugin-specific or outside the current scope."
        )
        explanation["likely_causes"] = []
        explanation["references"] = []

    return explanation
125
+
126
+
extract_error_features.py CHANGED
@@ -1,24 +1,24 @@
1
- import re
2
- from error_taxonomy import ERROR_CATEGORIES
3
-
4
- COMPILED_PATTERNS = {
5
- cat: [re.compile(p) for p in pats]
6
- for cat, pats in ERROR_CATEGORIES.items()
7
- }
8
-
9
- def extract_error_features(log_text: str) -> dict:
10
- result = {
11
- "category": "unknown",
12
- "matched_signals": [],
13
- "line_numbers": []
14
- }
15
-
16
- for category, patterns in COMPILED_PATTERNS.items():
17
- for pattern in patterns:
18
- if pattern.search(log_text):
19
- result["category"] = category
20
- result["matched_signals"].append(pattern.pattern)
21
-
22
- result["line_numbers"] = re.findall(r"line (\d+)", log_text)
23
-
24
- return result
 
1
+ import re
2
+ from error_taxonomy import ERROR_CATEGORIES
3
+
4
+ COMPILED_PATTERNS = {
5
+ cat: [re.compile(p) for p in pats]
6
+ for cat, pats in ERROR_CATEGORIES.items()
7
+ }
8
+
9
def extract_error_features(log_text: str) -> dict:
    """Classify a Jenkins log against the error taxonomy.

    Returns a dict with:
      - category: the FIRST taxonomy category (in ERROR_CATEGORIES order)
        with a matching pattern, or "unknown" if none match
      - matched_signals: every matching pattern, from any category
      - line_numbers: numbers captured from "line N" mentions in the log
    """
    result = {
        "category": "unknown",
        "matched_signals": [],
        "line_numbers": []
    }

    for category, patterns in COMPILED_PATTERNS.items():
        for pattern in patterns:
            if pattern.search(log_text):
                # First match wins. Previously every match overwrote the
                # category, so a generic late pattern (e.g. "script returned
                # exit code 1") could clobber a specific earlier match such
                # as a Groovy compilation error.
                if result["category"] == "unknown":
                    result["category"] = category
                result["matched_signals"].append(pattern.pattern)

    result["line_numbers"] = re.findall(r"line (\d+)", log_text)

    return result
ingest_docs.py CHANGED
@@ -1,54 +1,54 @@
1
- # ingest_docs.py
2
-
3
- import os
4
- import json
5
- import faiss
6
- from sentence_transformers import SentenceTransformer
7
-
8
- RAW_DOCS_DIR = "data/docs/raw"
9
- INDEX_PATH = "data/docs/docs.index"
10
- META_PATH = "data/docs/docs_meta.json"
11
-
12
- CHUNK_SIZE = 400
13
-
14
- def chunk_text(text, size):
15
- chunks = []
16
- for i in range(0, len(text), size):
17
- chunk = text[i:i+size].strip()
18
- if chunk:
19
- chunks.append(chunk)
20
- return chunks
21
-
22
- def main():
23
- model = SentenceTransformer("paraphrase-MiniLM-L3-v2", cache_folder="./model_cache")
24
-
25
- documents = []
26
- metadata = []
27
-
28
- for fname in os.listdir(RAW_DOCS_DIR):
29
- path = os.path.join(RAW_DOCS_DIR, fname)
30
- with open(path, "r", encoding="utf-8") as f:
31
- text = f.read()
32
-
33
- chunks = chunk_text(text, CHUNK_SIZE)
34
- for chunk in chunks:
35
- documents.append(chunk)
36
- metadata.append({
37
- "source_file": fname,
38
- "source": f"https://www.jenkins.io/doc/"
39
- })
40
-
41
- embeddings = model.encode(documents)
42
- index = faiss.IndexFlatL2(embeddings.shape[1])
43
- index.add(embeddings)
44
-
45
- os.makedirs("data/docs", exist_ok=True)
46
- faiss.write_index(index, INDEX_PATH)
47
-
48
- with open(META_PATH, "w", encoding="utf-8") as f:
49
- json.dump(metadata, f, indent=2)
50
-
51
- print(f"Ingested {len(documents)} document chunks.")
52
-
53
- if __name__ == "__main__":
54
- main()
 
1
+ # ingest_docs.py
2
+
3
+ import os
4
+ import json
5
+ import faiss
6
+ from sentence_transformers import SentenceTransformer
7
+
8
+ RAW_DOCS_DIR = "data/docs/raw"
9
+ INDEX_PATH = "data/docs/docs.index"
10
+ META_PATH = "data/docs/docs_meta.json"
11
+
12
+ CHUNK_SIZE = 400
13
+
14
+ def chunk_text(text, size):
15
+ chunks = []
16
+ for i in range(0, len(text), size):
17
+ chunk = text[i:i+size].strip()
18
+ if chunk:
19
+ chunks.append(chunk)
20
+ return chunks
21
+
22
def main():
    """Build the FAISS index and metadata file from raw Jenkins docs.

    Reads every regular file in RAW_DOCS_DIR, splits it into CHUNK_SIZE
    character chunks, embeds the chunks, and writes the FAISS index plus a
    parallel JSON metadata list (one entry per chunk, same order as the
    index rows).
    """
    model = SentenceTransformer("paraphrase-MiniLM-L3-v2", cache_folder="./model_cache")

    documents = []
    metadata = []

    # sorted() makes the index/metadata ordering deterministic across runs.
    for fname in sorted(os.listdir(RAW_DOCS_DIR)):
        path = os.path.join(RAW_DOCS_DIR, fname)
        if not os.path.isfile(path):
            # Skip stray subdirectories so open() below cannot fail.
            continue
        with open(path, "r", encoding="utf-8") as f:
            text = f.read()

        for chunk in chunk_text(text, CHUNK_SIZE):
            documents.append(chunk)
            metadata.append({
                "source_file": fname,
                # Store the chunk text so retrieval can return it alongside
                # the metadata (the FAISS index itself only stores vectors).
                "text": chunk,
                "source": "https://www.jenkins.io/doc/"  # plain string; was a no-op f-string
            })

    if not documents:
        # Without this guard, embeddings.shape[1] below raises IndexError.
        raise SystemExit(f"No documentation files found in {RAW_DOCS_DIR}; nothing to index.")

    embeddings = model.encode(documents)
    index = faiss.IndexFlatL2(embeddings.shape[1])
    index.add(embeddings)

    os.makedirs("data/docs", exist_ok=True)
    faiss.write_index(index, INDEX_PATH)

    with open(META_PATH, "w", encoding="utf-8") as f:
        json.dump(metadata, f, indent=2)

    print(f"Ingested {len(documents)} document chunks.")
+
53
+ if __name__ == "__main__":
54
+ main()
langchain_rag.py ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # langchain_rag.py
2
+ # LangChain-based RAG pipeline for Jenkins error explanation
3
+
4
+ import os
5
+ import json
6
+ import tempfile
7
+ from typing import List, Dict, Any
8
+
9
+ from langchain_core.documents import Document
10
+ from langchain_core.prompts import PromptTemplate
11
+ from langchain_core.output_parsers import StrOutputParser
12
+ from langchain_huggingface import HuggingFaceEmbeddings
13
+ from langchain_community.vectorstores import FAISS
14
+ from langchain_community.llms import HuggingFaceHub
15
+ from langchain.chains import RetrievalQA
16
+
17
+ from extract_error_features import extract_error_features
18
+
19
+ RAW_DOCS_DIR = "data/docs/raw"
20
+ CHUNK_SIZE = 400
21
+
22
def load_raw_docs() -> List[Document]:
    """Load raw Jenkins documentation files and convert to LangChain Documents."""
    docs: List[Document] = []

    for fname in os.listdir(RAW_DOCS_DIR):
        with open(os.path.join(RAW_DOCS_DIR, fname), "r", encoding="utf-8") as handle:
            raw_text = handle.read()

        # One Document per chunk; each gets its own metadata dict.
        docs.extend(
            Document(
                page_content=piece,
                metadata={
                    "source_file": fname,
                    "source": "https://www.jenkins.io/doc/"
                }
            )
            for piece in chunk_text(raw_text, CHUNK_SIZE)
        )

    return docs
42
+
43
def chunk_text(text: str, size: int) -> List[str]:
    """Split *text* into stripped, non-empty chunks of at most *size* chars."""
    pieces = (text[start:start + size].strip() for start in range(0, len(text), size))
    return [piece for piece in pieces if piece]
51
+
52
+ class JenkinsRAGChain:
53
+ """LangChain-based RAG chain for Jenkins error explanation."""
54
+
55
+ def __init__(self):
56
+ self.embeddings = HuggingFaceEmbeddings(
57
+ model_name="sentence-transformers/paraphrase-MiniLM-L3-v2",
58
+ model_kwargs={'device': 'cpu'}
59
+ )
60
+
61
+ self.documents = load_raw_docs()
62
+ self.vectorstore = FAISS.from_documents(
63
+ self.documents,
64
+ self.embeddings
65
+ )
66
+
67
+ self.retriever = self.vectorstore.as_retriever(
68
+ search_kwargs={"k": 5}
69
+ )
70
+
71
+ self.llm = HuggingFaceHub(
72
+ repo_id="google/flan-t5-base",
73
+ model_kwargs={"temperature": 0.3, "max_new_tokens": 256}
74
+ )
75
+
76
+ self.prompt = PromptTemplate(
77
+ template="""You are a Jenkins CI/CD expert. Use the following context from
78
+ official Jenkins documentation to explain the error.
79
+
80
+ Context from Jenkins documentation:
81
+ {context}
82
+
83
+ Error log to explain:
84
+ {question}
85
+
86
+ Provide a clear explanation with:
87
+ 1. Error Summary
88
+ 2. Likely Causes
89
+ 3. Relevant Documentation links
90
+
91
+ If the documentation doesn't cover this error, say so explicitly.""",
92
+ input_variables=["context", "question"]
93
+ )
94
+
95
+ self.qa_chain = RetrievalQA.from_chain_type(
96
+ llm=self.llm,
97
+ chain_type="stuff",
98
+ retriever=self.retriever,
99
+ chain_type_kwargs={"prompt": self.prompt},
100
+ output_parser=StrOutputParser()
101
+ )
102
+
103
+ def explain_error(self, log_text: str) -> Dict[str, Any]:
104
+ """Explain a Jenkins error using LangChain RAG."""
105
+ features = extract_error_features(log_text)
106
+ category = features["category"]
107
+
108
+ enhanced_query = f"""
109
+ Error Category: {category}
110
+
111
+ Jenkins log:
112
+ {log_text}
113
+
114
+ Explain this error using the retrieved documentation.
115
+ """
116
+
117
+ result = self.qa_chain.invoke(enhanced_query)
118
+
119
+ return {
120
+ "error_category": category,
121
+ "llm_explanation": result,
122
+ "retrieval_source": "LangChain RAG (FAISS + HuggingFace)",
123
+ "model_used": "google/flan-t5-base",
124
+ "embedding_model": "paraphrase-MiniLM-L3-v2"
125
+ }
126
+
127
def get_rag_chain() -> JenkinsRAGChain:
    """Return a shared JenkinsRAGChain, building it on first call.

    The instance is cached as a function attribute so the expensive index
    construction happens at most once per process.
    """
    instance = getattr(get_rag_chain, "_instance", None)
    if instance is None:
        instance = JenkinsRAGChain()
        get_rag_chain._instance = instance
    return instance
132
+
133
+
134
+ if __name__ == "__main__":
135
+ print("Initializing LangChain RAG Chain...")
136
+ print("=" * 50)
137
+
138
+ rag = JenkinsRAGChain()
139
+
140
+ sample_error = """
141
+ Started by user admin
142
+ org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
143
+ WorkflowScript: 10: expecting '}', found '' @ line 10, column 1.
144
+ 1 error
145
+ at org.codehaus.groovy.control.ErrorNode.accept(ErrorNode.java:36)
146
+ at org.codehaus.groovy.control.CompilationUnit$3.call(CompilationUnit.java:698)
147
+ """
148
+
149
+ print("\nSample Jenkins Error:")
150
+ print(sample_error)
151
+ print("=" * 50)
152
+
153
+ result = rag.explain_error(sample_error)
154
+
155
+ print("\nResult from LangChain RAG:")
156
+ print(json.dumps(result, indent=2))
main.py CHANGED
@@ -1,21 +1,20 @@
1
  from fastapi import FastAPI
2
  from explain_error import explain_error
 
3
 
4
  app = FastAPI()
5
 
6
-
7
- from pydantic import BaseModel
8
-
9
- class LogRequest(BaseModel):
10
- log_text: str
11
-
12
- @app.post("/explain")
13
- def explain(req: LogRequest):
14
- return explain_error(req.log_text)
15
-
16
- from fastapi.responses import RedirectResponse
17
-
18
  @app.get("/")
19
- def redirect_to_docs():
20
- return RedirectResponse(url="/docs")
21
 
 
 
 
 
 
 
 
 
 
 
 
1
# main.py — FastAPI entry point for the Jenkins Error Explainer.

from fastapi import FastAPI
from pydantic import BaseModel

from explain_error import explain_error
from langchain_rag import get_rag_chain

app = FastAPI()


class LogRequest(BaseModel):
    """Request body for both explain endpoints."""

    # Raw Jenkins console log to analyze.
    log_text: str


@app.get("/")
def home():
    """Liveness check / landing route."""
    return {"message": "Jenkins Error Explainer API running"}


@app.post("/explain")
def explain(req: LogRequest):
    """Heuristic, documentation-grounded explanation (no LLM call).

    Using a Pydantic model instead of a bare dict restores request
    validation: a missing log_text now yields HTTP 422 rather than an
    unhandled KeyError (HTTP 500), and the field appears in the OpenAPI
    schema.
    """
    return explain_error(req.log_text)


@app.post("/explain-rag")
def explain_with_rag(req: LogRequest):
    """LLM-backed explanation via the LangChain RAG chain.

    The chain is a lazily created singleton (see get_rag_chain), so the
    first request pays the index-build cost and later requests reuse it.
    """
    return get_rag_chain().explain_error(req.log_text)
preload_model.py CHANGED
@@ -1,9 +1,9 @@
1
- from sentence_transformers import SentenceTransformer
2
-
3
- print("Downloading model...")
4
- MODEL = SentenceTransformer(
5
- "paraphrase-MiniLM-L3-v2",
6
- cache_folder="./model_cache"
7
- )
8
-
9
  print("Model cached successfully!")
 
1
# preload_model.py — pre-download the embedding model into ./model_cache
# (e.g. during image build) so runtime startup needs no network access.
from sentence_transformers import SentenceTransformer

print("Downloading model...")
# Instantiated only for its download side effect; the same model/cache pair
# is used by ingest_docs.py and retrieve_docs.py.
MODEL = SentenceTransformer(
    "paraphrase-MiniLM-L3-v2",
    cache_folder="./model_cache"
)

print("Model cached successfully!")
requirements.txt CHANGED
@@ -1,10 +1,15 @@
1
  fastapi
2
- uvicorn
3
- sentence-transformers==2.2.2
4
- huggingface_hub==0.19.4
5
  faiss-cpu
6
  numpy
7
  pydantic
8
  torch
9
- transformers==4.35.2
10
- tokenizers==0.15.2
 
 
 
 
 
 
1
  fastapi
2
+ uvicorn[standard]
3
+ sentence-transformers
4
+ huggingface_hub
5
  faiss-cpu
6
  numpy
7
  pydantic
8
  torch
9
+ transformers
10
+ tokenizers
11
+ langchain
12
+ langchain-core
13
+ langchain-community
14
+ langchain-huggingface
15
+ chromadb
retrieve_docs.py CHANGED
@@ -1,53 +1,53 @@
1
- # retrieve_docs.py
2
-
3
- import json
4
- import faiss
5
- import numpy as np
6
- from sentence_transformers import SentenceTransformer
7
-
8
- INDEX_PATH = "data/docs/docs.index"
9
- META_PATH = "data/docs/docs_meta.json"
10
- TOP_K = 5
11
-
12
- QUERY_TEMPLATES = {
13
- "groovy_syntax_error": "Jenkins pipeline Groovy syntax error missing brace",
14
- "missing_agent": "Jenkins pipeline agent none requires node context",
15
- "no_node_available": "Jenkins no nodes with label scheduling executor",
16
- "missing_plugin": "Jenkins No such DSL method pipeline step",
17
- "missing_credentials": "Jenkins credentials not found pipeline",
18
- "file_not_found": "Jenkins pipeline workspace file not found",
19
- "git_authentication_error": "Jenkins git authentication failed checkout"
20
- }
21
- model = SentenceTransformer(
22
- "paraphrase-MiniLM-L3-v2",
23
- cache_folder="./model_cache"
24
- )
25
-
26
- def retrieve_docs(error_category: str):
27
-
28
- index = faiss.read_index(INDEX_PATH)
29
- with open(META_PATH, "r", encoding="utf-8") as f:
30
- metadata = json.load(f)
31
-
32
- query = QUERY_TEMPLATES.get(
33
- error_category,
34
- "Jenkins pipeline error"
35
- )
36
-
37
- query_embedding = model.encode([query])
38
- distances, indices = index.search(query_embedding, TOP_K)
39
-
40
- results = []
41
- for idx in indices[0]:
42
- results.append({
43
- "text": None,
44
- "meta": metadata[idx]
45
- })
46
-
47
- return results
48
-
49
-
50
- if __name__ == "__main__":
51
- results = retrieve_docs("Started by user Arvind Nandigam org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed: WorkflowScript: 10: expecting '}', found '' @ line 10, column 1.1 error")
52
- for r in results:
53
  print(r)
 
1
+ # retrieve_docs.py
2
+
3
+ import json
4
+ import faiss
5
+ import numpy as np
6
+ from sentence_transformers import SentenceTransformer
7
+
8
+ INDEX_PATH = "data/docs/docs.index"
9
+ META_PATH = "data/docs/docs_meta.json"
10
+ TOP_K = 5
11
+
12
+ QUERY_TEMPLATES = {
13
+ "groovy_syntax_error": "Jenkins pipeline Groovy syntax error missing brace",
14
+ "missing_agent": "Jenkins pipeline agent none requires node context",
15
+ "no_node_available": "Jenkins no nodes with label scheduling executor",
16
+ "missing_plugin": "Jenkins No such DSL method pipeline step",
17
+ "missing_credentials": "Jenkins credentials not found pipeline",
18
+ "file_not_found": "Jenkins pipeline workspace file not found",
19
+ "git_authentication_error": "Jenkins git authentication failed checkout"
20
+ }
21
+ model = SentenceTransformer(
22
+ "paraphrase-MiniLM-L3-v2",
23
+ cache_folder="./model_cache"
24
+ )
25
+
26
def retrieve_docs(error_category: str):
    """Retrieve the most relevant documentation chunks for an error category.

    Maps the category to a canned search query (falling back to a generic
    Jenkins query for unknown categories), embeds it, and searches the FAISS
    index built by ingest_docs.py.

    Returns a list of dicts with keys "text" (the chunk text when stored in
    the metadata file, else None) and "meta" (the full metadata entry).
    """
    index = faiss.read_index(INDEX_PATH)
    with open(META_PATH, "r", encoding="utf-8") as f:
        metadata = json.load(f)

    if index.ntotal == 0:
        # Empty index: nothing to search.
        return []

    query = QUERY_TEMPLATES.get(
        error_category,
        "Jenkins pipeline error"
    )

    query_embedding = model.encode([query])
    # Never request more neighbours than the index holds; FAISS pads the
    # shortfall with -1, and metadata[-1] would silently return the LAST
    # metadata entry.
    k = min(TOP_K, index.ntotal)
    distances, indices = index.search(query_embedding, k)

    results = []
    for idx in indices[0]:
        if idx < 0:
            # Defensive: skip any -1 padding FAISS may still emit.
            continue
        entry = metadata[idx]
        results.append({
            # Older metadata files (built before chunk text was stored)
            # have no "text" key; .get keeps them working.
            "text": entry.get("text"),
            "meta": entry
        })

    return results
48
+
49
+
50
+ if __name__ == "__main__":
51
+ results = retrieve_docs("Started by user Arvind Nandigam org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed: WorkflowScript: 10: expecting '}', found '' @ line 10, column 1.1 error")
52
+ for r in results:
53
  print(r)