repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
mozilla-releng/scriptworker | scriptworker/task.py | get_pull_request_number | def get_pull_request_number(task, source_env_prefix):
"""Get what Github pull request created the graph.
Args:
obj (ChainOfTrust or LinkOfTrust): the trust object to inspect
source_env_prefix (str): The environment variable prefix that is used
to get repository information.
Returns:
int: the pull request number.
None: if not defined for this task.
"""
pull_request = _extract_from_env_in_payload(task, source_env_prefix + '_PULL_REQUEST_NUMBER')
if pull_request is not None:
pull_request = int(pull_request)
return pull_request | python | def get_pull_request_number(task, source_env_prefix):
"""Get what Github pull request created the graph.
Args:
obj (ChainOfTrust or LinkOfTrust): the trust object to inspect
source_env_prefix (str): The environment variable prefix that is used
to get repository information.
Returns:
int: the pull request number.
None: if not defined for this task.
"""
pull_request = _extract_from_env_in_payload(task, source_env_prefix + '_PULL_REQUEST_NUMBER')
if pull_request is not None:
pull_request = int(pull_request)
return pull_request | [
"def",
"get_pull_request_number",
"(",
"task",
",",
"source_env_prefix",
")",
":",
"pull_request",
"=",
"_extract_from_env_in_payload",
"(",
"task",
",",
"source_env_prefix",
"+",
"'_PULL_REQUEST_NUMBER'",
")",
"if",
"pull_request",
"is",
"not",
"None",
":",
"pull_req... | Get what Github pull request created the graph.
Args:
obj (ChainOfTrust or LinkOfTrust): the trust object to inspect
source_env_prefix (str): The environment variable prefix that is used
to get repository information.
Returns:
int: the pull request number.
None: if not defined for this task. | [
"Get",
"what",
"Github",
"pull",
"request",
"created",
"the",
"graph",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L219-L235 | train | 28,000 |
mozilla-releng/scriptworker | scriptworker/task.py | get_and_check_project | def get_and_check_project(valid_vcs_rules, source_url):
"""Given vcs rules and a source_url, return the project.
The project is in the path, but is the repo name.
`releases/mozilla-beta` is the path; `mozilla-beta` is the project.
Args:
valid_vcs_rules (tuple of frozendicts): the valid vcs rules, per
``match_url_regex``.
source_url (str): the source url to find the project for.
Raises:
RuntimeError: on failure to find the project.
Returns:
str: the project.
"""
project_path = match_url_regex(valid_vcs_rules, source_url, match_url_path_callback)
if project_path is None:
raise ValueError("Unknown repo for source url {}!".format(source_url))
project = project_path.split('/')[-1]
return project | python | def get_and_check_project(valid_vcs_rules, source_url):
"""Given vcs rules and a source_url, return the project.
The project is in the path, but is the repo name.
`releases/mozilla-beta` is the path; `mozilla-beta` is the project.
Args:
valid_vcs_rules (tuple of frozendicts): the valid vcs rules, per
``match_url_regex``.
source_url (str): the source url to find the project for.
Raises:
RuntimeError: on failure to find the project.
Returns:
str: the project.
"""
project_path = match_url_regex(valid_vcs_rules, source_url, match_url_path_callback)
if project_path is None:
raise ValueError("Unknown repo for source url {}!".format(source_url))
project = project_path.split('/')[-1]
return project | [
"def",
"get_and_check_project",
"(",
"valid_vcs_rules",
",",
"source_url",
")",
":",
"project_path",
"=",
"match_url_regex",
"(",
"valid_vcs_rules",
",",
"source_url",
",",
"match_url_path_callback",
")",
"if",
"project_path",
"is",
"None",
":",
"raise",
"ValueError",... | Given vcs rules and a source_url, return the project.
The project is in the path, but is the repo name.
`releases/mozilla-beta` is the path; `mozilla-beta` is the project.
Args:
valid_vcs_rules (tuple of frozendicts): the valid vcs rules, per
``match_url_regex``.
source_url (str): the source url to find the project for.
Raises:
RuntimeError: on failure to find the project.
Returns:
str: the project. | [
"Given",
"vcs",
"rules",
"and",
"a",
"source_url",
"return",
"the",
"project",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L278-L300 | train | 28,001 |
mozilla-releng/scriptworker | scriptworker/task.py | get_and_check_tasks_for | def get_and_check_tasks_for(context, task, msg_prefix=''):
"""Given a parent task, return the reason the parent task was spawned.
``.taskcluster.yml`` uses this to know whether to spawn an action,
cron, or decision task definition. ``tasks_for`` must be a valid one defined in the context.
Args:
task (dict): the task definition.
msg_prefix (str): the string prefix to use for an exception.
Raises:
(KeyError, ValueError): on failure to find a valid ``tasks_for``.
Returns:
str: the ``tasks_for``
"""
tasks_for = task['extra']['tasks_for']
if tasks_for not in context.config['valid_tasks_for']:
raise ValueError(
'{}Unknown tasks_for: {}'.format(msg_prefix, tasks_for)
)
return tasks_for | python | def get_and_check_tasks_for(context, task, msg_prefix=''):
"""Given a parent task, return the reason the parent task was spawned.
``.taskcluster.yml`` uses this to know whether to spawn an action,
cron, or decision task definition. ``tasks_for`` must be a valid one defined in the context.
Args:
task (dict): the task definition.
msg_prefix (str): the string prefix to use for an exception.
Raises:
(KeyError, ValueError): on failure to find a valid ``tasks_for``.
Returns:
str: the ``tasks_for``
"""
tasks_for = task['extra']['tasks_for']
if tasks_for not in context.config['valid_tasks_for']:
raise ValueError(
'{}Unknown tasks_for: {}'.format(msg_prefix, tasks_for)
)
return tasks_for | [
"def",
"get_and_check_tasks_for",
"(",
"context",
",",
"task",
",",
"msg_prefix",
"=",
"''",
")",
":",
"tasks_for",
"=",
"task",
"[",
"'extra'",
"]",
"[",
"'tasks_for'",
"]",
"if",
"tasks_for",
"not",
"in",
"context",
".",
"config",
"[",
"'valid_tasks_for'",... | Given a parent task, return the reason the parent task was spawned.
``.taskcluster.yml`` uses this to know whether to spawn an action,
cron, or decision task definition. ``tasks_for`` must be a valid one defined in the context.
Args:
task (dict): the task definition.
msg_prefix (str): the string prefix to use for an exception.
Raises:
(KeyError, ValueError): on failure to find a valid ``tasks_for``.
Returns:
str: the ``tasks_for`` | [
"Given",
"a",
"parent",
"task",
"return",
"the",
"reason",
"the",
"parent",
"task",
"was",
"spawned",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L304-L326 | train | 28,002 |
mozilla-releng/scriptworker | scriptworker/task.py | get_repo_scope | def get_repo_scope(task, name):
"""Given a parent task, return the repo scope for the task.
Background in https://bugzilla.mozilla.org/show_bug.cgi?id=1459705#c3
Args:
task (dict): the task definition.
Raises:
ValueError: on too many `repo_scope`s (we allow for 1 or 0).
Returns:
str: the ``repo_scope``
None: if no ``repo_scope`` is found
"""
repo_scopes = []
for scope in task['scopes']:
if REPO_SCOPE_REGEX.match(scope):
repo_scopes.append(scope)
if len(repo_scopes) > 1:
raise ValueError(
"{}: Too many repo_scopes: {}!".format(name, repo_scopes)
)
if repo_scopes:
return repo_scopes[0] | python | def get_repo_scope(task, name):
"""Given a parent task, return the repo scope for the task.
Background in https://bugzilla.mozilla.org/show_bug.cgi?id=1459705#c3
Args:
task (dict): the task definition.
Raises:
ValueError: on too many `repo_scope`s (we allow for 1 or 0).
Returns:
str: the ``repo_scope``
None: if no ``repo_scope`` is found
"""
repo_scopes = []
for scope in task['scopes']:
if REPO_SCOPE_REGEX.match(scope):
repo_scopes.append(scope)
if len(repo_scopes) > 1:
raise ValueError(
"{}: Too many repo_scopes: {}!".format(name, repo_scopes)
)
if repo_scopes:
return repo_scopes[0] | [
"def",
"get_repo_scope",
"(",
"task",
",",
"name",
")",
":",
"repo_scopes",
"=",
"[",
"]",
"for",
"scope",
"in",
"task",
"[",
"'scopes'",
"]",
":",
"if",
"REPO_SCOPE_REGEX",
".",
"match",
"(",
"scope",
")",
":",
"repo_scopes",
".",
"append",
"(",
"scop... | Given a parent task, return the repo scope for the task.
Background in https://bugzilla.mozilla.org/show_bug.cgi?id=1459705#c3
Args:
task (dict): the task definition.
Raises:
ValueError: on too many `repo_scope`s (we allow for 1 or 0).
Returns:
str: the ``repo_scope``
None: if no ``repo_scope`` is found | [
"Given",
"a",
"parent",
"task",
"return",
"the",
"repo",
"scope",
"for",
"the",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L330-L355 | train | 28,003 |
mozilla-releng/scriptworker | scriptworker/task.py | is_github_task | def is_github_task(task):
"""Determine if a task is related to GitHub.
This function currently looks into the ``schedulerId``, ``extra.tasks_for``, and
``metadata.source``.
Args:
task (dict): the task definition to check.
Returns:
bool: True if a piece of data refers to GitHub
"""
return any((
# XXX Cron tasks don't usually define 'taskcluster-github' as their schedulerId as they
# are scheduled within another Taskcluster task.
task.get('schedulerId') == 'taskcluster-github',
# XXX Same here, cron tasks don't start with github
task.get('extra', {}).get('tasks_for', '').startswith('github-'),
is_github_url(task.get('metadata', {}).get('source', '')),
)) | python | def is_github_task(task):
"""Determine if a task is related to GitHub.
This function currently looks into the ``schedulerId``, ``extra.tasks_for``, and
``metadata.source``.
Args:
task (dict): the task definition to check.
Returns:
bool: True if a piece of data refers to GitHub
"""
return any((
# XXX Cron tasks don't usually define 'taskcluster-github' as their schedulerId as they
# are scheduled within another Taskcluster task.
task.get('schedulerId') == 'taskcluster-github',
# XXX Same here, cron tasks don't start with github
task.get('extra', {}).get('tasks_for', '').startswith('github-'),
is_github_url(task.get('metadata', {}).get('source', '')),
)) | [
"def",
"is_github_task",
"(",
"task",
")",
":",
"return",
"any",
"(",
"(",
"# XXX Cron tasks don't usually define 'taskcluster-github' as their schedulerId as they",
"# are scheduled within another Taskcluster task.",
"task",
".",
"get",
"(",
"'schedulerId'",
")",
"==",
"'taskc... | Determine if a task is related to GitHub.
This function currently looks into the ``schedulerId``, ``extra.tasks_for``, and
``metadata.source``.
Args:
task (dict): the task definition to check.
Returns:
bool: True if a piece of data refers to GitHub | [
"Determine",
"if",
"a",
"task",
"is",
"related",
"to",
"GitHub",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L466-L486 | train | 28,004 |
mozilla-releng/scriptworker | scriptworker/task.py | is_action | def is_action(task):
"""Determine if a task is an action task.
Trusted decision and action tasks are important in that they can generate
other valid tasks. The verification of decision and action tasks is slightly
different, so we need to be able to tell them apart.
This checks for the following things::
* ``task.payload.env.ACTION_CALLBACK`` exists
* ``task.extra.action`` exists
Args:
task (dict): the task definition to check
Returns:
bool: True if it's an action
"""
result = False
if _extract_from_env_in_payload(task, 'ACTION_CALLBACK'):
result = True
if task.get('extra', {}).get('action') is not None:
result = True
return result | python | def is_action(task):
"""Determine if a task is an action task.
Trusted decision and action tasks are important in that they can generate
other valid tasks. The verification of decision and action tasks is slightly
different, so we need to be able to tell them apart.
This checks for the following things::
* ``task.payload.env.ACTION_CALLBACK`` exists
* ``task.extra.action`` exists
Args:
task (dict): the task definition to check
Returns:
bool: True if it's an action
"""
result = False
if _extract_from_env_in_payload(task, 'ACTION_CALLBACK'):
result = True
if task.get('extra', {}).get('action') is not None:
result = True
return result | [
"def",
"is_action",
"(",
"task",
")",
":",
"result",
"=",
"False",
"if",
"_extract_from_env_in_payload",
"(",
"task",
",",
"'ACTION_CALLBACK'",
")",
":",
"result",
"=",
"True",
"if",
"task",
".",
"get",
"(",
"'extra'",
",",
"{",
"}",
")",
".",
"get",
"... | Determine if a task is an action task.
Trusted decision and action tasks are important in that they can generate
other valid tasks. The verification of decision and action tasks is slightly
different, so we need to be able to tell them apart.
This checks for the following things::
* ``task.payload.env.ACTION_CALLBACK`` exists
* ``task.extra.action`` exists
Args:
task (dict): the task definition to check
Returns:
bool: True if it's an action | [
"Determine",
"if",
"a",
"task",
"is",
"an",
"action",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L490-L514 | train | 28,005 |
mozilla-releng/scriptworker | scriptworker/task.py | prepare_to_run_task | def prepare_to_run_task(context, claim_task):
"""Given a `claim_task` json dict, prepare the `context` and `work_dir`.
Set `context.claim_task`, and write a `work_dir/current_task_info.json`
Args:
context (scriptworker.context.Context): the scriptworker context.
claim_task (dict): the claim_task dict.
Returns:
dict: the contents of `current_task_info.json`
"""
current_task_info = {}
context.claim_task = claim_task
current_task_info['taskId'] = get_task_id(claim_task)
current_task_info['runId'] = get_run_id(claim_task)
log.info("Going to run taskId {taskId} runId {runId}!".format(
**current_task_info
))
context.write_json(
os.path.join(context.config['work_dir'], 'current_task_info.json'),
current_task_info, "Writing current task info to {path}..."
)
return current_task_info | python | def prepare_to_run_task(context, claim_task):
"""Given a `claim_task` json dict, prepare the `context` and `work_dir`.
Set `context.claim_task`, and write a `work_dir/current_task_info.json`
Args:
context (scriptworker.context.Context): the scriptworker context.
claim_task (dict): the claim_task dict.
Returns:
dict: the contents of `current_task_info.json`
"""
current_task_info = {}
context.claim_task = claim_task
current_task_info['taskId'] = get_task_id(claim_task)
current_task_info['runId'] = get_run_id(claim_task)
log.info("Going to run taskId {taskId} runId {runId}!".format(
**current_task_info
))
context.write_json(
os.path.join(context.config['work_dir'], 'current_task_info.json'),
current_task_info, "Writing current task info to {path}..."
)
return current_task_info | [
"def",
"prepare_to_run_task",
"(",
"context",
",",
"claim_task",
")",
":",
"current_task_info",
"=",
"{",
"}",
"context",
".",
"claim_task",
"=",
"claim_task",
"current_task_info",
"[",
"'taskId'",
"]",
"=",
"get_task_id",
"(",
"claim_task",
")",
"current_task_inf... | Given a `claim_task` json dict, prepare the `context` and `work_dir`.
Set `context.claim_task`, and write a `work_dir/current_task_info.json`
Args:
context (scriptworker.context.Context): the scriptworker context.
claim_task (dict): the claim_task dict.
Returns:
dict: the contents of `current_task_info.json` | [
"Given",
"a",
"claim_task",
"json",
"dict",
"prepare",
"the",
"context",
"and",
"work_dir",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L518-L542 | train | 28,006 |
mozilla-releng/scriptworker | scriptworker/task.py | run_task | async def run_task(context, to_cancellable_process):
"""Run the task, sending stdout+stderr to files.
https://github.com/python/asyncio/blob/master/examples/subprocess_shell.py
Args:
context (scriptworker.context.Context): the scriptworker context.
to_cancellable_process (types.Callable): tracks the process so that it can be stopped if the worker is shut down
Returns:
int: exit code
"""
kwargs = { # pragma: no branch
'stdout': PIPE,
'stderr': PIPE,
'stdin': None,
'close_fds': True,
'preexec_fn': lambda: os.setsid(),
}
subprocess = await asyncio.create_subprocess_exec(*context.config['task_script'], **kwargs)
context.proc = await to_cancellable_process(TaskProcess(subprocess))
timeout = context.config['task_max_timeout']
with get_log_filehandle(context) as log_filehandle:
stderr_future = asyncio.ensure_future(
pipe_to_log(context.proc.process.stderr, filehandles=[log_filehandle])
)
stdout_future = asyncio.ensure_future(
pipe_to_log(context.proc.process.stdout, filehandles=[log_filehandle])
)
try:
_, pending = await asyncio.wait(
[stderr_future, stdout_future], timeout=timeout
)
if pending:
message = "Exceeded task_max_timeout of {} seconds".format(timeout)
log.warning(message)
await context.proc.stop()
raise ScriptWorkerTaskException(message, exit_code=context.config['task_max_timeout_status'])
finally:
# in the case of a timeout, this will be -15.
# this code is in the finally: block so we still get the final
# log lines.
exitcode = await context.proc.process.wait()
# make sure we haven't lost any of the logs
await asyncio.wait([stdout_future, stderr_future])
# add an exit code line at the end of the log
status_line = "exit code: {}".format(exitcode)
if exitcode < 0:
status_line = "Automation Error: python exited with signal {}".format(exitcode)
log.info(status_line)
print(status_line, file=log_filehandle)
stopped_due_to_worker_shutdown = context.proc.stopped_due_to_worker_shutdown
context.proc = None
if stopped_due_to_worker_shutdown:
raise WorkerShutdownDuringTask
return exitcode | python | async def run_task(context, to_cancellable_process):
"""Run the task, sending stdout+stderr to files.
https://github.com/python/asyncio/blob/master/examples/subprocess_shell.py
Args:
context (scriptworker.context.Context): the scriptworker context.
to_cancellable_process (types.Callable): tracks the process so that it can be stopped if the worker is shut down
Returns:
int: exit code
"""
kwargs = { # pragma: no branch
'stdout': PIPE,
'stderr': PIPE,
'stdin': None,
'close_fds': True,
'preexec_fn': lambda: os.setsid(),
}
subprocess = await asyncio.create_subprocess_exec(*context.config['task_script'], **kwargs)
context.proc = await to_cancellable_process(TaskProcess(subprocess))
timeout = context.config['task_max_timeout']
with get_log_filehandle(context) as log_filehandle:
stderr_future = asyncio.ensure_future(
pipe_to_log(context.proc.process.stderr, filehandles=[log_filehandle])
)
stdout_future = asyncio.ensure_future(
pipe_to_log(context.proc.process.stdout, filehandles=[log_filehandle])
)
try:
_, pending = await asyncio.wait(
[stderr_future, stdout_future], timeout=timeout
)
if pending:
message = "Exceeded task_max_timeout of {} seconds".format(timeout)
log.warning(message)
await context.proc.stop()
raise ScriptWorkerTaskException(message, exit_code=context.config['task_max_timeout_status'])
finally:
# in the case of a timeout, this will be -15.
# this code is in the finally: block so we still get the final
# log lines.
exitcode = await context.proc.process.wait()
# make sure we haven't lost any of the logs
await asyncio.wait([stdout_future, stderr_future])
# add an exit code line at the end of the log
status_line = "exit code: {}".format(exitcode)
if exitcode < 0:
status_line = "Automation Error: python exited with signal {}".format(exitcode)
log.info(status_line)
print(status_line, file=log_filehandle)
stopped_due_to_worker_shutdown = context.proc.stopped_due_to_worker_shutdown
context.proc = None
if stopped_due_to_worker_shutdown:
raise WorkerShutdownDuringTask
return exitcode | [
"async",
"def",
"run_task",
"(",
"context",
",",
"to_cancellable_process",
")",
":",
"kwargs",
"=",
"{",
"# pragma: no branch",
"'stdout'",
":",
"PIPE",
",",
"'stderr'",
":",
"PIPE",
",",
"'stdin'",
":",
"None",
",",
"'close_fds'",
":",
"True",
",",
"'preexe... | Run the task, sending stdout+stderr to files.
https://github.com/python/asyncio/blob/master/examples/subprocess_shell.py
Args:
context (scriptworker.context.Context): the scriptworker context.
to_cancellable_process (types.Callable): tracks the process so that it can be stopped if the worker is shut down
Returns:
int: exit code | [
"Run",
"the",
"task",
"sending",
"stdout",
"+",
"stderr",
"to",
"files",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L546-L606 | train | 28,007 |
mozilla-releng/scriptworker | scriptworker/task.py | reclaim_task | async def reclaim_task(context, task):
"""Try to reclaim a task from the queue.
This is a keepalive / heartbeat. Without it the job will expire and
potentially be re-queued. Since this is run async from the task, the
task may complete before we run, in which case we'll get a 409 the next
time we reclaim.
Args:
context (scriptworker.context.Context): the scriptworker context
Raises:
taskcluster.exceptions.TaskclusterRestFailure: on non-409 status_code
from taskcluster.aio.Queue.reclaimTask()
"""
while True:
log.debug("waiting %s seconds before reclaiming..." % context.config['reclaim_interval'])
await asyncio.sleep(context.config['reclaim_interval'])
if task != context.task:
return
log.debug("Reclaiming task...")
try:
context.reclaim_task = await context.temp_queue.reclaimTask(
get_task_id(context.claim_task),
get_run_id(context.claim_task),
)
clean_response = deepcopy(context.reclaim_task)
clean_response['credentials'] = "{********}"
log.debug("Reclaim task response:\n{}".format(pprint.pformat(clean_response)))
except taskcluster.exceptions.TaskclusterRestFailure as exc:
if exc.status_code == 409:
log.debug("409: not reclaiming task.")
if context.proc and task == context.task:
message = "Killing task after receiving 409 status in reclaim_task"
log.warning(message)
await context.proc.stop()
raise ScriptWorkerTaskException(message, exit_code=context.config['invalid_reclaim_status'])
break
else:
raise | python | async def reclaim_task(context, task):
"""Try to reclaim a task from the queue.
This is a keepalive / heartbeat. Without it the job will expire and
potentially be re-queued. Since this is run async from the task, the
task may complete before we run, in which case we'll get a 409 the next
time we reclaim.
Args:
context (scriptworker.context.Context): the scriptworker context
Raises:
taskcluster.exceptions.TaskclusterRestFailure: on non-409 status_code
from taskcluster.aio.Queue.reclaimTask()
"""
while True:
log.debug("waiting %s seconds before reclaiming..." % context.config['reclaim_interval'])
await asyncio.sleep(context.config['reclaim_interval'])
if task != context.task:
return
log.debug("Reclaiming task...")
try:
context.reclaim_task = await context.temp_queue.reclaimTask(
get_task_id(context.claim_task),
get_run_id(context.claim_task),
)
clean_response = deepcopy(context.reclaim_task)
clean_response['credentials'] = "{********}"
log.debug("Reclaim task response:\n{}".format(pprint.pformat(clean_response)))
except taskcluster.exceptions.TaskclusterRestFailure as exc:
if exc.status_code == 409:
log.debug("409: not reclaiming task.")
if context.proc and task == context.task:
message = "Killing task after receiving 409 status in reclaim_task"
log.warning(message)
await context.proc.stop()
raise ScriptWorkerTaskException(message, exit_code=context.config['invalid_reclaim_status'])
break
else:
raise | [
"async",
"def",
"reclaim_task",
"(",
"context",
",",
"task",
")",
":",
"while",
"True",
":",
"log",
".",
"debug",
"(",
"\"waiting %s seconds before reclaiming...\"",
"%",
"context",
".",
"config",
"[",
"'reclaim_interval'",
"]",
")",
"await",
"asyncio",
".",
"... | Try to reclaim a task from the queue.
This is a keepalive / heartbeat. Without it the job will expire and
potentially be re-queued. Since this is run async from the task, the
task may complete before we run, in which case we'll get a 409 the next
time we reclaim.
Args:
context (scriptworker.context.Context): the scriptworker context
Raises:
taskcluster.exceptions.TaskclusterRestFailure: on non-409 status_code
from taskcluster.aio.Queue.reclaimTask() | [
"Try",
"to",
"reclaim",
"a",
"task",
"from",
"the",
"queue",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L610-L650 | train | 28,008 |
mozilla-releng/scriptworker | scriptworker/task.py | complete_task | async def complete_task(context, result):
"""Mark the task as completed in the queue.
Decide whether to call reportCompleted, reportFailed, or reportException
based on the exit status of the script.
If the task has expired or been cancelled, we'll get a 409 status.
Args:
context (scriptworker.context.Context): the scriptworker context.
Raises:
taskcluster.exceptions.TaskclusterRestFailure: on non-409 error.
"""
args = [get_task_id(context.claim_task), get_run_id(context.claim_task)]
reversed_statuses = get_reversed_statuses(context)
try:
if result == 0:
log.info("Reporting task complete...")
response = await context.temp_queue.reportCompleted(*args)
elif result != 1 and result in reversed_statuses:
reason = reversed_statuses[result]
log.info("Reporting task exception {}...".format(reason))
payload = {"reason": reason}
response = await context.temp_queue.reportException(*args, payload)
else:
log.info("Reporting task failed...")
response = await context.temp_queue.reportFailed(*args)
log.debug("Task status response:\n{}".format(pprint.pformat(response)))
except taskcluster.exceptions.TaskclusterRestFailure as exc:
if exc.status_code == 409:
log.info("409: not reporting complete/failed.")
else:
raise | python | async def complete_task(context, result):
"""Mark the task as completed in the queue.
Decide whether to call reportCompleted, reportFailed, or reportException
based on the exit status of the script.
If the task has expired or been cancelled, we'll get a 409 status.
Args:
context (scriptworker.context.Context): the scriptworker context.
Raises:
taskcluster.exceptions.TaskclusterRestFailure: on non-409 error.
"""
args = [get_task_id(context.claim_task), get_run_id(context.claim_task)]
reversed_statuses = get_reversed_statuses(context)
try:
if result == 0:
log.info("Reporting task complete...")
response = await context.temp_queue.reportCompleted(*args)
elif result != 1 and result in reversed_statuses:
reason = reversed_statuses[result]
log.info("Reporting task exception {}...".format(reason))
payload = {"reason": reason}
response = await context.temp_queue.reportException(*args, payload)
else:
log.info("Reporting task failed...")
response = await context.temp_queue.reportFailed(*args)
log.debug("Task status response:\n{}".format(pprint.pformat(response)))
except taskcluster.exceptions.TaskclusterRestFailure as exc:
if exc.status_code == 409:
log.info("409: not reporting complete/failed.")
else:
raise | [
"async",
"def",
"complete_task",
"(",
"context",
",",
"result",
")",
":",
"args",
"=",
"[",
"get_task_id",
"(",
"context",
".",
"claim_task",
")",
",",
"get_run_id",
"(",
"context",
".",
"claim_task",
")",
"]",
"reversed_statuses",
"=",
"get_reversed_statuses"... | Mark the task as completed in the queue.
Decide whether to call reportCompleted, reportFailed, or reportException
based on the exit status of the script.
If the task has expired or been cancelled, we'll get a 409 status.
Args:
context (scriptworker.context.Context): the scriptworker context.
Raises:
taskcluster.exceptions.TaskclusterRestFailure: on non-409 error. | [
"Mark",
"the",
"task",
"as",
"completed",
"in",
"the",
"queue",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L654-L688 | train | 28,009 |
mozilla-releng/scriptworker | scriptworker/task.py | claim_work | async def claim_work(context):
"""Find and claim the next pending task in the queue, if any.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: a dict containing a list of the task definitions of the tasks claimed.
"""
log.debug("Calling claimWork...")
payload = {
'workerGroup': context.config['worker_group'],
'workerId': context.config['worker_id'],
# Hardcode one task at a time. Make this a pref if we allow for
# parallel tasks in multiple `work_dir`s.
'tasks': 1,
}
try:
return await context.queue.claimWork(
context.config['provisioner_id'],
context.config['worker_type'],
payload
)
except (taskcluster.exceptions.TaskclusterFailure, aiohttp.ClientError) as exc:
log.warning("{} {}".format(exc.__class__, exc)) | python | async def claim_work(context):
"""Find and claim the next pending task in the queue, if any.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: a dict containing a list of the task definitions of the tasks claimed.
"""
log.debug("Calling claimWork...")
payload = {
'workerGroup': context.config['worker_group'],
'workerId': context.config['worker_id'],
# Hardcode one task at a time. Make this a pref if we allow for
# parallel tasks in multiple `work_dir`s.
'tasks': 1,
}
try:
return await context.queue.claimWork(
context.config['provisioner_id'],
context.config['worker_type'],
payload
)
except (taskcluster.exceptions.TaskclusterFailure, aiohttp.ClientError) as exc:
log.warning("{} {}".format(exc.__class__, exc)) | [
"async",
"def",
"claim_work",
"(",
"context",
")",
":",
"log",
".",
"debug",
"(",
"\"Calling claimWork...\"",
")",
"payload",
"=",
"{",
"'workerGroup'",
":",
"context",
".",
"config",
"[",
"'worker_group'",
"]",
",",
"'workerId'",
":",
"context",
".",
"confi... | Find and claim the next pending task in the queue, if any.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: a dict containing a list of the task definitions of the tasks claimed. | [
"Find",
"and",
"claim",
"the",
"next",
"pending",
"task",
"in",
"the",
"queue",
"if",
"any",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/task.py#L692-L717 | train | 28,010 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | raise_on_errors | def raise_on_errors(errors, level=logging.CRITICAL):
"""Raise a CoTError if errors.
Helper function because I had this code block everywhere.
Args:
errors (list): the error errors
level (int, optional): the log level to use. Defaults to logging.CRITICAL
Raises:
CoTError: if errors is non-empty
"""
if errors:
log.log(level, "\n".join(errors))
raise CoTError("\n".join(errors)) | python | def raise_on_errors(errors, level=logging.CRITICAL):
"""Raise a CoTError if errors.
Helper function because I had this code block everywhere.
Args:
errors (list): the error errors
level (int, optional): the log level to use. Defaults to logging.CRITICAL
Raises:
CoTError: if errors is non-empty
"""
if errors:
log.log(level, "\n".join(errors))
raise CoTError("\n".join(errors)) | [
"def",
"raise_on_errors",
"(",
"errors",
",",
"level",
"=",
"logging",
".",
"CRITICAL",
")",
":",
"if",
"errors",
":",
"log",
".",
"log",
"(",
"level",
",",
"\"\\n\"",
".",
"join",
"(",
"errors",
")",
")",
"raise",
"CoTError",
"(",
"\"\\n\"",
".",
"j... | Raise a CoTError if errors.
Helper function because I had this code block everywhere.
Args:
errors (list): the error errors
level (int, optional): the log level to use. Defaults to logging.CRITICAL
Raises:
CoTError: if errors is non-empty | [
"Raise",
"a",
"CoTError",
"if",
"errors",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L286-L301 | train | 28,011 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | guess_task_type | def guess_task_type(name, task_defn):
"""Guess the task type of the task.
Args:
name (str): the name of the task.
Returns:
str: the task_type.
Raises:
CoTError: on invalid task_type.
"""
parts = name.split(':')
task_type = parts[-1]
if task_type == 'parent':
if is_action(task_defn):
task_type = 'action'
else:
task_type = 'decision'
if task_type not in get_valid_task_types():
raise CoTError(
"Invalid task type for {}!".format(name)
)
return task_type | python | def guess_task_type(name, task_defn):
"""Guess the task type of the task.
Args:
name (str): the name of the task.
Returns:
str: the task_type.
Raises:
CoTError: on invalid task_type.
"""
parts = name.split(':')
task_type = parts[-1]
if task_type == 'parent':
if is_action(task_defn):
task_type = 'action'
else:
task_type = 'decision'
if task_type not in get_valid_task_types():
raise CoTError(
"Invalid task type for {}!".format(name)
)
return task_type | [
"def",
"guess_task_type",
"(",
"name",
",",
"task_defn",
")",
":",
"parts",
"=",
"name",
".",
"split",
"(",
"':'",
")",
"task_type",
"=",
"parts",
"[",
"-",
"1",
"]",
"if",
"task_type",
"==",
"'parent'",
":",
"if",
"is_action",
"(",
"task_defn",
")",
... | Guess the task type of the task.
Args:
name (str): the name of the task.
Returns:
str: the task_type.
Raises:
CoTError: on invalid task_type. | [
"Guess",
"the",
"task",
"type",
"of",
"the",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L375-L399 | train | 28,012 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | check_interactive_docker_worker | def check_interactive_docker_worker(link):
"""Given a task, make sure the task was not defined as interactive.
* ``task.payload.features.interactive`` must be absent or False.
* ``task.payload.env.TASKCLUSTER_INTERACTIVE`` must be absent or False.
Args:
link (LinkOfTrust): the task link we're checking.
Returns:
list: the list of error errors. Success is an empty list.
"""
errors = []
log.info("Checking for {} {} interactive docker-worker".format(link.name, link.task_id))
try:
if link.task['payload']['features'].get('interactive'):
errors.append("{} is interactive: task.payload.features.interactive!".format(link.name))
if link.task['payload']['env'].get('TASKCLUSTER_INTERACTIVE'):
errors.append("{} is interactive: task.payload.env.TASKCLUSTER_INTERACTIVE!".format(link.name))
except KeyError:
errors.append("check_interactive_docker_worker: {} task definition is malformed!".format(link.name))
return errors | python | def check_interactive_docker_worker(link):
"""Given a task, make sure the task was not defined as interactive.
* ``task.payload.features.interactive`` must be absent or False.
* ``task.payload.env.TASKCLUSTER_INTERACTIVE`` must be absent or False.
Args:
link (LinkOfTrust): the task link we're checking.
Returns:
list: the list of error errors. Success is an empty list.
"""
errors = []
log.info("Checking for {} {} interactive docker-worker".format(link.name, link.task_id))
try:
if link.task['payload']['features'].get('interactive'):
errors.append("{} is interactive: task.payload.features.interactive!".format(link.name))
if link.task['payload']['env'].get('TASKCLUSTER_INTERACTIVE'):
errors.append("{} is interactive: task.payload.env.TASKCLUSTER_INTERACTIVE!".format(link.name))
except KeyError:
errors.append("check_interactive_docker_worker: {} task definition is malformed!".format(link.name))
return errors | [
"def",
"check_interactive_docker_worker",
"(",
"link",
")",
":",
"errors",
"=",
"[",
"]",
"log",
".",
"info",
"(",
"\"Checking for {} {} interactive docker-worker\"",
".",
"format",
"(",
"link",
".",
"name",
",",
"link",
".",
"task_id",
")",
")",
"try",
":",
... | Given a task, make sure the task was not defined as interactive.
* ``task.payload.features.interactive`` must be absent or False.
* ``task.payload.env.TASKCLUSTER_INTERACTIVE`` must be absent or False.
Args:
link (LinkOfTrust): the task link we're checking.
Returns:
list: the list of error errors. Success is an empty list. | [
"Given",
"a",
"task",
"make",
"sure",
"the",
"task",
"was",
"not",
"defined",
"as",
"interactive",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L432-L454 | train | 28,013 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_docker_image_sha | def verify_docker_image_sha(chain, link):
"""Verify that built docker shas match the artifact.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
Raises:
CoTError: on failure.
"""
cot = link.cot
task = link.task
errors = []
if isinstance(task['payload'].get('image'), dict):
# Using pre-built image from docker-image task
docker_image_task_id = task['extra']['chainOfTrust']['inputs']['docker-image']
log.debug("Verifying {} {} against docker-image {}".format(
link.name, link.task_id, docker_image_task_id
))
if docker_image_task_id != task['payload']['image']['taskId']:
errors.append("{} {} docker-image taskId isn't consistent!: {} vs {}".format(
link.name, link.task_id, docker_image_task_id,
task['payload']['image']['taskId']
))
else:
path = task['payload']['image']['path']
# we need change the hash alg everywhere if we change, and recreate
# the docker images...
image_hash = cot['environment']['imageArtifactHash']
alg, sha = image_hash.split(':')
docker_image_link = chain.get_link(docker_image_task_id)
upstream_sha = docker_image_link.cot['artifacts'].get(path, {}).get(alg)
if upstream_sha is None:
errors.append("{} {} docker-image docker sha {} is missing! {}".format(
link.name, link.task_id, alg,
docker_image_link.cot['artifacts'][path]
))
elif upstream_sha != sha:
errors.append("{} {} docker-image docker sha doesn't match! {} {} vs {}".format(
link.name, link.task_id, alg, sha, upstream_sha
))
else:
log.debug("Found matching docker-image sha {}".format(upstream_sha))
else:
prebuilt_task_types = chain.context.config['prebuilt_docker_image_task_types']
if prebuilt_task_types != "any" and link.task_type not in prebuilt_task_types:
errors.append(
"Task type {} not allowed to use a prebuilt docker image!".format(
link.task_type
)
)
raise_on_errors(errors) | python | def verify_docker_image_sha(chain, link):
"""Verify that built docker shas match the artifact.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
Raises:
CoTError: on failure.
"""
cot = link.cot
task = link.task
errors = []
if isinstance(task['payload'].get('image'), dict):
# Using pre-built image from docker-image task
docker_image_task_id = task['extra']['chainOfTrust']['inputs']['docker-image']
log.debug("Verifying {} {} against docker-image {}".format(
link.name, link.task_id, docker_image_task_id
))
if docker_image_task_id != task['payload']['image']['taskId']:
errors.append("{} {} docker-image taskId isn't consistent!: {} vs {}".format(
link.name, link.task_id, docker_image_task_id,
task['payload']['image']['taskId']
))
else:
path = task['payload']['image']['path']
# we need change the hash alg everywhere if we change, and recreate
# the docker images...
image_hash = cot['environment']['imageArtifactHash']
alg, sha = image_hash.split(':')
docker_image_link = chain.get_link(docker_image_task_id)
upstream_sha = docker_image_link.cot['artifacts'].get(path, {}).get(alg)
if upstream_sha is None:
errors.append("{} {} docker-image docker sha {} is missing! {}".format(
link.name, link.task_id, alg,
docker_image_link.cot['artifacts'][path]
))
elif upstream_sha != sha:
errors.append("{} {} docker-image docker sha doesn't match! {} {} vs {}".format(
link.name, link.task_id, alg, sha, upstream_sha
))
else:
log.debug("Found matching docker-image sha {}".format(upstream_sha))
else:
prebuilt_task_types = chain.context.config['prebuilt_docker_image_task_types']
if prebuilt_task_types != "any" and link.task_type not in prebuilt_task_types:
errors.append(
"Task type {} not allowed to use a prebuilt docker image!".format(
link.task_type
)
)
raise_on_errors(errors) | [
"def",
"verify_docker_image_sha",
"(",
"chain",
",",
"link",
")",
":",
"cot",
"=",
"link",
".",
"cot",
"task",
"=",
"link",
".",
"task",
"errors",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"task",
"[",
"'payload'",
"]",
".",
"get",
"(",
"'image'",
")",... | Verify that built docker shas match the artifact.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
Raises:
CoTError: on failure. | [
"Verify",
"that",
"built",
"docker",
"shas",
"match",
"the",
"artifact",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L458-L511 | train | 28,014 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | find_sorted_task_dependencies | def find_sorted_task_dependencies(task, task_name, task_id):
"""Find the taskIds of the chain of trust dependencies of a given task.
Args:
task (dict): the task definition to inspect.
task_name (str): the name of the task, for logging and naming children.
task_id (str): the taskId of the task.
Returns:
list: tuples associating dependent task ``name`` to dependent task ``taskId``.
"""
log.info("find_sorted_task_dependencies {} {}".format(task_name, task_id))
cot_input_dependencies = [
_craft_dependency_tuple(task_name, task_type, task_id)
for task_type, task_id in task['extra'].get('chainOfTrust', {}).get('inputs', {}).items()
]
upstream_artifacts_dependencies = [
_craft_dependency_tuple(task_name, artifact_dict['taskType'], artifact_dict['taskId'])
for artifact_dict in task.get('payload', {}).get('upstreamArtifacts', [])
]
dependencies = [*cot_input_dependencies, *upstream_artifacts_dependencies]
dependencies = _sort_dependencies_by_name_then_task_id(dependencies)
parent_task_id = get_parent_task_id(task) or get_decision_task_id(task)
parent_task_type = 'parent'
# make sure we deal with the decision task first, or we may populate
# signing:build0:decision before signing:decision
parent_tuple = _craft_dependency_tuple(task_name, parent_task_type, parent_task_id)
dependencies.insert(0, parent_tuple)
log.info('found dependencies: {}'.format(dependencies))
return dependencies | python | def find_sorted_task_dependencies(task, task_name, task_id):
"""Find the taskIds of the chain of trust dependencies of a given task.
Args:
task (dict): the task definition to inspect.
task_name (str): the name of the task, for logging and naming children.
task_id (str): the taskId of the task.
Returns:
list: tuples associating dependent task ``name`` to dependent task ``taskId``.
"""
log.info("find_sorted_task_dependencies {} {}".format(task_name, task_id))
cot_input_dependencies = [
_craft_dependency_tuple(task_name, task_type, task_id)
for task_type, task_id in task['extra'].get('chainOfTrust', {}).get('inputs', {}).items()
]
upstream_artifacts_dependencies = [
_craft_dependency_tuple(task_name, artifact_dict['taskType'], artifact_dict['taskId'])
for artifact_dict in task.get('payload', {}).get('upstreamArtifacts', [])
]
dependencies = [*cot_input_dependencies, *upstream_artifacts_dependencies]
dependencies = _sort_dependencies_by_name_then_task_id(dependencies)
parent_task_id = get_parent_task_id(task) or get_decision_task_id(task)
parent_task_type = 'parent'
# make sure we deal with the decision task first, or we may populate
# signing:build0:decision before signing:decision
parent_tuple = _craft_dependency_tuple(task_name, parent_task_type, parent_task_id)
dependencies.insert(0, parent_tuple)
log.info('found dependencies: {}'.format(dependencies))
return dependencies | [
"def",
"find_sorted_task_dependencies",
"(",
"task",
",",
"task_name",
",",
"task_id",
")",
":",
"log",
".",
"info",
"(",
"\"find_sorted_task_dependencies {} {}\"",
".",
"format",
"(",
"task_name",
",",
"task_id",
")",
")",
"cot_input_dependencies",
"=",
"[",
"_cr... | Find the taskIds of the chain of trust dependencies of a given task.
Args:
task (dict): the task definition to inspect.
task_name (str): the name of the task, for logging and naming children.
task_id (str): the taskId of the task.
Returns:
list: tuples associating dependent task ``name`` to dependent task ``taskId``. | [
"Find",
"the",
"taskIds",
"of",
"the",
"chain",
"of",
"trust",
"dependencies",
"of",
"a",
"given",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L515-L550 | train | 28,015 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | build_task_dependencies | async def build_task_dependencies(chain, task, name, my_task_id):
"""Recursively build the task dependencies of a task.
Args:
chain (ChainOfTrust): the chain of trust to add to.
task (dict): the task definition to operate on.
name (str): the name of the task to operate on.
my_task_id (str): the taskId of the task to operate on.
Raises:
CoTError: on failure.
"""
log.info("build_task_dependencies {} {}".format(name, my_task_id))
if name.count(':') > chain.context.config['max_chain_length']:
raise CoTError("Too deep recursion!\n{}".format(name))
sorted_dependencies = find_sorted_task_dependencies(task, name, my_task_id)
for task_name, task_id in sorted_dependencies:
if task_id not in chain.dependent_task_ids():
link = LinkOfTrust(chain.context, task_name, task_id)
json_path = link.get_artifact_full_path('task.json')
try:
task_defn = await chain.context.queue.task(task_id)
link.task = task_defn
chain.links.append(link)
# write task json to disk
makedirs(os.path.dirname(json_path))
with open(json_path, 'w') as fh:
fh.write(format_json(task_defn))
await build_task_dependencies(chain, task_defn, task_name, task_id)
except TaskclusterFailure as exc:
raise CoTError(str(exc)) | python | async def build_task_dependencies(chain, task, name, my_task_id):
"""Recursively build the task dependencies of a task.
Args:
chain (ChainOfTrust): the chain of trust to add to.
task (dict): the task definition to operate on.
name (str): the name of the task to operate on.
my_task_id (str): the taskId of the task to operate on.
Raises:
CoTError: on failure.
"""
log.info("build_task_dependencies {} {}".format(name, my_task_id))
if name.count(':') > chain.context.config['max_chain_length']:
raise CoTError("Too deep recursion!\n{}".format(name))
sorted_dependencies = find_sorted_task_dependencies(task, name, my_task_id)
for task_name, task_id in sorted_dependencies:
if task_id not in chain.dependent_task_ids():
link = LinkOfTrust(chain.context, task_name, task_id)
json_path = link.get_artifact_full_path('task.json')
try:
task_defn = await chain.context.queue.task(task_id)
link.task = task_defn
chain.links.append(link)
# write task json to disk
makedirs(os.path.dirname(json_path))
with open(json_path, 'w') as fh:
fh.write(format_json(task_defn))
await build_task_dependencies(chain, task_defn, task_name, task_id)
except TaskclusterFailure as exc:
raise CoTError(str(exc)) | [
"async",
"def",
"build_task_dependencies",
"(",
"chain",
",",
"task",
",",
"name",
",",
"my_task_id",
")",
":",
"log",
".",
"info",
"(",
"\"build_task_dependencies {} {}\"",
".",
"format",
"(",
"name",
",",
"my_task_id",
")",
")",
"if",
"name",
".",
"count",... | Recursively build the task dependencies of a task.
Args:
chain (ChainOfTrust): the chain of trust to add to.
task (dict): the task definition to operate on.
name (str): the name of the task to operate on.
my_task_id (str): the taskId of the task to operate on.
Raises:
CoTError: on failure. | [
"Recursively",
"build",
"the",
"task",
"dependencies",
"of",
"a",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L562-L594 | train | 28,016 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | download_cot | async def download_cot(chain):
"""Download the signed chain of trust artifacts.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
BaseDownloadError: on failure.
"""
artifact_tasks = []
# only deal with chain.links, which are previously finished tasks with
# signed chain of trust artifacts. ``chain.task`` is the current running
# task, and will not have a signed chain of trust artifact yet.
for link in chain.links:
task_id = link.task_id
parent_dir = link.cot_dir
urls = []
unsigned_url = get_artifact_url(chain.context, task_id, 'public/chain-of-trust.json')
urls.append(unsigned_url)
if chain.context.config['verify_cot_signature']:
urls.append(
get_artifact_url(chain.context, task_id, 'public/chain-of-trust.json.sig')
)
artifact_tasks.append(
asyncio.ensure_future(
download_artifacts(
chain.context, urls, parent_dir=parent_dir,
valid_artifact_task_ids=[task_id]
)
)
)
artifacts_paths = await raise_future_exceptions(artifact_tasks)
for path in artifacts_paths:
sha = get_hash(path[0])
log.debug("{} downloaded; hash is {}".format(path[0], sha)) | python | async def download_cot(chain):
"""Download the signed chain of trust artifacts.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
BaseDownloadError: on failure.
"""
artifact_tasks = []
# only deal with chain.links, which are previously finished tasks with
# signed chain of trust artifacts. ``chain.task`` is the current running
# task, and will not have a signed chain of trust artifact yet.
for link in chain.links:
task_id = link.task_id
parent_dir = link.cot_dir
urls = []
unsigned_url = get_artifact_url(chain.context, task_id, 'public/chain-of-trust.json')
urls.append(unsigned_url)
if chain.context.config['verify_cot_signature']:
urls.append(
get_artifact_url(chain.context, task_id, 'public/chain-of-trust.json.sig')
)
artifact_tasks.append(
asyncio.ensure_future(
download_artifacts(
chain.context, urls, parent_dir=parent_dir,
valid_artifact_task_ids=[task_id]
)
)
)
artifacts_paths = await raise_future_exceptions(artifact_tasks)
for path in artifacts_paths:
sha = get_hash(path[0])
log.debug("{} downloaded; hash is {}".format(path[0], sha)) | [
"async",
"def",
"download_cot",
"(",
"chain",
")",
":",
"artifact_tasks",
"=",
"[",
"]",
"# only deal with chain.links, which are previously finished tasks with",
"# signed chain of trust artifacts. ``chain.task`` is the current running",
"# task, and will not have a signed chain of trust... | Download the signed chain of trust artifacts.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
BaseDownloadError: on failure. | [
"Download",
"the",
"signed",
"chain",
"of",
"trust",
"artifacts",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L598-L637 | train | 28,017 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | download_cot_artifact | async def download_cot_artifact(chain, task_id, path):
"""Download an artifact and verify its SHA against the chain of trust.
Args:
chain (ChainOfTrust): the chain of trust object
task_id (str): the task ID to download from
path (str): the relative path to the artifact to download
Returns:
str: the full path of the downloaded artifact
Raises:
CoTError: on failure.
"""
link = chain.get_link(task_id)
log.debug("Verifying {} is in {} cot artifacts...".format(path, task_id))
if not link.cot:
log.warning('Chain of Trust for "{}" in {} does not exist. See above log for more details. \
Skipping download of this artifact'.format(path, task_id))
return
if path not in link.cot['artifacts']:
raise CoTError("path {} not in {} {} chain of trust artifacts!".format(path, link.name, link.task_id))
url = get_artifact_url(chain.context, task_id, path)
loggable_url = get_loggable_url(url)
log.info("Downloading Chain of Trust artifact:\n{}".format(loggable_url))
await download_artifacts(
chain.context, [url], parent_dir=link.cot_dir, valid_artifact_task_ids=[task_id]
)
full_path = link.get_artifact_full_path(path)
for alg, expected_sha in link.cot['artifacts'][path].items():
if alg not in chain.context.config['valid_hash_algorithms']:
raise CoTError("BAD HASH ALGORITHM: {}: {} {}!".format(link.name, alg, full_path))
real_sha = get_hash(full_path, hash_alg=alg)
if expected_sha != real_sha:
raise CoTError("BAD HASH on file {}: {}: Expected {} {}; got {}!".format(
full_path, link.name, alg, expected_sha, real_sha
))
log.debug("{} matches the expected {} {}".format(full_path, alg, expected_sha))
return full_path | python | async def download_cot_artifact(chain, task_id, path):
"""Download an artifact and verify its SHA against the chain of trust.
Args:
chain (ChainOfTrust): the chain of trust object
task_id (str): the task ID to download from
path (str): the relative path to the artifact to download
Returns:
str: the full path of the downloaded artifact
Raises:
CoTError: on failure.
"""
link = chain.get_link(task_id)
log.debug("Verifying {} is in {} cot artifacts...".format(path, task_id))
if not link.cot:
log.warning('Chain of Trust for "{}" in {} does not exist. See above log for more details. \
Skipping download of this artifact'.format(path, task_id))
return
if path not in link.cot['artifacts']:
raise CoTError("path {} not in {} {} chain of trust artifacts!".format(path, link.name, link.task_id))
url = get_artifact_url(chain.context, task_id, path)
loggable_url = get_loggable_url(url)
log.info("Downloading Chain of Trust artifact:\n{}".format(loggable_url))
await download_artifacts(
chain.context, [url], parent_dir=link.cot_dir, valid_artifact_task_ids=[task_id]
)
full_path = link.get_artifact_full_path(path)
for alg, expected_sha in link.cot['artifacts'][path].items():
if alg not in chain.context.config['valid_hash_algorithms']:
raise CoTError("BAD HASH ALGORITHM: {}: {} {}!".format(link.name, alg, full_path))
real_sha = get_hash(full_path, hash_alg=alg)
if expected_sha != real_sha:
raise CoTError("BAD HASH on file {}: {}: Expected {} {}; got {}!".format(
full_path, link.name, alg, expected_sha, real_sha
))
log.debug("{} matches the expected {} {}".format(full_path, alg, expected_sha))
return full_path | [
"async",
"def",
"download_cot_artifact",
"(",
"chain",
",",
"task_id",
",",
"path",
")",
":",
"link",
"=",
"chain",
".",
"get_link",
"(",
"task_id",
")",
"log",
".",
"debug",
"(",
"\"Verifying {} is in {} cot artifacts...\"",
".",
"format",
"(",
"path",
",",
... | Download an artifact and verify its SHA against the chain of trust.
Args:
chain (ChainOfTrust): the chain of trust object
task_id (str): the task ID to download from
path (str): the relative path to the artifact to download
Returns:
str: the full path of the downloaded artifact
Raises:
CoTError: on failure. | [
"Download",
"an",
"artifact",
"and",
"verify",
"its",
"SHA",
"against",
"the",
"chain",
"of",
"trust",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L641-L681 | train | 28,018 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | download_cot_artifacts | async def download_cot_artifacts(chain):
"""Call ``download_cot_artifact`` in parallel for each "upstreamArtifacts".
Optional artifacts are allowed to not be downloaded.
Args:
chain (ChainOfTrust): the chain of trust object
Returns:
list: list of full paths to downloaded artifacts. Failed optional artifacts
aren't returned
Raises:
CoTError: on chain of trust sha validation error, on a mandatory artifact
BaseDownloadError: on download error on a mandatory artifact
"""
upstream_artifacts = chain.task['payload'].get('upstreamArtifacts', [])
all_artifacts_per_task_id = get_all_artifacts_per_task_id(chain, upstream_artifacts)
mandatory_artifact_tasks = []
optional_artifact_tasks = []
for task_id, paths in all_artifacts_per_task_id.items():
for path in paths:
coroutine = asyncio.ensure_future(download_cot_artifact(chain, task_id, path))
if is_artifact_optional(chain, task_id, path):
optional_artifact_tasks.append(coroutine)
else:
mandatory_artifact_tasks.append(coroutine)
mandatory_artifacts_paths = await raise_future_exceptions(mandatory_artifact_tasks)
succeeded_optional_artifacts_paths, failed_optional_artifacts = \
await get_results_and_future_exceptions(optional_artifact_tasks)
if failed_optional_artifacts:
log.warning('Could not download {} artifacts: {}'.format(len(failed_optional_artifacts), failed_optional_artifacts))
return mandatory_artifacts_paths + succeeded_optional_artifacts_paths | python | async def download_cot_artifacts(chain):
"""Call ``download_cot_artifact`` in parallel for each "upstreamArtifacts".
Optional artifacts are allowed to not be downloaded.
Args:
chain (ChainOfTrust): the chain of trust object
Returns:
list: list of full paths to downloaded artifacts. Failed optional artifacts
aren't returned
Raises:
CoTError: on chain of trust sha validation error, on a mandatory artifact
BaseDownloadError: on download error on a mandatory artifact
"""
upstream_artifacts = chain.task['payload'].get('upstreamArtifacts', [])
all_artifacts_per_task_id = get_all_artifacts_per_task_id(chain, upstream_artifacts)
mandatory_artifact_tasks = []
optional_artifact_tasks = []
for task_id, paths in all_artifacts_per_task_id.items():
for path in paths:
coroutine = asyncio.ensure_future(download_cot_artifact(chain, task_id, path))
if is_artifact_optional(chain, task_id, path):
optional_artifact_tasks.append(coroutine)
else:
mandatory_artifact_tasks.append(coroutine)
mandatory_artifacts_paths = await raise_future_exceptions(mandatory_artifact_tasks)
succeeded_optional_artifacts_paths, failed_optional_artifacts = \
await get_results_and_future_exceptions(optional_artifact_tasks)
if failed_optional_artifacts:
log.warning('Could not download {} artifacts: {}'.format(len(failed_optional_artifacts), failed_optional_artifacts))
return mandatory_artifacts_paths + succeeded_optional_artifacts_paths | [
"async",
"def",
"download_cot_artifacts",
"(",
"chain",
")",
":",
"upstream_artifacts",
"=",
"chain",
".",
"task",
"[",
"'payload'",
"]",
".",
"get",
"(",
"'upstreamArtifacts'",
",",
"[",
"]",
")",
"all_artifacts_per_task_id",
"=",
"get_all_artifacts_per_task_id",
... | Call ``download_cot_artifact`` in parallel for each "upstreamArtifacts".
Optional artifacts are allowed to not be downloaded.
Args:
chain (ChainOfTrust): the chain of trust object
Returns:
list: list of full paths to downloaded artifacts. Failed optional artifacts
aren't returned
Raises:
CoTError: on chain of trust sha validation error, on a mandatory artifact
BaseDownloadError: on download error on a mandatory artifact | [
"Call",
"download_cot_artifact",
"in",
"parallel",
"for",
"each",
"upstreamArtifacts",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L685-L723 | train | 28,019 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | is_artifact_optional | def is_artifact_optional(chain, task_id, path):
"""Tells whether an artifact is flagged as optional or not.
Args:
chain (ChainOfTrust): the chain of trust object
task_id (str): the id of the aforementioned task
Returns:
bool: True if artifact is optional
"""
upstream_artifacts = chain.task['payload'].get('upstreamArtifacts', [])
optional_artifacts_per_task_id = get_optional_artifacts_per_task_id(upstream_artifacts)
return path in optional_artifacts_per_task_id.get(task_id, []) | python | def is_artifact_optional(chain, task_id, path):
"""Tells whether an artifact is flagged as optional or not.
Args:
chain (ChainOfTrust): the chain of trust object
task_id (str): the id of the aforementioned task
Returns:
bool: True if artifact is optional
"""
upstream_artifacts = chain.task['payload'].get('upstreamArtifacts', [])
optional_artifacts_per_task_id = get_optional_artifacts_per_task_id(upstream_artifacts)
return path in optional_artifacts_per_task_id.get(task_id, []) | [
"def",
"is_artifact_optional",
"(",
"chain",
",",
"task_id",
",",
"path",
")",
":",
"upstream_artifacts",
"=",
"chain",
".",
"task",
"[",
"'payload'",
"]",
".",
"get",
"(",
"'upstreamArtifacts'",
",",
"[",
"]",
")",
"optional_artifacts_per_task_id",
"=",
"get_... | Tells whether an artifact is flagged as optional or not.
Args:
chain (ChainOfTrust): the chain of trust object
task_id (str): the id of the aforementioned task
Returns:
bool: True if artifact is optional | [
"Tells",
"whether",
"an",
"artifact",
"is",
"flagged",
"as",
"optional",
"or",
"not",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L726-L739 | train | 28,020 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | get_all_artifacts_per_task_id | def get_all_artifacts_per_task_id(chain, upstream_artifacts):
"""Return every artifact to download, including the Chain Of Trust Artifacts.
Args:
chain (ChainOfTrust): the chain of trust object
upstream_artifacts: the list of upstream artifact definitions
Returns:
dict: sorted list of paths to downloaded artifacts ordered by taskId
"""
all_artifacts_per_task_id = {}
for link in chain.links:
# Download task-graph.json for decision+action task cot verification
if link.task_type in PARENT_TASK_TYPES:
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=link.task_id, item='public/task-graph.json'
)
# Download actions.json for decision+action task cot verification
if link.task_type in DECISION_TASK_TYPES:
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=link.task_id, item='public/actions.json'
)
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=link.task_id, item='public/parameters.yml'
)
if upstream_artifacts:
for upstream_dict in upstream_artifacts:
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=upstream_dict['taskId'], item=upstream_dict['paths']
)
# Avoid duplicate paths per task_id
for task_id, paths in all_artifacts_per_task_id.items():
all_artifacts_per_task_id[task_id] = sorted(set(paths))
return all_artifacts_per_task_id | python | def get_all_artifacts_per_task_id(chain, upstream_artifacts):
"""Return every artifact to download, including the Chain Of Trust Artifacts.
Args:
chain (ChainOfTrust): the chain of trust object
upstream_artifacts: the list of upstream artifact definitions
Returns:
dict: sorted list of paths to downloaded artifacts ordered by taskId
"""
all_artifacts_per_task_id = {}
for link in chain.links:
# Download task-graph.json for decision+action task cot verification
if link.task_type in PARENT_TASK_TYPES:
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=link.task_id, item='public/task-graph.json'
)
# Download actions.json for decision+action task cot verification
if link.task_type in DECISION_TASK_TYPES:
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=link.task_id, item='public/actions.json'
)
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=link.task_id, item='public/parameters.yml'
)
if upstream_artifacts:
for upstream_dict in upstream_artifacts:
add_enumerable_item_to_dict(
dict_=all_artifacts_per_task_id, key=upstream_dict['taskId'], item=upstream_dict['paths']
)
# Avoid duplicate paths per task_id
for task_id, paths in all_artifacts_per_task_id.items():
all_artifacts_per_task_id[task_id] = sorted(set(paths))
return all_artifacts_per_task_id | [
"def",
"get_all_artifacts_per_task_id",
"(",
"chain",
",",
"upstream_artifacts",
")",
":",
"all_artifacts_per_task_id",
"=",
"{",
"}",
"for",
"link",
"in",
"chain",
".",
"links",
":",
"# Download task-graph.json for decision+action task cot verification",
"if",
"link",
".... | Return every artifact to download, including the Chain Of Trust Artifacts.
Args:
chain (ChainOfTrust): the chain of trust object
upstream_artifacts: the list of upstream artifact definitions
Returns:
dict: sorted list of paths to downloaded artifacts ordered by taskId | [
"Return",
"every",
"artifact",
"to",
"download",
"including",
"the",
"Chain",
"Of",
"Trust",
"Artifacts",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L742-L779 | train | 28,021 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_link_ed25519_cot_signature | def verify_link_ed25519_cot_signature(chain, link, unsigned_path, signature_path):
"""Verify the ed25519 signatures of the chain of trust artifacts populated in ``download_cot``.
Populate each link.cot with the chain of trust json body.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
(CoTError, ScriptWorkerEd25519Error): on signature verification failure.
"""
if chain.context.config['verify_cot_signature']:
log.debug("Verifying the {} {} {} ed25519 chain of trust signature".format(
link.name, link.task_id, link.worker_impl
))
signature = read_from_file(signature_path, file_type='binary', exception=CoTError)
binary_contents = read_from_file(unsigned_path, file_type='binary', exception=CoTError)
errors = []
verify_key_seeds = chain.context.config['ed25519_public_keys'].get(link.worker_impl, [])
for seed in verify_key_seeds:
try:
verify_key = ed25519_public_key_from_string(seed)
verify_ed25519_signature(
verify_key, binary_contents, signature,
"{} {}: {} ed25519 cot signature doesn't verify against {}: %(exc)s".format(
link.name, link.task_id, link.worker_impl, seed
)
)
log.debug("{} {}: ed25519 cot signature verified.".format(link.name, link.task_id))
break
except ScriptWorkerEd25519Error as exc:
errors.append(str(exc))
else:
errors = errors or [
"{} {}: Unknown error verifying ed25519 cot signature. worker_impl {} verify_keys {}".format(
link.name, link.task_id, link.worker_impl,
verify_key_seeds
)
]
message = "\n".join(errors)
raise CoTError(message)
link.cot = load_json_or_yaml(
unsigned_path, is_path=True, exception=CoTError,
message="{} {}: Invalid unsigned cot json body! %(exc)s".format(link.name, link.task_id)
) | python | def verify_link_ed25519_cot_signature(chain, link, unsigned_path, signature_path):
"""Verify the ed25519 signatures of the chain of trust artifacts populated in ``download_cot``.
Populate each link.cot with the chain of trust json body.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
(CoTError, ScriptWorkerEd25519Error): on signature verification failure.
"""
if chain.context.config['verify_cot_signature']:
log.debug("Verifying the {} {} {} ed25519 chain of trust signature".format(
link.name, link.task_id, link.worker_impl
))
signature = read_from_file(signature_path, file_type='binary', exception=CoTError)
binary_contents = read_from_file(unsigned_path, file_type='binary', exception=CoTError)
errors = []
verify_key_seeds = chain.context.config['ed25519_public_keys'].get(link.worker_impl, [])
for seed in verify_key_seeds:
try:
verify_key = ed25519_public_key_from_string(seed)
verify_ed25519_signature(
verify_key, binary_contents, signature,
"{} {}: {} ed25519 cot signature doesn't verify against {}: %(exc)s".format(
link.name, link.task_id, link.worker_impl, seed
)
)
log.debug("{} {}: ed25519 cot signature verified.".format(link.name, link.task_id))
break
except ScriptWorkerEd25519Error as exc:
errors.append(str(exc))
else:
errors = errors or [
"{} {}: Unknown error verifying ed25519 cot signature. worker_impl {} verify_keys {}".format(
link.name, link.task_id, link.worker_impl,
verify_key_seeds
)
]
message = "\n".join(errors)
raise CoTError(message)
link.cot = load_json_or_yaml(
unsigned_path, is_path=True, exception=CoTError,
message="{} {}: Invalid unsigned cot json body! %(exc)s".format(link.name, link.task_id)
) | [
"def",
"verify_link_ed25519_cot_signature",
"(",
"chain",
",",
"link",
",",
"unsigned_path",
",",
"signature_path",
")",
":",
"if",
"chain",
".",
"context",
".",
"config",
"[",
"'verify_cot_signature'",
"]",
":",
"log",
".",
"debug",
"(",
"\"Verifying the {} {} {}... | Verify the ed25519 signatures of the chain of trust artifacts populated in ``download_cot``.
Populate each link.cot with the chain of trust json body.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
(CoTError, ScriptWorkerEd25519Error): on signature verification failure. | [
"Verify",
"the",
"ed25519",
"signatures",
"of",
"the",
"chain",
"of",
"trust",
"artifacts",
"populated",
"in",
"download_cot",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L783-L828 | train | 28,022 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_cot_signatures | def verify_cot_signatures(chain):
"""Verify the signatures of the chain of trust artifacts populated in ``download_cot``.
Populate each link.cot with the chain of trust json body.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
CoTError: on failure.
"""
for link in chain.links:
unsigned_path = link.get_artifact_full_path('public/chain-of-trust.json')
ed25519_signature_path = link.get_artifact_full_path('public/chain-of-trust.json.sig')
verify_link_ed25519_cot_signature(chain, link, unsigned_path, ed25519_signature_path) | python | def verify_cot_signatures(chain):
"""Verify the signatures of the chain of trust artifacts populated in ``download_cot``.
Populate each link.cot with the chain of trust json body.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
CoTError: on failure.
"""
for link in chain.links:
unsigned_path = link.get_artifact_full_path('public/chain-of-trust.json')
ed25519_signature_path = link.get_artifact_full_path('public/chain-of-trust.json.sig')
verify_link_ed25519_cot_signature(chain, link, unsigned_path, ed25519_signature_path) | [
"def",
"verify_cot_signatures",
"(",
"chain",
")",
":",
"for",
"link",
"in",
"chain",
".",
"links",
":",
"unsigned_path",
"=",
"link",
".",
"get_artifact_full_path",
"(",
"'public/chain-of-trust.json'",
")",
"ed25519_signature_path",
"=",
"link",
".",
"get_artifact_... | Verify the signatures of the chain of trust artifacts populated in ``download_cot``.
Populate each link.cot with the chain of trust json body.
Args:
chain (ChainOfTrust): the chain of trust to add to.
Raises:
CoTError: on failure. | [
"Verify",
"the",
"signatures",
"of",
"the",
"chain",
"of",
"trust",
"artifacts",
"populated",
"in",
"download_cot",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L831-L846 | train | 28,023 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_task_in_task_graph | def verify_task_in_task_graph(task_link, graph_defn, level=logging.CRITICAL):
"""Verify a given task_link's task against a given graph task definition.
This is a helper function for ``verify_link_in_task_graph``; this is split
out so we can call it multiple times when we fuzzy match.
Args:
task_link (LinkOfTrust): the link to try to match
graph_defn (dict): the task definition from the task-graph.json to match
``task_link`` against
level (int, optional): the logging level to use on errors. Defaults to logging.CRITICAL
Raises:
CoTError: on failure
"""
ignore_keys = ("created", "deadline", "expires", "dependencies", "schedulerId")
errors = []
runtime_defn = deepcopy(task_link.task)
# dependencies
# Allow for the decision task ID in the dependencies; otherwise the runtime
# dependencies must be a subset of the graph dependencies.
bad_deps = set(runtime_defn['dependencies']) - set(graph_defn['task']['dependencies'])
# it's OK if a task depends on the decision task
bad_deps = bad_deps - {task_link.decision_task_id}
if bad_deps:
errors.append("{} {} dependencies don't line up!\n{}".format(
task_link.name, task_link.task_id, bad_deps
))
# payload - eliminate the 'expires' key from artifacts because the datestring
# will change
runtime_defn['payload'] = _take_expires_out_from_artifacts_in_payload(runtime_defn['payload'])
graph_defn['task']['payload'] = _take_expires_out_from_artifacts_in_payload(graph_defn['task']['payload'])
# test all non-ignored key/value pairs in the task defn
for key, value in graph_defn['task'].items():
if key in ignore_keys:
continue
if value != runtime_defn[key]:
errors.append("{} {} {} differs!\n graph: {}\n task: {}".format(
task_link.name, task_link.task_id, key,
format_json(value), format_json(runtime_defn[key])
))
raise_on_errors(errors, level=level) | python | def verify_task_in_task_graph(task_link, graph_defn, level=logging.CRITICAL):
"""Verify a given task_link's task against a given graph task definition.
This is a helper function for ``verify_link_in_task_graph``; this is split
out so we can call it multiple times when we fuzzy match.
Args:
task_link (LinkOfTrust): the link to try to match
graph_defn (dict): the task definition from the task-graph.json to match
``task_link`` against
level (int, optional): the logging level to use on errors. Defaults to logging.CRITICAL
Raises:
CoTError: on failure
"""
ignore_keys = ("created", "deadline", "expires", "dependencies", "schedulerId")
errors = []
runtime_defn = deepcopy(task_link.task)
# dependencies
# Allow for the decision task ID in the dependencies; otherwise the runtime
# dependencies must be a subset of the graph dependencies.
bad_deps = set(runtime_defn['dependencies']) - set(graph_defn['task']['dependencies'])
# it's OK if a task depends on the decision task
bad_deps = bad_deps - {task_link.decision_task_id}
if bad_deps:
errors.append("{} {} dependencies don't line up!\n{}".format(
task_link.name, task_link.task_id, bad_deps
))
# payload - eliminate the 'expires' key from artifacts because the datestring
# will change
runtime_defn['payload'] = _take_expires_out_from_artifacts_in_payload(runtime_defn['payload'])
graph_defn['task']['payload'] = _take_expires_out_from_artifacts_in_payload(graph_defn['task']['payload'])
# test all non-ignored key/value pairs in the task defn
for key, value in graph_defn['task'].items():
if key in ignore_keys:
continue
if value != runtime_defn[key]:
errors.append("{} {} {} differs!\n graph: {}\n task: {}".format(
task_link.name, task_link.task_id, key,
format_json(value), format_json(runtime_defn[key])
))
raise_on_errors(errors, level=level) | [
"def",
"verify_task_in_task_graph",
"(",
"task_link",
",",
"graph_defn",
",",
"level",
"=",
"logging",
".",
"CRITICAL",
")",
":",
"ignore_keys",
"=",
"(",
"\"created\"",
",",
"\"deadline\"",
",",
"\"expires\"",
",",
"\"dependencies\"",
",",
"\"schedulerId\"",
")",... | Verify a given task_link's task against a given graph task definition.
This is a helper function for ``verify_link_in_task_graph``; this is split
out so we can call it multiple times when we fuzzy match.
Args:
task_link (LinkOfTrust): the link to try to match
graph_defn (dict): the task definition from the task-graph.json to match
``task_link`` against
level (int, optional): the logging level to use on errors. Defaults to logging.CRITICAL
Raises:
CoTError: on failure | [
"Verify",
"a",
"given",
"task_link",
"s",
"task",
"against",
"a",
"given",
"graph",
"task",
"definition",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L850-L893 | train | 28,024 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_link_in_task_graph | def verify_link_in_task_graph(chain, decision_link, task_link):
"""Compare the runtime task definition against the decision task graph.
Args:
chain (ChainOfTrust): the chain we're operating on.
decision_link (LinkOfTrust): the decision task link
task_link (LinkOfTrust): the task link we're testing
Raises:
CoTError: on failure.
"""
log.info("Verifying the {} {} task definition is part of the {} {} task graph...".format(
task_link.name, task_link.task_id, decision_link.name, decision_link.task_id
))
if task_link.task_id in decision_link.task_graph:
graph_defn = deepcopy(decision_link.task_graph[task_link.task_id])
verify_task_in_task_graph(task_link, graph_defn)
log.info("Found {} in the graph; it's a match".format(task_link.task_id))
return
raise_on_errors(["Can't find task {} {} in {} {} task-graph.json!".format(
task_link.name, task_link.task_id, decision_link.name, decision_link.task_id
)]) | python | def verify_link_in_task_graph(chain, decision_link, task_link):
"""Compare the runtime task definition against the decision task graph.
Args:
chain (ChainOfTrust): the chain we're operating on.
decision_link (LinkOfTrust): the decision task link
task_link (LinkOfTrust): the task link we're testing
Raises:
CoTError: on failure.
"""
log.info("Verifying the {} {} task definition is part of the {} {} task graph...".format(
task_link.name, task_link.task_id, decision_link.name, decision_link.task_id
))
if task_link.task_id in decision_link.task_graph:
graph_defn = deepcopy(decision_link.task_graph[task_link.task_id])
verify_task_in_task_graph(task_link, graph_defn)
log.info("Found {} in the graph; it's a match".format(task_link.task_id))
return
raise_on_errors(["Can't find task {} {} in {} {} task-graph.json!".format(
task_link.name, task_link.task_id, decision_link.name, decision_link.task_id
)]) | [
"def",
"verify_link_in_task_graph",
"(",
"chain",
",",
"decision_link",
",",
"task_link",
")",
":",
"log",
".",
"info",
"(",
"\"Verifying the {} {} task definition is part of the {} {} task graph...\"",
".",
"format",
"(",
"task_link",
".",
"name",
",",
"task_link",
"."... | Compare the runtime task definition against the decision task graph.
Args:
chain (ChainOfTrust): the chain we're operating on.
decision_link (LinkOfTrust): the decision task link
task_link (LinkOfTrust): the task link we're testing
Raises:
CoTError: on failure. | [
"Compare",
"the",
"runtime",
"task",
"definition",
"against",
"the",
"decision",
"task",
"graph",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L915-L937 | train | 28,025 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | get_pushlog_info | async def get_pushlog_info(decision_link):
"""Get pushlog info for a decision LinkOfTrust.
Args:
decision_link (LinkOfTrust): the decision link to get pushlog info about.
Returns:
dict: pushlog info.
"""
source_env_prefix = decision_link.context.config['source_env_prefix']
repo = get_repo(decision_link.task, source_env_prefix)
rev = get_revision(decision_link.task, source_env_prefix)
context = decision_link.context
pushlog_url = context.config['pushlog_url'].format(
repo=repo, revision=rev
)
log.info("Pushlog url {}".format(pushlog_url))
file_path = os.path.join(context.config["work_dir"], "{}_push_log.json".format(decision_link.name))
pushlog_info = await load_json_or_yaml_from_url(
context, pushlog_url, file_path, overwrite=False
)
if len(pushlog_info['pushes']) != 1:
log.warning("Pushlog error: expected a single push at {} but got {}!".format(
pushlog_url, pushlog_info['pushes']
))
return pushlog_info | python | async def get_pushlog_info(decision_link):
"""Get pushlog info for a decision LinkOfTrust.
Args:
decision_link (LinkOfTrust): the decision link to get pushlog info about.
Returns:
dict: pushlog info.
"""
source_env_prefix = decision_link.context.config['source_env_prefix']
repo = get_repo(decision_link.task, source_env_prefix)
rev = get_revision(decision_link.task, source_env_prefix)
context = decision_link.context
pushlog_url = context.config['pushlog_url'].format(
repo=repo, revision=rev
)
log.info("Pushlog url {}".format(pushlog_url))
file_path = os.path.join(context.config["work_dir"], "{}_push_log.json".format(decision_link.name))
pushlog_info = await load_json_or_yaml_from_url(
context, pushlog_url, file_path, overwrite=False
)
if len(pushlog_info['pushes']) != 1:
log.warning("Pushlog error: expected a single push at {} but got {}!".format(
pushlog_url, pushlog_info['pushes']
))
return pushlog_info | [
"async",
"def",
"get_pushlog_info",
"(",
"decision_link",
")",
":",
"source_env_prefix",
"=",
"decision_link",
".",
"context",
".",
"config",
"[",
"'source_env_prefix'",
"]",
"repo",
"=",
"get_repo",
"(",
"decision_link",
".",
"task",
",",
"source_env_prefix",
")"... | Get pushlog info for a decision LinkOfTrust.
Args:
decision_link (LinkOfTrust): the decision link to get pushlog info about.
Returns:
dict: pushlog info. | [
"Get",
"pushlog",
"info",
"for",
"a",
"decision",
"LinkOfTrust",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L941-L967 | train | 28,026 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | get_scm_level | async def get_scm_level(context, project):
"""Get the scm level for a project from ``projects.yml``.
We define all known projects in ``projects.yml``. Let's make sure we have
it populated in ``context``, then return the scm level of ``project``.
SCM levels are an integer, 1-3, matching Mozilla commit levels.
https://www.mozilla.org/en-US/about/governance/policies/commit/access-policy/
Args:
context (scriptworker.context.Context): the scriptworker context
project (str): the project to get the scm level for.
Returns:
str: the level of the project, as a string.
"""
await context.populate_projects()
level = context.projects[project]['access'].replace("scm_level_", "")
return level | python | async def get_scm_level(context, project):
"""Get the scm level for a project from ``projects.yml``.
We define all known projects in ``projects.yml``. Let's make sure we have
it populated in ``context``, then return the scm level of ``project``.
SCM levels are an integer, 1-3, matching Mozilla commit levels.
https://www.mozilla.org/en-US/about/governance/policies/commit/access-policy/
Args:
context (scriptworker.context.Context): the scriptworker context
project (str): the project to get the scm level for.
Returns:
str: the level of the project, as a string.
"""
await context.populate_projects()
level = context.projects[project]['access'].replace("scm_level_", "")
return level | [
"async",
"def",
"get_scm_level",
"(",
"context",
",",
"project",
")",
":",
"await",
"context",
".",
"populate_projects",
"(",
")",
"level",
"=",
"context",
".",
"projects",
"[",
"project",
"]",
"[",
"'access'",
"]",
".",
"replace",
"(",
"\"scm_level_\"",
"... | Get the scm level for a project from ``projects.yml``.
We define all known projects in ``projects.yml``. Let's make sure we have
it populated in ``context``, then return the scm level of ``project``.
SCM levels are an integer, 1-3, matching Mozilla commit levels.
https://www.mozilla.org/en-US/about/governance/policies/commit/access-policy/
Args:
context (scriptworker.context.Context): the scriptworker context
project (str): the project to get the scm level for.
Returns:
str: the level of the project, as a string. | [
"Get",
"the",
"scm",
"level",
"for",
"a",
"project",
"from",
"projects",
".",
"yml",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L971-L990 | train | 28,027 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | populate_jsone_context | async def populate_jsone_context(chain, parent_link, decision_link, tasks_for):
"""Populate the json-e context to rebuild ``parent_link``'s task definition.
This defines the context that `.taskcluster.yml` expects to be rendered
with. See comments at the top of that file for details.
Args:
chain (ChainOfTrust): the chain of trust to add to.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Raises:
CoTError, KeyError, ValueError: on failure.
Returns:
dict: the json-e context.
"""
task_ids = {
"default": parent_link.task_id,
"decision": decision_link.task_id,
}
source_url = get_source_url(decision_link)
project = get_and_check_project(chain.context.config['valid_vcs_rules'], source_url)
log.debug("task_ids: {}".format(task_ids))
jsone_context = {
'now': parent_link.task['created'],
'as_slugid': lambda x: task_ids.get(x, task_ids['default']),
'tasks_for': tasks_for,
'repository': {
'url': get_repo(decision_link.task, decision_link.context.config['source_env_prefix']),
'project': project,
},
'ownTaskId': parent_link.task_id,
'taskId': None
}
if chain.context.config['cot_product'] in ('mobile', 'application-services'):
if tasks_for == 'github-release':
jsone_context.update(
await _get_additional_github_releases_jsone_context(decision_link)
)
elif tasks_for == 'cron':
jsone_context.update(_get_additional_git_cron_jsone_context(decision_link))
elif tasks_for == 'github-pull-request':
jsone_context.update(
await _get_additional_github_pull_request_jsone_context(decision_link)
)
elif tasks_for == 'github-push':
jsone_context.update(
await _get_additional_github_push_jsone_context(decision_link)
)
else:
raise CoTError('Unknown tasks_for "{}" for cot_product "mobile"!'.format(tasks_for))
else:
jsone_context['repository']['level'] = await get_scm_level(chain.context, project)
if tasks_for == 'action':
jsone_context.update(
await _get_additional_hg_action_jsone_context(parent_link, decision_link)
)
elif tasks_for == 'hg-push':
jsone_context.update(
await _get_additional_hg_push_jsone_context(parent_link, decision_link)
)
elif tasks_for == 'cron':
jsone_context.update(
await _get_additional_hg_cron_jsone_context(parent_link, decision_link)
)
else:
raise CoTError("Unknown tasks_for {}!".format(tasks_for))
log.debug("{} json-e context:".format(parent_link.name))
# format_json() breaks on lambda values; use pprint.pformat here.
log.debug(pprint.pformat(jsone_context))
return jsone_context | python | async def populate_jsone_context(chain, parent_link, decision_link, tasks_for):
"""Populate the json-e context to rebuild ``parent_link``'s task definition.
This defines the context that `.taskcluster.yml` expects to be rendered
with. See comments at the top of that file for details.
Args:
chain (ChainOfTrust): the chain of trust to add to.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Raises:
CoTError, KeyError, ValueError: on failure.
Returns:
dict: the json-e context.
"""
task_ids = {
"default": parent_link.task_id,
"decision": decision_link.task_id,
}
source_url = get_source_url(decision_link)
project = get_and_check_project(chain.context.config['valid_vcs_rules'], source_url)
log.debug("task_ids: {}".format(task_ids))
jsone_context = {
'now': parent_link.task['created'],
'as_slugid': lambda x: task_ids.get(x, task_ids['default']),
'tasks_for': tasks_for,
'repository': {
'url': get_repo(decision_link.task, decision_link.context.config['source_env_prefix']),
'project': project,
},
'ownTaskId': parent_link.task_id,
'taskId': None
}
if chain.context.config['cot_product'] in ('mobile', 'application-services'):
if tasks_for == 'github-release':
jsone_context.update(
await _get_additional_github_releases_jsone_context(decision_link)
)
elif tasks_for == 'cron':
jsone_context.update(_get_additional_git_cron_jsone_context(decision_link))
elif tasks_for == 'github-pull-request':
jsone_context.update(
await _get_additional_github_pull_request_jsone_context(decision_link)
)
elif tasks_for == 'github-push':
jsone_context.update(
await _get_additional_github_push_jsone_context(decision_link)
)
else:
raise CoTError('Unknown tasks_for "{}" for cot_product "mobile"!'.format(tasks_for))
else:
jsone_context['repository']['level'] = await get_scm_level(chain.context, project)
if tasks_for == 'action':
jsone_context.update(
await _get_additional_hg_action_jsone_context(parent_link, decision_link)
)
elif tasks_for == 'hg-push':
jsone_context.update(
await _get_additional_hg_push_jsone_context(parent_link, decision_link)
)
elif tasks_for == 'cron':
jsone_context.update(
await _get_additional_hg_cron_jsone_context(parent_link, decision_link)
)
else:
raise CoTError("Unknown tasks_for {}!".format(tasks_for))
log.debug("{} json-e context:".format(parent_link.name))
# format_json() breaks on lambda values; use pprint.pformat here.
log.debug(pprint.pformat(jsone_context))
return jsone_context | [
"async",
"def",
"populate_jsone_context",
"(",
"chain",
",",
"parent_link",
",",
"decision_link",
",",
"tasks_for",
")",
":",
"task_ids",
"=",
"{",
"\"default\"",
":",
"parent_link",
".",
"task_id",
",",
"\"decision\"",
":",
"decision_link",
".",
"task_id",
",",... | Populate the json-e context to rebuild ``parent_link``'s task definition.
This defines the context that `.taskcluster.yml` expects to be rendered
with. See comments at the top of that file for details.
Args:
chain (ChainOfTrust): the chain of trust to add to.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Raises:
CoTError, KeyError, ValueError: on failure.
Returns:
dict: the json-e context. | [
"Populate",
"the",
"json",
"-",
"e",
"context",
"to",
"rebuild",
"parent_link",
"s",
"task",
"definition",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1228-L1305 | train | 28,028 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | get_in_tree_template | async def get_in_tree_template(link):
"""Get the in-tree json-e template for a given link.
By convention, this template is SOURCE_REPO/.taskcluster.yml.
Args:
link (LinkOfTrust): the parent link to get the source url from.
Raises:
CoTError: on non-yaml `source_url`
KeyError: on non-well-formed source template
Returns:
dict: the first task in the template.
"""
context = link.context
source_url = get_source_url(link)
if not source_url.endswith(('.yml', '.yaml')):
raise CoTError("{} source url {} doesn't end in .yml or .yaml!".format(
link.name, source_url
))
tmpl = await load_json_or_yaml_from_url(
context, source_url, os.path.join(
context.config["work_dir"], "{}_taskcluster.yml".format(link.name)
)
)
return tmpl | python | async def get_in_tree_template(link):
"""Get the in-tree json-e template for a given link.
By convention, this template is SOURCE_REPO/.taskcluster.yml.
Args:
link (LinkOfTrust): the parent link to get the source url from.
Raises:
CoTError: on non-yaml `source_url`
KeyError: on non-well-formed source template
Returns:
dict: the first task in the template.
"""
context = link.context
source_url = get_source_url(link)
if not source_url.endswith(('.yml', '.yaml')):
raise CoTError("{} source url {} doesn't end in .yml or .yaml!".format(
link.name, source_url
))
tmpl = await load_json_or_yaml_from_url(
context, source_url, os.path.join(
context.config["work_dir"], "{}_taskcluster.yml".format(link.name)
)
)
return tmpl | [
"async",
"def",
"get_in_tree_template",
"(",
"link",
")",
":",
"context",
"=",
"link",
".",
"context",
"source_url",
"=",
"get_source_url",
"(",
"link",
")",
"if",
"not",
"source_url",
".",
"endswith",
"(",
"(",
"'.yml'",
",",
"'.yaml'",
")",
")",
":",
"... | Get the in-tree json-e template for a given link.
By convention, this template is SOURCE_REPO/.taskcluster.yml.
Args:
link (LinkOfTrust): the parent link to get the source url from.
Raises:
CoTError: on non-yaml `source_url`
KeyError: on non-well-formed source template
Returns:
dict: the first task in the template. | [
"Get",
"the",
"in",
"-",
"tree",
"json",
"-",
"e",
"template",
"for",
"a",
"given",
"link",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1309-L1336 | train | 28,029 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | get_action_context_and_template | async def get_action_context_and_template(chain, parent_link, decision_link):
"""Get the appropriate json-e context and template for an action task.
Args:
chain (ChainOfTrust): the chain of trust.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Returns:
(dict, dict): the json-e context and template.
"""
actions_path = decision_link.get_artifact_full_path('public/actions.json')
all_actions = load_json_or_yaml(actions_path, is_path=True)['actions']
action_name = get_action_callback_name(parent_link.task)
action_defn = _get_action_from_actions_json(all_actions, action_name)
jsone_context = await populate_jsone_context(chain, parent_link, decision_link, "action")
if 'task' in action_defn and chain.context.config['min_cot_version'] <= 2:
tmpl = {'tasks': [action_defn['task']]}
elif action_defn.get('kind') == 'hook':
# action-hook.
in_tree_tmpl = await get_in_tree_template(decision_link)
action_perm = _get_action_perm(action_defn)
tmpl = _wrap_action_hook_with_let(in_tree_tmpl, action_perm)
# define the JSON-e context with which the hook's task template was
# rendered, defined at
# https://docs.taskcluster.net/docs/reference/core/taskcluster-hooks/docs/firing-hooks#triggerhook
# This is created by working backward from the json-e context the
# .taskcluster.yml expects
jsone_context = {
'payload': _render_action_hook_payload(
action_defn, jsone_context, parent_link
),
'taskId': parent_link.task_id,
'now': jsone_context['now'],
'as_slugid': jsone_context['as_slugid'],
'clientId': jsone_context.get('clientId'),
}
elif action_defn.get('kind') == 'task':
# XXX Get rid of this block when all actions are hooks
tmpl = await get_in_tree_template(decision_link)
for k in ('action', 'push', 'repository'):
jsone_context[k] = deepcopy(action_defn['hookPayload']['decision'].get(k, {}))
jsone_context['action']['repo_scope'] = get_repo_scope(parent_link.task, parent_link.name)
else:
raise CoTError('Unknown action kind `{kind}` for action `{name}`.'.format(
kind=action_defn.get('kind', '<MISSING>'),
name=action_defn.get('name', '<MISSING>'),
))
return jsone_context, tmpl | python | async def get_action_context_and_template(chain, parent_link, decision_link):
"""Get the appropriate json-e context and template for an action task.
Args:
chain (ChainOfTrust): the chain of trust.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Returns:
(dict, dict): the json-e context and template.
"""
actions_path = decision_link.get_artifact_full_path('public/actions.json')
all_actions = load_json_or_yaml(actions_path, is_path=True)['actions']
action_name = get_action_callback_name(parent_link.task)
action_defn = _get_action_from_actions_json(all_actions, action_name)
jsone_context = await populate_jsone_context(chain, parent_link, decision_link, "action")
if 'task' in action_defn and chain.context.config['min_cot_version'] <= 2:
tmpl = {'tasks': [action_defn['task']]}
elif action_defn.get('kind') == 'hook':
# action-hook.
in_tree_tmpl = await get_in_tree_template(decision_link)
action_perm = _get_action_perm(action_defn)
tmpl = _wrap_action_hook_with_let(in_tree_tmpl, action_perm)
# define the JSON-e context with which the hook's task template was
# rendered, defined at
# https://docs.taskcluster.net/docs/reference/core/taskcluster-hooks/docs/firing-hooks#triggerhook
# This is created by working backward from the json-e context the
# .taskcluster.yml expects
jsone_context = {
'payload': _render_action_hook_payload(
action_defn, jsone_context, parent_link
),
'taskId': parent_link.task_id,
'now': jsone_context['now'],
'as_slugid': jsone_context['as_slugid'],
'clientId': jsone_context.get('clientId'),
}
elif action_defn.get('kind') == 'task':
# XXX Get rid of this block when all actions are hooks
tmpl = await get_in_tree_template(decision_link)
for k in ('action', 'push', 'repository'):
jsone_context[k] = deepcopy(action_defn['hookPayload']['decision'].get(k, {}))
jsone_context['action']['repo_scope'] = get_repo_scope(parent_link.task, parent_link.name)
else:
raise CoTError('Unknown action kind `{kind}` for action `{name}`.'.format(
kind=action_defn.get('kind', '<MISSING>'),
name=action_defn.get('name', '<MISSING>'),
))
return jsone_context, tmpl | [
"async",
"def",
"get_action_context_and_template",
"(",
"chain",
",",
"parent_link",
",",
"decision_link",
")",
":",
"actions_path",
"=",
"decision_link",
".",
"get_artifact_full_path",
"(",
"'public/actions.json'",
")",
"all_actions",
"=",
"load_json_or_yaml",
"(",
"ac... | Get the appropriate json-e context and template for an action task.
Args:
chain (ChainOfTrust): the chain of trust.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Returns:
(dict, dict): the json-e context and template. | [
"Get",
"the",
"appropriate",
"json",
"-",
"e",
"context",
"and",
"template",
"for",
"an",
"action",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1414-L1467 | train | 28,030 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | get_jsone_context_and_template | async def get_jsone_context_and_template(chain, parent_link, decision_link, tasks_for):
"""Get the appropriate json-e context and template for any parent task.
Args:
chain (ChainOfTrust): the chain of trust.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Returns:
(dict, dict): the json-e context and template.
"""
if tasks_for == 'action':
jsone_context, tmpl = await get_action_context_and_template(
chain, parent_link, decision_link
)
else:
tmpl = await get_in_tree_template(decision_link)
jsone_context = await populate_jsone_context(
chain, parent_link, decision_link, tasks_for
)
return jsone_context, tmpl | python | async def get_jsone_context_and_template(chain, parent_link, decision_link, tasks_for):
"""Get the appropriate json-e context and template for any parent task.
Args:
chain (ChainOfTrust): the chain of trust.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Returns:
(dict, dict): the json-e context and template.
"""
if tasks_for == 'action':
jsone_context, tmpl = await get_action_context_and_template(
chain, parent_link, decision_link
)
else:
tmpl = await get_in_tree_template(decision_link)
jsone_context = await populate_jsone_context(
chain, parent_link, decision_link, tasks_for
)
return jsone_context, tmpl | [
"async",
"def",
"get_jsone_context_and_template",
"(",
"chain",
",",
"parent_link",
",",
"decision_link",
",",
"tasks_for",
")",
":",
"if",
"tasks_for",
"==",
"'action'",
":",
"jsone_context",
",",
"tmpl",
"=",
"await",
"get_action_context_and_template",
"(",
"chain... | Get the appropriate json-e context and template for any parent task.
Args:
chain (ChainOfTrust): the chain of trust.
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the parent link's decision task link.
tasks_for (str): the reason the parent link was created (cron,
hg-push, action)
Returns:
(dict, dict): the json-e context and template. | [
"Get",
"the",
"appropriate",
"json",
"-",
"e",
"context",
"and",
"template",
"for",
"any",
"parent",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1471-L1494 | train | 28,031 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | check_and_update_action_task_group_id | def check_and_update_action_task_group_id(parent_link, decision_link, rebuilt_definitions):
"""Update the ``ACTION_TASK_GROUP_ID`` of an action after verifying.
Actions have varying ``ACTION_TASK_GROUP_ID`` behavior. Release Promotion
action tasks set the ``ACTION_TASK_GROUP_ID`` to match the action ``taskId``
so the large set of release tasks have their own taskgroup. Non-relpro
action tasks set the ``ACTION_TASK_GROUP_ID`` to match the decision
``taskId``, so tasks are more discoverable inside the original on-push
taskgroup.
This poses a json-e task definition problem, hence this function.
This function first checks to make sure the ``ACTION_TASK_GROUP_ID`` is
a member of ``{action_task_id, decision_task_id}``. Then it makes sure
the ``ACTION_TASK_GROUP_ID`` in the ``rebuilt_definition`` is set to the
``parent_link.task``'s ``ACTION_TASK_GROUP_ID`` so the json-e comparison
doesn't fail out.
Ideally, we want to obsolete and remove this function.
Args:
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the decision link to test.
rebuilt_definitions (dict): the rebuilt definitions to check and update.
Raises:
CoTError: on failure.
"""
rebuilt_gid = rebuilt_definitions['tasks'][0]['payload']['env']['ACTION_TASK_GROUP_ID']
runtime_gid = parent_link.task['payload']['env']['ACTION_TASK_GROUP_ID']
acceptable_gids = {parent_link.task_id, decision_link.task_id}
if rebuilt_gid not in acceptable_gids:
raise CoTError("{} ACTION_TASK_GROUP_ID {} not in {}!".format(
parent_link.name, rebuilt_gid, acceptable_gids
))
if runtime_gid != rebuilt_gid:
log.debug("runtime gid {} rebuilt gid {}".format(runtime_gid, rebuilt_gid))
rebuilt_definitions['tasks'][0]['payload']['env']['ACTION_TASK_GROUP_ID'] = runtime_gid | python | def check_and_update_action_task_group_id(parent_link, decision_link, rebuilt_definitions):
"""Update the ``ACTION_TASK_GROUP_ID`` of an action after verifying.
Actions have varying ``ACTION_TASK_GROUP_ID`` behavior. Release Promotion
action tasks set the ``ACTION_TASK_GROUP_ID`` to match the action ``taskId``
so the large set of release tasks have their own taskgroup. Non-relpro
action tasks set the ``ACTION_TASK_GROUP_ID`` to match the decision
``taskId``, so tasks are more discoverable inside the original on-push
taskgroup.
This poses a json-e task definition problem, hence this function.
This function first checks to make sure the ``ACTION_TASK_GROUP_ID`` is
a member of ``{action_task_id, decision_task_id}``. Then it makes sure
the ``ACTION_TASK_GROUP_ID`` in the ``rebuilt_definition`` is set to the
``parent_link.task``'s ``ACTION_TASK_GROUP_ID`` so the json-e comparison
doesn't fail out.
Ideally, we want to obsolete and remove this function.
Args:
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the decision link to test.
rebuilt_definitions (dict): the rebuilt definitions to check and update.
Raises:
CoTError: on failure.
"""
rebuilt_gid = rebuilt_definitions['tasks'][0]['payload']['env']['ACTION_TASK_GROUP_ID']
runtime_gid = parent_link.task['payload']['env']['ACTION_TASK_GROUP_ID']
acceptable_gids = {parent_link.task_id, decision_link.task_id}
if rebuilt_gid not in acceptable_gids:
raise CoTError("{} ACTION_TASK_GROUP_ID {} not in {}!".format(
parent_link.name, rebuilt_gid, acceptable_gids
))
if runtime_gid != rebuilt_gid:
log.debug("runtime gid {} rebuilt gid {}".format(runtime_gid, rebuilt_gid))
rebuilt_definitions['tasks'][0]['payload']['env']['ACTION_TASK_GROUP_ID'] = runtime_gid | [
"def",
"check_and_update_action_task_group_id",
"(",
"parent_link",
",",
"decision_link",
",",
"rebuilt_definitions",
")",
":",
"rebuilt_gid",
"=",
"rebuilt_definitions",
"[",
"'tasks'",
"]",
"[",
"0",
"]",
"[",
"'payload'",
"]",
"[",
"'env'",
"]",
"[",
"'ACTION_T... | Update the ``ACTION_TASK_GROUP_ID`` of an action after verifying.
Actions have varying ``ACTION_TASK_GROUP_ID`` behavior. Release Promotion
action tasks set the ``ACTION_TASK_GROUP_ID`` to match the action ``taskId``
so the large set of release tasks have their own taskgroup. Non-relpro
action tasks set the ``ACTION_TASK_GROUP_ID`` to match the decision
``taskId``, so tasks are more discoverable inside the original on-push
taskgroup.
This poses a json-e task definition problem, hence this function.
This function first checks to make sure the ``ACTION_TASK_GROUP_ID`` is
a member of ``{action_task_id, decision_task_id}``. Then it makes sure
the ``ACTION_TASK_GROUP_ID`` in the ``rebuilt_definition`` is set to the
``parent_link.task``'s ``ACTION_TASK_GROUP_ID`` so the json-e comparison
doesn't fail out.
Ideally, we want to obsolete and remove this function.
Args:
parent_link (LinkOfTrust): the parent link to test.
decision_link (LinkOfTrust): the decision link to test.
rebuilt_definitions (dict): the rebuilt definitions to check and update.
Raises:
CoTError: on failure. | [
"Update",
"the",
"ACTION_TASK_GROUP_ID",
"of",
"an",
"action",
"after",
"verifying",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1547-L1585 | train | 28,032 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | compare_jsone_task_definition | def compare_jsone_task_definition(parent_link, rebuilt_definitions):
"""Compare the json-e rebuilt task definition vs the runtime definition.
Args:
parent_link (LinkOfTrust): the parent link to test.
rebuilt_definitions (dict): the rebuilt task definitions.
Raises:
CoTError: on failure.
"""
diffs = []
for compare_definition in rebuilt_definitions['tasks']:
# Rebuilt decision tasks have an extra `taskId`; remove
if 'taskId' in compare_definition:
del(compare_definition['taskId'])
# remove key/value pairs where the value is empty, since json-e drops
# them instead of keeping them with a None/{}/[] value.
compare_definition = remove_empty_keys(compare_definition)
runtime_definition = remove_empty_keys(parent_link.task)
diff = list(dictdiffer.diff(compare_definition, runtime_definition))
if diff:
diffs.append(pprint.pformat(diff))
continue
log.info("{}: Good.".format(parent_link.name))
break
else:
error_msg = "{} {}: the runtime task doesn't match any rebuilt definition!\n{}".format(
parent_link.name, parent_link.task_id, pprint.pformat(diffs)
)
log.critical(error_msg)
raise CoTError(error_msg) | python | def compare_jsone_task_definition(parent_link, rebuilt_definitions):
"""Compare the json-e rebuilt task definition vs the runtime definition.
Args:
parent_link (LinkOfTrust): the parent link to test.
rebuilt_definitions (dict): the rebuilt task definitions.
Raises:
CoTError: on failure.
"""
diffs = []
for compare_definition in rebuilt_definitions['tasks']:
# Rebuilt decision tasks have an extra `taskId`; remove
if 'taskId' in compare_definition:
del(compare_definition['taskId'])
# remove key/value pairs where the value is empty, since json-e drops
# them instead of keeping them with a None/{}/[] value.
compare_definition = remove_empty_keys(compare_definition)
runtime_definition = remove_empty_keys(parent_link.task)
diff = list(dictdiffer.diff(compare_definition, runtime_definition))
if diff:
diffs.append(pprint.pformat(diff))
continue
log.info("{}: Good.".format(parent_link.name))
break
else:
error_msg = "{} {}: the runtime task doesn't match any rebuilt definition!\n{}".format(
parent_link.name, parent_link.task_id, pprint.pformat(diffs)
)
log.critical(error_msg)
raise CoTError(error_msg) | [
"def",
"compare_jsone_task_definition",
"(",
"parent_link",
",",
"rebuilt_definitions",
")",
":",
"diffs",
"=",
"[",
"]",
"for",
"compare_definition",
"in",
"rebuilt_definitions",
"[",
"'tasks'",
"]",
":",
"# Rebuilt decision tasks have an extra `taskId`; remove",
"if",
"... | Compare the json-e rebuilt task definition vs the runtime definition.
Args:
parent_link (LinkOfTrust): the parent link to test.
rebuilt_definitions (dict): the rebuilt task definitions.
Raises:
CoTError: on failure. | [
"Compare",
"the",
"json",
"-",
"e",
"rebuilt",
"task",
"definition",
"vs",
"the",
"runtime",
"definition",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1589-L1621 | train | 28,033 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_parent_task | async def verify_parent_task(chain, link):
"""Verify the parent task Link.
Action task verification is currently in the same verification function as
decision tasks, because sometimes we'll have an action task masquerading as
a decision task, e.g. in templatized actions for release graphs. To make
sure our guess of decision or action task isn't fatal, we call this
function; this function uses ``is_action()`` to determine how to verify
the task.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
Raises:
CoTError: on chain of trust verification error.
"""
worker_type = get_worker_type(link.task)
if worker_type not in chain.context.config['valid_decision_worker_types']:
raise CoTError("{} is not a valid decision workerType!".format(worker_type))
if chain is not link:
# make sure all tasks generated from this parent task match the published
# task-graph.json. Not applicable if this link is the ChainOfTrust object,
# since this task won't have generated a task-graph.json yet.
path = link.get_artifact_full_path('public/task-graph.json')
if not os.path.exists(path):
raise CoTError("{} {}: {} doesn't exist!".format(link.name, link.task_id, path))
link.task_graph = load_json_or_yaml(
path, is_path=True, exception=CoTError, message="Can't load {}! %(exc)s".format(path)
)
# This check may want to move to a per-task check?
for target_link in chain.get_all_links_in_chain():
# Verify the target's task is in the parent task's task graph, unless
# it's this task or a parent task.
# (Decision tasks will not exist in a parent task's task-graph.json;
# action tasks, which are generated later, will also be missing.)
# https://github.com/mozilla-releng/scriptworker/issues/77
if target_link.parent_task_id == link.task_id and \
target_link.task_id != link.task_id and \
target_link.task_type not in PARENT_TASK_TYPES:
verify_link_in_task_graph(chain, link, target_link)
try:
await verify_parent_task_definition(chain, link)
except (BaseDownloadError, KeyError) as e:
raise CoTError(e) | python | async def verify_parent_task(chain, link):
"""Verify the parent task Link.
Action task verification is currently in the same verification function as
decision tasks, because sometimes we'll have an action task masquerading as
a decision task, e.g. in templatized actions for release graphs. To make
sure our guess of decision or action task isn't fatal, we call this
function; this function uses ``is_action()`` to determine how to verify
the task.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
Raises:
CoTError: on chain of trust verification error.
"""
worker_type = get_worker_type(link.task)
if worker_type not in chain.context.config['valid_decision_worker_types']:
raise CoTError("{} is not a valid decision workerType!".format(worker_type))
if chain is not link:
# make sure all tasks generated from this parent task match the published
# task-graph.json. Not applicable if this link is the ChainOfTrust object,
# since this task won't have generated a task-graph.json yet.
path = link.get_artifact_full_path('public/task-graph.json')
if not os.path.exists(path):
raise CoTError("{} {}: {} doesn't exist!".format(link.name, link.task_id, path))
link.task_graph = load_json_or_yaml(
path, is_path=True, exception=CoTError, message="Can't load {}! %(exc)s".format(path)
)
# This check may want to move to a per-task check?
for target_link in chain.get_all_links_in_chain():
# Verify the target's task is in the parent task's task graph, unless
# it's this task or a parent task.
# (Decision tasks will not exist in a parent task's task-graph.json;
# action tasks, which are generated later, will also be missing.)
# https://github.com/mozilla-releng/scriptworker/issues/77
if target_link.parent_task_id == link.task_id and \
target_link.task_id != link.task_id and \
target_link.task_type not in PARENT_TASK_TYPES:
verify_link_in_task_graph(chain, link, target_link)
try:
await verify_parent_task_definition(chain, link)
except (BaseDownloadError, KeyError) as e:
raise CoTError(e) | [
"async",
"def",
"verify_parent_task",
"(",
"chain",
",",
"link",
")",
":",
"worker_type",
"=",
"get_worker_type",
"(",
"link",
".",
"task",
")",
"if",
"worker_type",
"not",
"in",
"chain",
".",
"context",
".",
"config",
"[",
"'valid_decision_worker_types'",
"]"... | Verify the parent task Link.
Action task verification is currently in the same verification function as
decision tasks, because sometimes we'll have an action task masquerading as
a decision task, e.g. in templatized actions for release graphs. To make
sure our guess of decision or action task isn't fatal, we call this
function; this function uses ``is_action()`` to determine how to verify
the task.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
Raises:
CoTError: on chain of trust verification error. | [
"Verify",
"the",
"parent",
"task",
"Link",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1625-L1670 | train | 28,034 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_docker_image_task | async def verify_docker_image_task(chain, link):
"""Verify the docker image Link.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
"""
errors = []
# workerType
worker_type = get_worker_type(link.task)
if worker_type not in chain.context.config['valid_docker_image_worker_types']:
errors.append("{} is not a valid docker-image workerType!".format(worker_type))
raise_on_errors(errors) | python | async def verify_docker_image_task(chain, link):
"""Verify the docker image Link.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking.
"""
errors = []
# workerType
worker_type = get_worker_type(link.task)
if worker_type not in chain.context.config['valid_docker_image_worker_types']:
errors.append("{} is not a valid docker-image workerType!".format(worker_type))
raise_on_errors(errors) | [
"async",
"def",
"verify_docker_image_task",
"(",
"chain",
",",
"link",
")",
":",
"errors",
"=",
"[",
"]",
"# workerType",
"worker_type",
"=",
"get_worker_type",
"(",
"link",
".",
"task",
")",
"if",
"worker_type",
"not",
"in",
"chain",
".",
"context",
".",
... | Verify the docker image Link.
Args:
chain (ChainOfTrust): the chain we're operating on.
link (LinkOfTrust): the task link we're checking. | [
"Verify",
"the",
"docker",
"image",
"Link",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1690-L1703 | train | 28,035 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | check_num_tasks | def check_num_tasks(chain, task_count):
"""Make sure there are a specific number of specific task types.
Currently we only check decision tasks.
Args:
chain (ChainOfTrust): the chain we're operating on
task_count (dict): mapping task type to the number of links.
Raises:
CoTError: on failure.
"""
errors = []
# hardcode for now. If we need a different set of constraints, either
# go by cot_product settings or by task_count['docker-image'] + 1
min_decision_tasks = 1
if task_count['decision'] < min_decision_tasks:
errors.append("{} decision tasks; we must have at least {}!".format(
task_count['decision'], min_decision_tasks
))
raise_on_errors(errors) | python | def check_num_tasks(chain, task_count):
"""Make sure there are a specific number of specific task types.
Currently we only check decision tasks.
Args:
chain (ChainOfTrust): the chain we're operating on
task_count (dict): mapping task type to the number of links.
Raises:
CoTError: on failure.
"""
errors = []
# hardcode for now. If we need a different set of constraints, either
# go by cot_product settings or by task_count['docker-image'] + 1
min_decision_tasks = 1
if task_count['decision'] < min_decision_tasks:
errors.append("{} decision tasks; we must have at least {}!".format(
task_count['decision'], min_decision_tasks
))
raise_on_errors(errors) | [
"def",
"check_num_tasks",
"(",
"chain",
",",
"task_count",
")",
":",
"errors",
"=",
"[",
"]",
"# hardcode for now. If we need a different set of constraints, either",
"# go by cot_product settings or by task_count['docker-image'] + 1",
"min_decision_tasks",
"=",
"1",
"if",
"task... | Make sure there are a specific number of specific task types.
Currently we only check decision tasks.
Args:
chain (ChainOfTrust): the chain we're operating on
task_count (dict): mapping task type to the number of links.
Raises:
CoTError: on failure. | [
"Make",
"sure",
"there",
"are",
"a",
"specific",
"number",
"of",
"specific",
"task",
"types",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1841-L1862 | train | 28,036 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_docker_worker_task | async def verify_docker_worker_task(chain, link):
"""Docker-worker specific checks.
Args:
chain (ChainOfTrust): the chain we're operating on
link (ChainOfTrust or LinkOfTrust): the trust object for the signing task.
Raises:
CoTError: on failure.
"""
if chain != link:
# These two checks will die on `link.cot` if `link` is a ChainOfTrust
# object (e.g., the task we're running `verify_cot` against is a
# docker-worker task). So only run these tests if they are not the chain
# object.
check_interactive_docker_worker(link)
verify_docker_image_sha(chain, link) | python | async def verify_docker_worker_task(chain, link):
"""Docker-worker specific checks.
Args:
chain (ChainOfTrust): the chain we're operating on
link (ChainOfTrust or LinkOfTrust): the trust object for the signing task.
Raises:
CoTError: on failure.
"""
if chain != link:
# These two checks will die on `link.cot` if `link` is a ChainOfTrust
# object (e.g., the task we're running `verify_cot` against is a
# docker-worker task). So only run these tests if they are not the chain
# object.
check_interactive_docker_worker(link)
verify_docker_image_sha(chain, link) | [
"async",
"def",
"verify_docker_worker_task",
"(",
"chain",
",",
"link",
")",
":",
"if",
"chain",
"!=",
"link",
":",
"# These two checks will die on `link.cot` if `link` is a ChainOfTrust",
"# object (e.g., the task we're running `verify_cot` against is a",
"# docker-worker task). So ... | Docker-worker specific checks.
Args:
chain (ChainOfTrust): the chain we're operating on
link (ChainOfTrust or LinkOfTrust): the trust object for the signing task.
Raises:
CoTError: on failure. | [
"Docker",
"-",
"worker",
"specific",
"checks",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1890-L1907 | train | 28,037 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_scriptworker_task | async def verify_scriptworker_task(chain, obj):
"""Verify the signing trust object.
Currently the only check is to make sure it was run on a scriptworker.
Args:
chain (ChainOfTrust): the chain we're operating on
obj (ChainOfTrust or LinkOfTrust): the trust object for the signing task.
"""
errors = []
if obj.worker_impl != "scriptworker":
errors.append("{} {} must be run from scriptworker!".format(obj.name, obj.task_id))
raise_on_errors(errors) | python | async def verify_scriptworker_task(chain, obj):
"""Verify the signing trust object.
Currently the only check is to make sure it was run on a scriptworker.
Args:
chain (ChainOfTrust): the chain we're operating on
obj (ChainOfTrust or LinkOfTrust): the trust object for the signing task.
"""
errors = []
if obj.worker_impl != "scriptworker":
errors.append("{} {} must be run from scriptworker!".format(obj.name, obj.task_id))
raise_on_errors(errors) | [
"async",
"def",
"verify_scriptworker_task",
"(",
"chain",
",",
"obj",
")",
":",
"errors",
"=",
"[",
"]",
"if",
"obj",
".",
"worker_impl",
"!=",
"\"scriptworker\"",
":",
"errors",
".",
"append",
"(",
"\"{} {} must be run from scriptworker!\"",
".",
"format",
"(",... | Verify the signing trust object.
Currently the only check is to make sure it was run on a scriptworker.
Args:
chain (ChainOfTrust): the chain we're operating on
obj (ChainOfTrust or LinkOfTrust): the trust object for the signing task. | [
"Verify",
"the",
"signing",
"trust",
"object",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1926-L1939 | train | 28,038 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_repo_matches_url | def verify_repo_matches_url(repo, url):
"""Verify ``url`` is a part of ``repo``.
We were using ``startswith()`` for a while, which isn't a good comparison.
This function allows us to ``urlparse`` and compare host and path.
Args:
repo (str): the repo url
url (str): the url to verify is part of the repo
Returns:
bool: ``True`` if the repo matches the url.
"""
repo_parts = urlparse(repo)
url_parts = urlparse(url)
errors = []
repo_path_parts = repo_parts.path.split('/')
url_path_parts = url_parts.path.split('/')
if repo_parts.hostname != url_parts.hostname:
errors.append("verify_repo_matches_url: Hostnames don't match! {} {}".format(
repo_parts.hostname, url_parts.hostname
))
if not url_parts.path.startswith(repo_parts.path) or \
url_path_parts[:len(repo_path_parts)] != repo_path_parts:
errors.append("verify_repo_matches_url: Paths don't match! {} {}".format(
repo_parts.path, url_parts.path
))
if errors:
log.warning("\n".join(errors))
return False
return True | python | def verify_repo_matches_url(repo, url):
"""Verify ``url`` is a part of ``repo``.
We were using ``startswith()`` for a while, which isn't a good comparison.
This function allows us to ``urlparse`` and compare host and path.
Args:
repo (str): the repo url
url (str): the url to verify is part of the repo
Returns:
bool: ``True`` if the repo matches the url.
"""
repo_parts = urlparse(repo)
url_parts = urlparse(url)
errors = []
repo_path_parts = repo_parts.path.split('/')
url_path_parts = url_parts.path.split('/')
if repo_parts.hostname != url_parts.hostname:
errors.append("verify_repo_matches_url: Hostnames don't match! {} {}".format(
repo_parts.hostname, url_parts.hostname
))
if not url_parts.path.startswith(repo_parts.path) or \
url_path_parts[:len(repo_path_parts)] != repo_path_parts:
errors.append("verify_repo_matches_url: Paths don't match! {} {}".format(
repo_parts.path, url_parts.path
))
if errors:
log.warning("\n".join(errors))
return False
return True | [
"def",
"verify_repo_matches_url",
"(",
"repo",
",",
"url",
")",
":",
"repo_parts",
"=",
"urlparse",
"(",
"repo",
")",
"url_parts",
"=",
"urlparse",
"(",
"url",
")",
"errors",
"=",
"[",
"]",
"repo_path_parts",
"=",
"repo_parts",
".",
"path",
".",
"split",
... | Verify ``url`` is a part of ``repo``.
We were using ``startswith()`` for a while, which isn't a good comparison.
This function allows us to ``urlparse`` and compare host and path.
Args:
repo (str): the repo url
url (str): the url to verify is part of the repo
Returns:
bool: ``True`` if the repo matches the url. | [
"Verify",
"url",
"is",
"a",
"part",
"of",
"repo",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1963-L1994 | train | 28,039 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | get_source_url | def get_source_url(obj):
"""Get the source url for a Trust object.
Args:
obj (ChainOfTrust or LinkOfTrust): the trust object to inspect
Raises:
CoTError: if repo and source are defined and don't match
Returns:
str: the source url.
"""
source_env_prefix = obj.context.config['source_env_prefix']
task = obj.task
log.debug("Getting source url for {} {}...".format(obj.name, obj.task_id))
repo = get_repo(obj.task, source_env_prefix=source_env_prefix)
source = task['metadata']['source']
if repo and not verify_repo_matches_url(repo, source):
raise CoTError("{name} {task_id}: {source_env_prefix} {repo} doesn't match source {source}!".format(
name=obj.name, task_id=obj.task_id, source_env_prefix=source_env_prefix, repo=repo, source=source
))
log.info("{} {}: found {}".format(obj.name, obj.task_id, source))
return source | python | def get_source_url(obj):
"""Get the source url for a Trust object.
Args:
obj (ChainOfTrust or LinkOfTrust): the trust object to inspect
Raises:
CoTError: if repo and source are defined and don't match
Returns:
str: the source url.
"""
source_env_prefix = obj.context.config['source_env_prefix']
task = obj.task
log.debug("Getting source url for {} {}...".format(obj.name, obj.task_id))
repo = get_repo(obj.task, source_env_prefix=source_env_prefix)
source = task['metadata']['source']
if repo and not verify_repo_matches_url(repo, source):
raise CoTError("{name} {task_id}: {source_env_prefix} {repo} doesn't match source {source}!".format(
name=obj.name, task_id=obj.task_id, source_env_prefix=source_env_prefix, repo=repo, source=source
))
log.info("{} {}: found {}".format(obj.name, obj.task_id, source))
return source | [
"def",
"get_source_url",
"(",
"obj",
")",
":",
"source_env_prefix",
"=",
"obj",
".",
"context",
".",
"config",
"[",
"'source_env_prefix'",
"]",
"task",
"=",
"obj",
".",
"task",
"log",
".",
"debug",
"(",
"\"Getting source url for {} {}...\"",
".",
"format",
"("... | Get the source url for a Trust object.
Args:
obj (ChainOfTrust or LinkOfTrust): the trust object to inspect
Raises:
CoTError: if repo and source are defined and don't match
Returns:
str: the source url. | [
"Get",
"the",
"source",
"url",
"for",
"a",
"Trust",
"object",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L1997-L2020 | train | 28,040 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | trace_back_to_tree | async def trace_back_to_tree(chain):
"""Trace the chain back to the tree.
task.metadata.source: "https://hg.mozilla.org/projects/date//file/a80373508881bfbff67a2a49297c328ff8052572/taskcluster/ci/build"
task.payload.env.GECKO_HEAD_REPOSITORY "https://hg.mozilla.org/projects/date/"
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on error.
"""
errors = []
repos = {}
restricted_privs = None
rules = {}
for my_key, config_key in {
'scopes': 'cot_restricted_scopes',
'trees': 'cot_restricted_trees'
}.items():
rules[my_key] = chain.context.config[config_key]
# a repo_path of None means we have no restricted privs.
# a string repo_path may mean we have higher privs
for obj in [chain] + chain.links:
source_url = get_source_url(obj)
repo_path = match_url_regex(
chain.context.config['valid_vcs_rules'], source_url, match_url_path_callback
)
repos[obj] = repo_path
# check for restricted scopes.
my_repo = repos[chain]
for scope in chain.task['scopes']:
if scope in rules['scopes']:
log.info("Found privileged scope {}".format(scope))
restricted_privs = True
level = rules['scopes'][scope]
if my_repo not in rules['trees'][level]:
errors.append("{} {}: repo {} not allowlisted for scope {}!".format(
chain.name, chain.task_id, my_repo, scope
))
# verify all tasks w/ same decision_task_id have the same source repo.
if len(set(repos.values())) > 1:
for obj, repo in repos.items():
if obj.decision_task_id == chain.decision_task_id:
if repo != my_repo:
errors.append("{} {} repo {} doesn't match my repo {}!".format(
obj.name, obj.task_id, repo, my_repo
))
# if we have restricted privs, the non-sibling tasks must at least be in
# a known repo.
# (Not currently requiring that all tasks have the same privilege level,
# in case a docker-image build is run on mozilla-central and that image
# is used for a release-priv task, for example.)
elif restricted_privs and repo is None:
errors.append("{} {} has no privileged repo on an restricted privilege scope!".format(
obj.name, obj.task_id
))
# Disallow restricted privs on is_try_or_pull_request. This may be a redundant check.
if restricted_privs and await chain.is_try_or_pull_request():
errors.append(
"{} {} has restricted privilege scope, and is_try_or_pull_request()!".format(
chain.name, chain.task_id
)
)
raise_on_errors(errors) | python | async def trace_back_to_tree(chain):
"""Trace the chain back to the tree.
task.metadata.source: "https://hg.mozilla.org/projects/date//file/a80373508881bfbff67a2a49297c328ff8052572/taskcluster/ci/build"
task.payload.env.GECKO_HEAD_REPOSITORY "https://hg.mozilla.org/projects/date/"
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on error.
"""
errors = []
repos = {}
restricted_privs = None
rules = {}
for my_key, config_key in {
'scopes': 'cot_restricted_scopes',
'trees': 'cot_restricted_trees'
}.items():
rules[my_key] = chain.context.config[config_key]
# a repo_path of None means we have no restricted privs.
# a string repo_path may mean we have higher privs
for obj in [chain] + chain.links:
source_url = get_source_url(obj)
repo_path = match_url_regex(
chain.context.config['valid_vcs_rules'], source_url, match_url_path_callback
)
repos[obj] = repo_path
# check for restricted scopes.
my_repo = repos[chain]
for scope in chain.task['scopes']:
if scope in rules['scopes']:
log.info("Found privileged scope {}".format(scope))
restricted_privs = True
level = rules['scopes'][scope]
if my_repo not in rules['trees'][level]:
errors.append("{} {}: repo {} not allowlisted for scope {}!".format(
chain.name, chain.task_id, my_repo, scope
))
# verify all tasks w/ same decision_task_id have the same source repo.
if len(set(repos.values())) > 1:
for obj, repo in repos.items():
if obj.decision_task_id == chain.decision_task_id:
if repo != my_repo:
errors.append("{} {} repo {} doesn't match my repo {}!".format(
obj.name, obj.task_id, repo, my_repo
))
# if we have restricted privs, the non-sibling tasks must at least be in
# a known repo.
# (Not currently requiring that all tasks have the same privilege level,
# in case a docker-image build is run on mozilla-central and that image
# is used for a release-priv task, for example.)
elif restricted_privs and repo is None:
errors.append("{} {} has no privileged repo on an restricted privilege scope!".format(
obj.name, obj.task_id
))
# Disallow restricted privs on is_try_or_pull_request. This may be a redundant check.
if restricted_privs and await chain.is_try_or_pull_request():
errors.append(
"{} {} has restricted privilege scope, and is_try_or_pull_request()!".format(
chain.name, chain.task_id
)
)
raise_on_errors(errors) | [
"async",
"def",
"trace_back_to_tree",
"(",
"chain",
")",
":",
"errors",
"=",
"[",
"]",
"repos",
"=",
"{",
"}",
"restricted_privs",
"=",
"None",
"rules",
"=",
"{",
"}",
"for",
"my_key",
",",
"config_key",
"in",
"{",
"'scopes'",
":",
"'cot_restricted_scopes'... | Trace the chain back to the tree.
task.metadata.source: "https://hg.mozilla.org/projects/date//file/a80373508881bfbff67a2a49297c328ff8052572/taskcluster/ci/build"
task.payload.env.GECKO_HEAD_REPOSITORY "https://hg.mozilla.org/projects/date/"
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on error. | [
"Trace",
"the",
"chain",
"back",
"to",
"the",
"tree",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L2024-L2090 | train | 28,041 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | verify_chain_of_trust | async def verify_chain_of_trust(chain):
"""Build and verify the chain of trust.
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on failure
"""
log_path = os.path.join(chain.context.config["task_log_dir"], "chain_of_trust.log")
scriptworker_log = logging.getLogger('scriptworker')
with contextual_log_handler(
chain.context, path=log_path, log_obj=scriptworker_log,
formatter=AuditLogFormatter(
fmt=chain.context.config['log_fmt'],
datefmt=chain.context.config['log_datefmt'],
)
):
try:
# build LinkOfTrust objects
await build_task_dependencies(chain, chain.task, chain.name, chain.task_id)
# download the signed chain of trust artifacts
await download_cot(chain)
# verify the signatures and populate the ``link.cot``s
verify_cot_signatures(chain)
# download all other artifacts needed to verify chain of trust
await download_cot_artifacts(chain)
# verify the task types, e.g. decision
task_count = await verify_task_types(chain)
check_num_tasks(chain, task_count)
# verify the worker_impls, e.g. docker-worker
await verify_worker_impls(chain)
await trace_back_to_tree(chain)
except (BaseDownloadError, KeyError, AttributeError) as exc:
log.critical("Chain of Trust verification error!", exc_info=True)
if isinstance(exc, CoTError):
raise
else:
raise CoTError(str(exc))
log.info("Good.") | python | async def verify_chain_of_trust(chain):
"""Build and verify the chain of trust.
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on failure
"""
log_path = os.path.join(chain.context.config["task_log_dir"], "chain_of_trust.log")
scriptworker_log = logging.getLogger('scriptworker')
with contextual_log_handler(
chain.context, path=log_path, log_obj=scriptworker_log,
formatter=AuditLogFormatter(
fmt=chain.context.config['log_fmt'],
datefmt=chain.context.config['log_datefmt'],
)
):
try:
# build LinkOfTrust objects
await build_task_dependencies(chain, chain.task, chain.name, chain.task_id)
# download the signed chain of trust artifacts
await download_cot(chain)
# verify the signatures and populate the ``link.cot``s
verify_cot_signatures(chain)
# download all other artifacts needed to verify chain of trust
await download_cot_artifacts(chain)
# verify the task types, e.g. decision
task_count = await verify_task_types(chain)
check_num_tasks(chain, task_count)
# verify the worker_impls, e.g. docker-worker
await verify_worker_impls(chain)
await trace_back_to_tree(chain)
except (BaseDownloadError, KeyError, AttributeError) as exc:
log.critical("Chain of Trust verification error!", exc_info=True)
if isinstance(exc, CoTError):
raise
else:
raise CoTError(str(exc))
log.info("Good.") | [
"async",
"def",
"verify_chain_of_trust",
"(",
"chain",
")",
":",
"log_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"chain",
".",
"context",
".",
"config",
"[",
"\"task_log_dir\"",
"]",
",",
"\"chain_of_trust.log\"",
")",
"scriptworker_log",
"=",
"logging",... | Build and verify the chain of trust.
Args:
chain (ChainOfTrust): the chain we're operating on
Raises:
CoTError: on failure | [
"Build",
"and",
"verify",
"the",
"chain",
"of",
"trust",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L2105-L2145 | train | 28,042 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | ChainOfTrust.is_try_or_pull_request | async def is_try_or_pull_request(self):
"""Determine if any task in the chain is a try task.
Returns:
bool: True if a task is a try task.
"""
tasks = [asyncio.ensure_future(link.is_try_or_pull_request()) for link in self.links]
tasks.insert(0, asyncio.ensure_future(is_try_or_pull_request(self.context, self.task)))
conditions = await raise_future_exceptions(tasks)
return any(conditions) | python | async def is_try_or_pull_request(self):
"""Determine if any task in the chain is a try task.
Returns:
bool: True if a task is a try task.
"""
tasks = [asyncio.ensure_future(link.is_try_or_pull_request()) for link in self.links]
tasks.insert(0, asyncio.ensure_future(is_try_or_pull_request(self.context, self.task)))
conditions = await raise_future_exceptions(tasks)
return any(conditions) | [
"async",
"def",
"is_try_or_pull_request",
"(",
"self",
")",
":",
"tasks",
"=",
"[",
"asyncio",
".",
"ensure_future",
"(",
"link",
".",
"is_try_or_pull_request",
"(",
")",
")",
"for",
"link",
"in",
"self",
".",
"links",
"]",
"tasks",
".",
"insert",
"(",
"... | Determine if any task in the chain is a try task.
Returns:
bool: True if a task is a try task. | [
"Determine",
"if",
"any",
"task",
"in",
"the",
"chain",
"is",
"a",
"try",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L129-L140 | train | 28,043 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | ChainOfTrust.get_link | def get_link(self, task_id):
"""Get a ``LinkOfTrust`` by task id.
Args:
task_id (str): the task id to find.
Returns:
LinkOfTrust: the link matching the task id.
Raises:
CoTError: if no ``LinkOfTrust`` matches.
"""
links = [x for x in self.links if x.task_id == task_id]
if len(links) != 1:
raise CoTError("No single Link matches task_id {}!\n{}".format(task_id, self.dependent_task_ids()))
return links[0] | python | def get_link(self, task_id):
"""Get a ``LinkOfTrust`` by task id.
Args:
task_id (str): the task id to find.
Returns:
LinkOfTrust: the link matching the task id.
Raises:
CoTError: if no ``LinkOfTrust`` matches.
"""
links = [x for x in self.links if x.task_id == task_id]
if len(links) != 1:
raise CoTError("No single Link matches task_id {}!\n{}".format(task_id, self.dependent_task_ids()))
return links[0] | [
"def",
"get_link",
"(",
"self",
",",
"task_id",
")",
":",
"links",
"=",
"[",
"x",
"for",
"x",
"in",
"self",
".",
"links",
"if",
"x",
".",
"task_id",
"==",
"task_id",
"]",
"if",
"len",
"(",
"links",
")",
"!=",
"1",
":",
"raise",
"CoTError",
"(",
... | Get a ``LinkOfTrust`` by task id.
Args:
task_id (str): the task id to find.
Returns:
LinkOfTrust: the link matching the task id.
Raises:
CoTError: if no ``LinkOfTrust`` matches. | [
"Get",
"a",
"LinkOfTrust",
"by",
"task",
"id",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L142-L158 | train | 28,044 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | ChainOfTrust.get_all_links_in_chain | def get_all_links_in_chain(self):
"""Return all links in the chain of trust, including the target task.
By default, we're checking a task and all its dependencies back to the
tree, so the full chain is ``self.links`` + ``self``. However, we also
support checking the decision task itself. In that case, we populate
the decision task as a link in ``self.links``, and we don't need to add
another check for ``self``.
Returns:
list: of all ``LinkOfTrust``s to verify.
"""
if self.is_decision() and self.get_link(self.task_id):
return self.links
return [self] + self.links | python | def get_all_links_in_chain(self):
"""Return all links in the chain of trust, including the target task.
By default, we're checking a task and all its dependencies back to the
tree, so the full chain is ``self.links`` + ``self``. However, we also
support checking the decision task itself. In that case, we populate
the decision task as a link in ``self.links``, and we don't need to add
another check for ``self``.
Returns:
list: of all ``LinkOfTrust``s to verify.
"""
if self.is_decision() and self.get_link(self.task_id):
return self.links
return [self] + self.links | [
"def",
"get_all_links_in_chain",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_decision",
"(",
")",
"and",
"self",
".",
"get_link",
"(",
"self",
".",
"task_id",
")",
":",
"return",
"self",
".",
"links",
"return",
"[",
"self",
"]",
"+",
"self",
".",
"... | Return all links in the chain of trust, including the target task.
By default, we're checking a task and all its dependencies back to the
tree, so the full chain is ``self.links`` + ``self``. However, we also
support checking the decision task itself. In that case, we populate
the decision task as a link in ``self.links``, and we don't need to add
another check for ``self``.
Returns:
list: of all ``LinkOfTrust``s to verify. | [
"Return",
"all",
"links",
"in",
"the",
"chain",
"of",
"trust",
"including",
"the",
"target",
"task",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L169-L184 | train | 28,045 |
mozilla-releng/scriptworker | scriptworker/cot/verify.py | AuditLogFormatter.format | def format(self, record):
"""Space debug messages for more legibility."""
if record.levelno == logging.DEBUG:
record.msg = ' {}'.format(record.msg)
return super(AuditLogFormatter, self).format(record) | python | def format(self, record):
"""Space debug messages for more legibility."""
if record.levelno == logging.DEBUG:
record.msg = ' {}'.format(record.msg)
return super(AuditLogFormatter, self).format(record) | [
"def",
"format",
"(",
"self",
",",
"record",
")",
":",
"if",
"record",
".",
"levelno",
"==",
"logging",
".",
"DEBUG",
":",
"record",
".",
"msg",
"=",
"' {}'",
".",
"format",
"(",
"record",
".",
"msg",
")",
"return",
"super",
"(",
"AuditLogFormatter",
... | Space debug messages for more legibility. | [
"Space",
"debug",
"messages",
"for",
"more",
"legibility",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/verify.py#L2097-L2101 | train | 28,046 |
mozilla-releng/scriptworker | scriptworker/version.py | get_version_string | def get_version_string(version):
"""Translate a version tuple into a string.
Specify the __version__ as a tuple for more precise comparisons, and
translate it to __version_string__ for when that's needed.
This function exists primarily for easier unit testing.
Args:
version (Tuple[int, int, int, str]): three ints and an optional string.
Returns:
version_string (str): the tuple translated into a string per semver.org
"""
version_len = len(version)
if version_len == 3:
version_string = '%d.%d.%d' % version
elif version_len == 4:
version_string = '%d.%d.%d-%s' % version
else:
raise Exception(
'Version tuple is non-semver-compliant {} length!'.format(version_len)
)
return version_string | python | def get_version_string(version):
"""Translate a version tuple into a string.
Specify the __version__ as a tuple for more precise comparisons, and
translate it to __version_string__ for when that's needed.
This function exists primarily for easier unit testing.
Args:
version (Tuple[int, int, int, str]): three ints and an optional string.
Returns:
version_string (str): the tuple translated into a string per semver.org
"""
version_len = len(version)
if version_len == 3:
version_string = '%d.%d.%d' % version
elif version_len == 4:
version_string = '%d.%d.%d-%s' % version
else:
raise Exception(
'Version tuple is non-semver-compliant {} length!'.format(version_len)
)
return version_string | [
"def",
"get_version_string",
"(",
"version",
")",
":",
"version_len",
"=",
"len",
"(",
"version",
")",
"if",
"version_len",
"==",
"3",
":",
"version_string",
"=",
"'%d.%d.%d'",
"%",
"version",
"elif",
"version_len",
"==",
"4",
":",
"version_string",
"=",
"'%... | Translate a version tuple into a string.
Specify the __version__ as a tuple for more precise comparisons, and
translate it to __version_string__ for when that's needed.
This function exists primarily for easier unit testing.
Args:
version (Tuple[int, int, int, str]): three ints and an optional string.
Returns:
version_string (str): the tuple translated into a string per semver.org | [
"Translate",
"a",
"version",
"tuple",
"into",
"a",
"string",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/version.py#L26-L50 | train | 28,047 |
mozilla-releng/scriptworker | scriptworker/config.py | get_unfrozen_copy | def get_unfrozen_copy(values):
"""Recursively convert `value`'s tuple values into lists, and frozendicts into dicts.
Args:
values (frozendict/tuple): the frozendict/tuple.
Returns:
values (dict/list): the unfrozen copy.
"""
if isinstance(values, (frozendict, dict)):
return {key: get_unfrozen_copy(value) for key, value in values.items()}
elif isinstance(values, (list, tuple)):
return [get_unfrozen_copy(value) for value in values]
# Nothing to unfreeze.
return values | python | def get_unfrozen_copy(values):
"""Recursively convert `value`'s tuple values into lists, and frozendicts into dicts.
Args:
values (frozendict/tuple): the frozendict/tuple.
Returns:
values (dict/list): the unfrozen copy.
"""
if isinstance(values, (frozendict, dict)):
return {key: get_unfrozen_copy(value) for key, value in values.items()}
elif isinstance(values, (list, tuple)):
return [get_unfrozen_copy(value) for value in values]
# Nothing to unfreeze.
return values | [
"def",
"get_unfrozen_copy",
"(",
"values",
")",
":",
"if",
"isinstance",
"(",
"values",
",",
"(",
"frozendict",
",",
"dict",
")",
")",
":",
"return",
"{",
"key",
":",
"get_unfrozen_copy",
"(",
"value",
")",
"for",
"key",
",",
"value",
"in",
"values",
"... | Recursively convert `value`'s tuple values into lists, and frozendicts into dicts.
Args:
values (frozendict/tuple): the frozendict/tuple.
Returns:
values (dict/list): the unfrozen copy. | [
"Recursively",
"convert",
"value",
"s",
"tuple",
"values",
"into",
"lists",
"and",
"frozendicts",
"into",
"dicts",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/config.py#L57-L73 | train | 28,048 |
mozilla-releng/scriptworker | scriptworker/config.py | read_worker_creds | def read_worker_creds(key="credentials"):
"""Get credentials from CREDS_FILES or the environment.
This looks at the CREDS_FILES in order, and falls back to the environment.
Args:
key (str, optional): each CREDS_FILE is a json dict. This key's value
contains the credentials. Defaults to 'credentials'.
Returns:
dict: the credentials found. None if no credentials found.
"""
for path in CREDS_FILES:
if not os.path.exists(path):
continue
contents = load_json_or_yaml(path, is_path=True, exception=None)
if contents.get(key):
return contents[key]
else:
if key == "credentials" and os.environ.get("TASKCLUSTER_ACCESS_TOKEN") and \
os.environ.get("TASKCLUSTER_CLIENT_ID"):
credentials = {
"accessToken": os.environ["TASKCLUSTER_ACCESS_TOKEN"],
"clientId": os.environ["TASKCLUSTER_CLIENT_ID"],
}
if os.environ.get("TASKCLUSTER_CERTIFICATE"):
credentials['certificate'] = os.environ['TASKCLUSTER_CERTIFICATE']
return credentials | python | def read_worker_creds(key="credentials"):
"""Get credentials from CREDS_FILES or the environment.
This looks at the CREDS_FILES in order, and falls back to the environment.
Args:
key (str, optional): each CREDS_FILE is a json dict. This key's value
contains the credentials. Defaults to 'credentials'.
Returns:
dict: the credentials found. None if no credentials found.
"""
for path in CREDS_FILES:
if not os.path.exists(path):
continue
contents = load_json_or_yaml(path, is_path=True, exception=None)
if contents.get(key):
return contents[key]
else:
if key == "credentials" and os.environ.get("TASKCLUSTER_ACCESS_TOKEN") and \
os.environ.get("TASKCLUSTER_CLIENT_ID"):
credentials = {
"accessToken": os.environ["TASKCLUSTER_ACCESS_TOKEN"],
"clientId": os.environ["TASKCLUSTER_CLIENT_ID"],
}
if os.environ.get("TASKCLUSTER_CERTIFICATE"):
credentials['certificate'] = os.environ['TASKCLUSTER_CERTIFICATE']
return credentials | [
"def",
"read_worker_creds",
"(",
"key",
"=",
"\"credentials\"",
")",
":",
"for",
"path",
"in",
"CREDS_FILES",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"continue",
"contents",
"=",
"load_json_or_yaml",
"(",
"path",
",",
"... | Get credentials from CREDS_FILES or the environment.
This looks at the CREDS_FILES in order, and falls back to the environment.
Args:
key (str, optional): each CREDS_FILE is a json dict. This key's value
contains the credentials. Defaults to 'credentials'.
Returns:
dict: the credentials found. None if no credentials found. | [
"Get",
"credentials",
"from",
"CREDS_FILES",
"or",
"the",
"environment",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/config.py#L77-L105 | train | 28,049 |
mozilla-releng/scriptworker | scriptworker/config.py | check_config | def check_config(config, path):
"""Validate the config against DEFAULT_CONFIG.
Any unknown keys or wrong types will add error messages.
Args:
config (dict): the running config.
path (str): the path to the config file, used in error messages.
Returns:
list: the error messages found when validating the config.
"""
messages = []
config_copy = get_frozen_copy(config)
missing_keys = set(DEFAULT_CONFIG.keys()) - set(config_copy.keys())
if missing_keys:
messages.append("Missing config keys {}!".format(missing_keys))
for key, value in config_copy.items():
if key not in DEFAULT_CONFIG:
messages.append("Unknown key {} in {}!".format(key, path))
continue
if value is None:
messages.append(_VALUE_UNDEFINED_MESSAGE.format(path=path, key=key))
else:
value_type = type(value)
if isinstance(DEFAULT_CONFIG[key], Mapping) and 'by-cot-product' in DEFAULT_CONFIG[key]:
default_type = type(DEFAULT_CONFIG[key]['by-cot-product'][config['cot_product']])
else:
default_type = type(DEFAULT_CONFIG[key])
if value_type is not default_type:
messages.append(
"{} {}: type {} is not {}!".format(path, key, value_type, default_type)
)
if value in ("...", b"..."):
messages.append(_VALUE_UNDEFINED_MESSAGE.format(path=path, key=key))
if key in ("provisioner_id", "worker_group", "worker_type", "worker_id") and not _is_id_valid(value):
messages.append('{} doesn\'t match "{}" (required by Taskcluster)'.format(key, _GENERIC_ID_REGEX.pattern))
return messages | python | def check_config(config, path):
"""Validate the config against DEFAULT_CONFIG.
Any unknown keys or wrong types will add error messages.
Args:
config (dict): the running config.
path (str): the path to the config file, used in error messages.
Returns:
list: the error messages found when validating the config.
"""
messages = []
config_copy = get_frozen_copy(config)
missing_keys = set(DEFAULT_CONFIG.keys()) - set(config_copy.keys())
if missing_keys:
messages.append("Missing config keys {}!".format(missing_keys))
for key, value in config_copy.items():
if key not in DEFAULT_CONFIG:
messages.append("Unknown key {} in {}!".format(key, path))
continue
if value is None:
messages.append(_VALUE_UNDEFINED_MESSAGE.format(path=path, key=key))
else:
value_type = type(value)
if isinstance(DEFAULT_CONFIG[key], Mapping) and 'by-cot-product' in DEFAULT_CONFIG[key]:
default_type = type(DEFAULT_CONFIG[key]['by-cot-product'][config['cot_product']])
else:
default_type = type(DEFAULT_CONFIG[key])
if value_type is not default_type:
messages.append(
"{} {}: type {} is not {}!".format(path, key, value_type, default_type)
)
if value in ("...", b"..."):
messages.append(_VALUE_UNDEFINED_MESSAGE.format(path=path, key=key))
if key in ("provisioner_id", "worker_group", "worker_type", "worker_id") and not _is_id_valid(value):
messages.append('{} doesn\'t match "{}" (required by Taskcluster)'.format(key, _GENERIC_ID_REGEX.pattern))
return messages | [
"def",
"check_config",
"(",
"config",
",",
"path",
")",
":",
"messages",
"=",
"[",
"]",
"config_copy",
"=",
"get_frozen_copy",
"(",
"config",
")",
"missing_keys",
"=",
"set",
"(",
"DEFAULT_CONFIG",
".",
"keys",
"(",
")",
")",
"-",
"set",
"(",
"config_cop... | Validate the config against DEFAULT_CONFIG.
Any unknown keys or wrong types will add error messages.
Args:
config (dict): the running config.
path (str): the path to the config file, used in error messages.
Returns:
list: the error messages found when validating the config. | [
"Validate",
"the",
"config",
"against",
"DEFAULT_CONFIG",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/config.py#L109-L149 | train | 28,050 |
mozilla-releng/scriptworker | scriptworker/config.py | apply_product_config | def apply_product_config(config):
"""Apply config values that are keyed by `cot_product`.
This modifies the passed in configuration.
Args:
config dict: the config to apply cot_product keying too
Returns: dict
"""
cot_product = config['cot_product']
for key in config:
if isinstance(config[key], Mapping) and 'by-cot-product' in config[key]:
try:
config[key] = config[key]['by-cot-product'][cot_product]
except KeyError:
raise ConfigError("Product {} not specified for key {}".format(cot_product, key))
return config | python | def apply_product_config(config):
"""Apply config values that are keyed by `cot_product`.
This modifies the passed in configuration.
Args:
config dict: the config to apply cot_product keying too
Returns: dict
"""
cot_product = config['cot_product']
for key in config:
if isinstance(config[key], Mapping) and 'by-cot-product' in config[key]:
try:
config[key] = config[key]['by-cot-product'][cot_product]
except KeyError:
raise ConfigError("Product {} not specified for key {}".format(cot_product, key))
return config | [
"def",
"apply_product_config",
"(",
"config",
")",
":",
"cot_product",
"=",
"config",
"[",
"'cot_product'",
"]",
"for",
"key",
"in",
"config",
":",
"if",
"isinstance",
"(",
"config",
"[",
"key",
"]",
",",
"Mapping",
")",
"and",
"'by-cot-product'",
"in",
"c... | Apply config values that are keyed by `cot_product`.
This modifies the passed in configuration.
Args:
config dict: the config to apply cot_product keying too
Returns: dict | [
"Apply",
"config",
"values",
"that",
"are",
"keyed",
"by",
"cot_product",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/config.py#L156-L176 | train | 28,051 |
mozilla-releng/scriptworker | scriptworker/config.py | create_config | def create_config(config_path="scriptworker.yaml"):
"""Create a config from DEFAULT_CONFIG, arguments, and config file.
Then validate it and freeze it.
Args:
config_path (str, optional): the path to the config file. Defaults to
"scriptworker.yaml"
Returns:
tuple: (config frozendict, credentials dict)
Raises:
SystemExit: on failure
"""
if not os.path.exists(config_path):
print("{} doesn't exist! Exiting...".format(config_path), file=sys.stderr)
sys.exit(1)
with open(config_path, "r", encoding="utf-8") as fh:
secrets = safe_load(fh)
config = dict(deepcopy(DEFAULT_CONFIG))
if not secrets.get("credentials"):
secrets['credentials'] = read_worker_creds()
config.update(secrets)
apply_product_config(config)
messages = check_config(config, config_path)
if messages:
print('\n'.join(messages), file=sys.stderr)
print("Exiting...", file=sys.stderr)
sys.exit(1)
credentials = get_frozen_copy(secrets['credentials'])
del(config['credentials'])
config = get_frozen_copy(config)
return config, credentials | python | def create_config(config_path="scriptworker.yaml"):
"""Create a config from DEFAULT_CONFIG, arguments, and config file.
Then validate it and freeze it.
Args:
config_path (str, optional): the path to the config file. Defaults to
"scriptworker.yaml"
Returns:
tuple: (config frozendict, credentials dict)
Raises:
SystemExit: on failure
"""
if not os.path.exists(config_path):
print("{} doesn't exist! Exiting...".format(config_path), file=sys.stderr)
sys.exit(1)
with open(config_path, "r", encoding="utf-8") as fh:
secrets = safe_load(fh)
config = dict(deepcopy(DEFAULT_CONFIG))
if not secrets.get("credentials"):
secrets['credentials'] = read_worker_creds()
config.update(secrets)
apply_product_config(config)
messages = check_config(config, config_path)
if messages:
print('\n'.join(messages), file=sys.stderr)
print("Exiting...", file=sys.stderr)
sys.exit(1)
credentials = get_frozen_copy(secrets['credentials'])
del(config['credentials'])
config = get_frozen_copy(config)
return config, credentials | [
"def",
"create_config",
"(",
"config_path",
"=",
"\"scriptworker.yaml\"",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"config_path",
")",
":",
"print",
"(",
"\"{} doesn't exist! Exiting...\"",
".",
"format",
"(",
"config_path",
")",
",",
"fil... | Create a config from DEFAULT_CONFIG, arguments, and config file.
Then validate it and freeze it.
Args:
config_path (str, optional): the path to the config file. Defaults to
"scriptworker.yaml"
Returns:
tuple: (config frozendict, credentials dict)
Raises:
SystemExit: on failure | [
"Create",
"a",
"config",
"from",
"DEFAULT_CONFIG",
"arguments",
"and",
"config",
"file",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/config.py#L180-L214 | train | 28,052 |
mozilla-releng/scriptworker | scriptworker/config.py | get_context_from_cmdln | def get_context_from_cmdln(args, desc="Run scriptworker"):
"""Create a Context object from args.
Args:
args (list): the commandline args. Generally sys.argv
Returns:
tuple: ``scriptworker.context.Context`` with populated config, and
credentials frozendict
"""
context = Context()
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
"config_path", type=str, nargs="?", default="scriptworker.yaml",
help="the path to the config file"
)
parsed_args = parser.parse_args(args)
context.config, credentials = create_config(config_path=parsed_args.config_path)
update_logging_config(context)
return context, credentials | python | def get_context_from_cmdln(args, desc="Run scriptworker"):
"""Create a Context object from args.
Args:
args (list): the commandline args. Generally sys.argv
Returns:
tuple: ``scriptworker.context.Context`` with populated config, and
credentials frozendict
"""
context = Context()
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
"config_path", type=str, nargs="?", default="scriptworker.yaml",
help="the path to the config file"
)
parsed_args = parser.parse_args(args)
context.config, credentials = create_config(config_path=parsed_args.config_path)
update_logging_config(context)
return context, credentials | [
"def",
"get_context_from_cmdln",
"(",
"args",
",",
"desc",
"=",
"\"Run scriptworker\"",
")",
":",
"context",
"=",
"Context",
"(",
")",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"desc",
")",
"parser",
".",
"add_argument",
"(",
... | Create a Context object from args.
Args:
args (list): the commandline args. Generally sys.argv
Returns:
tuple: ``scriptworker.context.Context`` with populated config, and
credentials frozendict | [
"Create",
"a",
"Context",
"object",
"from",
"args",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/config.py#L218-L238 | train | 28,053 |
mozilla-releng/scriptworker | scriptworker/cot/generate.py | get_cot_artifacts | def get_cot_artifacts(context):
"""Generate the artifact relative paths and shas for the chain of trust.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: a dictionary of {"path/to/artifact": {"hash_alg": "..."}, ...}
"""
artifacts = {}
filepaths = filepaths_in_dir(context.config['artifact_dir'])
hash_alg = context.config['chain_of_trust_hash_algorithm']
for filepath in sorted(filepaths):
path = os.path.join(context.config['artifact_dir'], filepath)
sha = get_hash(path, hash_alg=hash_alg)
artifacts[filepath] = {hash_alg: sha}
return artifacts | python | def get_cot_artifacts(context):
"""Generate the artifact relative paths and shas for the chain of trust.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: a dictionary of {"path/to/artifact": {"hash_alg": "..."}, ...}
"""
artifacts = {}
filepaths = filepaths_in_dir(context.config['artifact_dir'])
hash_alg = context.config['chain_of_trust_hash_algorithm']
for filepath in sorted(filepaths):
path = os.path.join(context.config['artifact_dir'], filepath)
sha = get_hash(path, hash_alg=hash_alg)
artifacts[filepath] = {hash_alg: sha}
return artifacts | [
"def",
"get_cot_artifacts",
"(",
"context",
")",
":",
"artifacts",
"=",
"{",
"}",
"filepaths",
"=",
"filepaths_in_dir",
"(",
"context",
".",
"config",
"[",
"'artifact_dir'",
"]",
")",
"hash_alg",
"=",
"context",
".",
"config",
"[",
"'chain_of_trust_hash_algorith... | Generate the artifact relative paths and shas for the chain of trust.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: a dictionary of {"path/to/artifact": {"hash_alg": "..."}, ...} | [
"Generate",
"the",
"artifact",
"relative",
"paths",
"and",
"shas",
"for",
"the",
"chain",
"of",
"trust",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/generate.py#L25-L42 | train | 28,054 |
mozilla-releng/scriptworker | scriptworker/cot/generate.py | generate_cot_body | def generate_cot_body(context):
"""Generate the chain of trust dictionary.
This is the unsigned and unformatted chain of trust artifact contents.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: the unsignd and unformatted chain of trust artifact contents.
Raises:
ScriptWorkerException: on error.
"""
try:
cot = {
'artifacts': get_cot_artifacts(context),
'chainOfTrustVersion': 1,
'runId': context.claim_task['runId'],
'task': context.task,
'taskId': context.claim_task['status']['taskId'],
'workerGroup': context.claim_task['workerGroup'],
'workerId': context.config['worker_id'],
'workerType': context.config['worker_type'],
'environment': get_cot_environment(context),
}
except (KeyError, ) as exc:
raise ScriptWorkerException("Can't generate chain of trust! {}".format(str(exc)))
return cot | python | def generate_cot_body(context):
"""Generate the chain of trust dictionary.
This is the unsigned and unformatted chain of trust artifact contents.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: the unsignd and unformatted chain of trust artifact contents.
Raises:
ScriptWorkerException: on error.
"""
try:
cot = {
'artifacts': get_cot_artifacts(context),
'chainOfTrustVersion': 1,
'runId': context.claim_task['runId'],
'task': context.task,
'taskId': context.claim_task['status']['taskId'],
'workerGroup': context.claim_task['workerGroup'],
'workerId': context.config['worker_id'],
'workerType': context.config['worker_type'],
'environment': get_cot_environment(context),
}
except (KeyError, ) as exc:
raise ScriptWorkerException("Can't generate chain of trust! {}".format(str(exc)))
return cot | [
"def",
"generate_cot_body",
"(",
"context",
")",
":",
"try",
":",
"cot",
"=",
"{",
"'artifacts'",
":",
"get_cot_artifacts",
"(",
"context",
")",
",",
"'chainOfTrustVersion'",
":",
"1",
",",
"'runId'",
":",
"context",
".",
"claim_task",
"[",
"'runId'",
"]",
... | Generate the chain of trust dictionary.
This is the unsigned and unformatted chain of trust artifact contents.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict: the unsignd and unformatted chain of trust artifact contents.
Raises:
ScriptWorkerException: on error. | [
"Generate",
"the",
"chain",
"of",
"trust",
"dictionary",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/generate.py#L62-L92 | train | 28,055 |
mozilla-releng/scriptworker | scriptworker/cot/generate.py | generate_cot | def generate_cot(context, parent_path=None):
"""Format and sign the cot body, and write to disk.
Args:
context (scriptworker.context.Context): the scriptworker context.
parent_path (str, optional): The directory to write the chain of trust
artifacts to. If None, this is ``artifact_dir/public/``.
Defaults to None.
Returns:
str: the contents of the chain of trust artifact.
Raises:
ScriptWorkerException: on schema error.
"""
body = generate_cot_body(context)
schema = load_json_or_yaml(
context.config['cot_schema_path'], is_path=True,
exception=ScriptWorkerException,
message="Can't read schema file {}: %(exc)s".format(context.config['cot_schema_path'])
)
validate_json_schema(body, schema, name="chain of trust")
body = format_json(body)
parent_path = parent_path or os.path.join(context.config['artifact_dir'], 'public')
unsigned_path = os.path.join(parent_path, 'chain-of-trust.json')
write_to_file(unsigned_path, body)
if context.config['sign_chain_of_trust']:
ed25519_signature_path = '{}.sig'.format(unsigned_path)
ed25519_private_key = ed25519_private_key_from_file(context.config['ed25519_private_key_path'])
ed25519_signature = ed25519_private_key.sign(body.encode('utf-8'))
write_to_file(ed25519_signature_path, ed25519_signature, file_type='binary')
return body | python | def generate_cot(context, parent_path=None):
"""Format and sign the cot body, and write to disk.
Args:
context (scriptworker.context.Context): the scriptworker context.
parent_path (str, optional): The directory to write the chain of trust
artifacts to. If None, this is ``artifact_dir/public/``.
Defaults to None.
Returns:
str: the contents of the chain of trust artifact.
Raises:
ScriptWorkerException: on schema error.
"""
body = generate_cot_body(context)
schema = load_json_or_yaml(
context.config['cot_schema_path'], is_path=True,
exception=ScriptWorkerException,
message="Can't read schema file {}: %(exc)s".format(context.config['cot_schema_path'])
)
validate_json_schema(body, schema, name="chain of trust")
body = format_json(body)
parent_path = parent_path or os.path.join(context.config['artifact_dir'], 'public')
unsigned_path = os.path.join(parent_path, 'chain-of-trust.json')
write_to_file(unsigned_path, body)
if context.config['sign_chain_of_trust']:
ed25519_signature_path = '{}.sig'.format(unsigned_path)
ed25519_private_key = ed25519_private_key_from_file(context.config['ed25519_private_key_path'])
ed25519_signature = ed25519_private_key.sign(body.encode('utf-8'))
write_to_file(ed25519_signature_path, ed25519_signature, file_type='binary')
return body | [
"def",
"generate_cot",
"(",
"context",
",",
"parent_path",
"=",
"None",
")",
":",
"body",
"=",
"generate_cot_body",
"(",
"context",
")",
"schema",
"=",
"load_json_or_yaml",
"(",
"context",
".",
"config",
"[",
"'cot_schema_path'",
"]",
",",
"is_path",
"=",
"T... | Format and sign the cot body, and write to disk.
Args:
context (scriptworker.context.Context): the scriptworker context.
parent_path (str, optional): The directory to write the chain of trust
artifacts to. If None, this is ``artifact_dir/public/``.
Defaults to None.
Returns:
str: the contents of the chain of trust artifact.
Raises:
ScriptWorkerException: on schema error. | [
"Format",
"and",
"sign",
"the",
"cot",
"body",
"and",
"write",
"to",
"disk",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/cot/generate.py#L96-L128 | train | 28,056 |
mozilla-releng/scriptworker | scriptworker/github.py | is_github_repo_owner_the_official_one | def is_github_repo_owner_the_official_one(context, repo_owner):
"""Given a repo_owner, check if it matches the one configured to be the official one.
Args:
context (scriptworker.context.Context): the scriptworker context.
repo_owner (str): the repo_owner to verify
Raises:
scriptworker.exceptions.ConfigError: when no official owner was defined
Returns:
bool: True when ``repo_owner`` matches the one configured to be the official one
"""
official_repo_owner = context.config['official_github_repos_owner']
if not official_repo_owner:
raise ConfigError(
'This worker does not have a defined owner for official GitHub repositories. '
'Given "official_github_repos_owner": {}'.format(official_repo_owner)
)
return official_repo_owner == repo_owner | python | def is_github_repo_owner_the_official_one(context, repo_owner):
"""Given a repo_owner, check if it matches the one configured to be the official one.
Args:
context (scriptworker.context.Context): the scriptworker context.
repo_owner (str): the repo_owner to verify
Raises:
scriptworker.exceptions.ConfigError: when no official owner was defined
Returns:
bool: True when ``repo_owner`` matches the one configured to be the official one
"""
official_repo_owner = context.config['official_github_repos_owner']
if not official_repo_owner:
raise ConfigError(
'This worker does not have a defined owner for official GitHub repositories. '
'Given "official_github_repos_owner": {}'.format(official_repo_owner)
)
return official_repo_owner == repo_owner | [
"def",
"is_github_repo_owner_the_official_one",
"(",
"context",
",",
"repo_owner",
")",
":",
"official_repo_owner",
"=",
"context",
".",
"config",
"[",
"'official_github_repos_owner'",
"]",
"if",
"not",
"official_repo_owner",
":",
"raise",
"ConfigError",
"(",
"'This wor... | Given a repo_owner, check if it matches the one configured to be the official one.
Args:
context (scriptworker.context.Context): the scriptworker context.
repo_owner (str): the repo_owner to verify
Raises:
scriptworker.exceptions.ConfigError: when no official owner was defined
Returns:
bool: True when ``repo_owner`` matches the one configured to be the official one | [
"Given",
"a",
"repo_owner",
"check",
"if",
"it",
"matches",
"the",
"one",
"configured",
"to",
"be",
"the",
"official",
"one",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/github.py#L215-L236 | train | 28,057 |
mozilla-releng/scriptworker | scriptworker/github.py | GitHubRepository.get_tag_hash | def get_tag_hash(self, tag_name):
"""Fetch the commit hash that was tagged with ``tag_name``.
Args:
tag_name (str): the name of the tag
Returns:
str: the commit hash linked by the tag
"""
tag_object = get_single_item_from_sequence(
sequence=self._github_repository.tags(),
condition=lambda tag: tag.name == tag_name,
no_item_error_message='No tag "{}" exist'.format(tag_name),
too_many_item_error_message='Too many tags "{}" found'.format(tag_name),
)
return tag_object.commit.sha | python | def get_tag_hash(self, tag_name):
"""Fetch the commit hash that was tagged with ``tag_name``.
Args:
tag_name (str): the name of the tag
Returns:
str: the commit hash linked by the tag
"""
tag_object = get_single_item_from_sequence(
sequence=self._github_repository.tags(),
condition=lambda tag: tag.name == tag_name,
no_item_error_message='No tag "{}" exist'.format(tag_name),
too_many_item_error_message='Too many tags "{}" found'.format(tag_name),
)
return tag_object.commit.sha | [
"def",
"get_tag_hash",
"(",
"self",
",",
"tag_name",
")",
":",
"tag_object",
"=",
"get_single_item_from_sequence",
"(",
"sequence",
"=",
"self",
".",
"_github_repository",
".",
"tags",
"(",
")",
",",
"condition",
"=",
"lambda",
"tag",
":",
"tag",
".",
"name"... | Fetch the commit hash that was tagged with ``tag_name``.
Args:
tag_name (str): the name of the tag
Returns:
str: the commit hash linked by the tag | [
"Fetch",
"the",
"commit",
"hash",
"that",
"was",
"tagged",
"with",
"tag_name",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/github.py#L80-L97 | train | 28,058 |
mozilla-releng/scriptworker | scriptworker/github.py | GitHubRepository.has_commit_landed_on_repository | async def has_commit_landed_on_repository(self, context, revision):
"""Tell if a commit was landed on the repository or if it just comes from a pull request.
Args:
context (scriptworker.context.Context): the scriptworker context.
revision (str): the commit hash or the tag name.
Returns:
bool: True if the commit is present in one of the branches of the main repository
"""
# Revision may be a tag name. `branch_commits` doesn't work on tags
if not _is_git_full_hash(revision):
revision = self.get_tag_hash(tag_name=revision)
repo = self._github_repository.html_url
url = '/'.join([repo.rstrip('/'), 'branch_commits', revision])
html_data = await retry_request(context, url)
html_text = html_data.strip()
# https://github.com/{repo_owner}/{repo_name}/branch_commits/{revision} just returns some \n
# when the commit hasn't landed on the origin repo. Otherwise, some HTML data is returned - it
# represents the branches on which the given revision is present.
return html_text != '' | python | async def has_commit_landed_on_repository(self, context, revision):
"""Tell if a commit was landed on the repository or if it just comes from a pull request.
Args:
context (scriptworker.context.Context): the scriptworker context.
revision (str): the commit hash or the tag name.
Returns:
bool: True if the commit is present in one of the branches of the main repository
"""
# Revision may be a tag name. `branch_commits` doesn't work on tags
if not _is_git_full_hash(revision):
revision = self.get_tag_hash(tag_name=revision)
repo = self._github_repository.html_url
url = '/'.join([repo.rstrip('/'), 'branch_commits', revision])
html_data = await retry_request(context, url)
html_text = html_data.strip()
# https://github.com/{repo_owner}/{repo_name}/branch_commits/{revision} just returns some \n
# when the commit hasn't landed on the origin repo. Otherwise, some HTML data is returned - it
# represents the branches on which the given revision is present.
return html_text != '' | [
"async",
"def",
"has_commit_landed_on_repository",
"(",
"self",
",",
"context",
",",
"revision",
")",
":",
"# Revision may be a tag name. `branch_commits` doesn't work on tags",
"if",
"not",
"_is_git_full_hash",
"(",
"revision",
")",
":",
"revision",
"=",
"self",
".",
"... | Tell if a commit was landed on the repository or if it just comes from a pull request.
Args:
context (scriptworker.context.Context): the scriptworker context.
revision (str): the commit hash or the tag name.
Returns:
bool: True if the commit is present in one of the branches of the main repository | [
"Tell",
"if",
"a",
"commit",
"was",
"landed",
"on",
"the",
"repository",
"or",
"if",
"it",
"just",
"comes",
"from",
"a",
"pull",
"request",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/github.py#L99-L122 | train | 28,059 |
mozilla-releng/scriptworker | scriptworker/log.py | update_logging_config | def update_logging_config(context, log_name=None, file_name='worker.log'):
"""Update python logging settings from config.
By default, this sets the ``scriptworker`` log settings, but this will
change if some other package calls this function or specifies the ``log_name``.
* Use formatting from config settings.
* Log to screen if ``verbose``
* Add a rotating logfile from config settings.
Args:
context (scriptworker.context.Context): the scriptworker context.
log_name (str, optional): the name of the Logger to modify.
If None, use the top level module ('scriptworker').
Defaults to None.
"""
log_name = log_name or __name__.split('.')[0]
top_level_logger = logging.getLogger(log_name)
datefmt = context.config['log_datefmt']
fmt = context.config['log_fmt']
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
if context.config.get("verbose"):
top_level_logger.setLevel(logging.DEBUG)
if len(top_level_logger.handlers) == 0:
handler = logging.StreamHandler()
handler.setFormatter(formatter)
top_level_logger.addHandler(handler)
else:
top_level_logger.setLevel(logging.INFO)
# Rotating log file
makedirs(context.config['log_dir'])
path = os.path.join(context.config['log_dir'], file_name)
if context.config["watch_log_file"]:
# If we rotate the log file via logrotate.d, let's watch the file
# so we can automatically close/reopen on move.
handler = logging.handlers.WatchedFileHandler(path)
else:
# Avoid using WatchedFileHandler during scriptworker unittests
handler = logging.FileHandler(path)
handler.setFormatter(formatter)
top_level_logger.addHandler(handler)
top_level_logger.addHandler(logging.NullHandler()) | python | def update_logging_config(context, log_name=None, file_name='worker.log'):
"""Update python logging settings from config.
By default, this sets the ``scriptworker`` log settings, but this will
change if some other package calls this function or specifies the ``log_name``.
* Use formatting from config settings.
* Log to screen if ``verbose``
* Add a rotating logfile from config settings.
Args:
context (scriptworker.context.Context): the scriptworker context.
log_name (str, optional): the name of the Logger to modify.
If None, use the top level module ('scriptworker').
Defaults to None.
"""
log_name = log_name or __name__.split('.')[0]
top_level_logger = logging.getLogger(log_name)
datefmt = context.config['log_datefmt']
fmt = context.config['log_fmt']
formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
if context.config.get("verbose"):
top_level_logger.setLevel(logging.DEBUG)
if len(top_level_logger.handlers) == 0:
handler = logging.StreamHandler()
handler.setFormatter(formatter)
top_level_logger.addHandler(handler)
else:
top_level_logger.setLevel(logging.INFO)
# Rotating log file
makedirs(context.config['log_dir'])
path = os.path.join(context.config['log_dir'], file_name)
if context.config["watch_log_file"]:
# If we rotate the log file via logrotate.d, let's watch the file
# so we can automatically close/reopen on move.
handler = logging.handlers.WatchedFileHandler(path)
else:
# Avoid using WatchedFileHandler during scriptworker unittests
handler = logging.FileHandler(path)
handler.setFormatter(formatter)
top_level_logger.addHandler(handler)
top_level_logger.addHandler(logging.NullHandler()) | [
"def",
"update_logging_config",
"(",
"context",
",",
"log_name",
"=",
"None",
",",
"file_name",
"=",
"'worker.log'",
")",
":",
"log_name",
"=",
"log_name",
"or",
"__name__",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"top_level_logger",
"=",
"logging",
... | Update python logging settings from config.
By default, this sets the ``scriptworker`` log settings, but this will
change if some other package calls this function or specifies the ``log_name``.
* Use formatting from config settings.
* Log to screen if ``verbose``
* Add a rotating logfile from config settings.
Args:
context (scriptworker.context.Context): the scriptworker context.
log_name (str, optional): the name of the Logger to modify.
If None, use the top level module ('scriptworker').
Defaults to None. | [
"Update",
"python",
"logging",
"settings",
"from",
"config",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/log.py#L19-L64 | train | 28,060 |
mozilla-releng/scriptworker | scriptworker/log.py | pipe_to_log | async def pipe_to_log(pipe, filehandles=(), level=logging.INFO):
"""Log from a subprocess PIPE.
Args:
pipe (filehandle): subprocess process STDOUT or STDERR
filehandles (list of filehandles, optional): the filehandle(s) to write
to. If empty, don't write to a separate file. Defaults to ().
level (int, optional): the level to log to. Defaults to ``logging.INFO``.
"""
while True:
line = await pipe.readline()
if line:
line = to_unicode(line)
log.log(level, line.rstrip())
for filehandle in filehandles:
print(line, file=filehandle, end="")
else:
break | python | async def pipe_to_log(pipe, filehandles=(), level=logging.INFO):
"""Log from a subprocess PIPE.
Args:
pipe (filehandle): subprocess process STDOUT or STDERR
filehandles (list of filehandles, optional): the filehandle(s) to write
to. If empty, don't write to a separate file. Defaults to ().
level (int, optional): the level to log to. Defaults to ``logging.INFO``.
"""
while True:
line = await pipe.readline()
if line:
line = to_unicode(line)
log.log(level, line.rstrip())
for filehandle in filehandles:
print(line, file=filehandle, end="")
else:
break | [
"async",
"def",
"pipe_to_log",
"(",
"pipe",
",",
"filehandles",
"=",
"(",
")",
",",
"level",
"=",
"logging",
".",
"INFO",
")",
":",
"while",
"True",
":",
"line",
"=",
"await",
"pipe",
".",
"readline",
"(",
")",
"if",
"line",
":",
"line",
"=",
"to_u... | Log from a subprocess PIPE.
Args:
pipe (filehandle): subprocess process STDOUT or STDERR
filehandles (list of filehandles, optional): the filehandle(s) to write
to. If empty, don't write to a separate file. Defaults to ().
level (int, optional): the level to log to. Defaults to ``logging.INFO``. | [
"Log",
"from",
"a",
"subprocess",
"PIPE",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/log.py#L67-L85 | train | 28,061 |
mozilla-releng/scriptworker | scriptworker/log.py | get_log_filehandle | def get_log_filehandle(context):
"""Open the log and error filehandles.
Args:
context (scriptworker.context.Context): the scriptworker context.
Yields:
log filehandle
"""
log_file_name = get_log_filename(context)
makedirs(context.config['task_log_dir'])
with open(log_file_name, "w", encoding="utf-8") as filehandle:
yield filehandle | python | def get_log_filehandle(context):
"""Open the log and error filehandles.
Args:
context (scriptworker.context.Context): the scriptworker context.
Yields:
log filehandle
"""
log_file_name = get_log_filename(context)
makedirs(context.config['task_log_dir'])
with open(log_file_name, "w", encoding="utf-8") as filehandle:
yield filehandle | [
"def",
"get_log_filehandle",
"(",
"context",
")",
":",
"log_file_name",
"=",
"get_log_filename",
"(",
"context",
")",
"makedirs",
"(",
"context",
".",
"config",
"[",
"'task_log_dir'",
"]",
")",
"with",
"open",
"(",
"log_file_name",
",",
"\"w\"",
",",
"encoding... | Open the log and error filehandles.
Args:
context (scriptworker.context.Context): the scriptworker context.
Yields:
log filehandle | [
"Open",
"the",
"log",
"and",
"error",
"filehandles",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/log.py#L103-L116 | train | 28,062 |
mozilla-releng/scriptworker | scriptworker/log.py | contextual_log_handler | def contextual_log_handler(context, path, log_obj=None, level=logging.DEBUG,
formatter=None):
"""Add a short-lived log with a contextmanager for cleanup.
Args:
context (scriptworker.context.Context): the scriptworker context
path (str): the path to the log file to create
log_obj (logging.Logger): the log object to modify. If None, use
``scriptworker.log.log``. Defaults to None.
level (int, optional): the logging level. Defaults to logging.DEBUG.
formatter (logging.Formatter, optional): the logging formatter. If None,
defaults to ``logging.Formatter(fmt=fmt)``. Default is None.
Yields:
None: but cleans up the handler afterwards.
"""
log_obj = log_obj or log
formatter = formatter or logging.Formatter(
fmt=context.config['log_fmt'],
datefmt=context.config['log_datefmt'],
)
parent_path = os.path.dirname(path)
makedirs(parent_path)
contextual_handler = logging.FileHandler(path, encoding='utf-8')
contextual_handler.setLevel(level)
contextual_handler.setFormatter(formatter)
log_obj.addHandler(contextual_handler)
yield
contextual_handler.close()
log_obj.removeHandler(contextual_handler) | python | def contextual_log_handler(context, path, log_obj=None, level=logging.DEBUG,
formatter=None):
"""Add a short-lived log with a contextmanager for cleanup.
Args:
context (scriptworker.context.Context): the scriptworker context
path (str): the path to the log file to create
log_obj (logging.Logger): the log object to modify. If None, use
``scriptworker.log.log``. Defaults to None.
level (int, optional): the logging level. Defaults to logging.DEBUG.
formatter (logging.Formatter, optional): the logging formatter. If None,
defaults to ``logging.Formatter(fmt=fmt)``. Default is None.
Yields:
None: but cleans up the handler afterwards.
"""
log_obj = log_obj or log
formatter = formatter or logging.Formatter(
fmt=context.config['log_fmt'],
datefmt=context.config['log_datefmt'],
)
parent_path = os.path.dirname(path)
makedirs(parent_path)
contextual_handler = logging.FileHandler(path, encoding='utf-8')
contextual_handler.setLevel(level)
contextual_handler.setFormatter(formatter)
log_obj.addHandler(contextual_handler)
yield
contextual_handler.close()
log_obj.removeHandler(contextual_handler) | [
"def",
"contextual_log_handler",
"(",
"context",
",",
"path",
",",
"log_obj",
"=",
"None",
",",
"level",
"=",
"logging",
".",
"DEBUG",
",",
"formatter",
"=",
"None",
")",
":",
"log_obj",
"=",
"log_obj",
"or",
"log",
"formatter",
"=",
"formatter",
"or",
"... | Add a short-lived log with a contextmanager for cleanup.
Args:
context (scriptworker.context.Context): the scriptworker context
path (str): the path to the log file to create
log_obj (logging.Logger): the log object to modify. If None, use
``scriptworker.log.log``. Defaults to None.
level (int, optional): the logging level. Defaults to logging.DEBUG.
formatter (logging.Formatter, optional): the logging formatter. If None,
defaults to ``logging.Formatter(fmt=fmt)``. Default is None.
Yields:
None: but cleans up the handler afterwards. | [
"Add",
"a",
"short",
"-",
"lived",
"log",
"with",
"a",
"contextmanager",
"for",
"cleanup",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/log.py#L120-L150 | train | 28,063 |
async def upload_artifacts(context, files):
    """Upload the given files from ``artifact_dir``, keeping their relative paths.

    Files whose content type is known to benefit from it are gzip-compressed
    before upload. The relative directory structure under ``artifact_dir``
    maps directly onto the artifact paths, so a file meant to live under
    ``public/...`` must sit at ``artifact_dir/public/...``.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        files (list of str): relative paths of the files to upload.

    Raises:
        Exception: whatever the individual upload tasks raise.

    """
    artifact_dir = context.config['artifact_dir']
    upload_futures = []
    for target_path in files:
        source_path = os.path.join(artifact_dir, target_path)
        # May rewrite the file in place; returns the headers to upload with.
        content_type, content_encoding = compress_artifact_if_supported(source_path)
        upload_futures.append(asyncio.ensure_future(retry_create_artifact(
            context,
            source_path,
            target_path=target_path,
            content_type=content_type,
            content_encoding=content_encoding,
        )))
    # Uploads run concurrently; surface any failure once all have settled.
    await raise_future_exceptions(upload_futures)
"async",
"def",
"upload_artifacts",
"(",
"context",
",",
"files",
")",
":",
"def",
"to_upload_future",
"(",
"target_path",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"context",
".",
"config",
"[",
"'artifact_dir'",
"]",
",",
"target_path",... | Compress and upload the requested files from ``artifact_dir``, preserving relative paths.
Compression only occurs with files known to be supported.
This function expects the directory structure in ``artifact_dir`` to remain
the same. So if we want the files in ``public/...``, create an
``artifact_dir/public`` and put the files in there.
Args:
context (scriptworker.context.Context): the scriptworker context.
files (list of str): files that should be uploaded as artifacts
Raises:
Exception: any exceptions the tasks raise. | [
"Compress",
"and",
"upload",
"the",
"requested",
"files",
"from",
"artifact_dir",
"preserving",
"relative",
"paths",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L48-L77 | train | 28,064 |
def compress_artifact_if_supported(artifact_path):
    """Gzip-compress the artifact in place when its content type allows it.

    The file at ``artifact_path`` is overwritten with its gzipped form when it
    is not already encoded and its content type is known to compress well.

    Args:
        artifact_path (str): path of the artifact to (maybe) compress.

    Returns:
        tuple: ``(content_type, encoding)``. ``encoding`` is ``'gzip'`` when
            the file was compressed, the original guess otherwise.

    """
    content_type, encoding = guess_content_type_and_encoding(artifact_path)
    log.debug('"{}" is encoded with "{}" and has mime/type "{}"'.format(artifact_path, encoding, content_type))

    should_compress = encoding is None and content_type in _GZIP_SUPPORTED_CONTENT_TYPE
    if not should_compress:
        log.debug('"{}" is not supported for compression.'.format(artifact_path))
        return content_type, encoding

    log.info('"{}" can be gzip\'d. Compressing...'.format(artifact_path))
    # Read the original bytes fully, then rewrite the same path gzipped.
    with open(artifact_path, 'rb') as f_in:
        original_bytes = f_in.read()
    with gzip.open(artifact_path, 'wb') as f_out:
        f_out.write(original_bytes)
    log.info('"{}" compressed'.format(artifact_path))
    return content_type, 'gzip'
"def",
"compress_artifact_if_supported",
"(",
"artifact_path",
")",
":",
"content_type",
",",
"encoding",
"=",
"guess_content_type_and_encoding",
"(",
"artifact_path",
")",
"log",
".",
"debug",
"(",
"'\"{}\" is encoded with \"{}\" and has mime/type \"{}\"'",
".",
"format",
... | Compress artifacts with GZip if they're known to be supported.
This replaces the artifact given by a gzip binary.
Args:
artifact_path (str): the path to compress
Returns:
content_type, content_encoding (tuple): Type and encoding of the file. Encoding equals 'gzip' if compressed. | [
"Compress",
"artifacts",
"with",
"GZip",
"if",
"they",
"re",
"known",
"to",
"be",
"supported",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L80-L108 | train | 28,065 |
def guess_content_type_and_encoding(path):
    """Guess the content type and encoding of a path.

    Extensions listed in ``_EXTENSION_TO_MIME_TYPE`` take precedence over
    ``mimetypes.guess_type``. Falls back to "application/binary" if no
    content type is found.

    Args:
        path (str): the path to guess the mimetype of

    Returns:
        tuple: ``(content_type, encoding)``. ``encoding`` is None when the
            file carries no compression encoding.

    """
    # Project-specific overrides win over the stdlib table. Each value must
    # be a (content_type, encoding) pair, matching this function's return
    # shape -- callers unpack two values.
    for extension, content_type_and_encoding in _EXTENSION_TO_MIME_TYPE.items():
        if path.endswith(extension):
            return content_type_and_encoding

    content_type, encoding = mimetypes.guess_type(path)
    content_type = content_type or "application/binary"
    return content_type, encoding
"def",
"guess_content_type_and_encoding",
"(",
"path",
")",
":",
"for",
"ext",
",",
"content_type",
"in",
"_EXTENSION_TO_MIME_TYPE",
".",
"items",
"(",
")",
":",
"if",
"path",
".",
"endswith",
"(",
"ext",
")",
":",
"return",
"content_type",
"content_type",
","... | Guess the content type of a path, using ``mimetypes``.
Falls back to "application/binary" if no content type is found.
Args:
path (str): the path to guess the mimetype of
Returns:
str: the content type of the file | [
"Guess",
"the",
"content",
"type",
"of",
"a",
"path",
"using",
"mimetypes",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L111-L129 | train | 28,066 |
async def create_artifact(context, path, target_path, content_type, content_encoding, storage_type='s3', expires=None):
    """Create an artifact and upload it.

    This should support s3 and azure out of the box; we'll need some tweaking
    if we want to support redirect/error artifacts.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        path (str): the path of the file to upload.
        target_path (str): the artifact path to publish the file under
            (passed to the queue's ``createArtifact``).
        content_type (str): Content type (MIME type) of the artifact. Values can be found via
            scriptworker.artifacts.guess_content_type_and_encoding()
        content_encoding (str): Encoding (per mimetypes' library) of the artifact. None is for no encoding. Values can
            be found via scriptworker.artifacts.guess_content_type_and_encoding()
        storage_type (str, optional): the taskcluster storage type to use.
            Defaults to 's3'
        expires (str, optional): datestring of when the artifact expires.
            Defaults to None.

    Raises:
        ScriptWorkerRetryException: on failure.

    """
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type,
    }
    args = [get_task_id(context.claim_task), get_run_id(context.claim_task),
            target_path, payload]
    # Register the artifact with the queue; the response contains the signed
    # putUrl the file bytes are then PUT to.
    tc_response = await context.temp_queue.createArtifact(*args)
    # Keep aiohttp from injecting its own Content-Type header; the crafted
    # headers below set it (and Content-Encoding) explicitly.
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    # NOTE(review): get_loggable_url presumably scrubs signing credentials
    # from the putUrl before it hits the logs -- confirm in its definition.
    loggable_url = get_loggable_url(tc_response['putUrl'])
    log.info("uploading {path} to {url}...".format(path=path, url=loggable_url))
    with open(path, "rb") as fh:
        # Bound the whole PUT by the configured upload timeout.
        async with async_timeout.timeout(context.config['artifact_upload_timeout']):
            async with context.session.put(
                tc_response['putUrl'], data=fh, headers=_craft_artifact_put_headers(content_type, content_encoding),
                skip_auto_headers=skip_auto_headers, compress=False
            ) as resp:
                # compress=False: upload the on-disk bytes as-is; any gzip
                # encoding was applied beforehand (see content_encoding).
                log.info("create_artifact {}: {}".format(path, resp.status))
                response_text = await resp.text()
                log.info(response_text)
                # Any status other than 200/204 signals the caller to retry.
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status),
                    )
"async",
"def",
"create_artifact",
"(",
"context",
",",
"path",
",",
"target_path",
",",
"content_type",
",",
"content_encoding",
",",
"storage_type",
"=",
"'s3'",
",",
"expires",
"=",
"None",
")",
":",
"payload",
"=",
"{",
"\"storageType\"",
":",
"storage_typ... | Create an artifact and upload it.
This should support s3 and azure out of the box; we'll need some tweaking
if we want to support redirect/error artifacts.
Args:
context (scriptworker.context.Context): the scriptworker context.
path (str): the path of the file to upload.
target_path (str):
content_type (str): Content type (MIME type) of the artifact. Values can be found via
scriptworker.artifacts.guess_content_type_and_encoding()
content_encoding (str): Encoding (per mimetypes' library) of the artifact. None is for no encoding. Values can
be found via scriptworker.artifacts.guess_content_type_and_encoding()
storage_type (str, optional): the taskcluster storage type to use.
Defaults to 's3'
expires (str, optional): datestring of when the artifact expires.
Defaults to None.
Raises:
ScriptWorkerRetryException: on failure. | [
"Create",
"an",
"artifact",
"and",
"upload",
"it",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L153-L200 | train | 28,067 |
def get_artifact_url(context, task_id, path):
    """Build the url of a Taskcluster artifact.

    Public artifacts get a plain url; anything else gets a signed url that
    embeds temporary credentials.

    Args:
        context (scriptworker.context.Context): the scriptworker context
        task_id (str): the task id of the task that published the artifact
        path (str): the relative path of the artifact

    Returns:
        str: the artifact url

    Raises:
        TaskClusterFailure: on failure.

    """
    is_public = path.startswith("public/")
    # Signed urls default to a 15 minute expiration; an expiration kwarg (in
    # seconds from now) could be passed to buildSignedUrl to change that.
    build_url = context.queue.buildUrl if is_public else context.queue.buildSignedUrl
    return build_url('getLatestArtifact', task_id, path)
"def",
"get_artifact_url",
"(",
"context",
",",
"task_id",
",",
"path",
")",
":",
"if",
"path",
".",
"startswith",
"(",
"\"public/\"",
")",
":",
"url",
"=",
"context",
".",
"queue",
".",
"buildUrl",
"(",
"'getLatestArtifact'",
",",
"task_id",
",",
"path",
... | Get a TaskCluster artifact url.
Args:
context (scriptworker.context.Context): the scriptworker context
task_id (str): the task id of the task that published the artifact
path (str): the relative path of the artifact
Returns:
str: the artifact url
Raises:
TaskClusterFailure: on failure. | [
"Get",
"a",
"TaskCluster",
"artifact",
"url",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L216-L240 | train | 28,068 |
async def download_artifacts(context, file_urls, parent_dir=None, session=None,
                             download_func=download_file, valid_artifact_task_ids=None):
    """Validate a set of artifact urls and download them concurrently.

    Every url must satisfy ``valid_artifact_rules`` and point at one of the
    allowed task ids -- by default the task's dependencies plus its decision
    task (by convention, the ``taskGroupId``).

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        file_urls (list): the artifact urls to download.
        parent_dir (str, optional): directory to download into. Defaults to
            ``work_dir`` when None.
        session (aiohttp.ClientSession, optional): session used for the
            downloads. Defaults to ``context.session`` when None.
        download_func (function, optional): callable performing a single
            download. Defaults to ``download_file``.
        valid_artifact_task_ids (list, optional): task ids downloads may come
            from. Defaults to the task dependencies plus the decision task id.

    Returns:
        list: the full paths of the downloaded files.

    Raises:
        scriptworker.exceptions.BaseDownloadError: when a download still
            fails after its retries.

    """
    parent_dir = parent_dir or context.config['work_dir']
    session = session or context.session
    valid_artifact_rules = context.config['valid_artifact_rules']
    # XXX when chain of trust is on everywhere, hardcode the chain of trust task list
    valid_artifact_task_ids = valid_artifact_task_ids or list(context.task['dependencies'] + [get_decision_task_id(context.task)])

    abs_paths = []
    download_futures = []
    for file_url in file_urls:
        # Raises if the url doesn't match the allowed rules/task ids.
        rel_path = validate_artifact_url(valid_artifact_rules, valid_artifact_task_ids, file_url)
        abs_path = os.path.join(parent_dir, rel_path)
        abs_paths.append(abs_path)
        retrying_download = retry_async(
            download_func, args=(context, file_url, abs_path),
            retry_exceptions=(DownloadError, aiohttp.ClientError, asyncio.TimeoutError),
            kwargs={'session': session},
        )
        download_futures.append(asyncio.ensure_future(retrying_download))

    await raise_future_exceptions(download_futures)
    return abs_paths
"async",
"def",
"download_artifacts",
"(",
"context",
",",
"file_urls",
",",
"parent_dir",
"=",
"None",
",",
"session",
"=",
"None",
",",
"download_func",
"=",
"download_file",
",",
"valid_artifact_task_ids",
"=",
"None",
")",
":",
"parent_dir",
"=",
"parent_dir... | Download artifacts in parallel after validating their URLs.
Valid ``taskId``s for download include the task's dependencies and the
``taskGroupId``, which by convention is the ``taskId`` of the decision task.
Args:
context (scriptworker.context.Context): the scriptworker context.
file_urls (list): the list of artifact urls to download.
parent_dir (str, optional): the path of the directory to download the
artifacts into. If None, defaults to ``work_dir``. Default is None.
session (aiohttp.ClientSession, optional): the session to use to download.
If None, defaults to context.session. Default is None.
download_func (function, optional): the function to call to download the files.
default is ``download_file``.
valid_artifact_task_ids (list, optional): the list of task ids that are
valid to download from. If None, defaults to all task dependencies
plus the decision taskId. Defaults to None.
Returns:
list: the full paths to the files downloaded
Raises:
scriptworker.exceptions.BaseDownloadError: on download failure after
any applicable retries. | [
"Download",
"artifacts",
"in",
"parallel",
"after",
"validating",
"their",
"URLs",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L258-L309 | train | 28,069 |
def get_upstream_artifacts_full_paths_per_task_id(context):
    """Sort the downloaded upstream artifact paths by task id.

    Args:
        context (scriptworker.context.Context): the scriptworker context.

    Returns:
        dict, dict: the first dict maps task ids to the full paths of the
            artifacts found on disk; the second maps task ids to the
            optional artifact paths that could not be downloaded.

    Raises:
        scriptworker.exceptions.ScriptWorkerTaskException: when a
            non-optional artifact is missing.

    """
    upstream_artifacts = context.task['payload']['upstreamArtifacts']
    optional_artifacts_per_task_id = get_optional_artifacts_per_task_id(upstream_artifacts)

    existing_paths_per_task_id = {}
    failed_paths_per_task_id = {}
    for definition in upstream_artifacts:
        task_id = definition['taskId']
        for path in definition['paths']:
            try:
                full_path = get_and_check_single_upstream_artifact_full_path(context, task_id, path)
            except ScriptWorkerTaskException:
                # Missing files are only tolerated when declared optional.
                if path not in optional_artifacts_per_task_id.get(task_id, []):
                    raise
                log.warning('Optional artifact "{}" of task "{}" not found'.format(path, task_id))
                add_enumerable_item_to_dict(
                    dict_=failed_paths_per_task_id,
                    key=task_id, item=path
                )
            else:
                add_enumerable_item_to_dict(
                    dict_=existing_paths_per_task_id,
                    key=task_id, item=full_path
                )
    return existing_paths_per_task_id, failed_paths_per_task_id
"def",
"get_upstream_artifacts_full_paths_per_task_id",
"(",
"context",
")",
":",
"upstream_artifacts",
"=",
"context",
".",
"task",
"[",
"'payload'",
"]",
"[",
"'upstreamArtifacts'",
"]",
"task_ids_and_relative_paths",
"=",
"[",
"(",
"artifact_definition",
"[",
"'taskI... | List the downloaded upstream artifacts.
Args:
context (scriptworker.context.Context): the scriptworker context.
Returns:
dict, dict: lists of the paths to upstream artifacts, sorted by task_id.
First dict represents the existing upstream artifacts. The second one
maps the optional artifacts that couldn't be downloaded
Raises:
scriptworker.exceptions.ScriptWorkerTaskException: when an artifact doesn't exist. | [
"List",
"the",
"downloaded",
"upstream",
"artifacts",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L312-L355 | train | 28,070 |
def get_and_check_single_upstream_artifact_full_path(context, task_id, path):
    """Return the on-disk location of an upstream artifact, verifying it exists.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        task_id (str): the task id of the task that published the artifact
        path (str): the relative path of the artifact

    Returns:
        str: the absolute path of the artifact

    Raises:
        scriptworker.exceptions.ScriptWorkerTaskException: when the file is
            not on disk.

    """
    full_path = get_single_upstream_artifact_full_path(context, task_id, path)
    if os.path.exists(full_path):
        return full_path
    raise ScriptWorkerTaskException(
        'upstream artifact with path: {}, does not exist'.format(full_path)
    )
"def",
"get_and_check_single_upstream_artifact_full_path",
"(",
"context",
",",
"task_id",
",",
"path",
")",
":",
"abs_path",
"=",
"get_single_upstream_artifact_full_path",
"(",
"context",
",",
"task_id",
",",
"path",
")",
"if",
"not",
"os",
".",
"path",
".",
"exi... | Return the full path where an upstream artifact is located on disk.
Args:
context (scriptworker.context.Context): the scriptworker context.
task_id (str): the task id of the task that published the artifact
path (str): the relative path of the artifact
Returns:
str: absolute path to the artifact
Raises:
scriptworker.exceptions.ScriptWorkerTaskException: when an artifact doesn't exist. | [
"Return",
"the",
"full",
"path",
"where",
"an",
"upstream",
"artifact",
"is",
"located",
"on",
"disk",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L358-L379 | train | 28,071 |
def get_single_upstream_artifact_full_path(context, task_id, path):
    """Compute where an upstream artifact would live on disk.

    The artifact is not required to exist; use
    ``get_and_check_single_upstream_artifact_full_path()`` when it must.
    This function is mainly used to move artifacts to the expected location.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        task_id (str): the task id of the task that published the artifact
        path (str): the relative path of the artifact

    Returns:
        str: the absolute path where the artifact should be.

    """
    # Downloaded upstream artifacts live under work_dir/cot/<taskId>/<path>.
    relative_location = os.path.join(context.config['work_dir'], 'cot', task_id, path)
    return os.path.abspath(relative_location)
"def",
"get_single_upstream_artifact_full_path",
"(",
"context",
",",
"task_id",
",",
"path",
")",
":",
"return",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"context",
".",
"config",
"[",
"'work_dir'",
"]",
",",
"'cot'",
... | Return the full path where an upstream artifact should be located.
Artifact may not exist. If you want to be sure if does, use
``get_and_check_single_upstream_artifact_full_path()`` instead.
This function is mainly used to move artifacts to the expected location.
Args:
context (scriptworker.context.Context): the scriptworker context.
task_id (str): the task id of the task that published the artifact
path (str): the relative path of the artifact
Returns:
str: absolute path to the artifact should be. | [
"Return",
"the",
"full",
"path",
"where",
"an",
"upstream",
"artifact",
"should",
"be",
"located",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L382-L399 | train | 28,072 |
mozilla-releng/scriptworker | scriptworker/artifacts.py | get_optional_artifacts_per_task_id | def get_optional_artifacts_per_task_id(upstream_artifacts):
"""Return every optional artifact defined in ``upstream_artifacts``, ordered by taskId.
Args:
upstream_artifacts: the list of upstream artifact definitions
Returns:
dict: list of paths to downloaded artifacts ordered by taskId
"""
# A given taskId might be defined many times in upstreamArtifacts. Thus, we can't
# use a dict comprehension
optional_artifacts_per_task_id = {}
for artifact_definition in upstream_artifacts:
if artifact_definition.get('optional', False) is True:
task_id = artifact_definition['taskId']
artifacts_paths = artifact_definition['paths']
add_enumerable_item_to_dict(
dict_=optional_artifacts_per_task_id,
key=task_id, item=artifacts_paths
)
return optional_artifacts_per_task_id | python | def get_optional_artifacts_per_task_id(upstream_artifacts):
"""Return every optional artifact defined in ``upstream_artifacts``, ordered by taskId.
Args:
upstream_artifacts: the list of upstream artifact definitions
Returns:
dict: list of paths to downloaded artifacts ordered by taskId
"""
# A given taskId might be defined many times in upstreamArtifacts. Thus, we can't
# use a dict comprehension
optional_artifacts_per_task_id = {}
for artifact_definition in upstream_artifacts:
if artifact_definition.get('optional', False) is True:
task_id = artifact_definition['taskId']
artifacts_paths = artifact_definition['paths']
add_enumerable_item_to_dict(
dict_=optional_artifacts_per_task_id,
key=task_id, item=artifacts_paths
)
return optional_artifacts_per_task_id | [
"def",
"get_optional_artifacts_per_task_id",
"(",
"upstream_artifacts",
")",
":",
"# A given taskId might be defined many times in upstreamArtifacts. Thus, we can't",
"# use a dict comprehension",
"optional_artifacts_per_task_id",
"=",
"{",
"}",
"for",
"artifact_definition",
"in",
"ups... | Return every optional artifact defined in ``upstream_artifacts``, ordered by taskId.
Args:
upstream_artifacts: the list of upstream artifact definitions
Returns:
dict: list of paths to downloaded artifacts ordered by taskId | [
"Return",
"every",
"optional",
"artifact",
"defined",
"in",
"upstream_artifacts",
"ordered",
"by",
"taskId",
"."
] | 8e97bbd83b9b578565ec57904c966dd6ae4ef0ae | https://github.com/mozilla-releng/scriptworker/blob/8e97bbd83b9b578565ec57904c966dd6ae4ef0ae/scriptworker/artifacts.py#L402-L426 | train | 28,073 |
def set_chat_description(chat_id, description, **kwargs):
    """
    Use this method to change the description of a supergroup or a channel. The bot must be an administrator in the
    chat for this to work and must have the appropriate admin rights. Returns True on success.

    :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
    :param description: New chat description, 0-255 characters
    :param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`

    :return: Returns True on success.
    :rtype: bool
    """
    # Telegram rejects descriptions longer than 255 characters; fail fast
    # locally instead of round-tripping to the API.
    if len(description) > 255:
        raise ValueError("Chat description must not be longer than 255 characters.")

    # required args
    params = dict(
        chat_id=chat_id,
        description=description
    )

    # Bug fix: this previously called the 'setChatTitle' endpoint, which
    # would have changed the chat *title* instead of its description.
    return TelegramBotRPCRequest('setChatDescription', params=params, on_result=lambda result: result, **kwargs)
"def",
"set_chat_description",
"(",
"chat_id",
",",
"description",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"len",
"(",
"description",
")",
">",
"255",
":",
"raise",
"ValueError",
"(",
"\"Chat description must be less than 255 characters.\"",
")",
"# required args",... | Use this method to change the description of a supergroup or a channel. The bot must be an administrator in the
chat for this to work and must have the appropriate admin rights. Returns True on success.
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param description: New chat description, 0-255 characters
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:return: Returns True on success.
:rtype: bool | [
"Use",
"this",
"method",
"to",
"change",
"the",
"description",
"of",
"a",
"supergroup",
"or",
"a",
"channel",
".",
"The",
"bot",
"must",
"be",
"an",
"administrator",
"in",
"the",
"chat",
"for",
"this",
"to",
"work",
"and",
"must",
"have",
"the",
"appropr... | c85184da738169e8f9d6d8e62970540f427c486e | https://github.com/datamachine/twx.botapi/blob/c85184da738169e8f9d6d8e62970540f427c486e/twx/botapi/botapi.py#L2379-L2400 | train | 28,074 |
datamachine/twx.botapi | twx/botapi/botapi.py | send_audio | def send_audio(chat_id, audio,
caption=None, duration=None, performer=None, title=None, reply_to_message_id=None, reply_markup=None,
disable_notification=False, parse_mode=None, **kwargs):
"""
Use this method to send audio files, if you want Telegram clients to display them in the music player.
Your audio must be in the .mp3 format. On success, the sent Message is returned. Bots can currently send audio
files of up to 50 MB in size, this limit may be changed in the future.
For backward compatibility, when the fields title and performer are both empty and the mime-type of the file to
be sent is not audio/mpeg, the file will be sent as a playable voice message. For this to work, the audio must
be in an .ogg file encoded with OPUS. This behavior will be phased out in the future. For sending voice
messages, use the sendVoice method instead.
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param audio: Audio file to send. Pass a file_id as String to send an audio file that exists on the Telegram servers (recommended),
pass an HTTP URL as a String for Telegram to get an audio file from the Internet, or upload a new one using multipart/form-data.
:param caption: Audio caption, 0-200 characters
:param duration: Duration of the audio in seconds
:param performer: Performer
:param title: Track name
:param reply_to_message_id: If the message is a reply, ID of the original message
:param reply_markup: Additional interface options. A JSON-serialized object for a custom reply keyboard,
:param disable_notification: Sends the message silently. iOS users will not receive a notification, Android users
will receive a notification with no sound. Other apps coming soon.
:param parse_mode: Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline
URLs in your bot's message.
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type audio: InputFile or str
:type caption: str
:type duration: int
:type performer: str
:type title: str
:type reply_to_message_id: int
:type reply_markup: ReplyKeyboardMarkup or ReplyKeyboardHide or ForceReply
:type parse_mode: str
:returns: On success, the sent Message is returned.
:rtype: TelegramBotRPCRequest
"""
files = None
if isinstance(audio, InputFile):
files = [audio]
audio = None
elif not isinstance(audio, str):
raise Exception('audio must be instance of InputFile or str')
# required args
params = dict(
chat_id=chat_id,
audio=audio
)
# optional args
params.update(
_clean_params(
caption=caption,
duration=duration,
performer=performer,
title=title,
reply_to_message_id=reply_to_message_id,
reply_markup=reply_markup,
disable_notification=disable_notification,
parse_mode=parse_mode,
)
)
return TelegramBotRPCRequest('sendAudio', params=params, files=files, on_result=Message.from_result, **kwargs) | python | def send_audio(chat_id, audio,
caption=None, duration=None, performer=None, title=None, reply_to_message_id=None, reply_markup=None,
disable_notification=False, parse_mode=None, **kwargs):
"""
Use this method to send audio files, if you want Telegram clients to display them in the music player.
Your audio must be in the .mp3 format. On success, the sent Message is returned. Bots can currently send audio
files of up to 50 MB in size, this limit may be changed in the future.
For backward compatibility, when the fields title and performer are both empty and the mime-type of the file to
be sent is not audio/mpeg, the file will be sent as a playable voice message. For this to work, the audio must
be in an .ogg file encoded with OPUS. This behavior will be phased out in the future. For sending voice
messages, use the sendVoice method instead.
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param audio: Audio file to send. Pass a file_id as String to send an audio file that exists on the Telegram servers (recommended),
pass an HTTP URL as a String for Telegram to get an audio file from the Internet, or upload a new one using multipart/form-data.
:param caption: Audio caption, 0-200 characters
:param duration: Duration of the audio in seconds
:param performer: Performer
:param title: Track name
:param reply_to_message_id: If the message is a reply, ID of the original message
:param reply_markup: Additional interface options. A JSON-serialized object for a custom reply keyboard,
:param disable_notification: Sends the message silently. iOS users will not receive a notification, Android users
will receive a notification with no sound. Other apps coming soon.
:param parse_mode: Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline
URLs in your bot's message.
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type audio: InputFile or str
:type caption: str
:type duration: int
:type performer: str
:type title: str
:type reply_to_message_id: int
:type reply_markup: ReplyKeyboardMarkup or ReplyKeyboardHide or ForceReply
:type parse_mode: str
:returns: On success, the sent Message is returned.
:rtype: TelegramBotRPCRequest
"""
files = None
if isinstance(audio, InputFile):
files = [audio]
audio = None
elif not isinstance(audio, str):
raise Exception('audio must be instance of InputFile or str')
# required args
params = dict(
chat_id=chat_id,
audio=audio
)
# optional args
params.update(
_clean_params(
caption=caption,
duration=duration,
performer=performer,
title=title,
reply_to_message_id=reply_to_message_id,
reply_markup=reply_markup,
disable_notification=disable_notification,
parse_mode=parse_mode,
)
)
return TelegramBotRPCRequest('sendAudio', params=params, files=files, on_result=Message.from_result, **kwargs) | [
"def",
"send_audio",
"(",
"chat_id",
",",
"audio",
",",
"caption",
"=",
"None",
",",
"duration",
"=",
"None",
",",
"performer",
"=",
"None",
",",
"title",
"=",
"None",
",",
"reply_to_message_id",
"=",
"None",
",",
"reply_markup",
"=",
"None",
",",
"disab... | Use this method to send audio files, if you want Telegram clients to display them in the music player.
Your audio must be in the .mp3 format. On success, the sent Message is returned. Bots can currently send audio
files of up to 50 MB in size, this limit may be changed in the future.
For backward compatibility, when the fields title and performer are both empty and the mime-type of the file to
be sent is not audio/mpeg, the file will be sent as a playable voice message. For this to work, the audio must
be in an .ogg file encoded with OPUS. This behavior will be phased out in the future. For sending voice
messages, use the sendVoice method instead.
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param audio: Audio file to send. Pass a file_id as String to send an audio file that exists on the Telegram servers (recommended),
pass an HTTP URL as a String for Telegram to get an audio file from the Internet, or upload a new one using multipart/form-data.
:param caption: Audio caption, 0-200 characters
:param duration: Duration of the audio in seconds
:param performer: Performer
:param title: Track name
:param reply_to_message_id: If the message is a reply, ID of the original message
:param reply_markup: Additional interface options. A JSON-serialized object for a custom reply keyboard,
:param disable_notification: Sends the message silently. iOS users will not receive a notification, Android users
will receive a notification with no sound. Other apps coming soon.
:param parse_mode: Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline
URLs in your bot's message.
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type audio: InputFile or str
:type caption: str
:type duration: int
:type performer: str
:type title: str
:type reply_to_message_id: int
:type reply_markup: ReplyKeyboardMarkup or ReplyKeyboardHide or ForceReply
:type parse_mode: str
:returns: On success, the sent Message is returned.
:rtype: TelegramBotRPCRequest | [
"Use",
"this",
"method",
"to",
"send",
"audio",
"files",
"if",
"you",
"want",
"Telegram",
"clients",
"to",
"display",
"them",
"in",
"the",
"music",
"player",
"."
] | c85184da738169e8f9d6d8e62970540f427c486e | https://github.com/datamachine/twx.botapi/blob/c85184da738169e8f9d6d8e62970540f427c486e/twx/botapi/botapi.py#L2599-L2668 | train | 28,075 |
datamachine/twx.botapi | twx/botapi/botapi.py | unban_chat_member | def unban_chat_member(chat_id, user_id, **kwargs):
"""
Use this method to unban a previously kicked user in a supergroup. The user will not return to the group automatically,
but will be able to join via link, etc. The bot must be an administrator in the group for this to work
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param user_id: Unique identifier of the target user
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type user_id: int
:returns: Returns True on success.
:rtype: bool
"""
# required args
params = dict(
chat_id=chat_id,
user_id=user_id,
)
return TelegramBotRPCRequest('unbanChatMember', params=params, on_result=lambda result: result, **kwargs) | python | def unban_chat_member(chat_id, user_id, **kwargs):
"""
Use this method to unban a previously kicked user in a supergroup. The user will not return to the group automatically,
but will be able to join via link, etc. The bot must be an administrator in the group for this to work
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param user_id: Unique identifier of the target user
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type user_id: int
:returns: Returns True on success.
:rtype: bool
"""
# required args
params = dict(
chat_id=chat_id,
user_id=user_id,
)
return TelegramBotRPCRequest('unbanChatMember', params=params, on_result=lambda result: result, **kwargs) | [
"def",
"unban_chat_member",
"(",
"chat_id",
",",
"user_id",
",",
"*",
"*",
"kwargs",
")",
":",
"# required args",
"params",
"=",
"dict",
"(",
"chat_id",
"=",
"chat_id",
",",
"user_id",
"=",
"user_id",
",",
")",
"return",
"TelegramBotRPCRequest",
"(",
"'unban... | Use this method to unban a previously kicked user in a supergroup. The user will not return to the group automatically,
but will be able to join via link, etc. The bot must be an administrator in the group for this to work
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param user_id: Unique identifier of the target user
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type user_id: int
:returns: Returns True on success.
:rtype: bool | [
"Use",
"this",
"method",
"to",
"unban",
"a",
"previously",
"kicked",
"user",
"in",
"a",
"supergroup",
".",
"The",
"user",
"will",
"not",
"return",
"to",
"the",
"group",
"automatically",
"but",
"will",
"be",
"able",
"to",
"join",
"via",
"link",
"etc",
"."... | c85184da738169e8f9d6d8e62970540f427c486e | https://github.com/datamachine/twx.botapi/blob/c85184da738169e8f9d6d8e62970540f427c486e/twx/botapi/botapi.py#L3442-L3464 | train | 28,076 |
datamachine/twx.botapi | twx/botapi/botapi.py | get_chat_member | def get_chat_member(chat_id, user_id, **kwargs):
"""
Use this method to get information about a member of a chat
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param user_id: Unique identifier of the target user
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type user_id: int
:returns: Returns ChatMember on success.
:rtype: ChatMember
"""
# required args
params = dict(
chat_id=chat_id,
user_id=user_id,
)
return TelegramBotRPCRequest('getChatMember', params=params, on_result=lambda result: ChatMember.from_result(result), **kwargs) | python | def get_chat_member(chat_id, user_id, **kwargs):
"""
Use this method to get information about a member of a chat
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param user_id: Unique identifier of the target user
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type user_id: int
:returns: Returns ChatMember on success.
:rtype: ChatMember
"""
# required args
params = dict(
chat_id=chat_id,
user_id=user_id,
)
return TelegramBotRPCRequest('getChatMember', params=params, on_result=lambda result: ChatMember.from_result(result), **kwargs) | [
"def",
"get_chat_member",
"(",
"chat_id",
",",
"user_id",
",",
"*",
"*",
"kwargs",
")",
":",
"# required args",
"params",
"=",
"dict",
"(",
"chat_id",
"=",
"chat_id",
",",
"user_id",
"=",
"user_id",
",",
")",
"return",
"TelegramBotRPCRequest",
"(",
"'getChat... | Use this method to get information about a member of a chat
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param user_id: Unique identifier of the target user
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type chat_id: int or str
:type user_id: int
:returns: Returns ChatMember on success.
:rtype: ChatMember | [
"Use",
"this",
"method",
"to",
"get",
"information",
"about",
"a",
"member",
"of",
"a",
"chat"
] | c85184da738169e8f9d6d8e62970540f427c486e | https://github.com/datamachine/twx.botapi/blob/c85184da738169e8f9d6d8e62970540f427c486e/twx/botapi/botapi.py#L3535-L3556 | train | 28,077 |
datamachine/twx.botapi | twx/botapi/botapi.py | get_file | def get_file(file_id, **kwargs):
"""
Use this method to get basic info about a file and prepare it for downloading.
For the moment, bots can download files of up to 20MB in size. On success, a File object is returned. The file can
then be downloaded via the link https://api.telegram.org/file/bot<token>/<file_path>, where <file_path> is taken
from the response. It is guaranteed that the link will be valid for at least 1 hour. When the link expires, a new
one can be requested by calling getFile again.
:param file_id: File identifier to get info about
:type file_id: str
:returns: Returns a File object.
:rtype: TelegramBotRPCRequest
"""
# required args
params = dict(file_id=file_id)
return TelegramBotRPCRequest('getFile', params=params,
on_result=File.from_result, **kwargs) | python | def get_file(file_id, **kwargs):
"""
Use this method to get basic info about a file and prepare it for downloading.
For the moment, bots can download files of up to 20MB in size. On success, a File object is returned. The file can
then be downloaded via the link https://api.telegram.org/file/bot<token>/<file_path>, where <file_path> is taken
from the response. It is guaranteed that the link will be valid for at least 1 hour. When the link expires, a new
one can be requested by calling getFile again.
:param file_id: File identifier to get info about
:type file_id: str
:returns: Returns a File object.
:rtype: TelegramBotRPCRequest
"""
# required args
params = dict(file_id=file_id)
return TelegramBotRPCRequest('getFile', params=params,
on_result=File.from_result, **kwargs) | [
"def",
"get_file",
"(",
"file_id",
",",
"*",
"*",
"kwargs",
")",
":",
"# required args",
"params",
"=",
"dict",
"(",
"file_id",
"=",
"file_id",
")",
"return",
"TelegramBotRPCRequest",
"(",
"'getFile'",
",",
"params",
"=",
"params",
",",
"on_result",
"=",
"... | Use this method to get basic info about a file and prepare it for downloading.
For the moment, bots can download files of up to 20MB in size. On success, a File object is returned. The file can
then be downloaded via the link https://api.telegram.org/file/bot<token>/<file_path>, where <file_path> is taken
from the response. It is guaranteed that the link will be valid for at least 1 hour. When the link expires, a new
one can be requested by calling getFile again.
:param file_id: File identifier to get info about
:type file_id: str
:returns: Returns a File object.
:rtype: TelegramBotRPCRequest | [
"Use",
"this",
"method",
"to",
"get",
"basic",
"info",
"about",
"a",
"file",
"and",
"prepare",
"it",
"for",
"downloading",
"."
] | c85184da738169e8f9d6d8e62970540f427c486e | https://github.com/datamachine/twx.botapi/blob/c85184da738169e8f9d6d8e62970540f427c486e/twx/botapi/botapi.py#L3941-L3961 | train | 28,078 |
datamachine/twx.botapi | twx/botapi/botapi.py | get_updates | def get_updates(offset=None, limit=None, timeout=None, allowed_updates=None,
**kwargs):
"""
Use this method to receive incoming updates using long polling.
.. note::
1. This method will not work if an outgoing webhook is set up.
2. In order to avoid getting duplicate updates, recalculate offset after each server response.
:param allowed_updates: List the types of updates you want your bot to receive. For example, specify
[“message”, “edited_channel_post”, “callback_query”] to only receive updates
of these types. See Update for a complete list of available update types.
Specify an empty list to receive all updates regardless of type (default).
If not specified, the previous setting will be used.
Please note that this parameter doesn't affect updates created before the call
to the getUpdates, so unwanted updates may be received for a short period of time.
:param offset: Identifier of the first update to be returned. Must be
greater by one than the highest among the identifiers of
previously received updates. By default, updates starting
with the earliest unconfirmed update are returned. An update
is considered confirmed as soon as getUpdates is called with
an offset higher than its update_id.
:param limit: Limits the number of updates to be retrieved. Values between
1—100 are accepted. Defaults to 100
:param timeout: Timeout in seconds for long polling. Defaults to 0, i.e.
usual short polling
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type offset: int
:type limit: int
:type timeout: int
:returns: An Array of Update objects is returned.
:rtype: TelegramBotRPCRequest
"""
# optional parameters
params = _clean_params(
offset=offset,
limit=limit,
timeout=timeout,
allowed_updates=allowed_updates,
)
return TelegramBotRPCRequest('getUpdates', params=params, on_result=Update.from_result, **kwargs) | python | def get_updates(offset=None, limit=None, timeout=None, allowed_updates=None,
**kwargs):
"""
Use this method to receive incoming updates using long polling.
.. note::
1. This method will not work if an outgoing webhook is set up.
2. In order to avoid getting duplicate updates, recalculate offset after each server response.
:param allowed_updates: List the types of updates you want your bot to receive. For example, specify
[“message”, “edited_channel_post”, “callback_query”] to only receive updates
of these types. See Update for a complete list of available update types.
Specify an empty list to receive all updates regardless of type (default).
If not specified, the previous setting will be used.
Please note that this parameter doesn't affect updates created before the call
to the getUpdates, so unwanted updates may be received for a short period of time.
:param offset: Identifier of the first update to be returned. Must be
greater by one than the highest among the identifiers of
previously received updates. By default, updates starting
with the earliest unconfirmed update are returned. An update
is considered confirmed as soon as getUpdates is called with
an offset higher than its update_id.
:param limit: Limits the number of updates to be retrieved. Values between
1—100 are accepted. Defaults to 100
:param timeout: Timeout in seconds for long polling. Defaults to 0, i.e.
usual short polling
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type offset: int
:type limit: int
:type timeout: int
:returns: An Array of Update objects is returned.
:rtype: TelegramBotRPCRequest
"""
# optional parameters
params = _clean_params(
offset=offset,
limit=limit,
timeout=timeout,
allowed_updates=allowed_updates,
)
return TelegramBotRPCRequest('getUpdates', params=params, on_result=Update.from_result, **kwargs) | [
"def",
"get_updates",
"(",
"offset",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"allowed_updates",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"# optional parameters",
"params",
"=",
"_clean_params",
"(",
"offset",
"=",
... | Use this method to receive incoming updates using long polling.
.. note::
1. This method will not work if an outgoing webhook is set up.
2. In order to avoid getting duplicate updates, recalculate offset after each server response.
:param allowed_updates: List the types of updates you want your bot to receive. For example, specify
[“message”, “edited_channel_post”, “callback_query”] to only receive updates
of these types. See Update for a complete list of available update types.
Specify an empty list to receive all updates regardless of type (default).
If not specified, the previous setting will be used.
Please note that this parameter doesn't affect updates created before the call
to the getUpdates, so unwanted updates may be received for a short period of time.
:param offset: Identifier of the first update to be returned. Must be
greater by one than the highest among the identifiers of
previously received updates. By default, updates starting
with the earliest unconfirmed update are returned. An update
is considered confirmed as soon as getUpdates is called with
an offset higher than its update_id.
:param limit: Limits the number of updates to be retrieved. Values between
1—100 are accepted. Defaults to 100
:param timeout: Timeout in seconds for long polling. Defaults to 0, i.e.
usual short polling
:param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest`
:type offset: int
:type limit: int
:type timeout: int
:returns: An Array of Update objects is returned.
:rtype: TelegramBotRPCRequest | [
"Use",
"this",
"method",
"to",
"receive",
"incoming",
"updates",
"using",
"long",
"polling",
"."
] | c85184da738169e8f9d6d8e62970540f427c486e | https://github.com/datamachine/twx.botapi/blob/c85184da738169e8f9d6d8e62970540f427c486e/twx/botapi/botapi.py#L4097-L4142 | train | 28,079 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Client.connect | async def connect(self, cluster_id, client_id,
nats=None,
connect_timeout=DEFAULT_CONNECT_TIMEOUT,
max_pub_acks_inflight=DEFAULT_MAX_PUB_ACKS_INFLIGHT,
loop=None,
):
"""
Starts a session with a NATS Streaming cluster.
:param cluster: Name of the cluster to which we will connect.
:param nats: NATS connection to be borrowed for NATS Streaming.
"""
self._cluster_id = cluster_id
self._client_id = client_id
self._loop = loop
self._connect_timeout = connect_timeout
if nats is not None:
self._nc = nats
# NATS Streaming client should use same event loop
# as the borrowed NATS connection.
self._loop = self._nc._loop
# Subjects
self._discover_subject = DEFAULT_DISCOVER_SUBJECT % self._cluster_id
self._hb_inbox = new_guid()
self._ack_subject = DEFAULT_ACKS_SUBJECT % new_guid()
# Pending pub acks inflight
self._pending_pub_acks_queue = asyncio.Queue(
maxsize=max_pub_acks_inflight, loop=self._loop)
# Heartbeats subscription
self._hb_inbox_sid = await self._nc.subscribe(
self._hb_inbox,
cb=self._process_heartbeats,
)
# Acks processing subscription
self._ack_subject_sid = await self._nc.subscribe(
self._ack_subject,
cb=self._process_ack,
)
await self._nc.flush()
# Start NATS Streaming session by sending ConnectRequest
creq = protocol.ConnectRequest()
creq.clientID = self._client_id
creq.heartbeatInbox = self._hb_inbox
payload = creq.SerializeToString()
msg = None
try:
msg = await self._nc.request(
self._discover_subject,
payload,
timeout=self._connect_timeout,
)
except:
await self._close()
raise ErrConnectReqTimeout("stan: failed connecting to '{}'".format(cluster_id))
# We should get the NATS Streaming subject from the
# response from the ConnectRequest.
resp = protocol.ConnectResponse()
resp.ParseFromString(msg.data)
if resp.error != "":
try:
await self._close()
except:
pass
raise StanError(resp.error)
self._pub_prefix = resp.pubPrefix
self._sub_req_subject = resp.subRequests
self._unsub_req_subject = resp.unsubRequests
self._close_req_subject = resp.closeRequests
self._sub_close_req_subject = resp.subCloseRequests | python | async def connect(self, cluster_id, client_id,
nats=None,
connect_timeout=DEFAULT_CONNECT_TIMEOUT,
max_pub_acks_inflight=DEFAULT_MAX_PUB_ACKS_INFLIGHT,
loop=None,
):
"""
Starts a session with a NATS Streaming cluster.
:param cluster: Name of the cluster to which we will connect.
:param nats: NATS connection to be borrowed for NATS Streaming.
"""
self._cluster_id = cluster_id
self._client_id = client_id
self._loop = loop
self._connect_timeout = connect_timeout
if nats is not None:
self._nc = nats
# NATS Streaming client should use same event loop
# as the borrowed NATS connection.
self._loop = self._nc._loop
# Subjects
self._discover_subject = DEFAULT_DISCOVER_SUBJECT % self._cluster_id
self._hb_inbox = new_guid()
self._ack_subject = DEFAULT_ACKS_SUBJECT % new_guid()
# Pending pub acks inflight
self._pending_pub_acks_queue = asyncio.Queue(
maxsize=max_pub_acks_inflight, loop=self._loop)
# Heartbeats subscription
self._hb_inbox_sid = await self._nc.subscribe(
self._hb_inbox,
cb=self._process_heartbeats,
)
# Acks processing subscription
self._ack_subject_sid = await self._nc.subscribe(
self._ack_subject,
cb=self._process_ack,
)
await self._nc.flush()
# Start NATS Streaming session by sending ConnectRequest
creq = protocol.ConnectRequest()
creq.clientID = self._client_id
creq.heartbeatInbox = self._hb_inbox
payload = creq.SerializeToString()
msg = None
try:
msg = await self._nc.request(
self._discover_subject,
payload,
timeout=self._connect_timeout,
)
except:
await self._close()
raise ErrConnectReqTimeout("stan: failed connecting to '{}'".format(cluster_id))
# We should get the NATS Streaming subject from the
# response from the ConnectRequest.
resp = protocol.ConnectResponse()
resp.ParseFromString(msg.data)
if resp.error != "":
try:
await self._close()
except:
pass
raise StanError(resp.error)
self._pub_prefix = resp.pubPrefix
self._sub_req_subject = resp.subRequests
self._unsub_req_subject = resp.unsubRequests
self._close_req_subject = resp.closeRequests
self._sub_close_req_subject = resp.subCloseRequests | [
"async",
"def",
"connect",
"(",
"self",
",",
"cluster_id",
",",
"client_id",
",",
"nats",
"=",
"None",
",",
"connect_timeout",
"=",
"DEFAULT_CONNECT_TIMEOUT",
",",
"max_pub_acks_inflight",
"=",
"DEFAULT_MAX_PUB_ACKS_INFLIGHT",
",",
"loop",
"=",
"None",
",",
")",
... | Starts a session with a NATS Streaming cluster.
:param cluster: Name of the cluster to which we will connect.
:param nats: NATS connection to be borrowed for NATS Streaming. | [
"Starts",
"a",
"session",
"with",
"a",
"NATS",
"Streaming",
"cluster",
"."
] | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L85-L162 | train | 28,080 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Client._process_ack | async def _process_ack(self, msg):
"""
Receives acks from the publishes via the _STAN.acks subscription.
"""
pub_ack = protocol.PubAck()
pub_ack.ParseFromString(msg.data)
# Unblock pending acks queue if required.
if not self._pending_pub_acks_queue.empty():
await self._pending_pub_acks_queue.get()
try:
cb = self._pub_ack_map[pub_ack.guid]
await cb(pub_ack)
del self._pub_ack_map[pub_ack.guid]
except KeyError:
# Just skip the pub ack
return
except:
# TODO: Check for protocol error
return | python | async def _process_ack(self, msg):
"""
Receives acks from the publishes via the _STAN.acks subscription.
"""
pub_ack = protocol.PubAck()
pub_ack.ParseFromString(msg.data)
# Unblock pending acks queue if required.
if not self._pending_pub_acks_queue.empty():
await self._pending_pub_acks_queue.get()
try:
cb = self._pub_ack_map[pub_ack.guid]
await cb(pub_ack)
del self._pub_ack_map[pub_ack.guid]
except KeyError:
# Just skip the pub ack
return
except:
# TODO: Check for protocol error
return | [
"async",
"def",
"_process_ack",
"(",
"self",
",",
"msg",
")",
":",
"pub_ack",
"=",
"protocol",
".",
"PubAck",
"(",
")",
"pub_ack",
".",
"ParseFromString",
"(",
"msg",
".",
"data",
")",
"# Unblock pending acks queue if required.",
"if",
"not",
"self",
".",
"_... | Receives acks from the publishes via the _STAN.acks subscription. | [
"Receives",
"acks",
"from",
"the",
"publishes",
"via",
"the",
"_STAN",
".",
"acks",
"subscription",
"."
] | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L170-L190 | train | 28,081 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Client._process_msg | async def _process_msg(self, sub):
"""
Receives the msgs from the STAN subscriptions and replies.
By default it will reply back with an ack unless manual acking
was specified in one of the subscription options.
"""
while True:
try:
raw_msg = await sub._msgs_queue.get()
msg = Msg()
msg_proto = protocol.MsgProto()
msg_proto.ParseFromString(raw_msg.data)
msg.proto = msg_proto
msg.sub = sub
# Yield the message to the subscription callback.
await sub.cb(msg)
if not sub.manual_acks:
# Process auto-ack if not done manually in the callback,
# by publishing into the ack inbox from the subscription.
msg_ack = protocol.Ack()
msg_ack.subject = msg.proto.subject
msg_ack.sequence = msg.proto.sequence
await self._nc.publish(sub.ack_inbox, msg_ack.SerializeToString())
except asyncio.CancelledError:
break
except Exception as ex:
if sub.error_cb:
try:
await sub.error_cb(ex)
except:
logger.exception(
"Exception in error callback for subscription to '%s'",
sub.subject
)
continue | python | async def _process_msg(self, sub):
"""
Receives the msgs from the STAN subscriptions and replies.
By default it will reply back with an ack unless manual acking
was specified in one of the subscription options.
"""
while True:
try:
raw_msg = await sub._msgs_queue.get()
msg = Msg()
msg_proto = protocol.MsgProto()
msg_proto.ParseFromString(raw_msg.data)
msg.proto = msg_proto
msg.sub = sub
# Yield the message to the subscription callback.
await sub.cb(msg)
if not sub.manual_acks:
# Process auto-ack if not done manually in the callback,
# by publishing into the ack inbox from the subscription.
msg_ack = protocol.Ack()
msg_ack.subject = msg.proto.subject
msg_ack.sequence = msg.proto.sequence
await self._nc.publish(sub.ack_inbox, msg_ack.SerializeToString())
except asyncio.CancelledError:
break
except Exception as ex:
if sub.error_cb:
try:
await sub.error_cb(ex)
except:
logger.exception(
"Exception in error callback for subscription to '%s'",
sub.subject
)
continue | [
"async",
"def",
"_process_msg",
"(",
"self",
",",
"sub",
")",
":",
"while",
"True",
":",
"try",
":",
"raw_msg",
"=",
"await",
"sub",
".",
"_msgs_queue",
".",
"get",
"(",
")",
"msg",
"=",
"Msg",
"(",
")",
"msg_proto",
"=",
"protocol",
".",
"MsgProto",... | Receives the msgs from the STAN subscriptions and replies.
By default it will reply back with an ack unless manual acking
was specified in one of the subscription options. | [
"Receives",
"the",
"msgs",
"from",
"the",
"STAN",
"subscriptions",
"and",
"replies",
".",
"By",
"default",
"it",
"will",
"reply",
"back",
"with",
"an",
"ack",
"unless",
"manual",
"acking",
"was",
"specified",
"in",
"one",
"of",
"the",
"subscription",
"option... | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L192-L228 | train | 28,082 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Client.ack | async def ack(self, msg):
"""
Used to manually acks a message.
:param msg: Message which is pending to be acked by client.
"""
ack_proto = protocol.Ack()
ack_proto.subject = msg.proto.subject
ack_proto.sequence = msg.proto.sequence
await self._nc.publish(msg.sub.ack_inbox, ack_proto.SerializeToString()) | python | async def ack(self, msg):
"""
Used to manually acks a message.
:param msg: Message which is pending to be acked by client.
"""
ack_proto = protocol.Ack()
ack_proto.subject = msg.proto.subject
ack_proto.sequence = msg.proto.sequence
await self._nc.publish(msg.sub.ack_inbox, ack_proto.SerializeToString()) | [
"async",
"def",
"ack",
"(",
"self",
",",
"msg",
")",
":",
"ack_proto",
"=",
"protocol",
".",
"Ack",
"(",
")",
"ack_proto",
".",
"subject",
"=",
"msg",
".",
"proto",
".",
"subject",
"ack_proto",
".",
"sequence",
"=",
"msg",
".",
"proto",
".",
"sequenc... | Used to manually acks a message.
:param msg: Message which is pending to be acked by client. | [
"Used",
"to",
"manually",
"acks",
"a",
"message",
"."
] | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L230-L239 | train | 28,083 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Client.publish | async def publish(self, subject, payload,
ack_handler=None,
ack_wait=DEFAULT_ACK_WAIT,
):
"""
Publishes a payload onto a subject. By default, it will block
until the message which has been published has been acked back.
An optional async handler can be publi
:param subject: Subject of the message.
:param payload: Payload of the message which wil be published.
:param ack_handler: Optional handler for async publishing.
:param ack_wait: How long in seconds to wait for an ack to be received.
"""
stan_subject = ''.join([self._pub_prefix, '.', subject])
guid = new_guid()
pe = protocol.PubMsg()
pe.clientID = self._client_id
pe.guid = guid
pe.subject = subject
pe.data = payload
# Control max inflight pubs for the client with a buffered queue.
await self._pending_pub_acks_queue.put(None)
# Process asynchronously if a handler is given.
if ack_handler is not None:
self._pub_ack_map[guid] = ack_handler
try:
await self._nc.publish_request(
stan_subject,
self._ack_subject,
pe.SerializeToString(),
)
return
except Exception as e:
del self._pub_ack_map[guid]
raise e
else:
# Synchronous wait for ack handling.
future = asyncio.Future(loop=self._loop)
async def cb(pub_ack):
nonlocal future
future.set_result(pub_ack)
self._pub_ack_map[guid] = cb
try:
await self._nc.publish_request(
stan_subject,
self._ack_subject,
pe.SerializeToString(),
)
await asyncio.wait_for(future, ack_wait, loop=self._loop)
return future.result()
except Exception as e:
# Remove pending future before raising error.
future.cancel()
del self._pub_ack_map[guid]
raise e | python | async def publish(self, subject, payload,
ack_handler=None,
ack_wait=DEFAULT_ACK_WAIT,
):
"""
Publishes a payload onto a subject. By default, it will block
until the message which has been published has been acked back.
An optional async handler can be publi
:param subject: Subject of the message.
:param payload: Payload of the message which wil be published.
:param ack_handler: Optional handler for async publishing.
:param ack_wait: How long in seconds to wait for an ack to be received.
"""
stan_subject = ''.join([self._pub_prefix, '.', subject])
guid = new_guid()
pe = protocol.PubMsg()
pe.clientID = self._client_id
pe.guid = guid
pe.subject = subject
pe.data = payload
# Control max inflight pubs for the client with a buffered queue.
await self._pending_pub_acks_queue.put(None)
# Process asynchronously if a handler is given.
if ack_handler is not None:
self._pub_ack_map[guid] = ack_handler
try:
await self._nc.publish_request(
stan_subject,
self._ack_subject,
pe.SerializeToString(),
)
return
except Exception as e:
del self._pub_ack_map[guid]
raise e
else:
# Synchronous wait for ack handling.
future = asyncio.Future(loop=self._loop)
async def cb(pub_ack):
nonlocal future
future.set_result(pub_ack)
self._pub_ack_map[guid] = cb
try:
await self._nc.publish_request(
stan_subject,
self._ack_subject,
pe.SerializeToString(),
)
await asyncio.wait_for(future, ack_wait, loop=self._loop)
return future.result()
except Exception as e:
# Remove pending future before raising error.
future.cancel()
del self._pub_ack_map[guid]
raise e | [
"async",
"def",
"publish",
"(",
"self",
",",
"subject",
",",
"payload",
",",
"ack_handler",
"=",
"None",
",",
"ack_wait",
"=",
"DEFAULT_ACK_WAIT",
",",
")",
":",
"stan_subject",
"=",
"''",
".",
"join",
"(",
"[",
"self",
".",
"_pub_prefix",
",",
"'.'",
... | Publishes a payload onto a subject. By default, it will block
until the message which has been published has been acked back.
An optional async handler can be publi
:param subject: Subject of the message.
:param payload: Payload of the message which wil be published.
:param ack_handler: Optional handler for async publishing.
:param ack_wait: How long in seconds to wait for an ack to be received. | [
"Publishes",
"a",
"payload",
"onto",
"a",
"subject",
".",
"By",
"default",
"it",
"will",
"block",
"until",
"the",
"message",
"which",
"has",
"been",
"published",
"has",
"been",
"acked",
"back",
".",
"An",
"optional",
"async",
"handler",
"can",
"be",
"publi... | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L241-L300 | train | 28,084 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Client._close | async def _close(self):
"""
Removes any present internal state from the client.
"""
# Remove the core NATS Streaming subscriptions.
try:
if self._hb_inbox_sid is not None:
await self._nc.unsubscribe(self._hb_inbox_sid)
self._hb_inbox = None
self._hb_inbox_sid = None
if self._ack_subject_sid is not None:
await self._nc.unsubscribe(self._ack_subject_sid)
self._ack_subject = None
self._ack_subject_sid = None
except:
# FIXME: async error in case these fail?
pass
# Remove all the related subscriptions
for _, sub in self._sub_map.items():
if sub._msgs_task is not None:
sub._msgs_task.cancel()
try:
await self._nc.unsubscribe(sub.sid)
except:
continue
self._sub_map = {} | python | async def _close(self):
"""
Removes any present internal state from the client.
"""
# Remove the core NATS Streaming subscriptions.
try:
if self._hb_inbox_sid is not None:
await self._nc.unsubscribe(self._hb_inbox_sid)
self._hb_inbox = None
self._hb_inbox_sid = None
if self._ack_subject_sid is not None:
await self._nc.unsubscribe(self._ack_subject_sid)
self._ack_subject = None
self._ack_subject_sid = None
except:
# FIXME: async error in case these fail?
pass
# Remove all the related subscriptions
for _, sub in self._sub_map.items():
if sub._msgs_task is not None:
sub._msgs_task.cancel()
try:
await self._nc.unsubscribe(sub.sid)
except:
continue
self._sub_map = {} | [
"async",
"def",
"_close",
"(",
"self",
")",
":",
"# Remove the core NATS Streaming subscriptions.",
"try",
":",
"if",
"self",
".",
"_hb_inbox_sid",
"is",
"not",
"None",
":",
"await",
"self",
".",
"_nc",
".",
"unsubscribe",
"(",
"self",
".",
"_hb_inbox_sid",
")... | Removes any present internal state from the client. | [
"Removes",
"any",
"present",
"internal",
"state",
"from",
"the",
"client",
"."
] | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L425-L452 | train | 28,085 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Client.close | async def close(self):
"""
Close terminates a session with NATS Streaming.
"""
# Remove the core NATS Streaming subscriptions.
await self._close()
req = protocol.CloseRequest()
req.clientID = self._client_id
msg = await self._nc.request(
self._close_req_subject,
req.SerializeToString(),
self._connect_timeout,
)
resp = protocol.CloseResponse()
resp.ParseFromString(msg.data)
if resp.error != "":
raise StanError(resp.error) | python | async def close(self):
"""
Close terminates a session with NATS Streaming.
"""
# Remove the core NATS Streaming subscriptions.
await self._close()
req = protocol.CloseRequest()
req.clientID = self._client_id
msg = await self._nc.request(
self._close_req_subject,
req.SerializeToString(),
self._connect_timeout,
)
resp = protocol.CloseResponse()
resp.ParseFromString(msg.data)
if resp.error != "":
raise StanError(resp.error) | [
"async",
"def",
"close",
"(",
"self",
")",
":",
"# Remove the core NATS Streaming subscriptions.",
"await",
"self",
".",
"_close",
"(",
")",
"req",
"=",
"protocol",
".",
"CloseRequest",
"(",
")",
"req",
".",
"clientID",
"=",
"self",
".",
"_client_id",
"msg",
... | Close terminates a session with NATS Streaming. | [
"Close",
"terminates",
"a",
"session",
"with",
"NATS",
"Streaming",
"."
] | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L454-L473 | train | 28,086 |
nats-io/asyncio-nats-streaming | stan/aio/client.py | Subscription.unsubscribe | async def unsubscribe(self):
"""
Remove subscription on a topic in this client.
"""
await self._nc.unsubscribe(self.sid)
try:
# Stop the processing task for the subscription.
sub = self._sc._sub_map[self.inbox]
sub._msgs_task.cancel()
del self._sc._sub_map[self.inbox]
except KeyError:
pass
req = protocol.UnsubscribeRequest()
req.clientID = self._sc._client_id
req.subject = self.subject
req.inbox = self.ack_inbox
if self.durable_name is not None:
req.durableName = self.durable_name
msg = await self._nc.request(
self._sc._unsub_req_subject,
req.SerializeToString(),
self._sc._connect_timeout,
)
resp = protocol.SubscriptionResponse()
resp.ParseFromString(msg.data)
if resp.error != "":
raise StanError(resp.error) | python | async def unsubscribe(self):
"""
Remove subscription on a topic in this client.
"""
await self._nc.unsubscribe(self.sid)
try:
# Stop the processing task for the subscription.
sub = self._sc._sub_map[self.inbox]
sub._msgs_task.cancel()
del self._sc._sub_map[self.inbox]
except KeyError:
pass
req = protocol.UnsubscribeRequest()
req.clientID = self._sc._client_id
req.subject = self.subject
req.inbox = self.ack_inbox
if self.durable_name is not None:
req.durableName = self.durable_name
msg = await self._nc.request(
self._sc._unsub_req_subject,
req.SerializeToString(),
self._sc._connect_timeout,
)
resp = protocol.SubscriptionResponse()
resp.ParseFromString(msg.data)
if resp.error != "":
raise StanError(resp.error) | [
"async",
"def",
"unsubscribe",
"(",
"self",
")",
":",
"await",
"self",
".",
"_nc",
".",
"unsubscribe",
"(",
"self",
".",
"sid",
")",
"try",
":",
"# Stop the processing task for the subscription.",
"sub",
"=",
"self",
".",
"_sc",
".",
"_sub_map",
"[",
"self",... | Remove subscription on a topic in this client. | [
"Remove",
"subscription",
"on",
"a",
"topic",
"in",
"this",
"client",
"."
] | 344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5 | https://github.com/nats-io/asyncio-nats-streaming/blob/344d28f645e6dd0e0b7938516b83e2e6f3c3c9f5/stan/aio/client.py#L508-L539 | train | 28,087 |
django-ldapdb/django-ldapdb | ldapdb/models/fields.py | datetime_from_ldap | def datetime_from_ldap(value):
"""Convert a LDAP-style datetime to a Python aware object.
See https://tools.ietf.org/html/rfc4517#section-3.3.13 for details.
Args:
value (str): the datetime to parse
"""
if not value:
return None
match = LDAP_DATETIME_RE.match(value)
if not match:
return None
groups = match.groupdict()
if groups['microsecond']:
groups['microsecond'] = groups['microsecond'].ljust(6, '0')[:6]
tzinfo = groups.pop('tzinfo')
if tzinfo == 'Z':
tzinfo = timezone.utc
else:
offset_mins = int(tzinfo[-2:]) if len(tzinfo) == 5 else 0
offset = 60 * int(tzinfo[1:3]) + offset_mins
if tzinfo[0] == '-':
offset = - offset
tzinfo = timezone.get_fixed_timezone(offset)
kwargs = {k: int(v) for k, v in groups.items() if v is not None}
kwargs['tzinfo'] = tzinfo
return datetime.datetime(**kwargs) | python | def datetime_from_ldap(value):
"""Convert a LDAP-style datetime to a Python aware object.
See https://tools.ietf.org/html/rfc4517#section-3.3.13 for details.
Args:
value (str): the datetime to parse
"""
if not value:
return None
match = LDAP_DATETIME_RE.match(value)
if not match:
return None
groups = match.groupdict()
if groups['microsecond']:
groups['microsecond'] = groups['microsecond'].ljust(6, '0')[:6]
tzinfo = groups.pop('tzinfo')
if tzinfo == 'Z':
tzinfo = timezone.utc
else:
offset_mins = int(tzinfo[-2:]) if len(tzinfo) == 5 else 0
offset = 60 * int(tzinfo[1:3]) + offset_mins
if tzinfo[0] == '-':
offset = - offset
tzinfo = timezone.get_fixed_timezone(offset)
kwargs = {k: int(v) for k, v in groups.items() if v is not None}
kwargs['tzinfo'] = tzinfo
return datetime.datetime(**kwargs) | [
"def",
"datetime_from_ldap",
"(",
"value",
")",
":",
"if",
"not",
"value",
":",
"return",
"None",
"match",
"=",
"LDAP_DATETIME_RE",
".",
"match",
"(",
"value",
")",
"if",
"not",
"match",
":",
"return",
"None",
"groups",
"=",
"match",
".",
"groupdict",
"(... | Convert a LDAP-style datetime to a Python aware object.
See https://tools.ietf.org/html/rfc4517#section-3.3.13 for details.
Args:
value (str): the datetime to parse | [
"Convert",
"a",
"LDAP",
"-",
"style",
"datetime",
"to",
"a",
"Python",
"aware",
"object",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/models/fields.py#L293-L320 | train | 28,088 |
django-ldapdb/django-ldapdb | ldapdb/models/fields.py | LdapFieldMixin.get_db_prep_value | def get_db_prep_value(self, value, connection, prepared=False):
"""Prepare a value for DB interaction.
Returns:
- list(bytes) if not prepared
- list(str) if prepared
"""
if prepared:
return value
if value is None:
return []
values = value if self.multi_valued_field else [value]
prepared_values = [self.get_prep_value(v) for v in values]
# Remove duplicates.
# https://tools.ietf.org/html/rfc4511#section-4.1.7 :
# "The set of attribute values is unordered."
# We keep those values sorted in natural order to avoid useless
# updates to the LDAP server.
return list(sorted(set(v for v in prepared_values if v))) | python | def get_db_prep_value(self, value, connection, prepared=False):
"""Prepare a value for DB interaction.
Returns:
- list(bytes) if not prepared
- list(str) if prepared
"""
if prepared:
return value
if value is None:
return []
values = value if self.multi_valued_field else [value]
prepared_values = [self.get_prep_value(v) for v in values]
# Remove duplicates.
# https://tools.ietf.org/html/rfc4511#section-4.1.7 :
# "The set of attribute values is unordered."
# We keep those values sorted in natural order to avoid useless
# updates to the LDAP server.
return list(sorted(set(v for v in prepared_values if v))) | [
"def",
"get_db_prep_value",
"(",
"self",
",",
"value",
",",
"connection",
",",
"prepared",
"=",
"False",
")",
":",
"if",
"prepared",
":",
"return",
"value",
"if",
"value",
"is",
"None",
":",
"return",
"[",
"]",
"values",
"=",
"value",
"if",
"self",
"."... | Prepare a value for DB interaction.
Returns:
- list(bytes) if not prepared
- list(str) if prepared | [
"Prepare",
"a",
"value",
"for",
"DB",
"interaction",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/models/fields.py#L113-L134 | train | 28,089 |
django-ldapdb/django-ldapdb | ldapdb/models/base.py | Model.build_rdn | def build_rdn(self):
"""
Build the Relative Distinguished Name for this entry.
"""
bits = []
for field in self._meta.fields:
if field.db_column and field.primary_key:
bits.append("%s=%s" % (field.db_column,
getattr(self, field.name)))
if not len(bits):
raise Exception("Could not build Distinguished Name")
return '+'.join(bits) | python | def build_rdn(self):
"""
Build the Relative Distinguished Name for this entry.
"""
bits = []
for field in self._meta.fields:
if field.db_column and field.primary_key:
bits.append("%s=%s" % (field.db_column,
getattr(self, field.name)))
if not len(bits):
raise Exception("Could not build Distinguished Name")
return '+'.join(bits) | [
"def",
"build_rdn",
"(",
"self",
")",
":",
"bits",
"=",
"[",
"]",
"for",
"field",
"in",
"self",
".",
"_meta",
".",
"fields",
":",
"if",
"field",
".",
"db_column",
"and",
"field",
".",
"primary_key",
":",
"bits",
".",
"append",
"(",
"\"%s=%s\"",
"%",
... | Build the Relative Distinguished Name for this entry. | [
"Build",
"the",
"Relative",
"Distinguished",
"Name",
"for",
"this",
"entry",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/models/base.py#L34-L45 | train | 28,090 |
django-ldapdb/django-ldapdb | ldapdb/models/base.py | Model.delete | def delete(self, using=None):
"""
Delete this entry.
"""
using = using or router.db_for_write(self.__class__, instance=self)
connection = connections[using]
logger.debug("Deleting LDAP entry %s" % self.dn)
connection.delete_s(self.dn)
signals.post_delete.send(sender=self.__class__, instance=self) | python | def delete(self, using=None):
"""
Delete this entry.
"""
using = using or router.db_for_write(self.__class__, instance=self)
connection = connections[using]
logger.debug("Deleting LDAP entry %s" % self.dn)
connection.delete_s(self.dn)
signals.post_delete.send(sender=self.__class__, instance=self) | [
"def",
"delete",
"(",
"self",
",",
"using",
"=",
"None",
")",
":",
"using",
"=",
"using",
"or",
"router",
".",
"db_for_write",
"(",
"self",
".",
"__class__",
",",
"instance",
"=",
"self",
")",
"connection",
"=",
"connections",
"[",
"using",
"]",
"logge... | Delete this entry. | [
"Delete",
"this",
"entry",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/models/base.py#L53-L61 | train | 28,091 |
django-ldapdb/django-ldapdb | ldapdb/models/base.py | Model._save_table | def _save_table(self, raw=False, cls=None, force_insert=None, force_update=None, using=None, update_fields=None):
"""
Saves the current instance.
"""
# Connection aliasing
connection = connections[using]
create = bool(force_insert or not self.dn)
# Prepare fields
if update_fields:
target_fields = [
self._meta.get_field(name)
for name in update_fields
]
else:
target_fields = [
field
for field in cls._meta.get_fields(include_hidden=True)
if field.concrete and not field.primary_key
]
def get_field_value(field, instance):
python_value = getattr(instance, field.attname)
return field.get_db_prep_save(python_value, connection=connection)
if create:
old = None
else:
old = cls.objects.using(using).get(dn=self._saved_dn)
changes = {
field.db_column: (
None if old is None else get_field_value(field, old),
get_field_value(field, self),
)
for field in target_fields
}
# Actual saving
old_dn = self.dn
new_dn = self.build_dn()
updated = False
# Insertion
if create:
# FIXME(rbarrois): This should be handled through a hidden field.
hidden_values = [
('objectClass', [obj_class.encode('utf-8') for obj_class in self.object_classes])
]
new_values = hidden_values + [
(colname, change[1])
for colname, change in sorted(changes.items())
if change[1] != []
]
new_dn = self.build_dn()
logger.debug("Creating new LDAP entry %s", new_dn)
connection.add_s(new_dn, new_values)
# Update
else:
modlist = []
for colname, change in sorted(changes.items()):
old_value, new_value = change
if old_value == new_value:
continue
modlist.append((
ldap.MOD_DELETE if new_value == [] else ldap.MOD_REPLACE,
colname,
new_value,
))
if new_dn != old_dn:
logger.debug("renaming ldap entry %s to %s", old_dn, new_dn)
connection.rename_s(old_dn, self.build_rdn())
if modlist:
logger.debug("Modifying existing LDAP entry %s", new_dn)
connection.modify_s(new_dn, modlist)
updated = True
self.dn = new_dn
# Finishing
self._saved_dn = self.dn
return updated | python | def _save_table(self, raw=False, cls=None, force_insert=None, force_update=None, using=None, update_fields=None):
"""
Saves the current instance.
"""
# Connection aliasing
connection = connections[using]
create = bool(force_insert or not self.dn)
# Prepare fields
if update_fields:
target_fields = [
self._meta.get_field(name)
for name in update_fields
]
else:
target_fields = [
field
for field in cls._meta.get_fields(include_hidden=True)
if field.concrete and not field.primary_key
]
def get_field_value(field, instance):
python_value = getattr(instance, field.attname)
return field.get_db_prep_save(python_value, connection=connection)
if create:
old = None
else:
old = cls.objects.using(using).get(dn=self._saved_dn)
changes = {
field.db_column: (
None if old is None else get_field_value(field, old),
get_field_value(field, self),
)
for field in target_fields
}
# Actual saving
old_dn = self.dn
new_dn = self.build_dn()
updated = False
# Insertion
if create:
# FIXME(rbarrois): This should be handled through a hidden field.
hidden_values = [
('objectClass', [obj_class.encode('utf-8') for obj_class in self.object_classes])
]
new_values = hidden_values + [
(colname, change[1])
for colname, change in sorted(changes.items())
if change[1] != []
]
new_dn = self.build_dn()
logger.debug("Creating new LDAP entry %s", new_dn)
connection.add_s(new_dn, new_values)
# Update
else:
modlist = []
for colname, change in sorted(changes.items()):
old_value, new_value = change
if old_value == new_value:
continue
modlist.append((
ldap.MOD_DELETE if new_value == [] else ldap.MOD_REPLACE,
colname,
new_value,
))
if new_dn != old_dn:
logger.debug("renaming ldap entry %s to %s", old_dn, new_dn)
connection.rename_s(old_dn, self.build_rdn())
if modlist:
logger.debug("Modifying existing LDAP entry %s", new_dn)
connection.modify_s(new_dn, modlist)
updated = True
self.dn = new_dn
# Finishing
self._saved_dn = self.dn
return updated | [
"def",
"_save_table",
"(",
"self",
",",
"raw",
"=",
"False",
",",
"cls",
"=",
"None",
",",
"force_insert",
"=",
"None",
",",
"force_update",
"=",
"None",
",",
"using",
"=",
"None",
",",
"update_fields",
"=",
"None",
")",
":",
"# Connection aliasing",
"co... | Saves the current instance. | [
"Saves",
"the",
"current",
"instance",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/models/base.py#L63-L148 | train | 28,092 |
django-ldapdb/django-ldapdb | ldapdb/models/base.py | Model.scoped | def scoped(base_class, base_dn):
"""
Returns a copy of the current class with a different base_dn.
"""
class Meta:
proxy = True
verbose_name = base_class._meta.verbose_name
verbose_name_plural = base_class._meta.verbose_name_plural
import re
suffix = re.sub('[=,]', '_', base_dn)
name = "%s_%s" % (base_class.__name__, str(suffix))
new_class = type(str(name), (base_class,), {
'base_dn': base_dn, '__module__': base_class.__module__,
'Meta': Meta})
return new_class | python | def scoped(base_class, base_dn):
"""
Returns a copy of the current class with a different base_dn.
"""
class Meta:
proxy = True
verbose_name = base_class._meta.verbose_name
verbose_name_plural = base_class._meta.verbose_name_plural
import re
suffix = re.sub('[=,]', '_', base_dn)
name = "%s_%s" % (base_class.__name__, str(suffix))
new_class = type(str(name), (base_class,), {
'base_dn': base_dn, '__module__': base_class.__module__,
'Meta': Meta})
return new_class | [
"def",
"scoped",
"(",
"base_class",
",",
"base_dn",
")",
":",
"class",
"Meta",
":",
"proxy",
"=",
"True",
"verbose_name",
"=",
"base_class",
".",
"_meta",
".",
"verbose_name",
"verbose_name_plural",
"=",
"base_class",
".",
"_meta",
".",
"verbose_name_plural",
... | Returns a copy of the current class with a different base_dn. | [
"Returns",
"a",
"copy",
"of",
"the",
"current",
"class",
"with",
"a",
"different",
"base_dn",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/models/base.py#L151-L165 | train | 28,093 |
django-ldapdb/django-ldapdb | ldapdb/backends/ldap/base.py | DatabaseWrapper.get_connection_params | def get_connection_params(self):
"""Compute appropriate parameters for establishing a new connection.
Computed at system startup.
"""
return {
'uri': self.settings_dict['NAME'],
'tls': self.settings_dict.get('TLS', False),
'bind_dn': self.settings_dict['USER'],
'bind_pw': self.settings_dict['PASSWORD'],
'retry_max': self.settings_dict.get('RETRY_MAX', 1),
'retry_delay': self.settings_dict.get('RETRY_DELAY', 60.0),
'options': {
k if isinstance(k, int) else k.lower(): v
for k, v in self.settings_dict.get('CONNECTION_OPTIONS', {}).items()
},
} | python | def get_connection_params(self):
"""Compute appropriate parameters for establishing a new connection.
Computed at system startup.
"""
return {
'uri': self.settings_dict['NAME'],
'tls': self.settings_dict.get('TLS', False),
'bind_dn': self.settings_dict['USER'],
'bind_pw': self.settings_dict['PASSWORD'],
'retry_max': self.settings_dict.get('RETRY_MAX', 1),
'retry_delay': self.settings_dict.get('RETRY_DELAY', 60.0),
'options': {
k if isinstance(k, int) else k.lower(): v
for k, v in self.settings_dict.get('CONNECTION_OPTIONS', {}).items()
},
} | [
"def",
"get_connection_params",
"(",
"self",
")",
":",
"return",
"{",
"'uri'",
":",
"self",
".",
"settings_dict",
"[",
"'NAME'",
"]",
",",
"'tls'",
":",
"self",
".",
"settings_dict",
".",
"get",
"(",
"'TLS'",
",",
"False",
")",
",",
"'bind_dn'",
":",
"... | Compute appropriate parameters for establishing a new connection.
Computed at system startup. | [
"Compute",
"appropriate",
"parameters",
"for",
"establishing",
"a",
"new",
"connection",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/backends/ldap/base.py#L220-L236 | train | 28,094 |
django-ldapdb/django-ldapdb | ldapdb/backends/ldap/base.py | DatabaseWrapper.get_new_connection | def get_new_connection(self, conn_params):
"""Build a connection from its parameters."""
connection = ldap.ldapobject.ReconnectLDAPObject(
uri=conn_params['uri'],
retry_max=conn_params['retry_max'],
retry_delay=conn_params['retry_delay'],
bytes_mode=False)
options = conn_params['options']
for opt, value in options.items():
if opt == 'query_timeout':
connection.timeout = int(value)
elif opt == 'page_size':
self.page_size = int(value)
else:
connection.set_option(opt, value)
if conn_params['tls']:
connection.start_tls_s()
connection.simple_bind_s(
conn_params['bind_dn'],
conn_params['bind_pw'],
)
return connection | python | def get_new_connection(self, conn_params):
"""Build a connection from its parameters."""
connection = ldap.ldapobject.ReconnectLDAPObject(
uri=conn_params['uri'],
retry_max=conn_params['retry_max'],
retry_delay=conn_params['retry_delay'],
bytes_mode=False)
options = conn_params['options']
for opt, value in options.items():
if opt == 'query_timeout':
connection.timeout = int(value)
elif opt == 'page_size':
self.page_size = int(value)
else:
connection.set_option(opt, value)
if conn_params['tls']:
connection.start_tls_s()
connection.simple_bind_s(
conn_params['bind_dn'],
conn_params['bind_pw'],
)
return connection | [
"def",
"get_new_connection",
"(",
"self",
",",
"conn_params",
")",
":",
"connection",
"=",
"ldap",
".",
"ldapobject",
".",
"ReconnectLDAPObject",
"(",
"uri",
"=",
"conn_params",
"[",
"'uri'",
"]",
",",
"retry_max",
"=",
"conn_params",
"[",
"'retry_max'",
"]",
... | Build a connection from its parameters. | [
"Build",
"a",
"connection",
"from",
"its",
"parameters",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/backends/ldap/base.py#L251-L275 | train | 28,095 |
django-ldapdb/django-ldapdb | ldapdb/backends/ldap/compiler.py | query_as_ldap | def query_as_ldap(query, compiler, connection):
"""Convert a django.db.models.sql.query.Query to a LdapLookup."""
if query.is_empty():
return
if query.model._meta.model_name == 'migration' and not hasattr(query.model, 'object_classes'):
# FIXME(rbarrois): Support migrations
return
# FIXME(rbarrois): this could be an extra Where clause
filterstr = ''.join(['(objectClass=%s)' % cls for cls in
query.model.object_classes])
# FIXME(rbarrois): Remove this code as part of #101
if (len(query.where.children) == 1
and not isinstance(query.where.children[0], WhereNode)
and query.where.children[0].lhs.target.column == 'dn'):
lookup = query.where.children[0]
if lookup.lookup_name != 'exact':
raise LdapDBError("Unsupported dn lookup: %s" % lookup.lookup_name)
return LdapLookup(
base=lookup.rhs,
scope=ldap.SCOPE_BASE,
filterstr='(&%s)' % filterstr,
)
sql, params = compiler.compile(query.where)
if sql:
filterstr += '(%s)' % (sql % tuple(escape_ldap_filter(param) for param in params))
return LdapLookup(
base=query.model.base_dn,
scope=query.model.search_scope,
filterstr='(&%s)' % filterstr,
) | python | def query_as_ldap(query, compiler, connection):
"""Convert a django.db.models.sql.query.Query to a LdapLookup."""
if query.is_empty():
return
if query.model._meta.model_name == 'migration' and not hasattr(query.model, 'object_classes'):
# FIXME(rbarrois): Support migrations
return
# FIXME(rbarrois): this could be an extra Where clause
filterstr = ''.join(['(objectClass=%s)' % cls for cls in
query.model.object_classes])
# FIXME(rbarrois): Remove this code as part of #101
if (len(query.where.children) == 1
and not isinstance(query.where.children[0], WhereNode)
and query.where.children[0].lhs.target.column == 'dn'):
lookup = query.where.children[0]
if lookup.lookup_name != 'exact':
raise LdapDBError("Unsupported dn lookup: %s" % lookup.lookup_name)
return LdapLookup(
base=lookup.rhs,
scope=ldap.SCOPE_BASE,
filterstr='(&%s)' % filterstr,
)
sql, params = compiler.compile(query.where)
if sql:
filterstr += '(%s)' % (sql % tuple(escape_ldap_filter(param) for param in params))
return LdapLookup(
base=query.model.base_dn,
scope=query.model.search_scope,
filterstr='(&%s)' % filterstr,
) | [
"def",
"query_as_ldap",
"(",
"query",
",",
"compiler",
",",
"connection",
")",
":",
"if",
"query",
".",
"is_empty",
"(",
")",
":",
"return",
"if",
"query",
".",
"model",
".",
"_meta",
".",
"model_name",
"==",
"'migration'",
"and",
"not",
"hasattr",
"(",
... | Convert a django.db.models.sql.query.Query to a LdapLookup. | [
"Convert",
"a",
"django",
".",
"db",
".",
"models",
".",
"sql",
".",
"query",
".",
"Query",
"to",
"a",
"LdapLookup",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/backends/ldap/compiler.py#L37-L72 | train | 28,096 |
django-ldapdb/django-ldapdb | ldapdb/backends/ldap/compiler.py | where_node_as_ldap | def where_node_as_ldap(where, compiler, connection):
"""Parse a django.db.models.sql.where.WhereNode.
Returns:
(clause, [params]): the filter clause, with a list of unescaped parameters.
"""
bits, params = [], []
for item in where.children:
if isinstance(item, WhereNode):
clause, clause_params = compiler.compile(item)
else:
clause, clause_params = item.as_sql(compiler, connection)
bits.append(clause)
params.extend(clause_params)
if not bits:
return '', []
# FIXME(rbarrois): shouldn't we flatten recursive AND / OR?
if len(bits) == 1:
clause = bits[0]
elif where.connector == AND:
clause = '&' + ''.join('(%s)' % bit for bit in bits)
elif where.connector == OR:
clause = '|' + ''.join('(%s)' % bit for bit in bits)
else:
raise LdapDBError("Unhandled WHERE connector: %s" % where.connector)
if where.negated:
clause = ('!(%s)' % clause)
return clause, params | python | def where_node_as_ldap(where, compiler, connection):
"""Parse a django.db.models.sql.where.WhereNode.
Returns:
(clause, [params]): the filter clause, with a list of unescaped parameters.
"""
bits, params = [], []
for item in where.children:
if isinstance(item, WhereNode):
clause, clause_params = compiler.compile(item)
else:
clause, clause_params = item.as_sql(compiler, connection)
bits.append(clause)
params.extend(clause_params)
if not bits:
return '', []
# FIXME(rbarrois): shouldn't we flatten recursive AND / OR?
if len(bits) == 1:
clause = bits[0]
elif where.connector == AND:
clause = '&' + ''.join('(%s)' % bit for bit in bits)
elif where.connector == OR:
clause = '|' + ''.join('(%s)' % bit for bit in bits)
else:
raise LdapDBError("Unhandled WHERE connector: %s" % where.connector)
if where.negated:
clause = ('!(%s)' % clause)
return clause, params | [
"def",
"where_node_as_ldap",
"(",
"where",
",",
"compiler",
",",
"connection",
")",
":",
"bits",
",",
"params",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"item",
"in",
"where",
".",
"children",
":",
"if",
"isinstance",
"(",
"item",
",",
"WhereNode",
")",
... | Parse a django.db.models.sql.where.WhereNode.
Returns:
(clause, [params]): the filter clause, with a list of unescaped parameters. | [
"Parse",
"a",
"django",
".",
"db",
".",
"models",
".",
"sql",
".",
"where",
".",
"WhereNode",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/backends/ldap/compiler.py#L75-L107 | train | 28,097 |
django-ldapdb/django-ldapdb | ldapdb/backends/ldap/compiler.py | SQLCompiler.compile | def compile(self, node, *args, **kwargs):
"""Parse a WhereNode to a LDAP filter string."""
if isinstance(node, WhereNode):
return where_node_as_ldap(node, self, self.connection)
return super(SQLCompiler, self).compile(node, *args, **kwargs) | python | def compile(self, node, *args, **kwargs):
"""Parse a WhereNode to a LDAP filter string."""
if isinstance(node, WhereNode):
return where_node_as_ldap(node, self, self.connection)
return super(SQLCompiler, self).compile(node, *args, **kwargs) | [
"def",
"compile",
"(",
"self",
",",
"node",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"node",
",",
"WhereNode",
")",
":",
"return",
"where_node_as_ldap",
"(",
"node",
",",
"self",
",",
"self",
".",
"connection",
")... | Parse a WhereNode to a LDAP filter string. | [
"Parse",
"a",
"WhereNode",
"to",
"a",
"LDAP",
"filter",
"string",
"."
] | f3a35d3c8e42e7799d71e5d156ab97fb69e6b479 | https://github.com/django-ldapdb/django-ldapdb/blob/f3a35d3c8e42e7799d71e5d156ab97fb69e6b479/ldapdb/backends/ldap/compiler.py#L113-L117 | train | 28,098 |
emilmont/pyStatParser | stat_parser/eval_parser.py | TreeOperations.to_spans | def to_spans(self):
"Convert the tree to a set of nonterms and spans."
s = set()
self._convert_to_spans(self.tree, 1, s)
return s | python | def to_spans(self):
"Convert the tree to a set of nonterms and spans."
s = set()
self._convert_to_spans(self.tree, 1, s)
return s | [
"def",
"to_spans",
"(",
"self",
")",
":",
"s",
"=",
"set",
"(",
")",
"self",
".",
"_convert_to_spans",
"(",
"self",
".",
"tree",
",",
"1",
",",
"s",
")",
"return",
"s"
] | Convert the tree to a set of nonterms and spans. | [
"Convert",
"the",
"tree",
"to",
"a",
"set",
"of",
"nonterms",
"and",
"spans",
"."
] | 0e4990d7c1f0e3a0e0626ea2059ffd9030edf323 | https://github.com/emilmont/pyStatParser/blob/0e4990d7c1f0e3a0e0626ea2059ffd9030edf323/stat_parser/eval_parser.py#L55-L59 | train | 28,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.