idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
28,000
def get_pull_request_number(task, source_env_prefix):
    """Return the GitHub pull request number that created the graph, or None.

    Reads ``<source_env_prefix>_PULL_REQUEST_NUMBER`` from the task payload
    env and coerces it to an int when present.
    """
    raw = _extract_from_env_in_payload(task, source_env_prefix + '_PULL_REQUEST_NUMBER')
    return int(raw) if raw is not None else None
Get what Github pull request created the graph .
28,001
def get_and_check_project(valid_vcs_rules, source_url):
    """Given vcs rules and a source_url, return the project name.

    Raises:
        ValueError: if the source url matches no known repo rule.
    """
    project_path = match_url_regex(valid_vcs_rules, source_url, match_url_path_callback)
    if project_path is None:
        raise ValueError("Unknown repo for source url {}!".format(source_url))
    return project_path.split('/')[-1]
Given vcs rules and a source_url return the project .
28,002
def get_and_check_tasks_for(context, task, msg_prefix=''):
    """Return ``task['extra']['tasks_for']`` after validating it.

    Raises:
        ValueError: if the value is not in ``context.config['valid_tasks_for']``.
    """
    tasks_for = task['extra']['tasks_for']
    if tasks_for not in context.config['valid_tasks_for']:
        raise ValueError('{}Unknown tasks_for: {}'.format(msg_prefix, tasks_for))
    return tasks_for
Given a parent task return the reason the parent task was spawned .
28,003
def get_repo_scope(task, name):
    """Return the single repo scope on the task, or None if there is none.

    Raises:
        ValueError: if more than one scope matches REPO_SCOPE_REGEX.
    """
    repo_scopes = [scope for scope in task['scopes'] if REPO_SCOPE_REGEX.match(scope)]
    if len(repo_scopes) > 1:
        raise ValueError("{}: Too many repo_scopes: {}!".format(name, repo_scopes))
    if repo_scopes:
        return repo_scopes[0]
Given a parent task return the repo scope for the task .
28,004
def is_github_task(task):
    """Determine whether a task is related to GitHub.

    True when the scheduler is taskcluster-github, when ``tasks_for``
    starts with ``github-``, or when the metadata source is a GitHub URL.
    """
    checks = (
        task.get('schedulerId') == 'taskcluster-github',
        task.get('extra', {}).get('tasks_for', '').startswith('github-'),
        is_github_url(task.get('metadata', {}).get('source', '')),
    )
    return any(checks)
Determine if a task is related to GitHub .
28,005
def is_action(task):
    """Determine whether a task is an action task.

    A task counts as an action if its payload env defines ACTION_CALLBACK
    or if ``task['extra']['action']`` is set.
    """
    has_callback = bool(_extract_from_env_in_payload(task, 'ACTION_CALLBACK'))
    has_action_extra = task.get('extra', {}).get('action') is not None
    return has_callback or has_action_extra
Determine if a task is an action task .
28,006
def prepare_to_run_task ( context , claim_task ) : current_task_info = { } context . claim_task = claim_task current_task_info [ 'taskId' ] = get_task_id ( claim_task ) current_task_info [ 'runId' ] = get_run_id ( claim_task ) log . info ( "Going to run taskId {taskId} runId {runId}!" . format ( ** current_task_info ) ...
Given a claim_task json dict prepare the context and work_dir .
28,007
async def run_task ( context , to_cancellable_process ) : kwargs = { 'stdout' : PIPE , 'stderr' : PIPE , 'stdin' : None , 'close_fds' : True , 'preexec_fn' : lambda : os . setsid ( ) , } subprocess = await asyncio . create_subprocess_exec ( * context . config [ 'task_script' ] , ** kwargs ) context . proc = await to_ca...
Run the task sending stdout + stderr to files .
28,008
async def reclaim_task ( context , task ) : while True : log . debug ( "waiting %s seconds before reclaiming..." % context . config [ 'reclaim_interval' ] ) await asyncio . sleep ( context . config [ 'reclaim_interval' ] ) if task != context . task : return log . debug ( "Reclaiming task..." ) try : context . reclaim_t...
Try to reclaim a task from the queue .
28,009
async def complete_task ( context , result ) : args = [ get_task_id ( context . claim_task ) , get_run_id ( context . claim_task ) ] reversed_statuses = get_reversed_statuses ( context ) try : if result == 0 : log . info ( "Reporting task complete..." ) response = await context . temp_queue . reportCompleted ( * args )...
Mark the task as completed in the queue .
28,010
async def claim_work ( context ) : log . debug ( "Calling claimWork..." ) payload = { 'workerGroup' : context . config [ 'worker_group' ] , 'workerId' : context . config [ 'worker_id' ] , 'tasks' : 1 , } try : return await context . queue . claimWork ( context . config [ 'provisioner_id' ] , context . config [ 'worker_...
Find and claim the next pending task in the queue if any .
28,011
def raise_on_errors(errors, level=logging.CRITICAL):
    """Log ``errors`` at ``level`` and raise CoTError if any exist."""
    if not errors:
        return
    message = "\n".join(errors)
    log.log(level, message)
    raise CoTError(message)
Raise a CoTError if errors .
28,012
def guess_task_type ( name , task_defn ) : parts = name . split ( ':' ) task_type = parts [ - 1 ] if task_type == 'parent' : if is_action ( task_defn ) : task_type = 'action' else : task_type = 'decision' if task_type not in get_valid_task_types ( ) : raise CoTError ( "Invalid task type for {}!" . format ( name ) ) ret...
Guess the task type of the task .
28,013
def check_interactive_docker_worker ( link ) : errors = [ ] log . info ( "Checking for {} {} interactive docker-worker" . format ( link . name , link . task_id ) ) try : if link . task [ 'payload' ] [ 'features' ] . get ( 'interactive' ) : errors . append ( "{} is interactive: task.payload.features.interactive!" . form...
Given a task make sure the task was not defined as interactive .
28,014
def verify_docker_image_sha ( chain , link ) : cot = link . cot task = link . task errors = [ ] if isinstance ( task [ 'payload' ] . get ( 'image' ) , dict ) : docker_image_task_id = task [ 'extra' ] [ 'chainOfTrust' ] [ 'inputs' ] [ 'docker-image' ] log . debug ( "Verifying {} {} against docker-image {}" . format ( li...
Verify that built docker shas match the artifact .
28,015
def find_sorted_task_dependencies ( task , task_name , task_id ) : log . info ( "find_sorted_task_dependencies {} {}" . format ( task_name , task_id ) ) cot_input_dependencies = [ _craft_dependency_tuple ( task_name , task_type , task_id ) for task_type , task_id in task [ 'extra' ] . get ( 'chainOfTrust' , { } ) . get...
Find the taskIds of the chain of trust dependencies of a given task .
28,016
async def build_task_dependencies ( chain , task , name , my_task_id ) : log . info ( "build_task_dependencies {} {}" . format ( name , my_task_id ) ) if name . count ( ':' ) > chain . context . config [ 'max_chain_length' ] : raise CoTError ( "Too deep recursion!\n{}" . format ( name ) ) sorted_dependencies = find_sor...
Recursively build the task dependencies of a task .
28,017
async def download_cot ( chain ) : artifact_tasks = [ ] for link in chain . links : task_id = link . task_id parent_dir = link . cot_dir urls = [ ] unsigned_url = get_artifact_url ( chain . context , task_id , 'public/chain-of-trust.json' ) urls . append ( unsigned_url ) if chain . context . config [ 'verify_cot_signat...
Download the signed chain of trust artifacts .
28,018
async def download_cot_artifact ( chain , task_id , path ) : link = chain . get_link ( task_id ) log . debug ( "Verifying {} is in {} cot artifacts..." . format ( path , task_id ) ) if not link . cot : log . warning ( 'Chain of Trust for "{}" in {} does not exist. See above log for more details. \Skipping download of t...
Download an artifact and verify its SHA against the chain of trust .
28,019
async def download_cot_artifacts ( chain ) : upstream_artifacts = chain . task [ 'payload' ] . get ( 'upstreamArtifacts' , [ ] ) all_artifacts_per_task_id = get_all_artifacts_per_task_id ( chain , upstream_artifacts ) mandatory_artifact_tasks = [ ] optional_artifact_tasks = [ ] for task_id , paths in all_artifacts_per_...
Call download_cot_artifact in parallel for each upstreamArtifacts .
28,020
def is_artifact_optional(chain, task_id, path):
    """Tell whether an upstream artifact is flagged as optional."""
    upstream_artifacts = chain.task['payload'].get('upstreamArtifacts', [])
    optional_per_task = get_optional_artifacts_per_task_id(upstream_artifacts)
    return path in optional_per_task.get(task_id, [])
Tells whether an artifact is flagged as optional or not .
28,021
def get_all_artifacts_per_task_id ( chain , upstream_artifacts ) : all_artifacts_per_task_id = { } for link in chain . links : if link . task_type in PARENT_TASK_TYPES : add_enumerable_item_to_dict ( dict_ = all_artifacts_per_task_id , key = link . task_id , item = 'public/task-graph.json' ) if link . task_type in DECI...
Return every artifact to download including the Chain Of Trust Artifacts .
28,022
def verify_link_ed25519_cot_signature ( chain , link , unsigned_path , signature_path ) : if chain . context . config [ 'verify_cot_signature' ] : log . debug ( "Verifying the {} {} {} ed25519 chain of trust signature" . format ( link . name , link . task_id , link . worker_impl ) ) signature = read_from_file ( signatu...
Verify the ed25519 signatures of the chain of trust artifacts populated in download_cot .
28,023
def verify_cot_signatures ( chain ) : for link in chain . links : unsigned_path = link . get_artifact_full_path ( 'public/chain-of-trust.json' ) ed25519_signature_path = link . get_artifact_full_path ( 'public/chain-of-trust.json.sig' ) verify_link_ed25519_cot_signature ( chain , link , unsigned_path , ed25519_signatur...
Verify the signatures of the chain of trust artifacts populated in download_cot .
28,024
def verify_task_in_task_graph ( task_link , graph_defn , level = logging . CRITICAL ) : ignore_keys = ( "created" , "deadline" , "expires" , "dependencies" , "schedulerId" ) errors = [ ] runtime_defn = deepcopy ( task_link . task ) bad_deps = set ( runtime_defn [ 'dependencies' ] ) - set ( graph_defn [ 'task' ] [ 'depe...
Verify a given task_link's task against a given graph task definition .
28,025
def verify_link_in_task_graph ( chain , decision_link , task_link ) : log . info ( "Verifying the {} {} task definition is part of the {} {} task graph..." . format ( task_link . name , task_link . task_id , decision_link . name , decision_link . task_id ) ) if task_link . task_id in decision_link . task_graph : graph_...
Compare the runtime task definition against the decision task graph .
28,026
async def get_pushlog_info ( decision_link ) : source_env_prefix = decision_link . context . config [ 'source_env_prefix' ] repo = get_repo ( decision_link . task , source_env_prefix ) rev = get_revision ( decision_link . task , source_env_prefix ) context = decision_link . context pushlog_url = context . config [ 'pus...
Get pushlog info for a decision LinkOfTrust .
28,027
async def get_scm_level(context, project):
    """Get the scm level for a project from projects.yml.

    Populates the project cache first, then strips the ``scm_level_``
    prefix from the project's access string.
    """
    await context.populate_projects()
    access = context.projects[project]['access']
    return access.replace("scm_level_", "")
Get the scm level for a project from projects . yml .
28,028
async def populate_jsone_context ( chain , parent_link , decision_link , tasks_for ) : task_ids = { "default" : parent_link . task_id , "decision" : decision_link . task_id , } source_url = get_source_url ( decision_link ) project = get_and_check_project ( chain . context . config [ 'valid_vcs_rules' ] , source_url ) l...
Populate the json - e context to rebuild parent_link s task definition .
28,029
async def get_in_tree_template ( link ) : context = link . context source_url = get_source_url ( link ) if not source_url . endswith ( ( '.yml' , '.yaml' ) ) : raise CoTError ( "{} source url {} doesn't end in .yml or .yaml!" . format ( link . name , source_url ) ) tmpl = await load_json_or_yaml_from_url ( context , so...
Get the in - tree json - e template for a given link .
28,030
async def get_action_context_and_template ( chain , parent_link , decision_link ) : actions_path = decision_link . get_artifact_full_path ( 'public/actions.json' ) all_actions = load_json_or_yaml ( actions_path , is_path = True ) [ 'actions' ] action_name = get_action_callback_name ( parent_link . task ) action_defn = ...
Get the appropriate json - e context and template for an action task .
28,031
async def get_jsone_context_and_template ( chain , parent_link , decision_link , tasks_for ) : if tasks_for == 'action' : jsone_context , tmpl = await get_action_context_and_template ( chain , parent_link , decision_link ) else : tmpl = await get_in_tree_template ( decision_link ) jsone_context = await populate_jsone_c...
Get the appropriate json - e context and template for any parent task .
28,032
def check_and_update_action_task_group_id ( parent_link , decision_link , rebuilt_definitions ) : rebuilt_gid = rebuilt_definitions [ 'tasks' ] [ 0 ] [ 'payload' ] [ 'env' ] [ 'ACTION_TASK_GROUP_ID' ] runtime_gid = parent_link . task [ 'payload' ] [ 'env' ] [ 'ACTION_TASK_GROUP_ID' ] acceptable_gids = { parent_link . t...
Update the ACTION_TASK_GROUP_ID of an action after verifying .
28,033
def compare_jsone_task_definition ( parent_link , rebuilt_definitions ) : diffs = [ ] for compare_definition in rebuilt_definitions [ 'tasks' ] : if 'taskId' in compare_definition : del ( compare_definition [ 'taskId' ] ) compare_definition = remove_empty_keys ( compare_definition ) runtime_definition = remove_empty_ke...
Compare the json - e rebuilt task definition vs the runtime definition .
28,034
async def verify_parent_task ( chain , link ) : worker_type = get_worker_type ( link . task ) if worker_type not in chain . context . config [ 'valid_decision_worker_types' ] : raise CoTError ( "{} is not a valid decision workerType!" . format ( worker_type ) ) if chain is not link : path = link . get_artifact_full_pat...
Verify the parent task Link .
28,035
async def verify_docker_image_task(chain, link):
    """Verify the docker-image Link runs on a valid worker type."""
    worker_type = get_worker_type(link.task)
    errors = []
    if worker_type not in chain.context.config['valid_docker_image_worker_types']:
        errors.append("{} is not a valid docker-image workerType!".format(worker_type))
    raise_on_errors(errors)
Verify the docker image Link .
28,036
def check_num_tasks(chain, task_count):
    """Make sure the chain contains at least one decision task."""
    min_decision_tasks = 1
    errors = []
    if task_count['decision'] < min_decision_tasks:
        errors.append(
            "{} decision tasks; we must have at least {}!".format(
                task_count['decision'], min_decision_tasks
            )
        )
    raise_on_errors(errors)
Make sure there are a specific number of specific task types .
28,037
async def verify_docker_worker_task(chain, link):
    """Run docker-worker specific checks on links other than the chain itself."""
    if chain == link:
        return
    check_interactive_docker_worker(link)
    verify_docker_image_sha(chain, link)
Docker - worker specific checks .
28,038
async def verify_scriptworker_task(chain, obj):
    """Verify that the trust object was run from scriptworker."""
    errors = []
    if obj.worker_impl != "scriptworker":
        errors.append("{} {} must be run from scriptworker!".format(obj.name, obj.task_id))
    raise_on_errors(errors)
Verify the signing trust object .
28,039
def verify_repo_matches_url ( repo , url ) : repo_parts = urlparse ( repo ) url_parts = urlparse ( url ) errors = [ ] repo_path_parts = repo_parts . path . split ( '/' ) url_path_parts = url_parts . path . split ( '/' ) if repo_parts . hostname != url_parts . hostname : errors . append ( "verify_repo_matches_url: Hostn...
Verify url is a part of repo .
28,040
def get_source_url ( obj ) : source_env_prefix = obj . context . config [ 'source_env_prefix' ] task = obj . task log . debug ( "Getting source url for {} {}..." . format ( obj . name , obj . task_id ) ) repo = get_repo ( obj . task , source_env_prefix = source_env_prefix ) source = task [ 'metadata' ] [ 'source' ] if ...
Get the source url for a Trust object .
28,041
async def trace_back_to_tree ( chain ) : errors = [ ] repos = { } restricted_privs = None rules = { } for my_key , config_key in { 'scopes' : 'cot_restricted_scopes' , 'trees' : 'cot_restricted_trees' } . items ( ) : rules [ my_key ] = chain . context . config [ config_key ] for obj in [ chain ] + chain . links : sourc...
Trace the chain back to the tree .
28,042
async def verify_chain_of_trust ( chain ) : log_path = os . path . join ( chain . context . config [ "task_log_dir" ] , "chain_of_trust.log" ) scriptworker_log = logging . getLogger ( 'scriptworker' ) with contextual_log_handler ( chain . context , path = log_path , log_obj = scriptworker_log , formatter = AuditLogForm...
Build and verify the chain of trust .
28,043
async def is_try_or_pull_request ( self ) : tasks = [ asyncio . ensure_future ( link . is_try_or_pull_request ( ) ) for link in self . links ] tasks . insert ( 0 , asyncio . ensure_future ( is_try_or_pull_request ( self . context , self . task ) ) ) conditions = await raise_future_exceptions ( tasks ) return any ( cond...
Determine if any task in the chain is a try or pull request task .
28,044
def get_link(self, task_id):
    """Get the LinkOfTrust matching ``task_id``.

    Raises:
        CoTError: unless exactly one link matches.
    """
    matches = [link for link in self.links if link.task_id == task_id]
    if len(matches) != 1:
        raise CoTError(
            "No single Link matches task_id {}!\n{}".format(task_id, self.dependent_task_ids())
        )
    return matches[0]
Get a LinkOfTrust by task id .
28,045
def get_all_links_in_chain(self):
    """Return all links in the chain of trust, including the target task.

    When the target task is a decision task it already appears in
    ``self.links``, so avoid listing it twice.
    """
    target_already_linked = self.is_decision() and self.get_link(self.task_id)
    if target_already_linked:
        return self.links
    return [self] + self.links
Return all links in the chain of trust including the target task .
28,046
def format(self, record):
    """Indent debug messages with a leading space for more legibility."""
    is_debug = record.levelno == logging.DEBUG
    if is_debug:
        # ``format`` (not concat) so non-str msg objects are handled.
        record.msg = ' {}'.format(record.msg)
    return super(AuditLogFormatter, self).format(record)
Space debug messages for more legibility .
28,047
def get_version_string(version):
    """Translate a version tuple into a string.

    Args:
        version (tuple): either ``(major, minor, patch)`` or
            ``(major, minor, patch, prerelease)``.

    Returns:
        str: ``"X.Y.Z"`` or ``"X.Y.Z-prerelease"``.

    Raises:
        ValueError: if the tuple has any other length.
    """
    version_len = len(version)
    if version_len == 3:
        return '%d.%d.%d' % version
    if version_len == 4:
        return '%d.%d.%d-%s' % version
    # Bare ``Exception`` is too broad to catch meaningfully; ValueError is
    # the idiomatic type here and still satisfies ``except Exception``.
    raise ValueError('Version tuple is non-semver-compliant {} length!'.format(version_len))
Translate a version tuple into a string .
28,048
def get_unfrozen_copy(values):
    """Recursively convert frozendicts into dicts and tuples into lists."""
    if isinstance(values, (frozendict, dict)):
        return {key: get_unfrozen_copy(val) for key, val in values.items()}
    if isinstance(values, (list, tuple)):
        return [get_unfrozen_copy(item) for item in values]
    return values
Recursively convert values' tuple values into lists and frozendicts into dicts .
28,049
def read_worker_creds ( key = "credentials" ) : for path in CREDS_FILES : if not os . path . exists ( path ) : continue contents = load_json_or_yaml ( path , is_path = True , exception = None ) if contents . get ( key ) : return contents [ key ] else : if key == "credentials" and os . environ . get ( "TASKCLUSTER_ACCES...
Get credentials from CREDS_FILES or the environment .
28,050
def check_config ( config , path ) : messages = [ ] config_copy = get_frozen_copy ( config ) missing_keys = set ( DEFAULT_CONFIG . keys ( ) ) - set ( config_copy . keys ( ) ) if missing_keys : messages . append ( "Missing config keys {}!" . format ( missing_keys ) ) for key , value in config_copy . items ( ) : if key n...
Validate the config against DEFAULT_CONFIG .
28,051
def apply_product_config ( config ) : cot_product = config [ 'cot_product' ] for key in config : if isinstance ( config [ key ] , Mapping ) and 'by-cot-product' in config [ key ] : try : config [ key ] = config [ key ] [ 'by-cot-product' ] [ cot_product ] except KeyError : raise ConfigError ( "Product {} not specified ...
Apply config values that are keyed by cot_product .
28,052
def create_config ( config_path = "scriptworker.yaml" ) : if not os . path . exists ( config_path ) : print ( "{} doesn't exist! Exiting..." . format ( config_path ) , file = sys . stderr ) sys . exit ( 1 ) with open ( config_path , "r" , encoding = "utf-8" ) as fh : secrets = safe_load ( fh ) config = dict ( deepcopy ...
Create a config from DEFAULT_CONFIG arguments and config file .
28,053
def get_context_from_cmdln ( args , desc = "Run scriptworker" ) : context = Context ( ) parser = argparse . ArgumentParser ( description = desc ) parser . add_argument ( "config_path" , type = str , nargs = "?" , default = "scriptworker.yaml" , help = "the path to the config file" ) parsed_args = parser . parse_args ( ...
Create a Context object from args .
28,054
def get_cot_artifacts ( context ) : artifacts = { } filepaths = filepaths_in_dir ( context . config [ 'artifact_dir' ] ) hash_alg = context . config [ 'chain_of_trust_hash_algorithm' ] for filepath in sorted ( filepaths ) : path = os . path . join ( context . config [ 'artifact_dir' ] , filepath ) sha = get_hash ( path...
Generate the artifact relative paths and shas for the chain of trust .
28,055
def generate_cot_body ( context ) : try : cot = { 'artifacts' : get_cot_artifacts ( context ) , 'chainOfTrustVersion' : 1 , 'runId' : context . claim_task [ 'runId' ] , 'task' : context . task , 'taskId' : context . claim_task [ 'status' ] [ 'taskId' ] , 'workerGroup' : context . claim_task [ 'workerGroup' ] , 'workerI...
Generate the chain of trust dictionary .
28,056
def generate_cot ( context , parent_path = None ) : body = generate_cot_body ( context ) schema = load_json_or_yaml ( context . config [ 'cot_schema_path' ] , is_path = True , exception = ScriptWorkerException , message = "Can't read schema file {}: %(exc)s" . format ( context . config [ 'cot_schema_path' ] ) ) validat...
Format and sign the cot body and write to disk .
28,057
def is_github_repo_owner_the_official_one ( context , repo_owner ) : official_repo_owner = context . config [ 'official_github_repos_owner' ] if not official_repo_owner : raise ConfigError ( 'This worker does not have a defined owner for official GitHub repositories. ' 'Given "official_github_repos_owner": {}' . format...
Given a repo_owner check if it matches the one configured to be the official one .
28,058
def get_tag_hash ( self , tag_name ) : tag_object = get_single_item_from_sequence ( sequence = self . _github_repository . tags ( ) , condition = lambda tag : tag . name == tag_name , no_item_error_message = 'No tag "{}" exist' . format ( tag_name ) , too_many_item_error_message = 'Too many tags "{}" found' . format ( ...
Fetch the commit hash that was tagged with tag_name .
28,059
async def has_commit_landed_on_repository ( self , context , revision ) : if not _is_git_full_hash ( revision ) : revision = self . get_tag_hash ( tag_name = revision ) repo = self . _github_repository . html_url url = '/' . join ( [ repo . rstrip ( '/' ) , 'branch_commits' , revision ] ) html_data = await retry_reques...
Tell if a commit was landed on the repository or if it just comes from a pull request .
28,060
def update_logging_config ( context , log_name = None , file_name = 'worker.log' ) : log_name = log_name or __name__ . split ( '.' ) [ 0 ] top_level_logger = logging . getLogger ( log_name ) datefmt = context . config [ 'log_datefmt' ] fmt = context . config [ 'log_fmt' ] formatter = logging . Formatter ( fmt = fmt , d...
Update python logging settings from config .
28,061
async def pipe_to_log(pipe, filehandles=(), level=logging.INFO):
    """Log each line from a subprocess PIPE, mirroring it to filehandles."""
    while True:
        line = await pipe.readline()
        if not line:
            break
        line = to_unicode(line)
        log.log(level, line.rstrip())
        for filehandle in filehandles:
            print(line, file=filehandle, end="")
Log from a subprocess PIPE .
28,062
def get_log_filehandle(context):
    """Open the worker log file for writing and yield its filehandle.

    Creates the task log dir first if needed.
    """
    path = get_log_filename(context)
    makedirs(context.config['task_log_dir'])
    with open(path, "w", encoding="utf-8") as fh:
        yield fh
Open the log and error filehandles .
28,063
def contextual_log_handler ( context , path , log_obj = None , level = logging . DEBUG , formatter = None ) : log_obj = log_obj or log formatter = formatter or logging . Formatter ( fmt = context . config [ 'log_fmt' ] , datefmt = context . config [ 'log_datefmt' ] , ) parent_path = os . path . dirname ( path ) makedir...
Add a short - lived log with a contextmanager for cleanup .
28,064
async def upload_artifacts ( context , files ) : def to_upload_future ( target_path ) : path = os . path . join ( context . config [ 'artifact_dir' ] , target_path ) content_type , content_encoding = compress_artifact_if_supported ( path ) return asyncio . ensure_future ( retry_create_artifact ( context , path , target...
Compress and upload the requested files from artifact_dir preserving relative paths .
28,065
def compress_artifact_if_supported ( artifact_path ) : content_type , encoding = guess_content_type_and_encoding ( artifact_path ) log . debug ( '"{}" is encoded with "{}" and has mime/type "{}"' . format ( artifact_path , encoding , content_type ) ) if encoding is None and content_type in _GZIP_SUPPORTED_CONTENT_TYPE ...
Compress artifacts with GZip if they re known to be supported .
28,066
def guess_content_type_and_encoding(path):
    """Guess the content type and encoding of a path.

    Known extensions are checked first; everything else falls back to
    ``mimetypes.guess_type``, defaulting to ``application/binary``.
    """
    for extension, mapped in _EXTENSION_TO_MIME_TYPE.items():
        if path.endswith(extension):
            # NOTE(review): returns the mapped value directly — presumably a
            # (content_type, encoding) pair like the fallback below; confirm
            # against the _EXTENSION_TO_MIME_TYPE definition.
            return mapped
    guessed_type, encoding = mimetypes.guess_type(path)
    return guessed_type or "application/binary", encoding
Guess the content type of a path using mimetypes .
28,067
async def create_artifact ( context , path , target_path , content_type , content_encoding , storage_type = 's3' , expires = None ) : payload = { "storageType" : storage_type , "expires" : expires or get_expiration_arrow ( context ) . isoformat ( ) , "contentType" : content_type , } args = [ get_task_id ( context . cla...
Create an artifact and upload it .
28,068
def get_artifact_url(context, task_id, path):
    """Get a TaskCluster artifact url.

    Public artifacts get a plain URL; everything else gets a signed URL.
    """
    if path.startswith("public/"):
        return context.queue.buildUrl('getLatestArtifact', task_id, path)
    return context.queue.buildSignedUrl('getLatestArtifact', task_id, path)
Get a TaskCluster artifact url .
28,069
async def download_artifacts ( context , file_urls , parent_dir = None , session = None , download_func = download_file , valid_artifact_task_ids = None ) : parent_dir = parent_dir or context . config [ 'work_dir' ] session = session or context . session tasks = [ ] files = [ ] valid_artifact_rules = context . config [...
Download artifacts in parallel after validating their URLs .
28,070
def get_upstream_artifacts_full_paths_per_task_id ( context ) : upstream_artifacts = context . task [ 'payload' ] [ 'upstreamArtifacts' ] task_ids_and_relative_paths = [ ( artifact_definition [ 'taskId' ] , artifact_definition [ 'paths' ] ) for artifact_definition in upstream_artifacts ] optional_artifacts_per_task_id ...
List the downloaded upstream artifacts .
28,071
def get_and_check_single_upstream_artifact_full_path ( context , task_id , path ) : abs_path = get_single_upstream_artifact_full_path ( context , task_id , path ) if not os . path . exists ( abs_path ) : raise ScriptWorkerTaskException ( 'upstream artifact with path: {}, does not exist' . format ( abs_path ) ) return a...
Return the full path where an upstream artifact is located on disk .
28,072
def get_single_upstream_artifact_full_path(context, task_id, path):
    """Return the absolute path where an upstream artifact should live on disk."""
    cot_dir = os.path.join(context.config['work_dir'], 'cot', task_id)
    return os.path.abspath(os.path.join(cot_dir, path))
Return the full path where an upstream artifact should be located .
28,073
def get_optional_artifacts_per_task_id ( upstream_artifacts ) : optional_artifacts_per_task_id = { } for artifact_definition in upstream_artifacts : if artifact_definition . get ( 'optional' , False ) is True : task_id = artifact_definition [ 'taskId' ] artifacts_paths = artifact_definition [ 'paths' ] add_enumerable_i...
Return every optional artifact defined in upstream_artifacts ordered by taskId .
28,074
def set_chat_description ( chat_id , description , ** kwargs ) : if len ( description ) > 255 : raise ValueError ( "Chat description must be less than 255 characters." ) params = dict ( chat_id = chat_id , description = description ) return TelegramBotRPCRequest ( 'setChatTitle' , params = params , on_result = lambda r...
Use this method to change the description of a supergroup or a channel . The bot must be an administrator in the chat for this to work and must have the appropriate admin rights . Returns True on success .
28,075
def send_audio ( chat_id , audio , caption = None , duration = None , performer = None , title = None , reply_to_message_id = None , reply_markup = None , disable_notification = False , parse_mode = None , ** kwargs ) : files = None if isinstance ( audio , InputFile ) : files = [ audio ] audio = None elif not isinstanc...
Use this method to send audio files if you want Telegram clients to display them in the music player .
28,076
def unban_chat_member(chat_id, user_id, **kwargs):
    """Unban a previously kicked user in a supergroup.

    The bot must be an administrator in the group. The user will not
    rejoin automatically but may join via invite link.
    """
    params = {'chat_id': chat_id, 'user_id': user_id}
    return TelegramBotRPCRequest(
        'unbanChatMember', params=params, on_result=lambda result: result, **kwargs
    )
Use this method to unban a previously kicked user in a supergroup . The user will not return to the group automatically but will be able to join via link etc . The bot must be an administrator in the group for this to work
28,077
def get_chat_member(chat_id, user_id, **kwargs):
    """Get information about a member of a chat as a ChatMember."""
    params = {'chat_id': chat_id, 'user_id': user_id}
    return TelegramBotRPCRequest(
        'getChatMember',
        params=params,
        on_result=lambda result: ChatMember.from_result(result),
        **kwargs
    )
Use this method to get information about a member of a chat
28,078
def get_file(file_id, **kwargs):
    """Get basic info about a file and prepare it for downloading."""
    return TelegramBotRPCRequest(
        'getFile', params={'file_id': file_id}, on_result=File.from_result, **kwargs
    )
Use this method to get basic info about a file and prepare it for downloading .
28,079
def get_updates ( offset = None , limit = None , timeout = None , allowed_updates = None , ** kwargs ) : params = _clean_params ( offset = offset , limit = limit , timeout = timeout , allowed_updates = allowed_updates , ) return TelegramBotRPCRequest ( 'getUpdates' , params = params , on_result = Update . from_result ,...
Use this method to receive incoming updates using long polling .
28,080
async def connect ( self , cluster_id , client_id , nats = None , connect_timeout = DEFAULT_CONNECT_TIMEOUT , max_pub_acks_inflight = DEFAULT_MAX_PUB_ACKS_INFLIGHT , loop = None , ) : self . _cluster_id = cluster_id self . _client_id = client_id self . _loop = loop self . _connect_timeout = connect_timeout if nats is n...
Starts a session with a NATS Streaming cluster .
28,081
async def _process_ack ( self , msg ) : pub_ack = protocol . PubAck ( ) pub_ack . ParseFromString ( msg . data ) if not self . _pending_pub_acks_queue . empty ( ) : await self . _pending_pub_acks_queue . get ( ) try : cb = self . _pub_ack_map [ pub_ack . guid ] await cb ( pub_ack ) del self . _pub_ack_map [ pub_ack . g...
Receives acks from the publishes via the _STAN . acks subscription .
28,082
async def _process_msg ( self , sub ) : while True : try : raw_msg = await sub . _msgs_queue . get ( ) msg = Msg ( ) msg_proto = protocol . MsgProto ( ) msg_proto . ParseFromString ( raw_msg . data ) msg . proto = msg_proto msg . sub = sub await sub . cb ( msg ) if not sub . manual_acks : msg_ack = protocol . Ack ( ) m...
Receives the msgs from the STAN subscriptions and replies . By default it will reply back with an ack unless manual acking was specified in one of the subscription options .
28,083
async def ack(self, msg):
    """Manually ack a message back on its subscription's ack inbox."""
    ack_proto = protocol.Ack()
    ack_proto.subject = msg.proto.subject
    ack_proto.sequence = msg.proto.sequence
    payload = ack_proto.SerializeToString()
    await self._nc.publish(msg.sub.ack_inbox, payload)
Used to manually acks a message .
28,084
async def publish ( self , subject , payload , ack_handler = None , ack_wait = DEFAULT_ACK_WAIT , ) : stan_subject = '' . join ( [ self . _pub_prefix , '.' , subject ] ) guid = new_guid ( ) pe = protocol . PubMsg ( ) pe . clientID = self . _client_id pe . guid = guid pe . subject = subject pe . data = payload await sel...
Publishes a payload onto a subject . By default it will block until the message which has been published has been acked back . An optional async handler can be publi
28,085
async def _close ( self ) : try : if self . _hb_inbox_sid is not None : await self . _nc . unsubscribe ( self . _hb_inbox_sid ) self . _hb_inbox = None self . _hb_inbox_sid = None if self . _ack_subject_sid is not None : await self . _nc . unsubscribe ( self . _ack_subject_sid ) self . _ack_subject = None self . _ack_s...
Removes any present internal state from the client .
28,086
async def close ( self ) : await self . _close ( ) req = protocol . CloseRequest ( ) req . clientID = self . _client_id msg = await self . _nc . request ( self . _close_req_subject , req . SerializeToString ( ) , self . _connect_timeout , ) resp = protocol . CloseResponse ( ) resp . ParseFromString ( msg . data ) if re...
Close terminates a session with NATS Streaming .
28,087
async def unsubscribe ( self ) : await self . _nc . unsubscribe ( self . sid ) try : sub = self . _sc . _sub_map [ self . inbox ] sub . _msgs_task . cancel ( ) del self . _sc . _sub_map [ self . inbox ] except KeyError : pass req = protocol . UnsubscribeRequest ( ) req . clientID = self . _sc . _client_id req . subject...
Remove subscription on a topic in this client .
28,088
def datetime_from_ldap ( value ) : if not value : return None match = LDAP_DATETIME_RE . match ( value ) if not match : return None groups = match . groupdict ( ) if groups [ 'microsecond' ] : groups [ 'microsecond' ] = groups [ 'microsecond' ] . ljust ( 6 , '0' ) [ : 6 ] tzinfo = groups . pop ( 'tzinfo' ) if tzinfo ==...
Convert a LDAP - style datetime to a Python aware object .
28,089
def get_db_prep_value ( self , value , connection , prepared = False ) : if prepared : return value if value is None : return [ ] values = value if self . multi_valued_field else [ value ] prepared_values = [ self . get_prep_value ( v ) for v in values ] return list ( sorted ( set ( v for v in prepared_values if v ) ) ...
Prepare a value for DB interaction .
28,090
def build_rdn(self):
    """Build the Relative Distinguished Name (RDN) for this entry.

    The RDN is assembled from every primary-key field that has a
    ``db_column``; multiple components are joined with ``'+'``
    (multi-valued RDN syntax).

    Returns:
        str: the RDN, e.g. ``"uid=jdoe"``.

    Raises:
        Exception: if no primary-key field with a db_column exists.
    """
    # Comprehension replaces the manual append loop; truthiness test
    # replaces the un-idiomatic `if not len(bits)`.
    bits = [
        "%s=%s" % (field.db_column, getattr(self, field.name))
        for field in self._meta.fields
        if field.db_column and field.primary_key
    ]
    if not bits:
        raise Exception("Could not build Distinguished Name")
    return '+'.join(bits)
Build the Relative Distinguished Name for this entry .
28,091
def delete(self, using=None):
    """Delete this entry from the LDAP directory.

    Args:
        using: alias of the database connection to use; when None,
            the router picks the write database for this model.

    Emits the ``post_delete`` signal after the directory delete succeeds.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    connection = connections[using]
    # Lazy %-args: the message is only formatted when DEBUG logging is
    # actually enabled (was eager "%" % formatting).
    logger.debug("Deleting LDAP entry %s", self.dn)
    connection.delete_s(self.dn)
    signals.post_delete.send(sender=self.__class__, instance=self)
Delete this entry .
28,092
def _save_table ( self , raw = False , cls = None , force_insert = None , force_update = None , using = None , update_fields = None ) : connection = connections [ using ] create = bool ( force_insert or not self . dn ) if update_fields : target_fields = [ self . _meta . get_field ( name ) for name in update_fields ] el...
Saves the current instance .
28,093
def scoped ( base_class , base_dn ) : class Meta : proxy = True verbose_name = base_class . _meta . verbose_name verbose_name_plural = base_class . _meta . verbose_name_plural import re suffix = re . sub ( '[=,]' , '_' , base_dn ) name = "%s_%s" % ( base_class . __name__ , str ( suffix ) ) new_class = type ( str ( name...
Returns a copy of the current class with a different base_dn .
28,094
def get_connection_params ( self ) : return { 'uri' : self . settings_dict [ 'NAME' ] , 'tls' : self . settings_dict . get ( 'TLS' , False ) , 'bind_dn' : self . settings_dict [ 'USER' ] , 'bind_pw' : self . settings_dict [ 'PASSWORD' ] , 'retry_max' : self . settings_dict . get ( 'RETRY_MAX' , 1 ) , 'retry_delay' : se...
Compute appropriate parameters for establishing a new connection .
28,095
def get_new_connection ( self , conn_params ) : connection = ldap . ldapobject . ReconnectLDAPObject ( uri = conn_params [ 'uri' ] , retry_max = conn_params [ 'retry_max' ] , retry_delay = conn_params [ 'retry_delay' ] , bytes_mode = False ) options = conn_params [ 'options' ] for opt , value in options . items ( ) : i...
Build a connection from its parameters .
28,096
def query_as_ldap ( query , compiler , connection ) : if query . is_empty ( ) : return if query . model . _meta . model_name == 'migration' and not hasattr ( query . model , 'object_classes' ) : return filterstr = '' . join ( [ '(objectClass=%s)' % cls for cls in query . model . object_classes ] ) if ( len ( query . wh...
Convert a django . db . models . sql . query . Query to a LdapLookup .
28,097
def where_node_as_ldap ( where , compiler , connection ) : bits , params = [ ] , [ ] for item in where . children : if isinstance ( item , WhereNode ) : clause , clause_params = compiler . compile ( item ) else : clause , clause_params = item . as_sql ( compiler , connection ) bits . append ( clause ) params . extend (...
Parse a django . db . models . sql . where . WhereNode .
28,098
def compile(self, node, *args, **kwargs):
    """Compile *node*; WhereNode instances become LDAP filter strings.

    Any other node type is delegated unchanged to the default SQL
    compiler implementation.
    """
    if not isinstance(node, WhereNode):
        return super(SQLCompiler, self).compile(node, *args, **kwargs)
    return where_node_as_ldap(node, self, self.connection)
Parse a WhereNode to a LDAP filter string .
28,099
def to_spans(self):
    """Convert the tree to a set of nonterms and spans."""
    spans = set()
    self._convert_to_spans(self.tree, 1, spans)
    return spans
Convert the tree to a set of nonterms and spans .