query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Write the Field to the file - like object out .
def pack(self, out: IO):
    """Write this Field to the file-like object *out*.

    :param out: any writable file-like object.
    """
    out.write(self.access_flags.pack())
    index_bytes = pack('>HH', self._name_index, self._descriptor_index)
    out.write(index_bytes)
    self.attributes.pack(out)
3,200
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/fields.py#L74-L87
[ "def", "_parse_guild_disband_info", "(", "self", ",", "info_container", ")", ":", "m", "=", "disband_regex", ".", "search", "(", "info_container", ".", "text", ")", "if", "m", ":", "self", ".", "disband_condition", "=", "m", ".", "group", "(", "2", ")", "self", ".", "disband_date", "=", "parse_tibia_date", "(", "m", ".", "group", "(", "1", ")", ".", "replace", "(", "\"\\xa0\"", ",", "\" \"", ")", ")" ]
Removes a Field from the table by identity .
def remove(self, field: Field):
    """Remove a Field from the table by identity (``is``), not equality."""
    self._table = [
        candidate
        for candidate in self._table
        if candidate is not field
    ]
3,201
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/fields.py#L104-L108
[ "def", "makeSolution", "(", "self", ",", "cNrm", ",", "mNrm", ")", ":", "solution", "=", "ConsumerSolution", "(", ")", "# An empty solution to which we'll add state-conditional solutions", "# Calculate the MPC at each market resource gridpoint in each state (if desired)", "if", "self", ".", "CubicBool", ":", "dcda", "=", "self", ".", "EndOfPrdvPP", "/", "self", ".", "uPP", "(", "np", ".", "array", "(", "self", ".", "cNrmNow", ")", ")", "MPC", "=", "dcda", "/", "(", "dcda", "+", "1.0", ")", "self", ".", "MPC_temp", "=", "np", ".", "hstack", "(", "(", "np", ".", "reshape", "(", "self", ".", "MPCmaxNow", ",", "(", "self", ".", "StateCount", ",", "1", ")", ")", ",", "MPC", ")", ")", "interpfunc", "=", "self", ".", "makeCubiccFunc", "else", ":", "interpfunc", "=", "self", ".", "makeLinearcFunc", "# Loop through each current period state and add its solution to the overall solution", "for", "i", "in", "range", "(", "self", ".", "StateCount", ")", ":", "# Set current-period-conditional human wealth and MPC bounds", "self", ".", "hNrmNow_j", "=", "self", ".", "hNrmNow", "[", "i", "]", "self", ".", "MPCminNow_j", "=", "self", ".", "MPCminNow", "[", "i", "]", "if", "self", ".", "CubicBool", ":", "self", ".", "MPC_temp_j", "=", "self", ".", "MPC_temp", "[", "i", ",", ":", "]", "# Construct the consumption function by combining the constrained and unconstrained portions", "self", ".", "cFuncNowCnst", "=", "LinearInterp", "(", "[", "self", ".", "mNrmMin_list", "[", "i", "]", ",", "self", ".", "mNrmMin_list", "[", "i", "]", "+", "1.0", "]", ",", "[", "0.0", ",", "1.0", "]", ")", "cFuncNowUnc", "=", "interpfunc", "(", "mNrm", "[", "i", ",", ":", "]", ",", "cNrm", "[", "i", ",", ":", "]", ")", "cFuncNow", "=", "LowerEnvelope", "(", "cFuncNowUnc", ",", "self", ".", "cFuncNowCnst", ")", "# Make the marginal value function and pack up the current-state-conditional solution", "vPfuncNow", "=", "MargValueFunc", "(", "cFuncNow", ",", "self", ".", "CRRA", ")", "solution_cond", "=", 
"ConsumerSolution", "(", "cFunc", "=", "cFuncNow", ",", "vPfunc", "=", "vPfuncNow", ",", "mNrmMin", "=", "self", ".", "mNrmMinNow", ")", "if", "self", ".", "CubicBool", ":", "# Add the state-conditional marginal marginal value function (if desired)", "solution_cond", "=", "self", ".", "addvPPfunc", "(", "solution_cond", ")", "# Add the current-state-conditional solution to the overall period solution", "solution", ".", "appendSolution", "(", "solution_cond", ")", "# Add the lower bounds of market resources, MPC limits, human resources,", "# and the value functions to the overall solution", "solution", ".", "mNrmMin", "=", "self", ".", "mNrmMin_list", "solution", "=", "self", ".", "addMPCandHumanWealth", "(", "solution", ")", "if", "self", ".", "vFuncBool", ":", "vFuncNow", "=", "self", ".", "makevFunc", "(", "solution", ")", "solution", ".", "vFunc", "=", "vFuncNow", "# Return the overall solution to this period", "return", "solution" ]
Read the FieldTable from the file - like object source .
def unpack(self, source: IO):
    """Read the FieldTable from the file-like object *source*.

    :param source: any readable file-like object.
    """
    field_count = unpack('>H', source.read(2))[0]
    for _ in repeat(None, field_count):
        new_field = Field(self._cf)
        new_field.unpack(source)
        self.append(new_field)
3,202
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/fields.py#L150-L165
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
Write the FieldTable to the file - like object out .
def pack(self, out: IO):
    """Write the FieldTable to the file-like object *out*.

    :param out: any writable file-like object.
    """
    out.write(pack('>H', len(self)))
    for entry in self._table:
        entry.pack(out)
3,203
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/fields.py#L167-L180
[ "def", "__contribution_from_parameters", "(", "self", ",", "parameter_names", ")", ":", "#get the prior and posterior for the base case", "bprior", ",", "bpost", "=", "self", ".", "prior_prediction", ",", "self", ".", "posterior_prediction", "#get the prior and posterior for the conditioned case", "la_cond", "=", "self", ".", "get_conditional_instance", "(", "parameter_names", ")", "cprior", ",", "cpost", "=", "la_cond", ".", "prior_prediction", ",", "la_cond", ".", "posterior_prediction", "return", "cprior", ",", "cpost" ]
Iterates over the fields table yielding each matching field . Calling without any arguments is equivalent to iterating over the table .
def find(self, *, name: str = None, type_: str = None,
         f: Callable = None) -> Iterator[Field]:
    """Iterate over the table, yielding each matching field.

    Calling without any arguments is equivalent to iterating over the
    table.

    :param name: only yield fields with this exact name.
    :param type_: only yield fields whose descriptor matches.
    :param f: only yield fields for which ``f(field)`` is truthy.
    """
    for field in self._table:
        if name is not None and field.name.value != name:
            continue
        if type_ is not None and field.descriptor.value != type_:
            continue
        if f is not None and not f(field):
            continue
        yield field
3,204
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/fields.py#L185-L206
[ "def", "set_relay_off", "(", "self", ")", ":", "if", "self", ".", "get_relay_state", "(", ")", ":", "try", ":", "request", "=", "requests", ".", "get", "(", "'{}/relay'", ".", "format", "(", "self", ".", "resource", ")", ",", "params", "=", "{", "'state'", ":", "'0'", "}", ",", "timeout", "=", "self", ".", "timeout", ")", "if", "request", ".", "status_code", "==", "200", ":", "self", ".", "data", "[", "'relay'", "]", "=", "False", "except", "requests", ".", "exceptions", ".", "ConnectionError", ":", "raise", "exceptions", ".", "MyStromConnectionError", "(", ")" ]
Check if given value is a valid host string .
def is_valid_host(value):
    """Check if the given value is a valid host string.

    A host is valid when it is an IPv4 address, an IPv6 address or a
    domain name.
    """
    return any(
        validate(value)
        for validate in (validators.ipv4, validators.ipv6, validators.domain)
    )
3,205
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/validation.py#L15-L22
[ "def", "__write_to_character_device", "(", "self", ",", "event_list", ",", "timeval", "=", "None", ")", ":", "# Remember the position of the stream", "pos", "=", "self", ".", "_character_device", ".", "tell", "(", ")", "# Go to the end of the stream", "self", ".", "_character_device", ".", "seek", "(", "0", ",", "2", ")", "# Write the new data to the end", "for", "event", "in", "event_list", ":", "self", ".", "_character_device", ".", "write", "(", "event", ")", "# Add a sync marker", "sync", "=", "self", ".", "create_event_object", "(", "\"Sync\"", ",", "0", ",", "0", ",", "timeval", ")", "self", ".", "_character_device", ".", "write", "(", "sync", ")", "# Put the stream back to its original position", "self", ".", "_character_device", ".", "seek", "(", "pos", ")" ]
Check if given value is a valid URL string .
def is_valid_url(value):
    """Check if the given value is a valid URL string.

    :param value: the string to validate.
    :returns: True if the value matches URL_REGEX and its hostname is a
        valid host, False otherwise.
    """
    match = URL_REGEX.match(value)
    if not match:
        # Return a real boolean instead of leaking None / the match
        # object to callers (the original returned whatever the `and`
        # expression evaluated to).
        return False
    host_str = urlparse(value).hostname
    return bool(is_valid_host(host_str))
3,206
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/validation.py#L33-L41
[ "def", "merge_cts_records", "(", "file_name", ",", "crypto_idfp", ",", "crypto_idfps", ")", ":", "db", "=", "XonoticDB", ".", "load_path", "(", "file_name", ")", "db", ".", "merge_cts_records", "(", "crypto_idfp", ",", "crypto_idfps", ")", "db", ".", "save", "(", "file_name", ")" ]
Return a wrapper that runs given method only for valid hosts .
def accepts_valid_host(func):
    """Return a wrapper that runs the given method only for valid hosts."""
    @functools.wraps(func)
    def wrapper(obj, value, *args, **kwargs):
        """Run the function and return a value for a valid host.

        :param obj: an object in whose class the func is defined
        :param value: a value expected to be a valid host string
        :returns: a return value of the function func
        :raises InvalidHostError: if the value is not valid
        """
        if is_valid_host(value):
            return func(obj, value, *args, **kwargs)
        raise InvalidHostError
    return wrapper
3,207
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/validation.py#L44-L62
[ "def", "parse_sentence", "(", "obj", ":", "dict", ")", "->", "BioCSentence", ":", "sentence", "=", "BioCSentence", "(", ")", "sentence", ".", "offset", "=", "obj", "[", "'offset'", "]", "sentence", ".", "infons", "=", "obj", "[", "'infons'", "]", "sentence", ".", "text", "=", "obj", "[", "'text'", "]", "for", "annotation", "in", "obj", "[", "'annotations'", "]", ":", "sentence", ".", "add_annotation", "(", "parse_annotation", "(", "annotation", ")", ")", "for", "relation", "in", "obj", "[", "'relations'", "]", ":", "sentence", ".", "add_relation", "(", "parse_relation", "(", "relation", ")", ")", "return", "sentence" ]
Return a wrapper that runs given method only for valid URLs .
def accepts_valid_urls(func):
    """Return a wrapper that runs the given method only for valid URLs."""
    @functools.wraps(func)
    def wrapper(obj, urls, *args, **kwargs):
        """Run the function and return a value for valid URLs.

        :param obj: an object in whose class f is defined
        :param urls: an iterable containing URLs
        :returns: a return value of the function f
        :raises InvalidURLError: if the iterable contains invalid URLs
        """
        rejected = [u for u in urls if not is_valid_url(u)]
        if rejected:
            msg = 'The values: {} are not valid URLs'.format(','.join(rejected))
            raise InvalidURLError(msg)
        return func(obj, urls, *args, **kwargs)
    return wrapper
3,208
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/validation.py#L65-L86
[ "def", "parse_sentence", "(", "obj", ":", "dict", ")", "->", "BioCSentence", ":", "sentence", "=", "BioCSentence", "(", ")", "sentence", ".", "offset", "=", "obj", "[", "'offset'", "]", "sentence", ".", "infons", "=", "obj", "[", "'infons'", "]", "sentence", ".", "text", "=", "obj", "[", "'text'", "]", "for", "annotation", "in", "obj", "[", "'annotations'", "]", ":", "sentence", ".", "add_annotation", "(", "parse_annotation", "(", "annotation", ")", ")", "for", "relation", "in", "obj", "[", "'relations'", "]", ":", "sentence", ".", "add_relation", "(", "parse_relation", "(", "relation", ")", ")", "return", "sentence" ]
Returns the Constant at index raising a KeyError if it does not exist .
def get(self, index):
    """Return the Constant at *index*, raising a KeyError if it does
    not exist.

    Pool entries are lazily promoted: a raw ``(tag, *operands)`` tuple
    is turned into a typed Constant on first access and cached back
    into the pool.
    """
    constant = self._pool[index]
    if isinstance(constant, Constant):
        return constant
    promoted = _constant_types[constant[0]](self, index, *constant[1:])
    self._pool[index] = promoted
    return promoted
3,209
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/constants.py#L352-L361
[ "def", "variant", "(", "samples", ")", ":", "checkpoints", "=", "_variant_checkpoints", "(", "samples", ")", "if", "checkpoints", "[", "\"align\"", "]", ":", "align_wf", "=", "_alignment", "(", "checkpoints", ")", "alignin", "=", "[", "[", "\"files\"", "]", ",", "[", "\"analysis\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"align_split_size\"", "]", ",", "[", "\"reference\"", ",", "\"fasta\"", ",", "\"base\"", "]", ",", "[", "\"rgnames\"", ",", "\"pl\"", "]", ",", "[", "\"rgnames\"", ",", "\"sample\"", "]", ",", "[", "\"rgnames\"", ",", "\"pu\"", "]", ",", "[", "\"rgnames\"", ",", "\"lane\"", "]", ",", "[", "\"rgnames\"", ",", "\"rg\"", "]", ",", "[", "\"rgnames\"", ",", "\"lb\"", "]", ",", "[", "\"reference\"", ",", "\"aligner\"", ",", "\"indexes\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"aligner\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"trim_reads\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"adapters\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"bam_clean\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"variant_regions\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"mark_duplicates\"", "]", "]", "if", "checkpoints", "[", "\"hla\"", "]", ":", "alignin", ".", "append", "(", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"hlacaller\"", "]", ")", "if", "checkpoints", "[", "\"umi\"", "]", ":", "alignin", ".", "append", "(", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"umi_type\"", "]", ")", "align", "=", "[", "s", "(", "\"alignment_to_rec\"", ",", "\"multi-combined\"", ",", "alignin", ",", "[", "cwlout", "(", "\"alignment_rec\"", ",", "\"record\"", ")", "]", ",", "\"bcbio-vc\"", ",", "disk", "=", "{", "\"files\"", ":", "1.5", "}", ",", "cores", "=", "1", ",", "no_files", "=", "True", ")", ",", "w", "(", "\"alignment\"", ",", "\"multi-parallel\"", ",", "align_wf", ",", "[", "[", "\"align_split\"", "]", ",", "[", "\"process_alignment_rec\"", 
"]", ",", "[", "\"work_bam\"", "]", ",", "[", "\"config\"", ",", "\"algorithm\"", ",", "\"quality_format\"", "]", "]", ")", "]", "else", ":", "align", "=", "[", "s", "(", "\"organize_noalign\"", ",", "\"multi-parallel\"", ",", "[", "\"files\"", "]", ",", "[", "cwlout", "(", "[", "\"align_bam\"", "]", ",", "[", "\"File\"", ",", "\"null\"", "]", ",", "[", "\".bai\"", "]", ")", ",", "cwlout", "(", "[", "\"work_bam_plus\"", ",", "\"disc\"", "]", ",", "[", "\"File\"", ",", "\"null\"", "]", ")", ",", "cwlout", "(", "[", "\"work_bam_plus\"", ",", "\"sr\"", "]", ",", "[", "\"File\"", ",", "\"null\"", "]", ")", ",", "cwlout", "(", "[", "\"hla\"", ",", "\"fastq\"", "]", ",", "[", "\"File\"", ",", "\"null\"", "]", ")", "]", ",", "\"bcbio-vc\"", ",", "cores", "=", "1", ")", "]", "align_out", "=", "[", "[", "\"rgnames\"", ",", "\"sample\"", "]", ",", "[", "\"align_bam\"", "]", "]", "pp_align", ",", "pp_align_out", "=", "_postprocess_alignment", "(", "checkpoints", ")", "if", "checkpoints", "[", "\"umi\"", "]", ":", "align_out", "+=", "[", "[", "\"umi_bam\"", "]", "]", "vc", ",", "vc_out", "=", "_variant_vc", "(", "checkpoints", ")", "sv", ",", "sv_out", "=", "_variant_sv", "(", "checkpoints", ")", "hla", ",", "hla_out", "=", "_variant_hla", "(", "checkpoints", ")", "qc", ",", "qc_out", "=", "_qc_workflow", "(", "checkpoints", ")", "steps", "=", "align", "+", "pp_align", "+", "hla", "+", "vc", "+", "sv", "+", "qc", "final_outputs", "=", "align_out", "+", "pp_align_out", "+", "vc_out", "+", "hla_out", "+", "sv_out", "+", "qc_out", "return", "steps", ",", "final_outputs" ]
Iterates over the pool yielding each matching Constant . Calling without any arguments is equivalent to iterating over the pool .
def find(self, type_=None, f=None):
    """Iterate over the pool, yielding each matching Constant.

    Calling without any arguments is equivalent to iterating over the
    pool.

    :param type_: only yield constants that are instances of this type.
    :param f: only yield constants for which ``f(constant)`` is truthy.
    """
    for constant in self:
        if type_ is not None and not isinstance(constant, type_):
            continue
        if f is None or f(constant):
            yield constant
3,210
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/constants.py#L369-L384
[ "def", "write_segment", "(", "self", ",", "objects", ")", ":", "segment", "=", "TdmsSegment", "(", "objects", ")", "segment", ".", "write", "(", "self", ".", "_file", ")" ]
Write the ConstantPool to the file - like object fout .
def pack(self, fout):
    """Write the ConstantPool to the file-like object *fout*.

    :param fout: any writable file-like object.
    """
    # Bind the write method once before the loop.
    write = fout.write
    write(pack('>H', self.raw_count))
    for constant in self:
        write(constant.pack())
3,211
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/constants.py#L583-L598
[ "def", "postprocess_segments", "(", "self", ")", ":", "# make segs a list of mask arrays, it's easier to store", "# as there is a hdf5 equivalent", "for", "iseg", ",", "seg", "in", "enumerate", "(", "self", ".", "segs", ")", ":", "mask", "=", "np", ".", "zeros", "(", "self", ".", "_adata", ".", "shape", "[", "0", "]", ",", "dtype", "=", "bool", ")", "mask", "[", "seg", "]", "=", "True", "self", ".", "segs", "[", "iseg", "]", "=", "mask", "# convert to arrays", "self", ".", "segs", "=", "np", ".", "array", "(", "self", ".", "segs", ")", "self", ".", "segs_tips", "=", "np", ".", "array", "(", "self", ".", "segs_tips", ")" ]
Checkout branch . Create it if necessary
def checkout_and_create_branch(repo, name):
    """Checkout the branch *name*, creating it if necessary."""
    local_branch = repo.branches[name] if name in repo.branches else None
    if not local_branch:
        if name in repo.remotes.origin.refs:
            # If the origin branch exists but not the local one,
            # git.checkout is the fastest way to create a local branch
            # linked to origin automatically.
            msg = repo.git.checkout(name)
            _LOGGER.debug(msg)
            return
        # Create the local branch; it will be linked to origin later.
        local_branch = repo.create_head(name)
    local_branch.checkout()
3,212
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/git_tools.py#L9-L21
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Checkout this branch . Create it if necessary and push it to origin .
def checkout_create_push_branch(repo, name):
    """Checkout this branch, creating it if necessary, and push it to
    origin.
    """
    try:
        repo.git.checkout(name)
        _LOGGER.info("Checkout %s success", name)
    except GitCommandError:
        _LOGGER.info(
            "Checkout %s was impossible (branch does not exist). Creating it and push it.",
            name)
        checkout_and_create_branch(repo, name)
        repo.git.push('origin', name, set_upstream=True)
3,213
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/git_tools.py#L23-L32
[ "def", "rows", "(", "self", ")", ":", "# We use DataFrames for serialization of IndexedRows from", "# Java, so we first convert the RDD of rows to a DataFrame", "# on the Scala/Java side. Then we map each Row in the", "# DataFrame back to an IndexedRow on this side.", "rows_df", "=", "callMLlibFunc", "(", "\"getIndexedRows\"", ",", "self", ".", "_java_matrix_wrapper", ".", "_java_model", ")", "rows", "=", "rows_df", ".", "rdd", ".", "map", "(", "lambda", "row", ":", "IndexedRow", "(", "row", "[", "0", "]", ",", "row", "[", "1", "]", ")", ")", "return", "rows" ]
Get the SHA1 of the current repo
def get_repo_hexsha(git_folder):
    """Get the SHA1 of the HEAD commit of the repo at *git_folder*.

    Returns the placeholder "notgitrepo" for bare repositories.
    """
    repository = Repo(str(git_folder))
    if not repository.bare:
        head_sha = repository.head.commit.hexsha
        _LOGGER.info("Found REST API repo SHA1: %s", head_sha)
        return head_sha
    fallback_sha = "notgitrepo"
    _LOGGER.warning("Not a git repo, SHA1 used will be: %s", fallback_sha)
    return fallback_sha
3,214
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/git_tools.py#L49-L58
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Fetch the refspec and checkout FETCH_HEAD . Beware that you will be in detached head mode .
def checkout_with_fetch(git_folder, refspec, repository="origin"):
    """Fetch the refspec and checkout FETCH_HEAD.

    Beware that you will be in detached-head mode afterwards.
    """
    _LOGGER.info("Trying to fetch and checkout %s", refspec)
    local_repo = Repo(str(git_folder))
    local_repo.git.fetch(repository, refspec)  # FETCH_HEAD should be set
    local_repo.git.checkout("FETCH_HEAD")
    _LOGGER.info("Fetch and checkout success for %s", refspec)
3,215
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/git_tools.py#L60-L68
[ "def", "remove_users_from_organization", "(", "self", ",", "organization_id", ",", "users_list", ")", ":", "log", ".", "warning", "(", "'Removing users...'", ")", "url", "=", "'rest/servicedeskapi/organization/{}/user'", ".", "format", "(", "organization_id", ")", "data", "=", "{", "'usernames'", ":", "users_list", "}", "return", "self", ".", "delete", "(", "url", ",", "headers", "=", "self", ".", "experimental_headers", ",", "data", "=", "data", ")" ]
Clone the given URL to the folder .
def clone_to_path(https_authenticated_url, folder, branch_or_commit=None):
    """Clone the given URL into *folder*, optionally checking out
    *branch_or_commit* afterwards.
    """
    _LOGGER.info("Cloning repo")
    repo = Repo.clone_from(https_authenticated_url, str(folder))
    # Do NOT clone and set the branch at the same time: branch_or_commit
    # may be a SHA1, and you can't clone a SHA1 directly.
    if branch_or_commit:
        _LOGGER.info("Checkout branch_or_commit %s", branch_or_commit)
        repo.git.checkout(branch_or_commit)
    _LOGGER.info("Clone success")
3,216
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/git_tools.py#L70-L83
[ "def", "run_mapper", "(", "self", ",", "stdin", "=", "sys", ".", "stdin", ",", "stdout", "=", "sys", ".", "stdout", ")", ":", "self", ".", "init_hadoop", "(", ")", "self", ".", "init_mapper", "(", ")", "outputs", "=", "self", ".", "_map_input", "(", "(", "line", "[", ":", "-", "1", "]", "for", "line", "in", "stdin", ")", ")", "if", "self", ".", "reducer", "==", "NotImplemented", ":", "self", ".", "writer", "(", "outputs", ",", "stdout", ")", "else", ":", "self", ".", "internal_writer", "(", "outputs", ",", "stdout", ")" ]
List of files changed in the given commit ( HEAD by default ) .
def get_files_in_commit(git_folder, commit_id="HEAD"):
    """Return the list of files changed by the given commit (HEAD by
    default), diffing it against its first parent.
    """
    repo = Repo(str(git_folder))
    changed = repo.git.diff("--name-only", commit_id + "^", commit_id)
    return changed.splitlines()
3,217
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/git_tools.py#L85-L90
[ "def", "get_sleep_timer", "(", "self", ")", ":", "resp", "=", "self", ".", "avTransport", ".", "GetRemainingSleepTimerDuration", "(", "[", "(", "'InstanceID'", ",", "0", ")", ",", "]", ")", "if", "resp", "[", "'RemainingSleepTimerDuration'", "]", ":", "times", "=", "resp", "[", "'RemainingSleepTimerDuration'", "]", ".", "split", "(", "':'", ")", "return", "(", "int", "(", "times", "[", "0", "]", ")", "*", "3600", "+", "int", "(", "times", "[", "1", "]", ")", "*", "60", "+", "int", "(", "times", "[", "2", "]", ")", ")", "else", ":", "return", "None" ]
extract values from query
def parse_values(self, query):
    """Extract filter values from *query*.

    :returns: a dict mapping filter name to its parsed value; filters
        whose value parses to None are omitted.
    """
    values = {}
    for name, filt in self.filters.items():
        parsed = filt.parse_value(query)
        if parsed is not None:
            values[name] = parsed
    return values
3,218
https://github.com/qwiglydee/drf-mongo-filters/blob/f7e397c329bac6d7b8cbb1df70d96eccdcfbc1ec/drf_mongo_filters/filtersets.py#L49-L59
[ "def", "get_configure", "(", "self", ",", "repo", "=", "None", ",", "name", "=", "None", ",", "groups", "=", "None", ",", "main_cfg", "=", "False", ")", ":", "constraints", "=", "locals", "(", ")", "del", "constraints", "[", "'main_cfg'", "]", "status", "=", "(", "True", ",", "None", ")", "template_dict", "=", "{", "}", "return_str", "=", "''", "if", "main_cfg", ":", "vent_cfg", "=", "Template", "(", "self", ".", "vent_config", ")", "for", "section", "in", "vent_cfg", ".", "sections", "(", ")", "[", "1", "]", ":", "template_dict", "[", "section", "]", "=", "{", "}", "for", "vals", "in", "vent_cfg", ".", "section", "(", "section", ")", "[", "1", "]", ":", "template_dict", "[", "section", "]", "[", "vals", "[", "0", "]", "]", "=", "vals", "[", "1", "]", "else", ":", "# all possible vent.template options stored in plugin_manifest", "options", "=", "[", "'info'", ",", "'service'", ",", "'settings'", ",", "'docker'", ",", "'gpu'", "]", "tools", "=", "Template", "(", "System", "(", ")", ".", "manifest", ")", ".", "constrain_opts", "(", "constraints", ",", "options", ")", "[", "0", "]", "if", "tools", ":", "# should only be one tool", "tool", "=", "list", "(", "tools", ".", "keys", "(", ")", ")", "[", "0", "]", "# load all vent.template options into dict", "for", "section", "in", "tools", "[", "tool", "]", ":", "template_dict", "[", "section", "]", "=", "json", ".", "loads", "(", "tools", "[", "tool", "]", "[", "section", "]", ")", "else", ":", "status", "=", "(", "False", ",", "\"Couldn't get vent.template information\"", ")", "if", "status", "[", "0", "]", ":", "# display all those options as they would in the file", "for", "section", "in", "template_dict", ":", "return_str", "+=", "'['", "+", "section", "+", "']\\n'", "# ensure instances shows up in configuration", "for", "option", "in", "template_dict", "[", "section", "]", ":", "if", "option", ".", "startswith", "(", "'#'", ")", ":", "return_str", "+=", "option", "+", "'\\n'", "else", ":", "return_str", "+=", 
"option", "+", "' = '", "return_str", "+=", "template_dict", "[", "section", "]", "[", "option", "]", "+", "'\\n'", "return_str", "+=", "'\\n'", "# only one newline at end of file", "status", "=", "(", "True", ",", "return_str", "[", ":", "-", "1", "]", ")", "return", "status" ]
convert values to filtering params and apply to queryset
def filter_queryset(self, queryset):
    """Convert parsed values to filtering params and apply them to the
    queryset.

    :param queryset: the queryset to filter.
    :returns: the (possibly) filtered queryset.
    """
    for name, filt in self.filters.items():
        val = self.values.get(name, None)
        # BUG FIX: the guard used to test ``name is None``, which is
        # never true for a dict key, so filter_params(None) was called
        # for filters that parsed no value. Skip unset filters instead.
        if val is None:
            continue
        params = filt.filter_params(val)
        if not params:
            continue
        if isinstance(params, dict):
            queryset = queryset.filter(**params)
        if isinstance(params, QNode):
            queryset = queryset.filter(params)
    return queryset
3,219
https://github.com/qwiglydee/drf-mongo-filters/blob/f7e397c329bac6d7b8cbb1df70d96eccdcfbc1ec/drf_mongo_filters/filtersets.py#L61-L76
[ "def", "configure_visual_directories", "(", "cls", ",", "driver_info", ")", ":", "if", "cls", ".", "screenshots_directory", "is", "None", ":", "# Unique screenshots and videos directories", "date", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%Y-%m-%d_%H%M%S'", ")", "folder_name", "=", "'%s_%s'", "%", "(", "date", ",", "driver_info", ")", "if", "driver_info", "else", "date", "folder_name", "=", "get_valid_filename", "(", "folder_name", ")", "cls", ".", "screenshots_directory", "=", "os", ".", "path", ".", "join", "(", "cls", ".", "output_directory", ",", "'screenshots'", ",", "folder_name", ")", "cls", ".", "screenshots_number", "=", "1", "cls", ".", "videos_directory", "=", "os", ".", "path", ".", "join", "(", "cls", ".", "output_directory", ",", "'videos'", ",", "folder_name", ")", "cls", ".", "logs_directory", "=", "os", ".", "path", ".", "join", "(", "cls", ".", "output_directory", ",", "'logs'", ",", "folder_name", ")", "cls", ".", "videos_number", "=", "1", "# Unique visualtests directories", "cls", ".", "visual_output_directory", "=", "os", ".", "path", ".", "join", "(", "cls", ".", "output_directory", ",", "'visualtests'", ",", "folder_name", ")", "cls", ".", "visual_number", "=", "1" ]
Image url of current playing media .
def media_image_url(self):
    """Image url of the currently playing media, or None.

    Prefers the 'Thumb' image tag, falling back to 'Primary'.
    """
    if not self.is_nowplaying:
        return None
    base = self.server.construct_url(API_URL)
    try:
        image_type = 'Thumb'
        image_id = self.session['NowPlayingItem']['ImageTags']['Thumb']
    except KeyError:
        try:
            image_type = 'Primary'
            image_id = self.session['NowPlayingItem']['ImageTags']['Primary']
        except KeyError:
            return None
    return '{0}/Items/{1}/Images/{2}?width=500&tag={3}&api_key={4}'.format(
        base, self.media_id, image_type, image_id, self.server.api_key)
3,220
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/device.py#L178-L196
[ "def", "use_options", "(", "self", ",", "options", ",", "extractor", "=", "None", ")", ":", "# Extract if necessary", "if", "not", "extractor", ":", "extracted", "=", "options", "else", ":", "extracted", "=", "extractor", "(", "self", ".", "template", ",", "options", ")", "# Get values as [(key, val), ...]", "if", "isinstance", "(", "extracted", ",", "dict", ")", ":", "extracted", "=", "extracted", ".", "items", "(", ")", "# Add our values if there are any", "# Normalising the keys as we go along", "if", "extracted", "is", "not", "None", ":", "for", "key", ",", "val", "in", "extracted", ":", "self", ".", "values", "[", "self", ".", "normalise_key", "(", "key", ")", "]", "=", "val" ]
Return current playstate of the device .
def state ( self ) : if self . is_active : if 'NowPlayingItem' in self . session : if self . session [ 'PlayState' ] [ 'IsPaused' ] : return STATE_PAUSED else : return STATE_PLAYING else : return STATE_IDLE else : return STATE_OFF
3,221
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/device.py#L224-L235
[ "def", "_createLink", "(", "self", ",", "linkResult", ",", "replaceParamFile", ")", ":", "link", "=", "None", "# Cases", "if", "linkResult", "[", "'type'", "]", "==", "'XSEC'", ":", "# Cross section link handler", "link", "=", "self", ".", "_createCrossSection", "(", "linkResult", ",", "replaceParamFile", ")", "elif", "linkResult", "[", "'type'", "]", "==", "'STRUCTURE'", ":", "# Structure link handler", "link", "=", "self", ".", "_createStructure", "(", "linkResult", ",", "replaceParamFile", ")", "elif", "linkResult", "[", "'type'", "]", "in", "(", "'RESERVOIR'", ",", "'LAKE'", ")", ":", "# Reservoir/lake handler", "link", "=", "self", ".", "_createReservoir", "(", "linkResult", ",", "replaceParamFile", ")", "return", "link" ]
Send media commands to server .
async def set_playstate ( self , state , pos = 0 ) : url = '{}/Sessions/{}/Playing/{}' . format ( self . server . construct_url ( API_URL ) , self . session_id , state ) params = { 'api_key' : self . server . api_key } if state == 'seek' : params [ 'SeekPositionTicks' ] = int ( pos * 10000000 ) params [ 'static' ] = 'true' _LOGGER . debug ( 'Playstate URL: %s' , url ) post = await self . server . api_post ( url , params ) if post is None : _LOGGER . debug ( 'Error sending command.' ) else : _LOGGER . debug ( 'Post response: %s' , post )
3,222
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/device.py#L250-L266
[ "def", "setOverlayTextureColorSpace", "(", "self", ",", "ulOverlayHandle", ",", "eTextureColorSpace", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTextureColorSpace", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTextureColorSpace", ")", "return", "result" ]
Create and immediately drop into a Python shell .
def start_shell ( local_ns : Dict = None , banner : str = '' ) : if IPYTHON_SHELL_AVAILABLE : # Don't try to stop IPython from displaying its banner, since # it's different in every major version terminal = embed . InteractiveShellEmbed ( user_ns = { } ) terminal . mainloop ( local_ns = local_ns ) else : code . interact ( banner = banner , local = local_ns )
3,223
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/util/shell.py#L13-L29
[ "def", "catread", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "catread", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "2", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "r1", ",", "r2", "=", "args", "p1fp", ",", "p2fp", "=", "FastqPairedIterator", "(", "r1", ",", "r2", ")", "outfile", "=", "pairspf", "(", "(", "r1", ",", "r2", ")", ")", "+", "\".cat.fastq\"", "fw", "=", "must_open", "(", "outfile", ",", "\"w\"", ")", "while", "True", ":", "a", "=", "list", "(", "islice", "(", "p1fp", ",", "4", ")", ")", "if", "not", "a", ":", "break", "atitle", ",", "aseq", ",", "_", ",", "aqual", "=", "a", "btitle", ",", "bseq", ",", "_", ",", "bqual", "=", "list", "(", "islice", "(", "p2fp", ",", "4", ")", ")", "print", "(", "\"\\n\"", ".", "join", "(", "(", "atitle", ".", "strip", "(", ")", ",", "aseq", ".", "strip", "(", ")", "+", "bseq", ".", "strip", "(", ")", ",", "\"+\"", ",", "aqual", ".", "strip", "(", ")", "+", "bqual", ".", "strip", "(", ")", ")", ")", ",", "file", "=", "fw", ")" ]
Replace CONSTANT_INDEX operands with the literal Constant object from the constant pool .
def expand_constants ( ins : Instruction , * , cf ) -> Instruction : for i , operand in enumerate ( ins . operands ) : if not isinstance ( operand , Operand ) : continue if operand . op_type == OperandTypes . CONSTANT_INDEX : ins . operands [ i ] = cf . constants [ operand . value ] return ins
3,224
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/transforms.py#L9-L24
[ "def", "on_end_validation", "(", "self", ",", "event", ")", ":", "self", ".", "Enable", "(", ")", "self", ".", "Show", "(", ")", "self", ".", "magic_gui_frame", ".", "Destroy", "(", ")" ]
Replaces one instruction with another based on the transform rules in the bytecode definitions . This can help simplify your code as it reduces the overall number of instructions . For example aload_0 will become aload 0 .
def simple_swap ( ins : Instruction ) -> Instruction : try : rule = ins . details [ 'transform' ] [ 'simple_swap' ] except KeyError : return ins replacement_ins = opcode_table [ rule [ 'op' ] ] return Instruction ( replacement_ins [ 'mnemonic' ] , replacement_ins [ 'op' ] , [ Operand ( replacement_ins [ 'operands' ] [ i ] [ 1 ] , r ) for i , r in enumerate ( rule [ 'operands' ] ) ] , ins . pos )
3,225
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/transforms.py#L27-L51
[ "def", "set", "(", "self", ",", "data", "=", "None", ")", ":", "self", ".", "__data", "=", "data", "self", ".", "__exception", "=", "None", "self", ".", "__event", ".", "set", "(", ")" ]
Inspect running environment for request object . There should be one but don t rely on it .
def find_request ( ) : frame = inspect . currentframe ( ) request = None f = frame while not request and f : if 'request' in f . f_locals and isinstance ( f . f_locals [ 'request' ] , HttpRequest ) : request = f . f_locals [ 'request' ] f = f . f_back del frame return request
3,226
https://github.com/samastur/Impostor/blob/1a9b1cf1568d5d657b069af5fdf882f2d9bfefce/impostor/backend.py#L15-L30
[ "def", "delete", "(", "self", ",", "cascade", "=", "False", ",", "delete_shares", "=", "False", ")", ":", "if", "self", ".", "id", ":", "self", ".", "connection", ".", "post", "(", "'delete_video'", ",", "video_id", "=", "self", ".", "id", ",", "cascade", "=", "cascade", ",", "delete_shares", "=", "delete_shares", ")", "self", ".", "id", "=", "None" ]
Create the Error view Must be instantiated
def error_view ( template_dir = None ) : if not template_dir : template_dir = "Pylot/Error" template_page = "%s/index.html" % template_dir class Error ( Pylot ) : """ Error Views """ @ classmethod def register ( cls , app , * * kwargs ) : super ( cls , cls ) . register ( app , * * kwargs ) @ app . errorhandler ( 400 ) def error_400 ( error ) : return cls . index ( error , 400 ) @ app . errorhandler ( 401 ) def error_401 ( error ) : return cls . index ( error , 401 ) @ app . errorhandler ( 403 ) def error_403 ( error ) : return cls . index ( error , 403 ) @ app . errorhandler ( 404 ) def error_404 ( error ) : return cls . index ( error , 404 ) @ app . errorhandler ( 500 ) def error_500 ( error ) : return cls . index ( error , 500 ) @ app . errorhandler ( 503 ) def error_503 ( error ) : return cls . index ( error , 503 ) @ classmethod def index ( cls , error , code ) : cls . meta_ ( title = "Error %s" % code ) return cls . render ( error = error , view_template = template_page ) , code return Error
3,227
https://github.com/mardix/pylot/blob/506a33a56ebdfc0925b94015e8cf98ccb16a143c/pylot/component/views.py#L1680-L1733
[ "def", "sensor_names", "(", "self", ")", ":", "res", "=", "self", "[", "'/attr/instrument_name'", "]", "if", "isinstance", "(", "res", ",", "np", ".", "ndarray", ")", ":", "res", "=", "str", "(", "res", ".", "astype", "(", "str", ")", ")", "res", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "res", ".", "split", "(", "','", ")", "]", "if", "len", "(", "res", ")", "==", "1", ":", "return", "res", "[", "0", "]", "return", "res" ]
Allow to create Signed object to upload to S3 via JS
def sign_s3_upload ( self ) : AWS_ACCESS_KEY = self . config_ ( 'AWS_ACCESS_KEY_ID' ) AWS_SECRET_KEY = self . config_ ( 'AWS_SECRET_ACCESS_KEY' ) S3_BUCKET = self . config_ ( 'AWS_S3_BUCKET_NAME' ) object_name = request . args . get ( 's3_object_name' ) mime_type = request . args . get ( 's3_object_type' ) expires = long ( time . time ( ) + 10 ) amz_headers = "x-amz-acl:public-read" put_request = "PUT\n\n%s\n%d\n%s\n/%s/%s" % ( mime_type , expires , amz_headers , S3_BUCKET , object_name ) signature = base64 . encodestring ( hmac . new ( AWS_SECRET_KEY , put_request , sha1 ) . digest ( ) ) signature = urllib . quote ( urllib . quote_plus ( signature . strip ( ) ) ) url = 'https://s3.amazonaws.com/%s/%s' % ( S3_BUCKET , object_name ) return jsonify ( { 'signed_request' : '%s?AWSAccessKeyId=%s&Expires=%d&Signature=%s' % ( url , AWS_ACCESS_KEY , expires , signature ) , 'url' : url } )
3,228
https://github.com/mardix/pylot/blob/506a33a56ebdfc0925b94015e8cf98ccb16a143c/pylot/component/views.py#L148-L167
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
Register as callback for when new devices are added .
def add_new_devices_callback ( self , callback ) : self . _new_devices_callbacks . append ( callback ) _LOGGER . debug ( 'Added new devices callback to %s' , callback )
3,229
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L114-L117
[ "def", "expire", "(", "self", ",", "key", ",", "timeout", ")", ":", "return", "self", ".", "_execute", "(", "[", "b'EXPIRE'", ",", "key", ",", "ascii", "(", "timeout", ")", ".", "encode", "(", "'ascii'", ")", "]", ",", "1", ")" ]
Register as callback for when stale devices exist .
def add_stale_devices_callback ( self , callback ) : self . _stale_devices_callbacks . append ( callback ) _LOGGER . debug ( 'Added stale devices callback to %s' , callback )
3,230
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L125-L128
[ "def", "egress", "(", "self", ",", "envelope", ",", "http_headers", ",", "operation", ",", "binding_options", ")", ":", "if", "self", ".", "_logger", ".", "isEnabledFor", "(", "logging", ".", "INFO", ")", ":", "service_name", "=", "operation", ".", "binding", ".", "wsdl", ".", "services", ".", "keys", "(", ")", "[", "0", "]", "self", ".", "_logger", ".", "info", "(", "_REQUEST_LOG_LINE", ",", "service_name", ",", "operation", ".", "name", ",", "binding_options", "[", "'address'", "]", ")", "if", "self", ".", "_logger", ".", "isEnabledFor", "(", "logging", ".", "DEBUG", ")", ":", "http_headers_safe", "=", "http_headers", ".", "copy", "(", ")", "if", "self", ".", "_AUTHORIZATION_HEADER", "in", "http_headers_safe", ":", "http_headers_safe", "[", "self", ".", "_AUTHORIZATION_HEADER", "]", "=", "self", ".", "_REDACTED", "request_string", "=", "etree", ".", "tostring", "(", "envelope", ",", "pretty_print", "=", "True", ")", "safe_request", "=", "self", ".", "_DEVELOPER_TOKEN_SUB", ".", "sub", "(", "self", ".", "_REDACTED", ",", "request_string", ".", "decode", "(", "'utf-8'", ")", ")", "self", ".", "_logger", ".", "debug", "(", "_REQUEST_XML_LOG_LINE", ",", "http_headers_safe", ",", "safe_request", ")", "return", "envelope", ",", "http_headers" ]
Register as callback for when a matching device changes .
def add_update_callback ( self , callback , device ) : self . _update_callbacks . append ( [ callback , device ] ) _LOGGER . debug ( 'Added update callback to %s on %s' , callback , device )
3,231
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L136-L139
[ "def", "get_max_size", "(", "pool", ",", "num_option", ",", "item_length", ")", ":", "max_items", "=", "POOL_SIZE", "/", "item_length", "# existing items plus the reserved for min size. If there is an option has 1 item, POOL_OPTION_MIN_SIZE - 1 space", "# is reserved.", "existing", "=", "POOL_OPTION_MIN_SIZE", "*", "num_option", "+", "sum", "(", "[", "max", "(", "0", ",", "len", "(", "pool", ".", "get", "(", "i", ",", "{", "}", ")", ")", "-", "5", ")", "for", "i", "in", "xrange", "(", "num_option", ")", "]", ")", "return", "int", "(", "max_items", "-", "existing", ")" ]
Remove a registered update callback .
def remove_update_callback ( self , callback , device ) : if [ callback , device ] in self . _update_callbacks : self . _update_callbacks . remove ( [ callback , device ] ) _LOGGER . debug ( 'Removed update callback %s for %s' , callback , device )
3,232
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L141-L146
[ "def", "write_FORCE_CONSTANTS", "(", "force_constants", ",", "filename", "=", "'FORCE_CONSTANTS'", ",", "p2s_map", "=", "None", ")", ":", "lines", "=", "get_FORCE_CONSTANTS_lines", "(", "force_constants", ",", "p2s_map", "=", "p2s_map", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "w", ":", "w", ".", "write", "(", "\"\\n\"", ".", "join", "(", "lines", ")", ")" ]
Public method for initiating connectivity with the emby server .
def start ( self ) : asyncio . ensure_future ( self . register ( ) , loop = self . _event_loop ) if self . _own_loop : _LOGGER . info ( "Starting up our own event loop." ) self . _event_loop . run_forever ( ) self . _event_loop . close ( ) _LOGGER . info ( "Connection shut down." )
3,233
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L156-L164
[ "def", "setOverlayTextureColorSpace", "(", "self", ",", "ulOverlayHandle", ",", "eTextureColorSpace", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTextureColorSpace", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTextureColorSpace", ")", "return", "result" ]
Async method for stopping connectivity with the emby server .
async def stop ( self ) : self . _shutdown = True if self . wsck : _LOGGER . info ( 'Closing Emby server websocket.' ) await self . wsck . close ( ) self . wsck = None if self . _own_loop : _LOGGER . info ( "Shutting down Emby server loop..." ) self . _event_loop . call_soon_threadsafe ( self . _event_loop . stop )
3,234
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L166-L177
[ "def", "populateFromDirectory", "(", "self", ",", "vcfDirectory", ")", ":", "pattern", "=", "os", ".", "path", ".", "join", "(", "vcfDirectory", ",", "\"*.vcf.gz\"", ")", "dataFiles", "=", "[", "]", "indexFiles", "=", "[", "]", "for", "vcfFile", "in", "glob", ".", "glob", "(", "pattern", ")", ":", "dataFiles", ".", "append", "(", "vcfFile", ")", "indexFiles", ".", "append", "(", "vcfFile", "+", "\".tbi\"", ")", "self", ".", "populateFromFile", "(", "dataFiles", ",", "indexFiles", ")" ]
Register library device id and get initial device list .
async def register ( self ) : url = '{}/Sessions' . format ( self . construct_url ( API_URL ) ) params = { 'api_key' : self . _api_key } reg = await self . api_request ( url , params ) if reg is None : self . _registered = False _LOGGER . error ( 'Unable to register emby client.' ) else : self . _registered = True _LOGGER . info ( 'Emby client registered!, Id: %s' , self . unique_id ) self . _sessions = reg # Build initial device list. self . update_device_list ( self . _sessions ) asyncio . ensure_future ( self . socket_connection ( ) , loop = self . _event_loop )
3,235
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L194-L211
[ "def", "_enforce_max_region_size", "(", "in_file", ",", "data", ")", ":", "max_size", "=", "20000", "overlap_size", "=", "250", "def", "_has_larger_regions", "(", "f", ")", ":", "return", "any", "(", "r", ".", "stop", "-", "r", ".", "start", ">", "max_size", "for", "r", "in", "pybedtools", ".", "BedTool", "(", "f", ")", ")", "out_file", "=", "\"%s-regionlimit%s\"", "%", "utils", ".", "splitext_plus", "(", "in_file", ")", "if", "not", "utils", ".", "file_exists", "(", "out_file", ")", ":", "if", "_has_larger_regions", "(", "in_file", ")", ":", "with", "file_transaction", "(", "data", ",", "out_file", ")", "as", "tx_out_file", ":", "pybedtools", ".", "BedTool", "(", ")", ".", "window_maker", "(", "w", "=", "max_size", ",", "s", "=", "max_size", "-", "overlap_size", ",", "b", "=", "pybedtools", ".", "BedTool", "(", "in_file", ")", ")", ".", "saveas", "(", "tx_out_file", ")", "else", ":", "utils", ".", "symlink_plus", "(", "in_file", ",", "out_file", ")", "return", "out_file" ]
Open websocket connection .
async def socket_connection ( self ) : if not self . _registered : _LOGGER . error ( 'Client not registered, cannot start socket.' ) return url = '{}?DeviceID={}&api_key={}' . format ( self . construct_url ( SOCKET_URL ) , self . _api_id , self . _api_key ) fail_count = 0 while True : _LOGGER . debug ( 'Attempting Socket Connection.' ) try : with async_timeout . timeout ( DEFAULT_TIMEOUT , loop = self . _event_loop ) : self . wsck = await self . _api_session . ws_connect ( url ) # Enable sever session updates: try : msg = await self . wsck . send_str ( '{"MessageType":"SessionsStart", "Data": "0,1500"}' ) except Exception as err : # Catch all for now _LOGGER . error ( 'Failure setting session updates: %s' , err ) raise ValueError ( 'Session updates error.' ) _LOGGER . debug ( 'Socket Connected!' ) fail_count = 0 while True : msg = await self . wsck . receive ( ) if msg . type == aiohttp . WSMsgType . text : # Process data self . process_msg ( msg . data ) elif msg . type == aiohttp . WSMsgType . closed : raise ValueError ( 'Websocket was closed.' ) elif msg . type == aiohttp . WSMsgType . error : _LOGGER . debug ( 'Websocket encountered an error: %s' , msg ) raise ValueError ( 'Websocket error.' ) except ( aiohttp . ClientError , asyncio . TimeoutError , aiohttp . WSServerHandshakeError , ConnectionRefusedError , OSError , ValueError ) as err : if not self . _shutdown : fail_count += 1 _LOGGER . debug ( 'Websocket unintentionally closed.' ' Trying reconnect in %ss. Error: %s' , ( fail_count * 5 ) + 5 , err ) await asyncio . sleep ( 15 , self . _event_loop ) continue else : break
3,236
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L255-L307
[ "def", "download_storyitem", "(", "self", ",", "item", ":", "StoryItem", ",", "target", ":", "str", ")", "->", "bool", ":", "date_local", "=", "item", ".", "date_local", "dirname", "=", "_PostPathFormatter", "(", "item", ")", ".", "format", "(", "self", ".", "dirname_pattern", ",", "target", "=", "target", ")", "filename", "=", "dirname", "+", "'/'", "+", "self", ".", "format_filename", "(", "item", ",", "target", "=", "target", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ",", "exist_ok", "=", "True", ")", "downloaded", "=", "False", "if", "not", "item", ".", "is_video", "or", "self", ".", "download_video_thumbnails", "is", "True", ":", "url", "=", "item", ".", "url", "downloaded", "=", "self", ".", "download_pic", "(", "filename", "=", "filename", ",", "url", "=", "url", ",", "mtime", "=", "date_local", ")", "if", "item", ".", "is_video", "and", "self", ".", "download_videos", "is", "True", ":", "downloaded", "|=", "self", ".", "download_pic", "(", "filename", "=", "filename", ",", "url", "=", "item", ".", "video_url", ",", "mtime", "=", "date_local", ")", "# Save caption if desired", "metadata_string", "=", "_ArbitraryItemFormatter", "(", "item", ")", ".", "format", "(", "self", ".", "storyitem_metadata_txt_pattern", ")", ".", "strip", "(", ")", "if", "metadata_string", ":", "self", ".", "save_caption", "(", "filename", "=", "filename", ",", "mtime", "=", "item", ".", "date_local", ",", "caption", "=", "metadata_string", ")", "# Save metadata as JSON if desired.", "if", "self", ".", "save_metadata", "is", "not", "False", ":", "self", ".", "save_metadata_json", "(", "filename", ",", "item", ")", "self", ".", "context", ".", "log", "(", ")", "return", "downloaded" ]
Process messages from the event stream .
def process_msg ( self , msg ) : jmsg = json . loads ( msg ) msgtype = jmsg [ 'MessageType' ] msgdata = jmsg [ 'Data' ] _LOGGER . debug ( 'New websocket message recieved of type: %s' , msgtype ) if msgtype == 'Sessions' : self . _sessions = msgdata # Check for new devices and update as needed. self . update_device_list ( self . _sessions ) """ May process other message types in the future. Other known types are: - PlaybackStarted - PlaybackStopped - SessionEnded """
3,237
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L309-L326
[ "def", "actually_mount", "(", "self", ",", "client", ")", ":", "a_obj", "=", "self", ".", "config", ".", "copy", "(", ")", "if", "'description'", "in", "a_obj", ":", "del", "a_obj", "[", "'description'", "]", "try", ":", "m_fun", "=", "getattr", "(", "client", ",", "self", ".", "mount_fun", ")", "if", "self", ".", "description", "and", "a_obj", ":", "m_fun", "(", "self", ".", "backend", ",", "mount_point", "=", "self", ".", "path", ",", "description", "=", "self", ".", "description", ",", "config", "=", "a_obj", ")", "elif", "self", ".", "description", ":", "m_fun", "(", "self", ".", "backend", ",", "mount_point", "=", "self", ".", "path", ",", "description", "=", "self", ".", "description", ")", "elif", "a_obj", ":", "m_fun", "(", "self", ".", "backend", ",", "mount_point", "=", "self", ".", "path", ",", "config", "=", "a_obj", ")", "else", ":", "m_fun", "(", "self", ".", "backend", ",", "mount_point", "=", "self", ".", "path", ")", "except", "hvac", ".", "exceptions", ".", "InvalidRequest", "as", "exception", ":", "match", "=", "re", ".", "match", "(", "'existing mount at (?P<path>.+)'", ",", "str", "(", "exception", ")", ")", "if", "match", ":", "e_msg", "=", "\"%s has a mountpoint conflict with %s\"", "%", "(", "self", ".", "path", ",", "match", ".", "group", "(", "'path'", ")", ")", "raise", "aomi_excep", ".", "VaultConstraint", "(", "e_msg", ")", "else", ":", "raise" ]
Update device list .
def update_device_list ( self , sessions ) : if sessions is None : _LOGGER . error ( 'Error updating Emby devices.' ) return new_devices = [ ] active_devices = [ ] dev_update = False for device in sessions : dev_name = '{}.{}' . format ( device [ 'DeviceId' ] , device [ 'Client' ] ) try : _LOGGER . debug ( 'Session msg on %s of type: %s, themeflag: %s' , dev_name , device [ 'NowPlayingItem' ] [ 'Type' ] , device [ 'NowPlayingItem' ] [ 'IsThemeMedia' ] ) except KeyError : pass active_devices . append ( dev_name ) if dev_name not in self . _devices and device [ 'DeviceId' ] != str ( self . _api_id ) : _LOGGER . debug ( 'New Emby DeviceID: %s. Adding to device list.' , dev_name ) new = EmbyDevice ( device , self ) self . _devices [ dev_name ] = new new_devices . append ( new ) elif device [ 'DeviceId' ] != str ( self . _api_id ) : # Before we send in new data check for changes to state # to decide if we need to fire the update callback if not self . _devices [ dev_name ] . is_active : # Device wasn't active on the last update # We need to fire a device callback to let subs now dev_update = True do_update = self . update_check ( self . _devices [ dev_name ] , device ) self . _devices [ dev_name ] . update_data ( device ) self . _devices [ dev_name ] . set_active ( True ) if dev_update : self . _do_new_devices_callback ( 0 ) dev_update = False if do_update : self . _do_update_callback ( dev_name ) # Need to check for new inactive devices and flag for dev_id in self . _devices : if dev_id not in active_devices : # Device no longer active if self . _devices [ dev_id ] . is_active : self . _devices [ dev_id ] . set_active ( False ) self . _do_update_callback ( dev_id ) self . _do_stale_devices_callback ( dev_id ) # Call device callback if new devices were found. if new_devices : self . _do_new_devices_callback ( 0 )
3,238
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L328-L384
[ "def", "cublasSgbmv", "(", "handle", ",", "trans", ",", "m", ",", "n", ",", "kl", ",", "ku", ",", "alpha", ",", "A", ",", "lda", ",", "x", ",", "incx", ",", "beta", ",", "y", ",", "incy", ")", ":", "status", "=", "_libcublas", ".", "cublasSgbmv_v2", "(", "handle", ",", "trans", ",", "m", ",", "n", ",", "kl", ",", "ku", ",", "ctypes", ".", "byref", "(", "ctypes", ".", "c_float", "(", "alpha", ")", ")", ",", "int", "(", "A", ")", ",", "lda", ",", "int", "(", "x", ")", ",", "incx", ",", "ctypes", ".", "byref", "(", "ctypes", ".", "c_float", "(", "beta", ")", ")", ",", "int", "(", "y", ")", ",", "incy", ")", "cublasCheckStatus", "(", "status", ")" ]
Check device state to see if we need to fire the callback .
def update_check ( self , existing , new ) : old_state = existing . state if 'NowPlayingItem' in existing . session_raw : try : old_theme = existing . session_raw [ 'NowPlayingItem' ] [ 'IsThemeMedia' ] except KeyError : old_theme = False else : old_theme = False if 'NowPlayingItem' in new : if new [ 'PlayState' ] [ 'IsPaused' ] : new_state = STATE_PAUSED else : new_state = STATE_PLAYING try : new_theme = new [ 'NowPlayingItem' ] [ 'IsThemeMedia' ] except KeyError : new_theme = False else : new_state = STATE_IDLE new_theme = False if old_theme or new_theme : return False elif old_state == STATE_PLAYING or new_state == STATE_PLAYING : return True elif old_state != new_state : return True else : return False
3,239
https://github.com/mezz64/pyEmby/blob/6bb621e4e25bf1b9b0aba2c38b588e68f8816226/pyemby/server.py#L386-L424
[ "def", "save_to_file", "(", "self", ",", "filename", ",", "remap_dim0", "=", "None", ",", "remap_dim1", "=", "None", ")", ":", "# rows - first index", "# columns - second index", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fobj", ":", "columns", "=", "list", "(", "sorted", "(", "self", ".", "_dim1", ")", ")", "for", "col", "in", "columns", ":", "fobj", ".", "write", "(", "','", ")", "fobj", ".", "write", "(", "str", "(", "remap_dim1", "[", "col", "]", "if", "remap_dim1", "else", "col", ")", ")", "fobj", ".", "write", "(", "'\\n'", ")", "for", "row", "in", "sorted", "(", "self", ".", "_dim0", ")", ":", "fobj", ".", "write", "(", "str", "(", "remap_dim0", "[", "row", "]", "if", "remap_dim0", "else", "row", ")", ")", "for", "col", "in", "columns", ":", "fobj", ".", "write", "(", "','", ")", "fobj", ".", "write", "(", "str", "(", "self", "[", "row", ",", "col", "]", ")", ")", "fobj", ".", "write", "(", "'\\n'", ")" ]
Entry point for f2format .
def main ( ) : parser = get_parser ( ) args = parser . parse_args ( ) # set up variables ARCHIVE = args . archive_path archive = ( not args . no_archive ) os . environ [ 'F2FORMAT_VERSION' ] = args . python os . environ [ 'F2FORMAT_ENCODING' ] = args . encoding def find ( root ) : """Recursively find all files under root.""" flst = list ( ) temp = os . listdir ( root ) for file in temp : path = os . path . join ( root , file ) if os . path . isdir ( path ) : flst . extend ( find ( path ) ) elif os . path . isfile ( path ) : flst . append ( path ) elif os . path . islink ( path ) : # exclude symbolic links continue yield from flst def rename ( path ) : stem , ext = os . path . splitext ( path ) name = '%s-%s%s' % ( stem , uuid . uuid4 ( ) , ext ) return os . path . join ( ARCHIVE , name ) # make archive directory if archive : os . makedirs ( ARCHIVE , exist_ok = True ) # fetch file list filelist = list ( ) for path in sys . argv [ 1 : ] : if os . path . isfile ( path ) : if archive : dest = rename ( path ) os . makedirs ( os . path . dirname ( dest ) , exist_ok = True ) shutil . copy ( path , dest ) filelist . append ( path ) if os . path . isdir ( path ) : if archive : shutil . copytree ( path , rename ( path ) ) filelist . extend ( find ( path ) ) # check if file is Python source code def ispy ( file ) : return ( os . path . isfile ( file ) and ( os . path . splitext ( file ) [ 1 ] in ( '.py' , '.pyw' ) ) ) filelist = sorted ( filter ( ispy , filelist ) ) # if no file supplied if len ( filelist ) == 0 : parser . error ( 'argument PATH: no valid source file found' ) # process files if mp is None or CPU_CNT <= 1 : [ f2format ( filename ) for filename in filelist ] else : mp . Pool ( processes = CPU_CNT ) . map ( f2format , filelist )
3,240
https://github.com/JarryShaw/f2format/blob/a144250268247ce0a98d734a26d53faadff7a6f8/src/__main__.py#L64-L124
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
A utility which sets up reasonable defaults for a new public class .
def create ( cls , this : str , super_ : str = u'java/lang/Object' ) -> 'ClassFile' : cf = ClassFile ( ) cf . access_flags . acc_public = True cf . access_flags . acc_super = True cf . this = cf . constants . create_class ( this ) cf . super_ = cf . constants . create_class ( super_ ) return cf
3,241
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cf.py#L97-L111
[ "def", "get_dbs", "(", ")", ":", "url", "=", "posixpath", ".", "join", "(", "config", ".", "db_index_url", ",", "'DBS'", ")", "response", "=", "requests", ".", "get", "(", "url", ")", "dbs", "=", "response", ".", "content", ".", "decode", "(", "'ascii'", ")", ".", "splitlines", "(", ")", "dbs", "=", "[", "re", ".", "sub", "(", "'\\t{2,}'", ",", "'\\t'", ",", "line", ")", ".", "split", "(", "'\\t'", ")", "for", "line", "in", "dbs", "]", "return", "dbs" ]
Saves the class to the file - like object source .
def save ( self , source : IO ) : write = source . write write ( pack ( '>IHH' , ClassFile . MAGIC , self . version . minor , self . version . major ) ) self . _constants . pack ( source ) write ( self . access_flags . pack ( ) ) write ( pack ( f'>HHH{len(self._interfaces)}H' , self . _this , self . _super , len ( self . _interfaces ) , * self . _interfaces ) ) self . fields . pack ( source ) self . methods . pack ( source ) self . attributes . pack ( source )
3,242
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cf.py#L113-L141
[ "def", "delete_network_acl", "(", "network_acl_id", "=", "None", ",", "network_acl_name", "=", "None", ",", "disassociate", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "disassociate", ":", "network_acl", "=", "_get_resource", "(", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "network_acl", "and", "network_acl", ".", "associations", ":", "subnet_id", "=", "network_acl", ".", "associations", "[", "0", "]", ".", "subnet_id", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "disassociate_network_acl", "(", "subnet_id", ")", "except", "BotoServerError", ":", "pass", "return", "_delete_resource", "(", "resource", "=", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "resource_id", "=", "network_acl_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")" ]
Loads an existing JVM ClassFile from any file - like object .
def _from_io ( self , source : IO ) : read = source . read if unpack ( '>I' , source . read ( 4 ) ) [ 0 ] != ClassFile . MAGIC : raise ValueError ( 'invalid magic number' ) # The version is swapped on disk to (minor, major), so swap it back. self . version = unpack ( '>HH' , source . read ( 4 ) ) [ : : - 1 ] self . _constants . unpack ( source ) # ClassFile access_flags, see section #4.1 of the JVM specs. self . access_flags . unpack ( read ( 2 ) ) # The CONSTANT_Class indexes for "this" class and its superclass. # Interfaces are a simple list of CONSTANT_Class indexes. self . _this , self . _super , interfaces_count = unpack ( '>HHH' , read ( 6 ) ) self . _interfaces = unpack ( f'>{interfaces_count}H' , read ( 2 * interfaces_count ) ) self . fields . unpack ( source ) self . methods . unpack ( source ) self . attributes . unpack ( source )
3,243
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cf.py#L143-L170
[ "def", "_remove_player", "(", "self", ",", "player_id", ")", ":", "player", "=", "self", ".", "_mpris_players", ".", "get", "(", "player_id", ")", "if", "player", ":", "if", "player", ".", "get", "(", "\"subscription\"", ")", ":", "player", "[", "\"subscription\"", "]", ".", "disconnect", "(", ")", "del", "self", ".", "_mpris_players", "[", "player_id", "]" ]
A list of direct superinterfaces of this class as indexes into the constant pool in left - to - right order .
def interfaces ( self ) -> Iterable [ ConstantClass ] : return [ self . _constants [ idx ] for idx in self . _interfaces ]
3,244
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cf.py#L223-L228
[ "def", "save", "(", "self", ")", ":", "response", "=", "pu", ".", "open", "(", "title", "=", "'Save perspective warp'", ",", "patterns", "=", "[", "'*.h5'", "]", ")", "if", "response", "is", "not", "None", ":", "self", ".", "warp_actor", ".", "save", "(", "response", ")" ]
Returns the bootstrap methods table from the BootstrapMethods attribute if one exists . If it does not one will be created .
def bootstrap_methods ( self ) -> BootstrapMethod : bootstrap = self . attributes . find_one ( name = 'BootstrapMethods' ) if bootstrap is None : bootstrap = self . attributes . create ( ATTRIBUTE_CLASSES [ 'BootstrapMethods' ] ) return bootstrap . table
3,245
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cf.py#L231-L245
[ "def", "georec", "(", "lon", ",", "lat", ",", "alt", ",", "re", ",", "f", ")", ":", "lon", "=", "ctypes", ".", "c_double", "(", "lon", ")", "lat", "=", "ctypes", ".", "c_double", "(", "lat", ")", "alt", "=", "ctypes", ".", "c_double", "(", "alt", ")", "re", "=", "ctypes", ".", "c_double", "(", "re", ")", "f", "=", "ctypes", ".", "c_double", "(", "f", ")", "rectan", "=", "stypes", ".", "emptyDoubleVector", "(", "3", ")", "libspice", ".", "georec_c", "(", "lon", ",", "lat", ",", "alt", ",", "re", ",", "f", ",", "rectan", ")", "return", "stypes", ".", "cVectorToPython", "(", "rectan", ")" ]
List enabled Attributes .
def attributes ( ) : attribute_classes = get_attribute_classes ( ) for name , class_ in attribute_classes . items ( ) : click . echo ( u'{name} - Added in: {ai} ({cv})' . format ( name = click . style ( name , fg = 'green' ) , ai = click . style ( class_ . ADDED_IN , fg = 'yellow' ) , cv = click . style ( ClassVersion ( * class_ . MINIMUM_CLASS_VERSION ) . human , fg = 'yellow' ) ) )
3,246
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cli.py#L20-L36
[ "def", "build_news", "(", "ctx", ",", "draft", "=", "False", ",", "yes", "=", "False", ")", ":", "report", ".", "info", "(", "ctx", ",", "\"docs.build-news\"", ",", "\"building changelog from news fragments\"", ")", "build_command", "=", "f\"towncrier --version {ctx.metadata['version']}\"", "if", "draft", ":", "report", ".", "warn", "(", "ctx", ",", "\"docs.build-news\"", ",", "\"building changelog as draft (results are written to stdout)\"", ",", ")", "build_command", "+=", "\" --draft\"", "elif", "yes", ":", "report", ".", "warn", "(", "ctx", ",", "\"docs.build-news\"", ",", "\"removing news files without user confirmation (-y)\"", ")", "build_command", "+=", "\" --yes\"", "ctx", ".", "run", "(", "build_command", ",", "hide", "=", "None", ")" ]
Lookup instruction information .
def ins ( mnemonic ) : try : opcode = bytecode . opcode_table [ mnemonic ] except KeyError : click . secho ( u'No definition found.' , fg = 'red' ) return click . echo ( u'{mnemonic} (0x{op})' . format ( mnemonic = click . style ( opcode [ 'mnemonic' ] , fg = 'green' , underline = True ) , op = click . style ( format ( opcode [ 'op' ] , '02x' ) , fg = 'green' ) ) ) if opcode . get ( 'desc' ) : click . secho ( 'Description:' , fg = 'yellow' ) click . echo ( opcode [ 'desc' ] ) if opcode [ 'can_be_wide' ] : click . echo ( u'This instruction can be prefixed by the WIDE opcode.' ) if opcode . get ( 'runtime' ) : click . secho ( 'Possible runtime exceptions:' , fg = 'yellow' ) for runtime_exception in opcode [ 'runtime' ] : click . echo ( '- {runtime_exception}' . format ( runtime_exception = click . style ( runtime_exception , fg = 'red' ) ) ) if opcode [ 'operands' ] : click . secho ( u'Operand Format:' , fg = 'yellow' ) for operand_fmt , operand_type in opcode [ 'operands' ] : click . echo ( u'- {ty} as a {fmt}' . format ( ty = click . style ( operand_type . name , fg = 'yellow' ) , fmt = click . style ( operand_fmt . name , fg = 'yellow' ) ) ) elif opcode [ 'op' ] in ( 0xAB , 0xAA , 0xC4 ) : # lookup[table|switch] and WIDE. click . secho ( u'\nOperand Format:' , fg = 'yellow' ) click . echo ( u'This is a special-case opcode with variable operand parsing.' )
3,247
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cli.py#L41-L83
[ "def", "validate_arguments", "(", "log", ",", "whitelisted_args", ",", "args", ")", ":", "valid_patterns", "=", "{", "re", ".", "compile", "(", "p", ")", ":", "v", "for", "p", ",", "v", "in", "whitelisted_args", ".", "items", "(", ")", "}", "def", "validate", "(", "idx", ")", ":", "arg", "=", "args", "[", "idx", "]", "for", "pattern", ",", "has_argument", "in", "valid_patterns", ".", "items", "(", ")", ":", "if", "pattern", ".", "match", "(", "arg", ")", ":", "return", "2", "if", "has_argument", "else", "1", "log", ".", "warn", "(", "\"Zinc argument '{}' is not supported, and is subject to change/removal!\"", ".", "format", "(", "arg", ")", ")", "return", "1", "arg_index", "=", "0", "while", "arg_index", "<", "len", "(", "args", ")", ":", "arg_index", "+=", "validate", "(", "arg_index", ")" ]
Drop into a debugging shell .
def shell_command ( class_path ) : loader = ClassLoader ( * class_path ) shell . start_shell ( local_ns = { 'ClassFile' : ClassFile , 'loader' : loader , 'constants' : importlib . import_module ( 'jawa.constants' ) , } )
3,248
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cli.py#L88-L95
[ "def", "__ordinal", "(", "self", ",", "num", ")", ":", "if", "10", "<=", "num", "%", "100", "<", "20", ":", "return", "str", "(", "num", ")", "+", "'th'", "else", ":", "ord_info", "=", "{", "1", ":", "'st'", ",", "2", ":", "'nd'", ",", "3", ":", "'rd'", "}", ".", "get", "(", "num", "%", "10", ",", "'th'", ")", "return", "'{}{}'", ".", "format", "(", "num", ",", "ord_info", ")" ]
Convert a bytecode . yaml file into a prepared bytecode . json .
def definition_to_json ( source ) : try : import yaml except ImportError : click . echo ( 'The pyyaml module could not be found and is required' ' to use this command.' , err = True ) return y = yaml . load ( source ) for k , v in y . items ( ) : # We guarantee some keys should always exist to make life easier for # developers. v . setdefault ( 'operands' , None ) v . setdefault ( 'can_be_wide' , False ) v . setdefault ( 'transform' , { } ) v [ 'mnemonic' ] = k click . echo ( json . dumps ( y , indent = 4 , sort_keys = True ) )
3,249
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cli.py#L100-L130
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_access", "is", "not", "None", ":", "_logger", ".", "debug", "(", "\"Cleaning up\"", ")", "pci_cleanup", "(", "self", ".", "_access", ")", "self", ".", "_access", "=", "None" ]
Output a list of all classes referenced by the given source .
def dependencies ( source ) : loader = ClassLoader ( source , max_cache = - 1 ) all_dependencies = set ( ) for klass in loader . classes : new_dependencies = loader . dependencies ( klass ) - all_dependencies all_dependencies . update ( new_dependencies ) for new_dep in new_dependencies : click . echo ( new_dep )
3,250
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cli.py#L135-L143
[ "def", "EnableJob", "(", "self", ",", "job_id", ",", "token", "=", "None", ")", ":", "job_urn", "=", "self", ".", "CRON_JOBS_PATH", ".", "Add", "(", "job_id", ")", "cron_job", "=", "aff4", ".", "FACTORY", ".", "Open", "(", "job_urn", ",", "mode", "=", "\"rw\"", ",", "aff4_type", "=", "CronJob", ",", "token", "=", "token", ")", "cron_job", ".", "Set", "(", "cron_job", ".", "Schema", ".", "DISABLED", "(", "0", ")", ")", "cron_job", ".", "Close", "(", ")" ]
Grep the constant pool of all classes in source .
def grep ( source , regex , stop_on_first = False ) : loader = ClassLoader ( source , max_cache = - 1 ) r = re . compile ( regex ) def _matches ( constant ) : return r . match ( constant . value ) for klass in loader . classes : it = loader . search_constant_pool ( path = klass , type_ = UTF8 , f = _matches ) if next ( it , None ) : print ( klass ) if stop_on_first : break
3,251
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/cli.py#L155-L168
[ "def", "_init_dates", "(", "self", ")", ":", "if", "self", ".", "total_transactions", "==", "0", ":", "return", "None", "self", ".", "epoch_start", "=", "Result", ".", "select", "(", "Result", ".", "epoch", ")", ".", "order_by", "(", "Result", ".", "epoch", ".", "asc", "(", ")", ")", ".", "limit", "(", "1", ")", ".", "get", "(", ")", ".", "epoch", "self", ".", "epoch_finish", "=", "Result", ".", "select", "(", "Result", ".", "epoch", ")", ".", "order_by", "(", "Result", ".", "epoch", ".", "desc", "(", ")", ")", ".", "limit", "(", "1", ")", ".", "get", "(", ")", ".", "epoch", "self", ".", "start_datetime", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "self", ".", "epoch_start", ")", ")", "self", ".", "finish_datetime", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "self", ".", "epoch_finish", ")", ")" ]
fetch an URL .
def fetch ( * args , * * kwargs ) : data = kwargs . get ( 'data' , None ) files = kwargs . get ( 'files' , { } ) if data and isinstance ( data , ( basestring , dict ) ) or files : return post ( * args , * * kwargs ) return get ( * args , * * kwargs )
3,252
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L528-L540
[ "def", "delete_everything", "(", "self", ")", ":", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "basebackups", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of a base backup'", ")", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "wal_directory", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of wal logs'", ")", "if", "self", ".", "deleter", ":", "self", ".", "deleter", ".", "close", "(", ")" ]
Return a dictionary of parsed url
def parse_url ( url ) : try : url = unicode ( url ) except UnicodeDecodeError : pass if py3k : make_utf8 = lambda x : x else : make_utf8 = lambda x : isinstance ( x , unicode ) and x . encode ( 'utf-8' ) or x if '://' in url : scheme , url = url . split ( '://' , 1 ) else : scheme = 'http' url = 'http://' + url parsed = urlparse . urlsplit ( url ) r = ObjectDict ( ) r [ 'scheme' ] = make_utf8 ( scheme ) r [ 'netloc' ] = make_utf8 ( parsed . netloc ) r [ 'path' ] = make_utf8 ( parsed . path ) r [ 'query' ] = make_utf8 ( parsed . query ) r [ 'fragment' ] = make_utf8 ( parsed . fragment ) r [ 'uri' ] = make_utf8 ( parsed . path ) if parsed . query : r [ 'uri' ] += '?' + make_utf8 ( parsed . query ) r [ 'username' ] = make_utf8 ( parsed . username ) r [ 'password' ] = make_utf8 ( parsed . password ) host = make_utf8 ( parsed . hostname . encode ( 'idna' ) . decode ( 'utf-8' ) ) r [ 'host' ] = r [ 'hostname' ] = host try : r [ 'port' ] = parsed . port except ValueError : r [ 'port' ] = None if r [ 'port' ] : r [ 'http_host' ] = '%s:%d' % ( r [ 'host' ] , r [ 'port' ] ) else : r [ 'http_host' ] = r [ 'host' ] return r
3,253
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L801-L845
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Get proxies from os . environ .
def get_proxies_from_environ ( ) : proxies = { } http_proxy = os . getenv ( 'http_proxy' ) or os . getenv ( 'HTTP_PROXY' ) https_proxy = os . getenv ( 'https_proxy' ) or os . getenv ( 'HTTPS_PROXY' ) if http_proxy : proxies [ 'http' ] = http_proxy if https_proxy : proxies [ 'https' ] = https_proxy return proxies
3,254
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L848-L857
[ "def", "add_reference", "(", "self", ",", "source", ",", "target", ",", "*", "*", "kwargs", ")", ":", "# Tweak keyword arguments for addReference", "addRef_kw", "=", "kwargs", ".", "copy", "(", ")", "addRef_kw", ".", "setdefault", "(", "\"referenceClass\"", ",", "self", ".", "referenceClass", ")", "if", "\"schema\"", "in", "addRef_kw", ":", "del", "addRef_kw", "[", "\"schema\"", "]", "uid", "=", "api", ".", "get_uid", "(", "target", ")", "rc", "=", "api", ".", "get_tool", "(", "\"reference_catalog\"", ")", "# throws IndexError if uid is invalid", "rc", ".", "addReference", "(", "source", ",", "uid", ",", "self", ".", "relationship", ",", "*", "*", "addRef_kw", ")", "# link the version of the reference", "self", ".", "link_version", "(", "source", ",", "target", ")" ]
Returns a User - Agent string randomly from file .
def random_useragent ( filename = True ) : import random default_ua = 'urlfetch/%s' % __version__ if isinstance ( filename , basestring ) : filenames = [ filename ] else : filenames = [ ] if filename and UAFILE : filenames . append ( UAFILE ) for filename in filenames : try : st = os . stat ( filename ) if stat . S_ISREG ( st . st_mode ) and os . access ( filename , os . R_OK ) : break except : pass else : return default_ua with open ( filename , 'rb' ) as f : filesize = st . st_size pos = 0 r = random . Random ( ) # try getting a valid line for no more than 3 times for i in range ( 3 ) : pos += r . randint ( 0 , filesize ) pos %= filesize f . seek ( pos ) # in case we are in middle of a line f . readline ( ) line = f . readline ( ) if not line : if f . tell ( ) == filesize : # end of file f . seek ( 0 ) line = f . readline ( ) line = line . strip ( ) if line and line [ 0 ] != '#' : return line return default_ua
3,255
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L873-L929
[ "def", "device_removed", "(", "self", ",", "device", ")", ":", "if", "not", "self", ".", "_mounter", ".", "is_handleable", "(", "device", ")", ":", "return", "device_file", "=", "device", ".", "device_presentation", "if", "(", "device", ".", "is_drive", "or", "device", ".", "is_toplevel", ")", "and", "device_file", ":", "self", ".", "_show_notification", "(", "'device_removed'", ",", "_", "(", "'Device removed'", ")", ",", "_", "(", "'device disappeared on {0.device_presentation}'", ",", "device", ")", ",", "device", ".", "icon_name", ")" ]
Concatenate url and argument dictionary
def url_concat ( url , args , keep_existing = True ) : if not args : return url if keep_existing : if url [ - 1 ] not in ( '?' , '&' ) : url += '&' if ( '?' in url ) else '?' return url + urlencode ( args , 1 ) else : url , seq , query = url . partition ( '?' ) query = urlparse . parse_qs ( query , True ) query . update ( args ) return url + '?' + urlencode ( query , 1 )
3,256
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L932-L954
[ "def", "GetClientsForHash", "(", "cls", ",", "hash_obj", ",", "token", "=", "None", ",", "age", "=", "aff4", ".", "NEWEST_TIME", ")", ":", "if", "age", "==", "aff4", ".", "ALL_TIMES", ":", "raise", "ValueError", "(", "\"age==aff4.ALL_TIMES is not supported.\"", ")", "results", "=", "cls", ".", "GetClientsForHashes", "(", "[", "hash_obj", "]", ",", "token", "=", "token", ",", "age", "=", "age", ")", "for", "_", ",", "client_files", "in", "results", ":", "for", "client_file", "in", "client_files", ":", "yield", "client_file" ]
Generate a multipart boundry .
def choose_boundary ( ) : global BOUNDARY_PREFIX if BOUNDARY_PREFIX is None : BOUNDARY_PREFIX = "urlfetch" try : uid = repr ( os . getuid ( ) ) BOUNDARY_PREFIX += "." + uid except AttributeError : pass try : pid = repr ( os . getpid ( ) ) BOUNDARY_PREFIX += "." + pid except AttributeError : pass return "%s.%s" % ( BOUNDARY_PREFIX , uuid . uuid4 ( ) . hex )
3,257
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L957-L976
[ "def", "_get_events", "(", "self", ",", "result", ")", ":", "events", "=", "[", "]", "for", "event_data", "in", "result", ":", "event", "=", "Event", ".", "factory", "(", "event_data", ")", "if", "event", "is", "not", "None", ":", "events", ".", "append", "(", "event", ")", "if", "isinstance", "(", "event", ",", "DeviceStateChangedEvent", ")", ":", "# change device state", "if", "self", ".", "__devices", "[", "event", ".", "device_url", "]", "is", "None", ":", "raise", "Exception", "(", "\"Received device change \"", "+", "\"state for unknown device '\"", "+", "event", ".", "device_url", "+", "\"'\"", ")", "self", ".", "__devices", "[", "event", ".", "device_url", "]", ".", "set_active_states", "(", "event", ".", "states", ")", "return", "events" ]
Encode multipart .
def encode_multipart ( data , files ) : body = BytesIO ( ) boundary = choose_boundary ( ) part_boundary = b ( '--%s\r\n' % boundary ) writer = codecs . lookup ( 'utf-8' ) [ 3 ] if isinstance ( data , dict ) : for name , values in data . items ( ) : if not isinstance ( values , ( list , tuple , set ) ) : # behave like urllib.urlencode(dict, 1) values = ( values , ) for value in values : body . write ( part_boundary ) writer ( body ) . write ( 'Content-Disposition: form-data; ' 'name="%s"\r\n' % name ) body . write ( b'Content-Type: text/plain\r\n\r\n' ) if isinstance ( value , int ) : value = str ( value ) if py3k and isinstance ( value , str ) : writer ( body ) . write ( value ) else : body . write ( value ) body . write ( b'\r\n' ) for fieldname , f in files . items ( ) : if isinstance ( f , tuple ) : filename , f = f elif hasattr ( f , 'name' ) : filename = basename ( f . name ) else : filename = None raise UrlfetchException ( "file must has filename" ) if hasattr ( f , 'read' ) : value = f . read ( ) elif isinstance ( f , basestring ) : value = f else : value = str ( f ) body . write ( part_boundary ) if filename : writer ( body ) . write ( 'Content-Disposition: form-data; name="%s"; ' 'filename="%s"\r\n' % ( fieldname , filename ) ) body . write ( b'Content-Type: application/octet-stream\r\n\r\n' ) else : writer ( body ) . write ( 'Content-Disposition: form-data; name="%s"' '\r\n' % name ) body . write ( b'Content-Type: text/plain\r\n\r\n' ) if py3k and isinstance ( value , str ) : writer ( body ) . write ( value ) else : body . write ( value ) body . write ( b'\r\n' ) body . write ( b ( '--' + boundary + '--\r\n' ) ) content_type = 'multipart/form-data; boundary=%s' % boundary return content_type , body . getvalue ( )
3,258
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L979-L1046
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Response body .
def body ( self ) : content = [ ] length = 0 for chunk in self : content . append ( chunk ) length += len ( chunk ) if self . length_limit and length > self . length_limit : self . close ( ) raise ContentLimitExceeded ( "Content length is more than %d " "bytes" % self . length_limit ) return b ( "" ) . join ( content )
3,259
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L285-L300
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Load response body as json .
def json ( self ) : try : return json . loads ( self . text ) except Exception as e : raise ContentDecodingError ( e )
3,260
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L314-L322
[ "def", "libvlc_video_set_spu", "(", "p_mi", ",", "i_spu", ")", ":", "f", "=", "_Cfunctions", ".", "get", "(", "'libvlc_video_set_spu'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_video_set_spu'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaPlayer", ",", "ctypes", ".", "c_int", ")", "return", "f", "(", "p_mi", ",", "i_spu", ")" ]
Response headers .
def headers ( self ) : if py3k : return dict ( ( k . lower ( ) , v ) for k , v in self . getheaders ( ) ) else : return dict ( self . getheaders ( ) )
3,261
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L325-L350
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Cookies in dict
def cookies ( self ) : c = Cookie . SimpleCookie ( self . getheader ( 'set-cookie' ) ) return dict ( ( i . key , i . value ) for i in c . values ( ) )
3,262
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L353-L356
[ "def", "console", "(", "self", ",", "console", ")", ":", "if", "console", "==", "self", ".", "_console", ":", "return", "if", "self", ".", "_console_type", "==", "\"vnc\"", "and", "console", "is", "not", "None", "and", "console", "<", "5900", ":", "raise", "NodeError", "(", "\"VNC console require a port superior or equal to 5900 currently it's {}\"", ".", "format", "(", "console", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "console", "is", "not", "None", ":", "if", "self", ".", "console_type", "==", "\"vnc\"", ":", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "console", ",", "self", ".", "_project", ",", "port_range_start", "=", "5900", ",", "port_range_end", "=", "6000", ")", "else", ":", "self", ".", "_console", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "console", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: console port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "console", ")", ")" ]
Links parsed from HTTP Link header
def links ( self ) : ret = [ ] linkheader = self . getheader ( 'link' ) if not linkheader : return ret for i in linkheader . split ( ',' ) : try : url , params = i . split ( ';' , 1 ) except ValueError : url , params = i , '' link = { } link [ 'url' ] = url . strip ( '''<> '"''' ) for param in params . split ( ';' ) : try : k , v = param . split ( '=' ) except ValueError : break link [ k . strip ( ''' '"''' ) ] = v . strip ( ''' '"''' ) ret . append ( link ) return ret
3,263
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L364-L384
[ "def", "save_reg", "(", "data", ")", ":", "reg_dir", "=", "_reg_dir", "(", ")", "regfile", "=", "os", ".", "path", ".", "join", "(", "reg_dir", ",", "'register'", ")", "try", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "reg_dir", ")", ":", "os", ".", "makedirs", "(", "reg_dir", ")", "except", "OSError", "as", "exc", ":", "if", "exc", ".", "errno", "==", "errno", ".", "EEXIST", ":", "pass", "else", ":", "raise", "try", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "regfile", ",", "'a'", ")", "as", "fh_", ":", "salt", ".", "utils", ".", "msgpack", ".", "dump", "(", "data", ",", "fh_", ")", "except", "Exception", ":", "log", ".", "error", "(", "'Could not write to msgpack file %s'", ",", "__opts__", "[", "'outdir'", "]", ")", "raise" ]
Cookie string setter
def cookiestring ( self , value ) : c = Cookie . SimpleCookie ( value ) sc = [ ( i . key , i . value ) for i in c . values ( ) ] self . cookies = dict ( sc )
3,264
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L452-L456
[ "def", "serverinfo", "(", "url", "=", "'http://localhost:8080/manager'", ",", "timeout", "=", "180", ")", ":", "data", "=", "_wget", "(", "'serverinfo'", ",", "{", "}", ",", "url", ",", "timeout", "=", "timeout", ")", "if", "data", "[", "'res'", "]", "is", "False", ":", "return", "{", "'error'", ":", "data", "[", "'msg'", "]", "}", "ret", "=", "{", "}", "data", "[", "'msg'", "]", ".", "pop", "(", "0", ")", "for", "line", "in", "data", "[", "'msg'", "]", ":", "tmp", "=", "line", ".", "split", "(", "':'", ")", "ret", "[", "tmp", "[", "0", "]", ".", "strip", "(", ")", "]", "=", "tmp", "[", "1", "]", ".", "strip", "(", ")", "return", "ret" ]
Issue a request .
def request ( self , * args , * * kwargs ) : headers = self . headers . copy ( ) if self . cookiestring : headers [ 'Cookie' ] = self . cookiestring headers . update ( kwargs . get ( 'headers' , { } ) ) kwargs [ 'headers' ] = headers r = request ( * args , * * kwargs ) self . cookies . update ( r . cookies ) return r
3,265
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L465-L476
[ "def", "get_urls", "(", "self", ")", ":", "not_clone_url", "=", "[", "url", "(", "r'^(.+)/will_not_clone/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "will_not_clone", ")", ")", "]", "restore_url", "=", "[", "url", "(", "r'^(.+)/restore/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "restore", ")", ")", "]", "return", "not_clone_url", "+", "restore_url", "+", "super", "(", "VersionedAdmin", ",", "self", ")", ".", "get_urls", "(", ")" ]
Wrapper works for conversion .
def f2format ( filename ) : print ( 'Now converting %r...' % filename ) # fetch encoding encoding = os . getenv ( 'F2FORMAT_ENCODING' , LOCALE_ENCODING ) lineno = dict ( ) # line number -> file offset content = list ( ) # file content with open ( filename , 'r' , encoding = encoding ) as file : lineno [ 1 ] = 0 for lnum , line in enumerate ( file , start = 1 ) : content . append ( line ) lineno [ lnum + 1 ] = lineno [ lnum ] + len ( line ) # now, do the dirty works string = '' . join ( content ) text = convert ( string , lineno ) # dump back to the file with open ( filename , 'w' , encoding = encoding ) as file : file . write ( text )
3,266
https://github.com/JarryShaw/f2format/blob/a144250268247ce0a98d734a26d53faadff7a6f8/src/core.py#L199-L225
[ "def", "load", "(", "self", ",", "filething", ")", ":", "fileobj", "=", "filething", ".", "fileobj", "self", ".", "metadata_blocks", "=", "[", "]", "self", ".", "tags", "=", "None", "self", ".", "cuesheet", "=", "None", "self", ".", "seektable", "=", "None", "fileobj", "=", "StrictFileObject", "(", "fileobj", ")", "self", ".", "__check_header", "(", "fileobj", ",", "filething", ".", "name", ")", "while", "self", ".", "__read_metadata_block", "(", "fileobj", ")", ":", "pass", "try", ":", "self", ".", "metadata_blocks", "[", "0", "]", ".", "length", "except", "(", "AttributeError", ",", "IndexError", ")", ":", "raise", "FLACNoHeaderError", "(", "\"Stream info block not found\"", ")", "if", "self", ".", "info", ".", "length", ":", "start", "=", "fileobj", ".", "tell", "(", ")", "fileobj", ".", "seek", "(", "0", ",", "2", ")", "self", ".", "info", ".", "bitrate", "=", "int", "(", "float", "(", "fileobj", ".", "tell", "(", ")", "-", "start", ")", "*", "8", "/", "self", ".", "info", ".", "length", ")", "else", ":", "self", ".", "info", ".", "bitrate", "=", "0" ]
If any exception comes log them in the given Github obj .
def exception_to_github ( github_obj_to_comment , summary = "" ) : context = ExceptionContext ( ) try : yield context except Exception : # pylint: disable=broad-except if summary : summary = ": ({})" . format ( summary ) error_type = "an unknown error" try : raise except CalledProcessError as err : error_type = "a Subprocess error" content = "Command: {}\n" . format ( err . cmd ) content += "Finished with return code {}\n" . format ( err . returncode ) if err . output : content += "and output:\n```shell\n{}\n```" . format ( err . output ) else : content += "and no output" except Exception : # pylint: disable=broad-except content = "```python\n{}\n```" . format ( traceback . format_exc ( ) ) response = "<details><summary>Encountered {}{}</summary><p>\n\n" . format ( error_type , summary ) response += content response += "\n\n</p></details>" context . comment = create_comment ( github_obj_to_comment , response )
3,267
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L28-L56
[ "def", "configure", "(", "self", ",", "*", "*", "_options", ")", ":", "language", ",", "voice", ",", "voiceinfo", ",", "options", "=", "self", ".", "_configure", "(", "*", "*", "_options", ")", "self", ".", "languages_options", "[", "language", "]", "=", "(", "voice", ",", "options", ")" ]
Create a comment whatever the object is a PR a commit or an issue .
def create_comment ( github_object , body ) : try : return github_object . create_issue_comment ( body ) # It's a PR except AttributeError : return github_object . create_comment ( body )
3,268
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L63-L69
[ "def", "get_available_options", "(", "self", ",", "service_name", ")", ":", "options", "=", "{", "}", "for", "data_dir", "in", "self", ".", "data_dirs", ":", "# Traverse all the directories trying to find the best match.", "service_glob", "=", "\"{0}-*.json\"", ".", "format", "(", "service_name", ")", "path", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "service_glob", ")", "found", "=", "glob", ".", "glob", "(", "path", ")", "for", "match", "in", "found", ":", "# Rip apart the path to determine the API version.", "base", "=", "os", ".", "path", ".", "basename", "(", "match", ")", "bits", "=", "os", ".", "path", ".", "splitext", "(", "base", ")", "[", "0", "]", ".", "split", "(", "'-'", ",", "1", ")", "if", "len", "(", "bits", ")", "<", "2", ":", "continue", "api_version", "=", "bits", "[", "1", "]", "options", ".", "setdefault", "(", "api_version", ",", "[", "]", ")", "options", "[", "api_version", "]", ".", "append", "(", "match", ")", "return", "options" ]
If the SDK git id is incomplete try to complete it with user login
def get_full_sdk_id ( gh_token , sdk_git_id ) : if not '/' in sdk_git_id : login = user_from_token ( gh_token ) . login return '{}/{}' . format ( login , sdk_git_id ) return sdk_git_id
3,269
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L95-L100
[ "def", "print_menu", "(", "place", ",", "static", "=", "False", ")", ":", "day", "=", "get_day", "(", ")", "if", "static", ":", "plan", "=", "get", "(", "FILES", "[", "1", "]", ")", "for", "meal", "in", "plan", "[", "\"weeks\"", "]", "[", "0", "]", "[", "\"days\"", "]", "[", "day", "]", "[", "place", "]", "[", "\"meals\"", "]", ":", "if", "place", "==", "\"Diner\"", ":", "print", "(", "meal", "[", "\"category\"", "]", "+", "\" \"", "+", "meal", "[", "\"meal\"", "]", ")", "else", ":", "print", "(", "meal", "[", "\"category\"", "]", "+", "\": \"", "+", "meal", "[", "\"meal\"", "]", ")", "else", ":", "plan", "=", "get", "(", "FILES", "[", "0", "]", ")", "for", "meal", "in", "plan", "[", "\"weeks\"", "]", "[", "0", "]", "[", "\"days\"", "]", "[", "day", "]", "[", "place", "]", "[", "\"meals\"", "]", ":", "print", "(", "meal", "[", "\"category\"", "]", "+", "\": \"", "+", "meal", "[", "\"meal\"", "]", ")" ]
Sync the current branch in this fork against the direct parent on Github
def sync_fork ( gh_token , github_repo_id , repo , push = True ) : if not gh_token : _LOGGER . warning ( 'Skipping the upstream repo sync, no token' ) return _LOGGER . info ( 'Check if repo has to be sync with upstream' ) github_con = Github ( gh_token ) github_repo = github_con . get_repo ( github_repo_id ) if not github_repo . parent : _LOGGER . warning ( 'This repo has no upstream' ) return upstream_url = 'https://github.com/{}.git' . format ( github_repo . parent . full_name ) upstream = repo . create_remote ( 'upstream' , url = upstream_url ) upstream . fetch ( ) active_branch_name = repo . active_branch . name if not active_branch_name in repo . remotes . upstream . refs : _LOGGER . info ( 'Upstream has no branch %s to merge from' , active_branch_name ) return else : _LOGGER . info ( 'Merge from upstream' ) msg = repo . git . rebase ( 'upstream/{}' . format ( repo . active_branch . name ) ) _LOGGER . debug ( msg ) if push : msg = repo . git . push ( ) _LOGGER . debug ( msg )
3,270
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L102-L128
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Try to create the PR . If the PR exists try to find it instead . Raises otherwise .
def get_or_create_pull ( github_repo , title , body , head , base , * , none_if_no_commit = False ) : try : # Try to create or get a PR return github_repo . create_pull ( title = title , body = body , head = head , base = base ) except GithubException as err : err_message = err . data [ 'errors' ] [ 0 ] . get ( 'message' , '' ) if err . status == 422 and err_message . startswith ( 'A pull request already exists' ) : _LOGGER . info ( 'PR already exists, get this PR' ) return list ( github_repo . get_pulls ( head = head , base = base ) ) [ 0 ] elif none_if_no_commit and err . status == 422 and err_message . startswith ( 'No commits between' ) : _LOGGER . info ( 'No PR possible since head %s and base %s are the same' , head , base ) return None else : _LOGGER . warning ( "Unable to create PR:\n%s" , err . data ) raise except Exception as err : response = traceback . format_exc ( ) _LOGGER . warning ( "Unable to create PR:\n%s" , response ) raise
3,271
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L130-L165
[ "def", "end_timing", "(", "self", ")", ":", "if", "self", ".", "_callback", "!=", "None", ":", "elapsed", "=", "time", ".", "perf_counter", "(", ")", "*", "1000", "-", "self", ".", "_start", "self", ".", "_callback", ".", "end_timing", "(", "self", ".", "_counter", ",", "elapsed", ")" ]
Clone the given repo_id to the folder .
def clone_to_path ( gh_token , folder , sdk_git_id , branch_or_commit = None , * , pr_number = None ) : _LOGGER . info ( "Clone SDK repository %s" , sdk_git_id ) url_parsing = urlsplit ( sdk_git_id ) sdk_git_id = url_parsing . path if sdk_git_id . startswith ( "/" ) : sdk_git_id = sdk_git_id [ 1 : ] credentials_part = '' if gh_token : login = user_from_token ( gh_token ) . login credentials_part = '{user}:{token}@' . format ( user = login , token = gh_token ) else : _LOGGER . warning ( 'Will clone the repo without writing credentials' ) https_authenticated_url = 'https://{credentials}github.com/{sdk_git_id}.git' . format ( credentials = credentials_part , sdk_git_id = sdk_git_id ) # Clone the repo _git_clone_to_path ( https_authenticated_url , folder ) # If this is a PR, do some fetch to improve the number of SHA1 available if pr_number : try : checkout_with_fetch ( folder , "pull/{}/merge" . format ( pr_number ) ) return except Exception : # pylint: disable=broad-except pass # Assume "merge" doesn't exist anymore, fetch "head" checkout_with_fetch ( folder , "pull/{}/head" . format ( pr_number ) ) # If there is SHA1, checkout it. If PR number was given, SHA1 could be inside that PR. if branch_or_commit : repo = Repo ( str ( folder ) ) repo . git . checkout ( branch_or_commit )
3,272
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L167-L212
[ "def", "poll", "(", "self", ",", "timeout", "=", "None", ")", ":", "if", "timeout", "is", "None", ":", "timeout", "=", "-", "1", "message", "=", "super", "(", "AvroConsumer", ",", "self", ")", ".", "poll", "(", "timeout", ")", "if", "message", "is", "None", ":", "return", "None", "if", "not", "message", ".", "error", "(", ")", ":", "try", ":", "if", "message", ".", "value", "(", ")", "is", "not", "None", ":", "decoded_value", "=", "self", ".", "_serializer", ".", "decode_message", "(", "message", ".", "value", "(", ")", ",", "is_key", "=", "False", ")", "message", ".", "set_value", "(", "decoded_value", ")", "if", "message", ".", "key", "(", ")", "is", "not", "None", ":", "decoded_key", "=", "self", ".", "_serializer", ".", "decode_message", "(", "message", ".", "key", "(", ")", ",", "is_key", "=", "True", ")", "message", ".", "set_key", "(", "decoded_key", ")", "except", "SerializerError", "as", "e", ":", "raise", "SerializerError", "(", "\"Message deserialization failed for message at {} [{}] offset {}: {}\"", ".", "format", "(", "message", ".", "topic", "(", ")", ",", "message", ".", "partition", "(", ")", ",", "message", ".", "offset", "(", ")", ",", "e", ")", ")", "return", "message" ]
Do the PR
def do_pr ( gh_token , sdk_git_id , sdk_pr_target_repo_id , branch_name , base_branch , pr_body = "" ) : # pylint: disable=too-many-arguments if not gh_token : _LOGGER . info ( 'Skipping the PR, no token found' ) return None if not sdk_pr_target_repo_id : _LOGGER . info ( 'Skipping the PR, no target repo id' ) return None github_con = Github ( gh_token ) sdk_pr_target_repo = github_con . get_repo ( sdk_pr_target_repo_id ) if '/' in sdk_git_id : sdk_git_owner = sdk_git_id . split ( '/' ) [ 0 ] _LOGGER . info ( "Do the PR from %s" , sdk_git_owner ) head_name = "{}:{}" . format ( sdk_git_owner , branch_name ) else : head_name = branch_name sdk_git_repo = github_con . get_repo ( sdk_git_id ) sdk_git_owner = sdk_git_repo . owner . login try : github_pr = sdk_pr_target_repo . create_pull ( title = 'Automatic PR from {}' . format ( branch_name ) , body = pr_body , head = head_name , base = base_branch ) except GithubException as err : if err . status == 422 and err . data [ 'errors' ] [ 0 ] . get ( 'message' , '' ) . startswith ( 'A pull request already exists' ) : matching_pulls = sdk_pr_target_repo . get_pulls ( base = base_branch , head = sdk_git_owner + ":" + head_name ) matching_pull = matching_pulls [ 0 ] _LOGGER . info ( 'PR already exists: %s' , matching_pull . html_url ) return matching_pull raise _LOGGER . info ( "Made PR %s" , github_pr . html_url ) return github_pr
3,273
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L214-L250
[ "def", "restore", "(", "archive", ",", "oqdata", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "oqdata", ")", ":", "sys", ".", "exit", "(", "'%s exists already'", "%", "oqdata", ")", "if", "'://'", "in", "archive", ":", "# get the zip archive from an URL", "resp", "=", "requests", ".", "get", "(", "archive", ")", "_", ",", "archive", "=", "archive", ".", "rsplit", "(", "'/'", ",", "1", ")", "with", "open", "(", "archive", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "resp", ".", "content", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "archive", ")", ":", "sys", ".", "exit", "(", "'%s does not exist'", "%", "archive", ")", "t0", "=", "time", ".", "time", "(", ")", "oqdata", "=", "os", ".", "path", ".", "abspath", "(", "oqdata", ")", "assert", "archive", ".", "endswith", "(", "'.zip'", ")", ",", "archive", "os", ".", "mkdir", "(", "oqdata", ")", "zipfile", ".", "ZipFile", "(", "archive", ")", ".", "extractall", "(", "oqdata", ")", "dbpath", "=", "os", ".", "path", ".", "join", "(", "oqdata", ",", "'db.sqlite3'", ")", "db", "=", "Db", "(", "sqlite3", ".", "connect", ",", "dbpath", ",", "isolation_level", "=", "None", ",", "detect_types", "=", "sqlite3", ".", "PARSE_DECLTYPES", ")", "n", "=", "0", "for", "fname", "in", "os", ".", "listdir", "(", "oqdata", ")", ":", "mo", "=", "re", ".", "match", "(", "'calc_(\\d+)\\.hdf5'", ",", "fname", ")", "if", "mo", ":", "job_id", "=", "int", "(", "mo", ".", "group", "(", "1", ")", ")", "fullname", "=", "os", ".", "path", ".", "join", "(", "oqdata", ",", "fname", ")", "[", ":", "-", "5", "]", "# strip .hdf5", "db", "(", "\"UPDATE job SET user_name=?x, ds_calc_dir=?x WHERE id=?x\"", ",", "getpass", ".", "getuser", "(", ")", ",", "fullname", ",", "job_id", ")", "safeprint", "(", "'Restoring '", "+", "fname", ")", "n", "+=", "1", "dt", "=", "time", ".", "time", "(", ")", "-", "t0", "safeprint", "(", "'Extracted %d calculations into %s in %d seconds'", "%", "(", "n", ",", "oqdata", ",", "dt", ")", 
")" ]
Clear the readonly bit and reattempt the removal
def remove_readonly ( func , path , _ ) : os . chmod ( path , stat . S_IWRITE ) func ( path )
3,274
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L253-L256
[ "def", "decorate", "(", "self", ",", "app", ")", ":", "from", "functools", "import", "wraps", "@", "wraps", "(", "app", ")", "def", "decorated", "(", "environ", ",", "start_response", ")", ":", "# capture any start_response from the app", "app_response", "=", "{", "}", "app_response", "[", "'status'", "]", "=", "\"200 OK\"", "app_response", "[", "'headers'", "]", "=", "[", "]", "app_response", "[", "'written'", "]", "=", "BytesIO", "(", ")", "def", "custom_start_response", "(", "status", ",", "headers", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "app_response", "[", "'status'", "]", "=", "status", "app_response", "[", "'headers'", "]", "=", "headers", "app_response", "[", "'args'", "]", "=", "args", "app_response", "[", "'kwargs'", "]", "=", "kwargs", "return", "app_response", "[", "'written'", "]", ".", "write", "returned", "=", "app", "(", "environ", ",", "custom_start_response", ")", "# callbacks from the serialization", "def", "set_http_code", "(", "status", ")", ":", "app_response", "[", "'status'", "]", "=", "str", "(", "status", ")", "def", "set_header", "(", "header", ",", "value", ")", ":", "app_response", "[", "'headers'", "]", "=", "[", "(", "h", ",", "v", ")", "for", "(", "h", ",", "v", ")", "in", "app_response", "[", "'headers'", "]", "if", "h", ".", "lower", "(", ")", "!=", "header", ".", "lower", "(", ")", "]", "app_response", "[", "'headers'", "]", ".", "append", "(", "(", "header", ",", "value", ")", ")", "def", "set_content_type", "(", "content_type", ")", ":", "set_header", "(", "'Content-Type'", ",", "content_type", ")", "# do the serialization", "accept", "=", "environ", ".", "get", "(", "'HTTP_ACCEPT'", ",", "''", ")", "new_return", "=", "self", ".", "output", "(", "returned", ",", "accept", ",", "set_http_code", ",", "set_content_type", ")", "# set the Vary header", "vary_headers", "=", "(", "v", "for", "(", "h", ",", "v", ")", "in", "app_response", "[", "'headers'", "]", "if", "h", ".", "lower", "(", ")", "==", "'vary'", ")", 
"vary_elements", "=", "list", "(", "itertools", ".", "chain", "(", "*", "[", "v", ".", "split", "(", "','", ")", "for", "v", "in", "vary_headers", "]", ")", ")", "vary_elements", "=", "list", "(", "set", "(", "[", "v", ".", "strip", "(", ")", "for", "v", "in", "vary_elements", "]", ")", ")", "if", "'*'", "not", "in", "vary_elements", "and", "'accept'", "not", "in", "(", "v", ".", "lower", "(", ")", "for", "v", "in", "vary_elements", ")", ":", "vary_elements", ".", "append", "(", "'Accept'", ")", "set_header", "(", "'Vary'", ",", "', '", ".", "join", "(", "vary_elements", ")", ")", "# pass on the result to the parent WSGI server", "parent_writer", "=", "start_response", "(", "app_response", "[", "'status'", "]", ",", "app_response", "[", "'headers'", "]", ",", "*", "app_response", ".", "get", "(", "'args'", ",", "[", "]", ")", ",", "*", "*", "app_response", ".", "get", "(", "'kwargs'", ",", "{", "}", ")", ")", "written", "=", "app_response", "[", "'written'", "]", ".", "getvalue", "(", ")", "if", "len", "(", "written", ")", ">", "0", ":", "parent_writer", "(", "written", ")", "return", "new_return", "return", "decorated" ]
Context manager to avoid readonly problem while cleanup the temp dir .
def manage_git_folder ( gh_token , temp_dir , git_id , * , pr_number = None ) : _LOGGER . debug ( "Git ID %s" , git_id ) if Path ( git_id ) . exists ( ) : yield git_id return # Do not erase a local folder, just skip here # Clone the specific branch split_git_id = git_id . split ( "@" ) branch = split_git_id [ 1 ] if len ( split_git_id ) > 1 else None clone_to_path ( gh_token , temp_dir , split_git_id [ 0 ] , branch_or_commit = branch , pr_number = pr_number ) try : yield temp_dir # Pre-cleanup for Windows http://bugs.python.org/issue26660 finally : _LOGGER . debug ( "Preclean Rest folder" ) shutil . rmtree ( temp_dir , onerror = remove_readonly )
3,275
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L259-L278
[ "def", "hook_changed", "(", "self", ",", "hook_name", ",", "widget", ",", "new_data", ")", ":", "if", "hook_name", "==", "'song'", ":", "self", ".", "song_changed", "(", "widget", ",", "new_data", ")", "elif", "hook_name", "==", "'state'", ":", "self", ".", "state_changed", "(", "widget", ",", "new_data", ")", "elif", "hook_name", "==", "'elapsed_and_total'", ":", "elapsed", ",", "total", "=", "new_data", "self", ".", "time_changed", "(", "widget", ",", "elapsed", ",", "total", ")" ]
Returns a GithubLink to a raw content .
def as_raw_link ( self ) : if self . link_type == "raw" : return self # Can be discussed if we need an hard copy, or fail if self . link_type != "blob" : raise ValueError ( "Cannot get a download link from a tree link" ) return self . __class__ ( self . gitid , "raw" , self . branch_or_commit , self . path , self . token )
3,276
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L316-L329
[ "def", "_broadcast_shapes", "(", "s1", ",", "s2", ")", ":", "n1", "=", "len", "(", "s1", ")", "n2", "=", "len", "(", "s2", ")", "n", "=", "max", "(", "n1", ",", "n2", ")", "res", "=", "[", "1", "]", "*", "n", "for", "i", "in", "range", "(", "n", ")", ":", "if", "i", ">=", "n1", ":", "c1", "=", "1", "else", ":", "c1", "=", "s1", "[", "n1", "-", "1", "-", "i", "]", "if", "i", ">=", "n2", ":", "c2", "=", "1", "else", ":", "c2", "=", "s2", "[", "n2", "-", "1", "-", "i", "]", "if", "c1", "==", "1", ":", "rc", "=", "c2", "elif", "c2", "==", "1", "or", "c1", "==", "c2", ":", "rc", "=", "c1", "else", ":", "raise", "ValueError", "(", "'array shapes %r and %r are not compatible'", "%", "(", "s1", ",", "s2", ")", ")", "res", "[", "n", "-", "1", "-", "i", "]", "=", "rc", "return", "tuple", "(", "res", ")" ]
Mimic issue API so we can use it everywhere . Return dashboard comment .
def create_comment ( self , text ) : return DashboardComment . get_or_create ( self . _issue_or_pr , self . _header , text )
3,277
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L336-L340
[ "def", "rebalance_replication_groups", "(", "self", ")", ":", "# Balance replicas over replication-groups for each partition", "if", "any", "(", "b", ".", "inactive", "for", "b", "in", "six", ".", "itervalues", "(", "self", ".", "cluster_topology", ".", "brokers", ")", ")", ":", "self", ".", "log", ".", "error", "(", "\"Impossible to rebalance replication groups because of inactive \"", "\"brokers.\"", ")", "raise", "RebalanceError", "(", "\"Impossible to rebalance replication groups because of inactive \"", "\"brokers\"", ")", "# Balance replica-count over replication-groups", "self", ".", "rebalance_replicas", "(", ")", "# Balance partition-count over replication-groups", "self", ".", "_rebalance_groups_partition_cnt", "(", ")" ]
Get or create the dashboard comment in this issue .
def get_or_create ( cls , issue , header , text = None ) : for comment in get_comments ( issue ) : try : if comment . body . splitlines ( ) [ 0 ] == header : obj = cls ( comment , header ) break except IndexError : # The comment body is empty pass # Hooooooo, no dashboard comment, let's create one else : comment = create_comment ( issue , header ) obj = cls ( comment , header ) if text : obj . edit ( text ) return obj
3,278
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/github_tools.py#L348-L364
[ "def", "dispatch", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "REG_VALIDATION_STR", "not", "in", "request", ".", "session", ":", "return", "HttpResponseRedirect", "(", "reverse", "(", "'registration'", ")", ")", "try", ":", "self", ".", "temporaryRegistration", "=", "TemporaryRegistration", ".", "objects", ".", "get", "(", "id", "=", "self", ".", "request", ".", "session", "[", "REG_VALIDATION_STR", "]", ".", "get", "(", "'temporaryRegistrationId'", ")", ")", "except", "ObjectDoesNotExist", ":", "messages", ".", "error", "(", "request", ",", "_", "(", "'Invalid registration identifier passed to sign-up form.'", ")", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'registration'", ")", ")", "expiry", "=", "parse_datetime", "(", "self", ".", "request", ".", "session", "[", "REG_VALIDATION_STR", "]", ".", "get", "(", "'temporaryRegistrationExpiry'", ",", "''", ")", ",", ")", "if", "not", "expiry", "or", "expiry", "<", "timezone", ".", "now", "(", ")", ":", "messages", ".", "info", "(", "request", ",", "_", "(", "'Your registration session has expired. Please try again.'", ")", ")", "return", "HttpResponseRedirect", "(", "reverse", "(", "'registration'", ")", ")", "return", "super", "(", "StudentInfoView", ",", "self", ")", ".", "dispatch", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Disconnects a function from a hook
def disconnect ( self , name , func , dispatch_uid = None ) : try : signal = self . _registry [ name ] except KeyError : return signal . disconnect ( func , dispatch_uid = dispatch_uid )
3,279
https://github.com/nitely/django-hooks/blob/26ea2150c9be110e90b9ee60fbfd1065ac30ab1d/hooks/signalhook.py#L56-L70
[ "def", "read_data", "(", "self", ",", "blocksize", "=", "4096", ")", ":", "frames", "=", "ctypes", ".", "c_uint", "(", "blocksize", "//", "self", ".", "_client_fmt", ".", "mBytesPerFrame", ")", "buf", "=", "ctypes", ".", "create_string_buffer", "(", "blocksize", ")", "buflist", "=", "AudioBufferList", "(", ")", "buflist", ".", "mNumberBuffers", "=", "1", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mNumberChannels", "=", "self", ".", "_client_fmt", ".", "mChannelsPerFrame", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mDataByteSize", "=", "blocksize", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mData", "=", "ctypes", ".", "cast", "(", "buf", ",", "ctypes", ".", "c_void_p", ")", "while", "True", ":", "check", "(", "_coreaudio", ".", "ExtAudioFileRead", "(", "self", ".", "_obj", ",", "ctypes", ".", "byref", "(", "frames", ")", ",", "ctypes", ".", "byref", "(", "buflist", ")", ")", ")", "assert", "buflist", ".", "mNumberBuffers", "==", "1", "size", "=", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mDataByteSize", "if", "not", "size", ":", "break", "data", "=", "ctypes", ".", "cast", "(", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mData", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_char", ")", ")", "blob", "=", "data", "[", ":", "size", "]", "yield", "blob" ]
Use the factories to create a host object .
def create_host ( factories , value ) : data = [ value ] for func in factories : try : return func ( value ) except InvalidHostError as ex : data . append ( str ( ex ) ) msg_tpl = ( "Failed to create a host object for '{}', raising the following errors" " in the process:" + "\n" . join ( data ) ) raise InvalidHostError ( msg_tpl . format ( value ) )
3,280
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/structures.py#L190-L216
[ "def", "cancel", "(", "self", ",", "at_period_end", "=", "djstripe_settings", ".", "CANCELLATION_AT_PERIOD_END", ")", ":", "# If plan has trial days and customer cancels before", "# trial period ends, then end subscription now,", "# i.e. at_period_end=False", "if", "self", ".", "trial_end", "and", "self", ".", "trial_end", ">", "timezone", ".", "now", "(", ")", ":", "at_period_end", "=", "False", "if", "at_period_end", ":", "stripe_subscription", "=", "self", ".", "api_retrieve", "(", ")", "stripe_subscription", ".", "cancel_at_period_end", "=", "True", "stripe_subscription", ".", "save", "(", ")", "else", ":", "try", ":", "stripe_subscription", "=", "self", ".", "_api_delete", "(", ")", "except", "InvalidRequestError", "as", "exc", ":", "if", "\"No such subscription:\"", "in", "str", "(", "exc", ")", ":", "# cancel() works by deleting the subscription. The object still", "# exists in Stripe however, and can still be retrieved.", "# If the subscription was already canceled (status=canceled),", "# that api_retrieve() call will fail with \"No such subscription\".", "# However, this may also happen if the subscription legitimately", "# does not exist, in which case the following line will re-raise.", "stripe_subscription", "=", "self", ".", "api_retrieve", "(", ")", "else", ":", "raise", "return", "Subscription", ".", "sync_from_stripe_data", "(", "stripe_subscription", ")" ]
Test if the object is a subdomain of the other .
def is_subdomain ( self , other ) : compared = other . value if hasattr ( other , 'value' ) else other try : return self . value . is_subdomain ( compared ) except AttributeError : return False
3,281
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/structures.py#L84-L94
[ "def", "write_FORCE_CONSTANTS", "(", "force_constants", ",", "filename", "=", "'FORCE_CONSTANTS'", ",", "p2s_map", "=", "None", ")", ":", "lines", "=", "get_FORCE_CONSTANTS_lines", "(", "force_constants", ",", "p2s_map", "=", "p2s_map", ")", "with", "open", "(", "filename", ",", "'w'", ")", "as", "w", ":", "w", ".", "write", "(", "\"\\n\"", ".", "join", "(", "lines", ")", ")" ]
Assemble the given iterable of mnemonics operands and lables .
def assemble ( code ) : final = [ ] # We need to make three passes, because we cannot know the offset for # jump labels until after we've figured out the PC for each instructions, # which is complicated by the variable-width instructions set and # alignment padding. for line in code : if isinstance ( line , Label ) : final . append ( line ) continue mnemonic , operands = line [ 0 ] , line [ 1 : ] operand_fmts = opcode_table [ mnemonic ] [ 'operands' ] # We need to coerce each opcodes operands into their # final `Operand` form. final_operands = [ ] for i , operand in enumerate ( operands ) : if isinstance ( operand , Operand ) : # Already in Operand form. final_operands . append ( operand ) elif isinstance ( operand , Constant ) : # Convert constants into CONSTANT_INDEX'es final_operands . append ( Operand ( OperandTypes . CONSTANT_INDEX , operand . index ) ) elif isinstance ( operand , dict ) : # lookupswitch's operand is a dict as # a special usability case. final_operands . append ( operand ) elif isinstance ( operand , Label ) : final_operands . append ( operand ) else : # For anything else, lookup that opcode's operand # type from its definition. final_operands . append ( Operand ( operand_fmts [ i ] [ 1 ] , operand ) ) # Build the final, immutable `Instruction`. final . append ( Instruction . create ( mnemonic , final_operands ) ) label_pcs = { } # The second pass, find the absolute PC for each label. current_pc = 0 for ins in final : if isinstance ( ins , Label ) : label_pcs [ ins . name ] = current_pc continue # size_on_disk must know the current pc because of alignment on # tableswitch and lookupswitch. current_pc += ins . size_on_disk ( current_pc ) # The third pass, now that we know where each label is we can figure # out the offset for each jump. current_pc = 0 for ins in final : if isinstance ( ins , Label ) : continue for i , operand in enumerate ( ins . 
operands ) : if isinstance ( operand , dict ) : # lookupswitch is a special case for k , v in operand . items ( ) : if isinstance ( v , Label ) : operand [ k ] = Operand ( 40 , label_pcs [ v . name ] - current_pc ) elif isinstance ( operand , Label ) : ins . operands [ i ] = Operand ( 40 , label_pcs [ operand . name ] - current_pc ) current_pc += ins . size_on_disk ( current_pc ) yield ins
3,282
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/assemble.py#L15-L115
[ "def", "add_sim_options", "(", "p", ")", ":", "p", ".", "add_option", "(", "\"--distance\"", ",", "default", "=", "500", ",", "type", "=", "\"int\"", ",", "help", "=", "\"Outer distance between the two ends [default: %default]\"", ")", "p", ".", "add_option", "(", "\"--readlen\"", ",", "default", "=", "150", ",", "type", "=", "\"int\"", ",", "help", "=", "\"Length of the read\"", ")", "p", ".", "set_depth", "(", "depth", "=", "10", ")", "p", ".", "set_outfile", "(", "outfile", "=", "None", ")" ]
Register a hook .
def register ( self , hook ) : assert callable ( hook ) , "Hook must be a callable" assert issubclass ( hook , HookBase ) , "The hook does not inherit from HookBase" self . _registry . append ( hook )
3,283
https://github.com/nitely/django-hooks/blob/26ea2150c9be110e90b9ee60fbfd1065ac30ab1d/hooks/viewhook.py#L101-L112
[ "def", "fromtif", "(", "path", ",", "ext", "=", "'tif'", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "recursive", "=", "False", ",", "nplanes", "=", "None", ",", "npartitions", "=", "None", ",", "labels", "=", "None", ",", "engine", "=", "None", ",", "credentials", "=", "None", ",", "discard_extra", "=", "False", ")", ":", "from", "tifffile", "import", "TiffFile", "if", "nplanes", "is", "not", "None", "and", "nplanes", "<=", "0", ":", "raise", "ValueError", "(", "'nplanes must be positive if passed, got %d'", "%", "nplanes", ")", "def", "getarray", "(", "idx_buffer_filename", ")", ":", "idx", ",", "buf", ",", "fname", "=", "idx_buffer_filename", "fbuf", "=", "BytesIO", "(", "buf", ")", "tfh", "=", "TiffFile", "(", "fbuf", ")", "ary", "=", "tfh", ".", "asarray", "(", ")", "pageCount", "=", "ary", ".", "shape", "[", "0", "]", "if", "nplanes", "is", "not", "None", ":", "extra", "=", "pageCount", "%", "nplanes", "if", "extra", ":", "if", "discard_extra", ":", "pageCount", "=", "pageCount", "-", "extra", "logging", ".", "getLogger", "(", "'thunder'", ")", ".", "warn", "(", "'Ignored %d pages in file %s'", "%", "(", "extra", ",", "fname", ")", ")", "else", ":", "raise", "ValueError", "(", "\"nplanes '%d' does not evenly divide '%d in file %s'\"", "%", "(", "nplanes", ",", "pageCount", ",", "fname", ")", ")", "values", "=", "[", "ary", "[", "i", ":", "(", "i", "+", "nplanes", ")", "]", "for", "i", "in", "range", "(", "0", ",", "pageCount", ",", "nplanes", ")", "]", "else", ":", "values", "=", "[", "ary", "]", "tfh", ".", "close", "(", ")", "if", "ary", ".", "ndim", "==", "3", ":", "values", "=", "[", "val", ".", "squeeze", "(", ")", "for", "val", "in", "values", "]", "nvals", "=", "len", "(", "values", ")", "keys", "=", "[", "(", "idx", "*", "nvals", "+", "timepoint", ",", ")", "for", "timepoint", "in", "range", "(", "nvals", ")", "]", "return", "zip", "(", "keys", ",", "values", ")", "recount", "=", "False", "if", "nplanes", "is", "None", "else", "True", 
"data", "=", "frompath", "(", "path", ",", "accessor", "=", "getarray", ",", "ext", "=", "ext", ",", "start", "=", "start", ",", "stop", "=", "stop", ",", "recursive", "=", "recursive", ",", "npartitions", "=", "npartitions", ",", "recount", "=", "recount", ",", "labels", "=", "labels", ",", "engine", "=", "engine", ",", "credentials", "=", "credentials", ")", "if", "engine", "is", "not", "None", "and", "npartitions", "is", "not", "None", "and", "data", ".", "npartitions", "(", ")", "<", "npartitions", ":", "data", "=", "data", ".", "repartition", "(", "npartitions", ")", "return", "data" ]
Save all the forms
def save ( self , * args , * * kwargs ) : return [ ( form , form . save ( * args , * * kwargs ) ) for form in self . instances ]
3,284
https://github.com/nitely/django-hooks/blob/26ea2150c9be110e90b9ee60fbfd1065ac30ab1d/hooks/formhook.py#L41-L53
[ "def", "_generate_noise_temporal", "(", "stimfunction_tr", ",", "tr_duration", ",", "dimensions", ",", "template", ",", "mask", ",", "noise_dict", ")", ":", "# Set up common parameters", "# How many TRs are there", "trs", "=", "len", "(", "stimfunction_tr", ")", "# What time points are sampled by a TR?", "timepoints", "=", "list", "(", "np", ".", "linspace", "(", "0", ",", "(", "trs", "-", "1", ")", "*", "tr_duration", ",", "trs", ")", ")", "# Preset the volume", "noise_volume", "=", "np", ".", "zeros", "(", "(", "dimensions", "[", "0", "]", ",", "dimensions", "[", "1", "]", ",", "dimensions", "[", "2", "]", ",", "trs", ")", ")", "# Generate the drift noise", "if", "noise_dict", "[", "'drift_sigma'", "]", "!=", "0", ":", "# Calculate the drift time course", "noise", "=", "_generate_noise_temporal_drift", "(", "trs", ",", "tr_duration", ",", ")", "# Create a volume with the drift properties", "volume", "=", "np", ".", "ones", "(", "dimensions", ")", "# Combine the volume and noise", "noise_volume", "+=", "np", ".", "multiply", ".", "outer", "(", "volume", ",", "noise", ")", "*", "noise_dict", "[", "'drift_sigma'", "]", "# Generate the physiological noise", "if", "noise_dict", "[", "'physiological_sigma'", "]", "!=", "0", ":", "# Calculate the physiological time course", "noise", "=", "_generate_noise_temporal_phys", "(", "timepoints", ",", ")", "# Create a brain shaped volume with similar smoothing properties", "volume", "=", "_generate_noise_spatial", "(", "dimensions", "=", "dimensions", ",", "mask", "=", "mask", ",", "fwhm", "=", "noise_dict", "[", "'fwhm'", "]", ",", ")", "# Combine the volume and noise", "noise_volume", "+=", "np", ".", "multiply", ".", "outer", "(", "volume", ",", "noise", ")", "*", "noise_dict", "[", "'physiological_sigma'", "]", "# Generate the AR noise", "if", "noise_dict", "[", "'auto_reg_sigma'", "]", "!=", "0", ":", "# Calculate the AR time course volume", "noise", "=", "_generate_noise_temporal_autoregression", "(", "timepoints", ",", 
"noise_dict", ",", "dimensions", ",", "mask", ",", ")", "# Combine the volume and noise", "noise_volume", "+=", "noise", "*", "noise_dict", "[", "'auto_reg_sigma'", "]", "# Generate the task related noise", "if", "noise_dict", "[", "'task_sigma'", "]", "!=", "0", "and", "np", ".", "sum", "(", "stimfunction_tr", ")", ">", "0", ":", "# Calculate the task based noise time course", "noise", "=", "_generate_noise_temporal_task", "(", "stimfunction_tr", ",", ")", "# Create a brain shaped volume with similar smoothing properties", "volume", "=", "_generate_noise_spatial", "(", "dimensions", "=", "dimensions", ",", "mask", "=", "mask", ",", "fwhm", "=", "noise_dict", "[", "'fwhm'", "]", ",", ")", "# Combine the volume and noise", "noise_volume", "+=", "np", ".", "multiply", ".", "outer", "(", "volume", ",", "noise", ")", "*", "noise_dict", "[", "'task_sigma'", "]", "# Finally, z score each voxel so things mix nicely", "noise_volume", "=", "stats", ".", "zscore", "(", "noise_volume", ",", "3", ")", "# If it is a nan it is because you just divided by zero (since some", "# voxels are zeros in the template)", "noise_volume", "[", "np", ".", "isnan", "(", "noise_volume", ")", "]", "=", "0", "return", "noise_volume" ]
Lookup all builtin Attribute subclasses load them and return a dict
def get_attribute_classes ( ) -> Dict [ str , Attribute ] : attribute_children = pkgutil . iter_modules ( importlib . import_module ( 'jawa.attributes' ) . __path__ , prefix = 'jawa.attributes.' ) result = { } for _ , name , _ in attribute_children : classes = inspect . getmembers ( importlib . import_module ( name ) , lambda c : ( inspect . isclass ( c ) and issubclass ( c , Attribute ) and c is not Attribute ) ) for class_name , class_ in classes : attribute_name = getattr ( class_ , 'ATTRIBUTE_NAME' , class_name [ : - 9 ] ) result [ attribute_name ] = class_ return result
3,285
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/attribute.py#L161-L184
[ "def", "_repair_column", "(", "self", ")", ":", "# Repair any title columns", "check_for_title", "=", "True", "for", "column_index", "in", "range", "(", "self", ".", "start", "[", "1", "]", ",", "self", ".", "end", "[", "1", "]", ")", ":", "table_column", "=", "TableTranspose", "(", "self", ".", "table", ")", "[", "column_index", "]", "column_start", "=", "table_column", "[", "self", ".", "start", "[", "0", "]", "]", "# Only iterate through columns starting with a blank cell", "if", "check_for_title", "and", "is_empty_cell", "(", "column_start", ")", ":", "self", ".", "_stringify_column", "(", "column_index", ")", "# Check for year titles in column or row", "elif", "(", "isinstance", "(", "column_start", ",", "basestring", ")", "and", "re", ".", "search", "(", "allregex", ".", "year_regex", ",", "column_start", ")", ")", ":", "self", ".", "_check_stringify_year_column", "(", "column_index", ")", "else", ":", "check_for_title", "=", "False" ]
Read the ConstantPool from the file - like object source .
def unpack ( self , source : IO ) : count = unpack ( '>H' , source . read ( 2 ) ) [ 0 ] for _ in repeat ( None , count ) : name_index , length = unpack ( '>HI' , source . read ( 6 ) ) info_blob = source . read ( length ) self . _table . append ( ( name_index , info_blob ) )
3,286
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/attribute.py#L67-L82
[ "def", "postprocess_segments", "(", "self", ")", ":", "# make segs a list of mask arrays, it's easier to store", "# as there is a hdf5 equivalent", "for", "iseg", ",", "seg", "in", "enumerate", "(", "self", ".", "segs", ")", ":", "mask", "=", "np", ".", "zeros", "(", "self", ".", "_adata", ".", "shape", "[", "0", "]", ",", "dtype", "=", "bool", ")", "mask", "[", "seg", "]", "=", "True", "self", ".", "segs", "[", "iseg", "]", "=", "mask", "# convert to arrays", "self", ".", "segs", "=", "np", ".", "array", "(", "self", ".", "segs", ")", "self", ".", "segs_tips", "=", "np", ".", "array", "(", "self", ".", "segs_tips", ")" ]
Write the AttributeTable to the file - like object out .
def pack ( self , out : IO ) : out . write ( pack ( '>H' , len ( self . _table ) ) ) for attribute in self : info = attribute . pack ( ) out . write ( pack ( '>HI' , attribute . name . index , len ( info ) ) ) out . write ( info )
3,287
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/attribute.py#L103-L122
[ "def", "get_container_id", "(", "self", ",", "container_id", "=", "None", ")", ":", "# The user must provide a container_id, or have one with the client", "if", "container_id", "==", "None", "and", "self", ".", "container_id", "==", "None", ":", "bot", ".", "exit", "(", "'You must provide a container_id.'", ")", "# Choose whichever is not None, with preference for function provided", "container_id", "=", "container_id", "or", "self", ".", "container_id", "return", "container_id" ]
Creates a new attribute of type_ appending it to the attribute table and returning it .
def create ( self , type_ , * args , * * kwargs ) -> Any : attribute = type_ ( self , * args , * * kwargs ) self . _table . append ( attribute ) return attribute
3,288
https://github.com/TkTech/Jawa/blob/94c8424e699029ac33fbc0e866fff0ecb2742289/jawa/attribute.py#L124-L131
[ "def", "get_lang_dict", "(", "self", ")", ":", "r", "=", "self", ".", "yandex_translate_request", "(", "\"getLangs\"", ")", "self", ".", "handle_errors", "(", "r", ")", "return", "r", ".", "json", "(", ")", "[", "\"langs\"", "]" ]
Get valid location header values from responses .
def get_locations ( self , url ) : if not is_valid_url ( url ) : raise InvalidURLError ( '{} is not a valid URL' . format ( url ) ) try : response = self . session . head ( url ) except ( ConnectionError , InvalidSchema , Timeout ) : raise StopIteration try : generator = self . session . resolve_redirects ( response , response . request ) for response in generator : yield response . url except InvalidURL : pass except ( ConnectionError , InvalidSchema , Timeout ) as error : last_url = response . headers [ 'location' ] if isinstance ( error , Timeout ) or is_valid_url ( last_url ) : yield last_url
3,289
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/composites.py#L68-L98
[ "def", "set_table", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "with", "self", ".", "connection", "(", "*", "*", "kwargs", ")", "as", "connection", ":", "kwargs", "[", "'connection'", "]", "=", "connection", "if", "self", ".", "has_table", "(", "str", "(", "schema", ")", ",", "*", "*", "kwargs", ")", ":", "return", "True", "try", ":", "with", "self", ".", "transaction", "(", "*", "*", "kwargs", ")", ":", "self", ".", "_set_table", "(", "schema", ",", "*", "*", "kwargs", ")", "for", "index_name", ",", "index", "in", "schema", ".", "indexes", ".", "items", "(", ")", ":", "self", ".", "set_index", "(", "schema", ",", "name", "=", "index", ".", "name", ",", "fields", "=", "index", ".", "fields", ",", "connection", "=", "connection", ",", "*", "*", "index", ".", "options", ")", "except", "InterfaceError", ":", "# check to see if this table now exists, it might have been created", "# in another thread", "if", "not", "self", ".", "has_table", "(", "schema", ",", "*", "*", "kwargs", ")", ":", "raise" ]
Get valid location header values for all given URLs .
def get_new_locations ( self , urls ) : seen = set ( urls ) for i in urls : for k in self . get_locations ( i ) : if k not in seen : seen . add ( k ) yield k
3,290
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/composites.py#L100-L116
[ "def", "union", "(", "self", ",", "*", "dstreams", ")", ":", "if", "not", "dstreams", ":", "raise", "ValueError", "(", "\"should have at least one DStream to union\"", ")", "if", "len", "(", "dstreams", ")", "==", "1", ":", "return", "dstreams", "[", "0", "]", "if", "len", "(", "set", "(", "s", ".", "_jrdd_deserializer", "for", "s", "in", "dstreams", ")", ")", ">", "1", ":", "raise", "ValueError", "(", "\"All DStreams should have same serializer\"", ")", "if", "len", "(", "set", "(", "s", ".", "_slideDuration", "for", "s", "in", "dstreams", ")", ")", ">", "1", ":", "raise", "ValueError", "(", "\"All DStreams should have same slide duration\"", ")", "cls", "=", "SparkContext", ".", "_jvm", ".", "org", ".", "apache", ".", "spark", ".", "streaming", ".", "api", ".", "java", ".", "JavaDStream", "jdstreams", "=", "SparkContext", ".", "_gateway", ".", "new_array", "(", "cls", ",", "len", "(", "dstreams", ")", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "dstreams", ")", ")", ":", "jdstreams", "[", "i", "]", "=", "dstreams", "[", "i", "]", ".", "_jdstream", "return", "DStream", "(", "self", ".", "_jssc", ".", "union", "(", "jdstreams", ")", ",", "self", ",", "dstreams", "[", "0", "]", ".", "_jrdd_deserializer", ")" ]
Get URLs and their redirection addresses .
def get_urls_and_locations ( self , urls ) : location_generator = self . get_new_locations ( urls ) initial_cache = list ( set ( urls ) ) return CachedIterable ( location_generator , initial_cache )
3,291
https://github.com/piotr-rusin/spam-lists/blob/fd616e8761b28f3eaa503fee5e45f7748e8f88f2/spam_lists/composites.py#L118-L127
[ "def", "find_video_file", "(", "rtdc_dataset", ")", ":", "video", "=", "None", "if", "rtdc_dataset", ".", "_fdir", ".", "exists", "(", ")", ":", "# Cell images (video)", "videos", "=", "[", "v", ".", "name", "for", "v", "in", "rtdc_dataset", ".", "_fdir", ".", "rglob", "(", "\"*.avi\"", ")", "]", "# Filter videos according to measurement number", "meas_id", "=", "rtdc_dataset", ".", "_mid", "videos", "=", "[", "v", "for", "v", "in", "videos", "if", "v", ".", "split", "(", "\"_\"", ")", "[", "0", "]", "==", "meas_id", "]", "videos", ".", "sort", "(", ")", "if", "len", "(", "videos", ")", "!=", "0", ":", "# Defaults to first avi file", "video", "=", "videos", "[", "0", "]", "# g/q video file names. q comes first.", "for", "v", "in", "videos", ":", "if", "v", ".", "endswith", "(", "\"imag.avi\"", ")", ":", "video", "=", "v", "break", "# add this here, because fRT-DC measurements also contain", "# videos ..._proc.avi", "elif", "v", ".", "endswith", "(", "\"imaq.avi\"", ")", ":", "video", "=", "v", "break", "if", "video", "is", "None", ":", "return", "None", "else", ":", "return", "rtdc_dataset", ".", "_fdir", "/", "video" ]
An OCSP GET request contains the DER - in - base64 encoded OCSP request in the HTTP request URL .
def _handle_get ( self , request_data ) : der = base64 . b64decode ( request_data ) ocsp_request = self . _parse_ocsp_request ( der ) return self . _build_http_response ( ocsp_request )
3,292
https://github.com/threema-ch/ocspresponder/blob/b9486af68dd02b84e01bedabe4f6843a6ff0f698/ocspresponder/__init__.py#L111-L118
[ "def", "_convert_to_closest_type", "(", "value", ")", ":", "value", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_bool", "(", "value", ".", "strip", "(", ")", ")", "if", "isinstance", "(", "value", ",", "bool", ")", ":", "return", "value", "return", "salt", ".", "utils", ".", "stringutils", ".", "to_none", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_num", "(", "value", ")", ")" ]
An OCSP POST request contains the DER encoded OCSP request in the HTTP request body .
def _handle_post ( self ) : der = request . body . read ( ) ocsp_request = self . _parse_ocsp_request ( der ) return self . _build_http_response ( ocsp_request )
3,293
https://github.com/threema-ch/ocspresponder/blob/b9486af68dd02b84e01bedabe4f6843a6ff0f698/ocspresponder/__init__.py#L120-L127
[ "def", "_convert_to_closest_type", "(", "value", ")", ":", "value", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_bool", "(", "value", ".", "strip", "(", ")", ")", "if", "isinstance", "(", "value", ",", "bool", ")", ":", "return", "value", "return", "salt", ".", "utils", ".", "stringutils", ".", "to_none", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_num", "(", "value", ")", ")" ]
Create and return an OCSP response from an OCSP request .
def _build_ocsp_response ( self , ocsp_request : OCSPRequest ) -> OCSPResponse : # Get the certificate serial tbs_request = ocsp_request [ 'tbs_request' ] request_list = tbs_request [ 'request_list' ] if len ( request_list ) != 1 : logger . warning ( 'Received OCSP request with multiple sub requests' ) raise NotImplemented ( 'Combined requests not yet supported' ) single_request = request_list [ 0 ] # TODO: Support more than one request req_cert = single_request [ 'req_cert' ] serial = req_cert [ 'serial_number' ] . native # Check certificate status try : certificate_status , revocation_date = self . _validate ( serial ) except Exception as e : logger . exception ( 'Could not determine certificate status: %s' , e ) return self . _fail ( ResponseStatus . internal_error ) # Retrieve certificate try : subject_cert_contents = self . _cert_retrieve ( serial ) except Exception as e : logger . exception ( 'Could not retrieve certificate with serial %s: %s' , serial , e ) return self . _fail ( ResponseStatus . internal_error ) # Parse certificate try : subject_cert = asymmetric . load_certificate ( subject_cert_contents . encode ( 'utf8' ) ) except Exception as e : logger . exception ( 'Returned certificate with serial %s is invalid: %s' , serial , e ) return self . _fail ( ResponseStatus . internal_error ) # Build the response builder = OCSPResponseBuilder ( * * { 'response_status' : ResponseStatus . successful . value , 'certificate' : subject_cert , 'certificate_status' : certificate_status . value , 'revocation_date' : revocation_date , } ) # Parse extensions for extension in tbs_request [ 'request_extensions' ] : extn_id = extension [ 'extn_id' ] . native critical = extension [ 'critical' ] . native value = extension [ 'extn_value' ] . parsed # This variable tracks whether any unknown extensions were encountered unknown = False # Handle nonce extension if extn_id == 'nonce' : builder . nonce = value . 
native # That's all we know else : unknown = True # If an unknown critical extension is encountered (which should not # usually happen, according to RFC 6960 4.1.2), we should throw our # hands up in despair and run. if unknown is True and critical is True : logger . warning ( 'Could not parse unknown critical extension: %r' , dict ( extension . native ) ) return self . _fail ( ResponseStatus . internal_error ) # If it's an unknown non-critical extension, we can safely ignore it. elif unknown is True : logger . info ( 'Ignored unknown non-critical extension: %r' , dict ( extension . native ) ) # Set certificate issuer builder . certificate_issuer = self . _issuer_cert # Set next update date builder . next_update = datetime . now ( timezone . utc ) + timedelta ( days = self . _next_update_days ) return builder . build ( self . _responder_key , self . _responder_cert )
3,294
https://github.com/threema-ch/ocspresponder/blob/b9486af68dd02b84e01bedabe4f6843a6ff0f698/ocspresponder/__init__.py#L139-L217
[ "def", "groups_set_read_only", "(", "self", ",", "room_id", ",", "read_only", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "__call_api_post", "(", "'groups.setReadOnly'", ",", "roomId", "=", "room_id", ",", "readOnly", "=", "bool", "(", "read_only", ")", ",", "kwargs", "=", "kwargs", ")" ]
Hook tag to call within templates
def hook_tag ( context , name , * args , * * kwargs ) : return format_html_join ( sep = "\n" , format_string = "{}" , args_generator = ( ( response , ) for response in hook ( name , context , * args , * * kwargs ) ) )
3,295
https://github.com/nitely/django-hooks/blob/26ea2150c9be110e90b9ee60fbfd1065ac30ab1d/hooks/templatetags/hooks_tags.py#L15-L35
[ "def", "import_file", "(", "filename", ")", ":", "#file_path = os.path.relpath(filename)", "file_path", "=", "os", ".", "path", ".", "abspath", "(", "filename", ")", "log", "(", "DEBUG", ",", "\"Loading prices from %s\"", ",", "file_path", ")", "prices", "=", "__read_prices_from_file", "(", "file_path", ")", "with", "BookAggregate", "(", "for_writing", "=", "True", ")", "as", "svc", ":", "svc", ".", "prices", ".", "import_prices", "(", "prices", ")", "print", "(", "\"Saving book...\"", ")", "svc", ".", "book", ".", "save", "(", ")" ]
Helper to include in your own templatetag for static TemplateHooks
def template_hook_collect ( module , hook_name , * args , * * kwargs ) : try : templatehook = getattr ( module , hook_name ) except AttributeError : return "" return format_html_join ( sep = "\n" , format_string = "{}" , args_generator = ( ( response , ) for response in templatehook ( * args , * * kwargs ) ) )
3,296
https://github.com/nitely/django-hooks/blob/26ea2150c9be110e90b9ee60fbfd1065ac30ab1d/hooks/templatetags/hooks_tags.py#L38-L71
[ "def", "split", "(", "self", ",", "verbose", "=", "None", ",", "end_in_new_line", "=", "None", ")", ":", "elapsed_time", "=", "self", ".", "get_elapsed_time", "(", ")", "self", ".", "split_elapsed_time", ".", "append", "(", "elapsed_time", ")", "self", ".", "_cumulative_elapsed_time", "+=", "elapsed_time", "self", ".", "_elapsed_time", "=", "datetime", ".", "timedelta", "(", ")", "if", "verbose", "is", "None", ":", "verbose", "=", "self", ".", "verbose_end", "if", "verbose", ":", "if", "end_in_new_line", "is", "None", ":", "end_in_new_line", "=", "self", ".", "end_in_new_line", "if", "end_in_new_line", ":", "self", ".", "log", "(", "\"{} done in {}\"", ".", "format", "(", "self", ".", "description", ",", "elapsed_time", ")", ")", "else", ":", "self", ".", "log", "(", "\" done in {}\"", ".", "format", "(", "elapsed_time", ")", ")", "self", ".", "_start_time", "=", "datetime", ".", "datetime", ".", "now", "(", ")" ]
Extracts data from mbox files . Mutates _data .
def _extract ( self , source , * args , * * kwargs ) : # Extract data self . _data = mbox_to_pandas ( source ) self . _data [ 'MessageID' ] = pd . Series ( range ( 0 , len ( self . _data ) ) )
3,297
https://github.com/networks-lab/tidyextractors/blob/658448ed533beecf32adcc188fc64d1068d15ca6/tidyextractors/tidymbox/mbox_extractor.py#L36-L47
[ "def", "reset", "(", "self", ")", ":", "# Reset Union Temporal Pooler fields", "self", ".", "_poolingActivation", "=", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", "self", ".", "_unionSDR", "=", "numpy", ".", "array", "(", "[", "]", ",", "dtype", "=", "UINT_DTYPE", ")", "self", ".", "_poolingTimer", "=", "numpy", ".", "ones", "(", "self", ".", "getNumColumns", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", "*", "1000", "self", ".", "_poolingActivationInitLevel", "=", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", "self", ".", "_preActiveInput", "=", "numpy", ".", "zeros", "(", "self", ".", "getNumInputs", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", "self", ".", "_prePredictedActiveInput", "=", "numpy", ".", "zeros", "(", "(", "self", ".", "getNumInputs", "(", ")", ",", "self", ".", "_historyLength", ")", ",", "dtype", "=", "REAL_DTYPE", ")", "# Reset Spatial Pooler fields", "self", ".", "setOverlapDutyCycles", "(", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", ")", "self", ".", "setActiveDutyCycles", "(", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", ")", "self", ".", "setMinOverlapDutyCycles", "(", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", ")", "self", ".", "setBoostFactors", "(", "numpy", ".", "ones", "(", "self", ".", "getNumColumns", "(", ")", ",", "dtype", "=", "REAL_DTYPE", ")", ")" ]
Create a WebhookMetadata from a comment added to an issue .
def build_from_issue_comment ( gh_token , body ) : if body [ "action" ] in [ "created" , "edited" ] : github_con = Github ( gh_token ) repo = github_con . get_repo ( body [ 'repository' ] [ 'full_name' ] ) issue = repo . get_issue ( body [ 'issue' ] [ 'number' ] ) text = body [ 'comment' ] [ 'body' ] try : comment = issue . get_comment ( body [ 'comment' ] [ 'id' ] ) except UnknownObjectException : # If the comment has already disapeared, skip the command return None return WebhookMetadata ( repo , issue , text , comment ) return None
3,298
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/bot_framework.py#L25-L39
[ "def", "same_types", "(", "self", ",", "index1", ",", "index2", ")", ":", "try", ":", "same", "=", "self", ".", "table", "[", "index1", "]", ".", "type", "==", "self", ".", "table", "[", "index2", "]", ".", "type", "!=", "SharedData", ".", "TYPES", ".", "NO_TYPE", "except", "Exception", ":", "self", ".", "error", "(", ")", "return", "same" ]
Create a WebhookMetadata from an opening issue text .
def build_from_issues ( gh_token , body ) : if body [ "action" ] in [ "opened" , "edited" ] : github_con = Github ( gh_token ) repo = github_con . get_repo ( body [ 'repository' ] [ 'full_name' ] ) issue = repo . get_issue ( body [ 'issue' ] [ 'number' ] ) text = body [ 'issue' ] [ 'body' ] comment = issue # It's where we update the comment: in the issue itself return WebhookMetadata ( repo , issue , text , comment ) return None
3,299
https://github.com/Azure/azure-python-devtools/blob/2bf87b1f3cedd2b26fb2e4fd47a9baf435dcf936/src/azure_devtools/ci_tools/bot_framework.py#L41-L51
[ "def", "same_types", "(", "self", ",", "index1", ",", "index2", ")", ":", "try", ":", "same", "=", "self", ".", "table", "[", "index1", "]", ".", "type", "==", "self", ".", "table", "[", "index2", "]", ".", "type", "!=", "SharedData", ".", "TYPES", ".", "NO_TYPE", "except", "Exception", ":", "self", ".", "error", "(", ")", "return", "same" ]