query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Returns a response object for the given image . Can be overridden to return different responses .
def create_response(self, request, image, content_type):
    """Return a response object for the given image.

    Subclasses can override this hook to return different responses.
    """
    response = HttpResponse(content=image, content_type=content_type)
    return response
5,500
https://github.com/consbio/ncdjango/blob/f807bfd1e4083ab29fbc3c4d4418be108383a710/ncdjango/views.py#L198-L201
[ "def", "cleanup", "(", "self", ")", ":", "if", "self", ".", "sock", "is", "not", "None", ":", "self", ".", "sock", ".", "close", "(", ")", "if", "self", ".", "outfile", "is", "not", "None", ":", "self", ".", "outfile", ".", "close", "(", ")", "if", "self", ".", "bar", "is", "not", "None", ":", "self", ".", "update_progress", "(", "complete", "=", "True", ")" ]
Returns a response object for the request . Can be overridden to return different responses .
def create_response(self, request, content, content_type):
    """Return a response object for the request.

    Subclasses can override this hook to return different responses.
    """
    response = HttpResponse(content=content, content_type=content_type)
    return response
5,501
https://github.com/consbio/ncdjango/blob/f807bfd1e4083ab29fbc3c4d4418be108383a710/ncdjango/views.py#L324-L327
[ "def", "_log10_Inorm_extern_planckint", "(", "self", ",", "Teff", ")", ":", "log10_Inorm", "=", "libphoebe", ".", "wd_planckint", "(", "Teff", ",", "self", ".", "extern_wd_idx", ",", "self", ".", "wd_data", "[", "\"planck_table\"", "]", ")", "return", "log10_Inorm" ]
Truncates the filename if necessary saves the model and returns a response
def process_temporary_file(self, tmp_file):
    """Truncate the filename if necessary, save the model and return a response.

    :param tmp_file: model instance with ``filename``, ``extension``, ``uuid``
        attributes and a ``save()`` method — presumably a TemporaryFile model;
        TODO confirm against the model definition.
    :return: 201 HttpResponse whose JSON body carries the file's uuid.
    """
    # Truncate filename if necessary: keep the extension intact and trim the
    # base name so the stored filename fits the (apparent) 100-char limit.
    if len(tmp_file.filename) > 100:
        base_filename = tmp_file.filename[:tmp_file.filename.rfind(".")]
        # NOTE(review): a filename without a "." would make rfind return -1 and
        # silently drop the last character — confirm callers guarantee a dot.
        tmp_file.filename = "%s.%s" % (base_filename[:99 - len(tmp_file.extension)], tmp_file.extension)
    tmp_file.save()
    data = {'uuid': str(tmp_file.uuid)}
    # Content type is text/plain (not application/json) — presumably for
    # legacy uploader compatibility; verify before changing.
    response = HttpResponse(json.dumps(data), status=201)
    response['Content-type'] = "text/plain"
    return response
5,502
https://github.com/consbio/ncdjango/blob/f807bfd1e4083ab29fbc3c4d4418be108383a710/ncdjango/views.py#L438-L455
[ "def", "extract_bus_routine", "(", "page", ")", ":", "if", "not", "isinstance", "(", "page", ",", "pq", ")", ":", "page", "=", "pq", "(", "page", ")", "stations", "=", "extract_stations", "(", "page", ")", "return", "{", "# Routine name.", "'name'", ":", "extract_routine_name", "(", "page", ")", ",", "# Bus stations.", "'stations'", ":", "stations", ",", "# Current routine.", "'current'", ":", "extract_current_routine", "(", "page", ",", "stations", ")", "}" ]
2D labeling at zmax
def createLabels2D(self):
    """2D labeling at zmax.

    Collapses the (pixel, distance) value grid to its per-pixel maximum along
    axis 1, labels that 2D healpix map, then broadcasts the labels back across
    every distance slice.

    :return: tuple ``(labels, nlabels)`` — the broadcast label array of shape
        (npix, ndistances) and the label count.
    """
    logger.debug(" Creating 2D labels...")
    # Index of the maximum value along the distance axis for each pixel.
    self.zmax = np.argmax(self.values, axis=1)
    # Value at that maximum, selected per-row.
    self.vmax = self.values[np.arange(len(self.pixels), dtype=int), self.zmax]
    kwargs = dict(pixels=self.pixels, values=self.vmax, nside=self.nside, threshold=self.threshold, xsize=self.xsize)
    labels, nlabels = CandidateSearch.labelHealpix(**kwargs)
    self.nlabels = nlabels
    # Repeat each 2D label across all distance slices so the result matches
    # the original (npix, ndistances) shape of self.values.
    self.labels = np.repeat(labels, len(self.distances)).reshape(len(labels), len(self.distances))
    return self.labels, self.nlabels
5,503
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/analysis/search.py#L73-L84
[ "def", "__roll", "(", "self", ",", "unrolled", ")", ":", "rolled", "=", "[", "]", "index", "=", "0", "for", "count", "in", "range", "(", "len", "(", "self", ".", "__sizes", ")", "-", "1", ")", ":", "in_size", "=", "self", ".", "__sizes", "[", "count", "]", "out_size", "=", "self", ".", "__sizes", "[", "count", "+", "1", "]", "theta_unrolled", "=", "np", ".", "matrix", "(", "unrolled", "[", "index", ":", "index", "+", "(", "in_size", "+", "1", ")", "*", "out_size", "]", ")", "theta_rolled", "=", "theta_unrolled", ".", "reshape", "(", "(", "out_size", ",", "in_size", "+", "1", ")", ")", "rolled", ".", "append", "(", "theta_rolled", ")", "index", "+=", "(", "in_size", "+", "1", ")", "*", "out_size", "return", "rolled" ]
Take a file - like object and yield OrderedDicts to be inserted into raw spending database .
def process_csv(f):
    """Take a file-like object and yield OrderedDicts to be inserted into
    the raw spending database.
    """
    reader = unicodecsv.DictReader(f, encoding=_ENCODING)
    for row in reader:
        month, year = parse_month_year(row['Return Month'])
        record = OrderedDict()
        record['customer_name'] = row['CustomerName']
        record['supplier_name'] = row['SupplierName']
        record['month'] = month
        record['year'] = year
        record['date'] = datetime.date(year, month, 1)
        record['total_ex_vat'] = parse_price(row['EvidencedSpend'])
        record['lot'] = parse_lot_name(row['LotDescription'])
        record['customer_sector'] = parse_customer_sector(row['Sector'])
        record['supplier_type'] = parse_sme_or_large(row['SME or Large'])
        yield record
5,504
https://github.com/alphagov/performanceplatform-collector/blob/de68ab4aa500c31e436e050fa1268fa928c522a5/performanceplatform/collector/gcloud/sales_parser.py#L71-L90
[ "def", "validate_broker_ids_subset", "(", "broker_ids", ",", "subset_ids", ")", ":", "all_ids", "=", "set", "(", "broker_ids", ")", "valid", "=", "True", "for", "subset_id", "in", "subset_ids", ":", "valid", "=", "valid", "and", "subset_id", "in", "all_ids", "if", "subset_id", "not", "in", "all_ids", ":", "print", "(", "\"Error: user specified broker id {0} does not exist in cluster.\"", ".", "format", "(", "subset_id", ")", ")", "return", "valid" ]
Attempt to cast the string value to an int and failing that a float failing that raise a ValueError .
def try_number(value):
    """Cast *value* to an int, failing that a float, failing that raise.

    :raises ValueError: if neither cast succeeds.
    """
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return float(value)
    except ValueError:
        pass
    raise ValueError(
        "Unable to use value as int or float: {0!r}".format(value))
5,505
https://github.com/alphagov/performanceplatform-collector/blob/de68ab4aa500c31e436e050fa1268fa928c522a5/performanceplatform/collector/ga/core.py#L37-L50
[ "def", "prepare_blobs", "(", "self", ")", ":", "self", ".", "raw_header", "=", "self", ".", "extract_header", "(", ")", "if", "self", ".", "cache_enabled", ":", "self", ".", "_cache_offsets", "(", ")" ]
Convert session duration metrics from seconds to milliseconds .
def convert_durations(metric):
    """Convert session duration metrics from seconds to milliseconds.

    Only a truthy ``avgSessionDuration`` value is scaled; every other
    metric tuple is passed through unchanged.
    """
    name, value = metric[0], metric[1]
    if name != 'avgSessionDuration' or not value:
        return metric
    return (name, value * 1000)
5,506
https://github.com/alphagov/performanceplatform-collector/blob/de68ab4aa500c31e436e050fa1268fa928c522a5/performanceplatform/collector/ga/core.py#L53-L61
[ "def", "arcball_constrain_to_axis", "(", "point", ",", "axis", ")", ":", "v", "=", "np", ".", "array", "(", "point", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "a", "=", "np", ".", "array", "(", "axis", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "v", "-=", "a", "*", "np", ".", "dot", "(", "a", ",", "v", ")", "# on plane", "n", "=", "vector_norm", "(", "v", ")", "if", "n", ">", "_EPS", ":", "if", "v", "[", "2", "]", "<", "0.0", ":", "np", ".", "negative", "(", "v", ",", "v", ")", "v", "/=", "n", "return", "v", "if", "a", "[", "2", "]", "==", "1.0", ":", "return", "np", ".", "array", "(", "[", "1.0", ",", "0.0", ",", "0.0", "]", ")", "return", "unit_vector", "(", "[", "-", "a", "[", "1", "]", ",", "a", "[", "0", "]", ",", "0.0", "]", ")" ]
Extract the first date from key matching YYYY - MM - DD or YYYY - MM and convert to datetime .
def to_datetime(date_key):
    """Extract the first date matching YYYY-MM-DD or YYYY-MM from *date_key*
    and convert it to a UTC-aware datetime.
    """
    match = re.search(r'\d{4}-\d{2}(-\d{2})?', date_key)
    date_str = match.group()
    # A full YYYY-MM-DD match is exactly 10 characters long.
    formatter = '%Y-%m-%d' if len(date_str) == 10 else '%Y-%m'
    return datetime.strptime(date_str, formatter).replace(tzinfo=pytz.UTC)
5,507
https://github.com/alphagov/performanceplatform-collector/blob/de68ab4aa500c31e436e050fa1268fa928c522a5/performanceplatform/collector/piwik/core.py#L77-L87
[ "def", "max_pv_count", "(", "self", ")", ":", "self", ".", "open", "(", ")", "count", "=", "lvm_vg_get_max_pv", "(", "self", ".", "handle", ")", "self", ".", "close", "(", ")", "return", "count" ]
Convert a float to a 38 - precision Decimal
def float_to_decimal(f):
    """Convert a float to a 38-precision Decimal.

    Uses the exact integer ratio of the float and divides under the module's
    DECIMAL_CONTEXT so the result is rounded to that context's precision.
    """
    num, den = f.as_integer_ratio()
    return DECIMAL_CONTEXT.divide(Decimal(num), Decimal(den))
5,508
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L13-L17
[ "async", "def", "delTrigger", "(", "self", ",", "iden", ")", ":", "trig", "=", "self", ".", "cell", ".", "triggers", ".", "get", "(", "iden", ")", "self", ".", "_trig_auth_check", "(", "trig", ".", "get", "(", "'useriden'", ")", ")", "self", ".", "cell", ".", "triggers", ".", "delete", "(", "iden", ")" ]
Returns True if the value is a Dynamo - formatted value
def is_dynamo_value(value):
    """Return True if *value* is a Dynamo-formatted value.

    A Dynamo value is a single-key dict whose key is a known type tag.
    """
    if not isinstance(value, dict):
        return False
    if len(value) != 1:
        return False
    (subkey,) = value
    return subkey in TYPES_REV
5,509
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L35-L40
[ "def", "fetch", "(", "dataset", ",", "annot", ",", "cat", "=", "(", "0", ",", "0", ",", "0", ",", "0", ")", ",", "evt_type", "=", "None", ",", "stage", "=", "None", ",", "cycle", "=", "None", ",", "chan_full", "=", "None", ",", "epoch", "=", "None", ",", "epoch_dur", "=", "30", ",", "epoch_overlap", "=", "0", ",", "epoch_step", "=", "None", ",", "reject_epoch", "=", "False", ",", "reject_artf", "=", "False", ",", "min_dur", "=", "0", ",", "buffer", "=", "0", ")", ":", "bundles", "=", "get_times", "(", "annot", ",", "evt_type", "=", "evt_type", ",", "stage", "=", "stage", ",", "cycle", "=", "cycle", ",", "chan", "=", "chan_full", ",", "exclude", "=", "reject_epoch", ",", "buffer", "=", "buffer", ")", "# Remove artefacts", "if", "reject_artf", "and", "bundles", ":", "for", "bund", "in", "bundles", ":", "bund", "[", "'times'", "]", "=", "remove_artf_evts", "(", "bund", "[", "'times'", "]", ",", "annot", ",", "bund", "[", "'chan'", "]", ",", "min_dur", "=", "0", ")", "# Divide bundles into segments to be concatenated", "if", "bundles", ":", "if", "'locked'", "==", "epoch", ":", "bundles", "=", "_divide_bundles", "(", "bundles", ")", "elif", "'unlocked'", "==", "epoch", ":", "if", "epoch_step", "is", "not", "None", ":", "step", "=", "epoch_step", "else", ":", "step", "=", "epoch_dur", "-", "(", "epoch_dur", "*", "epoch_overlap", ")", "bundles", "=", "_concat", "(", "bundles", ",", "cat", ")", "bundles", "=", "_find_intervals", "(", "bundles", ",", "epoch_dur", ",", "step", ")", "elif", "not", "epoch", ":", "bundles", "=", "_concat", "(", "bundles", ",", "cat", ")", "# Minimum duration", "bundles", "=", "_longer_than", "(", "bundles", ",", "min_dur", ")", "segments", "=", "Segments", "(", "dataset", ")", "segments", ".", "segments", "=", "bundles", "return", "segments" ]
Encode a set for the DynamoDB format
def encode_set(dynamizer, value):
    """Encode a set for the DynamoDB format.

    The set's element type is taken from an arbitrary member; the returned
    tag is that type with 'S' appended (e.g. 'NS' for a number set).
    """
    sample = next(iter(value))
    set_tag = dynamizer.raw_encode(sample)[0] + 'S'
    encoded_members = [dynamizer.raw_encode(member)[1] for member in value]
    return set_tag, encoded_members
5,510
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L77-L81
[ "def", "_process_name", "(", "name", ")", ":", "# Unescape HTML entities", "name", "=", "unescape", "(", "name", ")", "# Remove bracketed stuff on the end", "name", "=", "NG_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", ")", "# Nomenclature groups", "name", "=", "END_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", "', '", ")", "# Words", "name", "=", "RATIO_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", "', '", ")", "# Ratios", "# Remove stuff off start", "name", "=", "START_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", ")", "# Remove balanced start and end brackets if none in between", "name", "=", "BRACKET_RE", ".", "sub", "(", "'\\g<1>'", ",", "name", ")", "# Un-invert CAS style names", "comps", "=", "name", ".", "split", "(", "', '", ")", "if", "len", "(", "comps", ")", "==", "2", ":", "if", "comps", "[", "1", "]", ".", "endswith", "(", "'-'", ")", ":", "name", "=", "comps", "[", "0", "]", "name", "=", "'%s%s'", "%", "(", "comps", "[", "1", "]", ",", "name", ")", "elif", "len", "(", "comps", ")", ">", "2", ":", "name", "=", "comps", "[", "0", "]", "for", "i", "in", "range", "(", "1", ",", "len", "(", "comps", ")", ")", ":", "if", "comps", "[", "i", "]", ".", "endswith", "(", "'-'", ")", ":", "name", "=", "'%s%s'", "%", "(", "comps", "[", "i", "]", ",", "name", ")", "else", ":", "name", "=", "'%s %s'", "%", "(", "name", ",", "comps", "[", "i", "]", ")", "return", "name" ]
Encode a list for the DynamoDB format
def encode_list(dynamizer, value):
    """Encode a list for the DynamoDB format.

    :param dynamizer: object providing ``raw_encode(v) -> (type_tag, encoded)``
    :param value: iterable of python values to encode
    :return: tuple ``('L', [{type_tag: encoded}, ...])``
    """
    # BUG FIX: the original also executed ``dict(map(dynamizer.raw_encode, value))``
    # and discarded the result — every element was encoded twice for nothing
    # (wasted work, and duplicated any encoder side effects). Removed.
    encoded_list = []
    for v in value:
        encoded_type, encoded_value = dynamizer.raw_encode(v)
        encoded_list.append({encoded_type: encoded_value})
    return 'L', encoded_list
5,511
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L84-L93
[ "def", "validate_pai_trial_conifg", "(", "experiment_config", ")", ":", "if", "experiment_config", ".", "get", "(", "'trainingServicePlatform'", ")", "==", "'pai'", ":", "if", "experiment_config", ".", "get", "(", "'trial'", ")", ".", "get", "(", "'shmMB'", ")", "and", "experiment_config", "[", "'trial'", "]", "[", "'shmMB'", "]", ">", "experiment_config", "[", "'trial'", "]", "[", "'memoryMB'", "]", ":", "print_error", "(", "'shmMB should be no more than memoryMB!'", ")", "exit", "(", "1", ")" ]
Encode a dict for the DynamoDB format
def encode_dict(dynamizer, value):
    """Encode a dict for the DynamoDB format.

    Each value is run through the dynamizer's raw encoder and wrapped as a
    single-key ``{type_tag: encoded}`` dict under the 'M' (map) tag.
    """
    encoded = {}
    for key, val in value.items():
        enc_type, enc_val = dynamizer.raw_encode(val)
        encoded[key] = {enc_type: enc_val}
    return 'M', encoded
5,512
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L96-L104
[ "def", "GetMemSwappedMB", "(", "self", ")", ":", "counter", "=", "c_uint", "(", ")", "ret", "=", "vmGuestLib", ".", "VMGuestLib_GetMemSwappedMB", "(", "self", ".", "handle", ".", "value", ",", "byref", "(", "counter", ")", ")", "if", "ret", "!=", "VMGUESTLIB_ERROR_SUCCESS", ":", "raise", "VMGuestLibException", "(", "ret", ")", "return", "counter", ".", "value" ]
Run the encoder on a value
def raw_encode(self, value):
    """Run the encoder registered for *value*'s exact type.

    :raises ValueError: if no encoder is registered for ``type(value)``.
    """
    value_type = type(value)
    if value_type not in self.encoders:
        raise ValueError("No encoder for value '%s' of type '%s'" %
                         (value, type(value)))
    return self.encoders[value_type](self, value)
5,513
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L150-L156
[ "def", "return_port", "(", "port", ")", ":", "if", "port", "in", "_random_ports", ":", "_random_ports", ".", "remove", "(", "port", ")", "elif", "port", "in", "_owned_ports", ":", "_owned_ports", ".", "remove", "(", "port", ")", "_free_ports", ".", "add", "(", "port", ")", "elif", "port", "in", "_free_ports", ":", "logging", ".", "info", "(", "\"Returning a port that was already returned: %s\"", ",", "port", ")", "else", ":", "logging", ".", "info", "(", "\"Returning a port that wasn't given by portpicker: %s\"", ",", "port", ")" ]
Run the encoder on a dict of values
def encode_keys(self, keys):
    """Run the encoder on a dict of values.

    Null values (as judged by ``is_null``) are dropped from the result.
    """
    encoded = {}
    for key, val in keys.items():
        if not is_null(val):
            encoded[key] = self.encode(val)
    return encoded
5,514
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L158-L161
[ "def", "return_port", "(", "port", ")", ":", "if", "port", "in", "_random_ports", ":", "_random_ports", ".", "remove", "(", "port", ")", "elif", "port", "in", "_owned_ports", ":", "_owned_ports", ".", "remove", "(", "port", ")", "_free_ports", ".", "add", "(", "port", ")", "elif", "port", "in", "_free_ports", ":", "logging", ".", "info", "(", "\"Returning a port that was already returned: %s\"", ",", "port", ")", "else", ":", "logging", ".", "info", "(", "\"Returning a port that wasn't given by portpicker: %s\"", ",", "port", ")" ]
Same as encode_keys but a no - op if already in Dynamo format
def maybe_encode_keys(self, keys):
    """Same as encode_keys, but a no-op if already in Dynamo format.

    Encountering any value that is already Dynamo-formatted returns the
    original dict untouched; otherwise non-null values are encoded.
    """
    encoded = {}
    for key, val in keys.items():
        if is_dynamo_value(val):
            # One Dynamo-formatted value means the whole dict already is.
            return keys
        if not is_null(val):
            encoded[key] = self.encode(val)
    return encoded
5,515
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L163-L171
[ "def", "compare_root_path", "(", "path_cost1", ",", "path_cost2", ",", "bridge_id1", ",", "bridge_id2", ",", "port_id1", ",", "port_id2", ")", ":", "result", "=", "Stp", ".", "_cmp_value", "(", "path_cost1", ",", "path_cost2", ")", "if", "not", "result", ":", "result", "=", "Stp", ".", "_cmp_value", "(", "bridge_id1", ",", "bridge_id2", ")", "if", "not", "result", ":", "result", "=", "Stp", ".", "_cmp_value", "(", "port_id1", ",", "port_id2", ")", "return", "result" ]
Run the decoder on a dict of values
def decode_keys(self, keys):
    """Run the decoder on every value of a dict, keeping the keys."""
    return {key: self.decode(val) for key, val in keys.items()}
5,516
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L177-L179
[ "def", "perturbed_contents", "(", "self", ")", ":", "animal", "=", "json", ".", "loads", "(", "self", ".", "contents", ")", "for", "prop", ",", "prop_range", "in", "self", ".", "properties", ".", "items", "(", ")", ":", "range", "=", "prop_range", "[", "1", "]", "-", "prop_range", "[", "0", "]", "jittered", "=", "animal", "[", "prop", "]", "+", "random", ".", "gauss", "(", "0", ",", "0.1", "*", "range", ")", "animal", "[", "prop", "]", "=", "max", "(", "min", "(", "jittered", ",", "prop_range", "[", "1", "]", ")", ",", "prop_range", "[", "0", "]", ")", "return", "json", ".", "dumps", "(", "animal", ")" ]
Decode a dynamo value into a python value
def decode(self, dynamo_value):
    """Decode a dynamo value into a python value.

    :param dynamo_value: single-key dict mapping a Dynamo type tag to the
        raw encoded value.
    :return: the corresponding python value (str, Binary, Decimal, set,
        list, dict, bool or None).
    :raises TypeError: for an unrecognized type tag.
    """
    # Renamed from ``type`` to avoid shadowing the builtin.
    dynamo_type, value = next(iter(dynamo_value.items()))
    if dynamo_type == STRING:
        return value
    elif dynamo_type == BINARY:
        return Binary(value)
    elif dynamo_type == NUMBER:
        return Decimal(value)
    elif dynamo_type == STRING_SET:
        return set(value)
    elif dynamo_type == BINARY_SET:
        return set(Binary(v) for v in value)
    elif dynamo_type == NUMBER_SET:
        return set(Decimal(v) for v in value)
    elif dynamo_type == BOOL:
        return value
    elif dynamo_type == LIST:
        return [self.decode(v) for v in value]
    elif dynamo_type == MAP:
        return {k: self.decode(v) for k, v in value.items()}
    elif dynamo_type == NULL:
        return None
    else:
        # BUG FIX: the original passed the type as a second TypeError argument
        # (``TypeError(msg, type)``) so the %r placeholder was never
        # interpolated into the message. Format it explicitly.
        raise TypeError("Received unrecognized type %r from dynamo" % (dynamo_type,))
5,517
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/types.py#L181-L208
[ "def", "get_readme", "(", "name", "=", "'README.rst'", ")", ":", "with", "open", "(", "name", ")", "as", "f", ":", "return", "'\\n'", ".", "join", "(", "line", "for", "line", "in", "f", ".", "read", "(", ")", ".", "splitlines", "(", ")", "if", "not", "line", ".", "startswith", "(", "'|'", ")", "or", "not", "line", ".", "endswith", "(", "'|'", ")", ")" ]
Write a paragraph of 5 sentences .
def get_paragraph(self):
    """Compose a paragraph of 5 to 12 randomly generated sentences.

    The paragraph is stored on ``self.text`` and also returned.
    """
    sentence_count = randint(5, 12)
    self.text = ''
    for _ in range(sentence_count):
        self.text = self.text + self._write_sentence()
    return self.text
5,518
https://github.com/accraze/python-markov-novel/blob/ff451639e93a3ac11fb0268b92bc0cffc00bfdbe/src/markov_novel/paragraph.py#L18-L27
[ "def", "cross_net", "(", "stream", ",", "env", "=", "False", ",", "debug", "=", "0", ",", "master", "=", "False", ")", ":", "event", "=", "Event", "(", ")", "event", ".", "origins", ".", "append", "(", "Origin", "(", ")", ")", "event", ".", "creation_info", "=", "CreationInfo", "(", "author", "=", "'EQcorrscan'", ",", "creation_time", "=", "UTCDateTime", "(", ")", ")", "event", ".", "comments", ".", "append", "(", "Comment", "(", "text", "=", "'cross_net'", ")", ")", "samp_rate", "=", "stream", "[", "0", "]", ".", "stats", ".", "sampling_rate", "if", "not", "env", ":", "if", "debug", ">", "2", ":", "print", "(", "'Using the raw data'", ")", "st", "=", "stream", ".", "copy", "(", ")", "st", ".", "resample", "(", "samp_rate", ")", "else", ":", "st", "=", "stream", ".", "copy", "(", ")", "if", "debug", ">", "2", ":", "print", "(", "'Computing envelope'", ")", "for", "tr", "in", "st", ":", "tr", ".", "resample", "(", "samp_rate", ")", "tr", ".", "data", "=", "envelope", "(", "tr", ".", "data", ")", "if", "not", "master", ":", "master", "=", "st", "[", "0", "]", "else", ":", "master", "=", "master", "master", ".", "data", "=", "np", ".", "nan_to_num", "(", "master", ".", "data", ")", "for", "i", ",", "tr", "in", "enumerate", "(", "st", ")", ":", "tr", ".", "data", "=", "np", ".", "nan_to_num", "(", "tr", ".", "data", ")", "if", "debug", ">", "2", ":", "msg", "=", "' '", ".", "join", "(", "[", "'Comparing'", ",", "tr", ".", "stats", ".", "station", ",", "tr", ".", "stats", ".", "channel", ",", "'with the master'", "]", ")", "print", "(", "msg", ")", "shift_len", "=", "int", "(", "0.3", "*", "len", "(", "tr", ")", ")", "if", "debug", ">", "2", ":", "print", "(", "'Shift length is set to '", "+", "str", "(", "shift_len", ")", "+", "' samples'", ")", "index", ",", "cc", "=", "xcorr", "(", "master", ",", "tr", ",", "shift_len", ")", "wav_id", "=", "WaveformStreamID", "(", "station_code", "=", "tr", ".", "stats", ".", "station", ",", "channel_code", "=", "tr", ".", "stats", 
".", "channel", ",", "network_code", "=", "tr", ".", "stats", ".", "network", ")", "event", ".", "picks", ".", "append", "(", "Pick", "(", "time", "=", "tr", ".", "stats", ".", "starttime", "+", "(", "index", "/", "tr", ".", "stats", ".", "sampling_rate", ")", ",", "waveform_id", "=", "wav_id", ",", "phase_hint", "=", "'S'", ",", "onset", "=", "'emergent'", ")", ")", "if", "debug", ">", "2", ":", "print", "(", "event", ".", "picks", "[", "i", "]", ")", "event", ".", "origins", "[", "0", "]", ".", "time", "=", "min", "(", "[", "pick", ".", "time", "for", "pick", "in", "event", ".", "picks", "]", ")", "-", "1", "# event.origins[0].latitude = float('nan')", "# event.origins[0].longitude = float('nan')", "# Set arbitrary origin time", "del", "st", "return", "event" ]
Should we skip the job based on its number
def skip_job(counter):
    """Decide whether this host should skip the job with the given number.

    Jobs are sharded across NUMBER_OF_HOSTS hosts by the numeric suffix of
    the hostname; a host runs only its own share.
    """
    hostname = socket.gethostname()
    try:
        host_number = int(hostname.split('-')[-1])
    except ValueError:
        # Hostname has no numeric suffix: run everything.
        return False
    offset = counter + host_number - (NUMBER_OF_HOSTS - 1)
    return offset % NUMBER_OF_HOSTS != 0
5,519
https://github.com/alphagov/performanceplatform-collector/blob/de68ab4aa500c31e436e050fa1268fa928c522a5/performanceplatform/collector/crontab.py#L57-L75
[ "def", "parse_source_file", "(", "source_file", ")", ":", "if", "not", "source_file", ":", "return", "None", "vcsinfo", "=", "source_file", ".", "split", "(", "':'", ")", "if", "len", "(", "vcsinfo", ")", "==", "4", ":", "# These are repositories or cloud file systems (e.g. hg, git, s3)", "vcstype", ",", "root", ",", "vcs_source_file", ",", "revision", "=", "vcsinfo", "return", "vcs_source_file", "if", "len", "(", "vcsinfo", ")", "==", "2", ":", "# These are directories on someone's Windows computer and vcstype is a", "# file system (e.g. \"c:\", \"d:\", \"f:\")", "vcstype", ",", "vcs_source_file", "=", "vcsinfo", "return", "vcs_source_file", "if", "source_file", ".", "startswith", "(", "'/'", ")", ":", "# These are directories on OSX or Linux", "return", "source_file", "# We have no idea what this is, so return None", "return", "None" ]
Returns a crontab with jobs from job path
def generate_crontab(current_crontab, path_to_jobs, path_to_app, unique_id):
    """Returns a crontab with jobs from the job path.

    Existing lines for this app (identified by *unique_id*) are removed and
    replaced by freshly generated pp-collector entries, wrapped in begin/end
    marker comments.

    :param current_crontab: iterable of existing crontab lines
    :param path_to_jobs: path to the jobs definition file
    :param path_to_app: deployment root used to build command paths
    :param unique_id: marker identifying this app's section of the crontab
    :raises ParseError: if a job line cannot be parsed
    """
    # Propagate DISABLE_COLLECTORS into each cron entry when it is set.
    set_disable_envar = ''
    if os.environ.get('DISABLE_COLLECTORS') == 'true':
        set_disable_envar = 'DISABLE_COLLECTORS={} '.format(os.environ.get('DISABLE_COLLECTORS'))
    # Adjacent string literals concatenate into one command template.
    job_template = '{schedule} ' '{set_disable_envar}' '{app_path}/venv/bin/pp-collector ' '-l {collector_slug} ' '-c {app_path}/config/{credentials} ' '-t {app_path}/config/{token} ' '-b {app_path}/config/{performanceplatform} ' '>> {app_path}/log/out.log 2>> {app_path}/log/error.log'
    crontab = [line.strip() for line in current_crontab]
    crontab = remove_existing_crontab_for_app(crontab, unique_id)
    additional_crontab = []
    job_number = 0
    with open(path_to_jobs) as jobs:
        try:
            for job in jobs:
                parsed = parse_job_line(job)
                if parsed is not None:
                    job_number += 1
                    # Sharding: other hosts handle the jobs we skip here.
                    if skip_job(job_number):
                        continue
                    schedule, collector_slug, credentials, token, performanceplatform = parsed
                    cronjob = job_template.format(schedule=schedule, set_disable_envar=set_disable_envar, app_path=path_to_app, collector_slug=collector_slug, credentials=credentials, token=token, performanceplatform=performanceplatform)
                    additional_crontab.append(cronjob)
        except ValueError as e:
            raise ParseError(str(e))
    # Only emit the marker comments when there is at least one job.
    if additional_crontab:
        crontab.append(crontab_begin_comment(unique_id))
        crontab.extend(additional_crontab)
        crontab.append(crontab_end_comment(unique_id))
    return crontab
5,520
https://github.com/alphagov/performanceplatform-collector/blob/de68ab4aa500c31e436e050fa1268fa928c522a5/performanceplatform/collector/crontab.py#L78-L135
[ "def", "diff_packages", "(", "pkg1", ",", "pkg2", "=", "None", ")", ":", "if", "pkg2", "is", "None", ":", "it", "=", "iter_packages", "(", "pkg1", ".", "name", ")", "pkgs", "=", "[", "x", "for", "x", "in", "it", "if", "x", ".", "version", "<", "pkg1", ".", "version", "]", "if", "not", "pkgs", ":", "raise", "RezError", "(", "\"No package to diff with - %s is the earliest \"", "\"package version\"", "%", "pkg1", ".", "qualified_name", ")", "pkgs", "=", "sorted", "(", "pkgs", ",", "key", "=", "lambda", "x", ":", "x", ".", "version", ")", "pkg2", "=", "pkgs", "[", "-", "1", "]", "def", "_check_pkg", "(", "pkg", ")", ":", "if", "not", "(", "pkg", ".", "vcs", "and", "pkg", ".", "revision", ")", ":", "raise", "RezError", "(", "\"Cannot diff package %s: it is a legacy format \"", "\"package that does not contain enough information\"", "%", "pkg", ".", "qualified_name", ")", "_check_pkg", "(", "pkg1", ")", "_check_pkg", "(", "pkg2", ")", "path", "=", "mkdtemp", "(", "prefix", "=", "\"rez-pkg-diff\"", ")", "paths", "=", "[", "]", "for", "pkg", "in", "(", "pkg1", ",", "pkg2", ")", ":", "print", "\"Exporting %s...\"", "%", "pkg", ".", "qualified_name", "path_", "=", "os", ".", "path", ".", "join", "(", "path", ",", "pkg", ".", "qualified_name", ")", "vcs_cls_1", "=", "plugin_manager", ".", "get_plugin_class", "(", "\"release_vcs\"", ",", "pkg1", ".", "vcs", ")", "vcs_cls_1", ".", "export", "(", "revision", "=", "pkg", ".", "revision", ",", "path", "=", "path_", ")", "paths", ".", "append", "(", "path_", ")", "difftool", "=", "config", ".", "difftool", "print", "\"Opening diff viewer %s...\"", "%", "difftool", "proc", "=", "Popen", "(", "[", "difftool", "]", "+", "paths", ")", "proc", ".", "wait", "(", ")" ]
Maps parameters to form field names
def map_parameters(cls, params):
    """Map request parameter names to form field names.

    Keys are lower-cased and looked up in ``cls.FIELD_MAP``; unknown keys
    are kept as-is.
    """
    mapped = {}
    for key, val in params.items():
        mapped[cls.FIELD_MAP.get(key.lower(), key)] = val
    return mapped
5,521
https://github.com/consbio/ncdjango/blob/f807bfd1e4083ab29fbc3c4d4418be108383a710/ncdjango/interfaces/arcgis/forms.py#L21-L27
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
Function generator to create functions for converting from world coordinates to image coordinates
def world_to_image(bbox, size):
    """Build a converter from world coordinates to image coordinates.

    :param bbox: bounding box with ``width``, ``height``, ``xmin``, ``ymin``
    :param size: (width, height) of the target image in pixels
    :return: function ``(x, y) -> (px, py)`` with the y axis flipped.
    """
    x_scale = float(size[0]) / bbox.width
    y_scale = float(size[1]) / bbox.height

    def convert(x, y):
        return (x - bbox.xmin) * x_scale, size[1] - (y - bbox.ymin) * y_scale

    return convert
5,522
https://github.com/consbio/ncdjango/blob/f807bfd1e4083ab29fbc3c4d4418be108383a710/ncdjango/geoimage.py#L173-L177
[ "def", "getTotalPrice", "(", "self", ")", ":", "price", ",", "vat", "=", "self", ".", "getAnalysisProfilePrice", "(", ")", ",", "self", ".", "getVATAmount", "(", ")", "return", "float", "(", "price", ")", "+", "float", "(", "vat", ")" ]
Returns a copy of this image warped to a target size and bounding box
def warp(self, target_bbox, target_size=None):
    """Returns a copy of this image warped to a target size and bounding box.

    :param target_bbox: target bounding box (with a ``projection`` attribute)
    :param target_size: optional (width, height); derived from the source
        image's pixels-per-unit when omitted.
    :return: a new GeoImage — or ``self`` when bbox and size already match.
    """
    # Determine target size based on pixels per unit of the source image and
    # the target bounding box reprojected to the source projection.
    if not target_size:
        px_per_unit = (float(self.image.size[0]) / self.bbox.width, float(self.image.size[1]) / self.bbox.height)
        src_bbox = target_bbox.project(self.bbox.projection)
        target_size = (int(round(src_bbox.width * px_per_unit[0])), int(round(src_bbox.height * px_per_unit[1])))
    # Working canvas must be large enough for both the source and the target.
    canvas_size = (max(target_size[0], self.image.size[0]), max(target_size[1], self.image.size[1]))
    # If target and source bounds are the same and source and target sizes
    # are the same, return a reference to this image.
    if self.bbox == target_bbox and self.image.size == target_size:
        return self
    # If target and source projections are the same, perform a simple resize
    # via an EXTENT transform of the target bounds in source-image pixels.
    elif self.bbox.projection.srs == target_bbox.projection.srs:
        to_source_image = world_to_image(self.bbox, self.image.size)
        upper_left = to_source_image(*(target_bbox.xmin, target_bbox.ymax))
        lower_right = to_source_image(*(target_bbox.xmax, target_bbox.ymin))
        if canvas_size == self.image.size:
            im = self.image
        else:
            # Paste onto a larger transparent canvas before transforming.
            im = Image.new("RGBA", canvas_size, (0, 0, 0, 0))
            im.paste(self.image, (0, 0))
        new_image = im.transform(target_size, Image.EXTENT, (upper_left[0], upper_left[1], lower_right[0], lower_right[1]), Image.NEAREST)
    # Full warp: reproject with a mesh transform between the projections.
    else:
        if canvas_size == self.image.size:
            im = self.image
        else:
            im = Image.new("RGBA", canvas_size, (0, 0, 0, 0))
            im.paste(self.image, (0, 0))
        new_image = im.transform(target_size, Image.MESH, self._create_mesh(target_bbox, target_size), Image.NEAREST)
    return GeoImage(new_image, target_bbox)
5,523
https://github.com/consbio/ncdjango/blob/f807bfd1e4083ab29fbc3c4d4418be108383a710/ncdjango/geoimage.py#L121-L170
[ "def", "print_element_xray_transitions", "(", "self", ",", "element", ",", "file", "=", "sys", ".", "stdout", ",", "tabulate_kwargs", "=", "None", ")", ":", "header", "=", "[", "'IUPAC'", ",", "'Siegbahn'", ",", "'Energy (eV)'", ",", "'Probability'", "]", "rows", "=", "[", "]", "for", "xraytransition", "in", "self", ".", "element_xray_transitions", "(", "element", ")", ":", "try", ":", "iupac", "=", "self", ".", "xray_transition_notation", "(", "xraytransition", ",", "'iupac'", ")", "except", ":", "iupac", "=", "''", "try", ":", "siegbahn", "=", "self", ".", "xray_transition_notation", "(", "xraytransition", ",", "'siegbahn'", ")", "except", ":", "siegbahn", "=", "''", "try", ":", "energy_eV", "=", "self", ".", "xray_transition_energy_eV", "(", "element", ",", "xraytransition", ")", "except", ":", "energy_eV", "=", "''", "try", ":", "probability", "=", "self", ".", "xray_transition_probability", "(", "element", ",", "xraytransition", ")", "except", ":", "probability", "=", "''", "rows", ".", "append", "(", "[", "iupac", ",", "siegbahn", ",", "energy_eV", ",", "probability", "]", ")", "rows", ".", "sort", "(", "key", "=", "operator", ".", "itemgetter", "(", "2", ")", ")", "if", "tabulate_kwargs", "is", "None", ":", "tabulate_kwargs", "=", "{", "}", "file", ".", "write", "(", "tabulate", ".", "tabulate", "(", "rows", ",", "header", ",", "*", "*", "tabulate_kwargs", ")", ")" ]
Try to take the first department code or fall back to string as passed
def try_get_department ( department_or_code ) : try : value = take_first_department_code ( department_or_code ) except AssertionError : value = department_or_code if value in DEPARTMENT_MAPPING : value = DEPARTMENT_MAPPING [ value ] return value
5,524
https://github.com/alphagov/performanceplatform-collector/blob/de68ab4aa500c31e436e050fa1268fa928c522a5/performanceplatform/collector/ga/plugins/department.py#L62-L74
[ "def", "percentOverlap", "(", "x1", ",", "x2", ",", "numColumns", ")", ":", "nonZeroX1", "=", "np", ".", "count_nonzero", "(", "x1", ")", "nonZeroX2", "=", "np", ".", "count_nonzero", "(", "x2", ")", "sparseCols", "=", "min", "(", "nonZeroX1", ",", "nonZeroX2", ")", "# transform input vector specifying columns into binary vector", "binX1", "=", "np", ".", "zeros", "(", "numColumns", ",", "dtype", "=", "\"uint32\"", ")", "binX2", "=", "np", ".", "zeros", "(", "numColumns", ",", "dtype", "=", "\"uint32\"", ")", "for", "i", "in", "range", "(", "sparseCols", ")", ":", "binX1", "[", "x1", "[", "i", "]", "]", "=", "1", "binX2", "[", "x2", "[", "i", "]", "]", "=", "1", "return", "float", "(", "np", ".", "dot", "(", "binX1", ",", "binX2", ")", ")", "/", "float", "(", "sparseCols", ")" ]
Retrieve the debug information from the identity manager .
def debug ( self ) : url = '{}debug/status' . format ( self . url ) try : return make_request ( url , timeout = self . timeout ) except ServerError as err : return { "error" : str ( err ) }
5,525
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/identity_manager.py#L44-L50
[ "def", "download_from_category", "(", "category_name", ",", "output_path", ",", "width", ")", ":", "file_names", "=", "get_category_files_from_api", "(", "category_name", ")", "files_to_download", "=", "izip_longest", "(", "file_names", ",", "[", "]", ",", "fillvalue", "=", "width", ")", "download_files_if_not_in_manifest", "(", "files_to_download", ",", "output_path", ")" ]
Send user identity information to the identity manager .
def login ( self , username , json_document ) : url = '{}u/{}' . format ( self . url , username ) make_request ( url , method = 'PUT' , body = json_document , timeout = self . timeout )
5,526
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/identity_manager.py#L52-L62
[ "def", "post_update", "(", "self", ",", "post_id", ",", "tags", "=", "None", ",", "file_", "=", "None", ",", "rating", "=", "None", ",", "source", "=", "None", ",", "is_rating_locked", "=", "None", ",", "is_note_locked", "=", "None", ",", "parent_id", "=", "None", ")", ":", "params", "=", "{", "'id'", ":", "post_id", ",", "'post[tags]'", ":", "tags", ",", "'post[rating]'", ":", "rating", ",", "'post[source]'", ":", "source", ",", "'post[is_rating_locked]'", ":", "is_rating_locked", ",", "'post[is_note_locked]'", ":", "is_note_locked", ",", "'post[parent_id]'", ":", "parent_id", "}", "if", "file_", "is", "not", "None", ":", "file_", "=", "{", "'post[file]'", ":", "open", "(", "file_", ",", "'rb'", ")", "}", "return", "self", ".", "_get", "(", "'post/update'", ",", "params", ",", "'PUT'", ",", "file_", ")", "else", ":", "return", "self", ".", "_get", "(", "'post/update'", ",", "params", ",", "'PUT'", ")" ]
Discharge the macarooon for the identity .
def discharge ( self , username , macaroon ) : caveats = macaroon . third_party_caveats ( ) if len ( caveats ) != 1 : raise InvalidMacaroon ( 'Invalid number of third party caveats (1 != {})' '' . format ( len ( caveats ) ) ) url = '{}discharger/discharge?discharge-for-user={}&id={}' . format ( self . url , quote ( username ) , caveats [ 0 ] [ 1 ] ) logging . debug ( 'Sending identity info to {}' . format ( url ) ) logging . debug ( 'data is {}' . format ( caveats [ 0 ] [ 1 ] ) ) response = make_request ( url , method = 'POST' , timeout = self . timeout ) try : macaroon = response [ 'Macaroon' ] json_macaroon = json . dumps ( macaroon ) except ( KeyError , UnicodeDecodeError ) as err : raise InvalidMacaroon ( 'Invalid macaroon from discharger: {}' . format ( err . message ) ) return base64 . urlsafe_b64encode ( json_macaroon . encode ( 'utf-8' ) )
5,527
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/identity_manager.py#L64-L92
[ "def", "_recompress_archive", "(", "archive", ",", "verbosity", "=", "0", ",", "interactive", "=", "True", ")", ":", "format", ",", "compression", "=", "get_archive_format", "(", "archive", ")", "if", "compression", ":", "# only recompress the compression itself (eg. for .tar.xz)", "format", "=", "compression", "tmpdir", "=", "util", ".", "tmpdir", "(", ")", "tmpdir2", "=", "util", ".", "tmpdir", "(", ")", "base", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "archive", ")", ")", "archive2", "=", "util", ".", "get_single_outfile", "(", "tmpdir2", ",", "base", ",", "extension", "=", "ext", ")", "try", ":", "# extract", "kwargs", "=", "dict", "(", "verbosity", "=", "verbosity", ",", "format", "=", "format", ",", "outdir", "=", "tmpdir", ")", "path", "=", "_extract_archive", "(", "archive", ",", "*", "*", "kwargs", ")", "# compress to new file", "olddir", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "path", ")", "try", ":", "kwargs", "=", "dict", "(", "verbosity", "=", "verbosity", ",", "interactive", "=", "interactive", ",", "format", "=", "format", ")", "files", "=", "tuple", "(", "os", ".", "listdir", "(", "path", ")", ")", "_create_archive", "(", "archive2", ",", "files", ",", "*", "*", "kwargs", ")", "finally", ":", "os", ".", "chdir", "(", "olddir", ")", "# check file sizes and replace if new file is smaller", "filesize", "=", "util", ".", "get_filesize", "(", "archive", ")", "filesize2", "=", "util", ".", "get_filesize", "(", "archive2", ")", "if", "filesize2", "<", "filesize", ":", "# replace file", "os", ".", "remove", "(", "archive", ")", "shutil", ".", "move", "(", "archive2", ",", "archive", ")", "diffsize", "=", "filesize", "-", "filesize2", "return", "\"... 
recompressed file is now %s smaller.\"", "%", "util", ".", "strsize", "(", "diffsize", ")", "finally", ":", "shutil", ".", "rmtree", "(", "tmpdir", ",", "onerror", "=", "rmtree_log_error", ")", "shutil", ".", "rmtree", "(", "tmpdir2", ",", "onerror", "=", "rmtree_log_error", ")", "return", "\"... recompressed file is not smaller, leaving archive as is.\"" ]
Discharge token for a user .
def discharge_token ( self , username ) : url = '{}discharge-token-for-user?username={}' . format ( self . url , quote ( username ) ) logging . debug ( 'Sending identity info to {}' . format ( url ) ) response = make_request ( url , method = 'GET' , timeout = self . timeout ) try : macaroon = response [ 'DischargeToken' ] json_macaroon = json . dumps ( macaroon ) except ( KeyError , UnicodeDecodeError ) as err : raise InvalidMacaroon ( 'Invalid macaroon from discharger: {}' . format ( err . message ) ) return base64 . urlsafe_b64encode ( "[{}]" . format ( json_macaroon ) . encode ( 'utf-8' ) )
5,528
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/identity_manager.py#L94-L113
[ "def", "delete_lifecycle_configuration", "(", "Bucket", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "delete_bucket_lifecycle", "(", "Bucket", "=", "Bucket", ")", "return", "{", "'deleted'", ":", "True", ",", "'name'", ":", "Bucket", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'deleted'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
Set extra info for the given user .
def set_extra_info ( self , username , extra_info ) : url = self . _get_extra_info_url ( username ) make_request ( url , method = 'PUT' , body = extra_info , timeout = self . timeout )
5,529
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/identity_manager.py#L122-L132
[ "def", "delete_classifier", "(", "self", ",", "classifier_id", ",", "*", "*", "kwargs", ")", ":", "if", "classifier_id", "is", "None", ":", "raise", "ValueError", "(", "'classifier_id must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'watson_vision_combined'", ",", "'V3'", ",", "'delete_classifier'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", "}", "url", "=", "'/v3/classifiers/{0}'", ".", "format", "(", "*", "self", ".", "_encode_path_vars", "(", "classifier_id", ")", ")", "response", "=", "self", ".", "request", "(", "method", "=", "'DELETE'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "accept_json", "=", "True", ")", "return", "response" ]
Get extra info for the given user .
def get_extra_info ( self , username ) : url = self . _get_extra_info_url ( username ) return make_request ( url , timeout = self . timeout )
5,530
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/identity_manager.py#L134-L142
[ "def", "delete_classifier", "(", "self", ",", "classifier_id", ",", "*", "*", "kwargs", ")", ":", "if", "classifier_id", "is", "None", ":", "raise", "ValueError", "(", "'classifier_id must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'watson_vision_combined'", ",", "'V3'", ",", "'delete_classifier'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", "}", "url", "=", "'/v3/classifiers/{0}'", ".", "format", "(", "*", "self", ".", "_encode_path_vars", "(", "classifier_id", ")", ")", "response", "=", "self", ".", "request", "(", "method", "=", "'DELETE'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "accept_json", "=", "True", ")", "return", "response" ]
Do all required parameters have values?
def is_complete ( self ) : return all ( p . name in self . values for p in self . parameters if p . required )
5,531
https://github.com/consbio/ncdjango/blob/f807bfd1e4083ab29fbc3c4d4418be108383a710/ncdjango/geoprocessing/params.py#L95-L98
[ "def", "_error_messages", "(", "self", ",", "driver_id", ")", ":", "assert", "isinstance", "(", "driver_id", ",", "ray", ".", "DriverID", ")", "message", "=", "self", ".", "redis_client", ".", "execute_command", "(", "\"RAY.TABLE_LOOKUP\"", ",", "ray", ".", "gcs_utils", ".", "TablePrefix", ".", "ERROR_INFO", ",", "\"\"", ",", "driver_id", ".", "binary", "(", ")", ")", "# If there are no errors, return early.", "if", "message", "is", "None", ":", "return", "[", "]", "gcs_entries", "=", "ray", ".", "gcs_utils", ".", "GcsTableEntry", ".", "GetRootAsGcsTableEntry", "(", "message", ",", "0", ")", "error_messages", "=", "[", "]", "for", "i", "in", "range", "(", "gcs_entries", ".", "EntriesLength", "(", ")", ")", ":", "error_data", "=", "ray", ".", "gcs_utils", ".", "ErrorTableData", ".", "GetRootAsErrorTableData", "(", "gcs_entries", ".", "Entries", "(", "i", ")", ",", "0", ")", "assert", "driver_id", ".", "binary", "(", ")", "==", "error_data", ".", "DriverId", "(", ")", "error_message", "=", "{", "\"type\"", ":", "decode", "(", "error_data", ".", "Type", "(", ")", ")", ",", "\"message\"", ":", "decode", "(", "error_data", ".", "ErrorMessage", "(", ")", ")", ",", "\"timestamp\"", ":", "error_data", ".", "Timestamp", "(", ")", ",", "}", "error_messages", ".", "append", "(", "error_message", ")", "return", "error_messages" ]
Encode an item write command
def _encode_write ( dynamizer , data , action , key ) : # Strip null values out of data data = dict ( ( ( k , dynamizer . encode ( v ) ) for k , v in six . iteritems ( data ) if not is_null ( v ) ) ) return { action : { key : data , } }
5,532
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L148-L157
[ "def", "using_transport", "(", "self", ",", "transport", "=", "None", ",", "path", "=", "None", ",", "logs", "=", "True", ")", ":", "if", "not", "transport", ":", "return", "self", "if", "self", ".", "transport", "==", "transport", "and", "self", ".", "path", "==", "path", ":", "return", "self", "path_required", "=", "[", "SkopeoTransport", ".", "DIRECTORY", ",", "SkopeoTransport", ".", "DOCKER_ARCHIVE", ",", "SkopeoTransport", ".", "OCI", "]", "if", "transport", "in", "path_required", ":", "if", "not", "path", "and", "logs", ":", "logging", ".", "debug", "(", "\"path not provided, temporary path was used\"", ")", "self", ".", "path", "=", "self", ".", "mount", "(", "path", ")", ".", "mount_point", "elif", "transport", "==", "SkopeoTransport", ".", "OSTREE", ":", "if", "path", "and", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "raise", "ConuException", "(", "\"Path '\"", ",", "path", ",", "\"' for OSTree transport is not absolute\"", ")", "if", "not", "path", "and", "logs", ":", "logging", ".", "debug", "(", "\"path not provided, default /ostree/repo path was used\"", ")", "self", ".", "path", "=", "path", "else", ":", "if", "path", "and", "logs", ":", "logging", ".", "warning", "(", "\"path %s was ignored!\"", ",", "path", ")", "self", ".", "path", "=", "None", "self", ".", "transport", "=", "transport", "return", "self" ]
Encode query constraints in Dynamo format
def encode_query_kwargs ( dynamizer , kwargs ) : ret = { } for k , v in six . iteritems ( kwargs ) : if '__' not in k : raise TypeError ( "Invalid query argument '%s'" % k ) name , condition_key = k . split ( '__' ) # Convert ==None to IS_NULL if condition_key == 'eq' and is_null ( v ) : condition_key = 'null' v = True # null is a special case if condition_key == 'null' : ret [ name ] = { 'ComparisonOperator' : 'NULL' if v else 'NOT_NULL' } continue elif condition_key not in ( 'in' , 'between' ) : v = ( v , ) ret [ name ] = { 'AttributeValueList' : [ dynamizer . encode ( value ) for value in v ] , 'ComparisonOperator' : CONDITIONS [ condition_key ] , } return ret
5,533
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L165-L188
[ "def", "ttl", "(", "self", ",", "value", ")", ":", "# get timer", "timer", "=", "getattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "None", ")", "# if timer is running, stop the timer", "if", "timer", "is", "not", "None", ":", "timer", ".", "cancel", "(", ")", "# initialize timestamp", "timestamp", "=", "None", "# if value is None", "if", "value", "is", "None", ":", "# nonify timer", "timer", "=", "None", "else", ":", "# else, renew a timer", "# get timestamp", "timestamp", "=", "time", "(", ")", "+", "value", "# start a new timer", "timer", "=", "Timer", "(", "value", ",", "self", ".", "__del__", ")", "timer", ".", "start", "(", ")", "# set/update attributes", "setattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "timer", ")", "setattr", "(", "self", ",", "Annotation", ".", "__TS", ",", "timestamp", ")" ]
Get the attributes for the update
def attrs ( self , dynamizer ) : ret = { self . key : { 'Action' : self . action , } } if not is_null ( self . value ) : ret [ self . key ] [ 'Value' ] = dynamizer . encode ( self . value ) return ret
5,534
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L109-L118
[ "def", "isPIDValid", "(", "self", ",", "pid", ")", ":", "## Slightly copied wholesale from http://stackoverflow.com/questions/568271/how-to-check-if-there-exists-a-process-with-a-given-pid", "## Thanks to http://stackoverflow.com/users/1777162/ntrrgc and http://stackoverflow.com/users/234270/speedplane", "class", "ExitCodeProcess", "(", "ctypes", ".", "Structure", ")", ":", "_fields_", "=", "[", "(", "'hProcess'", ",", "ctypes", ".", "c_void_p", ")", ",", "(", "'lpExitCode'", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_ulong", ")", ")", "]", "SYNCHRONIZE", "=", "0x100000", "PROCESS_QUERY_LIMITED_INFORMATION", "=", "0x1000", "process", "=", "self", ".", "_kernel32", ".", "OpenProcess", "(", "SYNCHRONIZE", "|", "PROCESS_QUERY_LIMITED_INFORMATION", ",", "0", ",", "pid", ")", "if", "not", "process", ":", "return", "False", "ec", "=", "ExitCodeProcess", "(", ")", "out", "=", "self", ".", "_kernel32", ".", "GetExitCodeProcess", "(", "process", ",", "ctypes", ".", "byref", "(", "ec", ")", ")", "if", "not", "out", ":", "err", "=", "self", ".", "_kernel32", ".", "GetLastError", "(", ")", "if", "self", ".", "_kernel32", ".", "GetLastError", "(", ")", "==", "5", ":", "# Access is denied.", "logging", ".", "warning", "(", "\"Access is denied to get pid info.\"", ")", "self", ".", "_kernel32", ".", "CloseHandle", "(", "process", ")", "return", "False", "elif", "bool", "(", "ec", ".", "lpExitCode", ")", ":", "# There is an exit code, it quit", "self", ".", "_kernel32", ".", "CloseHandle", "(", "process", ")", "return", "False", "# No exit code, it's running.", "self", ".", "_kernel32", ".", "CloseHandle", "(", "process", ")", "return", "True" ]
Get the expected values for the update
def expected ( self , dynamizer ) : if self . _expect_kwargs : return encode_query_kwargs ( dynamizer , self . _expect_kwargs ) if self . _expected is not NO_ARG : ret = { } if is_null ( self . _expected ) : ret [ 'Exists' ] = False else : ret [ 'Value' ] = dynamizer . encode ( self . _expected ) ret [ 'Exists' ] = True return { self . key : ret } return { }
5,535
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L120-L132
[ "def", "frompng", "(", "path", ",", "ext", "=", "'png'", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "recursive", "=", "False", ",", "npartitions", "=", "None", ",", "labels", "=", "None", ",", "engine", "=", "None", ",", "credentials", "=", "None", ")", ":", "from", "scipy", ".", "misc", "import", "imread", "def", "getarray", "(", "idx_buffer_filename", ")", ":", "idx", ",", "buf", ",", "_", "=", "idx_buffer_filename", "fbuf", "=", "BytesIO", "(", "buf", ")", "yield", "(", "idx", ",", ")", ",", "imread", "(", "fbuf", ")", "return", "frompath", "(", "path", ",", "accessor", "=", "getarray", ",", "ext", "=", "ext", ",", "start", "=", "start", ",", "stop", "=", "stop", ",", "recursive", "=", "recursive", ",", "npartitions", "=", "npartitions", ",", "labels", "=", "labels", ",", "engine", "=", "engine", ",", "credentials", "=", "credentials", ")" ]
Flush pending items to Dynamo
def flush ( self ) : items = [ ] for data in self . _to_put : items . append ( encode_put ( self . connection . dynamizer , data ) ) for data in self . _to_delete : items . append ( encode_delete ( self . connection . dynamizer , data ) ) self . _write ( items ) self . _to_put = [ ] self . _to_delete = [ ]
5,536
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L261-L272
[ "def", "from_string", "(", "contents", ")", ":", "if", "contents", "[", "-", "1", "]", "!=", "\"\\n\"", ":", "contents", "+=", "\"\\n\"", "white_space", "=", "r\"[ \\t\\r\\f\\v]\"", "natoms_line", "=", "white_space", "+", "r\"*\\d+\"", "+", "white_space", "+", "r\"*\\n\"", "comment_line", "=", "r\"[^\\n]*\\n\"", "coord_lines", "=", "r\"(\\s*\\w+\\s+[0-9\\-\\+\\.eEdD]+\\s+[0-9\\-\\+\\.eEdD]+\\s+[0-9\\-\\+\\.eEdD]+\\s*\\n)+\"", "frame_pattern_text", "=", "natoms_line", "+", "comment_line", "+", "coord_lines", "pat", "=", "re", ".", "compile", "(", "frame_pattern_text", ",", "re", ".", "MULTILINE", ")", "mols", "=", "[", "]", "for", "xyz_match", "in", "pat", ".", "finditer", "(", "contents", ")", ":", "xyz_text", "=", "xyz_match", ".", "group", "(", "0", ")", "mols", ".", "append", "(", "XYZ", ".", "_from_frame_string", "(", "xyz_text", ")", ")", "return", "XYZ", "(", "mols", ")" ]
Perform a batch write and handle the response
def _write ( self , items ) : response = self . _batch_write_item ( items ) if 'consumed_capacity' in response : # Comes back as a list from BatchWriteItem self . consumed_capacity = sum ( response [ 'consumed_capacity' ] , self . consumed_capacity ) if response . get ( 'UnprocessedItems' ) : unprocessed = response [ 'UnprocessedItems' ] . get ( self . tablename , [ ] ) # Some items have not been processed. Stow them for now & # re-attempt processing on ``__exit__``. LOG . info ( "%d items were unprocessed. Storing for later." , len ( unprocessed ) ) self . _unprocessed . extend ( unprocessed ) # Getting UnprocessedItems indicates that we are exceeding our # throughput. So sleep for a bit. self . _attempt += 1 self . connection . exponential_sleep ( self . _attempt ) else : # No UnprocessedItems means our request rate is fine, so we can # reset the attempt number. self . _attempt = 0 return response
5,537
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L274-L299
[ "def", "is_valid_complex_fault_mesh_spacing", "(", "self", ")", ":", "rms", "=", "getattr", "(", "self", ",", "'rupture_mesh_spacing'", ",", "None", ")", "if", "rms", "and", "not", "getattr", "(", "self", ",", "'complex_fault_mesh_spacing'", ",", "None", ")", ":", "self", ".", "complex_fault_mesh_spacing", "=", "self", ".", "rupture_mesh_spacing", "return", "True" ]
Resend all unprocessed items
def resend_unprocessed ( self ) : LOG . info ( "Re-sending %d unprocessed items." , len ( self . _unprocessed ) ) while self . _unprocessed : to_resend = self . _unprocessed [ : MAX_WRITE_BATCH ] self . _unprocessed = self . _unprocessed [ MAX_WRITE_BATCH : ] LOG . info ( "Sending %d items" , len ( to_resend ) ) self . _write ( to_resend ) LOG . info ( "%d unprocessed items left" , len ( self . _unprocessed ) )
5,538
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L301-L310
[ "def", "add_args", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'-host'", ",", "action", "=", "'store'", ",", "dest", "=", "'host'", ",", "default", "=", "'127.0.0.1'", ",", "help", "=", "'DEFAULT \"127.0.0.1\"'", ")", "parser", ".", "add_argument", "(", "'-port'", ",", "action", "=", "'store'", ",", "dest", "=", "'port'", ",", "default", "=", "'2947'", ",", "help", "=", "'DEFAULT 2947'", ",", "type", "=", "int", ")", "parser", ".", "add_argument", "(", "'-json'", ",", "dest", "=", "'gpsd_protocol'", ",", "const", "=", "'json'", ",", "action", "=", "'store_const'", ",", "default", "=", "'json'", ",", "help", "=", "'DEFAULT JSON objects */'", ")", "parser", ".", "add_argument", "(", "'-device'", ",", "dest", "=", "'devicepath'", ",", "action", "=", "'store'", ",", "help", "=", "'alternate devicepath e.g.,\"-device /dev/ttyUSB4\"'", ")", "# Infrequently used options", "parser", ".", "add_argument", "(", "'-nmea'", ",", "dest", "=", "'gpsd_protocol'", ",", "const", "=", "'nmea'", ",", "action", "=", "'store_const'", ",", "help", "=", "'*/ output in NMEA */'", ")", "# parser.add_argument('-rare', dest='gpsd_protocol', const='rare', action='store_const', help='*/ output of packets in hex */')", "# parser.add_argument('-raw', dest='gpsd_protocol', const='raw', action='store_const', help='*/ output of raw packets */')", "# parser.add_argument('-scaled', dest='gpsd_protocol', const='scaled', action='store_const', help='*/ scale output to floats */')", "# parser.add_argument('-timing', dest='gpsd_protocol', const='timing', action='store_const', help='*/ timing information */')", "# parser.add_argument('-split24', dest='gpsd_protocol', const='split24', action='store_const', help='*/ split AIS Type 24s */')", "# parser.add_argument('-pps', dest='gpsd_protocol', const='pps', action='store_const', help='*/ enable PPS JSON */')", "parser", ".", "add_argument", "(", "'-v'", ",", "'--version'", ",", "action", 
"=", "'version'", ",", "version", "=", "'Version: {}'", ".", "format", "(", "__version__", ")", ")", "cli_args", "=", "parser", ".", "parse_args", "(", ")", "return", "cli_args" ]
Make a BatchWriteItem call to Dynamo
def _batch_write_item ( self , items ) : kwargs = { 'RequestItems' : { self . tablename : items , } , 'ReturnConsumedCapacity' : self . return_capacity , 'ReturnItemCollectionMetrics' : self . return_item_collection_metrics , } return self . connection . call ( 'batch_write_item' , * * kwargs )
5,539
https://github.com/stevearc/dynamo3/blob/f897c40ece28586272dbcab8f0d99a14a1831dda/dynamo3/batch.py#L312-L321
[ "def", "from_string", "(", "contents", ")", ":", "if", "contents", "[", "-", "1", "]", "!=", "\"\\n\"", ":", "contents", "+=", "\"\\n\"", "white_space", "=", "r\"[ \\t\\r\\f\\v]\"", "natoms_line", "=", "white_space", "+", "r\"*\\d+\"", "+", "white_space", "+", "r\"*\\n\"", "comment_line", "=", "r\"[^\\n]*\\n\"", "coord_lines", "=", "r\"(\\s*\\w+\\s+[0-9\\-\\+\\.eEdD]+\\s+[0-9\\-\\+\\.eEdD]+\\s+[0-9\\-\\+\\.eEdD]+\\s*\\n)+\"", "frame_pattern_text", "=", "natoms_line", "+", "comment_line", "+", "coord_lines", "pat", "=", "re", ".", "compile", "(", "frame_pattern_text", ",", "re", ".", "MULTILINE", ")", "mols", "=", "[", "]", "for", "xyz_match", "in", "pat", ".", "finditer", "(", "contents", ")", ":", "xyz_text", "=", "xyz_match", ".", "group", "(", "0", ")", "mols", ".", "append", "(", "XYZ", ".", "_from_frame_string", "(", "xyz_text", ")", ")", "return", "XYZ", "(", "mols", ")" ]
Get the entity_id as a string if it is a Reference .
def _get_path ( entity_id ) : try : path = entity_id . path ( ) except AttributeError : path = entity_id if path . startswith ( 'cs:' ) : path = path [ 3 : ] return path
5,540
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L482-L495
[ "def", "complete_experiment", "(", "self", ",", "status", ")", ":", "self", ".", "log", "(", "\"Bot player completing experiment. Status: {}\"", ".", "format", "(", "status", ")", ")", "while", "True", ":", "url", "=", "\"{host}/{status}?participant_id={participant_id}\"", ".", "format", "(", "host", "=", "self", ".", "host", ",", "participant_id", "=", "self", ".", "participant_id", ",", "status", "=", "status", ")", "try", ":", "result", "=", "requests", ".", "get", "(", "url", ")", "result", ".", "raise_for_status", "(", ")", "except", "RequestException", ":", "self", ".", "stochastic_sleep", "(", ")", "continue", "return", "result" ]
Make a get request against the charmstore .
def _get ( self , url ) : try : response = requests . get ( url , verify = self . verify , cookies = self . cookies , timeout = self . timeout , auth = self . _client . auth ( ) ) response . raise_for_status ( ) return response except HTTPError as exc : if exc . response . status_code in ( 404 , 407 ) : raise EntityNotFound ( url ) else : message = ( 'Error during request: {url} ' 'status code:({code}) ' 'message: {message}' ) . format ( url = url , code = exc . response . status_code , message = exc . response . text ) logging . error ( message ) raise ServerError ( exc . response . status_code , exc . response . text , message ) except Timeout : message = 'Request timed out: {url} timeout: {timeout}' message = message . format ( url = url , timeout = self . timeout ) logging . error ( message ) raise ServerError ( message ) except RequestException as exc : message = ( 'Error during request: {url} ' 'message: {message}' ) . format ( url = url , message = exc ) logging . error ( message ) raise ServerError ( exc . args [ 0 ] [ 1 ] . errno , exc . args [ 0 ] [ 1 ] . strerror , message )
5,541
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L66-L106
[ "def", "export", "(", "self", ",", "name", ",", "columns", ",", "points", ")", ":", "# Check for completion of loop for all exports", "if", "name", "==", "self", ".", "plugins_to_export", "(", ")", "[", "0", "]", "and", "self", ".", "buffer", "!=", "{", "}", ":", "# One whole loop has been completed", "# Flush stats to file", "logger", ".", "debug", "(", "\"Exporting stats ({}) to JSON file ({})\"", ".", "format", "(", "listkeys", "(", "self", ".", "buffer", ")", ",", "self", ".", "json_filename", ")", ")", "# Export stats to JSON file", "data_json", "=", "json", ".", "dumps", "(", "self", ".", "buffer", ")", "self", ".", "json_file", ".", "write", "(", "\"{}\\n\"", ".", "format", "(", "data_json", ")", ")", "# Reset buffer", "self", ".", "buffer", "=", "{", "}", "# Add current stat to the buffer", "self", ".", "buffer", "[", "name", "]", "=", "dict", "(", "zip", "(", "columns", ",", "points", ")", ")" ]
Retrieve metadata about an entity in the charmstore .
def _meta ( self , entity_id , includes , channel = None ) : queries = [ ] if includes is not None : queries . extend ( [ ( 'include' , include ) for include in includes ] ) if channel is not None : queries . append ( ( 'channel' , channel ) ) if len ( queries ) : url = '{}/{}/meta/any?{}' . format ( self . url , _get_path ( entity_id ) , urlencode ( queries ) ) else : url = '{}/{}/meta/any' . format ( self . url , _get_path ( entity_id ) ) data = self . _get ( url ) return data . json ( )
5,542
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L108-L127
[ "def", "from_string", "(", "contents", ")", ":", "if", "contents", "[", "-", "1", "]", "!=", "\"\\n\"", ":", "contents", "+=", "\"\\n\"", "white_space", "=", "r\"[ \\t\\r\\f\\v]\"", "natoms_line", "=", "white_space", "+", "r\"*\\d+\"", "+", "white_space", "+", "r\"*\\n\"", "comment_line", "=", "r\"[^\\n]*\\n\"", "coord_lines", "=", "r\"(\\s*\\w+\\s+[0-9\\-\\+\\.eEdD]+\\s+[0-9\\-\\+\\.eEdD]+\\s+[0-9\\-\\+\\.eEdD]+\\s*\\n)+\"", "frame_pattern_text", "=", "natoms_line", "+", "comment_line", "+", "coord_lines", "pat", "=", "re", ".", "compile", "(", "frame_pattern_text", ",", "re", ".", "MULTILINE", ")", "mols", "=", "[", "]", "for", "xyz_match", "in", "pat", ".", "finditer", "(", "contents", ")", ":", "xyz_text", "=", "xyz_match", ".", "group", "(", "0", ")", "mols", ".", "append", "(", "XYZ", ".", "_from_frame_string", "(", "xyz_text", ")", ")", "return", "XYZ", "(", "mols", ")" ]
Get the default data for entities .
def entities ( self , entity_ids ) : url = '%s/meta/any?include=id&' % self . url for entity_id in entity_ids : url += 'id=%s&' % _get_path ( entity_id ) # Remove the trailing '&' from the URL. url = url [ : - 1 ] data = self . _get ( url ) return data . json ( )
5,543
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L148-L159
[ "def", "_get_row_within_width", "(", "self", ",", "row", ")", ":", "table", "=", "self", ".", "_table", "lpw", ",", "rpw", "=", "table", ".", "left_padding_widths", ",", "table", ".", "right_padding_widths", "wep", "=", "table", ".", "width_exceed_policy", "list_of_rows", "=", "[", "]", "if", "(", "wep", "is", "WidthExceedPolicy", ".", "WEP_STRIP", "or", "wep", "is", "WidthExceedPolicy", ".", "WEP_ELLIPSIS", ")", ":", "# Let's strip the row", "delimiter", "=", "''", "if", "wep", "is", "WidthExceedPolicy", ".", "WEP_STRIP", "else", "'...'", "row_item_list", "=", "[", "]", "for", "index", ",", "row_item", "in", "enumerate", "(", "row", ")", ":", "left_pad", "=", "table", ".", "_column_pad", "*", "lpw", "[", "index", "]", "right_pad", "=", "table", ".", "_column_pad", "*", "rpw", "[", "index", "]", "clmp_str", "=", "(", "left_pad", "+", "self", ".", "_clamp_string", "(", "row_item", ",", "index", ",", "delimiter", ")", "+", "right_pad", ")", "row_item_list", ".", "append", "(", "clmp_str", ")", "list_of_rows", ".", "append", "(", "row_item_list", ")", "elif", "wep", "is", "WidthExceedPolicy", ".", "WEP_WRAP", ":", "# Let's wrap the row", "string_partition", "=", "[", "]", "for", "index", ",", "row_item", "in", "enumerate", "(", "row", ")", ":", "width", "=", "table", ".", "column_widths", "[", "index", "]", "-", "lpw", "[", "index", "]", "-", "rpw", "[", "index", "]", "string_partition", ".", "append", "(", "textwrap", "(", "row_item", ",", "width", ")", ")", "for", "row_items", "in", "zip_longest", "(", "*", "string_partition", ",", "fillvalue", "=", "''", ")", ":", "row_item_list", "=", "[", "]", "for", "index", ",", "row_item", "in", "enumerate", "(", "row_items", ")", ":", "left_pad", "=", "table", ".", "_column_pad", "*", "lpw", "[", "index", "]", "right_pad", "=", "table", ".", "_column_pad", "*", "rpw", "[", "index", "]", "row_item_list", ".", "append", "(", "left_pad", "+", "row_item", "+", "right_pad", ")", "list_of_rows", ".", "append", "(", 
"row_item_list", ")", "if", "len", "(", "list_of_rows", ")", "==", "0", ":", "return", "[", "[", "''", "]", "*", "table", ".", "column_count", "]", "else", ":", "return", "list_of_rows" ]
Get the default data for a bundle .
def bundle ( self , bundle_id , channel = None ) : return self . entity ( bundle_id , get_files = True , channel = channel )
5,544
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L161-L167
[ "def", "modify", "(", "self", ",", "current_modified_line", ",", "anchors", ",", "file_path", ",", "file_lines", "=", "None", ",", "index", "=", "None", ")", ":", "open_wrapper_index", "=", "current_modified_line", ".", "rfind", "(", "self", ".", "_open", ")", "# '- 1' removes trailing space. May want to modify to completely", "# strip whitespace at the end, instead of only working for a single", "# space", "return", "current_modified_line", "[", ":", "open_wrapper_index", "-", "1", "]", "+", "\"\\n\"" ]
Get the default data for a charm .
def charm ( self , charm_id , channel = None ) : return self . entity ( charm_id , get_files = True , channel = channel )
5,545
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L169-L175
[ "def", "_finalize_merge", "(", "out_file", ",", "bam_files", ",", "config", ")", ":", "# Ensure timestamps are up to date on output file and index", "# Works around issues on systems with inconsistent times", "for", "ext", "in", "[", "\"\"", ",", "\".bai\"", "]", ":", "if", "os", ".", "path", ".", "exists", "(", "out_file", "+", "ext", ")", ":", "subprocess", ".", "check_call", "(", "[", "\"touch\"", ",", "out_file", "+", "ext", "]", ")", "for", "b", "in", "bam_files", ":", "utils", ".", "save_diskspace", "(", "b", ",", "\"BAM merged to %s\"", "%", "out_file", ",", "config", ")" ]
Generate the path to the icon for charms .
def charm_icon_url ( self , charm_id , channel = None ) : url = '{}/{}/icon.svg' . format ( self . url , _get_path ( charm_id ) ) return _add_channel ( url , channel )
5,546
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L177-L185
[ "def", "_dump_cfg", "(", "cfg_file", ")", ":", "if", "__salt__", "[", "'file.file_exists'", "]", "(", "cfg_file", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "cfg_file", ",", "'r'", ")", "as", "fp_", ":", "log", ".", "debug", "(", "\"zonecfg - configuration file:\\n%s\"", ",", "\"\"", ".", "join", "(", "salt", ".", "utils", ".", "data", ".", "decode", "(", "fp_", ".", "readlines", "(", ")", ")", ")", ")" ]
Get the charm icon .
def charm_icon ( self , charm_id , channel = None ) : url = self . charm_icon_url ( charm_id , channel = channel ) response = self . _get ( url ) return response . content
5,547
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L187-L195
[ "def", "realtime_updates", "(", "self", ")", ":", "# TODO: make this available for every buffer", "logger", ".", "info", "(", "\"starting receiving events from docker\"", ")", "it", "=", "self", ".", "d", ".", "realtime_updates", "(", ")", "while", "True", ":", "try", ":", "event", "=", "next", "(", "it", ")", "except", "NotifyError", "as", "ex", ":", "self", ".", "ui", ".", "notify_message", "(", "\"error when receiving realtime events from docker: %s\"", "%", "ex", ",", "level", "=", "\"error\"", ")", "return", "# FIXME: we should pass events to all buffers", "# ATM the buffers can't be rendered since they are not displayed", "# and hence traceback like this: ListBoxError(\"Listbox contents too short! ...", "logger", ".", "debug", "(", "\"pass event to current buffer %s\"", ",", "self", ".", "ui", ".", "current_buffer", ")", "try", ":", "self", ".", "ui", ".", "current_buffer", ".", "process_realtime_event", "(", "event", ")", "except", "Exception", "as", "ex", ":", "# swallow any exc", "logger", ".", "error", "(", "\"error while processing runtime event: %r\"", ",", "ex", ")" ]
Get the bundle visualization .
def bundle_visualization ( self , bundle_id , channel = None ) : url = self . bundle_visualization_url ( bundle_id , channel = channel ) response = self . _get ( url ) return response . content
5,548
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L197-L205
[ "def", "comments_are_open", "(", "content_object", ")", ":", "moderator", "=", "get_model_moderator", "(", "content_object", ".", "__class__", ")", "if", "moderator", "is", "None", ":", "return", "True", "# Check the 'enable_field', 'auto_close_field' and 'close_after',", "# by reusing the basic Django policies.", "return", "CommentModerator", ".", "allow", "(", "moderator", ",", "None", ",", "content_object", ",", "None", ")" ]
Generate the path to the visualization for bundles .
def bundle_visualization_url ( self , bundle_id , channel = None ) : url = '{}/{}/diagram.svg' . format ( self . url , _get_path ( bundle_id ) ) return _add_channel ( url , channel )
5,549
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L207-L215
[ "def", "cc_to_local_params", "(", "pitch", ",", "radius", ",", "oligo", ")", ":", "rloc", "=", "numpy", ".", "sin", "(", "numpy", ".", "pi", "/", "oligo", ")", "*", "radius", "alpha", "=", "numpy", ".", "arctan", "(", "(", "2", "*", "numpy", ".", "pi", "*", "radius", ")", "/", "pitch", ")", "alphaloc", "=", "numpy", ".", "cos", "(", "(", "numpy", ".", "pi", "/", "2", ")", "-", "(", "(", "numpy", ".", "pi", ")", "/", "oligo", ")", ")", "*", "alpha", "pitchloc", "=", "(", "2", "*", "numpy", ".", "pi", "*", "rloc", ")", "/", "numpy", ".", "tan", "(", "alphaloc", ")", "return", "pitchloc", ",", "rloc", ",", "numpy", ".", "rad2deg", "(", "alphaloc", ")" ]
Generate the url path for the readme of an entity .
def entity_readme_url ( self , entity_id , channel = None ) : url = '{}/{}/readme' . format ( self . url , _get_path ( entity_id ) ) return _add_channel ( url , channel )
5,550
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L217-L224
[ "def", "_cast_int", "(", "self", ",", "value", ")", ":", "try", ":", "return", "int", "(", "value", ")", "except", "ValueError", ":", "msg", ".", "err", "(", "\"Cannot convert {} to int for line {}.\"", ".", "format", "(", "value", ",", "self", ".", "identifier", ")", ")", "exit", "(", "1", ")" ]
Get the readme for an entity .
def entity_readme_content ( self , entity_id , channel = None ) : readme_url = self . entity_readme_url ( entity_id , channel = channel ) response = self . _get ( readme_url ) return response . text
5,551
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L226-L234
[ "def", "coerce_value", "(", "cls", ",", "v", ")", ":", "if", "isinstance", "(", "v", ",", "cls", ".", "itemtype", ")", ":", "return", "v", "else", ":", "try", ":", "return", "cls", ".", "coerceitem", "(", "v", ")", "except", "Exception", "as", "e", ":", "raise", "exc", ".", "CollectionItemCoerceError", "(", "itemtype", "=", "cls", ".", "itemtype", ",", "colltype", "=", "cls", ",", "passed", "=", "v", ",", "exc", "=", "e", ",", ")" ]
Generate a URL for the archive of an entity ..
def archive_url ( self , entity_id , channel = None ) : url = '{}/{}/archive' . format ( self . url , _get_path ( entity_id ) ) return _add_channel ( url , channel )
5,552
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L236-L244
[ "def", "_fix_sitk_bug", "(", "path", ",", "metadata", ")", ":", "ds", "=", "dicom", ".", "read_file", "(", "path", ")", "try", ":", "metadata", "[", "\"voxelsize_mm\"", "]", "[", "0", "]", "=", "ds", ".", "SpacingBetweenSlices", "except", "Exception", "as", "e", ":", "logger", ".", "warning", "(", "\"Read dicom 'SpacingBetweenSlices' failed: \"", ",", "e", ")", "return", "metadata" ]
Generate a URL for a file in an archive without requesting it .
def file_url ( self , entity_id , filename , channel = None ) : url = '{}/{}/archive/{}' . format ( self . url , _get_path ( entity_id ) , filename ) return _add_channel ( url , channel )
5,553
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L246-L255
[ "def", "add_multizone", "(", "self", ",", "group_cast", ")", ":", "self", ".", "_groups", "[", "str", "(", "group_cast", ".", "uuid", ")", "]", "=", "{", "'chromecast'", ":", "group_cast", ",", "'listener'", ":", "Listener", "(", "group_cast", ",", "self", ".", "_casts", ")", ",", "'members'", ":", "set", "(", ")", "}" ]
Get the files or file contents of a file for an entity .
def files ( self , entity_id , manifest = None , filename = None , read_file = False , channel = None ) : if manifest is None : manifest_url = '{}/{}/meta/manifest' . format ( self . url , _get_path ( entity_id ) ) manifest_url = _add_channel ( manifest_url , channel ) manifest = self . _get ( manifest_url ) manifest = manifest . json ( ) files = { } for f in manifest : manifest_name = f [ 'Name' ] file_url = self . file_url ( _get_path ( entity_id ) , manifest_name , channel = channel ) files [ manifest_name ] = file_url if filename : file_url = files . get ( filename , None ) if file_url is None : raise EntityNotFound ( entity_id , filename ) if read_file : data = self . _get ( file_url ) return data . text else : return file_url else : return files
5,554
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L257-L303
[ "def", "_get_broadcast_shape", "(", "shape1", ",", "shape2", ")", ":", "if", "shape1", "==", "shape2", ":", "return", "shape1", "length1", "=", "len", "(", "shape1", ")", "length2", "=", "len", "(", "shape2", ")", "if", "length1", ">", "length2", ":", "shape", "=", "list", "(", "shape1", ")", "else", ":", "shape", "=", "list", "(", "shape2", ")", "i", "=", "max", "(", "length1", ",", "length2", ")", "-", "1", "for", "a", ",", "b", "in", "zip", "(", "shape1", "[", ":", ":", "-", "1", "]", ",", "shape2", "[", ":", ":", "-", "1", "]", ")", ":", "if", "a", "!=", "1", "and", "b", "!=", "1", "and", "a", "!=", "b", ":", "raise", "ValueError", "(", "'shape1=%s is not broadcastable to shape2=%s'", "%", "(", "shape1", ",", "shape2", ")", ")", "shape", "[", "i", "]", "=", "max", "(", "a", ",", "b", ")", "i", "-=", "1", "return", "tuple", "(", "shape", ")" ]
Return the resource url for a given resource on an entity .
def resource_url ( self , entity_id , name , revision ) : return '{}/{}/resource/{}/{}' . format ( self . url , _get_path ( entity_id ) , name , revision )
5,555
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L305-L316
[ "def", "_CompressHistogram", "(", "self", ",", "histo_ev", ")", ":", "return", "CompressedHistogramEvent", "(", "histo_ev", ".", "wall_time", ",", "histo_ev", ".", "step", ",", "compressor", ".", "compress_histogram_proto", "(", "histo_ev", ".", "histogram_value", ",", "self", ".", "_compression_bps", ")", ")" ]
Get the config data for a charm .
def config ( self , charm_id , channel = None ) : url = '{}/{}/meta/charm-config' . format ( self . url , _get_path ( charm_id ) ) data = self . _get ( _add_channel ( url , channel ) ) return data . json ( )
5,556
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L318-L326
[ "def", "workspace_state_changed", "(", "ob", ",", "event", ")", ":", "workspace", "=", "event", ".", "object", "roles", "=", "[", "'Guest'", ",", "]", "if", "event", ".", "new_state", ".", "id", "==", "'open'", ":", "api", ".", "group", ".", "grant_roles", "(", "groupname", "=", "INTRANET_USERS_GROUP_ID", ",", "obj", "=", "workspace", ",", "roles", "=", "roles", ",", ")", "workspace", ".", "reindexObjectSecurity", "(", ")", "elif", "event", ".", "old_state", ".", "id", "==", "'open'", ":", "api", ".", "group", ".", "revoke_roles", "(", "groupname", "=", "INTRANET_USERS_GROUP_ID", ",", "obj", "=", "workspace", ",", "roles", "=", "roles", ",", ")", "workspace", ".", "reindexObjectSecurity", "(", ")" ]
Get an entity s full id provided a partial one .
def entityId ( self , partial , channel = None ) : url = '{}/{}/meta/any' . format ( self . url , _get_path ( partial ) ) data = self . _get ( _add_channel ( url , channel ) ) return data . json ( ) [ 'Id' ]
5,557
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L328-L337
[ "def", "_clamp_string", "(", "self", ",", "row_item", ",", "column_index", ",", "delimiter", "=", "''", ")", ":", "width", "=", "(", "self", ".", "_table", ".", "column_widths", "[", "column_index", "]", "-", "self", ".", "_table", ".", "left_padding_widths", "[", "column_index", "]", "-", "self", ".", "_table", ".", "right_padding_widths", "[", "column_index", "]", ")", "if", "termwidth", "(", "row_item", ")", "<=", "width", ":", "return", "row_item", "else", ":", "if", "width", "-", "len", "(", "delimiter", ")", ">=", "0", ":", "clamped_string", "=", "(", "textwrap", "(", "row_item", ",", "width", "-", "len", "(", "delimiter", ")", ")", "[", "0", "]", "+", "delimiter", ")", "else", ":", "clamped_string", "=", "delimiter", "[", ":", "width", "]", "return", "clamped_string" ]
Search for entities in the charmstore .
def search ( self , text , includes = None , doc_type = None , limit = None , autocomplete = False , promulgated_only = False , tags = None , sort = None , owner = None , series = None ) : queries = self . _common_query_parameters ( doc_type , includes , owner , promulgated_only , series , sort ) if len ( text ) : queries . append ( ( 'text' , text ) ) if limit is not None : queries . append ( ( 'limit' , limit ) ) if autocomplete : queries . append ( ( 'autocomplete' , 1 ) ) if tags is not None : if type ( tags ) is list : tags = ',' . join ( tags ) queries . append ( ( 'tags' , tags ) ) if len ( queries ) : url = '{}/search?{}' . format ( self . url , urlencode ( queries ) ) else : url = '{}/search' . format ( self . url ) data = self . _get ( url ) return data . json ( ) [ 'Results' ]
5,558
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L339-L376
[ "def", "tcp", "(", "q", ",", "where", ",", "timeout", "=", "None", ",", "port", "=", "53", ",", "af", "=", "None", ",", "source", "=", "None", ",", "source_port", "=", "0", ",", "one_rr_per_rrset", "=", "False", ")", ":", "wire", "=", "q", ".", "to_wire", "(", ")", "if", "af", "is", "None", ":", "try", ":", "af", "=", "dns", ".", "inet", ".", "af_for_address", "(", "where", ")", "except", "Exception", ":", "af", "=", "dns", ".", "inet", ".", "AF_INET", "if", "af", "==", "dns", ".", "inet", ".", "AF_INET", ":", "destination", "=", "(", "where", ",", "port", ")", "if", "source", "is", "not", "None", ":", "source", "=", "(", "source", ",", "source_port", ")", "elif", "af", "==", "dns", ".", "inet", ".", "AF_INET6", ":", "destination", "=", "(", "where", ",", "port", ",", "0", ",", "0", ")", "if", "source", "is", "not", "None", ":", "source", "=", "(", "source", ",", "source_port", ",", "0", ",", "0", ")", "s", "=", "socket", ".", "socket", "(", "af", ",", "socket", ".", "SOCK_STREAM", ",", "0", ")", "try", ":", "expiration", "=", "_compute_expiration", "(", "timeout", ")", "s", ".", "setblocking", "(", "0", ")", "if", "source", "is", "not", "None", ":", "s", ".", "bind", "(", "source", ")", "_connect", "(", "s", ",", "destination", ")", "l", "=", "len", "(", "wire", ")", "# copying the wire into tcpmsg is inefficient, but lets us", "# avoid writev() or doing a short write that would get pushed", "# onto the net", "tcpmsg", "=", "struct", ".", "pack", "(", "\"!H\"", ",", "l", ")", "+", "wire", "_net_write", "(", "s", ",", "tcpmsg", ",", "expiration", ")", "ldata", "=", "_net_read", "(", "s", ",", "2", ",", "expiration", ")", "(", "l", ",", ")", "=", "struct", ".", "unpack", "(", "\"!H\"", ",", "ldata", ")", "wire", "=", "_net_read", "(", "s", ",", "l", ",", "expiration", ")", "finally", ":", "s", ".", "close", "(", ")", "r", "=", "dns", ".", "message", ".", "from_wire", "(", "wire", ",", "keyring", "=", "q", ".", "keyring", ",", "request_mac", "=", "q", ".", "mac", 
",", "one_rr_per_rrset", "=", "one_rr_per_rrset", ")", "if", "not", "q", ".", "is_response", "(", "r", ")", ":", "raise", "BadResponse", "return", "r" ]
List entities in the charmstore .
def list ( self , includes = None , doc_type = None , promulgated_only = False , sort = None , owner = None , series = None ) : queries = self . _common_query_parameters ( doc_type , includes , owner , promulgated_only , series , sort ) if len ( queries ) : url = '{}/list?{}' . format ( self . url , urlencode ( queries ) ) else : url = '{}/list' . format ( self . url ) data = self . _get ( url ) return data . json ( ) [ 'Results' ]
5,559
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L378-L400
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Extract common query parameters between search and list into slice .
def _common_query_parameters ( self , doc_type , includes , owner , promulgated_only , series , sort ) : queries = [ ] if includes is not None : queries . extend ( [ ( 'include' , include ) for include in includes ] ) if doc_type is not None : queries . append ( ( 'type' , doc_type ) ) if promulgated_only : queries . append ( ( 'promulgated' , 1 ) ) if owner is not None : queries . append ( ( 'owner' , owner ) ) if series is not None : if type ( series ) is list : series = ',' . join ( series ) queries . append ( ( 'series' , series ) ) if sort is not None : queries . append ( ( 'sort' , sort ) ) return queries
5,560
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L402-L432
[ "def", "configure_for_turret", "(", "project_name", ",", "config_file", ")", ":", "config", "=", "configure", "(", "project_name", ",", "config_file", ")", "for", "key", "in", "WARNING_CONFIG_KEYS", ":", "if", "key", "not", "in", "config", ":", "print", "(", "\"WARNING: %s configuration key not present, the value will be set to default value\"", "%", "key", ")", "common_config", "=", "{", "'hq_address'", ":", "config", ".", "get", "(", "'hq_address'", ",", "'127.0.0.1'", ")", ",", "'hq_publisher'", ":", "config", ".", "get", "(", "'publish_port'", ",", "5000", ")", ",", "'hq_rc'", ":", "config", ".", "get", "(", "'rc_port'", ",", "5001", ")", ",", "'turrets_requirements'", ":", "config", ".", "get", "(", "'turrets_requirements'", ",", "[", "]", ")", "}", "configs", "=", "[", "]", "for", "turret", "in", "config", "[", "'turrets'", "]", ":", "if", "isinstance", "(", "turret", ",", "six", ".", "string_types", ")", ":", "turret", "=", "load_turret_config", "(", "project_name", ",", "turret", ")", "turret", ".", "update", "(", "common_config", ")", "turret", ".", "update", "(", "config", ".", "get", "(", "'extra_turret_config'", ",", "{", "}", ")", ")", "configs", ".", "append", "(", "turret", ")", "return", "configs" ]
Fetch related entity information .
def fetch_related ( self , ids ) : if not ids : return [ ] meta = '&id=' . join ( id [ 'Id' ] for id in ids ) url = ( '{url}/meta/any?id={meta}' '&include=bundle-metadata&include=stats' '&include=supported-series&include=extra-info' '&include=bundle-unit-count&include=owner' ) . format ( url = self . url , meta = meta ) data = self . _get ( url ) return data . json ( ) . values ( )
5,561
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L436-L453
[ "def", "_compute_adaptation", "(", "self", ",", "xyz", ",", "xyz_w", ",", "f_l", ",", "d", ")", ":", "# Transform input colors to cone responses", "rgb", "=", "self", ".", "_xyz_to_rgb", "(", "xyz", ")", "logger", ".", "debug", "(", "\"RGB: {}\"", ".", "format", "(", "rgb", ")", ")", "rgb_b", "=", "self", ".", "_xyz_to_rgb", "(", "self", ".", "_xyz_b", ")", "rgb_w", "=", "self", ".", "_xyz_to_rgb", "(", "xyz_w", ")", "rgb_w", "=", "Hunt", ".", "adjust_white_for_scc", "(", "rgb", ",", "rgb_b", ",", "rgb_w", ",", "self", ".", "_p", ")", "logger", ".", "debug", "(", "\"RGB_W: {}\"", ".", "format", "(", "rgb_w", ")", ")", "# Compute adapted tristimulus-responses", "rgb_c", "=", "self", ".", "_white_adaption", "(", "rgb", ",", "rgb_w", ",", "d", ")", "logger", ".", "debug", "(", "\"RGB_C: {}\"", ".", "format", "(", "rgb_c", ")", ")", "rgb_cw", "=", "self", ".", "_white_adaption", "(", "rgb_w", ",", "rgb_w", ",", "d", ")", "logger", ".", "debug", "(", "\"RGB_CW: {}\"", ".", "format", "(", "rgb_cw", ")", ")", "# Convert adapted tristimulus-responses to Hunt-Pointer-Estevez fundamentals", "rgb_p", "=", "self", ".", "_compute_hunt_pointer_estevez_fundamentals", "(", "rgb_c", ")", "logger", ".", "debug", "(", "\"RGB': {}\"", ".", "format", "(", "rgb_p", ")", ")", "rgb_wp", "=", "self", ".", "_compute_hunt_pointer_estevez_fundamentals", "(", "rgb_cw", ")", "logger", ".", "debug", "(", "\"RGB'_W: {}\"", ".", "format", "(", "rgb_wp", ")", ")", "# Compute post-adaptation non-linearities", "rgb_ap", "=", "self", ".", "_compute_nonlinearities", "(", "f_l", ",", "rgb_p", ")", "rgb_awp", "=", "self", ".", "_compute_nonlinearities", "(", "f_l", ",", "rgb_wp", ")", "return", "rgb_ap", ",", "rgb_awp" ]
Get the list of charms that provides or requires this interface .
def fetch_interfaces ( self , interface , way ) : if not interface : return [ ] if way == 'requires' : request = '&requires=' + interface else : request = '&provides=' + interface url = ( self . url + '/search?' + 'include=charm-metadata&include=stats&include=supported-series' '&include=extra-info&include=bundle-unit-count' '&limit=1000&include=owner' + request ) data = self . _get ( url ) return data . json ( ) . values ( )
5,562
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L455-L473
[ "def", "write_batch_json", "(", "self", ",", "content", ")", ":", "timestamp", "=", "str", "(", "time", ".", "time", "(", ")", ")", ".", "replace", "(", "'.'", ",", "''", ")", "batch_json_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "tcex", ".", "args", ".", "tc_temp_path", ",", "'batch-{}.json'", ".", "format", "(", "timestamp", ")", ")", "with", "open", "(", "batch_json_file", ",", "'w'", ")", "as", "fh", ":", "json", ".", "dump", "(", "content", ",", "fh", ",", "indent", "=", "2", ")" ]
Retrieve the debug information from the charmstore .
def debug ( self ) : url = '{}/debug/status' . format ( self . url ) data = self . _get ( url ) return data . json ( )
5,563
https://github.com/juju/theblues/blob/f4431f29e43d04fc32f38f4f86cea45cd4e6ae98/theblues/charmstore.py#L475-L479
[ "def", "guard_assign", "(", "analysis", ")", ":", "# Only if the request was done from worksheet context.", "if", "not", "is_worksheet_context", "(", ")", ":", "return", "False", "# Cannot assign if the Sample has not been received", "if", "not", "analysis", ".", "isSampleReceived", "(", ")", ":", "return", "False", "# Cannot assign if the analysis has a worksheet assigned already", "if", "analysis", ".", "getWorksheet", "(", ")", ":", "return", "False", "# Cannot assign if user does not have permissions to manage worksheets", "return", "user_can_manage_worksheets", "(", ")" ]
Server query for the isochrone file .
def query_server ( self , outfile , age , metallicity ) : params = copy . deepcopy ( self . download_defaults ) epsilon = 1e-4 lage = np . log10 ( age * 1e9 ) lage_min , lage_max = params [ 'isoc_lage0' ] , params [ 'isoc_lage1' ] if not ( lage_min - epsilon < lage < lage_max + epsilon ) : msg = 'Age outside of valid range: %g [%g < log(age) < %g]' % ( lage , lage_min , lage_max ) raise RuntimeError ( msg ) z_min , z_max = params [ 'isoc_z0' ] , params [ 'isoc_z1' ] if not ( z_min <= metallicity <= z_max ) : msg = 'Metallicity outside of valid range: %g [%g < z < %g]' % ( metallicity , z_min , z_max ) raise RuntimeError ( msg ) params [ 'photsys_file' ] = photsys_dict [ self . survey ] params [ 'isoc_age' ] = age * 1e9 params [ 'isoc_zeta' ] = metallicity server = self . download_url url = server + '/cgi-bin/cmd_%s' % params [ 'cmd_version' ] # First check that the server is alive logger . debug ( "Accessing %s..." % url ) urlopen ( url , timeout = 2 ) q = urlencode ( params ) . encode ( 'utf-8' ) logger . debug ( url + '?' + q ) c = str ( urlopen ( url , q ) . read ( ) ) aa = re . compile ( 'output\d+' ) fname = aa . findall ( c ) if len ( fname ) == 0 : msg = "Output filename not found" raise RuntimeError ( msg ) out = '{0}/tmp/{1}.dat' . format ( server , fname [ 0 ] ) cmd = 'wget --progress dot:binary %s -O %s' % ( out , outfile ) logger . debug ( cmd ) stdout = subprocess . check_output ( cmd , shell = True , stderr = subprocess . STDOUT ) logger . debug ( stdout ) return outfile
5,564
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/isochrone/parsec.py#L138-L192
[ "def", "detached", "(", "name", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "None", ",", "'comment'", ":", "''", "}", "zones", "=", "__salt__", "[", "'zoneadm.list'", "]", "(", "installed", "=", "True", ",", "configured", "=", "True", ")", "if", "name", "in", "zones", ":", "if", "zones", "[", "name", "]", "[", "'state'", "]", "!=", "'configured'", ":", "if", "__opts__", "[", "'test'", "]", ":", "res_detach", "=", "{", "'status'", ":", "True", "}", "else", ":", "res_detach", "=", "__salt__", "[", "'zoneadm.detach'", "]", "(", "name", ")", "ret", "[", "'result'", "]", "=", "res_detach", "[", "'status'", "]", "if", "ret", "[", "'result'", "]", ":", "ret", "[", "'changes'", "]", "[", "name", "]", "=", "'detached'", "ret", "[", "'comment'", "]", "=", "'The zone {0} was detached.'", ".", "format", "(", "name", ")", "else", ":", "ret", "[", "'comment'", "]", "=", "[", "]", "ret", "[", "'comment'", "]", ".", "append", "(", "'Failed to detach zone {0}!'", ".", "format", "(", "name", ")", ")", "if", "'message'", "in", "res_detach", ":", "ret", "[", "'comment'", "]", ".", "append", "(", "res_detach", "[", "'message'", "]", ")", "ret", "[", "'comment'", "]", "=", "\"\\n\"", ".", "join", "(", "ret", "[", "'comment'", "]", ")", "else", ":", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'zone {0} already detached.'", ".", "format", "(", "name", ")", "else", ":", "## note: a non existing zone is not attached, we do not consider this a failure", "ret", "[", "'result'", "]", "=", "True", "ret", "[", "'comment'", "]", "=", "'zone {0} is not configured!'", ".", "format", "(", "name", ")", "return", "ret" ]
Run the likelihood grid search
def run ( self , coords = None , debug = False ) : #self.grid.precompute() self . grid . search ( coords = coords ) return self . grid
5,565
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/analysis/scan.py#L52-L58
[ "def", "f2tc", "(", "f", ",", "base", "=", "25", ")", ":", "try", ":", "f", "=", "int", "(", "f", ")", "except", ":", "return", "\"--:--:--:--\"", "hh", "=", "int", "(", "(", "f", "/", "base", ")", "/", "3600", ")", "mm", "=", "int", "(", "(", "(", "f", "/", "base", ")", "/", "60", ")", "-", "(", "hh", "*", "60", ")", ")", "ss", "=", "int", "(", "(", "f", "/", "base", ")", "-", "(", "hh", "*", "3600", ")", "-", "(", "mm", "*", "60", ")", ")", "ff", "=", "int", "(", "f", "-", "(", "hh", "*", "3600", "*", "base", ")", "-", "(", "mm", "*", "60", "*", "base", ")", "-", "(", "ss", "*", "base", ")", ")", "return", "\"{:02d}:{:02d}:{:02d}:{:02d}\"", ".", "format", "(", "hh", ",", "mm", ",", "ss", ",", "ff", ")" ]
Save the likelihood results as a sparse HEALPix map .
def write ( self , outfile ) : data = odict ( ) data [ 'PIXEL' ] = self . roi . pixels_target # Full data output (too large for survey) if self . config [ 'scan' ] [ 'full_pdf' ] : data [ 'LOG_LIKELIHOOD' ] = self . log_likelihood_sparse_array . T data [ 'RICHNESS' ] = self . richness_sparse_array . T data [ 'RICHNESS_LOWER' ] = self . richness_lower_sparse_array . T data [ 'RICHNESS_UPPER' ] = self . richness_upper_sparse_array . T data [ 'RICHNESS_LIMIT' ] = self . richness_upper_limit_sparse_array . T #data['STELLAR_MASS']=self.stellar_mass_sparse_array.T data [ 'FRACTION_OBSERVABLE' ] = self . fraction_observable_sparse_array . T else : data [ 'LOG_LIKELIHOOD' ] = self . log_likelihood_sparse_array . T data [ 'RICHNESS' ] = self . richness_sparse_array . T data [ 'FRACTION_OBSERVABLE' ] = self . fraction_observable_sparse_array . T # Convert to 32bit float for k in list ( data . keys ( ) ) [ 1 : ] : data [ k ] = data [ k ] . astype ( 'f4' , copy = False ) # Stellar mass can be calculated from STELLAR * RICHNESS header = odict ( ) header [ 'STELLAR' ] = round ( self . stellar_mass_conversion , 8 ) header [ 'LKDNSIDE' ] = self . config [ 'coords' ] [ 'nside_likelihood' ] header [ 'LKDPIX' ] = ang2pix ( self . config [ 'coords' ] [ 'nside_likelihood' ] , self . roi . lon , self . roi . lat ) header [ 'NROI' ] = self . roi . inROI ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) header [ 'NANNULUS' ] = self . roi . inAnnulus ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) header [ 'NINSIDE' ] = self . roi . inInterior ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) header [ 'NTARGET' ] = self . roi . inTarget ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) # Flatten if there is only a single distance modulus # ADW: Is this really what we want to do? if len ( self . 
distance_modulus_array ) == 1 : for key in data : data [ key ] = data [ key ] . flatten ( ) logger . info ( "Writing %s..." % outfile ) write_partial_map ( outfile , data , nside = self . config [ 'coords' ] [ 'nside_pixel' ] , header = header , clobber = True ) fitsio . write ( outfile , dict ( DISTANCE_MODULUS = self . distance_modulus_array . astype ( 'f4' , copy = False ) ) , extname = 'DISTANCE_MODULUS' , clobber = False )
5,566
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/analysis/scan.py#L326-L381
[ "def", "end_stream", "(", "self", ",", "stream_id", ")", ":", "with", "(", "yield", "from", "self", ".", "_get_stream", "(", "stream_id", ")", ".", "wlock", ")", ":", "yield", "from", "self", ".", "_resumed", ".", "wait", "(", ")", "self", ".", "_conn", ".", "end_stream", "(", "stream_id", ")", "self", ".", "_flush", "(", ")" ]
Construct a synchronous Beanstalk Client from a URI .
def from_uri ( cls , uri , socket_timeout = None , auto_decode = False ) : parts = six . moves . urllib . parse . urlparse ( uri ) if parts . scheme . lower ( ) not in ( 'beanstalk' , 'beanstalkd' ) : raise ValueError ( 'Invalid scheme %s' % parts . scheme ) ipv6_md = re . match ( r'^\[([0-9a-fA-F:]+)\](:[0-9]+)?$' , parts . netloc ) if ipv6_md : host = ipv6_md . group ( 1 ) port = ipv6_md . group ( 2 ) or '11300' port = port . lstrip ( ':' ) elif ':' in parts . netloc : host , port = parts . netloc . rsplit ( ':' , 1 ) else : host = parts . netloc port = 11300 port = int ( port ) return cls ( host , port , socket_timeout = socket_timeout , auto_decode = auto_decode )
5,567
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L96-L117
[ "def", "remove_cts_record", "(", "file_name", ",", "map", ",", "position", ")", ":", "db", "=", "XonoticDB", ".", "load_path", "(", "file_name", ")", "db", ".", "remove_cts_record", "(", "map", ",", "position", ")", "db", ".", "save", "(", "file_name", ")" ]
Return a list of tubes that this beanstalk instance knows about
def list_tubes ( self ) : with self . _sock_ctx ( ) as sock : self . _send_message ( 'list-tubes' , sock ) body = self . _receive_data_with_prefix ( b'OK' , sock ) tubes = yaml_load ( body ) return tubes
5,568
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L246-L255
[ "def", "writearff", "(", "data", ",", "filename", ",", "relation_name", "=", "None", ",", "index", "=", "True", ")", ":", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "fp", "=", "open", "(", "filename", ",", "'w'", ")", "if", "relation_name", "is", "None", ":", "relation_name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "else", ":", "fp", "=", "filename", "if", "relation_name", "is", "None", ":", "relation_name", "=", "\"pandas\"", "try", ":", "data", "=", "_write_header", "(", "data", ",", "fp", ",", "relation_name", ",", "index", ")", "fp", ".", "write", "(", "\"\\n\"", ")", "_write_data", "(", "data", ",", "fp", ")", "finally", ":", "fp", ".", "close", "(", ")" ]
Return a dictionary with a bunch of instance - wide statistics
def stats ( self ) : with self . _sock_ctx ( ) as socket : self . _send_message ( 'stats' , socket ) body = self . _receive_data_with_prefix ( b'OK' , socket ) stats = yaml_load ( body ) return stats
5,569
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L257-L266
[ "def", "try_recover_from_autosave", "(", "self", ")", ":", "autosave_dir", "=", "get_conf_path", "(", "'autosave'", ")", "autosave_mapping", "=", "CONF", ".", "get", "(", "'editor'", ",", "'autosave_mapping'", ",", "{", "}", ")", "dialog", "=", "RecoveryDialog", "(", "autosave_dir", ",", "autosave_mapping", ",", "parent", "=", "self", ".", "editor", ")", "dialog", ".", "exec_if_nonempty", "(", ")", "self", ".", "recover_files_to_open", "=", "dialog", ".", "files_to_open", "[", ":", "]" ]
Insert a new job into whatever queue is currently USEd
def put_job ( self , data , pri = 65536 , delay = 0 , ttr = 120 ) : with self . _sock_ctx ( ) as socket : message = 'put {pri} {delay} {ttr} {datalen}\r\n' . format ( pri = pri , delay = delay , ttr = ttr , datalen = len ( data ) , data = data ) . encode ( 'utf-8' ) if not isinstance ( data , bytes ) : data = data . encode ( 'utf-8' ) message += data message += b'\r\n' self . _send_message ( message , socket ) return self . _receive_id ( socket )
5,570
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L268-L298
[ "def", "parse_files", "(", "self", ")", ":", "log_re", "=", "self", ".", "log_format_regex", "log_lines", "=", "[", "]", "for", "log_file", "in", "self", ".", "matching_files", "(", ")", ":", "with", "open", "(", "log_file", ")", "as", "f", ":", "matches", "=", "re", ".", "finditer", "(", "log_re", ",", "f", ".", "read", "(", ")", ")", "for", "match", "in", "matches", ":", "log_lines", ".", "append", "(", "match", ".", "groupdict", "(", ")", ")", "return", "log_lines" ]
Set the watchlist to the given tubes
def watchlist ( self , tubes ) : tubes = set ( tubes ) for tube in tubes - self . _watchlist : self . watch ( tube ) for tube in self . _watchlist - tubes : self . ignore ( tube )
5,571
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L331-L342
[ "def", "_send_mfg_inspector_request", "(", "envelope_data", ",", "credentials", ",", "destination_url", ")", ":", "logging", ".", "info", "(", "'Uploading result...'", ")", "http", "=", "httplib2", ".", "Http", "(", ")", "if", "credentials", ".", "access_token_expired", ":", "credentials", ".", "refresh", "(", "http", ")", "credentials", ".", "authorize", "(", "http", ")", "resp", ",", "content", "=", "http", ".", "request", "(", "destination_url", ",", "'POST'", ",", "envelope_data", ")", "try", ":", "result", "=", "json", ".", "loads", "(", "content", ")", "except", "Exception", ":", "logging", ".", "debug", "(", "'Upload failed with response %s: %s'", ",", "resp", ",", "content", ")", "raise", "UploadFailedError", "(", "resp", ",", "content", ")", "if", "resp", ".", "status", "!=", "200", ":", "logging", ".", "debug", "(", "'Upload failed: %s'", ",", "result", ")", "raise", "UploadFailedError", "(", "result", "[", "'error'", "]", ",", "result", ")", "return", "result" ]
Add the given tube to the watchlist .
def watch ( self , tube ) : with self . _sock_ctx ( ) as socket : self . desired_watchlist . add ( tube ) if tube not in self . _watchlist : self . _send_message ( 'watch {0}' . format ( tube ) , socket ) self . _receive_id ( socket ) self . _watchlist . add ( tube ) if self . initial_watch : if tube != 'default' : self . ignore ( 'default' ) self . initial_watch = False
5,572
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L344-L363
[ "def", "generateHOSequence", "(", "sequence", ",", "symbolsPerSequence", ",", "numColumns", ",", "sparsity", ")", ":", "sequenceHO", "=", "[", "]", "sparseCols", "=", "int", "(", "numColumns", "*", "sparsity", ")", "for", "symbol", "in", "range", "(", "symbolsPerSequence", ")", ":", "if", "symbol", "==", "0", "or", "symbol", "==", "(", "symbolsPerSequence", "-", "1", ")", ":", "sequenceHO", ".", "append", "(", "generateRandomSymbol", "(", "numColumns", ",", "sparseCols", ")", ")", "else", ":", "sequenceHO", ".", "append", "(", "sequence", "[", "symbol", "]", ")", "return", "sequenceHO" ]
Remove the given tube from the watchlist .
def ignore ( self , tube ) : with self . _sock_ctx ( ) as socket : if tube not in self . _watchlist : raise KeyError ( tube ) if tube != 'default' : self . desired_watchlist . remove ( tube ) if tube in self . _watchlist : self . _send_message ( 'ignore {0}' . format ( tube ) , socket ) self . _receive_id ( socket ) self . _watchlist . remove ( tube ) if not self . _watchlist : self . _watchlist . add ( 'default' )
5,573
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L365-L383
[ "def", "writearff", "(", "data", ",", "filename", ",", "relation_name", "=", "None", ",", "index", "=", "True", ")", ":", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "fp", "=", "open", "(", "filename", ",", "'w'", ")", "if", "relation_name", "is", "None", ":", "relation_name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "else", ":", "fp", "=", "filename", "if", "relation_name", "is", "None", ":", "relation_name", "=", "\"pandas\"", "try", ":", "data", "=", "_write_header", "(", "data", ",", "fp", ",", "relation_name", ",", "index", ")", "fp", ".", "write", "(", "\"\\n\"", ")", "_write_data", "(", "data", ",", "fp", ")", "finally", ":", "fp", ".", "close", "(", ")" ]
Fetch statistics about a single job
def stats_job ( self , job_id ) : with self . _sock_ctx ( ) as socket : if hasattr ( job_id , 'job_id' ) : job_id = job_id . job_id self . _send_message ( 'stats-job {0}' . format ( job_id ) , socket ) body = self . _receive_data_with_prefix ( b'OK' , socket ) job_status = yaml_load ( body ) return job_status
5,574
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L385-L396
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_closed", ":", "return", "False", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: is closing\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "self", ".", "_wrap_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_internal_console_port", ",", "self", ".", "_project", ")", "self", ".", "_internal_console_port", "=", "None", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "self", ".", "_closed", "=", "True", "return", "True" ]
Fetch statistics about a single tube
def stats_tube ( self , tube_name ) : with self . _sock_ctx ( ) as socket : self . _send_message ( 'stats-tube {0}' . format ( tube_name ) , socket ) body = self . _receive_data_with_prefix ( b'OK' , socket ) return yaml_load ( body )
5,575
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L398-L407
[ "def", "compose", "(", "list_of_files", ",", "destination_file", ",", "files_metadata", "=", "None", ",", "content_type", "=", "None", ",", "retry_params", "=", "None", ",", "_account_id", "=", "None", ")", ":", "api", "=", "storage_api", ".", "_get_storage_api", "(", "retry_params", "=", "retry_params", ",", "account_id", "=", "_account_id", ")", "if", "os", ".", "getenv", "(", "'SERVER_SOFTWARE'", ")", ".", "startswith", "(", "'Dev'", ")", ":", "def", "_temp_func", "(", "file_list", ",", "destination_file", ",", "content_type", ")", ":", "bucket", "=", "'/'", "+", "destination_file", ".", "split", "(", "'/'", ")", "[", "1", "]", "+", "'/'", "with", "open", "(", "destination_file", ",", "'w'", ",", "content_type", "=", "content_type", ")", "as", "gcs_merge", ":", "for", "source_file", "in", "file_list", ":", "with", "open", "(", "bucket", "+", "source_file", "[", "'Name'", "]", ",", "'r'", ")", "as", "gcs_source", ":", "gcs_merge", ".", "write", "(", "gcs_source", ".", "read", "(", ")", ")", "compose_object", "=", "_temp_func", "else", ":", "compose_object", "=", "api", ".", "compose_object", "file_list", ",", "_", "=", "_validate_compose_list", "(", "destination_file", ",", "list_of_files", ",", "files_metadata", ",", "32", ")", "compose_object", "(", "file_list", ",", "destination_file", ",", "content_type", ")" ]
Reserve a job for this connection . Blocks for TIMEOUT secionds and raises TIMED_OUT if no job was available
def reserve_job ( self , timeout = 5 ) : timeout = int ( timeout ) if self . socket_timeout is not None : if timeout >= self . socket_timeout : raise ValueError ( 'reserve_job timeout must be < socket timeout' ) if not self . _watchlist : raise ValueError ( 'Select a tube or two before reserving a job' ) with self . _sock_ctx ( ) as socket : self . _send_message ( 'reserve-with-timeout {0}' . format ( timeout ) , socket ) job_id , job_data = self . _receive_id_and_data_with_prefix ( b'RESERVED' , socket ) return Job ( job_id , job_data )
5,576
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L409-L424
[ "def", "delta_commits", "(", "self", ",", "baseline", ",", "target", ")", ":", "baseline_files", "=", "self", ".", "_get_policy_fents", "(", "baseline", ".", "tree", ")", "target_files", "=", "self", ".", "_get_policy_fents", "(", "target", ".", "tree", ")", "baseline_policies", "=", "PolicyCollection", "(", ")", "target_policies", "=", "PolicyCollection", "(", ")", "# Added", "for", "f", "in", "set", "(", "target_files", ")", "-", "set", "(", "baseline_files", ")", ":", "target_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "target", ")", "# Removed", "for", "f", "in", "set", "(", "baseline_files", ")", "-", "set", "(", "target_files", ")", ":", "baseline_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "baseline", ")", "# Modified", "for", "f", "in", "set", "(", "baseline_files", ")", ".", "intersection", "(", "target_files", ")", ":", "if", "baseline_files", "[", "f", "]", ".", "hex", "==", "target_files", "[", "f", "]", ".", "hex", ":", "continue", "target_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "target", ")", "baseline_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "baseline", ")", "return", "CollectionDelta", "(", "baseline_policies", ",", "target_policies", ",", "target", ",", "self", ".", "repo_uri", ")", ".", "delta", "(", ")" ]
Delete the given job id . The job must have been previously reserved by this connection
def delete_job ( self , job_id ) : if hasattr ( job_id , 'job_id' ) : job_id = job_id . job_id with self . _sock_ctx ( ) as socket : self . _send_message ( 'delete {0}' . format ( job_id ) , socket ) self . _receive_word ( socket , b'DELETED' )
5,577
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L474-L480
[ "def", "delta_commits", "(", "self", ",", "baseline", ",", "target", ")", ":", "baseline_files", "=", "self", ".", "_get_policy_fents", "(", "baseline", ".", "tree", ")", "target_files", "=", "self", ".", "_get_policy_fents", "(", "target", ".", "tree", ")", "baseline_policies", "=", "PolicyCollection", "(", ")", "target_policies", "=", "PolicyCollection", "(", ")", "# Added", "for", "f", "in", "set", "(", "target_files", ")", "-", "set", "(", "baseline_files", ")", ":", "target_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "target", ")", "# Removed", "for", "f", "in", "set", "(", "baseline_files", ")", "-", "set", "(", "target_files", ")", ":", "baseline_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "baseline", ")", "# Modified", "for", "f", "in", "set", "(", "baseline_files", ")", ".", "intersection", "(", "target_files", ")", ":", "if", "baseline_files", "[", "f", "]", ".", "hex", "==", "target_files", "[", "f", "]", ".", "hex", ":", "continue", "target_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "target", ")", "baseline_policies", "+=", "self", ".", "_policy_file_rev", "(", "f", ",", "baseline", ")", "return", "CollectionDelta", "(", "baseline_policies", ",", "target_policies", ",", "target", ",", "self", ".", "repo_uri", ")", ".", "delta", "(", ")" ]
Mark the given job_id as buried . The job must have been previously reserved by this connection
def bury_job ( self , job_id , pri = 65536 ) : if hasattr ( job_id , 'job_id' ) : job_id = job_id . job_id with self . _sock_ctx ( ) as socket : self . _send_message ( 'bury {0} {1}' . format ( job_id , pri ) , socket ) return self . _receive_word ( socket , b'BURIED' )
5,578
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L482-L493
[ "def", "user_can_update_attachments", "(", "self", ")", ":", "context", "=", "self", ".", "context", "pm", "=", "api", ".", "get_tool", "(", "\"portal_membership\"", ")", "return", "pm", ".", "checkPermission", "(", "EditResults", ",", "context", ")", "or", "pm", ".", "checkPermission", "(", "EditFieldResults", ",", "context", ")" ]
Kick the given job id . The job must either be in the DELAYED or BURIED state and will be immediately moved to the READY state .
def kick_job ( self , job_id ) : if hasattr ( job_id , 'job_id' ) : job_id = job_id . job_id with self . _sock_ctx ( ) as socket : self . _send_message ( 'kick-job {0}' . format ( job_id ) , socket ) self . _receive_word ( socket , b'KICKED' )
5,579
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L512-L519
[ "def", "from_file", "(", "self", ",", "fname", ")", ":", "f", "=", "open", "(", "fname", ",", "\"rb\"", ")", "data", "=", "f", ".", "read", "(", ")", "self", ".", "update", "(", "data", ")", "f", ".", "close", "(", ")" ]
Start producing jobs into the given tube .
def use ( self , tube ) : with self . _sock_ctx ( ) as socket : if self . current_tube != tube : self . desired_tube = tube self . _send_message ( 'use {0}' . format ( tube ) , socket ) self . _receive_name ( socket ) self . current_tube = tube
5,580
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L521-L533
[ "def", "_ar_matrix", "(", "self", ")", ":", "Y", "=", "np", ".", "array", "(", "self", ".", "data", "[", "self", ".", "max_lag", ":", "self", ".", "data", ".", "shape", "[", "0", "]", "]", ")", "X", "=", "self", ".", "data", "[", "(", "self", ".", "max_lag", "-", "1", ")", ":", "-", "1", "]", "if", "self", ".", "ar", "!=", "0", ":", "for", "i", "in", "range", "(", "1", ",", "self", ".", "ar", ")", ":", "X", "=", "np", ".", "vstack", "(", "(", "X", ",", "self", ".", "data", "[", "(", "self", ".", "max_lag", "-", "i", "-", "1", ")", ":", "-", "i", "-", "1", "]", ")", ")", "return", "X" ]
Context - manager to insert jobs into a specific tube
def using ( self , tube ) : try : current_tube = self . current_tube self . use ( tube ) yield BeanstalkInsertingProxy ( self , tube ) finally : self . use ( current_tube )
5,581
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L536-L559
[ "def", "writearff", "(", "data", ",", "filename", ",", "relation_name", "=", "None", ",", "index", "=", "True", ")", ":", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "fp", "=", "open", "(", "filename", ",", "'w'", ")", "if", "relation_name", "is", "None", ":", "relation_name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "else", ":", "fp", "=", "filename", "if", "relation_name", "is", "None", ":", "relation_name", "=", "\"pandas\"", "try", ":", "data", "=", "_write_header", "(", "data", ",", "fp", ",", "relation_name", ",", "index", ")", "fp", ".", "write", "(", "\"\\n\"", ")", "_write_data", "(", "data", ",", "fp", ")", "finally", ":", "fp", ".", "close", "(", ")" ]
Kick some number of jobs from the buried queue onto the ready queue .
def kick_jobs ( self , num_jobs ) : with self . _sock_ctx ( ) as socket : self . _send_message ( 'kick {0}' . format ( num_jobs ) , socket ) return self . _receive_id ( socket )
5,582
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L561-L570
[ "def", "extractdata", "(", "pattern", ",", "text", "=", "None", ",", "filepath", "=", "None", ")", ":", "y", "=", "[", "]", "if", "text", "is", "None", ":", "textsource", "=", "open", "(", "filepath", ",", "'r'", ")", "else", ":", "textsource", "=", "text", ".", "splitlines", "(", ")", "for", "line", "in", "textsource", ":", "match", "=", "scanf", "(", "pattern", ",", "line", ")", "if", "match", ":", "if", "len", "(", "y", ")", "==", "0", ":", "y", "=", "[", "[", "s", "]", "for", "s", "in", "match", "]", "else", ":", "for", "i", ",", "ydata", "in", "enumerate", "(", "y", ")", ":", "ydata", ".", "append", "(", "match", "[", "i", "]", ")", "if", "text", "is", "None", ":", "textsource", ".", "close", "(", ")", "return", "y" ]
Pause a tube for some number of seconds preventing it from issuing jobs .
def pause_tube ( self , tube , delay = 3600 ) : with self . _sock_ctx ( ) as socket : delay = int ( delay ) self . _send_message ( 'pause-tube {0} {1}' . format ( tube , delay ) , socket ) return self . _receive_word ( socket , b'PAUSED' )
5,583
https://github.com/EasyPost/pystalk/blob/96759ad1fda264b9897ee5346eef7926892a3a4c/pystalk/client.py#L572-L587
[ "def", "bin2real", "(", "binary_string", ",", "conv", ",", "endianness", "=", "\"@\"", ")", ":", "data", "=", "struct", ".", "unpack", "(", "endianness", "+", "conv", "[", "\"fmt\"", "]", ",", "binary_string", ")", "[", "0", "]", "return", "fix2real", "(", "data", ",", "conv", ")" ]
Pythonized interval for easy output to yaml
def interval ( best , lo = np . nan , hi = np . nan ) : return [ float ( best ) , [ float ( lo ) , float ( hi ) ] ]
5,584
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L28-L32
[ "def", "unregister_signal_handlers", "(", ")", ":", "signal", ".", "signal", "(", "SIGNAL_STACKTRACE", ",", "signal", ".", "SIG_IGN", ")", "signal", ".", "signal", "(", "SIGNAL_PDB", ",", "signal", ".", "SIG_IGN", ")" ]
Identify interval using Gaussian kernel density estimator .
def peak_interval ( data , alpha = _alpha , npoints = _npoints ) : peak = kde_peak ( data , npoints ) x = np . sort ( data . flat ) n = len ( x ) # The number of entries in the interval window = int ( np . rint ( ( 1.0 - alpha ) * n ) ) # The start, stop, and width of all possible intervals starts = x [ : n - window ] ends = x [ window : ] widths = ends - starts # Just the intervals containing the peak select = ( peak >= starts ) & ( peak <= ends ) widths = widths [ select ] if len ( widths ) == 0 : raise ValueError ( 'Too few elements for interval calculation' ) min_idx = np . argmin ( widths ) lo = x [ min_idx ] hi = x [ min_idx + window ] return interval ( peak , lo , hi )
5,585
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L82-L101
[ "def", "in_", "(", "this", ",", "that", ",", "axis", "=", "semantics", ".", "axis_default", ")", ":", "this", "=", "as_index", "(", "this", ",", "axis", "=", "axis", ",", "lex_as_struct", "=", "True", ",", "base", "=", "True", ")", "that", "=", "as_index", "(", "that", ",", "axis", "=", "axis", ",", "lex_as_struct", "=", "True", ")", "left", "=", "np", ".", "searchsorted", "(", "that", ".", "_keys", ",", "this", ".", "_keys", ",", "sorter", "=", "that", ".", "sorter", ",", "side", "=", "'left'", ")", "right", "=", "np", ".", "searchsorted", "(", "that", ".", "_keys", ",", "this", ".", "_keys", ",", "sorter", "=", "that", ".", "sorter", ",", "side", "=", "'right'", ")", "return", "left", "!=", "right" ]
Add some supplemental columns
def supplement ( self , coordsys = 'gal' ) : from ugali . utils . projector import gal2cel , gal2cel_angle from ugali . utils . projector import cel2gal , cel2gal_angle coordsys = coordsys . lower ( ) kwargs = dict ( usemask = False , asrecarray = True ) out = copy . deepcopy ( self ) if ( 'lon' in out . names ) and ( 'lat' in out . names ) : # Ignore entries that are all zero zeros = np . all ( self . ndarray == 0 , axis = 1 ) if coordsys == 'gal' : ra , dec = gal2cel ( out . lon , out . lat ) glon , glat = out . lon , out . lat else : ra , dec = out . lon , out . lat glon , glat = cel2gal ( out . lon , out . lat ) ra [ zeros ] = 0 dec [ zeros ] = 0 glon [ zeros ] = 0 glat [ zeros ] = 0 names = [ 'ra' , 'dec' , 'glon' , 'glat' ] arrs = [ ra , dec , glon , glat ] out = mlab . rec_append_fields ( out , names , arrs ) . view ( Samples ) #out = recfuncs.append_fields(out,names,arrs,**kwargs).view(Samples) if 'position_angle' in out . names : if coordsys == 'gal' : pa_gal = out . position_angle pa_cel = gal2cel_angle ( out . lon , out . lat , out . position_angle ) pa_cel = pa_cel - 180. * ( pa_cel > 180. ) else : pa_gal = cel2gal_angle ( out . lon , out . lat , out . position_angle ) pa_cel = out . position_angle pa_gal = pa_gal - 180. * ( pa_gal > 180. ) pa_gal [ zeros ] = 0 pa_cel [ zeros ] = 0 names = [ 'position_angle_gal' , 'position_angle_cel' ] arrs = [ pa_gal , pa_cel ] out = recfuncs . append_fields ( out , names , arrs , * * kwargs ) . view ( Samples ) return out
5,586
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L201-L244
[ "def", "_session_check", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "SESSION_FILE", ")", ":", "self", ".", "_log", ".", "debug", "(", "\"Session file does not exist\"", ")", "return", "False", "with", "open", "(", "SESSION_FILE", ",", "'rb'", ")", "as", "f", ":", "cookies", "=", "requests", ".", "utils", ".", "cookiejar_from_dict", "(", "pickle", ".", "load", "(", "f", ")", ")", "self", ".", "_session", ".", "cookies", "=", "cookies", "self", ".", "_log", ".", "debug", "(", "\"Loaded cookies from session file\"", ")", "response", "=", "self", ".", "_session", ".", "get", "(", "url", "=", "self", ".", "TEST_URL", ",", "headers", "=", "self", ".", "HEADERS", ")", "if", "self", ".", "TEST_KEY", "in", "str", "(", "response", ".", "content", ")", ":", "self", ".", "_log", ".", "debug", "(", "\"Session file appears invalid\"", ")", "return", "False", "self", ".", "_is_authenticated", "=", "True", "self", ".", "_process_state", "(", ")", "return", "True" ]
Median of the distribution .
def median ( self , name , * * kwargs ) : data = self . get ( name , * * kwargs ) return np . percentile ( data , [ 50 ] )
5,587
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L294-L299
[ "def", "remove", "(", "self", ",", "oid", ")", ":", "hba", "=", "self", ".", "lookup_by_oid", "(", "oid", ")", "partition", "=", "self", ".", "parent", "devno", "=", "hba", ".", "properties", ".", "get", "(", "'device-number'", ",", "None", ")", "if", "devno", ":", "partition", ".", "devno_free_if_allocated", "(", "devno", ")", "wwpn", "=", "hba", ".", "properties", ".", "get", "(", "'wwpn'", ",", "None", ")", "if", "wwpn", ":", "partition", ".", "wwpn_free_if_allocated", "(", "wwpn", ")", "assert", "'hba-uris'", "in", "partition", ".", "properties", "hba_uris", "=", "partition", ".", "properties", "[", "'hba-uris'", "]", "hba_uris", ".", "remove", "(", "hba", ".", "uri", ")", "super", "(", "FakedHbaManager", ",", "self", ")", ".", "remove", "(", "oid", ")" ]
Calculate peak of kernel density estimator
def kde_peak ( self , name , npoints = _npoints , * * kwargs ) : data = self . get ( name , * * kwargs ) return kde_peak ( data , npoints )
5,588
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L312-L317
[ "def", "set_right_table", "(", "self", ",", "table", ")", ":", "self", ".", "right_table", "=", "table", "if", "self", ".", "left_table", "is", "None", ":", "return", "# find table prefix", "if", "type", "(", "self", ".", "left_table", ")", "is", "ModelTable", "and", "type", "(", "self", ".", "right_table", ")", "is", "ModelTable", ":", "# loop through fields to find the field for this model", "# check if this join type is for a related field", "for", "field", "in", "self", ".", "get_all_related_objects", "(", "self", ".", "left_table", ")", ":", "related_model", "=", "field", ".", "model", "if", "hasattr", "(", "field", ",", "'related_model'", ")", ":", "related_model", "=", "field", ".", "related_model", "if", "related_model", "==", "self", ".", "right_table", ".", "model", ":", "if", "self", ".", "right_table", ".", "field_prefix", "is", "None", ":", "self", ".", "right_table", ".", "field_prefix", "=", "field", ".", "get_accessor_name", "(", ")", "if", "len", "(", "self", ".", "right_table", ".", "field_prefix", ")", ">", "4", "and", "self", ".", "right_table", ".", "field_prefix", "[", "-", "4", ":", "]", "==", "'_set'", ":", "self", ".", "right_table", ".", "field_prefix", "=", "self", ".", "right_table", ".", "field_prefix", "[", ":", "-", "4", "]", "return", "# check if this join type is for a foreign key", "for", "field", "in", "self", ".", "left_table", ".", "model", ".", "_meta", ".", "fields", ":", "if", "(", "field", ".", "get_internal_type", "(", ")", "==", "'OneToOneField'", "or", "field", ".", "get_internal_type", "(", ")", "==", "'ForeignKey'", ")", ":", "if", "field", ".", "remote_field", ".", "model", "==", "self", ".", "right_table", ".", "model", ":", "if", "self", ".", "right_table", ".", "field_prefix", "is", "None", ":", "self", ".", "right_table", ".", "field_prefix", "=", "field", ".", "name", "return" ]
Calculate kernel density estimator for parameter
def kde ( self , name , npoints = _npoints , * * kwargs ) : data = self . get ( name , * * kwargs ) return kde ( data , npoints )
5,589
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L319-L324
[ "def", "_parse_next_filename", "(", "filename", ")", ":", "components", "=", "filename", ".", "split", "(", "os", ".", "sep", ")", "section", ",", "filename", "=", "components", "[", "-", "2", ":", "]", "section", "=", "unsanitize_section", "(", "section", ")", "assert", "section", "in", "sections", ",", "f", "(", "\"Unknown section {section}\"", ")", "fields", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "filename", ".", "split", "(", "\".\"", ")", "]", "assert", "len", "(", "fields", ")", ">=", "4", ",", "f", "(", "\"Can't parse 'next' filename! filename {filename!r} fields {fields}\"", ")", "assert", "fields", "[", "-", "1", "]", "==", "\"rst\"", "metadata", "=", "{", "\"date\"", ":", "fields", "[", "0", "]", ",", "\"nonce\"", ":", "fields", "[", "-", "2", "]", ",", "\"section\"", ":", "section", "}", "for", "field", "in", "fields", "[", "1", ":", "-", "2", "]", ":", "for", "name", "in", "(", "\"bpo\"", ",", ")", ":", "_", ",", "got", ",", "value", "=", "field", ".", "partition", "(", "name", "+", "\"-\"", ")", "if", "got", ":", "metadata", "[", "name", "]", "=", "value", ".", "strip", "(", ")", "break", "else", ":", "assert", "False", ",", "\"Found unparsable field in 'next' filename: \"", "+", "repr", "(", "field", ")", "return", "metadata" ]
Calculate peak interval for parameter .
def peak_interval ( self , name , alpha = _alpha , npoints = _npoints , * * kwargs ) : data = self . get ( name , * * kwargs ) return peak_interval ( data , alpha , npoints )
5,590
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L326-L331
[ "def", "create", "(", "cls", ",", "destination", ")", ":", "mdb_gz_b64", "=", "\"\"\"\\\n H4sICIenn1gC/25ldzIwMDMubWRiAO2de2wcRx3Hf7O7Pt/d3u6eLyEtVaOaqg+EkjQvuVVDwa9a\n jWXHdZxQQlCJ7fOrfp3OTpqkhVxTItFWIhVQVFBRVNIKRaColVpAUKGKRwwFqUAhKiBIpUaoVWP+\n qKgIIHL8Znb39u72znWJiWP3+9l473fzm/nNY3cdf2fmbBJEPdO9E+nebLq+fWC6vrWZOImen9D7\n 9sR+vPPNE0PZxo/TE5879mj+yNc3/OzAD2bXv3DmV9/o/8PZnxxr+/fDL2w79ulzN7e+/sS/zvzz\n w3+N1z28p3PTfQ3nfn/m2YmeFS2no89uWnvqwO5HUvd/5Phr938tes3j/zm5+qT41J8/P/iZx87/\n +qHrjgyduubG1t/+7eWB2XztTNuT+1clZt9c2/e7HRGizevWEwAAAAAAAACAhUEIwvE+PoRIO8K7\n FzT6obPPwTMBAAAAAAAAAABcfpzPXwya+Ispo1xlEO2KEEX9eaGyWnrqyKQ60tQ0AcNZRcR1RYuy\n +XZCxoqRzmaMI6cKGRJuJVrIEZUOQ9UrHStUYpyzKkdNmSPFDkM6aguhXMdVHCMuHXE2Suu4IFQJ\n l6CErNWUDouDlbdKOZIcrKLD4S5WdNhqIEodqlVaofKgVTHpiBQ6uLG0uaKsuYbf3IS8BmV1qFAm\n j1Z5Hbp06GWDKC+DTS00SRN8DFA/TXNfW6mXX3upj7+mOHWllzLAObN8du0gdSdlKO3ZcWqjMbaH\n uOQqtidViRF+P0HbOH2c3xm0lfMb1EH7uHZ5vp32c+ks+5PqfSeXS9NejjTAvZQpd7J3kuuJFqLE\n qYvuVa3Ocqk7OVXWNMFxZPRVtJ1zSXuCBrlkh+rjEF1Zlt5Dw6qN0xx5Bx3gGgbowVo56EIjkc9T\n xX9Jdd+5PKDOD6q3VQvwv7qiZ8st419cdYHlo6iuriF8X4HA590AsodXhvrsj0yMDPnAuI+ZvOrq\n 1o7K51Hdy7a8cdXNm5AedbfG5W3j3lOybxFZKb6zAgAAAAAAsNzQxAlbvnYJV3VcUU3/S2luBIKF\n ha+IlWp+wxW4IiRXRSXxKeNU1eOxUuUbSOIINbEM7WT506ZE3LASgCOeYJWCMcnCsI/u8eSsFEYR\n lnlbWa6+u0jTYqSkvuQL9G5CLFwTRBMAAAAAAAAAgMtW/79lyVdLKxW7oqDF3bXOniib0UD/m/xq\n loWqvFwt3DX/mrLNALIu3V35NkpK1JDmL+2XOmr9pf1gKiFY4I672wc0mveaf6zaenyKmljPT6t5\n hT7a6y13y0XqjFpwneJjRC0oRwvL3eUL2fHCcuyGIntjhTkDuZCd5Vc5j+HNUMyx+myYcpHW5YG5\n ZijUdbg2VFu4ZzzcHFM3seQLAAAAAAAAAMtc//9S6cm1emX97ytK1v81rHelhtfVfAFnseZXRdV9\n Ad7+dhGS5kbl3eqe/K8pU/nnYwX5X2VeoLbCZwHi7txD6aTELabnoLJ5AfPFC8JmFd3Pun+MlfM4\n q/846/4s62i5+8Dmc7EvSVN0UG2tL00p1uPXqZTt/G5QqX+5lbufz+mSctVzFce6upBrTG3Fd+cn\n pmiYrUyw8+GNfL4hn8/k83qZrVlyGzgPeqbhjcOqx7KMEZRpU/MPQ+rsldEtuYm8vExkznoMS+6b\n KC5TZRt8wVf4xEkFX4V5D/X2vYz1/EcR8yMAAAAAAACAJY0Qf/d3vLPUlb//b4Nzzv6W3Wevtl+1\n vmxts2LWTxOHErcm3jGfMUfNG0yMGQAAAAAAeJ/8rLwAMXIYRgCARFv8IIaYtKpGqCdqlN/2kupD\n 
/ob67qXhsi0lDh2Vp6728faO9tHuUflfWJ1wE0e6724f35XuG71r16Dr0FwH573by6rKi0N7RveN\n tnd6aTVBWrpjd3fnuJtsBMnDk90ju7zckSA5XGGtdGrK2dWhUnRcMgAAAAAAAAD4v2CIV6vqf82I\n Jusbcwsy7wkWSf/n1JQNq/Oc+uQGq/ecmsphYZ6Tn6XwRLjwxb7mTxDoakLgURUFshwAAAAAAAAA\n ljpCrHZ8W/f2/2NUAAAAAAAAAAAAhXH5RLm4IIbotqot7hbW/0MGWCp46/+pgpHwjZS3IyAlfMPy\n tgakNN+wfcPxNgukdN9I+kadt30gZfhGjW+s8I2V3s6CVNTbWZCK+Eatb3zAN1Z5mw5SMd+I+wZ+\n +QQAAAAAAAAA/K8IcdT27Zqi3/+HkQEAAAAAAAAAsGgkMQQLjSHqbQPDAAAAAAAAAAAALGuw/g8A\n AAAAAAAA4DJUqwsQI7cQDWlcLiMq1/9rcGMBAAAAAAAAAADLGuh/AAAAAAAAAAAA+h8AAAAAAAAA\n AABLHyHusDTPjtLzTtoxnRftUftqe8YatDA+AAAAAAAAAPDeqJN/KVt+et0R9PYnzz7W8PrZRv+V\n HblO6qEDNEXbaYDGqJemaYQmaYJThtnK8Gvzb1opfDRTPZmUlxUY86qgm/ZyFVkOOqCC3kLhoyEI\n qs8raBO10O0q3EYKH+uDcNq8wnVRH93D7evnYZhHG5kkB3a0OYO2ctCWV9ZR+FhT0l2HCzl6xVBz\n XZyPUvi4taTjcwRuVUF7uYW9HMy9MJspfGwMAoo5A+5Qwca8UHN2WogeU/fu0ito1vmjM+M85zzp\n fNG5zxl2djrNzk3O9+0m+yWrx2q0fpH4buJ4Yk3ig4lvmkfxx9gBAAAAAAC4OAylQfJ5h5pfSVCc\n f853gqSmWPSZux6xjUznltH2HT/flNu7++0NZ7/07cg/vnPbVu30y6d/NLvlabPh+j81v/Xc5g9l\n 1h2f+epn9+VPdN90OHHvU50fm94y/ZXvWQ/tP/yJG/NH3llz8A79tlNPG72DHSePHdzz2s3XPzVj\n vzSUvSHjVys1Rv5CSUv8pEvcEqkbV/KX35JaQ+npikmRS9o4rtYIt8RYnJa4Ou6SV6stTm+l7rcX\n q9qSy+23pCVIcgV/SZKuJj5CSRc4Y/PpkiesLJcI53J37NvFuQzv4peGL0/SypP+C+45xVAAMAEA\n \"\"\"", "pristine", "=", "StringIO", "(", ")", "pristine", ".", "write", "(", "base64", ".", "b64decode", "(", "mdb_gz_b64", ")", ")", "pristine", ".", "seek", "(", "0", ")", "pristine", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "pristine", ",", "mode", "=", "'rb'", ")", "with", "open", "(", "destination", ",", "'wb'", ")", "as", "handle", ":", "shutil", ".", "copyfileobj", "(", "pristine", ",", "handle", ")", "return", "cls", "(", "destination", ")" ]
Calculate minimum interval for parameter .
def min_interval ( self , name , alpha = _alpha , * * kwargs ) : data = self . get ( name , * * kwargs ) return min_interval ( data , alpha )
5,591
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L333-L338
[ "def", "create", "(", "cls", ",", "destination", ")", ":", "mdb_gz_b64", "=", "\"\"\"\\\n H4sICIenn1gC/25ldzIwMDMubWRiAO2de2wcRx3Hf7O7Pt/d3u6eLyEtVaOaqg+EkjQvuVVDwa9a\n jWXHdZxQQlCJ7fOrfp3OTpqkhVxTItFWIhVQVFBRVNIKRaColVpAUKGKRwwFqUAhKiBIpUaoVWP+\n qKgIIHL8Znb39u72znWJiWP3+9l473fzm/nNY3cdf2fmbBJEPdO9E+nebLq+fWC6vrWZOImen9D7\n 9sR+vPPNE0PZxo/TE5879mj+yNc3/OzAD2bXv3DmV9/o/8PZnxxr+/fDL2w79ulzN7e+/sS/zvzz\n w3+N1z28p3PTfQ3nfn/m2YmeFS2no89uWnvqwO5HUvd/5Phr938tes3j/zm5+qT41J8/P/iZx87/\n +qHrjgyduubG1t/+7eWB2XztTNuT+1clZt9c2/e7HRGizevWEwAAAAAAAACAhUEIwvE+PoRIO8K7\n FzT6obPPwTMBAAAAAAAAAABcfpzPXwya+Ispo1xlEO2KEEX9eaGyWnrqyKQ60tQ0AcNZRcR1RYuy\n +XZCxoqRzmaMI6cKGRJuJVrIEZUOQ9UrHStUYpyzKkdNmSPFDkM6aguhXMdVHCMuHXE2Suu4IFQJ\n l6CErNWUDouDlbdKOZIcrKLD4S5WdNhqIEodqlVaofKgVTHpiBQ6uLG0uaKsuYbf3IS8BmV1qFAm\n j1Z5Hbp06GWDKC+DTS00SRN8DFA/TXNfW6mXX3upj7+mOHWllzLAObN8du0gdSdlKO3ZcWqjMbaH\n uOQqtidViRF+P0HbOH2c3xm0lfMb1EH7uHZ5vp32c+ks+5PqfSeXS9NejjTAvZQpd7J3kuuJFqLE\n qYvuVa3Ocqk7OVXWNMFxZPRVtJ1zSXuCBrlkh+rjEF1Zlt5Dw6qN0xx5Bx3gGgbowVo56EIjkc9T\n xX9Jdd+5PKDOD6q3VQvwv7qiZ8st419cdYHlo6iuriF8X4HA590AsodXhvrsj0yMDPnAuI+ZvOrq\n 1o7K51Hdy7a8cdXNm5AedbfG5W3j3lOybxFZKb6zAgAAAAAAsNzQxAlbvnYJV3VcUU3/S2luBIKF\n ha+IlWp+wxW4IiRXRSXxKeNU1eOxUuUbSOIINbEM7WT506ZE3LASgCOeYJWCMcnCsI/u8eSsFEYR\n lnlbWa6+u0jTYqSkvuQL9G5CLFwTRBMAAAAAAAAAgMtW/79lyVdLKxW7oqDF3bXOniib0UD/m/xq\n loWqvFwt3DX/mrLNALIu3V35NkpK1JDmL+2XOmr9pf1gKiFY4I672wc0mveaf6zaenyKmljPT6t5\n hT7a6y13y0XqjFpwneJjRC0oRwvL3eUL2fHCcuyGIntjhTkDuZCd5Vc5j+HNUMyx+myYcpHW5YG5\n ZijUdbg2VFu4ZzzcHFM3seQLAAAAAAAAAMtc//9S6cm1emX97ytK1v81rHelhtfVfAFnseZXRdV9\n Ad7+dhGS5kbl3eqe/K8pU/nnYwX5X2VeoLbCZwHi7txD6aTELabnoLJ5AfPFC8JmFd3Pun+MlfM4\n q/846/4s62i5+8Dmc7EvSVN0UG2tL00p1uPXqZTt/G5QqX+5lbufz+mSctVzFce6upBrTG3Fd+cn\n pmiYrUyw8+GNfL4hn8/k83qZrVlyGzgPeqbhjcOqx7KMEZRpU/MPQ+rsldEtuYm8vExkznoMS+6b\n KC5TZRt8wVf4xEkFX4V5D/X2vYz1/EcR8yMAAAAAAACAJY0Qf/d3vLPUlb//b4Nzzv6W3Wevtl+1\n vmxts2LWTxOHErcm3jGfMUfNG0yMGQAAAAAAeJ/8rLwAMXIYRgCARFv8IIaYtKpGqCdqlN/2kupD\n 
/ob67qXhsi0lDh2Vp6728faO9tHuUflfWJ1wE0e6724f35XuG71r16Dr0FwH573by6rKi0N7RveN\n tnd6aTVBWrpjd3fnuJtsBMnDk90ju7zckSA5XGGtdGrK2dWhUnRcMgAAAAAAAAD4v2CIV6vqf82I\n Jusbcwsy7wkWSf/n1JQNq/Oc+uQGq/ecmsphYZ6Tn6XwRLjwxb7mTxDoakLgURUFshwAAAAAAAAA\n ljpCrHZ8W/f2/2NUAAAAAAAAAAAAhXH5RLm4IIbotqot7hbW/0MGWCp46/+pgpHwjZS3IyAlfMPy\n tgakNN+wfcPxNgukdN9I+kadt30gZfhGjW+s8I2V3s6CVNTbWZCK+Eatb3zAN1Z5mw5SMd+I+wZ+\n +QQAAAAAAAAA/K8IcdT27Zqi3/+HkQEAAAAAAAAAsGgkMQQLjSHqbQPDAAAAAAAAAAAALGuw/g8A\n AAAAAAAA4DJUqwsQI7cQDWlcLiMq1/9rcGMBAAAAAAAAAADLGuh/AAAAAAAAAAAA+h8AAAAAAAAA\n AABLHyHusDTPjtLzTtoxnRftUftqe8YatDA+AAAAAAAAAPDeqJN/KVt+et0R9PYnzz7W8PrZRv+V\n HblO6qEDNEXbaYDGqJemaYQmaYJThtnK8Gvzb1opfDRTPZmUlxUY86qgm/ZyFVkOOqCC3kLhoyEI\n qs8raBO10O0q3EYKH+uDcNq8wnVRH93D7evnYZhHG5kkB3a0OYO2ctCWV9ZR+FhT0l2HCzl6xVBz\n XZyPUvi4taTjcwRuVUF7uYW9HMy9MJspfGwMAoo5A+5Qwca8UHN2WogeU/fu0ito1vmjM+M85zzp\n fNG5zxl2djrNzk3O9+0m+yWrx2q0fpH4buJ4Yk3ig4lvmkfxx9gBAAAAAAC4OAylQfJ5h5pfSVCc\n f853gqSmWPSZux6xjUznltH2HT/flNu7++0NZ7/07cg/vnPbVu30y6d/NLvlabPh+j81v/Xc5g9l\n 1h2f+epn9+VPdN90OHHvU50fm94y/ZXvWQ/tP/yJG/NH3llz8A79tlNPG72DHSePHdzz2s3XPzVj\n vzSUvSHjVys1Rv5CSUv8pEvcEqkbV/KX35JaQ+npikmRS9o4rtYIt8RYnJa4Ou6SV6stTm+l7rcX\n q9qSy+23pCVIcgV/SZKuJj5CSRc4Y/PpkiesLJcI53J37NvFuQzv4peGL0/SypP+C+45xVAAMAEA\n \"\"\"", "pristine", "=", "StringIO", "(", ")", "pristine", ".", "write", "(", "base64", ".", "b64decode", "(", "mdb_gz_b64", ")", ")", "pristine", ".", "seek", "(", "0", ")", "pristine", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "pristine", ",", "mode", "=", "'rb'", ")", "with", "open", "(", "destination", ",", "'wb'", ")", "as", "handle", ":", "shutil", ".", "copyfileobj", "(", "pristine", ",", "handle", ")", "return", "cls", "(", "destination", ")" ]
Calculate the results for a set of parameters .
def results ( self , names = None , alpha = _alpha , mode = 'peak' , * * kwargs ) : if names is None : names = self . names ret = odict ( ) for n in names : ret [ n ] = getattr ( self , '%s_interval' % mode ) ( n , * * kwargs ) return ret
5,592
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/stats.py#L340-L348
[ "def", "delete_share", "(", "self", ",", "share_name", ",", "fail_not_exist", "=", "False", ",", "timeout", "=", "None", ",", "snapshot", "=", "None", ",", "delete_snapshots", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "request", "=", "HTTPRequest", "(", ")", "request", ".", "method", "=", "'DELETE'", "request", ".", "host_locations", "=", "self", ".", "_get_host_locations", "(", ")", "request", ".", "path", "=", "_get_path", "(", "share_name", ")", "request", ".", "headers", "=", "{", "'x-ms-delete-snapshots'", ":", "_to_str", "(", "delete_snapshots", ")", "}", "request", ".", "query", "=", "{", "'restype'", ":", "'share'", ",", "'timeout'", ":", "_int_to_str", "(", "timeout", ")", ",", "'sharesnapshot'", ":", "_to_str", "(", "snapshot", ")", ",", "}", "if", "not", "fail_not_exist", ":", "try", ":", "self", ".", "_perform_request", "(", "request", ",", "expected_errors", "=", "[", "_SHARE_NOT_FOUND_ERROR_CODE", "]", ")", "return", "True", "except", "AzureHttpError", "as", "ex", ":", "_dont_fail_not_exist", "(", "ex", ")", "return", "False", "else", ":", "self", ".", "_perform_request", "(", "request", ")", "return", "True" ]
Increase the density of points along the parabolic curve .
def densify ( self , factor = 10 ) : x = [ ] y = [ ] for ii in range ( 0 , len ( self . x ) - 2 ) : p = Parabola ( self . x [ ii : ii + 3 ] , self . y [ ii : ii + 3 ] ) x . append ( np . linspace ( self . x [ ii ] , self . x [ ii + 1 ] , factor ) [ 0 : - 1 ] ) y . append ( p ( x [ - 1 ] ) ) p = Parabola ( self . x [ len ( self . x ) - 3 : ] , self . y [ len ( self . y ) - 3 : ] ) x . append ( np . linspace ( self . x [ - 2 ] , self . x [ - 1 ] , factor ) [ 0 : - 1 ] ) y . append ( p ( x [ - 1 ] ) ) x . append ( [ self . x [ - 1 ] ] ) y . append ( [ self . y [ - 1 ] ] ) #f = scipy.interpolate.interp1d(np.concatenate(x), np.concatenate(y)) #x = np.linspace(self.x[0], self.x[-1], len(x) * factor) #return x, f(x) return np . concatenate ( x ) , np . concatenate ( y )
5,593
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/parabola.py#L83-L105
[ "def", "has_changed", "(", "self", ",", "initial", ",", "data", ")", ":", "if", "initial", "is", "None", "and", "data", "is", "None", ":", "return", "False", "if", "data", "and", "not", "hasattr", "(", "data", ",", "'__iter__'", ")", ":", "data", "=", "self", ".", "widget", ".", "decompress", "(", "data", ")", "initial", "=", "self", ".", "to_python", "(", "initial", ")", "data", "=", "self", ".", "to_python", "(", "data", ")", "if", "hasattr", "(", "self", ",", "'_coerce'", ")", ":", "data", "=", "self", ".", "_coerce", "(", "data", ")", "if", "isinstance", "(", "data", ",", "Model", ")", "and", "isinstance", "(", "initial", ",", "Model", ")", ":", "return", "model_vars", "(", "data", ")", "!=", "model_vars", "(", "initial", ")", "else", ":", "return", "data", "!=", "initial" ]
Compute one - sided upperlimit via profile method .
def profileUpperLimit ( self , delta = 2.71 ) : a = self . p_2 b = self . p_1 if self . vertex_x < 0 : c = self . p_0 + delta else : c = self . p_0 - self . vertex_y + delta if b ** 2 - 4. * a * c < 0. : print ( 'WARNING' ) print ( a , b , c ) return 0. return max ( ( np . sqrt ( b ** 2 - 4. * a * c ) - b ) / ( 2. * a ) , ( - 1. * np . sqrt ( b ** 2 - 4. * a * c ) - b ) / ( 2. * a ) )
5,594
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/parabola.py#L107-L123
[ "def", "get_brokers", "(", "self", ",", "names_only", "=", "False", ")", ":", "try", ":", "broker_ids", "=", "self", ".", "get_children", "(", "\"/brokers/ids\"", ")", "except", "NoNodeError", ":", "_log", ".", "info", "(", "\"cluster is empty.\"", ")", "return", "{", "}", "# Return broker-ids only", "if", "names_only", ":", "return", "{", "int", "(", "b_id", ")", ":", "None", "for", "b_id", "in", "broker_ids", "}", "return", "{", "int", "(", "b_id", ")", ":", "self", ".", "get_broker_metadata", "(", "b_id", ")", "for", "b_id", "in", "broker_ids", "}" ]
Compute one - sided upper limit using Bayesian Method of Helene . Several methods of increasing numerical stability have been implemented .
def bayesianUpperLimit ( self , alpha , steps = 1.e5 , plot = False ) : x_dense , y_dense = self . densify ( ) y_dense -= np . max ( y_dense ) # Numeric stability f = scipy . interpolate . interp1d ( x_dense , y_dense , kind = 'linear' ) x = np . linspace ( 0. , np . max ( x_dense ) , steps ) pdf = np . exp ( f ( x ) / 2. ) cut = ( pdf / np . max ( pdf ) ) > 1.e-10 x = x [ cut ] pdf = pdf [ cut ] #pdf /= pdf[0] #forbidden = np.nonzero(pdf < 1.e-10)[0] #if len(forbidden) > 0: # index = forbidden[0] # Numeric stability # x = x[0: index] # pdf = pdf[0: index] cdf = np . cumsum ( pdf ) cdf /= cdf [ - 1 ] cdf_reflect = scipy . interpolate . interp1d ( cdf , x ) return cdf_reflect ( alpha )
5,595
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/parabola.py#L125-L148
[ "def", "to_workspace_value", "(", "self", ",", "result", ",", "assets", ")", ":", "return", "result", ".", "unstack", "(", ")", ".", "fillna", "(", "self", ".", "missing_value", ")", ".", "reindex", "(", "columns", "=", "assets", ",", "fill_value", "=", "self", ".", "missing_value", ",", ")", ".", "values" ]
Compute one - sided upper limit using Bayesian Method of Helene .
def bayesianUpperLimit2 ( self , alpha , steps = 1.e5 , plot = False ) : cut = ( ( self . y / 2. ) > - 30. ) # Numeric stability try : f = scipy . interpolate . interp1d ( self . x [ cut ] , self . y [ cut ] , kind = 'cubic' ) except : f = scipy . interpolate . interp1d ( self . x [ cut ] , self . y [ cut ] , kind = 'linear' ) x = np . linspace ( 0. , np . max ( self . x [ cut ] ) , steps ) y = np . exp ( f ( x ) / 2. ) #forbidden = np.nonzero((y / np.exp(self.vertex_y / 2.)) < 1.e-10)[0] forbidden = np . nonzero ( ( y / self . vertex_y ) < 1.e-10 ) [ 0 ] if len ( forbidden ) > 0 : index = forbidden [ 0 ] # Numeric stability x = x [ 0 : index ] y = y [ 0 : index ] cdf = np . cumsum ( y ) cdf /= cdf [ - 1 ] cdf_reflect = scipy . interpolate . interp1d ( cdf , x ) return cdf_reflect ( alpha )
5,596
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/parabola.py#L150-L171
[ "def", "_rows_from_json", "(", "values", ",", "schema", ")", ":", "from", "google", ".", "cloud", ".", "bigquery", "import", "Row", "field_to_index", "=", "_field_to_index_mapping", "(", "schema", ")", "return", "[", "Row", "(", "_row_tuple_from_json", "(", "r", ",", "schema", ")", ",", "field_to_index", ")", "for", "r", "in", "values", "]" ]
Compute two - sided confidence interval by taking x - values corresponding to the largest PDF - values first .
def confidenceInterval ( self , alpha = 0.6827 , steps = 1.e5 , plot = False ) : x_dense , y_dense = self . densify ( ) y_dense -= np . max ( y_dense ) # Numeric stability f = scipy . interpolate . interp1d ( x_dense , y_dense , kind = 'linear' ) x = np . linspace ( 0. , np . max ( x_dense ) , steps ) # ADW: Why does this start at 0, which often outside the input range? # Wouldn't starting at xmin be better: #x = np.linspace(np.min(x_dense), np.max(x_dense), steps) pdf = np . exp ( f ( x ) / 2. ) cut = ( pdf / np . max ( pdf ) ) > 1.e-10 x = x [ cut ] pdf = pdf [ cut ] sorted_pdf_indices = np . argsort ( pdf ) [ : : - 1 ] # Indices of PDF in descending value cdf = np . cumsum ( pdf [ sorted_pdf_indices ] ) cdf /= cdf [ - 1 ] sorted_pdf_index_max = np . argmin ( ( cdf - alpha ) ** 2 ) x_select = x [ sorted_pdf_indices [ 0 : sorted_pdf_index_max ] ] return np . min ( x_select ) , np . max ( x_select )
5,597
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/parabola.py#L174-L196
[ "def", "bind_objects", "(", "self", ",", "*", "objects", ")", ":", "self", ".", "control", ".", "bind_keys", "(", "objects", ")", "self", ".", "objects", "+=", "objects" ]
Convert longitude from hours minutes seconds in string or 3 - array format to decimal degrees .
def hms2dec ( hms ) : DEGREE = 360. HOUR = 24. MINUTE = 60. SECOND = 3600. if isstring ( hms ) : hour , minute , second = np . array ( re . split ( '[hms]' , hms ) ) [ : 3 ] . astype ( float ) else : hour , minute , second = hms . T decimal = ( hour + minute * 1. / MINUTE + second * 1. / SECOND ) * ( DEGREE / HOUR ) return decimal
5,598
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/projector.py#L422-L440
[ "def", "register_on_guest_keyboard", "(", "self", ",", "callback", ")", ":", "return", "self", ".", "event_source", ".", "register_callback", "(", "callback", ",", "library", ".", "VBoxEventType", ".", "on_guest_keyboard", ")" ]
Convert latitude from degrees minutes seconds in string or 3 - array format to decimal degrees .
def dms2dec ( dms ) : DEGREE = 360. HOUR = 24. MINUTE = 60. SECOND = 3600. # Be careful here, degree needs to be a float so that negative zero # can have its signbit set: # http://docs.scipy.org/doc/numpy-1.7.0/reference/c-api.coremath.html#NPY_NZERO if isstring ( dms ) : degree , minute , second = np . array ( re . split ( '[dms]' , hms ) ) [ : 3 ] . astype ( float ) else : degree , minute , second = dms . T sign = np . copysign ( 1.0 , degree ) decimal = np . abs ( degree ) + minute * 1. / MINUTE + second * 1. / SECOND decimal *= sign return decimal
5,599
https://github.com/DarkEnergySurvey/ugali/blob/21e890b4117fc810afb6fb058e8055d564f03382/ugali/utils/projector.py#L442-L464
[ "def", "register_on_guest_keyboard", "(", "self", ",", "callback", ")", ":", "return", "self", ".", "event_source", ".", "register_callback", "(", "callback", ",", "library", ".", "VBoxEventType", ".", "on_guest_keyboard", ")" ]