query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Generate one Schlumberger sounding configuration, i.e., one set of configurations for one potential dipole MN.
def gen_schlumberger(self, M, N, a=None):
    """Generate one Schlumberger sounding configuration, i.e., one set of
    configurations for one fixed potential dipole MN.

    Parameters
    ----------
    M : int
        electrode number of the first potential electrode
    N : int
        electrode number of the second potential electrode
    a : int, optional
        current electrode spacing step; defaults to the MN dipole length

    Returns
    -------
    configs : numpy.ndarray
        Nx4 array of (A, B, M, N) configurations (also appended to the
        stored configurations via add_to_configs)
    """
    if a is None:
        a = np.abs(M - N)
    # number of steps the current electrodes can move outward on each side.
    # BUGFIX: the original computed int(min(M, N) - 1 / a); operator
    # precedence divided only the 1 by a, overestimating the left step
    # count for a > 1 and producing electrode numbers < 1.
    nr_of_steps_left = int((min(M, N) - 1) / a)
    nr_of_steps_right = int((self.nr_electrodes - max(M, N)) / a)
    configs = []
    for i in range(0, min(nr_of_steps_left, nr_of_steps_right)):
        A = min(M, N) - (i + 1) * a
        B = max(M, N) + (i + 1) * a
        configs.append((A, B, M, N))
    configs = np.array(configs)
    self.add_to_configs(configs)
    return configs
8,400
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L459-L498
[ "def", "wait_for_start", "(", "self", ")", ":", "if", "self", ".", "wait_matchers", ":", "matcher", "=", "UnorderedMatcher", "(", "*", "self", ".", "wait_matchers", ")", "self", ".", "wait_for_logs_matching", "(", "matcher", ",", "timeout", "=", "self", ".", "wait_timeout", ")" ]
Add one or more measurement configurations to the stored configurations
def add_to_configs(self, configs):
    """Append one or more measurement configurations to the stored set.

    Parameters
    ----------
    configs : list or numpy.ndarray
        configurations to add; an empty input is ignored

    Returns
    -------
    numpy.ndarray or None
        the updated configuration array, or None if *configs* was empty
    """
    if len(configs) == 0:
        return None
    new_rows = np.atleast_2d(configs)
    if self.configs is None:
        self.configs = new_rows
    else:
        self.configs = np.vstack((self.configs, new_rows))
    return self.configs
8,401
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L521-L543
[ "def", "compress_pdf", "(", "pdf_fpath", ",", "output_fname", "=", "None", ")", ":", "import", "utool", "as", "ut", "ut", ".", "assertpath", "(", "pdf_fpath", ")", "suffix", "=", "'_'", "+", "ut", ".", "get_datestamp", "(", "False", ")", "+", "'_compressed'", "print", "(", "'pdf_fpath = %r'", "%", "(", "pdf_fpath", ",", ")", ")", "output_pdf_fpath", "=", "ut", ".", "augpath", "(", "pdf_fpath", ",", "suffix", ",", "newfname", "=", "output_fname", ")", "print", "(", "'output_pdf_fpath = %r'", "%", "(", "output_pdf_fpath", ",", ")", ")", "gs_exe", "=", "find_ghostscript_exe", "(", ")", "cmd_list", "=", "(", "gs_exe", ",", "'-sDEVICE=pdfwrite'", ",", "'-dCompatibilityLevel=1.4'", ",", "'-dNOPAUSE'", ",", "'-dQUIET'", ",", "'-dBATCH'", ",", "'-sOutputFile='", "+", "output_pdf_fpath", ",", "pdf_fpath", ")", "ut", ".", "cmd", "(", "*", "cmd_list", ")", "return", "output_pdf_fpath" ]
Split the stored configurations into normal and reciprocal measurements
def split_into_normal_and_reciprocal(self, pad=False, return_indices=False):
    """Split the stored configurations into normal and reciprocal
    measurements.

    A configuration counts as normal when its smallest current-electrode
    number is smaller than its smallest potential-electrode number; its
    reciprocal is the configuration with AB and MN swapped.

    Parameters
    ----------
    pad : bool, optional
        if True, insert NaN rows for missing partners so both returned
        arrays have one row per pairing
    return_indices : bool, optional
        if True, also return the row indices of normal and reciprocal
        configurations

    Returns
    -------
    normals, reciprocals : numpy.ndarray
        and additionally normal_indices, reciprocal_indices (lists) when
        return_indices is True
    """
    # for simplicity, work on an array where AB and MN are each sorted
    configs = np.hstack((
        np.sort(self.configs[:, 0:2], axis=1),
        np.sort(self.configs[:, 2:4], axis=1),
    ))
    ab_min = configs[:, 0]
    mn_min = configs[:, 2]
    # rule 1: normal measurements inject at the lower electrode numbers
    indices_normal = np.where(ab_min < mn_min)[0]
    # now look for reciprocals
    indices_used = []
    normal = []
    normal_indices = []
    reciprocal_indices = []
    reciprocal = []
    duplicates = []
    for index in indices_normal:
        indices_used.append(index)
        normal.append(self.configs[index, :])
        normal_indices.append(index)
        # look for the reciprocal configuration
        index_rec = np.where(
            # A == M, B == N, M == A, N == B
            (configs[:, 0] == configs[index, 2]) &
            (configs[:, 1] == configs[index, 3]) &
            (configs[:, 2] == configs[index, 0]) &
            (configs[:, 3] == configs[index, 1])
        )[0]
        if len(index_rec) == 0 and pad:
            reciprocal.append(np.ones(4) * np.nan)
        elif len(index_rec) == 1:
            reciprocal.append(self.configs[index_rec[0], :])
            indices_used.append(index_rec[0])
            reciprocal_indices.append(index_rec[0])
        # BUGFIX: was `len(index_rec > 1)`, which measures the length of a
        # boolean array rather than the number of matches
        elif len(index_rec) > 1:
            # take the first one
            reciprocal.append(self.configs[index_rec[0], :])
            reciprocal_indices.append(index_rec[0])
            duplicates += list(index_rec[1:])
            indices_used += list(index_rec)
    # everything not consumed above is reciprocal-only
    set_all_indices = set(list(range(0, configs.shape[0])))
    set_used_indices = set(indices_used)
    reciprocal_only_indices = set_all_indices - set_used_indices
    for index in reciprocal_only_indices:
        if pad:
            normal.append(np.ones(4) * np.nan)
        reciprocal.append(self.configs[index, :])
    normals = np.array(normal)
    reciprocals = np.array(reciprocal)
    if return_indices:
        return normals, reciprocals, normal_indices, reciprocal_indices
    else:
        return normals, reciprocals
8,402
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L545-L638
[ "def", "dump_memory_map", "(", "memoryMap", ",", "mappedFilenames", "=", "None", ",", "bits", "=", "None", ")", ":", "if", "not", "memoryMap", ":", "return", "''", "table", "=", "Table", "(", ")", "if", "mappedFilenames", ":", "table", ".", "addRow", "(", "\"Address\"", ",", "\"Size\"", ",", "\"State\"", ",", "\"Access\"", ",", "\"Type\"", ",", "\"File\"", ")", "else", ":", "table", ".", "addRow", "(", "\"Address\"", ",", "\"Size\"", ",", "\"State\"", ",", "\"Access\"", ",", "\"Type\"", ")", "# For each memory block in the map...", "for", "mbi", "in", "memoryMap", ":", "# Address and size of memory block.", "BaseAddress", "=", "HexDump", ".", "address", "(", "mbi", ".", "BaseAddress", ",", "bits", ")", "RegionSize", "=", "HexDump", ".", "address", "(", "mbi", ".", "RegionSize", ",", "bits", ")", "# State (free or allocated).", "mbiState", "=", "mbi", ".", "State", "if", "mbiState", "==", "win32", ".", "MEM_RESERVE", ":", "State", "=", "\"Reserved\"", "elif", "mbiState", "==", "win32", ".", "MEM_COMMIT", ":", "State", "=", "\"Commited\"", "elif", "mbiState", "==", "win32", ".", "MEM_FREE", ":", "State", "=", "\"Free\"", "else", ":", "State", "=", "\"Unknown\"", "# Page protection bits (R/W/X/G).", "if", "mbiState", "!=", "win32", ".", "MEM_COMMIT", ":", "Protect", "=", "\"\"", "else", ":", "mbiProtect", "=", "mbi", ".", "Protect", "if", "mbiProtect", "&", "win32", ".", "PAGE_NOACCESS", ":", "Protect", "=", "\"--- \"", "elif", "mbiProtect", "&", "win32", ".", "PAGE_READONLY", ":", "Protect", "=", "\"R-- \"", "elif", "mbiProtect", "&", "win32", ".", "PAGE_READWRITE", ":", "Protect", "=", "\"RW- \"", "elif", "mbiProtect", "&", "win32", ".", "PAGE_WRITECOPY", ":", "Protect", "=", "\"RC- \"", "elif", "mbiProtect", "&", "win32", ".", "PAGE_EXECUTE", ":", "Protect", "=", "\"--X \"", "elif", "mbiProtect", "&", "win32", ".", "PAGE_EXECUTE_READ", ":", "Protect", "=", "\"R-X \"", "elif", "mbiProtect", "&", "win32", ".", "PAGE_EXECUTE_READWRITE", ":", "Protect", "=", "\"RWX 
\"", "elif", "mbiProtect", "&", "win32", ".", "PAGE_EXECUTE_WRITECOPY", ":", "Protect", "=", "\"RCX \"", "else", ":", "Protect", "=", "\"??? \"", "if", "mbiProtect", "&", "win32", ".", "PAGE_GUARD", ":", "Protect", "+=", "\"G\"", "else", ":", "Protect", "+=", "\"-\"", "if", "mbiProtect", "&", "win32", ".", "PAGE_NOCACHE", ":", "Protect", "+=", "\"N\"", "else", ":", "Protect", "+=", "\"-\"", "if", "mbiProtect", "&", "win32", ".", "PAGE_WRITECOMBINE", ":", "Protect", "+=", "\"W\"", "else", ":", "Protect", "+=", "\"-\"", "# Type (file mapping, executable image, or private memory).", "mbiType", "=", "mbi", ".", "Type", "if", "mbiType", "==", "win32", ".", "MEM_IMAGE", ":", "Type", "=", "\"Image\"", "elif", "mbiType", "==", "win32", ".", "MEM_MAPPED", ":", "Type", "=", "\"Mapped\"", "elif", "mbiType", "==", "win32", ".", "MEM_PRIVATE", ":", "Type", "=", "\"Private\"", "elif", "mbiType", "==", "0", ":", "Type", "=", "\"\"", "else", ":", "Type", "=", "\"Unknown\"", "# Output a row in the table.", "if", "mappedFilenames", ":", "FileName", "=", "mappedFilenames", ".", "get", "(", "mbi", ".", "BaseAddress", ",", "''", ")", "table", ".", "addRow", "(", "BaseAddress", ",", "RegionSize", ",", "State", ",", "Protect", ",", "Type", ",", "FileName", ")", "else", ":", "table", ".", "addRow", "(", "BaseAddress", ",", "RegionSize", ",", "State", ",", "Protect", ",", "Type", ")", "# Return the table output.", "return", "table", ".", "getOutput", "(", ")" ]
Generate reciprocal configurations sort by AB and optionally append to configurations .
def gen_reciprocals(self, append=False):
    """Generate reciprocal configurations, sorted by AB, and optionally
    append them to the stored configurations.

    Parameters
    ----------
    append : bool, optional
        if True, stack the reciprocals onto self.configs

    Returns
    -------
    numpy.ndarray
        the reciprocal configurations
    """
    # reverse each row (ABMN -> NMBA), then sort inside both dipoles
    rec = self.configs.copy()[:, ::-1]
    rec[:, 0:2] = np.sort(rec[:, 0:2], axis=1)
    rec[:, 2:4] = np.sort(rec[:, 2:4], axis=1)
    # sort rows by (A, B, M, N); np.lexsort treats the last key as primary
    order = np.lexsort((rec[:, 3], rec[:, 2], rec[:, 1], rec[:, 0]))
    rec = rec[order]
    if append:
        self.configs = np.vstack((self.configs, rec))
    return rec
8,403
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L640-L673
[ "def", "get_tok", "(", "self", ",", "tok", ")", ":", "tdata", "=", "self", ".", "tokens", "[", "\"{0}.get_token\"", ".", "format", "(", "self", ".", "opts", "[", "'eauth_tokens'", "]", ")", "]", "(", "self", ".", "opts", ",", "tok", ")", "if", "not", "tdata", ":", "return", "{", "}", "rm_tok", "=", "False", "if", "'expire'", "not", "in", "tdata", ":", "# invalid token, delete it!", "rm_tok", "=", "True", "if", "tdata", ".", "get", "(", "'expire'", ",", "'0'", ")", "<", "time", ".", "time", "(", ")", ":", "rm_tok", "=", "True", "if", "rm_tok", ":", "self", ".", "rm_token", "(", "tok", ")", "return", "tdata" ]
Create measurement configurations out of a pool of current injections . Use only the provided dipoles for potential dipole selection . This means that we have always reciprocal measurements .
def gen_configs_permutate(self, injections_raw, only_same_dipole_length=False, ignore_crossed_dipoles=False, silent=False):
    """Create measurement configurations out of a pool of current
    injections, using only the provided dipoles as potential dipoles.

    Because every dipole also serves as a potential dipole, each
    configuration has a reciprocal counterpart.

    Parameters
    ----------
    injections_raw : array-like
        Nx2 pool of (A, B) injection dipoles
    only_same_dipole_length : bool, optional
        pair only dipoles of equal length
    ignore_crossed_dipoles : bool, optional
        skip configurations where MN crosses AB (assumes ascending
        electrode numbers within each dipole)
    silent : bool, optional
        suppress diagnostic prints

    Returns
    -------
    numpy.ndarray
        the generated (A, B, M, N) configurations
    """
    ab_dipoles = np.atleast_2d(injections_raw).astype(int)
    nr_dipoles = ab_dipoles.shape[0]
    candidates = []
    for cur in range(0, nr_dipoles):
        cur_length = np.abs(ab_dipoles[cur][1] - ab_dipoles[cur][0])
        # pair with every dipole EXCEPT the injection dipole itself
        for other in set(range(0, nr_dipoles)) - set([cur]):
            other_length = np.abs(ab_dipoles[other, :][1] - ab_dipoles[other, :][0])
            if only_same_dipole_length and other_length != cur_length:
                continue
            quadpole = np.array([ab_dipoles[cur, :], ab_dipoles[other, :]]).flatten()
            if ignore_crossed_dipoles is True:
                # Note: this crossing test could be wrong if electrode
                # numbers are not ascending within a dipole!
                if quadpole[2] > quadpole[0] and quadpole[2] < quadpole[1]:
                    if not silent:
                        print('A - ignoring', quadpole)
                elif quadpole[3] > quadpole[0] and quadpole[3] < quadpole[1]:
                    if not silent:
                        print('B - ignoring', quadpole)
                else:
                    candidates.append(quadpole)
            else:
                candidates.append(quadpole)
    # drop quadrupoles that reuse an electrode in both dipoles
    filtered = []
    for quadpole in candidates:
        if not set(quadpole[0:2]).isdisjoint(set(quadpole[2:4])):
            if not silent:
                print('Ignoring quadrupole because of ', 'repeated electrode use:', quadpole)
        else:
            filtered.append(quadpole)
    self.add_to_configs(filtered)
    return np.array(filtered)
8,404
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L675-L753
[ "def", "_OpenFile", "(", "self", ",", "path", ")", ":", "if", "not", "self", ".", "_registry_file_reader", ":", "return", "None", "return", "self", ".", "_registry_file_reader", ".", "Open", "(", "path", ",", "ascii_codepage", "=", "self", ".", "_ascii_codepage", ")" ]
Remove configurations with dipole separations higher than maxsep .
def remove_max_dipole_sep(self, maxsep=10):
    """Remove configurations whose dipole separation exceeds *maxsep*.

    The separation is computed as |B - M| (columns 1 and 2 of the
    configuration array) — presumably the distance between the inner
    current and potential electrodes; confirm against caller usage.

    Parameters
    ----------
    maxsep : int, optional
        maximum allowed separation (default 10)
    """
    separations = np.abs(self.configs[:, 1] - self.configs[:, 2])
    keep = separations <= maxsep
    self.configs = self.configs[keep]
8,405
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L755-L764
[ "def", "_load_lib", "(", ")", ":", "lib_paths", "=", "find_lib_path", "(", ")", "if", "not", "lib_paths", ":", "return", "None", "try", ":", "pathBackup", "=", "os", ".", "environ", "[", "'PATH'", "]", ".", "split", "(", "os", ".", "pathsep", ")", "except", "KeyError", ":", "pathBackup", "=", "[", "]", "lib_success", "=", "False", "os_error_list", "=", "[", "]", "for", "lib_path", "in", "lib_paths", ":", "try", ":", "# needed when the lib is linked with non-system-available dependencies", "os", ".", "environ", "[", "'PATH'", "]", "=", "os", ".", "pathsep", ".", "join", "(", "pathBackup", "+", "[", "os", ".", "path", ".", "dirname", "(", "lib_path", ")", "]", ")", "lib", "=", "ctypes", ".", "cdll", ".", "LoadLibrary", "(", "lib_path", ")", "lib_success", "=", "True", "except", "OSError", "as", "e", ":", "os_error_list", ".", "append", "(", "str", "(", "e", ")", ")", "continue", "finally", ":", "os", ".", "environ", "[", "'PATH'", "]", "=", "os", ".", "pathsep", ".", "join", "(", "pathBackup", ")", "if", "not", "lib_success", ":", "libname", "=", "os", ".", "path", ".", "basename", "(", "lib_paths", "[", "0", "]", ")", "raise", "XGBoostError", "(", "'XGBoost Library ({}) could not be loaded.\\n'", ".", "format", "(", "libname", ")", "+", "'Likely causes:\\n'", "+", "' * OpenMP runtime is not installed '", "+", "'(vcomp140.dll or libgomp-1.dll for Windows, '", "+", "'libgomp.so for UNIX-like OSes)\\n'", "+", "' * You are running 32-bit Python on a 64-bit OS\\n'", "+", "'Error message(s): {}\\n'", ".", "format", "(", "os_error_list", ")", ")", "lib", ".", "XGBGetLastError", ".", "restype", "=", "ctypes", ".", "c_char_p", "lib", ".", "callback", "=", "_get_log_callback_func", "(", ")", "if", "lib", ".", "XGBRegisterLogCallback", "(", "lib", ".", "callback", ")", "!=", "0", ":", "raise", "XGBoostError", "(", "lib", ".", "XGBGetLastError", "(", ")", ")", "return", "lib" ]
Convert the configuration to a pygimli measurement scheme
def to_pg_scheme(self, container=None, positions=None):
    """Convert the configuration to a pygimli/BERT measurement scheme.

    Exactly one of *container* or *positions* must be supplied.

    Parameters
    ----------
    container : object, optional
        data container exposing electrode positions via
        ``container.electrodes.values`` (assumed rows of (x, y, z) —
        TODO confirm against caller)
    positions : iterable, optional
        explicit electrode positions as (x, y, z) triples

    Returns
    -------
    data : pybert.DataContainerERT
        BERT container with sensors and zero-indexed a/b/m/n columns

    Raises
    ------
    Exception
        if neither or both inputs are given, or the container holds no
        electrode positions
    """
    if container is None and positions is None:
        raise Exception('electrode positions are required for BERT export')
    if container is not None and container.electrodes is None:
        raise Exception('container does not contain electrode positions')
    if container is not None and positions is not None:
        raise Exception('only one of container OR positions must be provided')
    if container is not None:
        elec_positions = container.electrodes.values
    elif positions is not None:
        elec_positions = positions
    # check that pybert is importable before the hard import below
    opt_import("pybert", requiredFor="")
    import pybert
    # Initialize BERT DataContainer
    data = pybert.DataContainerERT()
    # register one sensor per electrode position
    for nr, (x, y, z) in enumerate(elec_positions):
        data.createSensor((x, y, z))
    # Define number of measurements
    data.resize(self.configs.shape[0])
    # copy the ABMN columns into the container
    for index, token in enumerate("abmn"):
        data.set(token, self.configs[:, index].tolist())
    # account for zero indexing (stored configs are one-based)
    for token in "abmn":
        data.set(token, data(token) - 1)
    # np.vstack([data.get(x).array() for x in ("abmn")]).T
    return data
8,406
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L771-L836
[ "def", "unpause", "(", "name", ",", "call", "=", "None", ",", "session", "=", "None", ")", ":", "if", "call", "==", "'function'", ":", "raise", "SaltCloudException", "(", "'The show_instnce function must be called with -a or --action.'", ")", "if", "session", "is", "None", ":", "session", "=", "_get_session", "(", ")", "log", ".", "info", "(", "'Unpausing VM %s'", ",", "name", ")", "vm", "=", "_get_vm", "(", "name", ",", "session", ")", "task", "=", "session", ".", "xenapi", ".", "Async", ".", "VM", ".", "unpause", "(", "vm", ")", "_run_async_task", "(", "task", ",", "session", ")", "return", "show_instance", "(", "name", ")" ]
Export to IRIS Instrument configuration file
def to_iris_syscal(self, filename):
    """Export the configurations to an IRIS Instruments configuration file.

    Parameters
    ----------
    filename : str
        output path for the Syscal configuration file
    """
    lines = ['#\t X\t Y\t Z\n']
    # electrodes get increasing integer x-positions, starting at 0
    # TODO: use proper electrode positions, if available
    for pos in range(0, self.configs.max()):
        lines.append('{} {} 0 0\n'.format(pos + 1, pos))
    lines.append('#\t A\t B\t M\t N\n')
    for row_nr, config in enumerate(self.configs):
        lines.append('{} {} {} {} {}\n'.format(row_nr + 1, *config))
    with open(filename, 'w') as fid:
        fid.writelines(lines)
8,407
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/configs/configManager.py#L838-L860
[ "def", "removeAllEntitlements", "(", "self", ",", "appId", ")", ":", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "\"appId\"", ":", "appId", "}", "url", "=", "self", ".", "_url", "+", "\"/licenses/removeAllEntitlements\"", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ")" ]
Creating a new plan for subscriptions associated with the merchant .
def create_plan(self, *, plan_code, description, interval, interval_count, max_payments_allowed, payment_attempts_delay, plan_value, plan_tax, plan_tax_return_base, currency, max_payment_attempts=None, max_pending_payments=None, trial_days=None):
    """Create a new subscription plan associated with the merchant.

    All amounts (plan_value, plan_tax, plan_tax_return_base) are sent in
    *currency*; optional limits default to None and are passed through.

    Returns the client's response for the POST request.
    """
    amounts = [
        {"name": "PLAN_VALUE", "value": plan_value, "currency": currency},
        {"name": "PLAN_TAX", "value": plan_tax, "currency": currency},
        {"name": "PLAN_TAX_RETURN_BASE", "value": plan_tax_return_base, "currency": currency},
    ]
    payload = {
        "accountId": self.client.account_id,
        "planCode": plan_code,
        "description": description,
        "interval": interval,
        "intervalCount": interval_count,
        "maxPaymentsAllowed": max_payments_allowed,
        "paymentAttemptsDelay": payment_attempts_delay,
        "additionalValues": amounts,
        "maxPaymentAttempts": max_payment_attempts,
        "maxPendingPayments": max_pending_payments,
        "trialDays": trial_days,
    }
    return self.client._post(self.url + 'plans', json=payload, headers=self.get_headers())
8,408
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L10-L90
[ "def", "zlib_decompress_all_frames", "(", "data", ")", ":", "frames", "=", "bytearray", "(", ")", "data", "=", "bytes", "(", "data", ")", "while", "data", ":", "decomp", "=", "zlib", ".", "decompressobj", "(", ")", "try", ":", "frames", ".", "extend", "(", "decomp", ".", "decompress", "(", "data", ")", ")", "data", "=", "decomp", ".", "unused_data", "except", "zlib", ".", "error", ":", "frames", ".", "extend", "(", "data", ")", "break", "return", "frames" ]
Check all the information of a plan for subscriptions associated with the merchant .
def get_plan(self, plan_code):
    """Retrieve all information of the subscription plan *plan_code*."""
    endpoint = '{}plans/{}'.format(self.url, plan_code)
    return self.client._get(endpoint, headers=self.get_headers())
8,409
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L92-L102
[ "def", "framesToFrameRange", "(", "frames", ",", "sort", "=", "True", ",", "zfill", "=", "0", ",", "compress", "=", "False", ")", ":", "if", "compress", ":", "frames", "=", "unique", "(", "set", "(", ")", ",", "frames", ")", "frames", "=", "list", "(", "frames", ")", "if", "not", "frames", ":", "return", "''", "if", "len", "(", "frames", ")", "==", "1", ":", "return", "pad", "(", "frames", "[", "0", "]", ",", "zfill", ")", "if", "sort", ":", "frames", ".", "sort", "(", ")", "return", "','", ".", "join", "(", "FrameSet", ".", "framesToFrameRanges", "(", "frames", ",", "zfill", ")", ")" ]
Delete an entire subscription plan associated with the merchant .
def delete_plan(self, plan_code):
    """Delete the entire subscription plan *plan_code* for the merchant."""
    endpoint = '{}plans/{}'.format(self.url, plan_code)
    return self.client._delete(endpoint, headers=self.get_headers())
8,410
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L107-L117
[ "def", "add_fluctuations", "(", "hdf5_file", ",", "N_columns", ",", "N_processes", ")", ":", "random_state", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "slice_queue", "=", "multiprocessing", ".", "JoinableQueue", "(", ")", "pid_list", "=", "[", "]", "for", "i", "in", "range", "(", "N_processes", ")", ":", "worker", "=", "Fluctuations_worker", "(", "hdf5_file", ",", "'/aff_prop_group/similarities'", ",", "random_state", ",", "N_columns", ",", "slice_queue", ")", "worker", ".", "daemon", "=", "True", "worker", ".", "start", "(", ")", "pid_list", ".", "append", "(", "worker", ".", "pid", ")", "for", "rows_slice", "in", "chunk_generator", "(", "N_columns", ",", "4", "*", "N_processes", ")", ":", "slice_queue", ".", "put", "(", "rows_slice", ")", "slice_queue", ".", "join", "(", ")", "slice_queue", ".", "close", "(", ")", "terminate_processes", "(", "pid_list", ")", "gc", ".", "collect", "(", ")" ]
Creation of a customer in the system .
def create_customer(self, *, full_name, email):
    """Register a new customer in the system.

    Returns the client's response for the POST request.
    """
    body = {"fullName": full_name, "email": email}
    return self.client._post(self.url + 'customers', json=body, headers=self.get_headers())
8,411
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L119-L137
[ "def", "draw_sparse_matrix", "(", "array_filename", ",", "output_image", ",", "vmax", "=", "DEFAULT_SATURATION_THRESHOLD", ",", "max_size_matrix", "=", "DEFAULT_MAX_SIZE_MATRIX", ",", ")", ":", "matrix", "=", "np", ".", "loadtxt", "(", "array_filename", ",", "dtype", "=", "np", ".", "int32", ",", "skiprows", "=", "1", ")", "try", ":", "row", ",", "col", ",", "data", "=", "matrix", ".", "T", "except", "ValueError", ":", "row", ",", "col", ",", "data", "=", "matrix", "size", "=", "max", "(", "np", ".", "amax", "(", "row", ")", ",", "np", ".", "amax", "(", "col", ")", ")", "+", "1", "S", "=", "sparse", ".", "coo_matrix", "(", "(", "data", ",", "(", "row", ",", "col", ")", ")", ",", "shape", "=", "(", "size", ",", "size", ")", ")", "if", "max_size_matrix", "<=", "0", ":", "binning", "=", "1", "else", ":", "binning", "=", "(", "size", "//", "max_size_matrix", ")", "+", "1", "binned_S", "=", "hcs", ".", "bin_sparse", "(", "S", ",", "subsampling_factor", "=", "binning", ")", "dense_S", "=", "binned_S", ".", "todense", "(", ")", "dense_S", "=", "dense_S", "+", "dense_S", ".", "T", "-", "np", ".", "diag", "(", "np", ".", "diag", "(", "dense_S", ")", ")", "normed_S", "=", "hcs", ".", "normalize_dense", "(", "dense_S", ")", "spaceless_pdf_plot_maker", "(", "normed_S", ",", "output_image", ",", "vmax", "=", "vmax", ")" ]
Queries the information related to the customer .
def get_customer(self, customer_id):
    """Query the information related to the customer *customer_id*."""
    endpoint = '{}customers/{}'.format(self.url, customer_id)
    return self.client._get(endpoint, headers=self.get_headers())
8,412
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L139-L149
[ "def", "update_reg", "(", "self", ",", "addr", ",", "mask", ",", "new_val", ")", ":", "shift", "=", "_mask_to_shift", "(", "mask", ")", "val", "=", "self", ".", "read_reg", "(", "addr", ")", "val", "&=", "~", "mask", "val", "|=", "(", "new_val", "<<", "shift", ")", "&", "mask", "self", ".", "write_reg", "(", "addr", ",", "val", ")", "return", "val" ]
Removes a user from the system .
def delete_customer(self, customer_id):
    """Remove the customer *customer_id* from the system."""
    endpoint = '{}customers/{}'.format(self.url, customer_id)
    return self.client._delete(endpoint, headers=self.get_headers())
8,413
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L154-L164
[ "def", "_start_new_cdx_file", "(", "self", ")", ":", "self", ".", "_cdx_filename", "=", "'{0}.cdx'", ".", "format", "(", "self", ".", "_prefix_filename", ")", "if", "not", "self", ".", "_params", ".", "appending", ":", "wpull", ".", "util", ".", "truncate_file", "(", "self", ".", "_cdx_filename", ")", "self", ".", "_write_cdx_header", "(", ")", "elif", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_cdx_filename", ")", ":", "self", ".", "_write_cdx_header", "(", ")" ]
Creating a new subscription of a client to a plan .
def create_subscription(self, *, customer_id, credit_card_token, plan_code, quantity=None, installments=None, trial_days=None, immediate_payment=None, extra1=None, extra2=None, delivery_address=None, notify_url=None, recurring_bill_items=None):
    """Create a new subscription of a customer to a plan.

    The customer is referenced by id plus a tokenized credit card; all
    optional fields default to None and are passed through unchanged.

    Returns the client's response for the POST request.
    """
    customer_info = {
        "id": customer_id,
        "creditCards": [{"token": credit_card_token}],
    }
    payload = {
        "quantity": quantity,
        "installments": installments,
        "trialDays": trial_days,
        "immediatePayment": immediate_payment,
        "extra1": extra1,
        "extra2": extra2,
        "customer": customer_info,
        "plan": {"planCode": plan_code},
        "deliveryAddress": delivery_address,
        "notifyUrl": notify_url,
        "recurringBillItems": recurring_bill_items,
    }
    return self.client._post(self.url + 'subscriptions', json=payload, headers=self.get_headers())
8,414
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L240-L303
[ "def", "process_headers", "(", "data", ",", "filename", ")", ":", "headers", "=", "[", "]", "if", "'toc'", "in", "data", ":", "for", "element", "in", "PyQuery", "(", "data", "[", "'toc'", "]", ")", "(", "'a'", ")", ":", "headers", ".", "append", "(", "recurse_while_none", "(", "element", ")", ")", "if", "None", "in", "headers", ":", "log", ".", "info", "(", "'Unable to index file headers for: %s'", ",", "filename", ")", "return", "headers" ]
Check the basic information associated with the specified subscription .
def get_subscription(self, subscription_id):
    """Check the basic information associated with the specified
    subscription.

    BUGFIX: the original issued this query through ``self.client._put``;
    fetching a resource must use GET, consistent with get_plan /
    get_customer / get_additional_charge_by_identifier.

    Parameters
    ----------
    subscription_id : identifier of the subscription to query

    Returns
    -------
    the client's response for the GET request
    """
    return self.client._get(
        self.url + 'subscriptions/{}'.format(subscription_id),
        headers=self.get_headers())
8,415
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L305-L315
[ "def", "freeze", "(", "self", ",", "tmp_dir", ")", ":", "for", "sfile", "in", "self", ".", "secrets", "(", ")", ":", "src_file", "=", "hard_path", "(", "sfile", ",", "self", ".", "opt", ".", "secrets", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "src_file", ")", ":", "raise", "aomi_excep", ".", "IceFile", "(", "\"%s secret not found at %s\"", "%", "(", "self", ",", "src_file", ")", ")", "dest_file", "=", "\"%s/%s\"", "%", "(", "tmp_dir", ",", "sfile", ")", "dest_dir", "=", "os", ".", "path", ".", "dirname", "(", "dest_file", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "dest_dir", ")", ":", "os", ".", "mkdir", "(", "dest_dir", ",", "0o700", ")", "shutil", ".", "copy", "(", "src_file", ",", "dest_file", ")", "LOG", ".", "debug", "(", "\"Froze %s %s\"", ",", "self", ",", "sfile", ")" ]
Update information associated with the specified subscription . At the moment it is only possible to update the token of the credit card to which the charge of the subscription is made .
def update_subscription(self, *, subscription_id, credit_card_token):
    """Update the specified subscription.

    Currently only the credit-card token that the subscription is charged
    against can be changed.
    """
    body = {"creditCardToken": credit_card_token}
    endpoint = '{}subscriptions/{}'.format(self.url, subscription_id)
    return self.client._put(endpoint, json=body, headers=self.get_headers())
8,416
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L317-L333
[ "def", "get_rng", "(", "obj", "=", "None", ")", ":", "seed", "=", "(", "id", "(", "obj", ")", "+", "os", ".", "getpid", "(", ")", "+", "int", "(", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%Y%m%d%H%M%S%f\"", ")", ")", ")", "%", "4294967295", "if", "_RNG_SEED", "is", "not", "None", ":", "seed", "=", "_RNG_SEED", "return", "np", ".", "random", ".", "RandomState", "(", "seed", ")" ]
Unsubscribe: delete the relationship of the customer with the plan.
def delete_subscription(self, subscription_id):
    """Unsubscribe: delete the relationship of the customer with the plan."""
    endpoint = '{}subscriptions/{}'.format(self.url, subscription_id)
    return self.client._delete(endpoint, headers=self.get_headers())
8,417
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L335-L345
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Adds extra charges to the respective invoice for the current period .
def create_additional_charge(self, *, subscription_id, description, plan_value, plan_tax, plan_tax_return_base, currency):
    """Add extra charges to the respective invoice for the current period.

    All amounts are sent in *currency*; returns the client's response for
    the POST request.
    """
    amounts = [
        {"name": "ITEM_VALUE", "value": plan_value, "currency": currency},
        {"name": "ITEM_TAX", "value": plan_tax, "currency": currency},
        {"name": "ITEM_TAX_RETURN_BASE", "value": plan_tax_return_base, "currency": currency},
    ]
    body = {"description": description, "additionalValues": amounts}
    endpoint = '{}subscriptions/{}/recurringBillItems'.format(self.url, subscription_id)
    return self.client._post(endpoint, json=body, headers=self.get_headers())
8,418
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L347-L384
[ "def", "intersect", "(", "self", ",", "other", ")", ":", "loc", "=", "self", ".", "locate_keys", "(", "other", ",", "strict", "=", "False", ")", "return", "self", ".", "compress", "(", "loc", ",", "axis", "=", "0", ")" ]
Query extra charge information of an invoice from its identifier .
def get_additional_charge_by_identifier(self, recurring_billing_id):
    """Query extra-charge information of an invoice by its identifier."""
    endpoint = '{}recurringBillItems/{}'.format(self.url, recurring_billing_id)
    return self.client._get(endpoint, headers=self.get_headers())
8,419
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L386-L397
[ "def", "fit", "(", "self", ",", "X", ",", "y", "=", "None", ")", ":", "X", "=", "array2d", "(", "X", ")", "self", ".", "n_features", "=", "X", ".", "shape", "[", "1", "]", "self", ".", "n_bins", "=", "self", ".", "n_bins_per_feature", "**", "self", ".", "n_features", "if", "self", ".", "min", "is", "None", ":", "min", "=", "np", ".", "min", "(", "X", ",", "axis", "=", "0", ")", "elif", "isinstance", "(", "self", ".", "min", ",", "numbers", ".", "Number", ")", ":", "min", "=", "self", ".", "min", "*", "np", ".", "ones", "(", "self", ".", "n_features", ")", "else", ":", "min", "=", "np", ".", "asarray", "(", "self", ".", "min", ")", "if", "not", "min", ".", "shape", "==", "(", "self", ".", "n_features", ",", ")", ":", "raise", "ValueError", "(", "'min shape error'", ")", "if", "self", ".", "max", "is", "None", ":", "max", "=", "np", ".", "max", "(", "X", ",", "axis", "=", "0", ")", "elif", "isinstance", "(", "self", ".", "max", ",", "numbers", ".", "Number", ")", ":", "max", "=", "self", ".", "max", "*", "np", ".", "ones", "(", "self", ".", "n_features", ")", "else", ":", "max", "=", "np", ".", "asarray", "(", "self", ".", "max", ")", "if", "not", "max", ".", "shape", "==", "(", "self", ".", "n_features", ",", ")", ":", "raise", "ValueError", "(", "'max shape error'", ")", "self", ".", "grid", "=", "np", ".", "array", "(", "[", "np", ".", "linspace", "(", "min", "[", "i", "]", "-", "EPS", ",", "max", "[", "i", "]", "+", "EPS", ",", "self", ".", "n_bins_per_feature", "+", "1", ")", "for", "i", "in", "range", "(", "self", ".", "n_features", ")", "]", ")", "return", "self" ]
Updates the information from an additional charge in an invoice .
def update_additional_charge(self, *, recurring_billing_id, description,
                             plan_value, plan_tax, plan_tax_return_base,
                             currency):
    """
    Update the information of an additional charge (recurring bill item).

    Sends a PUT to ``recurringBillItems/<id>`` whose payload carries the new
    description plus the ITEM_VALUE / ITEM_TAX / ITEM_TAX_RETURN_BASE values,
    all expressed in the given currency.

    Args:
        recurring_billing_id: Identifier of the item to update.
        description: New description for the charge.
        plan_value: Gross value of the item.
        plan_tax: Tax portion of the item.
        plan_tax_return_base: Tax-return base of the item.
        currency: ISO currency code applied to all three values.

    Returns:
        Whatever the client's ``_put`` call returns.
    """
    value_entries = [
        ('ITEM_VALUE', plan_value),
        ('ITEM_TAX', plan_tax),
        ('ITEM_TAX_RETURN_BASE', plan_tax_return_base),
    ]
    payload = {
        'description': description,
        'additionalValues': [
            {'name': name, 'value': amount, 'currency': currency}
            for name, amount in value_entries
        ],
    }
    endpoint = self.url + 'recurringBillItems/{}'.format(recurring_billing_id)
    return self.client._put(endpoint, payload=payload,
                            headers=self.get_headers())
8,420
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L429-L466
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Remove an extra charge from an invoice .
def delete_additional_charge(self, recurring_billing_id):
    """
    Remove an extra charge (recurring bill item) from an invoice.

    Issues a DELETE against ``recurringBillItems/<id>`` with the resource's
    standard headers.

    Args:
        recurring_billing_id: Identifier of the item to remove.

    Returns:
        Whatever the client's ``_delete`` call returns.
    """
    endpoint = self.url + 'recurringBillItems/{}'.format(recurring_billing_id)
    return self.client._delete(endpoint, headers=self.get_headers())
8,421
https://github.com/GearPlug/payu-python/blob/47ec5c9fc89f1f89a53ec0a68c84f358bbe3394e/payu/recurring.py#L468-L479
[ "def", "play", "(", ")", ":", "with", "sqlite3", ".", "connect", "(", "ARGS", ".", "database", ")", "as", "connection", ":", "connection", ".", "text_factory", "=", "str", "cursor", "=", "connection", ".", "cursor", "(", ")", "if", "ARGS", ".", "pattern", ":", "if", "not", "ARGS", ".", "strict", ":", "ARGS", ".", "pattern", "=", "'%{0}%'", ".", "format", "(", "ARGS", ".", "pattern", ")", "cursor", ".", "execute", "(", "'SELECT * FROM Movies WHERE Name LIKE (?)'", ",", "[", "ARGS", ".", "pattern", "]", ")", "try", ":", "path", "=", "sorted", "(", "[", "row", "for", "row", "in", "cursor", "]", ")", "[", "0", "]", "[", "1", "]", "replace_map", "=", "{", "' '", ":", "'\\\\ '", ",", "'\"'", ":", "'\\\\\"'", ",", "\"'\"", ":", "\"\\\\'\"", "}", "for", "key", ",", "val", "in", "replace_map", ".", "iteritems", "(", ")", ":", "path", "=", "path", ".", "replace", "(", "key", ",", "val", ")", "os", ".", "system", "(", "'{0} {1} &'", ".", "format", "(", "ARGS", ".", "player", ",", "path", ")", ")", "except", "IndexError", ":", "exit", "(", "'Error: Movie not found.'", ")" ]
Creates a thumbnail for an ImageField.
def thumbnail(parser, token):
    """Creates a thumbnail for an ImageField.

    Template-tag syntax (mirrored in the error text below)::

        {% thumbnail source size [option1 option2 ...] %}
        {% thumbnail source size [option1 option2 ...] as variable %}

    ``source`` is the image variable, ``size`` the requested geometry, and
    any further ``key=value`` options must be names present in TAG_SETTINGS.
    Returns a ThumbnailNode; raises TemplateSyntaxError on malformed input.
    """
    args = token.split_contents()
    tag = args[0]
    # Check to see if we're setting to a context variable.
    if len(args) > 4 and args[-2] == 'as':
        context_name = args[-1]
        args = args[:-2]
    else:
        context_name = None
    if len(args) < 3:
        raise TemplateSyntaxError(
            "Invalid syntax. Expected "
            "'{%% %s source size [option1 option2 ...] %%}' or "
            "'{%% %s source size [option1 option2 ...] as variable %%}'" %
            (tag, tag))
    # Get the source image path and requested size.
    source_var = args[1]
    # If the size argument was a correct static format, wrap it in quotes so
    # that it is compiled correctly.
    m = REGEXP_THUMB_SIZES.match(args[2])
    if m:
        args[2] = '"%s"' % args[2]
    size_var = args[2]
    # Get the options.
    args_list = split_args(args[3:]).items()
    # Check the options.
    # NOTE(review): ``opts`` is never populated in this function; presumably
    # ThumbnailNode fills or ignores it — confirm against ThumbnailNode.
    opts = {}
    kwargs = {}  # key,values here override settings and defaults
    for arg, value in args_list:
        value = value and parser.compile_filter(value)
        if arg in TAG_SETTINGS and value is not None:
            kwargs[str(arg)] = value
            continue
        else:
            # NOTE(review): a recognised option supplied without a value also
            # falls through to this error — confirm that is intended.
            raise TemplateSyntaxError("'%s' tag received a bad argument: "
                                      "'%s'" % (tag, arg))
    return ThumbnailNode(source_var, size_var, opts=opts,
                         context_name=context_name, **kwargs)
8,422
https://github.com/gtaylor/django-athumb/blob/69261ace0dff81e33156a54440874456a7b38dfb/athumb/templatetags/thumbnail.py#L142-L201
[ "def", "_approximate_unkown_bond_lengths", "(", "self", ")", ":", "dataset", "=", "self", ".", "lengths", "[", "BOND_SINGLE", "]", "for", "n1", "in", "periodic", ".", "iter_numbers", "(", ")", ":", "for", "n2", "in", "periodic", ".", "iter_numbers", "(", ")", ":", "if", "n1", "<=", "n2", ":", "pair", "=", "frozenset", "(", "[", "n1", ",", "n2", "]", ")", "atom1", "=", "periodic", "[", "n1", "]", "atom2", "=", "periodic", "[", "n2", "]", "#if (pair not in dataset) and hasattr(atom1, \"covalent_radius\") and hasattr(atom2, \"covalent_radius\"):", "if", "(", "pair", "not", "in", "dataset", ")", "and", "(", "atom1", ".", "covalent_radius", "is", "not", "None", ")", "and", "(", "atom2", ".", "covalent_radius", "is", "not", "None", ")", ":", "dataset", "[", "pair", "]", "=", "(", "atom1", ".", "covalent_radius", "+", "atom2", ".", "covalent_radius", ")" ]
Print the requested statistics values for those fields specified on input .
def printStats(self):
    """Print the requested statistics values for the fields selected on input.

    A statistic is printed whenever its name occurs anywhere in the
    ``self.fields`` string; the header line is always printed.
    """
    # (field token, printed label) pairs, in reporting order.
    labelled_fields = [
        ('npix', "Number of pixels : "),
        ('min', "Minimum value : "),
        ('max', "Maximum value : "),
        ('stddev', "Standard Deviation: "),
        ('mean', "Mean : "),
        ('mode', "Mode : "),
        ('median', "Median : "),
        ('midpt', "Midpt : "),
    ]
    print("--- Imagestats Results ---")
    for field, label in labelled_fields:
        # Substring membership test, same semantics as ``fields.find(x) != -1``.
        if field in self.fields:
            print(label, getattr(self, field))
8,423
https://github.com/spacetelescope/stsci.imagestats/blob/d7fc9fe9783f7ed3dc9e4af47acd357a5ccd68e3/stsci/imagestats/__init__.py#L332-L351
[ "def", "upload_cbn_dir", "(", "dir_path", ",", "manager", ")", ":", "t", "=", "time", ".", "time", "(", ")", "for", "jfg_path", "in", "os", ".", "listdir", "(", "dir_path", ")", ":", "if", "not", "jfg_path", ".", "endswith", "(", "'.jgf'", ")", ":", "continue", "path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "jfg_path", ")", "log", ".", "info", "(", "'opening %s'", ",", "path", ")", "with", "open", "(", "path", ")", "as", "f", ":", "cbn_jgif_dict", "=", "json", ".", "load", "(", "f", ")", "graph", "=", "pybel", ".", "from_cbn_jgif", "(", "cbn_jgif_dict", ")", "out_path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "jfg_path", ".", "replace", "(", "'.jgf'", ",", "'.bel'", ")", ")", "with", "open", "(", "out_path", ",", "'w'", ")", "as", "o", ":", "pybel", ".", "to_bel", "(", "graph", ",", "o", ")", "strip_annotations", "(", "graph", ")", "enrich_pubmed_citations", "(", "manager", "=", "manager", ",", "graph", "=", "graph", ")", "pybel", ".", "to_database", "(", "graph", ",", "manager", "=", "manager", ")", "log", ".", "info", "(", "''", ")", "log", ".", "info", "(", "'done in %.2f'", ",", "time", ".", "time", "(", ")", "-", "t", ")" ]
Perform a WVA web services request and return the raw response object
def raw_request(self, method, uri, **kwargs):
    """Perform a WVA web services request and return the raw response object.

    Any failure at the transport level (``requests.RequestException``) is
    re-raised as a :class:`WVAHttpRequestError`, chained to the original.

    :param method: HTTP method name (e.g. ``"GET"``)
    :param uri: web-services path, expanded via ``self._get_ws_url``
    :param kwargs: forwarded verbatim to ``requests`` session ``request``
    """
    with warnings.catch_warnings():
        # The device uses an unverifiable certificate; silence urllib3's
        # warnings about it for the duration of this request only.
        for warning_cls in (urllib3.exceptions.InsecureRequestWarning,
                            urllib3.exceptions.InsecurePlatformWarning):
            warnings.simplefilter("ignore", warning_cls)
        try:
            return self._get_session().request(
                method, self._get_ws_url(uri), **kwargs)
        except requests.RequestException as e:
            # e.g. raise new_exc from old_exc (py2/py3 compatible)
            six.raise_from(WVAHttpRequestError(e), e)
8,424
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L79-L97
[ "def", "logline_timestamp_comparator", "(", "t1", ",", "t2", ")", ":", "dt1", "=", "_parse_logline_timestamp", "(", "t1", ")", "dt2", "=", "_parse_logline_timestamp", "(", "t2", ")", "for", "u1", ",", "u2", "in", "zip", "(", "dt1", ",", "dt2", ")", ":", "if", "u1", "<", "u2", ":", "return", "-", "1", "elif", "u1", ">", "u2", ":", "return", "1", "return", "0" ]
Perform a WVA web services request and return the decoded value if successful
def request(self, method, uri, **kwargs):
    """Perform a WVA web services request and return the decoded value on success.

    Non-200 responses raise the status-specific exception from
    ``HTTP_STATUS_EXCEPTION_MAP`` (falling back to ``WVAHttpError``).
    JSON bodies are decoded into Python objects; anything else is
    returned as text.
    """
    resp = self.raw_request(method, uri, **kwargs)
    if resp.status_code != 200:
        # Look up a dedicated exception type for this status code.
        raise HTTP_STATUS_EXCEPTION_MAP.get(resp.status_code, WVAHttpError)(resp)
    is_json = resp.headers.get("content-type") == "application/json"
    return json.loads(resp.text) if is_json else resp.text
8,425
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L99-L121
[ "def", "start", "(", "self", ")", ":", "def", "_heartbeat", "(", ")", ":", "if", "not", "self", ".", "_client", ".", "lifecycle", ".", "is_live", ":", "return", "self", ".", "_heartbeat", "(", ")", "self", ".", "_heartbeat_timer", "=", "self", ".", "_client", ".", "reactor", ".", "add_timer", "(", "self", ".", "_heartbeat_interval", ",", "_heartbeat", ")", "self", ".", "_heartbeat_timer", "=", "self", ".", "_client", ".", "reactor", ".", "add_timer", "(", "self", ".", "_heartbeat_interval", ",", "_heartbeat", ")" ]
POST the provided data to the specified path
def post(self, uri, data, **kwargs):
    """POST the provided data to the specified path.

    Extra keyword arguments are forwarded to :meth:`request` untouched.
    """
    kwargs["data"] = data
    return self.request("POST", uri, **kwargs)
8,426
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L137-L143
[ "def", "state_histogram", "(", "rho", ",", "ax", "=", "None", ",", "title", "=", "\"\"", ",", "threshold", "=", "0.001", ")", ":", "rho_amps", "=", "rho", ".", "data", ".", "toarray", "(", ")", ".", "ravel", "(", ")", "nqc", "=", "int", "(", "round", "(", "np", ".", "log2", "(", "rho", ".", "shape", "[", "0", "]", ")", ")", ")", "if", "ax", "is", "None", ":", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "6", ")", ")", "ax", "=", "Axes3D", "(", "fig", ",", "azim", "=", "-", "35", ",", "elev", "=", "35", ")", "cmap", "=", "rigetti_4_color_cm", "norm", "=", "mpl", ".", "colors", ".", "Normalize", "(", "-", "np", ".", "pi", ",", "np", ".", "pi", ")", "colors", "=", "cmap", "(", "norm", "(", "np", ".", "angle", "(", "rho_amps", ")", ")", ")", "dzs", "=", "abs", "(", "rho_amps", ")", "colors", "[", ":", ",", "3", "]", "=", "1.0", "*", "(", "dzs", ">", "threshold", ")", "xs", ",", "ys", "=", "np", ".", "meshgrid", "(", "range", "(", "2", "**", "nqc", ")", ",", "range", "(", "2", "**", "nqc", ")", ")", "xs", "=", "xs", ".", "ravel", "(", ")", "ys", "=", "ys", ".", "ravel", "(", ")", "zs", "=", "np", ".", "zeros_like", "(", "xs", ")", "dxs", "=", "dys", "=", "np", ".", "ones_like", "(", "xs", ")", "*", "0.8", "_", "=", "ax", ".", "bar3d", "(", "xs", ",", "ys", ",", "zs", ",", "dxs", ",", "dys", ",", "dzs", ",", "color", "=", "colors", ")", "ax", ".", "set_xticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_xticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_yticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_yticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_zlim3d", "(", "[", "0", ",", "1", "]", ")", "cax", ",", "kw", "=", "mpl", ".", "colorbar", ".", "make_axes", "(", "ax", ",", "shrink", "=", ".75", ",", "pad", "=", ".1", ")", "cb", "=", "mpl", ".", "colorbar", ".", "ColorbarBase", "(", "cax", ",", "cmap", "=", 
"cmap", ",", "norm", "=", "norm", ")", "cb", ".", "set_ticks", "(", "[", "-", "np", ".", "pi", ",", "-", "np", ".", "pi", "/", "2", ",", "0", ",", "np", ".", "pi", "/", "2", ",", "np", ".", "pi", "]", ")", "cb", ".", "set_ticklabels", "(", "(", "r'$-\\pi$'", ",", "r'$-\\pi/2$'", ",", "r'$0$'", ",", "r'$\\pi/2$'", ",", "r'$\\pi$'", ")", ")", "cb", ".", "set_label", "(", "'arg'", ")", "ax", ".", "view_init", "(", "azim", "=", "-", "55", ",", "elev", "=", "45", ")", "ax", ".", "set_title", "(", "title", ")", "return", "ax" ]
POST the provided data as json to the specified path
def post_json(self, uri, data, **kwargs):
    """POST ``data``, serialized as JSON, to the specified path.

    The ``Content-Type: application/json`` header is merged into any
    caller-supplied ``headers`` dict (which is mutated in place, matching
    the original behaviour).
    """
    headers = kwargs.setdefault("headers", {})
    headers["Content-Type"] = "application/json"  # tell server we are sending json
    return self.post(uri, data=json.dumps(data), **kwargs)
8,427
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L145-L154
[ "def", "state_histogram", "(", "rho", ",", "ax", "=", "None", ",", "title", "=", "\"\"", ",", "threshold", "=", "0.001", ")", ":", "rho_amps", "=", "rho", ".", "data", ".", "toarray", "(", ")", ".", "ravel", "(", ")", "nqc", "=", "int", "(", "round", "(", "np", ".", "log2", "(", "rho", ".", "shape", "[", "0", "]", ")", ")", ")", "if", "ax", "is", "None", ":", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "6", ")", ")", "ax", "=", "Axes3D", "(", "fig", ",", "azim", "=", "-", "35", ",", "elev", "=", "35", ")", "cmap", "=", "rigetti_4_color_cm", "norm", "=", "mpl", ".", "colors", ".", "Normalize", "(", "-", "np", ".", "pi", ",", "np", ".", "pi", ")", "colors", "=", "cmap", "(", "norm", "(", "np", ".", "angle", "(", "rho_amps", ")", ")", ")", "dzs", "=", "abs", "(", "rho_amps", ")", "colors", "[", ":", ",", "3", "]", "=", "1.0", "*", "(", "dzs", ">", "threshold", ")", "xs", ",", "ys", "=", "np", ".", "meshgrid", "(", "range", "(", "2", "**", "nqc", ")", ",", "range", "(", "2", "**", "nqc", ")", ")", "xs", "=", "xs", ".", "ravel", "(", ")", "ys", "=", "ys", ".", "ravel", "(", ")", "zs", "=", "np", ".", "zeros_like", "(", "xs", ")", "dxs", "=", "dys", "=", "np", ".", "ones_like", "(", "xs", ")", "*", "0.8", "_", "=", "ax", ".", "bar3d", "(", "xs", ",", "ys", ",", "zs", ",", "dxs", ",", "dys", ",", "dzs", ",", "color", "=", "colors", ")", "ax", ".", "set_xticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_xticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_yticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_yticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_zlim3d", "(", "[", "0", ",", "1", "]", ")", "cax", ",", "kw", "=", "mpl", ".", "colorbar", ".", "make_axes", "(", "ax", ",", "shrink", "=", ".75", ",", "pad", "=", ".1", ")", "cb", "=", "mpl", ".", "colorbar", ".", "ColorbarBase", "(", "cax", ",", "cmap", "=", 
"cmap", ",", "norm", "=", "norm", ")", "cb", ".", "set_ticks", "(", "[", "-", "np", ".", "pi", ",", "-", "np", ".", "pi", "/", "2", ",", "0", ",", "np", ".", "pi", "/", "2", ",", "np", ".", "pi", "]", ")", "cb", ".", "set_ticklabels", "(", "(", "r'$-\\pi$'", ",", "r'$-\\pi/2$'", ",", "r'$0$'", ",", "r'$\\pi/2$'", ",", "r'$\\pi$'", ")", ")", "cb", ".", "set_label", "(", "'arg'", ")", "ax", ".", "view_init", "(", "azim", "=", "-", "55", ",", "elev", "=", "45", ")", "ax", ".", "set_title", "(", "title", ")", "return", "ax" ]
PUT the provided data to the specified path
def put(self, uri, data, **kwargs):
    """PUT the provided data to the specified path.

    Extra keyword arguments are forwarded to :meth:`request` untouched.
    """
    kwargs["data"] = data
    return self.request("PUT", uri, **kwargs)
8,428
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L156-L162
[ "def", "state_histogram", "(", "rho", ",", "ax", "=", "None", ",", "title", "=", "\"\"", ",", "threshold", "=", "0.001", ")", ":", "rho_amps", "=", "rho", ".", "data", ".", "toarray", "(", ")", ".", "ravel", "(", ")", "nqc", "=", "int", "(", "round", "(", "np", ".", "log2", "(", "rho", ".", "shape", "[", "0", "]", ")", ")", ")", "if", "ax", "is", "None", ":", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "6", ")", ")", "ax", "=", "Axes3D", "(", "fig", ",", "azim", "=", "-", "35", ",", "elev", "=", "35", ")", "cmap", "=", "rigetti_4_color_cm", "norm", "=", "mpl", ".", "colors", ".", "Normalize", "(", "-", "np", ".", "pi", ",", "np", ".", "pi", ")", "colors", "=", "cmap", "(", "norm", "(", "np", ".", "angle", "(", "rho_amps", ")", ")", ")", "dzs", "=", "abs", "(", "rho_amps", ")", "colors", "[", ":", ",", "3", "]", "=", "1.0", "*", "(", "dzs", ">", "threshold", ")", "xs", ",", "ys", "=", "np", ".", "meshgrid", "(", "range", "(", "2", "**", "nqc", ")", ",", "range", "(", "2", "**", "nqc", ")", ")", "xs", "=", "xs", ".", "ravel", "(", ")", "ys", "=", "ys", ".", "ravel", "(", ")", "zs", "=", "np", ".", "zeros_like", "(", "xs", ")", "dxs", "=", "dys", "=", "np", ".", "ones_like", "(", "xs", ")", "*", "0.8", "_", "=", "ax", ".", "bar3d", "(", "xs", ",", "ys", ",", "zs", ",", "dxs", ",", "dys", ",", "dzs", ",", "color", "=", "colors", ")", "ax", ".", "set_xticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_xticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_yticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_yticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_zlim3d", "(", "[", "0", ",", "1", "]", ")", "cax", ",", "kw", "=", "mpl", ".", "colorbar", ".", "make_axes", "(", "ax", ",", "shrink", "=", ".75", ",", "pad", "=", ".1", ")", "cb", "=", "mpl", ".", "colorbar", ".", "ColorbarBase", "(", "cax", ",", "cmap", "=", 
"cmap", ",", "norm", "=", "norm", ")", "cb", ".", "set_ticks", "(", "[", "-", "np", ".", "pi", ",", "-", "np", ".", "pi", "/", "2", ",", "0", ",", "np", ".", "pi", "/", "2", ",", "np", ".", "pi", "]", ")", "cb", ".", "set_ticklabels", "(", "(", "r'$-\\pi$'", ",", "r'$-\\pi/2$'", ",", "r'$0$'", ",", "r'$\\pi/2$'", ",", "r'$\\pi$'", ")", ")", "cb", ".", "set_label", "(", "'arg'", ")", "ax", ".", "view_init", "(", "azim", "=", "-", "55", ",", "elev", "=", "45", ")", "ax", ".", "set_title", "(", "title", ")", "return", "ax" ]
PUT the provided data as json to the specified path
def put_json(self, uri, data, **kwargs):
    """PUT ``data``, serialized as JSON, to the specified path.

    The ``Content-Type: application/json`` header is merged into any
    caller-supplied ``headers`` dict (which is mutated in place, matching
    the original behaviour).
    """
    headers = kwargs.setdefault("headers", {})
    headers["Content-Type"] = "application/json"  # tell server we are sending json
    return self.put(uri, data=json.dumps(data), **kwargs)
8,429
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/http_client.py#L164-L173
[ "def", "state_histogram", "(", "rho", ",", "ax", "=", "None", ",", "title", "=", "\"\"", ",", "threshold", "=", "0.001", ")", ":", "rho_amps", "=", "rho", ".", "data", ".", "toarray", "(", ")", ".", "ravel", "(", ")", "nqc", "=", "int", "(", "round", "(", "np", ".", "log2", "(", "rho", ".", "shape", "[", "0", "]", ")", ")", ")", "if", "ax", "is", "None", ":", "fig", "=", "plt", ".", "figure", "(", "figsize", "=", "(", "10", ",", "6", ")", ")", "ax", "=", "Axes3D", "(", "fig", ",", "azim", "=", "-", "35", ",", "elev", "=", "35", ")", "cmap", "=", "rigetti_4_color_cm", "norm", "=", "mpl", ".", "colors", ".", "Normalize", "(", "-", "np", ".", "pi", ",", "np", ".", "pi", ")", "colors", "=", "cmap", "(", "norm", "(", "np", ".", "angle", "(", "rho_amps", ")", ")", ")", "dzs", "=", "abs", "(", "rho_amps", ")", "colors", "[", ":", ",", "3", "]", "=", "1.0", "*", "(", "dzs", ">", "threshold", ")", "xs", ",", "ys", "=", "np", ".", "meshgrid", "(", "range", "(", "2", "**", "nqc", ")", ",", "range", "(", "2", "**", "nqc", ")", ")", "xs", "=", "xs", ".", "ravel", "(", ")", "ys", "=", "ys", ".", "ravel", "(", ")", "zs", "=", "np", ".", "zeros_like", "(", "xs", ")", "dxs", "=", "dys", "=", "np", ".", "ones_like", "(", "xs", ")", "*", "0.8", "_", "=", "ax", ".", "bar3d", "(", "xs", ",", "ys", ",", "zs", ",", "dxs", ",", "dys", ",", "dzs", ",", "color", "=", "colors", ")", "ax", ".", "set_xticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_xticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_yticks", "(", "np", ".", "arange", "(", "2", "**", "nqc", ")", "+", ".4", ")", "ax", ".", "set_yticklabels", "(", "basis_labels", "(", "nqc", ")", ")", "ax", ".", "set_zlim3d", "(", "[", "0", ",", "1", "]", ")", "cax", ",", "kw", "=", "mpl", ".", "colorbar", ".", "make_axes", "(", "ax", ",", "shrink", "=", ".75", ",", "pad", "=", ".1", ")", "cb", "=", "mpl", ".", "colorbar", ".", "ColorbarBase", "(", "cax", ",", "cmap", "=", 
"cmap", ",", "norm", "=", "norm", ")", "cb", ".", "set_ticks", "(", "[", "-", "np", ".", "pi", ",", "-", "np", ".", "pi", "/", "2", ",", "0", ",", "np", ".", "pi", "/", "2", ",", "np", ".", "pi", "]", ")", "cb", ".", "set_ticklabels", "(", "(", "r'$-\\pi$'", ",", "r'$-\\pi/2$'", ",", "r'$0$'", ",", "r'$\\pi/2$'", ",", "r'$\\pi$'", ")", ")", "cb", ".", "set_label", "(", "'arg'", ")", "ax", ".", "view_init", "(", "azim", "=", "-", "55", ",", "elev", "=", "45", ")", "ax", ".", "set_title", "(", "title", ")", "return", "ax" ]
Translate a MAC address into an IPv6 address in the prefixed network .
def mac_to_ipv6_linklocal(mac, prefix="fe80::"):
    """Translate a MAC address into an IPv6 address in the prefixed network.

    Builds the modified EUI-64 interface identifier (RFC 4291 appendix A):
    the 48-bit MAC is split around an inserted ``ff:fe`` and the
    universal/local bit of the first octet is flipped.

    :param mac: MAC address; spaces, dots, colons and dashes are ignored
    :param prefix: network prefix to prepend; defaults to link-local "fe80::"
    :return: IPv6 address string, e.g. "fe80::1034:56ff:fe78:9abc"
    """
    # Remove the most common delimiters; dots, dashes, etc.
    mac_value = int(mac.translate(str.maketrans("", "", " .:-")), 16)
    # Split out the bytes that slot into the IPv6 address.
    # XOR the most significant byte with 0x02, inverting the
    # Universal / Local bit.
    high2 = mac_value >> 32 & 0xffff ^ 0x0200
    high1 = mac_value >> 24 & 0xff
    low1 = mac_value >> 16 & 0xff
    low2 = mac_value & 0xffff
    # BUG FIX: the format string previously began with ':' which, appended
    # to the default "fe80::" prefix, produced an invalid triple-colon
    # address ("fe80:::...").
    return prefix + '{:04x}:{:02x}ff:fe{:02x}:{:04x}'.format(
        high2, high1, low1, low2)
8,430
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L41-L67
[ "def", "restart", "(", "self", ",", "timeout", "=", "None", ")", ":", "msg", "=", "{", "\"value\"", ":", "\"Restart requested by \"", "+", "self", ".", "username", "+", "\"via the Splunk SDK for Python\"", "}", "# This message will be deleted once the server actually restarts.", "self", ".", "messages", ".", "create", "(", "name", "=", "\"restart_required\"", ",", "*", "*", "msg", ")", "result", "=", "self", ".", "post", "(", "\"server/control/restart\"", ")", "if", "timeout", "is", "None", ":", "return", "result", "start", "=", "datetime", ".", "now", "(", ")", "diff", "=", "timedelta", "(", "seconds", "=", "timeout", ")", "while", "datetime", ".", "now", "(", ")", "-", "start", "<", "diff", ":", "try", ":", "self", ".", "login", "(", ")", "if", "not", "self", ".", "restart_required", ":", "return", "result", "except", "Exception", "as", "e", ":", "sleep", "(", "1", ")", "raise", "Exception", "(", "\"Operation time out.\"", ")" ]
Method run when data is received from the device
def datagram_received(self, data, addr):
    """Method run when data is received from the device.

    Decodes the incoming LIFX packet, refreshes the last-seen timestamp and
    matches the packet against any pending request recorded in
    ``self.message`` (keyed by sequence number).  A matching response may
    update locally cached state via a ``resp_set_*`` handler, fire the
    caller's callback and wake the waiting event.  Unsolicited packets are
    handed to ``self.default_callb`` when one is set.

    :param data: raw bytes of the received datagram
    :param addr: sender address (unused here)
    """
    # Any traffic from the device means it is alive: (re-)register it.
    self.register()
    response = unpack_lifx_message(data)
    self.lastmsg = datetime.datetime.now()
    if response.seq_num in self.message:
        # A request with this sequence number is still pending.
        response_type, myevent, callb = self.message[response.seq_num]
        if type(response) == response_type:
            if response.source_id == self.source_id:
                if "State" in response.__class__.__name__:
                    # e.g. StateLabel -> resp_set_label: cache the value locally.
                    setmethod = "resp_set_" + response.__class__.__name__.replace("State", "").lower()
                    if setmethod in dir(self) and callable(getattr(self, setmethod)):
                        getattr(self, setmethod)(response)
                if callb:
                    callb(self, response)
                myevent.set()
            del(self.message[response.seq_num])
        elif type(response) == Acknowledgement:
            # An ACK arrived while a different response type is expected;
            # keep the entry pending.  NOTE(review): presumably the
            # ack+response request flow — confirm against the senders.
            pass
        else:
            # Unexpected reply type for this sequence number: drop the entry.
            del(self.message[response.seq_num])
    elif self.default_callb:
        # Unsolicited message: hand it to the catch-all callback.
        self.default_callb(response)
8,431
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L146-L180
[ "def", "load_files", "(", "self", ",", "path", ")", ":", "if", "self", ".", "verbose", "==", "2", ":", "print", "(", "\"Indexing {}\"", ".", "format", "(", "path", ")", ")", "for", "filename", "in", "os", ".", "listdir", "(", "path", ")", ":", "file_path", "=", "path", "+", "\"/\"", "+", "filename", "if", "os", ".", "path", ".", "isdir", "(", "file_path", ")", ":", "self", ".", "load_files", "(", "file_path", ")", "elif", "filename", ".", "endswith", "(", "\".yaml\"", ")", "or", "filename", ".", "endswith", "(", "\".yml\"", ")", ":", "self", ".", "unfold_yaml", "(", "file_path", ")" ]
Proxy method to register the device with the parent .
def register(self):
    """Proxy method to register the device with the parent.

    Idempotent: a device that is already registered is left untouched.
    """
    if self.registered:
        return
    self.registered = True
    if self.parent:
        self.parent.register(self)
8,432
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L182-L188
[ "def", "pages", "(", "self", ")", ":", "# protected access has to be permitted here to not close the paginator's pages", "# pylint: disable=protected-access", "paginator_pages", "=", "list", "(", "self", ".", "paginator", ".", "_pages", ")", "if", "len", "(", "self", ".", "paginator", ".", "_current_page", ")", ">", "1", ":", "paginator_pages", ".", "append", "(", "'\\n'", ".", "join", "(", "self", ".", "paginator", ".", "_current_page", ")", "+", "'\\n'", "+", "(", "self", ".", "paginator", ".", "suffix", "or", "''", ")", ")", "# pylint: enable=protected-access", "return", "paginator_pages" ]
Proxy method to unregister the device with the parent .
def unregister(self):
    """Proxy method to unregister the device with the parent.

    The device is only dropped when it has been silent for longer than
    ``unregister_timeout`` seconds; a recent message keeps it registered.
    """
    if not self.registered:
        return
    # Only if we have not received any message recently.
    cutoff = datetime.datetime.now() - datetime.timedelta(
        seconds=self.unregister_timeout)
    if cutoff > self.lastmsg:
        self.registered = False
        if self.parent:
            self.parent.unregister(self)
8,433
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L190-L198
[ "def", "_HandleApi", "(", "self", ",", "request", ")", ":", "# Checks CSRF token. CSRF token cookie is updated when homepage is visited", "# or via GetPendingUserNotificationsCount API call.", "ValidateCSRFTokenOrRaise", "(", "request", ")", "response", "=", "http_api", ".", "RenderHttpResponse", "(", "request", ")", "# GetPendingUserNotificationsCount is an API method that is meant", "# to be invoked very often (every 10 seconds). So it's ideal", "# for updating the CSRF token.", "# We should also store the CSRF token if it wasn't yet stored at all.", "if", "(", "(", "\"csrftoken\"", "not", "in", "request", ".", "cookies", ")", "or", "response", ".", "headers", ".", "get", "(", "\"X-API-Method\"", ",", "\"\"", ")", "==", "\"GetPendingUserNotificationsCount\"", ")", ":", "StoreCSRFCookie", "(", "request", ".", "user", ",", "response", ")", "return", "response" ]
Coroutine used to send message to the device when no response is needed .
async def fire_sending(self, msg, num_repeats):
    """Coroutine used to send a message to the device when no response is needed.

    The packed message is transmitted ``num_repeats`` times (defaulting to
    ``self.retry_count``) with a short pause between sends.

    :param msg: message object providing ``packed_message``
    :param num_repeats: number of transmissions, or None for the default
    """
    repeats = self.retry_count if num_repeats is None else num_repeats
    for _ in range(repeats):
        if self.transport:
            self.transport.sendto(msg.packed_message)
        # Brief pause so repeated packets do not pile up on the wire.
        await aio.sleep(0.05)
8,434
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L214-L231
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
Coroutine used to send message to the device when a response or ack is needed .
async def try_sending(self, msg, timeout_secs, max_attempts):
    """Coroutine used to send a message to the device when a response or ack is needed.

    Re-sends ``msg`` up to ``max_attempts`` times, each time waiting
    ``timeout_secs`` for the reply handler to set the per-request event.
    If every attempt times out, the pending entry is removed, the caller's
    callback is invoked with ``None`` and the device is unregistered.

    :param msg: message object providing ``seq_num`` and ``packed_message``
    :param timeout_secs: per-attempt wait; None means ``self.timeout``
    :param max_attempts: retry limit; None means ``self.retry_count``
    """
    if timeout_secs is None:
        timeout_secs = self.timeout
    if max_attempts is None:
        max_attempts = self.retry_count
    attempts = 0
    while attempts < max_attempts:
        if msg.seq_num not in self.message:
            # The reply already arrived and the entry was cleaned up.
            return
        event = aio.Event()
        # Slot 1 of the pending entry is the event the reply handler sets.
        self.message[msg.seq_num][1] = event
        attempts += 1
        if self.transport:
            self.transport.sendto(msg.packed_message)
        try:
            myresult = await aio.wait_for(event.wait(), timeout_secs)
            break
        except Exception as inst:
            # NOTE(review): broad catch — presumably intended for
            # asyncio.TimeoutError from wait_for only; confirm.
            if attempts >= max_attempts:
                if msg.seq_num in self.message:
                    callb = self.message[msg.seq_num][2]
                    if callb:
                        callb(self, None)
                    del(self.message[msg.seq_num])  # It's dead Jim
                self.unregister()
8,435
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L253-L293
[ "def", "get_current_time", "(", "self", ")", ":", "current_time", "=", "c_double", "(", ")", "self", ".", "library", ".", "get_current_time", ".", "argtypes", "=", "[", "POINTER", "(", "c_double", ")", "]", "self", ".", "library", ".", "get_current_time", ".", "restype", "=", "None", "self", ".", "library", ".", "get_current_time", "(", "byref", "(", "current_time", ")", ")", "return", "current_time", ".", "value" ]
Method to send a message expecting to receive an ACK .
def req_with_ack(self, msg_type, payload, callb=None, timeout_secs=None,
                 max_attempts=None):
    """Method to send a message expecting to receive an ACK.

    Builds the message with the next sequence number, records the pending
    exchange (expected reply type, event slot, callback) and schedules the
    retrying sender coroutine on the event loop.

    :param msg_type: message class to instantiate
    :param payload: payload dict handed to the message constructor
    :param callb: optional callback fired when the ACK arrives
    :param timeout_secs: per-attempt wait forwarded to ``try_sending``
    :param max_attempts: retry limit forwarded to ``try_sending``
    :return: always True
    """
    message = msg_type(
        self.mac_addr,
        self.source_id,
        seq_num=self.seq_next(),
        payload=payload,
        ack_requested=True,
        response_requested=False,
    )
    self.message[message.seq_num] = [Acknowledgement, None, callb]
    self.loop.create_task(
        self.try_sending(message, timeout_secs, max_attempts))
    return True
8,436
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L296-L315
[ "def", "configure", "(", "self", ",", "organization", ",", "base_url", "=", "''", ",", "ttl", "=", "''", ",", "max_ttl", "=", "''", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'organization'", ":", "organization", ",", "'base_url'", ":", "base_url", ",", "'ttl'", ":", "ttl", ",", "'max_ttl'", ":", "max_ttl", ",", "}", "api_path", "=", "'/v1/auth/{mount_point}/config'", ".", "format", "(", "mount_point", "=", "mount_point", ")", "return", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")" ]
Convenience method to request the label from the device
def get_label(self, callb=None):
    """Convenience method to request the label from the device.

    If the label is already cached it is returned immediately; otherwise a
    GetLabel request is issued whose response updates the cache (and fires
    ``callb``, when given).

    :param callb: optional callback invoked with (device, response)
    :return: the currently cached label (may still be None)
    """
    if self.label is None:
        update = partial(self.resp_set_label)
        if callb:
            def wrapped(device, resp):
                return (update(resp), callb(device, resp))
        else:
            def wrapped(device, resp):
                return update(resp)
        self.req_with_resp(GetLabel, StateLabel, callb=wrapped)
    return self.label
8,437
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L367-L388
[ "def", "_page_update", "(", "self", ",", "event", ")", ":", "try", ":", "if", "event", ".", "schema", "==", "'wikipage'", ":", "self", ".", "_update_index", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "log", "(", "\"Page creation notification error: \"", ",", "event", ",", "e", ",", "type", "(", "e", ")", ",", "lvl", "=", "error", ")" ]
Convenience method to set the label of the device
def set_label(self, value, callb=None):
    """Convenience method to set the label of the device.

    Labels longer than 32 characters are truncated before sending.  The
    acknowledgement updates the cached label (and fires ``callb``, when
    given).

    :param value: new label text
    :param callb: optional callback invoked with (device, response)
    """
    if len(value) > 32:
        value = value[:32]
    update = partial(self.resp_set_label, label=value)
    if callb:
        ack_callb = lambda device, resp: (update(resp), callb(device, resp))
    else:
        ack_callb = lambda device, resp: update(resp)
    self.req_with_ack(SetLabel, {"label": value}, ack_callb)
8,438
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L390-L410
[ "def", "_read_columns_file", "(", "f", ")", ":", "try", ":", "columns", "=", "json", ".", "loads", "(", "open", "(", "f", ",", "'r'", ")", ".", "read", "(", ")", ",", "object_pairs_hook", "=", "collections", ".", "OrderedDict", ")", "except", "Exception", "as", "err", ":", "raise", "InvalidColumnsFileError", "(", "\"There was an error while reading {0}: {1}\"", ".", "format", "(", "f", ",", "err", ")", ")", "# Options are not supported yet:", "if", "'__options'", "in", "columns", ":", "del", "columns", "[", "'__options'", "]", "return", "columns" ]
Convenience method to request the location from the device
def get_location ( self , callb = None ) : if self . location is None : mypartial = partial ( self . resp_set_location ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) response = self . req_with_resp ( GetLocation , StateLocation , callb = mycallb ) return self . location
8,439
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L420-L441
[ "def", "_create_update_tracking_related_event", "(", "instance", ")", ":", "events", "=", "{", "}", "# Create a dict mapping related model field to modified fields", "for", "field", ",", "related_fields", "in", "instance", ".", "_tracked_related_fields", ".", "items", "(", ")", ":", "if", "not", "isinstance", "(", "instance", ".", "_meta", ".", "get_field", "(", "field", ")", ",", "ManyToManyField", ")", ":", "if", "isinstance", "(", "instance", ".", "_meta", ".", "get_field", "(", "field", ")", ",", "ForeignKey", ")", ":", "# Compare pk", "value", "=", "getattr", "(", "instance", ",", "'{0}_id'", ".", "format", "(", "field", ")", ")", "else", ":", "value", "=", "getattr", "(", "instance", ",", "field", ")", "if", "instance", ".", "_original_fields", "[", "field", "]", "!=", "value", ":", "for", "related_field", "in", "related_fields", ":", "events", ".", "setdefault", "(", "related_field", ",", "[", "]", ")", ".", "append", "(", "field", ")", "# Create the events from the events dict", "for", "related_field", ",", "fields", "in", "events", ".", "items", "(", ")", ":", "try", ":", "related_instances", "=", "getattr", "(", "instance", ",", "related_field", "[", "1", "]", ")", "except", "ObjectDoesNotExist", ":", "continue", "# FIXME: isinstance(related_instances, RelatedManager ?)", "if", "hasattr", "(", "related_instances", ",", "'all'", ")", ":", "related_instances", "=", "related_instances", ".", "all", "(", ")", "else", ":", "related_instances", "=", "[", "related_instances", "]", "for", "related_instance", "in", "related_instances", ":", "event", "=", "_create_event", "(", "related_instance", ",", "UPDATE", ")", "for", "field", "in", "fields", ":", "fieldname", "=", "'{0}__{1}'", ".", "format", "(", "related_field", "[", "0", "]", ",", "field", ")", "_create_tracked_field", "(", "event", ",", "instance", ",", "field", ",", "fieldname", "=", "fieldname", ")" ]
Convenience method to request the group from the device
def get_group ( self , callb = None ) : if self . group is None : mypartial = partial ( self . resp_set_group ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) response = self . req_with_resp ( GetGroup , StateGroup , callb = callb ) return self . group
8,440
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L460-L481
[ "def", "document_type", "(", "self", ",", "key", ",", "value", ")", ":", "schema", "=", "load_schema", "(", "'hep'", ")", "publication_type_schema", "=", "schema", "[", "'properties'", "]", "[", "'publication_type'", "]", "valid_publication_types", "=", "publication_type_schema", "[", "'items'", "]", "[", "'enum'", "]", "document_type", "=", "self", ".", "get", "(", "'document_type'", ",", "[", "]", ")", "publication_type", "=", "self", ".", "get", "(", "'publication_type'", ",", "[", "]", ")", "a_values", "=", "force_list", "(", "value", ".", "get", "(", "'a'", ")", ")", "for", "a_value", "in", "a_values", ":", "normalized_a_value", "=", "a_value", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "normalized_a_value", "==", "'arxiv'", ":", "continue", "# XXX: ignored.", "elif", "normalized_a_value", "==", "'citeable'", ":", "self", "[", "'citeable'", "]", "=", "True", "elif", "normalized_a_value", "==", "'core'", ":", "self", "[", "'core'", "]", "=", "True", "elif", "normalized_a_value", "==", "'noncore'", ":", "self", "[", "'core'", "]", "=", "False", "elif", "normalized_a_value", "==", "'published'", ":", "self", "[", "'refereed'", "]", "=", "True", "elif", "normalized_a_value", "==", "'withdrawn'", ":", "self", "[", "'withdrawn'", "]", "=", "True", "elif", "normalized_a_value", "==", "'deleted'", ":", "self", "[", "'deleted'", "]", "=", "True", "elif", "normalized_a_value", "in", "COLLECTIONS_MAP", ":", "self", ".", "setdefault", "(", "'_collections'", ",", "[", "]", ")", ".", "append", "(", "COLLECTIONS_MAP", "[", "normalized_a_value", "]", ")", "elif", "normalized_a_value", "in", "DOCUMENT_TYPE_MAP", ":", "document_type", ".", "append", "(", "DOCUMENT_TYPE_MAP", "[", "normalized_a_value", "]", ")", "elif", "normalized_a_value", "in", "valid_publication_types", ":", "publication_type", ".", "append", "(", "normalized_a_value", ")", "c_value", "=", "force_single_element", "(", "value", ".", "get", "(", "'c'", ",", "''", ")", ")", "normalized_c_value", 
"=", "c_value", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "normalized_c_value", "==", "'deleted'", ":", "self", "[", "'deleted'", "]", "=", "True", "self", "[", "'publication_type'", "]", "=", "publication_type", "return", "document_type" ]
Convenience method to request the wifi firmware info from the device
def get_wififirmware ( self , callb = None ) : if self . wifi_firmware_version is None : mypartial = partial ( self . resp_set_wififirmware ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) response = self . req_with_resp ( GetWifiFirmware , StateWifiFirmware , mycallb ) return ( self . wifi_firmware_version , self . wifi_firmware_build_timestamp )
8,441
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L560-L581
[ "def", "remove_targets", "(", "self", ",", "type", ",", "kept", "=", "None", ")", ":", "if", "kept", "is", "None", ":", "kept", "=", "[", "i", "for", "i", ",", "x", "in", "enumerate", "(", "self", ".", "_targets", ")", "if", "not", "isinstance", "(", "x", ",", "type", ")", "]", "if", "len", "(", "kept", ")", "==", "len", "(", "self", ".", "_targets", ")", ":", "return", "self", "self", ".", "_targets", "=", "[", "self", ".", "_targets", "[", "x", "]", "for", "x", "in", "kept", "]", "self", ".", "_labels", "=", "[", "self", ".", "_labels", "[", "x", "]", "for", "x", "in", "kept", "]", "if", "not", "self", ".", "_groups", ":", "return", "self", "index_map", "=", "{", "o_idx", ":", "n_idx", "for", "n_idx", ",", "o_idx", "in", "zip", "(", "range", "(", "len", "(", "self", ".", "_targets", ")", ")", ",", "kept", ")", "}", "kept", "=", "set", "(", "kept", ")", "for", "idx", ",", "grp", "in", "enumerate", "(", "self", ".", "_groups", ")", ":", "self", ".", "_groups", "[", "idx", "]", "=", "_sos_group", "(", "[", "index_map", "[", "x", "]", "for", "x", "in", "grp", ".", "_indexes", "if", "x", "in", "kept", "]", ",", "[", "y", "for", "x", ",", "y", "in", "zip", "(", "grp", ".", "_indexes", ",", "grp", ".", "_labels", ")", "if", "x", "in", "kept", "]", ")", ".", "set", "(", "*", "*", "grp", ".", "_dict", ")", "return", "self" ]
Default callback for get_wififirmware
def resp_set_wififirmware ( self , resp ) : if resp : self . wifi_firmware_version = float ( str ( str ( resp . version >> 16 ) + "." + str ( resp . version & 0xff ) ) ) self . wifi_firmware_build_timestamp = resp . build
8,442
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L583-L588
[ "def", "touch_object", "(", "self", ",", "objects", ":", "Set", "[", "Object", "]", ")", "->", "Set", "[", "Object", "]", ":", "objects_per_box", "=", "self", ".", "_separate_objects_by_boxes", "(", "objects", ")", "return_set", "=", "set", "(", ")", "for", "box", ",", "box_objects", "in", "objects_per_box", ".", "items", "(", ")", ":", "candidate_objects", "=", "box", ".", "objects", "for", "object_", "in", "box_objects", ":", "for", "candidate_object", "in", "candidate_objects", ":", "if", "self", ".", "_objects_touch_each_other", "(", "object_", ",", "candidate_object", ")", ":", "return_set", ".", "add", "(", "candidate_object", ")", "return", "return_set" ]
Convenience method to request the wifi info from the device
def get_wifiinfo ( self , callb = None ) : response = self . req_with_resp ( GetWifiInfo , StateWifiInfo , callb = callb ) return None
8,443
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L591-L604
[ "def", "_modify_report", "(", "summary_path", ",", "out_dir", ")", ":", "summary_path", "=", "op", ".", "abspath", "(", "summary_path", ")", "template", "=", "op", ".", "normpath", "(", "op", ".", "join", "(", "op", ".", "dirname", "(", "op", ".", "realpath", "(", "template_seqcluster", ".", "__file__", ")", ")", ",", "\"report.rmd\"", ")", ")", "content", "=", "open", "(", "template", ")", ".", "read", "(", ")", "out_content", "=", "string", ".", "Template", "(", "content", ")", ".", "safe_substitute", "(", "{", "'path_abs'", ":", "summary_path", "}", ")", "out_file", "=", "op", ".", "join", "(", "out_dir", ",", "\"srna_report.rmd\"", ")", "with", "open", "(", "out_file", ",", "'w'", ")", "as", "out_handle", ":", "out_handle", ".", "write", "(", "out_content", ")", "return", "out_file" ]
Convenience method to request the device firmware info from the device
def get_hostfirmware ( self , callb = None ) : if self . host_firmware_version is None : mypartial = partial ( self . resp_set_hostfirmware ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) response = self . req_with_resp ( GetHostFirmware , StateHostFirmware , mycallb ) return ( self . host_firmware_version , self . host_firmware_build_timestamp )
8,444
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L607-L628
[ "def", "option", "(", "self", ",", "key", ",", "value", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "self", ".", "_container", ",", "Section", ")", ":", "raise", "ValueError", "(", "\"Options can only be added inside a section!\"", ")", "option", "=", "Option", "(", "key", ",", "value", ",", "container", "=", "self", ".", "_container", ",", "*", "*", "kwargs", ")", "option", ".", "value", "=", "value", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "option", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Default callback for get_hostfirmware
def resp_set_hostfirmware ( self , resp ) : if resp : self . host_firmware_version = float ( str ( str ( resp . version >> 16 ) + "." + str ( resp . version & 0xff ) ) ) self . host_firmware_build_timestamp = resp . build
8,445
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L630-L635
[ "def", "_set_local_file_path", "(", "self", ")", ":", "self", ".", "FILE_LOCAL", "=", "self", ".", "_transfer", ".", "get_env", "(", "'FILE_LOCAL'", ")", "if", "not", "self", ".", "FILE_LOCAL", ":", "filename", "=", "'{}_{}.{}'", ".", "format", "(", "str", "(", "self", ".", "_transfer", ".", "prefix", ")", ",", "str", "(", "self", ".", "_transfer", ".", "namespace", ")", ",", "str", "(", "self", ".", "file_extension", ")", ")", "self", ".", "FILE_LOCAL", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", ",", "filename", ")", "dirs", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "FILE_LOCAL", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dirs", ")", ":", "os", ".", "makedirs", "(", "dirs", ")", "try", ":", "open", "(", "self", ".", "FILE_LOCAL", ",", "\"rb+\"", ")", ".", "close", "(", ")", "except", ":", "open", "(", "self", ".", "FILE_LOCAL", ",", "\"a\"", ")", ".", "close", "(", ")" ]
Convenience method to request the device info from the device
def get_hostinfo ( self , callb = None ) : response = self . req_with_resp ( GetInfo , StateInfo , callb = callb ) return None
8,446
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L638-L651
[ "def", "option", "(", "self", ",", "key", ",", "value", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "self", ".", "_container", ",", "Section", ")", ":", "raise", "ValueError", "(", "\"Options can only be added inside a section!\"", ")", "option", "=", "Option", "(", "key", ",", "value", ",", "container", "=", "self", ".", "_container", ",", "*", "*", "kwargs", ")", "option", ".", "value", "=", "value", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "option", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Convenience method to request the version from the device
def get_version ( self , callb = None ) : if self . vendor is None : mypartial = partial ( self . resp_set_version ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) response = self . req_with_resp ( GetVersion , StateVersion , callb = mycallb ) return ( self . host_firmware_version , self . host_firmware_build_timestamp )
8,447
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L653-L674
[ "def", "setOverlayTextureColorSpace", "(", "self", ",", "ulOverlayHandle", ",", "eTextureColorSpace", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTextureColorSpace", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTextureColorSpace", ")", "return", "result" ]
Default callback for get_version
def resp_set_version ( self , resp ) : if resp : self . vendor = resp . vendor self . product = resp . product self . version = resp . version
8,448
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L676-L682
[ "def", "read_frames", "(", "cls", ",", "reader", ")", ":", "rval", "=", "deque", "(", ")", "while", "True", ":", "frame_start_pos", "=", "reader", ".", "tell", "(", ")", "try", ":", "frame", "=", "Frame", ".", "_read_frame", "(", "reader", ")", "except", "Reader", ".", "BufferUnderflow", ":", "# No more data in the stream", "frame", "=", "None", "except", "Reader", ".", "ReaderError", "as", "e", ":", "# Some other format error", "raise", "Frame", ".", "FormatError", ",", "str", "(", "e", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "-", "1", "]", "except", "struct", ".", "error", "as", "e", ":", "raise", "Frame", ".", "FormatError", ",", "str", "(", "e", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "-", "1", "]", "if", "frame", "is", "None", ":", "reader", ".", "seek", "(", "frame_start_pos", ")", "break", "rval", ".", "append", "(", "frame", ")", "return", "rval" ]
Default callback for set_power
def resp_set_lightpower ( self , resp , power_level = None ) : if power_level is not None : self . power_level = power_level elif resp : self . power_level = resp . power_level
8,449
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L834-L840
[ "def", "construct_error_message", "(", "driver_id", ",", "error_type", ",", "message", ",", "timestamp", ")", ":", "builder", "=", "flatbuffers", ".", "Builder", "(", "0", ")", "driver_offset", "=", "builder", ".", "CreateString", "(", "driver_id", ".", "binary", "(", ")", ")", "error_type_offset", "=", "builder", ".", "CreateString", "(", "error_type", ")", "message_offset", "=", "builder", ".", "CreateString", "(", "message", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataStart", "(", "builder", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddDriverId", "(", "builder", ",", "driver_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddType", "(", "builder", ",", "error_type_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddErrorMessage", "(", "builder", ",", "message_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddTimestamp", "(", "builder", ",", "timestamp", ")", "error_data_offset", "=", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataEnd", "(", "builder", ")", "builder", ".", "Finish", "(", "error_data_offset", ")", "return", "bytes", "(", "builder", ".", "Output", "(", ")", ")" ]
Convenience method to request the colour status from the device
def get_color ( self , callb = None ) : response = self . req_with_resp ( LightGet , LightState , callb = callb ) return self . color
8,450
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L843-L858
[ "def", "get_partition", "(", "self", ",", "num_x", "=", "1", ",", "num_y", "=", "1", ")", ":", "size_x", ",", "size_y", "=", "(", "self", ".", "max_x", "-", "self", ".", "min_x", ")", "/", "num_x", ",", "(", "self", ".", "max_y", "-", "self", ".", "min_y", ")", "/", "num_y", "return", "[", "[", "BBox", "(", "[", "self", ".", "min_x", "+", "i", "*", "size_x", ",", "self", ".", "min_y", "+", "j", "*", "size_y", ",", "self", ".", "min_x", "+", "(", "i", "+", "1", ")", "*", "size_x", ",", "self", ".", "min_y", "+", "(", "j", "+", "1", ")", "*", "size_y", "]", ",", "crs", "=", "self", ".", "crs", ")", "for", "j", "in", "range", "(", "num_y", ")", "]", "for", "i", "in", "range", "(", "num_x", ")", "]" ]
Convenience method to set the colour status of the device
def set_color ( self , value , callb = None , duration = 0 , rapid = False ) : if len ( value ) == 4 : mypartial = partial ( self . resp_set_light , color = value ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) #try: if rapid : self . fire_and_forget ( LightSetColor , { "color" : value , "duration" : duration } , num_repeats = 1 ) self . resp_set_light ( None , color = value ) if callb : callb ( self , None ) else : self . req_with_ack ( LightSetColor , { "color" : value , "duration" : duration } , callb = mycallb )
8,451
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L861-L892
[ "def", "_GetFileMappingsByPath", "(", "self", ",", "key_path_upper", ")", ":", "candidate_mappings", "=", "[", "]", "for", "mapping", "in", "self", ".", "_REGISTRY_FILE_MAPPINGS_NT", ":", "if", "key_path_upper", ".", "startswith", "(", "mapping", ".", "key_path_prefix", ".", "upper", "(", ")", ")", ":", "candidate_mappings", ".", "append", "(", "mapping", ")", "# Sort the candidate mappings by longest (most specific) match first.", "candidate_mappings", ".", "sort", "(", "key", "=", "lambda", "mapping", ":", "len", "(", "mapping", ".", "key_path_prefix", ")", ",", "reverse", "=", "True", ")", "for", "mapping", "in", "candidate_mappings", ":", "yield", "mapping" ]
Default callback for set_color
def resp_set_light ( self , resp , color = None ) : if color : self . color = color elif resp : self . power_level = resp . power_level self . color = resp . color self . label = resp . label . decode ( ) . replace ( "\x00" , "" )
8,452
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L897-L905
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
Convenience method to request the state of colour by zones from the device
def get_color_zones ( self , start_index , end_index = None , callb = None ) : if end_index is None : end_index = start_index + 7 args = { "start_index" : start_index , "end_index" : end_index , } self . req_with_resp ( MultiZoneGetColorZones , MultiZoneStateMultiZone , payload = args , callb = callb )
8,453
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L908-L930
[ "def", "guess_extension", "(", "amimetype", ",", "normalize", "=", "False", ")", ":", "ext", "=", "_mimes", ".", "guess_extension", "(", "amimetype", ")", "if", "ext", "and", "normalize", ":", "# Normalize some common magic mis-interpreation", "ext", "=", "{", "'.asc'", ":", "'.txt'", ",", "'.obj'", ":", "'.bin'", "}", ".", "get", "(", "ext", ",", "ext", ")", "from", "invenio", ".", "legacy", ".", "bibdocfile", ".", "api_normalizer", "import", "normalize_format", "return", "normalize_format", "(", "ext", ")", "return", "ext" ]
Convenience method to set the colour status zone of the device
def set_color_zones ( self , start_index , end_index , color , duration = 0 , apply = 1 , callb = None , rapid = False ) : if len ( color ) == 4 : args = { "start_index" : start_index , "end_index" : end_index , "color" : color , "duration" : duration , "apply" : apply , } mypartial = partial ( self . resp_set_multizonemultizone , args = args ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) if rapid : self . fire_and_forget ( MultiZoneSetColorZones , args , num_repeats = 1 ) mycallb ( self , None ) else : self . req_with_ack ( MultiZoneSetColorZones , args , callb = mycallb )
8,454
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L932-L975
[ "def", "_handle_relation_harness", "(", "self", ",", "line", ":", "str", ",", "position", ":", "int", ",", "tokens", ":", "Union", "[", "ParseResults", ",", "Dict", "]", ")", "->", "ParseResults", ":", "if", "not", "self", ".", "control_parser", ".", "citation", ":", "raise", "MissingCitationException", "(", "self", ".", "get_line_number", "(", ")", ",", "line", ",", "position", ")", "if", "not", "self", ".", "control_parser", ".", "evidence", ":", "raise", "MissingSupportWarning", "(", "self", ".", "get_line_number", "(", ")", ",", "line", ",", "position", ")", "missing_required_annotations", "=", "self", ".", "control_parser", ".", "get_missing_required_annotations", "(", ")", "if", "missing_required_annotations", ":", "raise", "MissingAnnotationWarning", "(", "self", ".", "get_line_number", "(", ")", ",", "line", ",", "position", ",", "missing_required_annotations", ")", "self", ".", "_handle_relation", "(", "tokens", ")", "return", "tokens" ]
Convenience method to request the infrared brightness from the device
def get_infrared ( self , callb = None ) : response = self . req_with_resp ( LightGetInfrared , LightStateInfrared , callb = callb ) return self . infrared_brightness
8,455
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1045-L1060
[ "def", "serialize_on_post_delete", "(", "sender", ",", "instance", ",", "using", ",", "*", "*", "kwargs", ")", ":", "try", ":", "wrapped_instance", "=", "site_offline_models", ".", "get_wrapped_instance", "(", "instance", ")", "except", "ModelNotRegistered", ":", "pass", "else", ":", "wrapped_instance", ".", "to_outgoing_transaction", "(", "using", ",", "created", "=", "False", ",", "deleted", "=", "True", ")" ]
Convenience method to set the infrared status of the device
def set_infrared ( self , infrared_brightness , callb = None , rapid = False ) : mypartial = partial ( self . resp_set_infrared , infrared_brightness = infrared_brightness ) if callb : mycallb = lambda x , y : ( mypartial ( y ) , callb ( x , y ) ) else : mycallb = lambda x , y : mypartial ( y ) if rapid : self . fire_and_forget ( LightSetInfrared , { "infrared_brightness" : infrared_brightness } , num_repeats = 1 ) self . resp_set_infrared ( None , infrared_brightness = infrared_brightness ) if callb : callb ( self , None ) else : self . req_with_ack ( LightSetInfrared , { "infrared_brightness" : infrared_brightness } , callb = mycallb )
8,456
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1063-L1092
[ "def", "query", "(", "conn_type", ",", "option", ",", "post_data", "=", "None", ")", ":", "if", "ticket", "is", "None", "or", "csrf", "is", "None", "or", "url", "is", "None", ":", "log", ".", "debug", "(", "'Not authenticated yet, doing that now..'", ")", "_authenticate", "(", ")", "full_url", "=", "'https://{0}:{1}/api2/json/{2}'", ".", "format", "(", "url", ",", "port", ",", "option", ")", "log", ".", "debug", "(", "'%s: %s (%s)'", ",", "conn_type", ",", "full_url", ",", "post_data", ")", "httpheaders", "=", "{", "'Accept'", ":", "'application/json'", ",", "'Content-Type'", ":", "'application/x-www-form-urlencoded'", ",", "'User-Agent'", ":", "'salt-cloud-proxmox'", "}", "if", "conn_type", "==", "'post'", ":", "httpheaders", "[", "'CSRFPreventionToken'", "]", "=", "csrf", "response", "=", "requests", ".", "post", "(", "full_url", ",", "verify", "=", "verify_ssl", ",", "data", "=", "post_data", ",", "cookies", "=", "ticket", ",", "headers", "=", "httpheaders", ")", "elif", "conn_type", "==", "'put'", ":", "httpheaders", "[", "'CSRFPreventionToken'", "]", "=", "csrf", "response", "=", "requests", ".", "put", "(", "full_url", ",", "verify", "=", "verify_ssl", ",", "data", "=", "post_data", ",", "cookies", "=", "ticket", ",", "headers", "=", "httpheaders", ")", "elif", "conn_type", "==", "'delete'", ":", "httpheaders", "[", "'CSRFPreventionToken'", "]", "=", "csrf", "response", "=", "requests", ".", "delete", "(", "full_url", ",", "verify", "=", "verify_ssl", ",", "data", "=", "post_data", ",", "cookies", "=", "ticket", ",", "headers", "=", "httpheaders", ")", "elif", "conn_type", "==", "'get'", ":", "response", "=", "requests", ".", "get", "(", "full_url", ",", "verify", "=", "verify_ssl", ",", "cookies", "=", "ticket", ")", "response", ".", "raise_for_status", "(", ")", "try", ":", "returned_data", "=", "response", ".", "json", "(", ")", "if", "'data'", "not", "in", "returned_data", ":", "raise", "SaltCloudExecutionFailure", "return", "returned_data", "[", 
"'data'", "]", "except", "Exception", ":", "log", ".", "error", "(", "'Error in trying to process JSON'", ")", "log", ".", "error", "(", "response", ")" ]
Start discovery task .
def start ( self , listen_ip = LISTEN_IP , listen_port = 0 ) : coro = self . loop . create_datagram_endpoint ( lambda : self , local_addr = ( listen_ip , listen_port ) ) self . task = self . loop . create_task ( coro ) return self . task
8,457
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1150-L1156
[ "def", "set_editor", "(", "self", ",", "editor", ")", ":", "if", "self", ".", "_editor", "is", "not", "None", ":", "try", ":", "self", ".", "_editor", ".", "offset_calculator", ".", "pic_infos_available", ".", "disconnect", "(", "self", ".", "_update", ")", "except", "(", "AttributeError", ",", "RuntimeError", ",", "ReferenceError", ")", ":", "# see https://github.com/OpenCobolIDE/OpenCobolIDE/issues/89", "pass", "self", ".", "_editor", "=", "weakref", ".", "proxy", "(", "editor", ")", "if", "editor", "else", "editor", "try", ":", "self", ".", "_editor", ".", "offset_calculator", ".", "pic_infos_available", ".", "connect", "(", "self", ".", "_update", ")", "except", "AttributeError", ":", "pass" ]
Method run when the UDP broadcast server is started
def connection_made ( self , transport ) : #print('started') self . transport = transport sock = self . transport . get_extra_info ( "socket" ) sock . setsockopt ( socket . SOL_SOCKET , socket . SO_REUSEADDR , 1 ) sock . setsockopt ( socket . SOL_SOCKET , socket . SO_BROADCAST , 1 ) self . loop . call_soon ( self . discover )
8,458
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1158-L1166
[ "def", "createAltHistoryPlot", "(", "self", ")", ":", "self", ".", "altHistRect", "=", "patches", ".", "Rectangle", "(", "(", "self", ".", "leftPos", "+", "(", "self", ".", "vertSize", "/", "10.0", ")", ",", "-", "0.25", ")", ",", "0.5", ",", "0.5", ",", "facecolor", "=", "'grey'", ",", "edgecolor", "=", "'none'", ",", "alpha", "=", "0.4", ",", "zorder", "=", "4", ")", "self", ".", "axes", ".", "add_patch", "(", "self", ".", "altHistRect", ")", "self", ".", "altPlot", ",", "=", "self", ".", "axes", ".", "plot", "(", "[", "self", ".", "leftPos", "+", "(", "self", ".", "vertSize", "/", "10.0", ")", ",", "self", ".", "leftPos", "+", "(", "self", ".", "vertSize", "/", "10.0", ")", "+", "0.5", "]", ",", "[", "0.0", ",", "0.0", "]", ",", "color", "=", "'k'", ",", "marker", "=", "None", ",", "zorder", "=", "4", ")", "self", ".", "altMarker", ",", "=", "self", ".", "axes", ".", "plot", "(", "self", ".", "leftPos", "+", "(", "self", ".", "vertSize", "/", "10.0", ")", "+", "0.5", ",", "0.0", ",", "marker", "=", "'o'", ",", "color", "=", "'k'", ",", "zorder", "=", "4", ")", "self", ".", "altText2", "=", "self", ".", "axes", ".", "text", "(", "self", ".", "leftPos", "+", "(", "4", "*", "self", ".", "vertSize", "/", "10.0", ")", "+", "0.5", ",", "0.0", ",", "'%.f m'", "%", "self", ".", "relAlt", ",", "color", "=", "'k'", ",", "size", "=", "self", ".", "fontSize", ",", "ha", "=", "'left'", ",", "va", "=", "'center'", ",", "zorder", "=", "4", ")" ]
Method run when data is received from the devices
def datagram_received ( self , data , addr ) : response = unpack_lifx_message ( data ) response . ip_addr = addr [ 0 ] mac_addr = response . target_addr if mac_addr == BROADCAST_MAC : return if type ( response ) == StateService and response . service == 1 : # only look for UDP services # discovered remote_port = response . port elif type ( response ) == LightState : # looks like the lights are volunteering LigthState after booting remote_port = UDP_BROADCAST_PORT else : return if self . ipv6prefix : family = socket . AF_INET6 remote_ip = mac_to_ipv6_linklocal ( mac_addr , self . ipv6prefix ) else : family = socket . AF_INET remote_ip = response . ip_addr if mac_addr in self . lights : # rediscovered light = self . lights [ mac_addr ] # nothing to do if light . registered : return light . cleanup ( ) light . ip_addr = remote_ip light . port = remote_port else : # newly discovered light = Light ( self . loop , mac_addr , remote_ip , remote_port , parent = self ) self . lights [ mac_addr ] = light coro = self . loop . create_datagram_endpoint ( lambda : light , family = family , remote_addr = ( remote_ip , remote_port ) ) light . task = self . loop . create_task ( coro )
8,459
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1168-L1222
[ "def", "summary", "(", "doc", ")", ":", "lines", "=", "[", "]", "if", "\"Summary\"", "in", "doc", "and", "len", "(", "doc", "[", "\"Summary\"", "]", ")", ">", "0", ":", "lines", ".", "append", "(", "fix_footnotes", "(", "\" \"", ".", "join", "(", "doc", "[", "\"Summary\"", "]", ")", ")", ")", "lines", ".", "append", "(", "\"\\n\"", ")", "if", "\"Extended Summary\"", "in", "doc", "and", "len", "(", "doc", "[", "\"Extended Summary\"", "]", ")", ">", "0", ":", "lines", ".", "append", "(", "fix_footnotes", "(", "\" \"", ".", "join", "(", "doc", "[", "\"Extended Summary\"", "]", ")", ")", ")", "lines", ".", "append", "(", "\"\\n\"", ")", "return", "lines" ]
Method to send a discovery message
def discover ( self ) : if self . transport : if self . discovery_countdown <= 0 : self . discovery_countdown = self . discovery_interval msg = GetService ( BROADCAST_MAC , self . source_id , seq_num = 0 , payload = { } , ack_requested = False , response_requested = True ) self . transport . sendto ( msg . generate_packed_message ( ) , ( self . broadcast_ip , UDP_BROADCAST_PORT ) ) else : self . discovery_countdown -= self . discovery_step self . loop . call_later ( self . discovery_step , self . discover )
8,460
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1224-L1234
[ "def", "generate_citation_counter", "(", "self", ")", ":", "cite_counter", "=", "dict", "(", ")", "filename", "=", "'%s.aux'", "%", "self", ".", "project_name", "with", "open", "(", "filename", ")", "as", "fobj", ":", "main_aux", "=", "fobj", ".", "read", "(", ")", "cite_counter", "[", "filename", "]", "=", "_count_citations", "(", "filename", ")", "for", "match", "in", "re", ".", "finditer", "(", "r'\\\\@input\\{(.*.aux)\\}'", ",", "main_aux", ")", ":", "filename", "=", "match", ".", "groups", "(", ")", "[", "0", "]", "try", ":", "counter", "=", "_count_citations", "(", "filename", ")", "except", "IOError", ":", "pass", "else", ":", "cite_counter", "[", "filename", "]", "=", "counter", "return", "cite_counter" ]
Return a list of local IP addresses on interfaces with LIFX bulbs .
async def scan ( self , timeout = 1 ) : adapters = await self . loop . run_in_executor ( None , ifaddr . get_adapters ) ips = [ ip . ip for adapter in ifaddr . get_adapters ( ) for ip in adapter . ips if ip . is_IPv4 ] if not ips : return [ ] tasks = [ ] discoveries = [ ] for ip in ips : manager = ScanManager ( ip ) lifx_discovery = LifxDiscovery ( self . loop , manager ) discoveries . append ( lifx_discovery ) lifx_discovery . start ( listen_ip = ip ) tasks . append ( self . loop . create_task ( manager . lifx_ip ( ) ) ) ( done , pending ) = await aio . wait ( tasks , timeout = timeout ) for discovery in discoveries : discovery . cleanup ( ) for task in pending : task . cancel ( ) return [ task . result ( ) for task in done ]
8,461
https://github.com/frawau/aiolifx/blob/9bd8c5e6d291f4c79314989402f7e2c6476d5851/aiolifx/aiolifx.py#L1269-L1294
[ "def", "export_draco", "(", "mesh", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.ply'", ")", "as", "temp_ply", ":", "temp_ply", ".", "write", "(", "export_ply", "(", "mesh", ")", ")", "temp_ply", ".", "flush", "(", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.drc'", ")", "as", "encoded", ":", "subprocess", ".", "check_output", "(", "[", "draco_encoder", ",", "'-qp'", ",", "# bits of quantization for position", "'28'", ",", "# since our tol.merge is 1e-8, 25 bits", "# more has a machine epsilon", "# smaller than that", "'-i'", ",", "temp_ply", ".", "name", ",", "'-o'", ",", "encoded", ".", "name", "]", ")", "encoded", ".", "seek", "(", "0", ")", "data", "=", "encoded", ".", "read", "(", ")", "return", "data" ]
High level import function that tries to determine the specific version of the data format used .
def _get_file_version ( filename ) : mat = sio . loadmat ( filename , squeeze_me = True ) version = mat [ 'MP' ] [ 'Version' ] . item ( ) del ( mat ) return version
8,462
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/importers/eit_fzj.py#L34-L54
[ "def", "result", "(", "self", ")", ":", "self", ".", "_reactor_check", "(", ")", "self", ".", "_event", ".", "wait", "(", ")", "if", "self", ".", "_exception", ":", "six", ".", "reraise", "(", "self", ".", "_exception", ".", "__class__", ",", "self", ".", "_exception", ",", "self", ".", "_traceback", ")", "if", "self", ".", "_result", "==", "NONE_RESULT", ":", "return", "None", "else", ":", "return", "self", ".", "_result" ]
Given a MD DataFrame return a Nx4 array which permutes the current injection dipoles .
def MD_ConfigsPermutate ( df_md ) : g_current_injections = df_md . groupby ( [ 'a' , 'b' ] ) ab = np . array ( list ( g_current_injections . groups . keys ( ) ) ) config_mgr = ConfigManager ( nr_of_electrodes = ab . max ( ) ) config_mgr . gen_configs_permutate ( ab , silent = True ) return config_mgr . configs
8,463
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/importers/eit_fzj.py#L57-L65
[ "async", "def", "update_lease_async", "(", "self", ",", "lease", ")", ":", "if", "lease", "is", "None", ":", "return", "False", "if", "not", "lease", ".", "token", ":", "return", "False", "_logger", ".", "debug", "(", "\"Updating lease %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ")", "# First, renew the lease to make sure the update will go through.", "if", "await", "self", ".", "renew_lease_async", "(", "lease", ")", ":", "try", ":", "await", "self", ".", "host", ".", "loop", ".", "run_in_executor", "(", "self", ".", "executor", ",", "functools", ".", "partial", "(", "self", ".", "storage_client", ".", "create_blob_from_text", ",", "self", ".", "lease_container_name", ",", "lease", ".", "partition_id", ",", "json", ".", "dumps", "(", "lease", ".", "serializable", "(", ")", ")", ",", "lease_id", "=", "lease", ".", "token", ")", ")", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_logger", ".", "error", "(", "\"Failed to update lease %r %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ",", "err", ")", "raise", "err", "else", ":", "return", "False", "return", "True" ]
Apply correction factors for a pseudo - 2D measurement setup . See Weigand and Kemna 2017 Biogeosciences for detailed information .
def apply_correction_factors ( df , correction_file ) : if isinstance ( correction_file , ( list , tuple ) ) : corr_data_raw = np . vstack ( [ np . loadtxt ( x ) for x in correction_file ] ) else : corr_data_raw = np . loadtxt ( correction_file ) if corr_data_raw . shape [ 1 ] == 3 : A = ( corr_data_raw [ : , 0 ] / 1e4 ) . astype ( int ) B = ( corr_data_raw [ : , 0 ] % 1e4 ) . astype ( int ) M = ( corr_data_raw [ : , 1 ] / 1e4 ) . astype ( int ) N = ( corr_data_raw [ : , 1 ] % 1e4 ) . astype ( int ) corr_data = np . vstack ( ( A , B , M , N , corr_data_raw [ : , 2 ] ) ) . T elif corr_data_raw . shape [ 1 ] == 5 : corr_data = corr_data_raw else : raise Exception ( 'error' ) corr_data [ : , 0 : 2 ] = np . sort ( corr_data [ : , 0 : 2 ] , axis = 1 ) corr_data [ : , 2 : 4 ] = np . sort ( corr_data [ : , 2 : 4 ] , axis = 1 ) if 'frequency' not in df . columns : raise Exception ( 'No frequency data found. Are you sure this is a seit data set?' ) df = df . reset_index ( ) gf = df . groupby ( [ 'a' , 'b' , 'm' , 'n' ] ) for key , item in gf . indices . items ( ) : # print('key', key) # print(item) item_norm = np . hstack ( ( np . sort ( key [ 0 : 2 ] ) , np . sort ( key [ 2 : 4 ] ) ) ) # print(item_norm) index = np . where ( ( corr_data [ : , 0 ] == item_norm [ 0 ] ) & ( corr_data [ : , 1 ] == item_norm [ 1 ] ) & ( corr_data [ : , 2 ] == item_norm [ 2 ] ) & ( corr_data [ : , 3 ] == item_norm [ 3 ] ) ) [ 0 ] # print(index, corr_data[index]) if len ( index ) == 0 : print ( key ) import IPython IPython . embed ( ) raise Exception ( 'No correction factor found for this configuration' ) factor = corr_data [ index , 4 ] # if key == (1, 4, 2, 3): # print(key) # print(factor) # print(df['R']) # print(df['k']) # import IPython # IPython.embed() # exit() # apply correction factor for col in ( 'r' , 'Zt' , 'Vmn' , 'rho_a' ) : if col in df . columns : df . ix [ item , col ] *= factor df . ix [ item , 'corr_fac' ] = factor return df , corr_data
8,464
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/importers/eit_fzj.py#L217-L283
[ "def", "blueprint", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "if", "self", ".", "endpoint", "is", "not", "None", "and", "'.'", "in", "self", ".", "endpoint", ":", "return", "self", ".", "endpoint", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "else", ":", "return", "None" ]
Returns a function for calculation of probability matrix of substitutions i - > j over time t .
def get_pij_method ( model = F81 , frequencies = None , kappa = None ) : if is_f81_like ( model ) : mu = get_mu ( frequencies ) return lambda t : get_f81_pij ( t , frequencies , mu ) if JTT == model : return get_jtt_pij if HKY == model : return lambda t : get_hky_pij ( t , frequencies , kappa )
8,465
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L87-L106
[ "def", "XOR", "(", "classical_reg1", ",", "classical_reg2", ")", ":", "left", ",", "right", "=", "unpack_reg_val_pair", "(", "classical_reg1", ",", "classical_reg2", ")", "return", "ClassicalExclusiveOr", "(", "left", ",", "right", ")" ]
Initializes the allowed state arrays for tips based on their states given by the feature .
def initialize_allowed_states ( tree , feature , states ) : allowed_states_feature = get_personalized_feature_name ( feature , ALLOWED_STATES ) state2index = dict ( zip ( states , range ( len ( states ) ) ) ) for node in tree . traverse ( ) : node_states = getattr ( node , feature , set ( ) ) if not node_states : allowed_states = np . ones ( len ( state2index ) , dtype = np . int ) else : allowed_states = np . zeros ( len ( state2index ) , dtype = np . int ) for state in node_states : allowed_states [ state2index [ state ] ] = 1 node . add_feature ( allowed_states_feature , allowed_states )
8,466
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L309-L333
[ "def", "cancelOrder", "(", "self", ",", "orderId", ")", ":", "self", ".", "ibConn", ".", "cancelOrder", "(", "orderId", ")", "# update order id for next time", "self", ".", "requestOrderIds", "(", ")", "return", "orderId" ]
Alters the bottom - up likelihood arrays for zero - distance tips to make sure they do not contradict with other zero - distance tip siblings .
def alter_zero_tip_allowed_states ( tree , feature ) : zero_parent2tips = defaultdict ( list ) allowed_state_feature = get_personalized_feature_name ( feature , ALLOWED_STATES ) for tip in tree : if tip . dist == 0 : state = getattr ( tip , feature , None ) if state is not None and state != '' : zero_parent2tips [ tip . up ] . append ( tip ) # adjust zero tips to contain all the zero tip options as states for parent , zero_tips in zero_parent2tips . items ( ) : # If there is a common state do nothing counts = None for tip in zero_tips : if counts is None : counts = getattr ( tip , allowed_state_feature ) . copy ( ) else : counts += getattr ( tip , allowed_state_feature ) if counts . max ( ) == len ( zero_tips ) : continue # Otherwise set all tip states to state union allowed_states = None for tip in zero_tips : if allowed_states is None : allowed_states = getattr ( tip , allowed_state_feature ) . copy ( ) else : tip_allowed_states = getattr ( tip , allowed_state_feature ) allowed_states [ np . nonzero ( tip_allowed_states ) ] = 1 tip . add_feature ( allowed_state_feature , allowed_states )
8,467
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L336-L375
[ "def", "StreamMetrics", "(", "self", ",", "request_iterator", ",", "context", ")", ":", "LOG", ".", "debug", "(", "\"StreamMetrics called\"", ")", "# set up arguments", "collect_args", "=", "(", "next", "(", "request_iterator", ")", ")", "max_metrics_buffer", "=", "0", "max_collect_duration", "=", "0", "cfg", "=", "Metric", "(", "pb", "=", "collect_args", ".", "Metrics_Arg", ".", "metrics", "[", "0", "]", ")", "try", ":", "max_metrics_buffer", "=", "int", "(", "cfg", ".", "config", "[", "\"max-metrics-buffer\"", "]", ")", "except", "Exception", "as", "ex", ":", "LOG", ".", "debug", "(", "\"Unable to get schedule parameters: {}\"", ".", "format", "(", "ex", ")", ")", "try", ":", "max_collect_duration", "=", "int", "(", "cfg", ".", "config", "[", "\"max-collect-duration\"", "]", ")", "except", "Exception", "as", "ex", ":", "LOG", ".", "debug", "(", "\"Unable to get schedule parameters: {}\"", ".", "format", "(", "ex", ")", ")", "if", "max_metrics_buffer", ">", "0", ":", "self", ".", "max_metrics_buffer", "=", "max_metrics_buffer", "if", "max_collect_duration", ">", "0", ":", "self", ".", "max_collect_duration", "=", "max_collect_duration", "# start collection thread", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_stream_wrapper", ",", "args", "=", "(", "collect_args", ",", ")", ",", ")", "thread", ".", "daemon", "=", "True", "thread", ".", "start", "(", ")", "# stream metrics", "metrics", "=", "[", "]", "metrics_to_stream", "=", "[", "]", "stream_timeout", "=", "self", ".", "max_collect_duration", "while", "context", ".", "is_active", "(", ")", ":", "try", ":", "# wait for metrics until timeout is reached", "t_start", "=", "time", ".", "time", "(", ")", "metrics", "=", "self", ".", "metrics_queue", ".", "get", "(", "block", "=", "True", ",", "timeout", "=", "stream_timeout", ")", "elapsed", "=", "round", "(", "time", ".", "time", "(", ")", "-", "t_start", ")", "stream_timeout", "-=", "elapsed", "except", "queue", ".", "Empty", ":", 
"LOG", ".", "debug", "(", "\"Max collect duration exceeded. Streaming {} metrics\"", ".", "format", "(", "len", "(", "metrics_to_stream", ")", ")", ")", "metrics_col", "=", "CollectReply", "(", "Metrics_Reply", "=", "MetricsReply", "(", "metrics", "=", "[", "m", ".", "pb", "for", "m", "in", "metrics_to_stream", "]", ")", ")", "metrics_to_stream", "=", "[", "]", "stream_timeout", "=", "self", ".", "max_collect_duration", "yield", "metrics_col", "else", ":", "for", "metric", "in", "metrics", ":", "metrics_to_stream", ".", "append", "(", "metric", ")", "if", "len", "(", "metrics_to_stream", ")", "==", "self", ".", "max_metrics_buffer", ":", "LOG", ".", "debug", "(", "\"Max metrics buffer reached. Streaming {} metrics\"", ".", "format", "(", "len", "(", "metrics_to_stream", ")", ")", ")", "metrics_col", "=", "CollectReply", "(", "Metrics_Reply", "=", "MetricsReply", "(", "metrics", "=", "[", "m", ".", "pb", "for", "m", "in", "metrics_to_stream", "]", ")", ")", "metrics_to_stream", "=", "[", "]", "stream_timeout", "=", "self", ".", "max_collect_duration", "yield", "metrics_col", "# stream metrics if max_metrics_buffer is 0 or enough metrics has been collected", "if", "self", ".", "max_metrics_buffer", "==", "0", ":", "LOG", ".", "debug", "(", "\"Max metrics buffer set to 0. Streaming {} metrics\"", ".", "format", "(", "len", "(", "metrics_to_stream", ")", ")", ")", "metrics_col", "=", "CollectReply", "(", "Metrics_Reply", "=", "MetricsReply", "(", "metrics", "=", "[", "m", ".", "pb", "for", "m", "in", "metrics_to_stream", "]", ")", ")", "metrics_to_stream", "=", "[", "]", "stream_timeout", "=", "self", ".", "max_collect_duration", "yield", "metrics_col", "# sent notification if stream has been stopped", "self", ".", "done_queue", ".", "put", "(", "True", ")" ]
Unalters the bottom - up likelihood arrays for zero - distance tips to contain ones only in their states .
def unalter_zero_tip_allowed_states ( tree , feature , state2index ) : allowed_state_feature = get_personalized_feature_name ( feature , ALLOWED_STATES ) for tip in tree : if tip . dist > 0 : continue state = getattr ( tip , feature , set ( ) ) if state : initial_allowed_states = np . zeros ( len ( state2index ) , np . int ) for _ in state : initial_allowed_states [ state2index [ _ ] ] = 1 allowed_states = getattr ( tip , allowed_state_feature ) & initial_allowed_states tip . add_feature ( allowed_state_feature , ( allowed_states if np . any ( allowed_states > 0 ) else initial_allowed_states ) )
8,468
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L378-L399
[ "def", "driverDebugRequest", "(", "self", ",", "unDeviceIndex", ",", "pchRequest", ",", "pchResponseBuffer", ",", "unResponseBufferSize", ")", ":", "fn", "=", "self", ".", "function_table", ".", "driverDebugRequest", "result", "=", "fn", "(", "unDeviceIndex", ",", "pchRequest", ",", "pchResponseBuffer", ",", "unResponseBufferSize", ")", "return", "result" ]
Unalters the joint tip states for zero - distance tips to contain only their states .
def unalter_zero_tip_joint_states ( tree , feature , state2index ) : lh_joint_state_feature = get_personalized_feature_name ( feature , BU_LH_JOINT_STATES ) for tip in tree : if tip . dist > 0 : continue state = getattr ( tip , feature , set ( ) ) if len ( state ) > 1 : allowed_indices = { state2index [ _ ] for _ in state } allowed_index = next ( iter ( allowed_indices ) ) joint_states = getattr ( tip , lh_joint_state_feature ) for i in range ( len ( state2index ) ) : if joint_states [ i ] not in allowed_indices : joint_states [ i ] = allowed_index elif len ( state ) == 1 : tip . add_feature ( lh_joint_state_feature , np . ones ( len ( state2index ) , np . int ) * state2index [ next ( iter ( state ) ) ] )
8,469
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L402-L425
[ "def", "Load", "(", "self", ")", ":", "for", "record", "in", "super", "(", "EventFileLoader", ",", "self", ")", ".", "Load", "(", ")", ":", "yield", "event_pb2", ".", "Event", ".", "FromString", "(", "record", ")" ]
Calculates marginal likelihoods for each tree node by multiplying state frequencies with their bottom - up and top - down likelihoods .
def calculate_marginal_likelihoods ( tree , feature , frequencies ) : bu_lh_feature = get_personalized_feature_name ( feature , BU_LH ) bu_lh_sf_feature = get_personalized_feature_name ( feature , BU_LH_SF ) td_lh_feature = get_personalized_feature_name ( feature , TD_LH ) td_lh_sf_feature = get_personalized_feature_name ( feature , TD_LH_SF ) lh_feature = get_personalized_feature_name ( feature , LH ) lh_sf_feature = get_personalized_feature_name ( feature , LH_SF ) allowed_state_feature = get_personalized_feature_name ( feature , ALLOWED_STATES ) for node in tree . traverse ( 'preorder' ) : likelihood = getattr ( node , bu_lh_feature ) * getattr ( node , td_lh_feature ) * frequencies * getattr ( node , allowed_state_feature ) node . add_feature ( lh_feature , likelihood ) node . add_feature ( lh_sf_feature , getattr ( node , td_lh_sf_feature ) + getattr ( node , bu_lh_sf_feature ) ) node . del_feature ( bu_lh_feature ) node . del_feature ( bu_lh_sf_feature ) node . del_feature ( td_lh_feature ) node . del_feature ( td_lh_sf_feature )
8,470
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L428-L455
[ "def", "hide_tool", "(", "self", ",", "context_name", ",", "tool_name", ")", ":", "data", "=", "self", ".", "_context", "(", "context_name", ")", "hidden_tools", "=", "data", "[", "\"hidden_tools\"", "]", "if", "tool_name", "not", "in", "hidden_tools", ":", "self", ".", "_validate_tool", "(", "context_name", ",", "tool_name", ")", "hidden_tools", ".", "add", "(", "tool_name", ")", "self", ".", "_flush_tools", "(", ")" ]
Normalizes each node marginal likelihoods to convert them to marginal probabilities .
def convert_likelihoods_to_probabilities ( tree , feature , states ) : lh_feature = get_personalized_feature_name ( feature , LH ) name2probs = { } for node in tree . traverse ( ) : lh = getattr ( node , lh_feature ) name2probs [ node . name ] = lh / lh . sum ( ) return pd . DataFrame . from_dict ( name2probs , orient = 'index' , columns = states )
8,471
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L476-L493
[ "def", "_overlapping", "(", "files", ")", ":", "segments", "=", "set", "(", ")", "for", "path", "in", "files", ":", "seg", "=", "file_segment", "(", "path", ")", "for", "s", "in", "segments", ":", "if", "seg", ".", "intersects", "(", "s", ")", ":", "return", "True", "segments", ".", "add", "(", "seg", ")", "return", "False" ]
Chooses node ancestral states based on their marginal probabilities using MPPA method .
def choose_ancestral_states_mppa ( tree , feature , states , force_joint = True ) : lh_feature = get_personalized_feature_name ( feature , LH ) allowed_state_feature = get_personalized_feature_name ( feature , ALLOWED_STATES ) joint_state_feature = get_personalized_feature_name ( feature , JOINT_STATE ) n = len ( states ) _ , state2array = get_state2allowed_states ( states , False ) num_scenarios = 1 unresolved_nodes = 0 num_states = 0 # If force_joint == True, # we make sure that the joint state is always chosen, # for this we sort the marginal probabilities array as [lowest_non_joint_mp, ..., highest_non_joint_mp, joint_mp] # select k in 1:n such as the correction between choosing 0, 0, ..., 1/k, ..., 1/k and our sorted array is min # and return the corresponding states for node in tree . traverse ( ) : marginal_likelihoods = getattr ( node , lh_feature ) marginal_probs = marginal_likelihoods / marginal_likelihoods . sum ( ) if force_joint : joint_index = getattr ( node , joint_state_feature ) joint_prob = marginal_probs [ joint_index ] marginal_probs = np . hstack ( ( np . sort ( np . delete ( marginal_probs , joint_index ) ) , [ joint_prob ] ) ) else : marginal_probs = np . sort ( marginal_probs ) best_k = n best_correstion = np . inf for k in range ( 1 , n + 1 ) : correction = np . hstack ( ( np . zeros ( n - k ) , np . ones ( k ) / k ) ) - marginal_probs correction = correction . dot ( correction ) if correction < best_correstion : best_correstion = correction best_k = k num_scenarios *= best_k num_states += best_k if force_joint : indices_selected = sorted ( range ( n ) , key = lambda _ : ( 0 if n == joint_index else 1 , - marginal_likelihoods [ _ ] ) ) [ : best_k ] else : indices_selected = sorted ( range ( n ) , key = lambda _ : - marginal_likelihoods [ _ ] ) [ : best_k ] if best_k == 1 : allowed_states = state2array [ indices_selected [ 0 ] ] else : allowed_states = np . zeros ( len ( states ) , dtype = np . 
int ) allowed_states [ indices_selected ] = 1 unresolved_nodes += 1 node . add_feature ( allowed_state_feature , allowed_states ) return num_scenarios , unresolved_nodes , num_states
8,472
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L496-L563
[ "def", "console_wait_for_keypress", "(", "flush", ":", "bool", ")", "->", "Key", ":", "key", "=", "Key", "(", ")", "lib", ".", "TCOD_console_wait_for_keypress_wrapper", "(", "key", ".", "key_p", ",", "flush", ")", "return", "key" ]
Chooses node ancestral states based on their marginal probabilities using MAP method .
def choose_ancestral_states_map ( tree , feature , states ) : lh_feature = get_personalized_feature_name ( feature , LH ) allowed_state_feature = get_personalized_feature_name ( feature , ALLOWED_STATES ) _ , state2array = get_state2allowed_states ( states , False ) for node in tree . traverse ( ) : marginal_likelihoods = getattr ( node , lh_feature ) node . add_feature ( allowed_state_feature , state2array [ marginal_likelihoods . argmax ( ) ] )
8,473
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L566-L582
[ "def", "console_wait_for_keypress", "(", "flush", ":", "bool", ")", "->", "Key", ":", "key", "=", "Key", "(", ")", "lib", ".", "TCOD_console_wait_for_keypress_wrapper", "(", "key", ".", "key_p", ",", "flush", ")", "return", "key" ]
Chooses node ancestral states based on their marginal probabilities using joint method .
def choose_ancestral_states_joint ( tree , feature , states , frequencies ) : lh_feature = get_personalized_feature_name ( feature , BU_LH ) lh_state_feature = get_personalized_feature_name ( feature , BU_LH_JOINT_STATES ) allowed_state_feature = get_personalized_feature_name ( feature , ALLOWED_STATES ) joint_state_feature = get_personalized_feature_name ( feature , JOINT_STATE ) _ , state2array = get_state2allowed_states ( states , False ) def chose_consistent_state ( node , state_index ) : node . add_feature ( joint_state_feature , state_index ) node . add_feature ( allowed_state_feature , state2array [ state_index ] ) for child in node . children : chose_consistent_state ( child , getattr ( child , lh_state_feature ) [ state_index ] ) chose_consistent_state ( tree , ( getattr ( tree , lh_feature ) * frequencies ) . argmax ( ) )
8,474
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/ml.py#L585-L609
[ "def", "win32_refresh_window", "(", "cls", ")", ":", "# Get console handle", "handle", "=", "windll", ".", "kernel32", ".", "GetConsoleWindow", "(", ")", "RDW_INVALIDATE", "=", "0x0001", "windll", ".", "user32", ".", "RedrawWindow", "(", "handle", ",", "None", ",", "None", ",", "c_uint", "(", "RDW_INVALIDATE", ")", ")" ]
Reformats the column string to make sure it contains only numerical letter characters or underscore .
def col_name2cat ( column ) : column_string = '' . join ( s for s in column . replace ( ' ' , '_' ) if s . isalnum ( ) or '_' == s ) return column_string
8,475
https://github.com/evolbioinfo/pastml/blob/df8a375841525738383e59548eed3441b07dbd3e/pastml/__init__.py#L14-L24
[ "def", "_parse_document", "(", "document", ":", "Path", ",", "system", ":", "System", "=", "None", ",", "profile", "=", "EProfile", ".", "FULL", ")", ":", "logger", ".", "debug", "(", "'parse document: {0}'", ".", "format", "(", "document", ")", ")", "stream", "=", "FileStream", "(", "str", "(", "document", ")", ",", "encoding", "=", "'utf-8'", ")", "system", "=", "FileSystem", ".", "_parse_stream", "(", "stream", ",", "system", ",", "document", ",", "profile", ")", "FileSystem", ".", "merge_annotations", "(", "system", ",", "document", ".", "stripext", "(", ")", "+", "'.yaml'", ")", "return", "system" ]
Get user config filename .
def get_user_config_filename ( appname = 'notify' ) : import platform system = platform . system ( ) if system == 'Windows' : rootname = os . path . join ( os . environ [ 'APPDATA' ] , appname ) filename = appname + ".cfg" prefix = '' elif system == 'Linux' : XDG_CONFIG_HOME = os . environ . get ( 'XDG_CONFIG_HOME' , None ) rootname = XDG_CONFIG_HOME or os . path . join ( '~' , '.config' ) rootname = os . path . expanduser ( rootname ) # check if XDG_CONFIG_HOME exists if not os . path . exists ( rootname ) and XDG_CONFIG_HOME is None : # XDG_CONFIG_HOME is not used rootname = os . path . expanduser ( '~' ) filename = appname + ".cfg" prefix = '.' else : rootname = os . path . join ( rootname , appname ) filename = appname + ".cfg" prefix = '' elif system == 'Darwin' : rootname = os . path . expanduser ( '~' ) filename = appname + ".cfg" prefix = '.' else : # Unknown rootname = os . path . expanduser ( '~' ) filename = appname + ".cfg" prefix = '' return os . path . join ( rootname , prefix + filename )
8,476
https://github.com/lambdalisue/notify/blob/1b6d7d1faa2cea13bfaa1f35130f279a0115e686/src/notify/conf.py#L24-L70
[ "def", "Nu_vertical_cylinder", "(", "Pr", ",", "Gr", ",", "L", "=", "None", ",", "D", "=", "None", ",", "Method", "=", "None", ",", "AvailableMethods", "=", "False", ")", ":", "def", "list_methods", "(", ")", ":", "methods", "=", "[", "]", "for", "key", ",", "values", "in", "vertical_cylinder_correlations", ".", "items", "(", ")", ":", "if", "values", "[", "4", "]", "or", "all", "(", "(", "L", ",", "D", ")", ")", ":", "methods", ".", "append", "(", "key", ")", "if", "'Popiel & Churchill'", "in", "methods", ":", "methods", ".", "remove", "(", "'Popiel & Churchill'", ")", "methods", ".", "insert", "(", "0", ",", "'Popiel & Churchill'", ")", "elif", "'McAdams, Weiss & Saunders'", "in", "methods", ":", "methods", ".", "remove", "(", "'McAdams, Weiss & Saunders'", ")", "methods", ".", "insert", "(", "0", ",", "'McAdams, Weiss & Saunders'", ")", "return", "methods", "if", "AvailableMethods", ":", "return", "list_methods", "(", ")", "if", "not", "Method", ":", "Method", "=", "list_methods", "(", ")", "[", "0", "]", "if", "Method", "in", "vertical_cylinder_correlations", ":", "if", "vertical_cylinder_correlations", "[", "Method", "]", "[", "4", "]", ":", "return", "vertical_cylinder_correlations", "[", "Method", "]", "[", "0", "]", "(", "Pr", "=", "Pr", ",", "Gr", "=", "Gr", ")", "else", ":", "return", "vertical_cylinder_correlations", "[", "Method", "]", "[", "0", "]", "(", "Pr", "=", "Pr", ",", "Gr", "=", "Gr", ",", "L", "=", "L", ",", "D", "=", "D", ")", "else", ":", "raise", "Exception", "(", "\"Correlation name not recognized; see the \"", "\"documentation for the available options.\"", ")" ]
Convert ConfigParser instance to argparse . Namespace
def config_to_options ( config ) : class Options : host = config . get ( 'smtp' , 'host' , raw = True ) port = config . getint ( 'smtp' , 'port' ) to_addr = config . get ( 'mail' , 'to_addr' , raw = True ) from_addr = config . get ( 'mail' , 'from_addr' , raw = True ) subject = config . get ( 'mail' , 'subject' , raw = True ) encoding = config . get ( 'mail' , 'encoding' , raw = True ) username = config . get ( 'auth' , 'username' ) opts = Options ( ) # format opts . from_addr % { 'host' : opts . host , 'prog' : 'notify' } opts . to_addr % { 'host' : opts . host , 'prog' : 'notify' } return opts
8,477
https://github.com/lambdalisue/notify/blob/1b6d7d1faa2cea13bfaa1f35130f279a0115e686/src/notify/conf.py#L73-L99
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", 
"word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Create default ConfigParser instance
def create_default_config ( ) : import codecs config = ConfigParser . SafeConfigParser ( ) config . readfp ( StringIO ( DEFAULT_CONFIG ) ) # Load user settings filename = get_user_config_filename ( ) if not os . path . exists ( filename ) : from wizard import setup_wizard setup_wizard ( config ) else : try : fi = codecs . open ( filename , 'r' , encoding = 'utf-8' ) config . readfp ( fi ) finally : fi . close ( ) return config
8,478
https://github.com/lambdalisue/notify/blob/1b6d7d1faa2cea13bfaa1f35130f279a0115e686/src/notify/conf.py#L102-L121
[ "def", "draw", "(", "self", ",", "milliseconds", ",", "surface", ")", ":", "self", ".", "drawn_rects", "=", "[", "]", "cam", "=", "Ragnarok", ".", "get_world", "(", ")", ".", "Camera", "cX", ",", "cY", ",", "cXMax", ",", "cYMax", "=", "cam", ".", "get_cam_bounds", "(", ")", "#Draw out only the tiles visible to the camera.", "start_pos", "=", "self", ".", "pixels_to_tiles", "(", "(", "cX", ",", "cY", ")", ")", "start_pos", "-=", "Vector2", "(", "1", ",", "1", ")", "end_pos", "=", "self", ".", "pixels_to_tiles", "(", "(", "cXMax", ",", "cYMax", ")", ")", "end_pos", "+=", "Vector2", "(", "1", ",", "1", ")", "start_pos", ".", "X", ",", "start_pos", ".", "Y", "=", "self", ".", "clamp_within_range", "(", "start_pos", ".", "X", ",", "start_pos", ".", "Y", ")", "end_pos", ".", "X", ",", "end_pos", ".", "Y", "=", "self", ".", "clamp_within_range", "(", "end_pos", ".", "X", ",", "end_pos", ".", "Y", ")", "cam_pos", "=", "cam", ".", "get_world_pos", "(", ")", "for", "x", "in", "range", "(", "start_pos", ".", "X", ",", "end_pos", ".", "X", "+", "1", ")", ":", "for", "y", "in", "range", "(", "start_pos", ".", "Y", ",", "end_pos", ".", "Y", "+", "1", ")", ":", "tile", "=", "self", ".", "tiles", "[", "y", "]", "[", "x", "]", "translate_posX", "=", "tile", ".", "coords", ".", "X", "-", "cam_pos", ".", "X", "translate_posY", "=", "tile", ".", "coords", ".", "Y", "-", "cam_pos", ".", "Y", "surface", ".", "blit", "(", "self", ".", "spritesheet", ".", "image", ",", "(", "translate_posX", ",", "translate_posY", ")", ",", "tile", ".", "source", ",", "special_flags", "=", "0", ")" ]
Return True if data container has multiple timesteps .
def has_multiple_timesteps ( data ) : if "timestep" in data . keys ( ) : if len ( np . unique ( data [ "timestep" ] ) ) > 1 : return True return False
8,479
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/utils/helper_functions.py#L83-L88
[ "def", "clean", "(", "self", ")", ":", "if", "self", ".", "_initialized", ":", "logger", ".", "info", "(", "\"brace yourselves, removing %r\"", ",", "self", ".", "path", ")", "shutil", ".", "rmtree", "(", "self", ".", "path", ")" ]
Split data into multiple timesteps .
def split_timesteps ( data , consistent_abmn = False ) : if has_multiple_timesteps ( data ) : grouped = data . groupby ( "timestep" ) return [ group [ 1 ] for group in grouped ] else : return data
8,480
https://github.com/geophysics-ubonn/reda/blob/46a939729e40c7c4723315c03679c40761152e9e/lib/reda/utils/helper_functions.py#L90-L96
[ "def", "arcball_constrain_to_axis", "(", "point", ",", "axis", ")", ":", "v", "=", "np", ".", "array", "(", "point", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "a", "=", "np", ".", "array", "(", "axis", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "v", "-=", "a", "*", "np", ".", "dot", "(", "a", ",", "v", ")", "# on plane", "n", "=", "vector_norm", "(", "v", ")", "if", "n", ">", "_EPS", ":", "if", "v", "[", "2", "]", "<", "0.0", ":", "np", ".", "negative", "(", "v", ",", "v", ")", "v", "/=", "n", "return", "v", "if", "a", "[", "2", "]", "==", "1.0", ":", "return", "np", ".", "array", "(", "[", "1.0", ",", "0.0", ",", "0.0", "]", ")", "return", "unit_vector", "(", "[", "-", "a", "[", "1", "]", ",", "a", "[", "0", "]", ",", "0.0", "]", ")" ]
Parses input text matches and replaces using avrodict
def parse ( text ) : # Sanitize text case to meet phonetic comparison standards fixed_text = validate . fix_string_case ( utf ( text ) ) # prepare output list output = [ ] # cursor end point cur_end = 0 # iterate through input text for cur , i in enumerate ( fixed_text ) : # Trap characters with unicode encoding errors try : i . encode ( 'utf-8' ) except UnicodeDecodeError : uni_pass = False else : uni_pass = True # Default value for match match = { 'matched' : False } # Check cur is greater than or equals cur_end. If cursor is in # a position that has alread been processed/replaced, we don't # process anything at all if not uni_pass : cur_end = cur + 1 output . append ( i ) elif cur >= cur_end and uni_pass : # Try looking in non rule patterns with current string portion match = match_non_rule_patterns ( fixed_text , cur ) # Check if non rule patterns have matched if match [ "matched" ] : output . append ( match [ "replaced" ] ) cur_end = cur + len ( match [ "found" ] ) else : # if non rule patterns have not matched, try rule patterns match = match_rule_patterns ( fixed_text , cur ) # Check if rule patterns have matched if match [ "matched" ] : # Update cur_end as cursor + length of match found cur_end = cur + len ( match [ "found" ] ) # Process its rules replaced = process_rules ( rules = match [ "rules" ] , fixed_text = fixed_text , cur = cur , cur_end = cur_end ) # If any rules match, output replacement from the # rule, else output it's default top-level/default # replacement if replaced is not None : # Rule has matched output . append ( replaced ) else : # No rules have matched # output common match output . append ( match [ "replaced" ] ) # If none matched, append present cursor value if not match [ "matched" ] : cur_end = cur + 1 output . append ( i ) # End looping through input text and produce output return '' . join ( output )
8,481
https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L38-L109
[ "def", "_wrapper", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "if", "func", ".", "__name__", "==", "\"init\"", ":", "# init may not fail, as its return code is just stored as", "# private_data field of struct fuse_context", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "or", "0", "else", ":", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "or", "0", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", ">", "0", ":", "log", ".", "debug", "(", "\"FUSE operation %s raised a %s, returning errno %s.\"", ",", "func", ".", "__name__", ",", "type", "(", "e", ")", ",", "e", ".", "errno", ",", "exc_info", "=", "True", ")", "return", "-", "e", ".", "errno", "else", ":", "log", ".", "error", "(", "\"FUSE operation %s raised an OSError with negative \"", "\"errno %s, returning errno.EINVAL.\"", ",", "func", ".", "__name__", ",", "e", ".", "errno", ",", "exc_info", "=", "True", ")", "return", "-", "errno", ".", "EINVAL", "except", "Exception", ":", "log", ".", "error", "(", "\"Uncaught exception from FUSE operation %s, \"", "\"returning errno.EINVAL.\"", ",", "func", ".", "__name__", ",", "exc_info", "=", "True", ")", "return", "-", "errno", ".", "EINVAL", "except", "BaseException", "as", "e", ":", "self", ".", "__critical_exception", "=", "e", "log", ".", "critical", "(", "\"Uncaught critical exception from FUSE operation %s, aborting.\"", ",", "func", ".", "__name__", ",", "exc_info", "=", "True", ")", "# the raised exception (even SystemExit) will be caught by FUSE", "# potentially causing SIGSEGV, so tell system to stop/interrupt FUSE", "fuse_exit", "(", ")", "return", "-", "errno", ".", "EFAULT" ]
Matches given text at cursor position with non rule patterns
def match_non_rule_patterns ( fixed_text , cur = 0 ) : pattern = exact_find_in_pattern ( fixed_text , cur , NON_RULE_PATTERNS ) if len ( pattern ) > 0 : return { "matched" : True , "found" : pattern [ 0 ] [ 'find' ] , "replaced" : pattern [ 0 ] [ 'replace' ] } else : return { "matched" : False , "found" : None , "replaced" : fixed_text [ cur ] }
8,482
https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L111-L128
[ "def", "set_USRdict", "(", "self", ",", "USRdict", "=", "{", "}", ")", ":", "self", ".", "_check_inputs", "(", "USRdict", "=", "USRdict", ")", "self", ".", "_USRdict", "=", "USRdict" ]
Matches given text at cursor position with rule patterns
def match_rule_patterns ( fixed_text , cur = 0 ) : pattern = exact_find_in_pattern ( fixed_text , cur , RULE_PATTERNS ) # if len(pattern) == 1: if len ( pattern ) > 0 : return { "matched" : True , "found" : pattern [ 0 ] [ 'find' ] , "replaced" : pattern [ 0 ] [ 'replace' ] , "rules" : pattern [ 0 ] [ 'rules' ] } else : return { "matched" : False , "found" : None , "replaced" : fixed_text [ cur ] , "rules" : None }
8,483
https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L130-L149
[ "def", "set_USRdict", "(", "self", ",", "USRdict", "=", "{", "}", ")", ":", "self", ".", "_check_inputs", "(", "USRdict", "=", "USRdict", ")", "self", ".", "_USRdict", "=", "USRdict" ]
Returns pattern items that match given text cur position and pattern
def exact_find_in_pattern ( fixed_text , cur = 0 , patterns = PATTERNS ) : return [ x for x in patterns if ( cur + len ( x [ 'find' ] ) <= len ( fixed_text ) ) and x [ 'find' ] == fixed_text [ cur : ( cur + len ( x [ 'find' ] ) ) ] ]
8,484
https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L151-L154
[ "def", "create_server", "(", "self", ")", ":", "self", ".", "check_port", "(", "self", ".", "port", ")", "try", ":", "# Establish communication grpc", "self", ".", "server", "=", "grpc", ".", "server", "(", "ThreadPoolExecutor", "(", "max_workers", "=", "10", ")", ")", "self", ".", "unity_to_external", "=", "UnityToExternalServicerImplementation", "(", ")", "add_UnityToExternalServicer_to_server", "(", "self", ".", "unity_to_external", ",", "self", ".", "server", ")", "# Using unspecified address, which means that grpc is communicating on all IPs", "# This is so that the docker container can connect.", "self", ".", "server", ".", "add_insecure_port", "(", "'[::]:'", "+", "str", "(", "self", ".", "port", ")", ")", "self", ".", "server", ".", "start", "(", ")", "self", ".", "is_open", "=", "True", "except", ":", "raise", "UnityWorkerInUseException", "(", "self", ".", "worker_id", ")" ]
Process rules matched in pattern and returns suitable replacement
def process_rules ( rules , fixed_text , cur = 0 , cur_end = 1 ) : replaced = '' # iterate through rules for rule in rules : matched = False # iterate through matches for match in rule [ 'matches' ] : matched = process_match ( match , fixed_text , cur , cur_end ) # Break out of loop if we dont' have a match. Here we are # trusting avrodict to have listed matches sequentially if not matched : break # If a match is found, stop looping through rules any further if matched : replaced = rule [ 'replace' ] break # if any match has been found return replace value if matched : return replaced else : return None
8,485
https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L156-L183
[ "def", "remove_armor", "(", "armored_data", ")", ":", "stream", "=", "io", ".", "BytesIO", "(", "armored_data", ")", "lines", "=", "stream", ".", "readlines", "(", ")", "[", "3", ":", "-", "1", "]", "data", "=", "base64", ".", "b64decode", "(", "b''", ".", "join", "(", "lines", ")", ")", "payload", ",", "checksum", "=", "data", "[", ":", "-", "3", "]", ",", "data", "[", "-", "3", ":", "]", "assert", "util", ".", "crc24", "(", "payload", ")", "==", "checksum", "return", "payload" ]
Processes a single match in rules
def process_match ( match , fixed_text , cur , cur_end ) : # Set our tools # -- Initial/default value for replace replace = True # -- Set check cursor depending on match['type'] if match [ 'type' ] == 'prefix' : chk = cur - 1 else : # suffix chk = cur_end # -- Set scope based on whether scope is negative if match [ 'scope' ] . startswith ( '!' ) : scope = match [ 'scope' ] [ 1 : ] negative = True else : scope = match [ 'scope' ] negative = False # Let the matching begin # -- Punctuations if scope == 'punctuation' : # Conditions: XORd with negative if ( not ( ( chk < 0 and match [ 'type' ] == 'prefix' ) or ( chk >= len ( fixed_text ) and match [ 'type' ] == 'suffix' ) or validate . is_punctuation ( fixed_text [ chk ] ) ) ^ negative ) : replace = False # -- Vowels -- Checks: 1. Cursor should not be at first character # -- if prefix or last character if suffix, 2. Character at chk # -- should be a vowel. 3. 'negative' will invert the value of 1 # -- AND 2 elif scope == 'vowel' : if ( not ( ( ( chk >= 0 and match [ 'type' ] == 'prefix' ) or ( chk < len ( fixed_text ) and match [ 'type' ] == 'suffix' ) ) and validate . is_vowel ( fixed_text [ chk ] ) ) ^ negative ) : replace = False # -- Consonants -- Checks: 1. Cursor should not be at first # -- character if prefix or last character if suffix, 2. Character # -- at chk should be a consonant. 3. 'negative' will invert the # -- value of 1 AND 2 elif scope == 'consonant' : if ( not ( ( ( chk >= 0 and match [ 'type' ] == 'prefix' ) or ( chk < len ( fixed_text ) and match [ 'type' ] == 'suffix' ) ) and validate . is_consonant ( fixed_text [ chk ] ) ) ^ negative ) : replace = False # -- Exacts elif scope == 'exact' : # Prepare cursor for exact search if match [ 'type' ] == 'prefix' : exact_start = cur - len ( match [ 'value' ] ) exact_end = cur else : # suffix exact_start = cur_end exact_end = cur_end + len ( match [ 'value' ] ) # Validate exact find. if not validate . 
is_exact ( match [ 'value' ] , fixed_text , exact_start , exact_end , negative ) : replace = False # Return replace, which will be true if none of the checks above match return replace
8,486
https://github.com/kaustavdm/pyAvroPhonetic/blob/26b7d567d8db025f2cac4de817e716390d7ac337/pyavrophonetic/avro.py#L185-L248
[ "def", "normaliseURL", "(", "url", ")", ":", "url", "=", "unicode_safe", "(", "url", ")", ".", "strip", "(", ")", "# XXX: brutal hack", "url", "=", "unescape", "(", "url", ")", "pu", "=", "list", "(", "urlparse", "(", "url", ")", ")", "segments", "=", "pu", "[", "2", "]", ".", "split", "(", "'/'", ")", "while", "segments", "and", "segments", "[", "0", "]", "in", "(", "''", ",", "'..'", ")", ":", "del", "segments", "[", "0", "]", "pu", "[", "2", "]", "=", "'/'", "+", "'/'", ".", "join", "(", "segments", ")", "# remove leading '&' from query", "if", "pu", "[", "4", "]", ".", "startswith", "(", "'&'", ")", ":", "pu", "[", "4", "]", "=", "pu", "[", "4", "]", "[", "1", ":", "]", "# remove anchor", "pu", "[", "5", "]", "=", "\"\"", "return", "urlunparse", "(", "pu", ")" ]
Command - line interface for interacting with a WVA device
def cli ( ctx , hostname , username , password , config_dir , https ) : ctx . is_root = True ctx . user_values_entered = False ctx . config_dir = os . path . abspath ( os . path . expanduser ( config_dir ) ) ctx . config = load_config ( ctx ) ctx . hostname = hostname ctx . username = username ctx . password = password ctx . https = https # Creating the WVA object is deferred as some commands like clearconfig # should not require a username/password to perform them ctx . wva = None
8,487
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L110-L123
[ "def", "_MultiNotifyQueue", "(", "self", ",", "queue", ",", "notifications", ",", "mutation_pool", "=", "None", ")", ":", "notification_list", "=", "[", "]", "now", "=", "rdfvalue", ".", "RDFDatetime", ".", "Now", "(", ")", "for", "notification", "in", "notifications", ":", "if", "not", "notification", ".", "first_queued", ":", "notification", ".", "first_queued", "=", "(", "self", ".", "frozen_timestamp", "or", "rdfvalue", ".", "RDFDatetime", ".", "Now", "(", ")", ")", "else", ":", "diff", "=", "now", "-", "notification", ".", "first_queued", "if", "diff", ".", "seconds", ">=", "self", ".", "notification_expiry_time", ":", "# This notification has been around for too long, we drop it.", "logging", ".", "debug", "(", "\"Dropping notification: %s\"", ",", "str", "(", "notification", ")", ")", "continue", "notification_list", ".", "append", "(", "notification", ")", "mutation_pool", ".", "CreateNotifications", "(", "self", ".", "GetNotificationShard", "(", "queue", ")", ",", "notification_list", ")" ]
Perform an HTTP GET of the provided URI
def get ( ctx , uri ) : http_client = get_wva ( ctx ) . get_http_client ( ) cli_pprint ( http_client . get ( uri ) )
8,488
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L161-L196
[ "def", "write", "(", "self", ",", "basename", "=", "None", ",", "write_separate_manifests", "=", "True", ")", ":", "self", ".", "check_files", "(", ")", "n", "=", "0", "for", "manifest", "in", "self", ".", "partition_dumps", "(", ")", ":", "dumpbase", "=", "\"%s%05d\"", "%", "(", "basename", ",", "n", ")", "dumpfile", "=", "\"%s.%s\"", "%", "(", "dumpbase", ",", "self", ".", "format", ")", "if", "(", "write_separate_manifests", ")", ":", "manifest", ".", "write", "(", "basename", "=", "dumpbase", "+", "'.xml'", ")", "if", "(", "self", ".", "format", "==", "'zip'", ")", ":", "self", ".", "write_zip", "(", "manifest", ".", "resources", ",", "dumpfile", ")", "elif", "(", "self", ".", "format", "==", "'warc'", ")", ":", "self", ".", "write_warc", "(", "manifest", ".", "resources", ",", "dumpfile", ")", "else", ":", "raise", "DumpError", "(", "\"Unknown dump format requested (%s)\"", "%", "(", "self", ".", "format", ")", ")", "n", "+=", "1", "self", ".", "logger", ".", "info", "(", "\"Wrote %d dump files\"", "%", "(", "n", ")", ")", "return", "(", "n", ")" ]
DELETE the specified URI
def delete ( ctx , uri ) : http_client = get_wva ( ctx ) . get_http_client ( ) cli_pprint ( http_client . delete ( uri ) )
8,489
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L202-L217
[ "async", "def", "_sem_crawl", "(", "self", ",", "sem", ",", "res", ")", ":", "async", "with", "sem", ":", "st_", "=", "await", "self", ".", "crawl_raw", "(", "res", ")", "if", "st_", ":", "self", ".", "result", "[", "'ok'", "]", "+=", "1", "else", ":", "self", ".", "result", "[", "'fail'", "]", "+=", "1", "# take a little gap", "await", "asyncio", ".", "sleep", "(", "random", ".", "randint", "(", "0", ",", "1", ")", ")" ]
POST file data to a specific URI
def post ( ctx , uri , input_file ) : http_client = get_wva ( ctx ) . get_http_client ( ) cli_pprint ( http_client . post ( uri , input_file . read ( ) ) )
8,490
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L224-L231
[ "def", "upload", "(", "self", ",", "params", "=", "{", "}", ")", ":", "if", "self", ".", "upload_token", "is", "not", "None", ":", "# resume upload", "status", "=", "self", ".", "check", "(", ")", "if", "status", "[", "'status'", "]", "!=", "4", ":", "return", "self", ".", "commit", "(", ")", "else", ":", "self", ".", "new_slice", "(", ")", "while", "self", ".", "slice_task_id", "!=", "0", ":", "self", ".", "upload_slice", "(", ")", "return", "self", ".", "commit", "(", ")", "else", ":", "# new upload", "self", ".", "create", "(", "self", ".", "prepare_video_params", "(", "*", "*", "params", ")", ")", "self", ".", "create_file", "(", ")", "self", ".", "new_slice", "(", ")", "while", "self", ".", "slice_task_id", "!=", "0", ":", "self", ".", "upload_slice", "(", ")", "return", "self", ".", "commit", "(", ")" ]
Sample the value of a vehicle data element
def sample ( ctx , element , timestamp , repeat , delay ) : element = get_wva ( ctx ) . get_vehicle_data_element ( element ) for i in xrange ( repeat ) : curval = element . sample ( ) if timestamp : print ( "{} at {}" . format ( curval . value , curval . timestamp . ctime ( ) ) ) else : print ( "{}" . format ( curval . value ) ) if i + 1 < repeat : # do not delay on last iteration time . sleep ( delay )
8,491
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L293-L330
[ "def", "get_all_indices", "(", "self", ",", "n_samples", "=", "None", ",", "max_samples", "=", "None", ",", "random_state", "=", "None", ")", ":", "if", "self", ".", "_indices_state", "is", "None", "and", "random_state", "is", "None", ":", "raise", "ValueError", "(", "'The program has not been evaluated for fitness '", "'yet, indices not available.'", ")", "if", "n_samples", "is", "not", "None", "and", "self", ".", "_n_samples", "is", "None", ":", "self", ".", "_n_samples", "=", "n_samples", "if", "max_samples", "is", "not", "None", "and", "self", ".", "_max_samples", "is", "None", ":", "self", ".", "_max_samples", "=", "max_samples", "if", "random_state", "is", "not", "None", "and", "self", ".", "_indices_state", "is", "None", ":", "self", ".", "_indices_state", "=", "random_state", ".", "get_state", "(", ")", "indices_state", "=", "check_random_state", "(", "None", ")", "indices_state", ".", "set_state", "(", "self", ".", "_indices_state", ")", "not_indices", "=", "sample_without_replacement", "(", "self", ".", "_n_samples", ",", "self", ".", "_n_samples", "-", "self", ".", "_max_samples", ",", "random_state", "=", "indices_state", ")", "sample_counts", "=", "np", ".", "bincount", "(", "not_indices", ",", "minlength", "=", "self", ".", "_n_samples", ")", "indices", "=", "np", ".", "where", "(", "sample_counts", "==", "0", ")", "[", "0", "]", "return", "indices", ",", "not_indices" ]
List short name of all current subscriptions
def list ( ctx ) : wva = get_wva ( ctx ) for subscription in wva . get_subscriptions ( ) : print ( subscription . short_name )
8,492
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L344-L348
[ "def", "_readWMSDatasets", "(", "self", ",", "datasetCards", ",", "directory", ",", "session", ",", "spatial", "=", "False", ",", "spatialReferenceID", "=", "4236", ")", ":", "if", "self", ".", "mapType", "in", "self", ".", "MAP_TYPES_SUPPORTED", ":", "# Get Mask Map dependency", "maskMap", "=", "session", ".", "query", "(", "RasterMapFile", ")", ".", "filter", "(", "RasterMapFile", ".", "projectFile", "==", "self", ")", ".", "filter", "(", "RasterMapFile", ".", "fileExtension", "==", "'msk'", ")", ".", "one", "(", ")", "for", "card", "in", "self", ".", "projectCards", ":", "if", "(", "card", ".", "name", "in", "datasetCards", ")", "and", "self", ".", "_noneOrNumValue", "(", "card", ".", "value", ")", ":", "# Get filename from project file", "filename", "=", "card", ".", "value", ".", "strip", "(", "'\"'", ")", "path", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "filename", ")", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "wmsDatasetFile", "=", "WMSDatasetFile", "(", ")", "wmsDatasetFile", ".", "projectFile", "=", "self", "wmsDatasetFile", ".", "read", "(", "directory", "=", "directory", ",", "filename", "=", "filename", ",", "session", "=", "session", ",", "maskMap", "=", "maskMap", ",", "spatial", "=", "spatial", ",", "spatialReferenceID", "=", "spatialReferenceID", ")", "else", ":", "self", ".", "_readBatchOutputForFile", "(", "directory", ",", "WMSDatasetFile", ",", "filename", ",", "session", ",", "spatial", ",", "spatialReferenceID", ",", "maskMap", "=", "maskMap", ")" ]
Delete a specific subscription by short name
def delete ( ctx , short_name ) : wva = get_wva ( ctx ) subscription = wva . get_subscription ( short_name ) subscription . delete ( )
8,493
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L354-L358
[ "def", "enforce_filetype_file", "(", "form", ",", "field", ")", ":", "if", "form", ".", "_fields", ".", "get", "(", "'filetype'", ")", ".", "data", "!=", "RESOURCE_FILETYPE_FILE", ":", "return", "domain", "=", "urlparse", "(", "field", ".", "data", ")", ".", "netloc", "allowed_domains", "=", "current_app", ".", "config", "[", "'RESOURCES_FILE_ALLOWED_DOMAINS'", "]", "allowed_domains", "+=", "[", "current_app", ".", "config", ".", "get", "(", "'SERVER_NAME'", ")", "]", "if", "current_app", ".", "config", ".", "get", "(", "'CDN_DOMAIN'", ")", ":", "allowed_domains", ".", "append", "(", "current_app", ".", "config", "[", "'CDN_DOMAIN'", "]", ")", "if", "'*'", "in", "allowed_domains", ":", "return", "if", "domain", "and", "domain", "not", "in", "allowed_domains", ":", "message", "=", "_", "(", "'Domain \"{domain}\" not allowed for filetype \"{filetype}\"'", ")", "raise", "validators", ".", "ValidationError", "(", "message", ".", "format", "(", "domain", "=", "domain", ",", "filetype", "=", "RESOURCE_FILETYPE_FILE", ")", ")" ]
Remove all registered subscriptions
def clear ( ctx ) : wva = get_wva ( ctx ) for subscription in wva . get_subscriptions ( ) : sys . stdout . write ( "Deleting {}... " . format ( subscription . short_name ) ) sys . stdout . flush ( ) subscription . delete ( ) print ( "Done" )
8,494
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L363-L383
[ "def", "waveform_stack", "(", "mediafiles", ",", "xy_size", ",", "output", "=", "None", ",", "label_style", "=", "None", ",", "center_color", "=", "None", ",", "outer_color", "=", "None", ",", "bg_color", "=", "None", ")", ":", "img_files", "=", "[", "]", "output", "=", "output", "or", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "commonprefix", "(", "mediafiles", ")", ")", ")", "if", "os", ".", "path", ".", "isdir", "(", "output", ")", ":", "output", "=", "os", ".", "path", ".", "join", "(", "output", ",", "\"waveforms.jpg\"", ")", "cmd", "=", "[", "config", ".", "CMD_IM_MONTAGE", "]", "+", "shlex", ".", "split", "(", "label_style", "or", "WAVE_LABEL_STYLE", ")", "cmd", "+=", "[", "\"-tile\"", ",", "\"1x%d\"", "%", "len", "(", "mediafiles", ")", ",", "\"-geometry\"", ",", "\"%dx%d\"", "%", "xy_size", ",", "\"-label\"", ",", "\"%t\"", "]", "try", ":", "tempdir", "=", "tempfile", ".", "mktemp", "(", "__name__", ")", "os", ".", "makedirs", "(", "tempdir", ")", "for", "mediafile", "in", "sorted", "(", "mediafiles", ")", ":", "img_files", ".", "append", "(", "waveform_image", "(", "mediafile", ",", "xy_size", ",", "tempdir", ",", "center_color", ",", "outer_color", ",", "bg_color", ")", ")", "cmd", ".", "extend", "(", "img_files", ")", "cmd", ".", "append", "(", "output", ")", "subprocess", ".", "check_call", "(", "cmd", ",", "stdout", "=", "open", "(", "os", ".", "devnull", ",", "\"wb\"", ")", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", "finally", ":", "if", "os", ".", "path", ".", "isdir", "(", "tempdir", ")", ":", "shutil", ".", "rmtree", "(", "tempdir", ",", "ignore_errors", "=", "True", ")", "return", "output" ]
Show metadata for a specific subscription
def show ( ctx , short_name ) : wva = get_wva ( ctx ) subscription = wva . get_subscription ( short_name ) cli_pprint ( subscription . get_metadata ( ) )
8,495
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L389-L400
[ "def", "_incomplete_files", "(", "filenames", ")", ":", "tmp_files", "=", "[", "get_incomplete_path", "(", "f", ")", "for", "f", "in", "filenames", "]", "try", ":", "yield", "tmp_files", "for", "tmp", ",", "output", "in", "zip", "(", "tmp_files", ",", "filenames", ")", ":", "tf", ".", "io", ".", "gfile", ".", "rename", "(", "tmp", ",", "output", ")", "finally", ":", "for", "tmp", "in", "tmp_files", ":", "if", "tf", ".", "io", ".", "gfile", ".", "exists", "(", "tmp", ")", ":", "tf", ".", "io", ".", "gfile", ".", "remove", "(", "tmp", ")" ]
Add a subscription with a given short_name for a given uri
def add ( ctx , short_name , uri , interval , buffer ) : wva = get_wva ( ctx ) subscription = wva . get_subscription ( short_name ) subscription . create ( uri , buffer , interval )
8,496
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L409-L432
[ "def", "merge_cts_records", "(", "file_name", ",", "crypto_idfp", ",", "crypto_idfps", ")", ":", "db", "=", "XonoticDB", ".", "load_path", "(", "file_name", ")", "db", ".", "merge_cts_records", "(", "crypto_idfp", ",", "crypto_idfps", ")", "db", ".", "save", "(", "file_name", ")" ]
Output the contents of the WVA event stream
def listen ( ctx ) : wva = get_wva ( ctx ) es = wva . get_event_stream ( ) def cb ( event ) : cli_pprint ( event ) es . add_event_listener ( cb ) es . enable ( ) while True : time . sleep ( 5 )
8,497
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L437-L473
[ "def", "multicomp", "(", "pvals", ",", "alpha", "=", "0.05", ",", "method", "=", "'holm'", ")", ":", "if", "not", "isinstance", "(", "pvals", ",", "(", "list", ",", "np", ".", "ndarray", ")", ")", ":", "err", "=", "\"pvals must be a list or a np.ndarray\"", "raise", "ValueError", "(", "err", ")", "if", "method", ".", "lower", "(", ")", "in", "[", "'b'", ",", "'bonf'", ",", "'bonferroni'", "]", ":", "reject", ",", "pvals_corrected", "=", "bonf", "(", "pvals", ",", "alpha", "=", "alpha", ")", "elif", "method", ".", "lower", "(", ")", "in", "[", "'h'", ",", "'holm'", "]", ":", "reject", ",", "pvals_corrected", "=", "holm", "(", "pvals", ",", "alpha", "=", "alpha", ")", "elif", "method", ".", "lower", "(", ")", "in", "[", "'fdr'", ",", "'fdr_bh'", "]", ":", "reject", ",", "pvals_corrected", "=", "fdr", "(", "pvals", ",", "alpha", "=", "alpha", ",", "method", "=", "'fdr_bh'", ")", "elif", "method", ".", "lower", "(", ")", "in", "[", "'fdr_by'", "]", ":", "reject", ",", "pvals_corrected", "=", "fdr", "(", "pvals", ",", "alpha", "=", "alpha", ",", "method", "=", "'fdr_by'", ")", "elif", "method", ".", "lower", "(", ")", "==", "'none'", ":", "pvals_corrected", "=", "pvals", "with", "np", ".", "errstate", "(", "invalid", "=", "'ignore'", ")", ":", "reject", "=", "np", ".", "less", "(", "pvals_corrected", ",", "alpha", ")", "else", ":", "raise", "ValueError", "(", "'Multiple comparison method not recognized'", ")", "return", "reject", ",", "pvals_corrected" ]
Present a live graph of the incoming streaming data
def graph ( ctx , items , seconds , ylim ) : wva = get_wva ( ctx ) es = wva . get_event_stream ( ) try : from wva import grapher except ImportError : print ( "Unable to graph... you must have matplotlib installed" ) else : stream_grapher = grapher . WVAStreamGrapher ( wva , items , seconds = seconds , ylim = ylim ) es . enable ( ) stream_grapher . run ( )
8,498
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L481-L507
[ "def", "transform_i_to_j_optional", "(", "self", ",", "line", ":", "str", ")", "->", "str", ":", "words", "=", "line", ".", "split", "(", "\" \"", ")", "space_list", "=", "string_utils", ".", "space_list", "(", "line", ")", "corrected_words", "=", "[", "]", "for", "word", "in", "words", ":", "found", "=", "False", "for", "prefix", "in", "self", ".", "constants", ".", "PREFIXES", ":", "if", "word", ".", "startswith", "(", "prefix", ")", "and", "word", "!=", "prefix", ":", "corrected_words", ".", "append", "(", "self", ".", "syllabifier", ".", "convert_consonantal_i", "(", "prefix", ")", ")", "corrected_words", ".", "append", "(", "self", ".", "syllabifier", ".", "convert_consonantal_i", "(", "word", "[", "len", "(", "prefix", ")", ":", "]", ")", ")", "found", "=", "True", "break", "if", "not", "found", ":", "corrected_words", ".", "append", "(", "self", ".", "syllabifier", ".", "convert_consonantal_i", "(", "word", ")", ")", "new_line", "=", "string_utils", ".", "join_syllables_spaces", "(", "corrected_words", ",", "space_list", ")", "# the following two may be tunable and subject to improvement", "char_list", "=", "string_utils", ".", "overwrite", "(", "list", "(", "new_line", ")", ",", "\"[bcdfgjkmpqrstvwxzBCDFGHJKMPQRSTVWXZ][i][{}]\"", ".", "format", "(", "self", ".", "constants", ".", "VOWELS_WO_I", ")", ",", "\"j\"", ",", "1", ")", "char_list", "=", "string_utils", ".", "overwrite", "(", "char_list", ",", "\"[{}][iI][{}]\"", ".", "format", "(", "self", ".", "constants", ".", "LIQUIDS", ",", "self", ".", "constants", ".", "VOWELS_WO_I", ")", ",", "\"j\"", ",", "1", ")", "return", "\"\"", ".", "join", "(", "char_list", ")" ]
Enable ssh login as the Python user for the current user
def authorize ( ctx , public_key , append ) : wva = get_wva ( ctx ) http_client = wva . get_http_client ( ) authorized_keys_uri = "/files/userfs/WEB/python/.ssh/authorized_keys" authorized_key_contents = public_key if append : try : existing_contents = http_client . get ( authorized_keys_uri ) authorized_key_contents = "{}\n{}" . format ( existing_contents , public_key ) except WVAHttpNotFoundError : pass # file doesn't exist, just write the public key http_client . put ( authorized_keys_uri , authorized_key_contents ) print ( "Public key written to authorized_keys for python user." ) print ( "You should now be able to ssh to the device by doing the following:" ) print ( "" ) print ( " $ ssh python@{}" . format ( get_root_ctx ( ctx ) . hostname ) )
8,499
https://github.com/digidotcom/python-wvalib/blob/4252735e2775f80ebaffd813fbe84046d26906b3/wva/cli.py#L523-L546
[ "def", "parse_torrent_properties", "(", "table_datas", ")", ":", "output", "=", "{", "'category'", ":", "table_datas", "[", "0", "]", ".", "text", ",", "'subcategory'", ":", "None", ",", "'quality'", ":", "None", ",", "'language'", ":", "None", "}", "for", "i", "in", "range", "(", "1", ",", "len", "(", "table_datas", ")", ")", ":", "td", "=", "table_datas", "[", "i", "]", "url", "=", "td", ".", "get", "(", "'href'", ")", "params", "=", "Parser", ".", "get_params", "(", "url", ")", "if", "Parser", ".", "is_subcategory", "(", "params", ")", "and", "not", "output", "[", "'subcategory'", "]", ":", "output", "[", "'subcategory'", "]", "=", "td", ".", "text", "elif", "Parser", ".", "is_quality", "(", "params", ")", "and", "not", "output", "[", "'quality'", "]", ":", "output", "[", "'quality'", "]", "=", "td", ".", "text", "elif", "Parser", ".", "is_language", "(", "params", ")", "and", "not", "output", "[", "'language'", "]", ":", "output", "[", "'language'", "]", "=", "td", ".", "text", "return", "output" ]