query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
load subset based on extent
def _set_subset_indices ( self , y_min , y_max , x_min , x_max ) : y_coords , x_coords = self . xd . lsm . coords dx = self . xd . lsm . dx dy = self . xd . lsm . dy lsm_y_indices_from_y , lsm_x_indices_from_y = np . where ( ( y_coords >= ( y_min - 2 * dy ) ) & ( y_coords <= ( y_max + 2 * dy ) ) ) lsm_y_indices_from_x , lsm_x_indices_from_x = np . where ( ( x_coords >= ( x_min - 2 * dx ) ) & ( x_coords <= ( x_max + 2 * dx ) ) ) lsm_y_indices = np . intersect1d ( lsm_y_indices_from_y , lsm_y_indices_from_x ) lsm_x_indices = np . intersect1d ( lsm_x_indices_from_y , lsm_x_indices_from_x ) self . xslice = slice ( np . amin ( lsm_x_indices ) , np . amax ( lsm_x_indices ) + 1 ) self . yslice = slice ( np . amin ( lsm_y_indices ) , np . amax ( lsm_y_indices ) + 1 )
4,600
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L608-L631
[ "def", "_encode_header", "(", "key", ":", "str", ",", "pdict", ":", "Dict", "[", "str", ",", "str", "]", ")", "->", "str", ":", "if", "not", "pdict", ":", "return", "key", "out", "=", "[", "key", "]", "# Sort the parameters just to make it easy to test.", "for", "k", ",", "v", "in", "sorted", "(", "pdict", ".", "items", "(", ")", ")", ":", "if", "v", "is", "None", ":", "out", ".", "append", "(", "k", ")", "else", ":", "# TODO: quote if necessary.", "out", ".", "append", "(", "\"%s=%s\"", "%", "(", "k", ",", "v", ")", ")", "return", "\"; \"", ".", "join", "(", "out", ")" ]
This converts a UTC datetime object to a string
def _time_to_string ( self , dt , conversion_string = "%Y %m %d %H %M" ) : if self . output_timezone is not None : dt = dt . replace ( tzinfo = utc ) . astimezone ( self . output_timezone ) return dt . strftime ( conversion_string )
4,601
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L651-L658
[ "def", "assignParameters", "(", "self", ",", "solution_next", ",", "IncomeDstn", ",", "LivPrb", ",", "DiscFac", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ",", "BoroCnstArt", ",", "aXtraGrid", ",", "vFuncBool", ",", "CubicBool", ")", ":", "ConsPerfForesightSolver", ".", "assignParameters", "(", "self", ",", "solution_next", ",", "DiscFac", ",", "LivPrb", ",", "CRRA", ",", "Rfree", ",", "PermGroFac", ")", "self", ".", "BoroCnstArt", "=", "BoroCnstArt", "self", ".", "IncomeDstn", "=", "IncomeDstn", "self", ".", "aXtraGrid", "=", "aXtraGrid", "self", ".", "vFuncBool", "=", "vFuncBool", "self", ".", "CubicBool", "=", "CubicBool" ]
This extracts the LSM data from a folder of netcdf files
def _load_lsm_data ( self , data_var , conversion_factor = 1 , calc_4d_method = None , calc_4d_dim = None , time_step = None ) : data = self . xd . lsm . getvar ( data_var , yslice = self . yslice , xslice = self . xslice , calc_4d_method = calc_4d_method , calc_4d_dim = calc_4d_dim ) if isinstance ( time_step , datetime ) : data = data . loc [ { self . lsm_time_dim : [ pd . to_datetime ( time_step ) ] } ] elif time_step is not None : data = data [ { self . lsm_time_dim : [ time_step ] } ] data = data . fillna ( 0 ) data . values *= conversion_factor return data
4,602
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L660-L679
[ "def", "bulk_upsert", "(", "self", ",", "docs", ",", "namespace", ",", "timestamp", ")", ":", "def", "docs_to_upsert", "(", ")", ":", "doc", "=", "None", "for", "doc", "in", "docs", ":", "# Remove metadata and redundant _id", "index", ",", "doc_type", "=", "self", ".", "_index_and_mapping", "(", "namespace", ")", "doc_id", "=", "u", "(", "doc", ".", "pop", "(", "\"_id\"", ")", ")", "document_action", "=", "{", "'_index'", ":", "index", ",", "'_type'", ":", "doc_type", ",", "'_id'", ":", "doc_id", ",", "'_source'", ":", "self", ".", "_formatter", ".", "format_document", "(", "doc", ")", "}", "document_meta", "=", "{", "'_index'", ":", "self", ".", "meta_index_name", ",", "'_type'", ":", "self", ".", "meta_type", ",", "'_id'", ":", "doc_id", ",", "'_source'", ":", "{", "'ns'", ":", "namespace", ",", "'_ts'", ":", "timestamp", "}", "}", "yield", "document_action", "yield", "document_meta", "if", "doc", "is", "None", ":", "raise", "errors", ".", "EmptyDocsError", "(", "\"Cannot upsert an empty sequence of \"", "\"documents into Elastic Search\"", ")", "try", ":", "kw", "=", "{", "}", "if", "self", ".", "chunk_size", ">", "0", ":", "kw", "[", "'chunk_size'", "]", "=", "self", ".", "chunk_size", "responses", "=", "streaming_bulk", "(", "client", "=", "self", ".", "elastic", ",", "actions", "=", "docs_to_upsert", "(", ")", ",", "*", "*", "kw", ")", "for", "ok", ",", "resp", "in", "responses", ":", "if", "not", "ok", ":", "LOG", ".", "error", "(", "\"Could not bulk-upsert document \"", "\"into ElasticSearch: %r\"", "%", "resp", ")", "if", "self", ".", "auto_commit_interval", "==", "0", ":", "self", ".", "commit", "(", ")", "except", "errors", ".", "EmptyDocsError", ":", "# This can happen when mongo-connector starts up, there is no", "# config file, but nothing to dump", "pass" ]
This function checks the input var map array to ensure the required input variables exist
def _check_lsm_input ( self , data_var_map_array ) : REQUIRED_HMET_VAR_LIST = [ 'Prcp' , 'Pres' , 'Temp' , 'Clod' , 'RlHm' , 'Drad' , 'Grad' , 'WndS' ] # make sure all required variables exist given_hmet_var_list = [ ] for gssha_data_var , lsm_data_var in data_var_map_array : gssha_data_hmet_name = self . netcdf_attributes [ gssha_data_var ] [ 'hmet_name' ] if gssha_data_hmet_name in given_hmet_var_list : raise ValueError ( "Duplicate parameter for HMET variable {0}" . format ( gssha_data_hmet_name ) ) else : given_hmet_var_list . append ( gssha_data_hmet_name ) for REQUIRED_HMET_VAR in REQUIRED_HMET_VAR_LIST : if REQUIRED_HMET_VAR not in given_hmet_var_list : raise ValueError ( "ERROR: HMET param is required to continue " "{0} ..." . format ( REQUIRED_HMET_VAR ) )
4,603
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L808-L830
[ "def", "get_adapter_path", "(", "obj", ",", "to_cls", ")", ":", "from_cls", "=", "type", "(", "obj", ")", "key", "=", "(", "from_cls", ",", "to_cls", ")", "if", "key", "not", "in", "__mro__", ":", "__mro__", "[", "key", "]", "=", "list", "(", "itertools", ".", "product", "(", "inspect", ".", "getmro", "(", "from_cls", ")", ",", "inspect", ".", "getmro", "(", "to_cls", ")", ")", ")", "return", "__mro__", "[", "key", "]" ]
This function resamples the data to match the GSSHA grid IN TESTING MODE
def _resample_data ( self , gssha_var ) : self . data = self . data . lsm . resample ( gssha_var , self . gssha_grid )
4,604
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L832-L837
[ "def", "create_destination", "(", "flowable", ",", "container", ",", "at_top_of_container", "=", "False", ")", ":", "vertical_position", "=", "0", "if", "at_top_of_container", "else", "container", ".", "cursor", "ids", "=", "flowable", ".", "get_ids", "(", "container", ".", "document", ")", "destination", "=", "NamedDestination", "(", "*", "(", "str", "(", "id", ")", "for", "id", "in", "ids", ")", ")", "container", ".", "canvas", ".", "annotate", "(", "destination", ",", "0", ",", "vertical_position", ",", "container", ".", "width", ",", "None", ")", "container", ".", "document", ".", "register_page_reference", "(", "container", ".", "page", ",", "flowable", ")" ]
This function converts the data to hourly data and then puts it into the data_np_array USED WHEN GENERATING HMET DATA ONLY
def _convert_data_to_hourly(self, gssha_data_var):
    """Resample ``self.data`` (assumed to be an xarray Dataset with a 'time'
    dimension -- TODO confirm) to an hourly time step for *gssha_data_var*.

    Sub-hourly data are aggregated with the variable's calc function;
    super-hourly data are resampled to hourly and then filled by linear
    interpolation (or by repeating/splitting the last step).  When the
    data are already hourly, ``self.data`` is left untouched.
    NOTE(review): uses the legacy ``resample(freq, dim=..., how=...)``
    xarray API -- confirm the pinned xarray version still supports it.
    """
    # Time step of the source data, in hours.
    time_step_hours = np.diff(self.data.time)[0] / np.timedelta64(1, 'h')
    calc_function = self._get_calc_function(gssha_data_var)
    resampled_data = None
    if time_step_hours < 1:
        # Sub-hourly: aggregate up to hourly with the calc function.
        resampled_data = self.data.resample('1H', dim='time',
                                            how=calc_function,
                                            keep_attrs=True)
    elif time_step_hours > 1:
        # Super-hourly: resample to hourly, then fill the new steps.
        resampled_data = self.data.resample('1H', dim='time',
                                            keep_attrs=True)
        for time_idx in range(self.data.dims['time']):
            if time_idx + 1 < self.data.dims['time']:
                # interpolate between time steps
                start_time = self.data.time[time_idx].values
                end_time = self.data.time[time_idx + 1].values
                slope_timeslice = slice(str(start_time), str(end_time))
                # Number of hourly steps between the two original steps.
                slice_size = resampled_data.sel(time=slope_timeslice).dims['time'] - 1
                first_timestep = resampled_data.sel(time=str(start_time))[gssha_data_var]
                # Per-hour increment between the two original values.
                slope = (resampled_data.sel(time=str(end_time))[gssha_data_var]
                         - first_timestep) / float(slice_size)
                # Exclude the original endpoints from the fill range.
                data_timeslice = slice(str(start_time + np.timedelta64(1, 'm')),
                                       str(end_time - np.timedelta64(1, 'm')))
                data_subset = resampled_data.sel(time=data_timeslice)
                for xidx in range(data_subset.dims['time']):
                    data_subset[gssha_data_var][xidx] = first_timestep + slope * (xidx + 1)
            else:
                # just continue to repeat the timestep
                start_time = self.data.time[time_idx].values
                end_time = resampled_data.time[-1].values
                if end_time > start_time:
                    first_timestep = resampled_data.sel(time=str(start_time))[gssha_data_var]
                    data_timeslice = slice(str(start_time), str(end_time))
                    data_subset = resampled_data.sel(time=data_timeslice)
                    slice_size = 1
                    if calc_function == "mean":
                        # Split the value evenly across the repeated hours.
                        slice_size = data_subset.dims['time']
                    for xidx in range(data_subset.dims['time']):
                        data_subset[gssha_data_var][xidx] = first_timestep / float(slice_size)
    if resampled_data is not None:
        # make sure coordinates copied
        if self.data.lsm.x_var not in resampled_data.coords:
            resampled_data.coords[self.data.lsm.x_var] = self.data.coords[self.data.lsm.x_var]
        if self.data.lsm.y_var not in resampled_data.coords:
            resampled_data.coords[self.data.lsm.y_var] = self.data.coords[self.data.lsm.y_var]
        self.data = resampled_data
4,605
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L853-L908
[ "def", "_begin", "(", "self", ",", "retry_id", "=", "None", ")", ":", "if", "self", ".", "in_progress", ":", "msg", "=", "_CANT_BEGIN", ".", "format", "(", "self", ".", "_id", ")", "raise", "ValueError", "(", "msg", ")", "transaction_response", "=", "self", ".", "_client", ".", "_firestore_api", ".", "begin_transaction", "(", "self", ".", "_client", ".", "_database_string", ",", "options_", "=", "self", ".", "_options_protobuf", "(", "retry_id", ")", ",", "metadata", "=", "self", ".", "_client", ".", "_rpc_metadata", ",", ")", "self", ".", "_id", "=", "transaction_response", ".", "transaction" ]
This function takes array data and writes out a GSSHA ascii grid .
def lsm_var_to_grid(self, out_grid_file, lsm_data_var, gssha_convert_var,
                    time_step=0, ascii_format='grass'):
    """Convert one LSM variable to a GSSHA ASCII grid file.

    Loads and converts the LSM data, reprojects and resamples it onto
    the GSSHA grid, and writes it out in GRASS or Arc ASCII format.
    Raises ValueError for any other *ascii_format*.
    """
    self._load_converted_gssha_data_from_lsm(gssha_convert_var, lsm_data_var,
                                             'grid', time_step)
    var_name = self.netcdf_attributes[gssha_convert_var]['gssha_name']
    # Reproject onto the GSSHA projection, then resample to its grid.
    self.data = self.data.lsm.to_projection(var_name,
                                            projection=self.gssha_grid.projection)
    self._resample_data(var_name)
    arr_grid = ArrayGrid(in_array=self.data[var_name].values,
                         wkt_projection=self.data.lsm.projection.ExportToWkt(),
                         geotransform=self.data.lsm.geotransform)
    fmt = ascii_format.strip().lower()
    if fmt == 'grass':
        arr_grid.to_grass_ascii(out_grid_file)
    elif fmt == 'arc':
        arr_grid.to_arc_ascii(out_grid_file)
    else:
        raise ValueError("Invalid argument for 'ascii_format'. Only 'grass' or 'arc' allowed.")
4,606
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L911-L962
[ "def", "add", "(", "self", ",", "watch_key", ",", "tensor_value", ")", ":", "if", "watch_key", "not", "in", "self", ".", "_tensor_data", ":", "self", ".", "_tensor_data", "[", "watch_key", "]", "=", "_WatchStore", "(", "watch_key", ",", "mem_bytes_limit", "=", "self", ".", "_watch_mem_bytes_limit", ")", "self", ".", "_tensor_data", "[", "watch_key", "]", ".", "add", "(", "tensor_value", ")" ]
This function writes the HMET_ASCII card file with ASCII file list for input to GSSHA
def _write_hmet_card_file ( self , hmet_card_file_path , main_output_folder ) : with io_open ( hmet_card_file_path , 'w' ) as out_hmet_list_file : for hour_time in self . data . lsm . datetime : date_str = self . _time_to_string ( hour_time , "%Y%m%d%H" ) out_hmet_list_file . write ( u"{0}\n" . format ( path . join ( main_output_folder , date_str ) ) )
4,607
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L1061-L1069
[ "def", "retention_period", "(", "self", ",", "value", ")", ":", "policy", "=", "self", ".", "_properties", ".", "setdefault", "(", "\"retentionPolicy\"", ",", "{", "}", ")", "if", "value", "is", "not", "None", ":", "policy", "[", "\"retentionPeriod\"", "]", "=", "str", "(", "value", ")", "else", ":", "policy", "=", "None", "self", ".", "_patch_property", "(", "\"retentionPolicy\"", ",", "policy", ")" ]
Writes extracted data to Arc ASCII file format into folder to be read in by GSSHA . Also generates the HMET_ASCII card file for GSSHA in the folder named hmet_file_list . txt .
def lsm_data_to_arc_ascii(self, data_var_map_array, main_output_folder=""):
    """Write extracted LSM data as hourly Arc ASCII grids for GSSHA.

    For each (gssha_var, lsm_var) pair the data are loaded, converted to
    hourly, reprojected, and written as one ``YYYYMMDDHH_<hmet>.asc`` file
    per time step; finally the HMET_ASCII card file ``hmet_file_list.txt``
    is generated in the same folder.
    """
    self._check_lsm_input(data_var_map_array)
    # Default output location inside the GSSHA project folder.
    if not main_output_folder:
        main_output_folder = path.join(self.gssha_project_folder, "hmet_ascii_data")
    try:
        mkdir(main_output_folder)
    except OSError:
        # Folder already exists -- reuse it.
        pass
    log.info("Outputting HMET data to {0}".format(main_output_folder))
    #PART 2: DATA
    for data_var_map in data_var_map_array:
        gssha_data_var, lsm_data_var = data_var_map
        gssha_data_hmet_name = self.netcdf_attributes[gssha_data_var]['hmet_name']
        gssha_data_var_name = self.netcdf_attributes[gssha_data_var]['gssha_name']
        self._load_converted_gssha_data_from_lsm(gssha_data_var, lsm_data_var, 'ascii')
        self._convert_data_to_hourly(gssha_data_var_name)
        self.data = self.data.lsm.to_projection(gssha_data_var_name,
                                                projection=self.gssha_grid.projection)
        # One ASCII grid per hourly time step.
        for time_idx in range(self.data.dims['time']):
            arr_grid = ArrayGrid(in_array=self.data[gssha_data_var_name][time_idx].values,
                                 wkt_projection=self.data.lsm.projection.ExportToWkt(),
                                 geotransform=self.data.lsm.geotransform,
                                 nodata_value=-9999)
            date_str = self._time_to_string(self.data.lsm.datetime[time_idx], "%Y%m%d%H")
            ascii_file_path = path.join(main_output_folder,
                                        "{0}_{1}.asc".format(date_str, gssha_data_hmet_name))
            arr_grid.to_arc_ascii(ascii_file_path)
    #PART 3: HMET_ASCII card input file with ASCII file list
    hmet_card_file_path = path.join(main_output_folder, 'hmet_file_list.txt')
    self._write_hmet_card_file(hmet_card_file_path, main_output_folder)
4,608
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L1072-L1191
[ "def", "get_conversation", "(", "self", ",", "peer_jid", ",", "*", ",", "current_jid", "=", "None", ")", ":", "try", ":", "return", "self", ".", "_conversationmap", "[", "peer_jid", "]", "except", "KeyError", ":", "pass", "return", "self", ".", "_make_conversation", "(", "peer_jid", ",", "False", ")" ]
Writes extracted data to the NetCDF file format
def lsm_data_to_subset_netcdf(self, netcdf_file_path, data_var_map_array,
                              resample_method=None):
    """Write extracted LSM data to a CF-style NetCDF file.

    Each (gssha_var, lsm_var) pair is loaded, converted to hourly, and
    either resampled onto the GSSHA grid (when *resample_method* is
    truthy) or just reprojected; the per-variable datasets are merged
    and written to *netcdf_file_path*.  Raises ValueError for an
    unknown GSSHA variable name.
    """
    self._check_lsm_input(data_var_map_array)
    output_datasets = []
    #DATA
    for gssha_var, lsm_var in data_var_map_array:
        if gssha_var in self.netcdf_attributes:
            self._load_converted_gssha_data_from_lsm(gssha_var, lsm_var, 'netcdf')
            #previously just added data, but needs to be hourly
            gssha_data_var_name = self.netcdf_attributes[gssha_var]['gssha_name']
            self._convert_data_to_hourly(gssha_data_var_name)
            if resample_method:
                self._resample_data(gssha_data_var_name)
            else:
                self.data = self.data.lsm.to_projection(gssha_data_var_name,
                                                        projection=self.gssha_grid.projection)
            output_datasets.append(self.data)
        else:
            raise ValueError("Invalid GSSHA variable name: {0} ...".format(gssha_var))
    output_dataset = xr.merge(output_datasets)
    #add global attributes
    output_dataset.attrs['Convention'] = 'CF-1.6'
    output_dataset.attrs['title'] = 'GSSHA LSM Input'
    output_dataset.attrs['history'] = 'date_created: {0}'.format(datetime.utcnow())
    # Projection metadata is taken from the last processed variable.
    output_dataset.attrs['proj4'] = self.data.attrs['proj4']
    output_dataset.attrs['geotransform'] = self.data.attrs['geotransform']
    output_dataset.to_netcdf(netcdf_file_path)
4,609
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/grid/grid_to_gssha.py#L1193-L1307
[ "def", "_sumterm_prime", "(", "lexer", ")", ":", "tok", "=", "next", "(", "lexer", ")", "# '|' XORTERM SUMTERM'", "if", "isinstance", "(", "tok", ",", "OP_or", ")", ":", "xorterm", "=", "_xorterm", "(", "lexer", ")", "sumterm_prime", "=", "_sumterm_prime", "(", "lexer", ")", "if", "sumterm_prime", "is", "None", ":", "return", "xorterm", "else", ":", "return", "(", "'or'", ",", "xorterm", ",", "sumterm_prime", ")", "# null", "else", ":", "lexer", ".", "unpop_token", "(", "tok", ")", "return", "None" ]
Creates a csv file from a query of devices and attributes over a time range .
def export(self, **kwargs):
    """Create a CSV export from a query of devices/attributes over a time range.

    Recognized kwargs: ``applicationId`` (path), ``query`` (request body),
    ``losantdomain`` (header), ``_actions``/``_links``/``_embedded``
    (query params).  Issues a POST to
    ``/applications/{applicationId}/data/export`` and returns the response.
    """
    query_params = {"_actions": "false", "_links": "true", "_embedded": "true"}
    # Overlay any caller-supplied values onto the default query params.
    for param in ("_actions", "_links", "_embedded"):
        if param in kwargs:
            query_params[param] = kwargs[param]
    headers = {}
    if "losantdomain" in kwargs:
        headers["losantdomain"] = kwargs["losantdomain"]
    body = kwargs["query"] if "query" in kwargs else None
    path_params = {}
    if "applicationId" in kwargs:
        path_params["applicationId"] = kwargs["applicationId"]
    path = "/applications/{applicationId}/data/export".format(**path_params)
    return self.client.request("POST", path, params=query_params,
                               headers=headers, body=body)
4,610
https://github.com/Losant/losant-rest-python/blob/75b20decda0e999002f21811c3508f087e7f13b5/losantrest/data.py#L34-L80
[ "def", "_normalize_options", "(", "options", ")", ":", "normalized_options", "=", "{", "}", "for", "key", ",", "value", "in", "iteritems", "(", "options", ")", ":", "optname", "=", "str", "(", "key", ")", ".", "lower", "(", ")", "intname", "=", "INTERNAL_URI_OPTION_NAME_MAP", ".", "get", "(", "optname", ",", "key", ")", "normalized_options", "[", "intname", "]", "=", "options", "[", "key", "]", "return", "normalized_options" ]
Given a message in message return a response in the appropriate format .
def _generate(self, message):
    """POST *message* to the MaryTTS ``/process`` endpoint and return the
    raw response body (audio or text, depending on ``self.output_type``).

    Raises Exception when the server does not answer with HTTP 200.
    NOTE(review): uses ``httplib`` -- this snippet is Python-2 code.
    """
    # Form parameters understood by the MaryTTS /process endpoint.
    raw_params = {"INPUT_TEXT": message.encode('UTF8'),
                  "INPUT_TYPE": self.input_type,
                  "OUTPUT_TYPE": self.output_type,
                  "LOCALE": self._locale,
                  "AUDIO": self.audio,
                  "VOICE": self._voice,
                  }
    params = urlencode(raw_params)
    headers = {}
    logging.debug('maryclient: generate, raw_params=%s' % repr(raw_params))
    # Open connection to self._host, self._port.
    conn = httplib.HTTPConnection(self._host, self._port)
    #conn.set_debuglevel(5)
    conn.request("POST", "/process", params, headers)
    response = conn.getresponse()
    if response.status != 200:
        # Surface the server's headers before failing.
        logging.error(response.getheaders())
        raise Exception("{0}: {1}".format(response.status, response.reason))
    return response.read()
4,611
https://github.com/gooofy/py-marytts/blob/f2693531ea841d21a7b94be0304c3dc8f1d9e5f7/marytts/__init__.py#L84-L110
[ "async", "def", "throttle_update_heaters", "(", "self", ")", ":", "if", "(", "self", ".", "_throttle_time", "is", "not", "None", "and", "dt", ".", "datetime", ".", "now", "(", ")", "-", "self", ".", "_throttle_time", "<", "MIN_TIME_BETWEEN_UPDATES", ")", ":", "return", "self", ".", "_throttle_time", "=", "dt", ".", "datetime", ".", "now", "(", ")", "await", "self", ".", "update_heaters", "(", ")" ]
The message received from the queue specifies a method of the class the actor represents . This invokes it . If the communication is an ASK , the result is sent back to the channel included in the message as an ASKRESPONSE .
def receive(self, msg):
    """Invoke on the wrapped object the method named in *msg*.

    A TELL 'stop' message shuts the actor down.  Any other message calls
    the named method with ``msg[PARAMS] == (args, kwargs)``.  If the
    communication is an ASK, the result (or the raised exception) is sent
    back via ``send_response``; a failing TELL is only printed.

    Fix: ``except Exception, e`` and ``print e`` are Python-2-only syntax
    (a SyntaxError on Python 3); rewritten with the ``as``/function forms,
    preserving the original behavior.
    """
    if msg[TYPE] == TELL and msg[METHOD] == 'stop':
        self.running = False
        self.future_manager.stop()
    else:
        result = None
        try:
            invoke = getattr(self._obj, msg[METHOD])
            args, kwargs = msg[PARAMS]
            result = invoke(*args, **kwargs)
        except Exception as e:  # broad on purpose: error is forwarded to caller
            if msg[TYPE] == TELL:
                # Fire-and-forget call: nobody is waiting, just report it.
                print(e)
                return
            # ASK call: hand the exception back to the caller as the result.
            result = e
        self.send_response(result, msg)
4,612
https://github.com/pedrotgn/pyactor/blob/24d98d134dd4228f2ba38e83611e9c3f50ec2fd4/pyactor/thread/actor.py#L100-L128
[ "def", "write_json_file", "(", "self", ",", "path", ")", ":", "with", "open", "(", "path", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "self", ".", "to_json", "(", ")", ")" ]
Downloads a file from the specified URL .
def download_file(url):
    """Download a file from the specified URL.

    Returns the response body as text, or ``None`` when the server does
    not answer with HTTP 200.
    """
    response = requests.get(url)
    # Fix: `is not 200` compared object identity and only worked through
    # CPython's small-int caching (and warns on 3.8+); compare by value.
    if response.status_code != 200:
        return None
    return response.text
4,613
https://github.com/timofurrer/ramlient/blob/e93092252635a6b3b0aca2c390b9f820368b791c/ramlient/utils.py#L34-L46
[ "def", "pad_width", "(", "model", ",", "table_padding", "=", "0.85", ",", "tabs_padding", "=", "1.2", ")", ":", "if", "isinstance", "(", "model", ",", "Row", ")", ":", "vals", "=", "[", "pad_width", "(", "child", ")", "for", "child", "in", "model", ".", "children", "]", "width", "=", "np", ".", "max", "(", "[", "v", "for", "v", "in", "vals", "if", "v", "is", "not", "None", "]", ")", "elif", "isinstance", "(", "model", ",", "Column", ")", ":", "vals", "=", "[", "pad_width", "(", "child", ")", "for", "child", "in", "model", ".", "children", "]", "width", "=", "np", ".", "sum", "(", "[", "v", "for", "v", "in", "vals", "if", "v", "is", "not", "None", "]", ")", "elif", "isinstance", "(", "model", ",", "Tabs", ")", ":", "vals", "=", "[", "pad_width", "(", "t", ")", "for", "t", "in", "model", ".", "tabs", "]", "width", "=", "np", ".", "max", "(", "[", "v", "for", "v", "in", "vals", "if", "v", "is", "not", "None", "]", ")", "for", "model", "in", "model", ".", "tabs", ":", "model", ".", "width", "=", "width", "width", "=", "int", "(", "tabs_padding", "*", "width", ")", "elif", "isinstance", "(", "model", ",", "DataTable", ")", ":", "width", "=", "model", ".", "width", "model", ".", "width", "=", "int", "(", "table_padding", "*", "width", ")", "elif", "isinstance", "(", "model", ",", "(", "WidgetBox", ",", "Div", ")", ")", ":", "width", "=", "model", ".", "width", "elif", "model", ":", "width", "=", "model", ".", "plot_width", "else", ":", "width", "=", "0", "return", "width" ]
Returns the sub-dispatcher induced by the given node and edge bunches .
def get_sub_dsp(self, nodes_bunch, edges_bunch=None):
    """Return the sub-dispatcher induced by *nodes_bunch*, optionally
    removing the edges in *edges_bunch*.

    Function nodes whose inputs are not all inside *nodes_bunch*, function
    nodes left with no outgoing edges, and isolated nodes are dropped;
    default values are restricted to the surviving nodes.
    """
    # Get real paths.
    nodes_bunch = [self.get_node(u)[1][0] for u in nodes_bunch]
    # Define an empty dispatcher.
    sub_dsp = self.copy_structure(dmap=self.dmap.subgraph(nodes_bunch).copy())
    # Namespace shortcuts for speed.
    nodes, dmap_out_degree = sub_dsp.nodes, sub_dsp.dmap.out_degree
    dmap_dv, dmap_rm_edge = self.default_values, sub_dsp.dmap.remove_edge
    dmap_rm_node = sub_dsp.dmap.remove_node
    # Remove function nodes that has not whole inputs available.
    for u in nodes_bunch:
        n = nodes[u].get('inputs', None)  # Function inputs.
        # No all inputs
        if n is not None and not set(n).issubset(nodes_bunch):
            dmap_rm_node(u)  # Remove function node.
    # Remove edges that are not in edges_bunch.
    if edges_bunch is not None:
        for e in edges_bunch:  # Iterate sub-graph edges.
            dmap_rm_edge(*e)  # Remove edge.
    # Remove function node with no outputs.
    for u in [u for u, n in sub_dsp.dmap.nodes.items()
              if n['type'] == 'function']:
        # noinspection PyCallingNonCallable
        if not dmap_out_degree(u):  # No outputs.
            dmap_rm_node(u)  # Remove function node.
    from networkx import isolates
    # Remove isolate nodes from sub-graph.
    sub_dsp.dmap.remove_nodes_from(list(isolates(sub_dsp.dmap)))
    # Set default values.
    sub_dsp.default_values = {k: dmap_dv[k] for k in dmap_dv if k in nodes}
    return sub_dsp
4,614
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1048-L1146
[ "def", "update_null_primary", "(", "hdu_in", ",", "hdu", "=", "None", ")", ":", "if", "hdu", "is", "None", ":", "hdu", "=", "fits", ".", "PrimaryHDU", "(", "header", "=", "hdu_in", ".", "header", ")", "else", ":", "hdu", "=", "hdu_in", "hdu", ".", "header", ".", "remove", "(", "'FILENAME'", ")", "return", "hdu" ]
Returns all data nodes of the dispatcher .
def data_nodes(self):
    """Return the dispatcher's data nodes as an ``{id: attributes}`` dict."""
    result = {}
    for node_id, attrs in self.nodes.items():
        if attrs['type'] == 'data':
            result[node_id] = attrs
    return result
4,615
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1384-L1393
[ "def", "format_rst", "(", "self", ")", ":", "res", "=", "''", "num_cols", "=", "len", "(", "self", ".", "header", ")", "col_width", "=", "25", "for", "_", "in", "range", "(", "num_cols", ")", ":", "res", "+=", "''", ".", "join", "(", "[", "'='", "for", "_", "in", "range", "(", "col_width", "-", "1", ")", "]", ")", "+", "' '", "res", "+=", "'\\n'", "for", "c", "in", "self", ".", "header", ":", "res", "+=", "c", ".", "ljust", "(", "col_width", ")", "res", "+=", "'\\n'", "for", "_", "in", "range", "(", "num_cols", ")", ":", "res", "+=", "''", ".", "join", "(", "[", "'='", "for", "_", "in", "range", "(", "col_width", "-", "1", ")", "]", ")", "+", "' '", "res", "+=", "'\\n'", "for", "row", "in", "self", ".", "arr", ":", "for", "c", "in", "row", ":", "res", "+=", "self", ".", "force_to_string", "(", "c", ")", ".", "ljust", "(", "col_width", ")", "res", "+=", "'\\n'", "for", "_", "in", "range", "(", "num_cols", ")", ":", "res", "+=", "''", ".", "join", "(", "[", "'='", "for", "_", "in", "range", "(", "col_width", "-", "1", ")", "]", ")", "+", "' '", "res", "+=", "'\\n'", "return", "res" ]
Returns all function nodes of the dispatcher .
def function_nodes(self):
    """Return the dispatcher's function nodes as an ``{id: attributes}`` dict."""
    result = {}
    for node_id, attrs in self.nodes.items():
        if attrs['type'] == 'function':
            result[node_id] = attrs
    return result
4,616
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1396-L1405
[ "def", "guess_peb_size", "(", "path", ")", ":", "file_offset", "=", "0", "offsets", "=", "[", "]", "f", "=", "open", "(", "path", ",", "'rb'", ")", "f", ".", "seek", "(", "0", ",", "2", ")", "file_size", "=", "f", ".", "tell", "(", ")", "+", "1", "f", ".", "seek", "(", "0", ")", "for", "_", "in", "range", "(", "0", ",", "file_size", ",", "FILE_CHUNK_SZ", ")", ":", "buf", "=", "f", ".", "read", "(", "FILE_CHUNK_SZ", ")", "for", "m", "in", "re", ".", "finditer", "(", "UBI_EC_HDR_MAGIC", ",", "buf", ")", ":", "start", "=", "m", ".", "start", "(", ")", "if", "not", "file_offset", ":", "file_offset", "=", "start", "idx", "=", "start", "else", ":", "idx", "=", "start", "+", "file_offset", "offsets", ".", "append", "(", "idx", ")", "file_offset", "+=", "FILE_CHUNK_SZ", "f", ".", "close", "(", ")", "occurances", "=", "{", "}", "for", "i", "in", "range", "(", "0", ",", "len", "(", "offsets", ")", ")", ":", "try", ":", "diff", "=", "offsets", "[", "i", "]", "-", "offsets", "[", "i", "-", "1", "]", "except", ":", "diff", "=", "offsets", "[", "i", "]", "if", "diff", "not", "in", "occurances", ":", "occurances", "[", "diff", "]", "=", "0", "occurances", "[", "diff", "]", "+=", "1", "most_frequent", "=", "0", "block_size", "=", "None", "for", "offset", "in", "occurances", ":", "if", "occurances", "[", "offset", "]", ">", "most_frequent", ":", "most_frequent", "=", "occurances", "[", "offset", "]", "block_size", "=", "offset", "return", "block_size" ]
Returns all sub - dispatcher nodes of the dispatcher .
def sub_dsp_nodes(self):
    """Return the dispatcher's sub-dispatcher nodes as an ``{id: attributes}`` dict."""
    result = {}
    for node_id, attrs in self.nodes.items():
        if attrs['type'] == 'dispatcher':
            result[node_id] = attrs
    return result
4,617
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1408-L1418
[ "def", "format_rst", "(", "self", ")", ":", "res", "=", "''", "num_cols", "=", "len", "(", "self", ".", "header", ")", "col_width", "=", "25", "for", "_", "in", "range", "(", "num_cols", ")", ":", "res", "+=", "''", ".", "join", "(", "[", "'='", "for", "_", "in", "range", "(", "col_width", "-", "1", ")", "]", ")", "+", "' '", "res", "+=", "'\\n'", "for", "c", "in", "self", ".", "header", ":", "res", "+=", "c", ".", "ljust", "(", "col_width", ")", "res", "+=", "'\\n'", "for", "_", "in", "range", "(", "num_cols", ")", ":", "res", "+=", "''", ".", "join", "(", "[", "'='", "for", "_", "in", "range", "(", "col_width", "-", "1", ")", "]", ")", "+", "' '", "res", "+=", "'\\n'", "for", "row", "in", "self", ".", "arr", ":", "for", "c", "in", "row", ":", "res", "+=", "self", ".", "force_to_string", "(", "c", ")", ".", "ljust", "(", "col_width", ")", "res", "+=", "'\\n'", "for", "_", "in", "range", "(", "num_cols", ")", ":", "res", "+=", "''", ".", "join", "(", "[", "'='", "for", "_", "in", "range", "(", "col_width", "-", "1", ")", "]", ")", "+", "' '", "res", "+=", "'\\n'", "return", "res" ]
Constructs a BlueDispatcher out of the current object .
def blue(self, memo=None):
    """Construct a BlueDispatcher blueprint out of the current dispatcher.

    Each node is recorded as a deferred ``add_data`` / ``add_function`` /
    ``add_dsp`` call in node-index order; *memo* caches already-converted
    dispatchers so shared sub-dispatchers are translated only once.
    """
    memo = {} if memo is None else memo
    # Already converted: reuse the cached blueprint.
    if self in memo:
        return memo[self]
    from .utils.dsp import map_list
    from .utils.blue import BlueDispatcher, _parent_blue
    # Register in the memo *before* recursing so cycles terminate.
    memo[self] = blue = BlueDispatcher(
        executor=self.executor, name=self.name, raises=self.raises,
        description=self.__doc__)
    dfl = self.default_values
    key_map_data = ['data_id', {'value': 'default_value'}]
    pred, succ = self.dmap.pred, self.dmap.succ

    def _set_weight(n, r, d):
        # Collect per-neighbour edge weights under key *n*, if any exist.
        d = {i: j['weight'] for i, j in d.items() if 'weight' in j}
        if d:
            r[n] = d

    # Replay nodes in their original insertion (index) order.
    for k, v in sorted(self.nodes.items(), key=lambda x: x[1]['index']):
        v = v.copy()
        t = v.pop('type')
        del v['index']
        if t == 'data':
            method = 'add_data'
            combine_dicts(map_list(key_map_data, k, dfl.get(k, {})), base=v)
        elif t in ('function', 'dispatcher'):
            method = 'add_%s' % t
            if t == 'dispatcher':
                t = 'dsp'
            v['%s_id' % t] = k
            del v['wait_inputs']
            _set_weight('inp_weight', v, pred[k])
            _set_weight('out_weight', v, succ[k])
            if 'function' in v:
                # Recursively convert nested dispatchers/functions.
                v[t] = _parent_blue(v.pop('function'), memo)
        blue.deferred.append((method, v))
    return blue
4,618
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1437-L1485
[ "def", "secret_file", "(", "filename", ")", ":", "filestat", "=", "os", ".", "stat", "(", "abspath", "(", "filename", ")", ")", "if", "stat", ".", "S_ISREG", "(", "filestat", ".", "st_mode", ")", "==", "0", "and", "stat", ".", "S_ISLNK", "(", "filestat", ".", "st_mode", ")", "==", "0", ":", "e_msg", "=", "\"Secret file %s must be a real file or symlink\"", "%", "filename", "raise", "aomi", ".", "exceptions", ".", "AomiFile", "(", "e_msg", ")", "if", "platform", ".", "system", "(", ")", "!=", "\"Windows\"", ":", "if", "filestat", ".", "st_mode", "&", "stat", ".", "S_IROTH", "or", "filestat", ".", "st_mode", "&", "stat", ".", "S_IWOTH", "or", "filestat", ".", "st_mode", "&", "stat", ".", "S_IWGRP", ":", "e_msg", "=", "\"Secret file %s has too loose permissions\"", "%", "filename", "raise", "aomi", ".", "exceptions", ".", "AomiFile", "(", "e_msg", ")" ]
Extends Dispatcher calling each deferred operation of given Blueprints .
def extend ( self , * blues , memo = None ) : from . utils . blue import BlueDispatcher as Blue return Blue ( ) . extend ( * blues , memo = memo ) . register ( self , memo = memo )
4,619
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1487-L1519
[ "def", "reconnect_redis", "(", "self", ")", ":", "if", "self", ".", "shared_client", "and", "Storage", ".", "storage", ":", "return", "Storage", ".", "storage", "storage", "=", "Redis", "(", "port", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_PORT", ",", "host", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_HOST", ",", "db", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_DB", ",", "password", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_PASSWORD", ")", "if", "self", ".", "shared_client", ":", "Storage", ".", "storage", "=", "storage", "return", "storage" ]
Evaluates the minimum workflow and data outputs of the dispatcher model from given inputs .
def dispatch ( self , inputs = None , outputs = None , cutoff = None , inputs_dist = None , wildcard = False , no_call = False , shrink = False , rm_unused_nds = False , select_output_kw = None , _wait_in = None , stopper = None , executor = False , sol_name = ( ) ) : dsp = self if not no_call : if shrink : # Pre shrink. dsp = self . shrink_dsp ( inputs , outputs , cutoff , inputs_dist , wildcard ) elif outputs : dsp = self . get_sub_dsp_from_workflow ( outputs , self . dmap , reverse = True , blockers = inputs , wildcard = wildcard ) # Initialize. self . solution = sol = self . solution . __class__ ( dsp , inputs , outputs , wildcard , cutoff , inputs_dist , no_call , rm_unused_nds , _wait_in , full_name = sol_name ) # Dispatch. sol . _run ( stopper = stopper , executor = executor ) if select_output_kw : return selector ( dictionary = sol , * * select_output_kw ) # Return the evaluated data outputs. return sol
4,620
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1521-L1679
[ "def", "vapour_pressure", "(", "Temperature", ",", "element", ")", ":", "if", "element", "==", "\"Rb\"", ":", "Tmelt", "=", "39.30", "+", "273.15", "# K.", "if", "Temperature", "<", "Tmelt", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.857", "-", "4215.0", "/", "Temperature", ")", "# Torr.", "else", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.312", "-", "4040.0", "/", "Temperature", ")", "# Torr.", "elif", "element", "==", "\"Cs\"", ":", "Tmelt", "=", "28.5", "+", "273.15", "# K.", "if", "Temperature", "<", "Tmelt", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.711", "-", "3999.0", "/", "Temperature", ")", "# Torr.", "else", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.165", "-", "3830.0", "/", "Temperature", ")", "# Torr.", "else", ":", "s", "=", "str", "(", "element", ")", "s", "+=", "\" is not an element in the database for this function.\"", "raise", "ValueError", "(", "s", ")", "P", "=", "P", "*", "101325.0", "/", "760.0", "# Pascals.", "return", "P" ]
Returns a reduced dispatcher .
def shrink_dsp ( self , inputs = None , outputs = None , cutoff = None , inputs_dist = None , wildcard = True ) : bfs = None if inputs : # Get all data nodes no wait inputs. wait_in = self . _get_wait_in ( flag = False ) # Evaluate the workflow graph without invoking functions. o = self . dispatch ( inputs , outputs , cutoff , inputs_dist , wildcard , True , False , True , _wait_in = wait_in ) data_nodes = self . data_nodes # Get data nodes. from . utils . alg import _union_workflow , _convert_bfs bfs = _union_workflow ( o ) # bfg edges. # Set minimum initial distances. if inputs_dist : inputs_dist = combine_dicts ( o . dist , inputs_dist ) else : inputs_dist = o . dist # Set data nodes to wait inputs. wait_in = self . _get_wait_in ( flag = True ) while True : # Start shrinking loop. # Evaluate the workflow graph without invoking functions. o = self . dispatch ( inputs , outputs , cutoff , inputs_dist , wildcard , True , False , False , _wait_in = wait_in ) _union_workflow ( o , bfs = bfs ) # Update bfs. n_d , status = o . _remove_wait_in ( ) # Remove wait input flags. if not status : break # Stop iteration. # Update inputs. inputs = n_d . intersection ( data_nodes ) . union ( inputs ) # Update outputs and convert bfs in DiGraphs. outputs , bfs = outputs or o , _convert_bfs ( bfs ) elif not outputs : return self . copy_structure ( ) # Empty Dispatcher. # Get sub dispatcher breadth-first-search graph. dsp = self . _get_dsp_from_bfs ( outputs , bfs_graphs = bfs ) return dsp
4,621
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1684-L1823
[ "def", "thaw", "(", "vault_client", ",", "src_file", ",", "opt", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "src_file", ")", ":", "raise", "aomi", ".", "exceptions", ".", "AomiFile", "(", "\"%s does not exist\"", "%", "src_file", ")", "tmp_dir", "=", "ensure_tmpdir", "(", ")", "zip_file", "=", "thaw_decrypt", "(", "vault_client", ",", "src_file", ",", "tmp_dir", ",", "opt", ")", "archive", "=", "zipfile", ".", "ZipFile", "(", "zip_file", ",", "'r'", ")", "for", "archive_file", "in", "archive", ".", "namelist", "(", ")", ":", "archive", ".", "extract", "(", "archive_file", ",", "tmp_dir", ")", "os", ".", "chmod", "(", "\"%s/%s\"", "%", "(", "tmp_dir", ",", "archive_file", ")", ",", "0o640", ")", "LOG", ".", "debug", "(", "\"Extracted %s from archive\"", ",", "archive_file", ")", "LOG", ".", "info", "(", "\"Thawing secrets into %s\"", ",", "opt", ".", "secrets", ")", "config", "=", "get_secretfile", "(", "opt", ")", "Context", ".", "load", "(", "config", ",", "opt", ")", ".", "thaw", "(", "tmp_dir", ")" ]
Returns the sub - dispatcher induced by the workflow from outputs .
def _get_dsp_from_bfs ( self , outputs , bfs_graphs = None ) : bfs = bfs_graphs [ NONE ] if bfs_graphs is not None else self . dmap # Get sub dispatcher breadth-first-search graph. dsp = self . get_sub_dsp_from_workflow ( sources = outputs , graph = bfs , reverse = True , _update_links = False ) # Namespace shortcuts. succ , nodes , pred = dsp . dmap . succ , dsp . nodes , dsp . dmap . pred rm_edges , nds = dsp . dmap . remove_edges_from , dsp . data_nodes from . utils . alg import _nodes , _get_sub_out , _update_io for n in dsp . sub_dsp_nodes : a = nodes [ n ] = nodes [ n ] . copy ( ) bfs = bfs_graphs [ n ] if bfs_graphs is not None else None out = _get_sub_out ( a , succ [ n ] ) if 'input_domain' in a : out . update ( _nodes ( a [ 'inputs' ] . values ( ) ) ) a [ 'function' ] = a [ 'function' ] . _get_dsp_from_bfs ( out , bfs ) i , o = _update_io ( a , pred [ n ] , succ [ n ] ) # Unreachable nodes. rm_edges ( { ( u , n ) for u in i } . union ( ( ( n , u ) for u in o ) ) ) return dsp
4,622
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/schedula/dispatcher.py#L1825-L1868
[ "def", "update", "(", "self", ",", "data", ")", ":", "self", ".", "_md", ".", "update", "(", "data", ")", "bufpos", "=", "self", ".", "_nbytes", "&", "63", "self", ".", "_nbytes", "+=", "len", "(", "data", ")", "if", "self", ".", "_rarbug", "and", "len", "(", "data", ")", ">", "64", ":", "dpos", "=", "self", ".", "block_size", "-", "bufpos", "while", "dpos", "+", "self", ".", "block_size", "<=", "len", "(", "data", ")", ":", "self", ".", "_corrupt", "(", "data", ",", "dpos", ")", "dpos", "+=", "self", ".", "block_size" ]
Attaches a mehtod that will be called when the future finishes .
def add_callback ( self , method ) : from_actor = get_current ( ) if from_actor is not None : callback = ( method , from_actor . channel , from_actor . url ) with self . __condition : if self . __state is not FINISHED : self . __callbacks . append ( callback ) return # Invoke the callback directly # msg = TellRequest(TELL, method, [self], from_actor.url) msg = { TYPE : TELL , METHOD : method , PARAMS : ( [ self ] , { } ) , TO : from_actor . url } from_actor . channel . send ( msg ) else : raise FutureError ( "add_callback only works when called " + "from inside an actor" )
4,623
https://github.com/pedrotgn/pyactor/blob/24d98d134dd4228f2ba38e83611e9c3f50ec2fd4/pyactor/thread/future.py#L60-L88
[ "def", "duplicate_ids", "(", "instance", ")", ":", "if", "instance", "[", "'type'", "]", "!=", "'bundle'", "or", "'objects'", "not", "in", "instance", ":", "return", "unique_ids", "=", "{", "}", "for", "obj", "in", "instance", "[", "'objects'", "]", ":", "if", "'id'", "not", "in", "obj", "or", "'modified'", "not", "in", "obj", ":", "continue", "elif", "obj", "[", "'id'", "]", "not", "in", "unique_ids", ":", "unique_ids", "[", "obj", "[", "'id'", "]", "]", "=", "obj", "[", "'modified'", "]", "elif", "obj", "[", "'modified'", "]", "==", "unique_ids", "[", "obj", "[", "'id'", "]", "]", ":", "yield", "JSONError", "(", "\"Duplicate ID '%s' has identical `modified` timestamp.\"", "\" If they are different versions of the same object, \"", "\"they should have different `modified` properties.\"", "%", "obj", "[", "'id'", "]", ",", "instance", "[", "'id'", "]", ",", "'duplicate-ids'", ")" ]
Sends the query to the actor for it to start executing the work .
def send_work ( self ) : if self . __set_running ( ) : # msg = FutureRequest(FUTURE, self.__method, self.__params, # self.__channel, self.__target, self.__id) msg = { TYPE : FUTURE , METHOD : self . __method , PARAMS : self . __params , CHANNEL : self . __channel , TO : self . __target , RPC_ID : self . __id } self . __actor_channel . send ( msg ) else : raise FutureError ( "Future already running." )
4,624
https://github.com/pedrotgn/pyactor/blob/24d98d134dd4228f2ba38e83611e9c3f50ec2fd4/pyactor/thread/future.py#L144-L160
[ "def", "write_lines_to_file", "(", "cls_name", ",", "filename", ",", "lines", ",", "metadata_dict", ")", ":", "metadata_dict", "=", "metadata_dict", "or", "{", "}", "header_line", "=", "\"%s%s\"", "%", "(", "_HEADER_PREFIX", ",", "cls_name", ")", "metadata_line", "=", "\"%s%s\"", "%", "(", "_METADATA_PREFIX", ",", "json", ".", "dumps", "(", "metadata_dict", ",", "sort_keys", "=", "True", ")", ")", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "filename", ",", "\"wb\"", ")", "as", "f", ":", "for", "line", "in", "[", "header_line", ",", "metadata_line", "]", ":", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "line", ")", ")", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "\"\\n\"", ")", ")", "if", "lines", ":", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "\"\\n\"", ".", "join", "(", "lines", ")", ")", ")", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "\"\\n\"", ")", ")" ]
Sets the return value of work associated with the future . Only called internally .
def set_result ( self , result ) : with self . __condition : self . __result = result self . __state = FINISHED self . __condition . notify_all ( ) self . _invoke_callbacks ( )
4,625
https://github.com/pedrotgn/pyactor/blob/24d98d134dd4228f2ba38e83611e9c3f50ec2fd4/pyactor/thread/future.py#L173-L181
[ "def", "del_records", "(", "c_table_cd", ":", "str", ",", "tables", ":", "I2B2Tables", ")", "->", "int", ":", "conn", "=", "tables", ".", "ont_connection", "table", "=", "tables", ".", "schemes", "return", "conn", ".", "execute", "(", "table", ".", "delete", "(", ")", ".", "where", "(", "table", ".", "c", ".", "c_table_cd", "==", "c_table_cd", ")", ")", ".", "rowcount" ]
Sets the result of the future as being the given exception . Only called internally .
def set_exception ( self , exception ) : with self . __condition : self . __exception = exception self . __state = FINISHED self . __condition . notify_all ( ) self . _invoke_callbacks ( )
4,626
https://github.com/pedrotgn/pyactor/blob/24d98d134dd4228f2ba38e83611e9c3f50ec2fd4/pyactor/thread/future.py#L183-L191
[ "def", "generate_oauth_signature", "(", "self", ",", "params", ",", "url", ")", ":", "if", "\"oauth_signature\"", "in", "params", ".", "keys", "(", ")", ":", "del", "params", "[", "\"oauth_signature\"", "]", "base_request_uri", "=", "quote", "(", "url", ",", "\"\"", ")", "params", "=", "self", ".", "sorted_params", "(", "params", ")", "params", "=", "self", ".", "normalize_parameters", "(", "params", ")", "query_params", "=", "[", "\"{param_key}%3D{param_value}\"", ".", "format", "(", "param_key", "=", "key", ",", "param_value", "=", "value", ")", "for", "key", ",", "value", "in", "params", ".", "items", "(", ")", "]", "query_string", "=", "\"%26\"", ".", "join", "(", "query_params", ")", "string_to_sign", "=", "\"%s&%s&%s\"", "%", "(", "self", ".", "method", ",", "base_request_uri", ",", "query_string", ")", "consumer_secret", "=", "str", "(", "self", ".", "consumer_secret", ")", "if", "self", ".", "version", "not", "in", "[", "\"v1\"", ",", "\"v2\"", "]", ":", "consumer_secret", "+=", "\"&\"", "hash_signature", "=", "HMAC", "(", "consumer_secret", ".", "encode", "(", ")", ",", "str", "(", "string_to_sign", ")", ".", "encode", "(", ")", ",", "sha256", ")", ".", "digest", "(", ")", "return", "b64encode", "(", "hash_signature", ")", ".", "decode", "(", "\"utf-8\"", ")", ".", "replace", "(", "\"\\n\"", ",", "\"\"", ")" ]
Compute the angle between vector x and y
def angle_between_vectors ( x , y ) : dp = dot_product ( x , y ) if dp == 0 : return 0 xm = magnitude ( x ) ym = magnitude ( y ) return math . acos ( dp / ( xm * ym ) ) * ( 180. / math . pi )
4,627
https://github.com/theodoregoetz/wernher/blob/ef5d3aabe24e532b5eab33cd0212b2dbc2c9022e/sandbox/Flight.py#L31-L38
[ "def", "_process_state_final_run", "(", "self", ",", "job_record", ")", ":", "uow", "=", "self", ".", "uow_dao", ".", "get_one", "(", "job_record", ".", "related_unit_of_work", ")", "if", "uow", ".", "is_processed", ":", "self", ".", "update_job", "(", "job_record", ",", "uow", ",", "job", ".", "STATE_PROCESSED", ")", "elif", "uow", ".", "is_noop", ":", "self", ".", "update_job", "(", "job_record", ",", "uow", ",", "job", ".", "STATE_NOOP", ")", "elif", "uow", ".", "is_canceled", ":", "self", ".", "update_job", "(", "job_record", ",", "uow", ",", "job", ".", "STATE_SKIPPED", ")", "elif", "uow", ".", "is_invalid", ":", "msg", "=", "'Job {0}: UOW for {1}@{2} is in {3}; '", "'relying on the Garbage Collector to either recycle or cancel the UOW.'", ".", "format", "(", "job_record", ".", "db_id", ",", "job_record", ".", "process_name", ",", "job_record", ".", "timeperiod", ",", "uow", ".", "state", ")", "self", ".", "_log_message", "(", "INFO", ",", "job_record", ".", "process_name", ",", "job_record", ".", "timeperiod", ",", "msg", ")", "else", ":", "msg", "=", "'Suppressed creating UOW for {0}@{1}; Job is in {2}; uow is in {3}'", ".", "format", "(", "job_record", ".", "process_name", ",", "job_record", ".", "timeperiod", ",", "job_record", ".", "state", ",", "uow", ".", "state", ")", "self", ".", "_log_message", "(", "INFO", ",", "job_record", ".", "process_name", ",", "job_record", ".", "timeperiod", ",", "msg", ")", "timetable_tree", "=", "self", ".", "timetable", ".", "get_tree", "(", "job_record", ".", "process_name", ")", "timetable_tree", ".", "build_tree", "(", ")" ]
Waiting for an incoming connection from a reverse forwarded port . Note that this results in a kernel block until a connection is received .
def _ssh_forward_accept ( ssh_session , timeout_ms ) : ssh_channel = c_ssh_forward_accept ( c_void_p ( ssh_session ) , c_int ( timeout_ms ) ) if ssh_channel is None : raise SshTimeoutException ( ) return ssh_channel
4,628
https://github.com/dsoprea/PySecure/blob/ff7e01a0a77e79564cb00b6e38b4e6f9f88674f0/pysecure/adapters/ssha.py#L249-L260
[ "def", "add_metadata", "(", "file_name", ",", "title", ",", "artist", ",", "album", ")", ":", "tags", "=", "EasyMP3", "(", "file_name", ")", "if", "title", ":", "tags", "[", "\"title\"", "]", "=", "title", "if", "artist", ":", "tags", "[", "\"artist\"", "]", "=", "artist", "if", "album", ":", "tags", "[", "\"album\"", "]", "=", "album", "tags", ".", "save", "(", ")", "return", "file_name" ]
Execute a remote command . This functionality does not support more than one command to be executed on the same channel so we create a dedicated channel at the session level than allowing direct access at the channel level .
def execute ( self , cmd , block_size = DEFAULT_EXECUTE_READ_BLOCK_SIZE ) : with SshChannel ( self ) as sc : self . __log . debug ( "Executing command: %s" % ( cmd ) ) sc . open_session ( ) sc . request_exec ( cmd ) buffer_ = bytearray ( ) while 1 : bytes = sc . read ( block_size ) yield bytes if len ( bytes ) < block_size : break
4,629
https://github.com/dsoprea/PySecure/blob/ff7e01a0a77e79564cb00b6e38b4e6f9f88674f0/pysecure/adapters/ssha.py#L476-L495
[ "def", "user_agent", "(", "self", ",", "text", ",", "*", "*", "kwargs", ")", ":", "indicator_obj", "=", "UserAgent", "(", "text", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_indicator", "(", "indicator_obj", ")" ]
Project File Read from File Method
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile , force_relative = True ) : self . project_directory = directory with tmp_chdir ( directory ) : # Headers to ignore HEADERS = ( 'GSSHAPROJECT' , ) # WMS Cards to include (don't discount as comments) WMS_CARDS = ( '#INDEXGRID_GUID' , '#PROJECTION_FILE' , '#LandSoil' , '#CHANNEL_POINT_INPUT_WMS' ) GSSHAPY_CARDS = ( '#GSSHAPY_EVENT_YML' , ) with open ( path , 'r' ) as f : for line in f : if not line . strip ( ) : # Skip empty lines continue elif '#' in line . split ( ) [ 0 ] and line . split ( ) [ 0 ] not in WMS_CARDS + GSSHAPY_CARDS : # Skip comments designated by the hash symbol # (with the exception of WMS_CARDS and GSSHAPY_CARDS) continue try : card = self . _extractCard ( line , force_relative ) except : card = self . _extractDirectoryCard ( line , force_relative ) # Now that the cardName and cardValue are separated # load them into the gsshapy objects if card [ 'name' ] not in HEADERS : # Create GSSHAPY Project Card object prjCard = ProjectCard ( name = card [ 'name' ] , value = card [ 'value' ] ) # Associate ProjectCard with ProjectFile prjCard . projectFile = self # Extract MAP_TYPE card value for convenience working # with output maps if card [ 'name' ] == 'MAP_TYPE' : self . mapType = int ( card [ 'value' ] ) # Assign properties self . srid = spatialReferenceID self . name = name self . fileExtension = extension
4,630
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L244-L296
[ "def", "syzygyJD", "(", "jd", ")", ":", "sun", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "SUN", ",", "jd", ")", "moon", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "MOON", ",", "jd", ")", "dist", "=", "angle", ".", "distance", "(", "sun", ",", "moon", ")", "# Offset represents the Syzygy type. ", "# Zero is conjunction and 180 is opposition.", "offset", "=", "180", "if", "(", "dist", ">=", "180", ")", "else", "0", "while", "abs", "(", "dist", ")", ">", "MAX_ERROR", ":", "jd", "=", "jd", "-", "dist", "/", "13.1833", "# Moon mean daily motion", "sun", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "SUN", ",", "jd", ")", "moon", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "MOON", ",", "jd", ")", "dist", "=", "angle", ".", "closestdistance", "(", "sun", "-", "offset", ",", "moon", ")", "return", "jd" ]
Project File Write to File Method
def _write ( self , session , openFile , replaceParamFile ) : # Enforce cards that must be written in certain order PRIORITY_CARDS = ( 'WMS' , 'MASK_WATERSHED' , 'REPLACE_LINE' , 'REPLACE_PARAMS' , 'REPLACE_VALS' , 'REPLACE_FOLDER' ) filename = os . path . split ( openFile . name ) [ 1 ] name = filename . split ( '.' ) [ 0 ] # Write lines openFile . write ( 'GSSHAPROJECT\n' ) # Write priority lines for card_key in PRIORITY_CARDS : card = self . getCard ( card_key ) # Write the card if card is not None : openFile . write ( card . write ( originalPrefix = self . name , newPrefix = name ) ) # Initiate write on each ProjectCard that belongs to this ProjectFile for card in self . projectCards : if card . name not in PRIORITY_CARDS : openFile . write ( card . write ( originalPrefix = self . name , newPrefix = name ) )
4,631
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L298-L323
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
Append directory to relative paths in project file . By default the project file paths are read and written as relative paths . Use this method to prepend a directory to all the paths in the project file .
def appendDirectory ( self , directory , projectFilePath ) : lines = [ ] with open ( projectFilePath , 'r' ) as original : for l in original : lines . append ( l ) with open ( projectFilePath , 'w' ) as new : for line in lines : card = { } try : card = self . _extractCard ( line ) except : card = self . _extractDirectoryCard ( line ) # Determine number of spaces between card and value for nice alignment numSpaces = max ( 2 , 25 - len ( card [ 'name' ] ) ) if card [ 'value' ] is None : rewriteLine = '%s\n' % ( card [ 'name' ] ) else : if card [ 'name' ] == 'WMS' : rewriteLine = '%s %s\n' % ( card [ 'name' ] , card [ 'value' ] ) elif card [ 'name' ] == 'PROJECT_PATH' : filePath = '"%s"' % os . path . normpath ( directory ) rewriteLine = '%s%s%s\n' % ( card [ 'name' ] , ' ' * numSpaces , filePath ) elif '"' in card [ 'value' ] : filename = card [ 'value' ] . strip ( '"' ) filePath = '"%s"' % os . path . join ( directory , filename ) rewriteLine = '%s%s%s\n' % ( card [ 'name' ] , ' ' * numSpaces , filePath ) else : rewriteLine = '%s%s%s\n' % ( card [ 'name' ] , ' ' * numSpaces , card [ 'value' ] ) new . write ( rewriteLine )
4,632
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L325-L369
[ "def", "config", "(", "self", ",", "configlet", ",", "plane", ",", "*", "*", "attributes", ")", ":", "try", ":", "config_text", "=", "configlet", ".", "format", "(", "*", "*", "attributes", ")", "except", "KeyError", "as", "exp", ":", "raise", "CommandSyntaxError", "(", "\"Configuration template error: {}\"", ".", "format", "(", "str", "(", "exp", ")", ")", ")", "return", "self", ".", "driver", ".", "config", "(", "config_text", ",", "plane", ")" ]
Read all files for a GSSHA project into the database .
def readProject ( self , directory , projectFileName , session , spatial = False , spatialReferenceID = None ) : self . project_directory = directory with tmp_chdir ( directory ) : # Add project file to session session . add ( self ) # First read self self . read ( directory , projectFileName , session , spatial = spatial , spatialReferenceID = spatialReferenceID ) # Get the batch directory for output batchDirectory = self . _getBatchDirectory ( directory ) # Automatically derive the spatial reference system, if possible if spatialReferenceID is None : spatialReferenceID = self . _automaticallyDeriveSpatialReferenceId ( directory ) # Read in replace param file replaceParamFile = self . _readReplacementFiles ( directory , session , spatial , spatialReferenceID ) # Read Input Files self . _readXput ( self . INPUT_FILES , directory , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Read Output Files self . _readXput ( self . OUTPUT_FILES , batchDirectory , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Read Input Map Files self . _readXputMaps ( self . INPUT_MAPS , directory , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Read WMS Dataset Files self . _readWMSDatasets ( self . WMS_DATASETS , batchDirectory , session , spatial = spatial , spatialReferenceID = spatialReferenceID ) # Commit to database self . _commit ( session , self . COMMIT_ERROR_MESSAGE )
4,633
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L371-L421
[ "def", "get", "(", "string", ",", "clean", "=", "True", ")", ":", "val", "=", "_dmidecoder", "(", "'-s {0}'", ".", "format", "(", "string", ")", ")", ".", "strip", "(", ")", "# Cleanup possible comments in strings.", "val", "=", "'\\n'", ".", "join", "(", "[", "v", "for", "v", "in", "val", ".", "split", "(", "'\\n'", ")", "if", "not", "v", ".", "startswith", "(", "'#'", ")", "]", ")", "if", "val", ".", "startswith", "(", "'/dev/mem'", ")", "or", "clean", "and", "not", "_dmi_isclean", "(", "string", ",", "val", ")", ":", "val", "=", "None", "return", "val" ]
Read only input files for a GSSHA project into the database .
def readInput ( self , directory , projectFileName , session , spatial = False , spatialReferenceID = None ) : self . project_directory = directory with tmp_chdir ( directory ) : # Add project file to session session . add ( self ) # Read Project File self . read ( directory , projectFileName , session , spatial , spatialReferenceID ) # Automatically derive the spatial reference system, if possible if spatialReferenceID is None : spatialReferenceID = self . _automaticallyDeriveSpatialReferenceId ( directory ) # Read in replace param file replaceParamFile = self . _readReplacementFiles ( directory , session , spatial , spatialReferenceID ) # Read Input Files self . _readXput ( self . INPUT_FILES , directory , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Read Input Map Files self . _readXputMaps ( self . INPUT_MAPS , directory , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Commit to database self . _commit ( session , self . COMMIT_ERROR_MESSAGE )
4,634
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L423-L462
[ "def", "remove_device", "(", "self", ",", "device", ",", "id_override", "=", "None", ",", "type_override", "=", "None", ")", ":", "object_id", "=", "id_override", "or", "device", ".", "object_id", "(", ")", "object_type", "=", "type_override", "or", "device", ".", "object_type", "(", ")", "url_string", "=", "\"{}/{}s/{}\"", ".", "format", "(", "self", ".", "BASE_URL", ",", "object_type", ",", "object_id", ")", "try", ":", "arequest", "=", "requests", ".", "delete", "(", "url_string", ",", "headers", "=", "API_HEADERS", ")", "if", "arequest", ".", "status_code", "==", "204", ":", "return", "True", "_LOGGER", ".", "error", "(", "\"Failed to remove device. Status code: %s\"", ",", "arequest", ".", "status_code", ")", "return", "False", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "_LOGGER", ".", "error", "(", "\"Failed to remove device.\"", ")", "return", "False" ]
Read only output files for a GSSHA project to the database .
def readOutput ( self , directory , projectFileName , session , spatial = False , spatialReferenceID = None ) : self . project_directory = directory with tmp_chdir ( directory ) : # Add project file to session session . add ( self ) # Read Project File self . read ( directory , projectFileName , session , spatial , spatialReferenceID ) # Get the batch directory for output batchDirectory = self . _getBatchDirectory ( directory ) # Read Mask (dependency of some output files) maskMap = WatershedMaskFile ( ) maskMapFilename = self . getCard ( 'WATERSHED_MASK' ) . value . strip ( '"' ) maskMap . read ( session = session , directory = directory , filename = maskMapFilename , spatial = spatial ) maskMap . projectFile = self # Automatically derive the spatial reference system, if possible if spatialReferenceID is None : spatialReferenceID = self . _automaticallyDeriveSpatialReferenceId ( directory ) # Read Output Files self . _readXput ( self . OUTPUT_FILES , batchDirectory , session , spatial = spatial , spatialReferenceID = spatialReferenceID ) # Read WMS Dataset Files self . _readWMSDatasets ( self . WMS_DATASETS , batchDirectory , session , spatial = spatial , spatialReferenceID = spatialReferenceID ) # Commit to database self . _commit ( session , self . COMMIT_ERROR_MESSAGE )
4,635
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L464-L509
[ "def", "insert", "(", "self", ",", "storagemodel", ")", "->", "StorageTableModel", ":", "modeldefinition", "=", "self", ".", "getmodeldefinition", "(", "storagemodel", ",", "True", ")", "try", ":", "modeldefinition", "[", "'tableservice'", "]", ".", "insert_or_replace_entity", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "entity", "(", ")", ")", "storagemodel", ".", "_exists", "=", "True", "except", "AzureMissingResourceHttpError", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "log", ".", "debug", "(", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "getPartitionKey", "(", ")", ",", "storagemodel", ".", "getRowKey", "(", ")", ",", "e", ")", ")", "except", "Exception", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "msg", "=", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "PartitionKey", ",", "storagemodel", ".", "RowKey", ",", "e", ")", "raise", "AzureStorageWrapException", "(", "msg", "=", "msg", ")", "finally", ":", "return", "storagemodel" ]
Read specific IO file for a GSSHA project to the database .
def _readXputFile ( self , file_cards , card_name , directory , session , spatial = False , spatialReferenceID = None , replaceParamFile = None , * * kwargs ) : # Automatically derive the spatial reference system, if possible if spatialReferenceID is None : spatialReferenceID = self . _automaticallyDeriveSpatialReferenceId ( directory ) card = self . getCard ( card_name ) if card : fileIO = file_cards [ card . name ] filename = card . value . strip ( '"' ) . strip ( "'" ) # Invoke read method on each file return self . _invokeRead ( fileIO = fileIO , directory = directory , filename = filename , session = session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile , * * kwargs )
4,636
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L511-L534
[ "def", "remove_device", "(", "self", ",", "device", ",", "id_override", "=", "None", ",", "type_override", "=", "None", ")", ":", "object_id", "=", "id_override", "or", "device", ".", "object_id", "(", ")", "object_type", "=", "type_override", "or", "device", ".", "object_type", "(", ")", "url_string", "=", "\"{}/{}s/{}\"", ".", "format", "(", "self", ".", "BASE_URL", ",", "object_type", ",", "object_id", ")", "try", ":", "arequest", "=", "requests", ".", "delete", "(", "url_string", ",", "headers", "=", "API_HEADERS", ")", "if", "arequest", ".", "status_code", "==", "204", ":", "return", "True", "_LOGGER", ".", "error", "(", "\"Failed to remove device. Status code: %s\"", ",", "arequest", ".", "status_code", ")", "return", "False", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "_LOGGER", ".", "error", "(", "\"Failed to remove device.\"", ")", "return", "False" ]
Write all files for a project from the database to file .
def writeProject ( self , session , directory , name ) : self . project_directory = directory with tmp_chdir ( directory ) : # Get the batch directory for output batchDirectory = self . _getBatchDirectory ( directory ) # Get param file for writing replaceParamFile = self . replaceParamFile # Write the replacement files self . _writeReplacementFiles ( session = session , directory = directory , name = name ) # Write Project File self . write ( session = session , directory = directory , name = name ) # Write input files self . _writeXput ( session = session , directory = directory , fileCards = self . INPUT_FILES , name = name , replaceParamFile = replaceParamFile ) # Write output files self . _writeXput ( session = session , directory = batchDirectory , fileCards = self . OUTPUT_FILES , name = name ) # Write input map files self . _writeXputMaps ( session = session , directory = directory , mapCards = self . INPUT_MAPS , name = name , replaceParamFile = replaceParamFile ) # Write WMS Dataset Files self . _writeWMSDatasets ( session = session , directory = batchDirectory , wmsDatasetCards = self . WMS_DATASETS , name = name )
4,637
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L587-L626
[ "def", "authenticate_heat_admin", "(", "self", ",", "keystone", ")", ":", "self", ".", "log", ".", "debug", "(", "'Authenticating heat admin...'", ")", "ep", "=", "keystone", ".", "service_catalog", ".", "url_for", "(", "service_type", "=", "'orchestration'", ",", "interface", "=", "'publicURL'", ")", "if", "keystone", ".", "session", ":", "return", "heat_client", ".", "Client", "(", "endpoint", "=", "ep", ",", "session", "=", "keystone", ".", "session", ")", "else", ":", "return", "heat_client", ".", "Client", "(", "endpoint", "=", "ep", ",", "token", "=", "keystone", ".", "auth_token", ")" ]
Write only input files for a GSSHA project from the database to file .
def writeInput ( self , session , directory , name ) : self . project_directory = directory with tmp_chdir ( directory ) : # Get param file for writing replaceParamFile = self . replaceParamFile # Write Project File self . write ( session = session , directory = directory , name = name ) # Write input files self . _writeXput ( session = session , directory = directory , fileCards = self . INPUT_FILES , name = name , replaceParamFile = replaceParamFile ) # Write input map files self . _writeXputMaps ( session = session , directory = directory , mapCards = self . INPUT_MAPS , name = name , replaceParamFile = replaceParamFile )
4,638
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L628-L652
[ "def", "remove_device", "(", "self", ",", "device", ",", "id_override", "=", "None", ",", "type_override", "=", "None", ")", ":", "object_id", "=", "id_override", "or", "device", ".", "object_id", "(", ")", "object_type", "=", "type_override", "or", "device", ".", "object_type", "(", ")", "url_string", "=", "\"{}/{}s/{}\"", ".", "format", "(", "self", ".", "BASE_URL", ",", "object_type", ",", "object_id", ")", "try", ":", "arequest", "=", "requests", ".", "delete", "(", "url_string", ",", "headers", "=", "API_HEADERS", ")", "if", "arequest", ".", "status_code", "==", "204", ":", "return", "True", "_LOGGER", ".", "error", "(", "\"Failed to remove device. Status code: %s\"", ",", "arequest", ".", "status_code", ")", "return", "False", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "_LOGGER", ".", "error", "(", "\"Failed to remove device.\"", ")", "return", "False" ]
Write only output files for a GSSHA project from the database to file .
def writeOutput ( self , session , directory , name ) : self . project_directory = directory with tmp_chdir ( directory ) : # Get the batch directory for output batchDirectory = self . _getBatchDirectory ( directory ) # Write the replacement files self . _writeReplacementFiles ( session = session , directory = directory , name = name ) # Write Project File self . write ( session = session , directory = directory , name = name ) # Write output files self . _writeXput ( session = session , directory = batchDirectory , fileCards = self . OUTPUT_FILES , name = name ) # Write WMS Dataset Files self . _writeWMSDatasets ( session = session , directory = batchDirectory , wmsDatasetCards = self . WMS_DATASETS , name = name )
4,639
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L656-L683
[ "def", "insert", "(", "self", ",", "storagemodel", ")", "->", "StorageTableModel", ":", "modeldefinition", "=", "self", ".", "getmodeldefinition", "(", "storagemodel", ",", "True", ")", "try", ":", "modeldefinition", "[", "'tableservice'", "]", ".", "insert_or_replace_entity", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "entity", "(", ")", ")", "storagemodel", ".", "_exists", "=", "True", "except", "AzureMissingResourceHttpError", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "log", ".", "debug", "(", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "getPartitionKey", "(", ")", ",", "storagemodel", ".", "getRowKey", "(", ")", ",", "e", ")", ")", "except", "Exception", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "msg", "=", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "PartitionKey", ",", "storagemodel", ".", "RowKey", ",", "e", ")", "raise", "AzureStorageWrapException", "(", "msg", "=", "msg", ")", "finally", ":", "return", "storagemodel" ]
Retrieve a list of file keys that have been read into the database .
def getFileKeys ( self ) : files = self . getFileObjects ( ) files_list = [ ] for key , value in files . iteritems ( ) : if value : files_list . append ( key ) return files_list
4,640
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L685-L703
[ "def", "weld_compare", "(", "array", ",", "scalar", ",", "operation", ",", "weld_type", ")", ":", "obj_id", ",", "weld_obj", "=", "create_weld_object", "(", "array", ")", "if", "not", "isinstance", "(", "scalar", ",", "str", ")", ":", "scalar", "=", "to_weld_literal", "(", "scalar", ",", "weld_type", ")", "cast", "=", "'{type}({scalar})'", ".", "format", "(", "type", "=", "weld_type", ",", "scalar", "=", "scalar", ")", "# actually checking WeldVec(WeldChar)", "if", "isinstance", "(", "weld_type", ",", "WeldVec", ")", ":", "cast", "=", "get_weld_obj_id", "(", "weld_obj", ",", "scalar", ")", "# TODO: there should be no casting! requires Weld fix", "weld_template", "=", "\"\"\"map(\n {array},\n |a: {type}| \n a {operation} {cast}\n)\"\"\"", "weld_obj", ".", "weld_code", "=", "weld_template", ".", "format", "(", "array", "=", "obj_id", ",", "operation", "=", "operation", ",", "type", "=", "weld_type", ",", "cast", "=", "cast", ")", "return", "weld_obj" ]
Retrieve a dictionary of file objects .
def getFileObjects ( self ) : files = { 'project-file' : self , 'mapping-table-file' : self . mapTableFile , 'channel-input-file' : self . channelInputFile , 'precipitation-file' : self . precipFile , 'storm-pipe-network-file' : self . stormPipeNetworkFile , 'hmet-file' : self . hmetFile , 'nwsrfs-file' : self . nwsrfsFile , 'orographic-gage-file' : self . orographicGageFile , 'grid-pipe-file' : self . gridPipeFile , 'grid-stream-file' : self . gridStreamFile , 'time-series-file' : self . timeSeriesFiles , 'projection-file' : self . projectionFile , 'replace-parameters-file' : self . replaceParamFile , 'replace-value-file' : self . replaceValFile , 'output-location-file' : self . outputLocationFiles , 'maps' : self . maps , 'link-node-datasets-file' : self . linkNodeDatasets } return files
4,641
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L705-L735
[ "def", "get_metric_type", "(", "measure", ",", "aggregation", ")", ":", "if", "aggregation", ".", "aggregation_type", "==", "aggregation_module", ".", "Type", ".", "NONE", ":", "raise", "ValueError", "(", "\"aggregation type must not be NONE\"", ")", "assert", "isinstance", "(", "aggregation", ",", "AGGREGATION_TYPE_MAP", "[", "aggregation", ".", "aggregation_type", "]", ")", "if", "aggregation", ".", "aggregation_type", "==", "aggregation_module", ".", "Type", ".", "SUM", ":", "if", "isinstance", "(", "measure", ",", "measure_module", ".", "MeasureInt", ")", ":", "return", "metric_descriptor", ".", "MetricDescriptorType", ".", "CUMULATIVE_INT64", "elif", "isinstance", "(", "measure", ",", "measure_module", ".", "MeasureFloat", ")", ":", "return", "metric_descriptor", ".", "MetricDescriptorType", ".", "CUMULATIVE_DOUBLE", "else", ":", "raise", "ValueError", "elif", "aggregation", ".", "aggregation_type", "==", "aggregation_module", ".", "Type", ".", "COUNT", ":", "return", "metric_descriptor", ".", "MetricDescriptorType", ".", "CUMULATIVE_INT64", "elif", "aggregation", ".", "aggregation_type", "==", "aggregation_module", ".", "Type", ".", "DISTRIBUTION", ":", "return", "metric_descriptor", ".", "MetricDescriptorType", ".", "CUMULATIVE_DISTRIBUTION", "elif", "aggregation", ".", "aggregation_type", "==", "aggregation_module", ".", "Type", ".", "LASTVALUE", ":", "if", "isinstance", "(", "measure", ",", "measure_module", ".", "MeasureInt", ")", ":", "return", "metric_descriptor", ".", "MetricDescriptorType", ".", "GAUGE_INT64", "elif", "isinstance", "(", "measure", ",", "measure_module", ".", "MeasureFloat", ")", ":", "return", "metric_descriptor", ".", "MetricDescriptorType", ".", "GAUGE_DOUBLE", "else", ":", "raise", "ValueError", "else", ":", "raise", "AssertionError" ]
Retrieve card object for given card name .
def getCard ( self , name ) : cards = self . projectCards for card in cards : if card . name . upper ( ) == name . upper ( ) : return card return None
4,642
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L737-L753
[ "def", "on_start", "(", "self", ",", "session", ",", "session_context", ")", ":", "session_id", "=", "session", ".", "session_id", "web_registry", "=", "session_context", "[", "'web_registry'", "]", "if", "self", ".", "is_session_id_cookie_enabled", ":", "web_registry", ".", "session_id", "=", "session_id", "logger", ".", "debug", "(", "\"Set SessionID cookie using id: \"", "+", "str", "(", "session_id", ")", ")", "else", ":", "msg", "=", "(", "\"Session ID cookie is disabled. No cookie has been set for \"", "\"new session with id: \"", "+", "str", "(", "session_id", ")", ")", "logger", ".", "debug", "(", "msg", ")" ]
Removes card from gssha project file
def deleteCard ( self , card_name , db_session ) : card_name = card_name . upper ( ) gssha_card = self . getCard ( card_name ) if gssha_card is not None : db_session . delete ( gssha_card ) db_session . commit ( )
4,643
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L776-L784
[ "def", "avg_inner_product", "(", "data1", ",", "data2", ",", "bin_size", ")", ":", "assert", "data1", ".", "duration", "==", "data2", ".", "duration", "assert", "data1", ".", "sample_rate", "==", "data2", ".", "sample_rate", "seglen", "=", "int", "(", "bin_size", "*", "data1", ".", "sample_rate", ")", "inner_prod", "=", "[", "]", "for", "idx", "in", "range", "(", "int", "(", "data1", ".", "duration", "/", "bin_size", ")", ")", ":", "start", ",", "end", "=", "idx", "*", "seglen", ",", "(", "idx", "+", "1", ")", "*", "seglen", "norm", "=", "len", "(", "data1", "[", "start", ":", "end", "]", ")", "bin_prod", "=", "2", "*", "sum", "(", "data1", ".", "data", "[", "start", ":", "end", "]", ".", "real", "*", "numpy", ".", "conjugate", "(", "data2", ".", "data", "[", "start", ":", "end", "]", ")", ")", "/", "norm", "inner_prod", ".", "append", "(", "bin_prod", ")", "# Get the median over all bins to avoid outliers due to the presence", "# of a signal in a particular bin.", "inner_median", "=", "complex_median", "(", "inner_prod", ")", "return", "inner_prod", ",", "numpy", ".", "abs", "(", "inner_median", ")", ",", "numpy", ".", "angle", "(", "inner_median", ")" ]
Returns GDALGrid object of GSSHA grid
def getGridByCard ( self , gssha_card_name ) : with tmp_chdir ( self . project_directory ) : if gssha_card_name not in ( self . INPUT_MAPS + self . WMS_DATASETS ) : raise ValueError ( "Card {0} not found in valid grid cards ..." . format ( gssha_card_name ) ) gssha_grid_card = self . getCard ( gssha_card_name ) if gssha_grid_card is None : raise ValueError ( "{0} card not found ..." . format ( gssha_card_name ) ) gssha_pro_card = self . getCard ( "#PROJECTION_FILE" ) if gssha_pro_card is None : raise ValueError ( "#PROJECTION_FILE card not found ..." ) # return gssha grid return GDALGrid ( gssha_grid_card . value . strip ( '"' ) . strip ( "'" ) , gssha_pro_card . value . strip ( '"' ) . strip ( "'" ) )
4,644
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1136-L1161
[ "def", "readAnnotations", "(", "self", ")", ":", "annot", "=", "self", ".", "read_annotation", "(", ")", "annot", "=", "np", ".", "array", "(", "annot", ")", "if", "(", "annot", ".", "shape", "[", "0", "]", "==", "0", ")", ":", "return", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", "ann_time", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "0", "]", ")", "ann_text", "=", "annot", "[", ":", ",", "2", "]", "ann_text_out", "=", "[", "\"\"", "for", "x", "in", "range", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", "]", "for", "i", "in", "np", ".", "arange", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", ":", "ann_text_out", "[", "i", "]", "=", "self", ".", "_convert_string", "(", "ann_text", "[", "i", "]", ")", "if", "annot", "[", "i", ",", "1", "]", "==", "''", ":", "annot", "[", "i", ",", "1", "]", "=", "'-1'", "ann_duration", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "1", "]", ")", "return", "ann_time", "/", "10000000", ",", "ann_duration", ",", "np", ".", "array", "(", "ann_text_out", ")" ]
Returns GDALGrid object of GSSHA model bounds
def getGrid ( self , use_mask = True ) : grid_card_name = "WATERSHED_MASK" if not use_mask : grid_card_name = "ELEVATION" return self . getGridByCard ( grid_card_name )
4,645
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1163-L1178
[ "def", "readAnnotations", "(", "self", ")", ":", "annot", "=", "self", ".", "read_annotation", "(", ")", "annot", "=", "np", ".", "array", "(", "annot", ")", "if", "(", "annot", ".", "shape", "[", "0", "]", "==", "0", ")", ":", "return", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", ",", "np", ".", "array", "(", "[", "]", ")", "ann_time", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "0", "]", ")", "ann_text", "=", "annot", "[", ":", ",", "2", "]", "ann_text_out", "=", "[", "\"\"", "for", "x", "in", "range", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", "]", "for", "i", "in", "np", ".", "arange", "(", "len", "(", "annot", "[", ":", ",", "1", "]", ")", ")", ":", "ann_text_out", "[", "i", "]", "=", "self", ".", "_convert_string", "(", "ann_text", "[", "i", "]", ")", "if", "annot", "[", "i", ",", "1", "]", "==", "''", ":", "annot", "[", "i", ",", "1", "]", "=", "'-1'", "ann_duration", "=", "self", ".", "_get_float", "(", "annot", "[", ":", ",", "1", "]", ")", "return", "ann_time", "/", "10000000", ",", "ann_duration", ",", "np", ".", "array", "(", "ann_text_out", ")" ]
Returns GDALGrid object of index map
def getIndexGrid ( self , name ) : index_map = self . mapTableFile . indexMaps . filter_by ( name = name ) . one ( ) gssha_pro_card = self . getCard ( "#PROJECTION_FILE" ) if gssha_pro_card is None : raise ValueError ( "#PROJECTION_FILE card not found ..." ) with tmp_chdir ( self . project_directory ) : # return gssha grid return GDALGrid ( index_map . filename , gssha_pro_card . value . strip ( '"' ) . strip ( "'" ) )
4,646
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1180-L1199
[ "def", "process_task", "(", "self", ")", ":", "if", "_debug", ":", "ClientSSM", ".", "_debug", "(", "\"process_task\"", ")", "if", "self", ".", "state", "==", "SEGMENTED_REQUEST", ":", "self", ".", "segmented_request_timeout", "(", ")", "elif", "self", ".", "state", "==", "AWAIT_CONFIRMATION", ":", "self", ".", "await_confirmation_timeout", "(", ")", "elif", "self", ".", "state", "==", "SEGMENTED_CONFIRMATION", ":", "self", ".", "segmented_confirmation_timeout", "(", ")", "elif", "self", ".", "state", "==", "COMPLETED", ":", "pass", "elif", "self", ".", "state", "==", "ABORTED", ":", "pass", "else", ":", "e", "=", "RuntimeError", "(", "\"invalid state\"", ")", "ClientSSM", ".", "_exception", "(", "\"exception: %r\"", ",", "e", ")", "raise", "e" ]
Returns GSSHA projection WKT string
def getWkt ( self ) : gssha_pro_card = self . getCard ( "#PROJECTION_FILE" ) if gssha_pro_card is None : raise ValueError ( "#PROJECTION_FILE card not found ..." ) with tmp_chdir ( self . project_directory ) : gssha_prj_file = gssha_pro_card . value . strip ( '"' ) . strip ( "'" ) with open ( gssha_prj_file ) as pro_file : wkt_string = pro_file . read ( ) return wkt_string
4,647
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1201-L1213
[ "def", "tuples", "(", "self", ",", "rlist", ")", ":", "m", ",", "n", ",", "t", "=", "self", ".", "args", "for", "r", "in", "rlist", ":", "r", ",", "k", "=", "divmod", "(", "r", ",", "t", ")", "r", ",", "u", "=", "divmod", "(", "r", ",", "2", ")", "i", ",", "j", "=", "divmod", "(", "r", ",", "n", ")", "yield", "i", ",", "j", ",", "u", ",", "k" ]
Gets the outlet latitude and longitude .
def getOutlet ( self ) : # OUTROW, OUTCOL outrow = int ( self . getCard ( name = 'OUTROW' ) . value ) - 1 outcol = int ( self . getCard ( name = 'OUTCOL' ) . value ) - 1 gssha_grid = self . getGrid ( ) return gssha_grid . pixel2lonlat ( outcol , outrow )
4,648
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1215-L1227
[ "def", "_init_libcrypto", "(", ")", ":", "libcrypto", "=", "_load_libcrypto", "(", ")", "try", ":", "libcrypto", ".", "OPENSSL_init_crypto", "(", ")", "except", "AttributeError", ":", "# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)", "libcrypto", ".", "OPENSSL_no_config", "(", ")", "libcrypto", ".", "OPENSSL_add_all_algorithms_noconf", "(", ")", "libcrypto", ".", "RSA_new", ".", "argtypes", "=", "(", ")", "libcrypto", ".", "RSA_new", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "RSA_size", ".", "argtype", "=", "(", "c_void_p", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "argtypes", "=", "(", "c_char_p", ",", "c_int", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "restype", "=", "c_void_p", "libcrypto", ".", "BIO_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "restype", "=", "c_void_p", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_private_encrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "libcrypto", ".", "RSA_public_decrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "return", "libcrypto" ]
Sets the outlet grid cell information in the project file .
def setOutlet ( self , col , row , outslope = None ) : #OUTROW, OUTCOL, OUTSLOPE gssha_grid = self . getGrid ( ) # col, row = gssha_grid.lonlat2pixel(longitude, latitude) # add 1 to row & col becasue GSSHA is 1-based self . setCard ( name = 'OUTROW' , value = str ( row ) ) self . setCard ( name = 'OUTCOL' , value = str ( col ) ) if outslope is None : self . calculateOutletSlope ( ) else : self . setCard ( name = 'OUTSLOPE' , value = str ( outslope ) )
4,649
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1229-L1247
[ "def", "_init_libcrypto", "(", ")", ":", "libcrypto", "=", "_load_libcrypto", "(", ")", "try", ":", "libcrypto", ".", "OPENSSL_init_crypto", "(", ")", "except", "AttributeError", ":", "# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)", "libcrypto", ".", "OPENSSL_no_config", "(", ")", "libcrypto", ".", "OPENSSL_add_all_algorithms_noconf", "(", ")", "libcrypto", ".", "RSA_new", ".", "argtypes", "=", "(", ")", "libcrypto", ".", "RSA_new", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "RSA_size", ".", "argtype", "=", "(", "c_void_p", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "argtypes", "=", "(", "c_char_p", ",", "c_int", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "restype", "=", "c_void_p", "libcrypto", ".", "BIO_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "restype", "=", "c_void_p", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_private_encrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "libcrypto", ".", "RSA_public_decrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "return", "libcrypto" ]
Calculate outlet location
def findOutlet ( self , shapefile_path ) : # determine outlet from shapefile # by getting outlet from first point in polygon # make sure the boundary geometry is valid check_watershed_boundary_geometry ( shapefile_path ) shapefile = ogr . Open ( shapefile_path ) source_layer = shapefile . GetLayer ( 0 ) source_lyr_proj = source_layer . GetSpatialRef ( ) osr_geographic_proj = osr . SpatialReference ( ) osr_geographic_proj . ImportFromEPSG ( 4326 ) proj_transform = osr . CoordinateTransformation ( source_lyr_proj , osr_geographic_proj ) boundary_feature = source_layer . GetFeature ( 0 ) feat_geom = boundary_feature . GetGeometryRef ( ) feat_geom . Transform ( proj_transform ) polygon = shapely_loads ( feat_geom . ExportToWkb ( ) ) # make lowest point on boundary outlet mask_grid = self . getGrid ( ) elevation_grid = self . getGrid ( use_mask = False ) elevation_array = elevation_grid . np_array ( ) ma_elevation_array = np . ma . array ( elevation_array , mask = mask_grid . np_array ( ) == 0 ) min_elevation = sys . maxsize outlet_pt = None for coord in list ( polygon . exterior . coords ) : try : col , row = mask_grid . lonlat2pixel ( * coord ) except IndexError : # out of bounds continue elevation_value = ma_elevation_array [ row , col ] if elevation_value is np . ma . masked : # search for closest value in mask to this point # elevation within 5 pixels in any direction actual_value = elevation_array [ row , col ] max_diff = sys . maxsize nrow = None ncol = None nval = None for row_ix in range ( max ( row - 5 , 0 ) , min ( row + 5 , mask_grid . y_size ) ) : for col_ix in range ( max ( col - 5 , 0 ) , min ( col + 5 , mask_grid . x_size ) ) : val = ma_elevation_array [ row_ix , col_ix ] if not val is np . ma . 
masked : val_diff = abs ( val - actual_value ) if val_diff < max_diff : max_diff = val_diff nval = val nrow = row_ix ncol = col_ix if None not in ( nrow , ncol , nval ) : row = nrow col = ncol elevation_value = nval if elevation_value < min_elevation : min_elevation = elevation_value outlet_pt = ( col , row ) if outlet_pt is None : raise IndexError ( 'No valid outlet points found on boundary ...' ) outcol , outrow = outlet_pt self . setOutlet ( col = outcol + 1 , row = outrow + 1 )
4,650
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1249-L1319
[ "def", "readme_verify", "(", ")", ":", "expected", "=", "populate_readme", "(", "REVISION", ",", "RTD_VERSION", ")", "# Actually get the stored contents.", "with", "open", "(", "README_FILE", ",", "\"r\"", ")", "as", "file_obj", ":", "contents", "=", "file_obj", ".", "read", "(", ")", "if", "contents", "!=", "expected", ":", "err_msg", "=", "\"\\n\"", "+", "get_diff", "(", "contents", ",", "expected", ",", "\"README.rst.actual\"", ",", "\"README.rst.expected\"", ")", "raise", "ValueError", "(", "err_msg", ")", "else", ":", "print", "(", "\"README contents are as expected.\"", ")" ]
Attempt to determine the slope at the OUTLET
def calculateOutletSlope ( self ) : try : mask_grid = self . getGrid ( ) elevation_grid = self . getGrid ( use_mask = False ) outrow = int ( self . getCard ( "OUTROW" ) . value ) - 1 outcol = int ( self . getCard ( "OUTCOL" ) . value ) - 1 cell_size = float ( self . getCard ( "GRIDSIZE" ) . value ) min_row = max ( 0 , outrow - 1 ) max_row = min ( mask_grid . x_size , outrow + 2 ) min_col = max ( 0 , outcol - 1 ) max_col = min ( mask_grid . y_size , outcol + 2 ) mask_array = mask_grid . np_array ( ) mask_array [ outrow , outcol ] = 0 mask_array = mask_array [ min_row : max_row , min_col : max_col ] mask_array = ( mask_array == 0 ) elevation_array = elevation_grid . np_array ( ) original_elevation = elevation_array [ outrow , outcol ] elevation_array = elevation_array [ min_row : max_row , min_col : max_col ] slope_calc_array = ( elevation_array - original_elevation ) / cell_size #NOTE: Ignoring distance to cells at angles. Assuming to small to matter mask_array [ slope_calc_array <= 0 ] = True slope_mask_array = np . ma . array ( slope_calc_array , mask = mask_array ) outslope = slope_mask_array . mean ( ) if outslope is np . ma . masked or outslope < 0.001 : outslope = 0.001 except ValueError : outslope = 0.001 self . setCard ( "OUTSLOPE" , str ( outslope ) )
4,651
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1321-L1359
[ "def", "nvrtcCreateProgram", "(", "self", ",", "src", ",", "name", ",", "headers", ",", "include_names", ")", ":", "res", "=", "c_void_p", "(", ")", "headers_array", "=", "(", "c_char_p", "*", "len", "(", "headers", ")", ")", "(", ")", "headers_array", "[", ":", "]", "=", "encode_str_list", "(", "headers", ")", "include_names_array", "=", "(", "c_char_p", "*", "len", "(", "include_names", ")", ")", "(", ")", "include_names_array", "[", ":", "]", "=", "encode_str_list", "(", "include_names", ")", "code", "=", "self", ".", "_lib", ".", "nvrtcCreateProgram", "(", "byref", "(", "res", ")", ",", "c_char_p", "(", "encode_str", "(", "src", ")", ")", ",", "c_char_p", "(", "encode_str", "(", "name", ")", ")", ",", "len", "(", "headers", ")", ",", "headers_array", ",", "include_names_array", ")", "self", ".", "_throw_on_error", "(", "code", ")", "return", "res" ]
timezone of GSSHA model
def timezone ( self ) : if self . _tz is None : # GET CENTROID FROM GSSHA GRID cen_lat , cen_lon = self . centerLatLon ( ) # update time zone tf = TimezoneFinder ( ) tz_name = tf . timezone_at ( lng = cen_lon , lat = cen_lat ) self . _tz = timezone ( tz_name ) return self . _tz
4,652
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1362-L1374
[ "def", "pvcreate", "(", "devices", ",", "override", "=", "True", ",", "*", "*", "kwargs", ")", ":", "if", "not", "devices", ":", "return", "'Error: at least one device is required'", "if", "isinstance", "(", "devices", ",", "six", ".", "string_types", ")", ":", "devices", "=", "devices", ".", "split", "(", "','", ")", "cmd", "=", "[", "'pvcreate'", ",", "'-y'", "]", "for", "device", "in", "devices", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "device", ")", ":", "raise", "CommandExecutionError", "(", "'{0} does not exist'", ".", "format", "(", "device", ")", ")", "if", "not", "pvdisplay", "(", "device", ",", "quiet", "=", "True", ")", ":", "cmd", ".", "append", "(", "device", ")", "elif", "not", "override", ":", "raise", "CommandExecutionError", "(", "'Device \"{0}\" is already an LVM physical volume.'", ".", "format", "(", "device", ")", ")", "if", "not", "cmd", "[", "2", ":", "]", ":", "# All specified devices are already LVM volumes", "return", "True", "valid", "=", "(", "'metadatasize'", ",", "'dataalignment'", ",", "'dataalignmentoffset'", ",", "'pvmetadatacopies'", ",", "'metadatacopies'", ",", "'metadataignore'", ",", "'restorefile'", ",", "'norestorefile'", ",", "'labelsector'", ",", "'setphysicalvolumesize'", ")", "no_parameter", "=", "(", "'force'", ",", "'norestorefile'", ")", "for", "var", "in", "kwargs", ":", "if", "kwargs", "[", "var", "]", "and", "var", "in", "valid", ":", "cmd", ".", "extend", "(", "[", "'--{0}'", ".", "format", "(", "var", ")", ",", "kwargs", "[", "var", "]", "]", ")", "elif", "kwargs", "[", "var", "]", "and", "var", "in", "no_parameter", ":", "cmd", ".", "append", "(", "'--{0}'", ".", "format", "(", "var", ")", ")", "out", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "if", "out", ".", "get", "(", "'retcode'", ")", ":", "raise", "CommandExecutionError", "(", "out", ".", "get", "(", "'stderr'", ")", ")", "# Verify pvcreate was successful", "for", "device", "in", 
"devices", ":", "if", "not", "pvdisplay", "(", "device", ")", ":", "raise", "CommandExecutionError", "(", "'Device \"{0}\" was not affected.'", ".", "format", "(", "device", ")", ")", "return", "True" ]
Check the project file for the REPLACE_FOLDER card . If it exists append it s value to create the batch directory path . This is the directory output is written to when run in batch mode .
def _getBatchDirectory ( self , projectRootDirectory ) : # Set output directory to main directory as default batchDirectory = projectRootDirectory # Get the replace folder card replaceFolderCard = self . getCard ( 'REPLACE_FOLDER' ) if replaceFolderCard : replaceDir = replaceFolderCard . value . strip ( '"' ) batchDirectory = os . path . join ( batchDirectory , replaceDir ) # Create directory if it doesn't exist if not os . path . isdir ( batchDirectory ) : os . mkdir ( batchDirectory ) log . info ( 'Creating directory for batch output: {0}' . format ( batchDirectory ) ) return batchDirectory
4,653
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1426-L1446
[ "def", "get_power", "(", "self", ")", ":", "self", ".", "get_status", "(", ")", "try", ":", "self", ".", "consumption", "=", "self", ".", "data", "[", "'power'", "]", "except", "TypeError", ":", "self", ".", "consumption", "=", "0", "return", "self", ".", "consumption" ]
GSSHAPY Project Read Files from File Method
def _readXput ( self , fileCards , directory , session , spatial = False , spatialReferenceID = 4236 , replaceParamFile = None ) : ## NOTE: This function is dependent on the project file being read first # Read Input/Output Files for card in self . projectCards : if ( card . name in fileCards ) and self . _noneOrNumValue ( card . value ) and fileCards [ card . name ] : fileIO = fileCards [ card . name ] filename = card . value . strip ( '"' ) # Invoke read method on each file self . _invokeRead ( fileIO = fileIO , directory = directory , filename = filename , session = session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile )
4,654
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1448-L1466
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
GSSHA Project Read Map Files from File Method
def _readXputMaps ( self , mapCards , directory , session , spatial = False , spatialReferenceID = 4236 , replaceParamFile = None ) : if self . mapType in self . MAP_TYPES_SUPPORTED : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) # Invoke read method on each map self . _invokeRead ( fileIO = RasterMapFile , directory = directory , filename = filename , session = session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) else : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) fileExtension = filename . split ( '.' ) [ 1 ] if fileExtension in self . ALWAYS_READ_AND_WRITE_MAPS : # Invoke read method on each map self . _invokeRead ( fileIO = RasterMapFile , directory = directory , filename = filename , session = session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) log . warning ( 'Could not read map files. ' 'MAP_TYPE {0} not supported.' . format ( self . mapType ) )
4,655
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1468-L1501
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
Method to handle the special case of WMS Dataset Files . WMS Dataset Files cannot be read in independently as other types of file can . They rely on the Mask Map file for some parameters .
def _readWMSDatasets ( self , datasetCards , directory , session , spatial = False , spatialReferenceID = 4236 ) : if self . mapType in self . MAP_TYPES_SUPPORTED : # Get Mask Map dependency maskMap = session . query ( RasterMapFile ) . filter ( RasterMapFile . projectFile == self ) . filter ( RasterMapFile . fileExtension == 'msk' ) . one ( ) for card in self . projectCards : if ( card . name in datasetCards ) and self . _noneOrNumValue ( card . value ) : # Get filename from project file filename = card . value . strip ( '"' ) path = os . path . join ( directory , filename ) if os . path . isfile ( path ) : wmsDatasetFile = WMSDatasetFile ( ) wmsDatasetFile . projectFile = self wmsDatasetFile . read ( directory = directory , filename = filename , session = session , maskMap = maskMap , spatial = spatial , spatialReferenceID = spatialReferenceID ) else : self . _readBatchOutputForFile ( directory , WMSDatasetFile , filename , session , spatial , spatialReferenceID , maskMap = maskMap )
4,656
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1503-L1533
[ "def", "remove_index", "(", "self", ")", ":", "self", ".", "index_client", ".", "close", "(", "self", ".", "index_name", ")", "self", ".", "index_client", ".", "delete", "(", "self", ".", "index_name", ")" ]
When batch mode is run in GSSHA, the files of the same type are prepended with an integer to avoid filename conflicts. This will attempt to read files in this format and throw warnings if the files aren't found.
def _readBatchOutputForFile ( self , directory , fileIO , filename , session , spatial , spatialReferenceID , replaceParamFile = None , maskMap = None ) : # Get contents of directory directoryList = os . listdir ( directory ) # Compile a list of files with that include the filename in them batchFiles = [ ] for thing in directoryList : if filename in thing : batchFiles . append ( thing ) numFilesRead = 0 for batchFile in batchFiles : instance = fileIO ( ) instance . projectFile = self if isinstance ( instance , WMSDatasetFile ) : instance . read ( directory = directory , filename = batchFile , session = session , maskMap = maskMap , spatial = spatial , spatialReferenceID = spatialReferenceID ) else : instance . read ( directory , batchFile , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile ) # Increment runCounter for next file numFilesRead += 1 # Issue warnings if '[' in filename or ']' in filename : log . info ( 'A file cannot be read, because the path to the ' 'file in the project file has been replaced with ' 'replacement variable {0}.' . format ( filename ) ) elif numFilesRead == 0 : log . warning ( '{0} listed in project file, but no such ' 'file exists.' . format ( filename ) ) else : log . info ( 'Batch mode output detected. {0} files read ' 'for file {1}' . format ( numFilesRead , filename ) )
4,657
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1577-L1621
[ "async", "def", "set_power_settings", "(", "self", ",", "target", ":", "str", ",", "value", ":", "str", ")", "->", "None", ":", "params", "=", "{", "\"settings\"", ":", "[", "{", "\"target\"", ":", "target", ",", "\"value\"", ":", "value", "}", "]", "}", "return", "await", "self", ".", "services", "[", "\"system\"", "]", "[", "\"setPowerSettings\"", "]", "(", "params", ")" ]
Invoke File Read Method on Other Files
def _invokeRead ( self , fileIO , directory , filename , session , spatial = False , spatialReferenceID = 4236 , replaceParamFile = None , * * kwargs ) : path = os . path . join ( directory , filename ) if os . path . isfile ( path ) : instance = fileIO ( ) instance . projectFile = self instance . read ( directory , filename , session , spatial = spatial , spatialReferenceID = spatialReferenceID , replaceParamFile = replaceParamFile , * * kwargs ) return instance else : self . _readBatchOutputForFile ( directory , fileIO , filename , session , spatial , spatialReferenceID , replaceParamFile )
4,658
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1623-L1639
[ "async", "def", "message_fetcher_coroutine", "(", "self", ",", "loop", ")", ":", "Global", ".", "LOGGER", ".", "debug", "(", "'registering callbacks for message fetcher coroutine'", ")", "self", ".", "isrunning", "=", "True", "while", "self", ".", "isrunning", ":", "loop", ".", "call_soon", "(", "self", ".", "_fetch_messages", ")", "loop", ".", "call_soon", "(", "self", ".", "_perform_system_check", ")", "await", "asyncio", ".", "sleep", "(", "Global", ".", "CONFIG_MANAGER", ".", "message_fetcher_sleep_interval", ")", "Global", ".", "LOGGER", ".", "debug", "(", "'message fetcher stopped'", ")" ]
GSSHA Project Write Files to File Method
def _writeXput ( self , session , directory , fileCards , name = None , replaceParamFile = None ) : for card in self . projectCards : if ( card . name in fileCards ) and self . _noneOrNumValue ( card . value ) and fileCards [ card . name ] : fileIO = fileCards [ card . name ] filename = card . value . strip ( '"' ) # Check for replacement variables if '[' in filename or ']' in filename : log . info ( 'The file for project card {0} cannot be ' 'written, because the path has been replaced ' 'with replacement variable {1}.' . format ( card . name , filename ) ) return # Determine new filename filename = self . _replaceNewFilename ( filename = filename , name = name ) # Invoke write method on each file self . _invokeWrite ( fileIO = fileIO , session = session , directory = directory , filename = filename , replaceParamFile = replaceParamFile )
4,659
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1642-L1669
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
GSSHAPY Project Write Map Files to File Method
def _writeXputMaps ( self , session , directory , mapCards , name = None , replaceParamFile = None ) : if self . mapType in self . MAP_TYPES_SUPPORTED : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) # Determine new filename filename = self . _replaceNewFilename ( filename , name ) # Write map file self . _invokeWrite ( fileIO = RasterMapFile , session = session , directory = directory , filename = filename , replaceParamFile = replaceParamFile ) else : for card in self . projectCards : if ( card . name in mapCards ) and self . _noneOrNumValue ( card . value ) : filename = card . value . strip ( '"' ) fileExtension = filename . split ( '.' ) [ 1 ] if fileExtension in self . ALWAYS_READ_AND_WRITE_MAPS : # Determine new filename filename = self . _replaceNewFilename ( filename , name ) # Write map file self . _invokeWrite ( fileIO = RasterMapFile , session = session , directory = directory , filename = filename , replaceParamFile = replaceParamFile ) log . error ( 'Could not write map files. MAP_TYPE {0} ' 'not supported.' . format ( self . mapType ) )
4,660
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1671-L1709
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
GSSHAPY Project Write WMS Datasets to File Method
def _writeWMSDatasets(self, session, directory, wmsDatasetCards, name=None):
    """Write the WMS dataset files referenced by the cards in *wmsDatasetCards*.

    WMS datasets depend on the mask map record, which is looked up from
    the database and passed to each dataset's write method. Cards whose
    MAP_TYPE is unsupported cause an error log instead.
    """
    if self.mapType in self.MAP_TYPES_SUPPORTED:
        for card in self.projectCards:
            if (card.name in wmsDatasetCards) and self._noneOrNumValue(card.value):
                filename = card.value.strip('"')

                # Determine new filename
                filename = self._replaceNewFilename(filename, name)

                # Handle case where fileIO interfaces with multiple files
                # Retrieve File using FileIO and file extension
                extension = filename.split('.')[1]

                # Get mask map file (dependency for writing WMS datasets)
                maskMap = session.query(RasterMapFile).filter(RasterMapFile.projectFile == self).filter(RasterMapFile.fileExtension == 'msk').one()

                # Default wms dataset
                wmsDataset = None

                try:
                    wmsDataset = session.query(WMSDatasetFile).filter(WMSDatasetFile.projectFile == self).filter(WMSDatasetFile.fileExtension == extension).one()

                except NoResultFound:
                    # Handle case when there is no file in database but
                    # the card is listed in the project file
                    log.warning('{0} listed as card in project file, '
                                'but the file is not found in the database.'.format(filename))

                except MultipleResultsFound:
                    # Write all instances
                    self._invokeWriteForMultipleOfType(directory, extension, WMSDatasetFile, filename, session, maskMap=maskMap)
                    # NOTE(review): this return exits the whole card loop,
                    # so any cards after the first multiple-result match
                    # are not written -- confirm this is intentional.
                    return

                # Initiate Write Method on File
                if wmsDataset is not None and maskMap is not None:
                    wmsDataset.write(session=session, directory=directory, name=filename, maskMap=maskMap)
    else:
        log.error('Could not write WMS Dataset files. '
                  'MAP_TYPE {0} not supported.'.format(self.mapType))
4,661
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1711-L1761
[ "def", "check_extraneous", "(", "config", ",", "schema", ")", ":", "if", "not", "isinstance", "(", "config", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"Config {} is not a dictionary\"", ".", "format", "(", "config", ")", ")", "for", "k", "in", "config", ":", "if", "k", "not", "in", "schema", ":", "raise", "ValueError", "(", "\"Unexpected config key `{}` not in {}\"", ".", "format", "(", "k", ",", "list", "(", "schema", ".", "keys", "(", ")", ")", ")", ")", "v", ",", "kreq", "=", "schema", "[", "k", "]", "if", "v", "is", "None", ":", "continue", "elif", "isinstance", "(", "v", ",", "type", ")", ":", "if", "not", "isinstance", "(", "config", "[", "k", "]", ",", "v", ")", ":", "if", "v", "is", "str", "and", "isinstance", "(", "config", "[", "k", "]", ",", "string_types", ")", ":", "continue", "raise", "ValueError", "(", "\"Config key `{}` has wrong type {}, expected {}\"", ".", "format", "(", "k", ",", "type", "(", "config", "[", "k", "]", ")", ".", "__name__", ",", "v", ".", "__name__", ")", ")", "else", ":", "check_extraneous", "(", "config", "[", "k", "]", ",", "v", ")" ]
Write the replacement files
def _writeReplacementFiles ( self , session , directory , name ) : if self . replaceParamFile : self . replaceParamFile . write ( session = session , directory = directory , name = name ) if self . replaceValFile : self . replaceValFile . write ( session = session , directory = directory , name = name )
4,662
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1763-L1773
[ "def", "width", "(", "poly", ")", ":", "num", "=", "len", "(", "poly", ")", "-", "1", "if", "abs", "(", "poly", "[", "num", "]", "[", "2", "]", "-", "poly", "[", "0", "]", "[", "2", "]", ")", "<", "abs", "(", "poly", "[", "1", "]", "[", "2", "]", "-", "poly", "[", "0", "]", "[", "2", "]", ")", ":", "return", "dist", "(", "poly", "[", "num", "]", ",", "poly", "[", "0", "]", ")", "elif", "abs", "(", "poly", "[", "num", "]", "[", "2", "]", "-", "poly", "[", "0", "]", "[", "2", "]", ")", ">", "abs", "(", "poly", "[", "1", "]", "[", "2", "]", "-", "poly", "[", "0", "]", "[", "2", "]", ")", ":", "return", "dist", "(", "poly", "[", "1", "]", ",", "poly", "[", "0", "]", ")", "else", ":", "return", "max", "(", "dist", "(", "poly", "[", "num", "]", ",", "poly", "[", "0", "]", ")", ",", "dist", "(", "poly", "[", "1", "]", ",", "poly", "[", "0", "]", ")", ")" ]
Invoke File Write Method on Other Files
def _invokeWrite ( self , fileIO , session , directory , filename , replaceParamFile ) : # Default value for instance instance = None try : # Handle case where fileIO interfaces with single file # Retrieve File using FileIO instance = session . query ( fileIO ) . filter ( fileIO . projectFile == self ) . one ( ) except : # Handle case where fileIO interfaces with multiple files # Retrieve File using FileIO and file extension extension = filename . split ( '.' ) [ 1 ] try : instance = session . query ( fileIO ) . filter ( fileIO . projectFile == self ) . filter ( fileIO . fileExtension == extension ) . one ( ) except NoResultFound : # Handle case when there is no file in database but the # card is listed in the project file log . warning ( '{0} listed as card in project file, but ' 'the file is not found in the database.' . format ( filename ) ) except MultipleResultsFound : self . _invokeWriteForMultipleOfType ( directory , extension , fileIO , filename , session , replaceParamFile = replaceParamFile ) return # Initiate Write Method on File if instance is not None : instance . write ( session = session , directory = directory , name = filename , replaceParamFile = replaceParamFile )
4,663
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L1802-L1842
[ "async", "def", "message_fetcher_coroutine", "(", "self", ",", "loop", ")", ":", "Global", ".", "LOGGER", ".", "debug", "(", "'registering callbacks for message fetcher coroutine'", ")", "self", ".", "isrunning", "=", "True", "while", "self", ".", "isrunning", ":", "loop", ".", "call_soon", "(", "self", ".", "_fetch_messages", ")", "loop", ".", "call_soon", "(", "self", ".", "_perform_system_check", ")", "await", "asyncio", ".", "sleep", "(", "Global", ".", "CONFIG_MANAGER", ".", "message_fetcher_sleep_interval", ")", "Global", ".", "LOGGER", ".", "debug", "(", "'message fetcher stopped'", ")" ]
Write project card to string .
def write(self, originalPrefix, newPrefix=None):
    """Render this project card as a single project-file line.

    Pads the card name to column 25 (minimum two spaces) for alignment,
    and optionally swaps *originalPrefix* for *newPrefix* in the value.
    """
    # Padding between card name and value for nice column alignment.
    padding = ' ' * max(2, 25 - len(self.name))

    # Boolean-style cards carry no value portion.
    if self.value is None:
        return '%s\n' % self.name

    # The WMS card is written with a single space, not aligned.
    if self.name == 'WMS':
        return '%s %s\n' % (self.name, self.value)

    if newPrefix is not None and originalPrefix in self.value:
        value = self.value.replace(originalPrefix, newPrefix)
    else:
        value = self.value
    return '%s%s%s\n' % (self.name, padding, value)
4,664
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/prj.py#L2024-L2051
[ "def", "Filter", "(", "self", ",", "filename_spec", ")", ":", "if", "\"@\"", "in", "filename_spec", ":", "file_path", ",", "package_name", "=", "filename_spec", ".", "split", "(", "\"@\"", ")", "else", ":", "file_path", ",", "package_name", "=", "filename_spec", ",", "Resource", ".", "default_package", "resource_path", "=", "package", ".", "ResourcePath", "(", "package_name", ",", "file_path", ")", "if", "resource_path", "is", "not", "None", ":", "return", "resource_path", "# pylint: disable=unreachable", "raise", "FilterError", "(", "\"Unable to find resource %s while interpolating: \"", "%", "filename_spec", ")" ]
Returns the number of seconds elapsed from the start until the latest entry.
def elapsed(self):
    """Seconds from the start time to the most recent timing entry.

    Returns 0.0 when timing has not started or no start time is set.
    """
    if self.started and self._start_time is not None:
        latest_timestamp = self._timing_data[-1][0]
        return latest_timestamp - self._start_time
    return 0.0
4,665
https://github.com/Robpol86/etaprogress/blob/224e8a248c2bf820bad218763281914ad3983fff/etaprogress/eta.py#L83-L87
[ "def", "virtual_potential_temperature", "(", "pressure", ",", "temperature", ",", "mixing", ",", "molecular_weight_ratio", "=", "mpconsts", ".", "epsilon", ")", ":", "pottemp", "=", "potential_temperature", "(", "pressure", ",", "temperature", ")", "return", "virtual_temperature", "(", "pottemp", ",", "mixing", ",", "molecular_weight_ratio", ")" ]
Returns an unstable rate based on the last two entries in the timing data . Less intensive to compute .
def rate_unstable(self):
    """Instantaneous rate from the last two timing samples.

    Cheaper than the regression-based rate; returns 0.0 when timing has
    not started or progress has stalled.
    """
    if self.stalled or not self.started:
        return 0.0
    (prev_t, prev_v), (cur_t, cur_v) = self._timing_data[-2:]
    return (cur_v - prev_v) / (cur_t - prev_t)
4,666
https://github.com/Robpol86/etaprogress/blob/224e8a248c2bf820bad218763281914ad3983fff/etaprogress/eta.py#L90-L96
[ "def", "normalize_dictionary_values", "(", "dictionary", ")", ":", "for", "key", ",", "val", "in", "dictionary", ".", "iteritems", "(", ")", ":", "if", "isinstance", "(", "val", ",", "dict", ")", ":", "dictionary", "[", "key", "]", "=", "normalize_dictionary_values", "(", "val", ")", "elif", "isinstance", "(", "val", ",", "list", ")", ":", "dictionary", "[", "key", "]", "=", "list", "(", "val", ")", "else", ":", "dictionary", "[", "key", "]", "=", "normalize_value", "(", "val", ")", "return", "dictionary" ]
Returns the overall average rate based on the start time .
def rate_overall(self):
    """Overall average rate since the start.

    Returns ``numerator / elapsed``; falls back to the regression rate
    when no time has elapsed yet.
    """
    elapsed = self.elapsed
    # Reuse the sampled value instead of re-reading the property, so the
    # zero guard and the division are guaranteed to see the same elapsed
    # time (previously the property was evaluated twice).
    return self.rate if not elapsed else self.numerator / elapsed
4,667
https://github.com/Robpol86/etaprogress/blob/224e8a248c2bf820bad218763281914ad3983fff/etaprogress/eta.py#L99-L102
[ "def", "setup", "(", ")", ":", "try", ":", "db", ".", "bind", "(", "*", "*", "config", ".", "database_config", ")", "except", "OSError", ":", "# Attempted to connect to a file-based database where the file didn't", "# exist", "db", ".", "bind", "(", "*", "*", "config", ".", "database_config", ",", "create_db", "=", "True", ")", "rebuild", "=", "True", "try", ":", "db", ".", "generate_mapping", "(", "create_tables", "=", "True", ")", "with", "orm", ".", "db_session", ":", "version", "=", "GlobalConfig", ".", "get", "(", "key", "=", "'schema_version'", ")", "if", "version", "and", "version", ".", "int_value", "!=", "SCHEMA_VERSION", ":", "logger", ".", "info", "(", "\"Existing database has schema version %d\"", ",", "version", ".", "int_value", ")", "else", ":", "rebuild", "=", "False", "except", ":", "# pylint:disable=bare-except", "logger", ".", "exception", "(", "\"Error mapping schema\"", ")", "if", "rebuild", ":", "logger", ".", "info", "(", "\"Rebuilding schema\"", ")", "try", ":", "db", ".", "drop_all_tables", "(", "with_all_data", "=", "True", ")", "db", ".", "create_tables", "(", ")", "except", ":", "raise", "RuntimeError", "(", "\"Unable to upgrade schema automatically; please \"", "+", "\"delete the existing database and try again.\"", ")", "with", "orm", ".", "db_session", ":", "if", "not", "GlobalConfig", ".", "get", "(", "key", "=", "'schema_version'", ")", ":", "logger", ".", "info", "(", "\"setting schema version to %d\"", ",", "SCHEMA_VERSION", ")", "GlobalConfig", "(", "key", "=", "'schema_version'", ",", "int_value", "=", "SCHEMA_VERSION", ")", "orm", ".", "commit", "(", ")" ]
Perform the ETA and rate calculation .
def _calculate(self):
    """Perform the ETA and rate calculation via linear regression.

    Fits a least-squares line through the (timestamp, value) pairs in
    ``self._timing_data``, stores the slope as ``self.rate``, and (when
    the total is defined) stores the predicted completion timestamp in
    ``self.eta_epoch``.
    """
    # Calculate means and standard deviations of timestamps (x) and values (y).
    mean_x = sum(i[0] for i in self._timing_data) / len(self._timing_data)
    mean_y = sum(i[1] for i in self._timing_data) / len(self._timing_data)
    # Sample standard deviation (n - 1 denominator).
    std_x = sqrt(sum(pow(i[0] - mean_x, 2) for i in self._timing_data) / (len(self._timing_data) - 1))
    std_y = sqrt(sum(pow(i[1] - mean_y, 2) for i in self._timing_data) / (len(self._timing_data) - 1))

    # Calculate Pearson correlation coefficient from centered sums.
    sum_xy, sum_sq_v_x, sum_sq_v_y = 0, 0, 0
    for x, y in self._timing_data:
        x -= mean_x
        y -= mean_y
        sum_xy += x * y
        sum_sq_v_x += pow(x, 2)
        sum_sq_v_y += pow(y, 2)
    pearson_r = sum_xy / sqrt(sum_sq_v_x * sum_sq_v_y)

    # Calculate regression line. y = mx + b where m is the slope and b is
    # the y-intercept. The slope is the progress rate.
    m = self.rate = pearson_r * (std_y / std_x)
    if self.undefined:
        # No known total: a rate is all that can be computed.
        return
    y = self.denominator
    b = mean_y - m * mean_x
    # x at which the regression line reaches the target value y.
    x = (y - b) / m

    # Calculate fitted line (transformed/shifted regression line horizontally)
    # so it passes through the most recent data point.
    fitted_b = self._timing_data[-1][1] - (m * self._timing_data[-1][0])
    fitted_x = (y - fitted_b) / m
    # Blend the two predictions, weighted by fraction of progress made.
    adjusted_x = ((fitted_x - x) * (self.numerator / self.denominator)) + x
    self.eta_epoch = adjusted_x
4,668
https://github.com/Robpol86/etaprogress/blob/224e8a248c2bf820bad218763281914ad3983fff/etaprogress/eta.py#L128-L169
[ "def", "_read", "(", "self", ",", "directory", ",", "filename", ",", "session", ",", "path", ",", "name", ",", "extension", ",", "spatial", "=", "None", ",", "spatialReferenceID", "=", "None", ",", "replaceParamFile", "=", "None", ")", ":", "yml_events", "=", "[", "]", "with", "open", "(", "path", ")", "as", "fo", ":", "yml_events", "=", "yaml", ".", "load", "(", "fo", ")", "for", "yml_event", "in", "yml_events", ":", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "yml_event", ".", "subfolder", ")", ")", ":", "orm_event", "=", "yml_event", ".", "as_orm", "(", ")", "if", "not", "self", ".", "_similar_event_exists", "(", "orm_event", ".", "subfolder", ")", ":", "session", ".", "add", "(", "orm_event", ")", "self", ".", "events", ".", "append", "(", "orm_event", ")", "session", ".", "commit", "(", ")" ]
Read HMET WES from File Method
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile):
    """Read an HMET WES file, parsing each line into an HmetRecord.

    Each data line is expected to hold: year month day hour, followed by
    barometric pressure, relative humidity, total sky cover, wind speed,
    dry bulb temperature, direct radiation, and global radiation.
    Unparseable lines (e.g. headers) are skipped.
    """
    # Set file extension property
    self.fileExtension = extension

    # Open file and parse into HmetRecords
    with open(path, 'r') as hmetFile:
        for line in hmetFile:
            sline = line.strip().split()

            try:
                # Extract data time from record
                dateTime = datetime(int(sline[0]), int(sline[1]),
                                    int(sline[2]), int(sline[3]))

                # Initialize GSSHAPY HmetRecord object
                hmetRecord = HmetRecord(hmetDateTime=dateTime,
                                        barometricPress=sline[4],
                                        relHumidity=sline[5],
                                        totalSkyCover=sline[6],
                                        windSpeed=sline[7],
                                        dryBulbTemp=sline[8],
                                        directRad=sline[9],
                                        globalRad=sline[10])

                # Associate HmetRecord with HmetFile
                hmetRecord.hmetFile = self
            except (IndexError, ValueError):
                # Malformed or header lines are skipped (best-effort
                # parsing), but unrelated errors now propagate instead of
                # being silently swallowed by the previous bare except.
                pass
4,669
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/hmet.py#L56-L87
[ "async", "def", "register", "(", "self", ",", "request", ")", ":", "session", "=", "await", "get_session", "(", "request", ")", "user_id", "=", "session", ".", "get", "(", "'user_id'", ")", "if", "user_id", ":", "return", "redirect", "(", "request", ",", "'timeline'", ")", "error", "=", "None", "form", "=", "None", "if", "request", ".", "method", "==", "'POST'", ":", "form", "=", "await", "request", ".", "post", "(", ")", "user_id", "=", "await", "db", ".", "get_user_id", "(", "self", ".", "mongo", ".", "user", ",", "form", "[", "'username'", "]", ")", "if", "not", "form", "[", "'username'", "]", ":", "error", "=", "'You have to enter a username'", "elif", "not", "form", "[", "'email'", "]", "or", "'@'", "not", "in", "form", "[", "'email'", "]", ":", "error", "=", "'You have to enter a valid email address'", "elif", "not", "form", "[", "'password'", "]", ":", "error", "=", "'You have to enter a password'", "elif", "form", "[", "'password'", "]", "!=", "form", "[", "'password2'", "]", ":", "error", "=", "'The two passwords do not match'", "elif", "user_id", "is", "not", "None", ":", "error", "=", "'The username is already taken'", "else", ":", "await", "self", ".", "mongo", ".", "user", ".", "insert", "(", "{", "'username'", ":", "form", "[", "'username'", "]", ",", "'email'", ":", "form", "[", "'email'", "]", ",", "'pw_hash'", ":", "generate_password_hash", "(", "form", "[", "'password'", "]", ")", "}", ")", "return", "redirect", "(", "request", ",", "'login'", ")", "return", "{", "\"error\"", ":", "error", ",", "\"form\"", ":", "form", "}" ]
Write HMET WES to File Method
def _write ( self , session , openFile , replaceParamFile ) : ## TODO: Ensure Other HMET Formats are supported hmetRecords = self . hmetRecords for record in hmetRecords : openFile . write ( '%s\t%s\t%s\t%s\t%.3f\t%s\t%s\t%s\t%s\t%.2f\t%.2f\n' % ( record . hmetDateTime . year , record . hmetDateTime . month , record . hmetDateTime . day , record . hmetDateTime . hour , record . barometricPress , record . relHumidity , record . totalSkyCover , record . windSpeed , record . dryBulbTemp , record . directRad , record . globalRad ) )
4,670
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/hmet.py#L89-L108
[ "async", "def", "register", "(", "self", ",", "request", ")", ":", "session", "=", "await", "get_session", "(", "request", ")", "user_id", "=", "session", ".", "get", "(", "'user_id'", ")", "if", "user_id", ":", "return", "redirect", "(", "request", ",", "'timeline'", ")", "error", "=", "None", "form", "=", "None", "if", "request", ".", "method", "==", "'POST'", ":", "form", "=", "await", "request", ".", "post", "(", ")", "user_id", "=", "await", "db", ".", "get_user_id", "(", "self", ".", "mongo", ".", "user", ",", "form", "[", "'username'", "]", ")", "if", "not", "form", "[", "'username'", "]", ":", "error", "=", "'You have to enter a username'", "elif", "not", "form", "[", "'email'", "]", "or", "'@'", "not", "in", "form", "[", "'email'", "]", ":", "error", "=", "'You have to enter a valid email address'", "elif", "not", "form", "[", "'password'", "]", ":", "error", "=", "'You have to enter a password'", "elif", "form", "[", "'password'", "]", "!=", "form", "[", "'password2'", "]", ":", "error", "=", "'The two passwords do not match'", "elif", "user_id", "is", "not", "None", ":", "error", "=", "'The username is already taken'", "else", ":", "await", "self", ".", "mongo", ".", "user", ".", "insert", "(", "{", "'username'", ":", "form", "[", "'username'", "]", ",", "'email'", ":", "form", "[", "'email'", "]", ",", "'pw_hash'", ":", "generate_password_hash", "(", "form", "[", "'password'", "]", ")", "}", ")", "return", "redirect", "(", "request", ",", "'login'", ")", "return", "{", "\"error\"", ":", "error", ",", "\"form\"", ":", "form", "}" ]
ProjectFileEvent Read from File Method
def _read ( self , directory , filename , session , path , name , extension , spatial = None , spatialReferenceID = None , replaceParamFile = None ) : yml_events = [ ] with open ( path ) as fo : yml_events = yaml . load ( fo ) for yml_event in yml_events : if os . path . exists ( os . path . join ( directory , yml_event . subfolder ) ) : orm_event = yml_event . as_orm ( ) if not self . _similar_event_exists ( orm_event . subfolder ) : session . add ( orm_event ) self . events . append ( orm_event ) session . commit ( )
4,671
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/evt.py#L30-L46
[ "def", "syzygyJD", "(", "jd", ")", ":", "sun", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "SUN", ",", "jd", ")", "moon", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "MOON", ",", "jd", ")", "dist", "=", "angle", ".", "distance", "(", "sun", ",", "moon", ")", "# Offset represents the Syzygy type. ", "# Zero is conjunction and 180 is opposition.", "offset", "=", "180", "if", "(", "dist", ">=", "180", ")", "else", "0", "while", "abs", "(", "dist", ")", ">", "MAX_ERROR", ":", "jd", "=", "jd", "-", "dist", "/", "13.1833", "# Moon mean daily motion", "sun", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "SUN", ",", "jd", ")", "moon", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "MOON", ",", "jd", ")", "dist", "=", "angle", ".", "closestdistance", "(", "sun", "-", "offset", ",", "moon", ")", "return", "jd" ]
Return yml compatible version of self
def as_yml ( self ) : return YmlFileEvent ( name = str ( self . name ) , subfolder = str ( self . subfolder ) )
4,672
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/evt.py#L115-L120
[ "def", "status", "(", "sec", ")", ":", "if", "_meta_", ".", "prg_bar", "in", "[", "\"on\"", ",", "\"ON\"", "]", ":", "syms", "=", "[", "\"|\"", ",", "\"/\"", ",", "\"-\"", ",", "\"\\\\\"", "]", "for", "sym", "in", "syms", ":", "sys", ".", "stdout", ".", "write", "(", "\"\\b{0}{1}{2}\"", ".", "format", "(", "_meta_", ".", "color", "[", "\"GREY\"", "]", ",", "sym", ",", "_meta_", ".", "color", "[", "\"ENDC\"", "]", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "time", ".", "sleep", "(", "float", "(", "sec", ")", ")" ]
Prepare request to node s API route
def prepare_request ( node ) : if node . resource . method not in AVAILABLE_METHODS : raise UnsupportedHTTPMethodError ( node . resource . method ) def request ( data = None , json = None , * * kwargs ) : """ Make request to node's API route with the given keyword arguments """ # validate given query parameters for key , value in kwargs . items ( ) : param = next ( ( p for p in node . resource . query_params if p . name == key ) , None ) if not param : raise UnsupportedQueryParameter ( node . resource . path , key ) if not match_type ( value , param . type ) : raise TypeError ( "Resource Query Parameter has type '{0}' but expected type '{1}'" . format ( value . __class__ . __name__ , param . type ) ) response = requests . request ( node . resource . method , node . resource . absolute_uri , params = kwargs , data = data , json = json ) return response return request
4,673
https://github.com/timofurrer/ramlient/blob/e93092252635a6b3b0aca2c390b9f820368b791c/ramlient/request.py#L20-L47
[ "def", "mergeDQarray", "(", "maskname", ",", "dqarr", ")", ":", "maskarr", "=", "None", "if", "maskname", "is", "not", "None", ":", "if", "isinstance", "(", "maskname", ",", "str", ")", ":", "# working with file on disk (default case)", "if", "os", ".", "path", ".", "exists", "(", "maskname", ")", ":", "mask", "=", "fileutil", ".", "openImage", "(", "maskname", ",", "memmap", "=", "False", ")", "maskarr", "=", "mask", "[", "0", "]", ".", "data", ".", "astype", "(", "np", ".", "bool", ")", "mask", ".", "close", "(", ")", "else", ":", "if", "isinstance", "(", "maskname", ",", "fits", ".", "HDUList", ")", ":", "# working with a virtual input file", "maskarr", "=", "maskname", "[", "0", "]", ".", "data", ".", "astype", "(", "np", ".", "bool", ")", "else", ":", "maskarr", "=", "maskname", ".", "data", ".", "astype", "(", "np", ".", "bool", ")", "if", "maskarr", "is", "not", "None", ":", "# merge array with dqarr now", "np", ".", "bitwise_and", "(", "dqarr", ",", "maskarr", ",", "dqarr", ")" ]
Defines the data to be plotted .
def define_plot_data ( data , x_name , * y_names ) : it = [ ] for k in y_names : it . append ( { 'x' : data [ x_name ] , 'y' : data [ k ] , 'name' : k } ) return it
4,674
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/examples/processing_chain/utils/plot.py#L9-L36
[ "def", "_calc_cc", "(", "self", ",", "iso_size", ")", ":", "# type: (int) -> Tuple[int, int]", "cylsize", "=", "self", ".", "geometry_heads", "*", "self", ".", "geometry_sectors", "*", "512", "frac", "=", "iso_size", "%", "cylsize", "padding", "=", "0", "if", "frac", ">", "0", ":", "padding", "=", "cylsize", "-", "frac", "cc", "=", "(", "iso_size", "+", "padding", ")", "//", "cylsize", "if", "cc", ">", "1024", ":", "cc", "=", "1024", "return", "(", "cc", ",", "padding", ")" ]
Plotting lines .
def plot_lines ( it ) : data = [ go . Scatter ( mode = 'lines' , * * d ) for d in it ] return py . iplot ( data , filename = 'scatter-mode' )
4,675
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/examples/processing_chain/utils/plot.py#L39-L52
[ "def", "numRegisteredForRole", "(", "self", ",", "role", ",", "includeTemporaryRegs", "=", "False", ")", ":", "count", "=", "self", ".", "eventregistration_set", ".", "filter", "(", "cancelled", "=", "False", ",", "dropIn", "=", "False", ",", "role", "=", "role", ")", ".", "count", "(", ")", "if", "includeTemporaryRegs", ":", "count", "+=", "self", ".", "temporaryeventregistration_set", ".", "filter", "(", "dropIn", "=", "False", ",", "role", "=", "role", ")", ".", "exclude", "(", "registration__expirationDate__lte", "=", "timezone", ".", "now", "(", ")", ")", ".", "count", "(", ")", "return", "count" ]
Do a read on a channel .
def _ssh_channel_read ( ssh_channel_int , count , is_stderr ) : buffer_ = create_string_buffer ( count ) while 1 : received_bytes = c_ssh_channel_read ( ssh_channel_int , cast ( buffer_ , c_void_p ) , c_uint32 ( count ) , c_int ( int ( is_stderr ) ) ) if received_bytes == SSH_ERROR : ssh_session_int = _ssh_channel_get_session ( ssh_channel_int ) error = ssh_get_error ( ssh_session_int ) raise SshError ( "Channel read failed: %s" % ( error ) ) # BUG: We're not using the nonblocking variant, but this can still # return SSH_AGAIN due to that call's broken dependencies. # TODO: This call might return SSH_AGAIN, even though we should always be # blocking. Reported as bug #115. elif received_bytes == SSH_AGAIN : continue else : break # TODO: Where is the timeout configured for the read? return buffer_ . raw [ 0 : received_bytes ]
4,676
https://github.com/dsoprea/PySecure/blob/ff7e01a0a77e79564cb00b6e38b4e6f9f88674f0/pysecure/adapters/channela.py#L79-L106
[ "def", "expand_gallery", "(", "generator", ",", "metadata", ")", ":", "if", "\"gallery\"", "not", "in", "metadata", "or", "metadata", "[", "'gallery'", "]", "is", "None", ":", "return", "# If no gallery specified, we do nothing", "lines", "=", "[", "]", "base_path", "=", "_image_path", "(", "generator", ")", "in_path", "=", "path", ".", "join", "(", "base_path", ",", "metadata", "[", "'gallery'", "]", ")", "template", "=", "generator", ".", "settings", ".", "get", "(", "'GALLERY_TEMPLATE'", ",", "DEFAULT_TEMPLATE", ")", "thumbnail_name", "=", "generator", ".", "settings", ".", "get", "(", "\"GALLERY_THUMBNAIL\"", ",", "DEFAULT_GALLERY_THUMB", ")", "thumbnail_prefix", "=", "generator", ".", "settings", ".", "get", "(", "\"\"", ")", "resizer", "=", "_resizer", "(", "thumbnail_name", ",", "'?x?'", ",", "base_path", ")", "for", "dirpath", ",", "_", ",", "filenames", "in", "os", ".", "walk", "(", "in_path", ")", ":", "for", "filename", "in", "filenames", ":", "if", "not", "filename", ".", "startswith", "(", "'.'", ")", ":", "url", "=", "path", ".", "join", "(", "dirpath", ",", "filename", ")", ".", "replace", "(", "base_path", ",", "\"\"", ")", "[", "1", ":", "]", "url", "=", "path", ".", "join", "(", "'/static'", ",", "generator", ".", "settings", ".", "get", "(", "'IMAGE_PATH'", ",", "DEFAULT_IMAGE_DIR", ")", ",", "url", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "logger", ".", "debug", "(", "\"GALLERY: {0}\"", ".", "format", "(", "url", ")", ")", "thumbnail", "=", "resizer", ".", "get_thumbnail_name", "(", "filename", ")", "thumbnail", "=", "path", ".", "join", "(", "'/'", ",", "generator", ".", "settings", ".", "get", "(", "'THUMBNAIL_DIR'", ",", "DEFAULT_THUMBNAIL_DIR", ")", ",", "thumbnail", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "lines", ".", "append", "(", "template", ".", "format", "(", "filename", "=", "filename", ",", "url", "=", "url", ",", "thumbnail", "=", "thumbnail", ",", ")", ")", "metadata", "[", "'gallery_content'", "]", "=", 
"\"\\n\"", ".", "join", "(", "lines", ")" ]
Parse EVENT chunks
def eventChunk ( key , lines ) : ## NOTE: RADAR file format not supported currently. ## TODO: Add Support for RADAR file format type values # Contants KEYWORDS = ( 'EVENT' , 'NRPDS' , 'NRGAG' , 'COORD' , 'GAGES' , 'ACCUM' , 'RATES' , 'RADAR' ) NUM_CARDS = ( 'NRPDS' , 'NRGAG' ) VALUE_CARDS = ( 'GAGES' , 'ACCUM' , 'RATES' , 'RADAR' ) # Define result object result = { 'description' : None , 'nrgag' : None , 'nrpds' : None , 'coords' : [ ] , 'valLines' : [ ] } chunks = pt . chunk ( KEYWORDS , lines ) # Parse chunks associated with each key for card , chunkList in iteritems ( chunks ) : # Parse each chunk in the chunk list for chunk in chunkList : schunk = chunk [ 0 ] . strip ( ) . split ( ) # Cases if card == 'EVENT' : # EVENT handler schunk = pt . splitLine ( chunk [ 0 ] ) result [ 'description' ] = schunk [ 1 ] elif card in NUM_CARDS : # Num cards handler result [ card . lower ( ) ] = schunk [ 1 ] elif card == 'COORD' : # COORD handler schunk = pt . splitLine ( chunk [ 0 ] ) try : # Extract the event description desc = schunk [ 3 ] except : # Handle case where the event description is blank desc = "" coord = { 'x' : schunk [ 1 ] , 'y' : schunk [ 2 ] , 'description' : desc } result [ 'coords' ] . append ( coord ) elif card in VALUE_CARDS : # Value cards handler # Extract DateTime dateTime = datetime ( year = int ( schunk [ 1 ] ) , month = int ( schunk [ 2 ] ) , day = int ( schunk [ 3 ] ) , hour = int ( schunk [ 4 ] ) , minute = int ( schunk [ 5 ] ) ) # Compile values into a list values = [ ] for index in range ( 6 , len ( schunk ) ) : values . append ( schunk [ index ] ) valueLine = { 'type' : schunk [ 0 ] , 'dateTime' : dateTime , 'values' : values } result [ 'valLines' ] . append ( valueLine ) return result
4,677
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/lib/gag_chunk.py#L15-L102
[ "def", "_get_site_response_term", "(", "self", ",", "C", ",", "imt", ",", "vs30", ",", "sa1180", ")", ":", "# vs30 star", "vs30_star", "=", "self", ".", "_get_vs30star", "(", "vs30", ",", "imt", ")", "# compute the site term", "site_resp_term", "=", "np", ".", "zeros_like", "(", "vs30", ")", "gt_vlin", "=", "vs30", ">=", "C", "[", "'vlin'", "]", "lw_vlin", "=", "vs30", "<", "C", "[", "'vlin'", "]", "# compute site response term for sites with vs30 greater than vlin", "vs30_rat", "=", "vs30_star", "/", "C", "[", "'vlin'", "]", "site_resp_term", "[", "gt_vlin", "]", "=", "(", "(", "C", "[", "'a10'", "]", "+", "C", "[", "'b'", "]", "*", "self", ".", "CONSTS", "[", "'n'", "]", ")", "*", "np", ".", "log", "(", "vs30_rat", "[", "gt_vlin", "]", ")", ")", "# compute site response term for sites with vs30 lower than vlin", "site_resp_term", "[", "lw_vlin", "]", "=", "(", "C", "[", "'a10'", "]", "*", "np", ".", "log", "(", "vs30_rat", "[", "lw_vlin", "]", ")", "-", "C", "[", "'b'", "]", "*", "np", ".", "log", "(", "sa1180", "[", "lw_vlin", "]", "+", "C", "[", "'c'", "]", ")", "+", "C", "[", "'b'", "]", "*", "np", ".", "log", "(", "sa1180", "[", "lw_vlin", "]", "+", "C", "[", "'c'", "]", "*", "vs30_rat", "[", "lw_vlin", "]", "**", "self", ".", "CONSTS", "[", "'n'", "]", ")", ")", "return", "site_resp_term" ]
Base method for making a Losant API request
def request ( self , method , path , params = None , headers = None , body = None ) : if not headers : headers = { } if not params : params = { } headers [ "Accept" ] = "application/json" headers [ "Accept-Version" ] = "^1.15.0" if self . auth_token : headers [ "Authorization" ] = "Bearer {0}" . format ( self . auth_token ) path = self . url + path params = self . flatten_params ( params ) response = requests . request ( method , path , params = params , headers = headers , json = body ) result = response . text try : result = response . json ( ) except Exception : pass if response . status_code >= 400 : raise LosantError ( response . status_code , result ) return result
4,678
https://github.com/Losant/losant-rest-python/blob/75b20decda0e999002f21811c3508f087e7f13b5/losantrest/client.py#L165-L190
[ "def", "concatenate_json", "(", "source_folder", ",", "destination_file", ")", ":", "matches", "=", "[", "]", "for", "root", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "source_folder", ")", ":", "for", "filename", "in", "fnmatch", ".", "filter", "(", "filenames", ",", "'*.json'", ")", ":", "matches", ".", "append", "(", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", ")", "with", "open", "(", "destination_file", ",", "\"wb\"", ")", "as", "f", ":", "f", ".", "write", "(", "\"[\\n\"", ")", "for", "m", "in", "matches", "[", ":", "-", "1", "]", ":", "f", ".", "write", "(", "open", "(", "m", ",", "\"rb\"", ")", ".", "read", "(", ")", ")", "f", ".", "write", "(", "\",\\n\"", ")", "f", ".", "write", "(", "open", "(", "matches", "[", "-", "1", "]", ",", "\"rb\"", ")", ".", "read", "(", ")", ")", "f", ".", "write", "(", "\"\\n]\"", ")" ]
Flatten out nested arrays and dicts in query params into correct format
def flatten_params ( self , data , base_key = None ) : result = { } if data is None : return result map_data = None if not isinstance ( data , collections . Mapping ) : map_data = [ ] for idx , val in enumerate ( data ) : map_data . append ( [ str ( idx ) , val ] ) else : map_data = list ( data . items ( ) ) for key , value in map_data : if not base_key is None : key = base_key + "[" + key + "]" if isinstance ( value , basestring ) or not hasattr ( value , "__iter__" ) : result [ key ] = value else : result . update ( self . flatten_params ( value , key ) ) return result
4,679
https://github.com/Losant/losant-rest-python/blob/75b20decda0e999002f21811c3508f087e7f13b5/losantrest/client.py#L192-L216
[ "def", "set_max_order_count", "(", "self", ",", "max_count", ",", "on_error", "=", "'fail'", ")", ":", "control", "=", "MaxOrderCount", "(", "on_error", ",", "max_count", ")", "self", ".", "register_trading_control", "(", "control", ")" ]
Reads the excel file .
def read_excel ( input_fpath ) : return { k : v . values for k , v in pd . read_excel ( input_fpath ) . items ( ) }
4,680
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/examples/processing_chain/process.py#L13-L25
[ "def", "refract", "(", "structure", ")", "->", "Element", ":", "if", "isinstance", "(", "structure", ",", "Element", ")", ":", "return", "structure", "elif", "isinstance", "(", "structure", ",", "str", ")", ":", "return", "String", "(", "content", "=", "structure", ")", "elif", "isinstance", "(", "structure", ",", "bool", ")", ":", "return", "Boolean", "(", "content", "=", "structure", ")", "elif", "isinstance", "(", "structure", ",", "(", "int", ",", "float", ")", ")", ":", "return", "Number", "(", "content", "=", "structure", ")", "elif", "isinstance", "(", "structure", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "Array", "(", "content", "=", "list", "(", "map", "(", "refract", ",", "structure", ")", ")", ")", "elif", "isinstance", "(", "structure", ",", "dict", ")", ":", "return", "Object", "(", "content", "=", "[", "Member", "(", "key", "=", "refract", "(", "k", ")", ",", "value", "=", "refract", "(", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "structure", ".", "items", "(", ")", "]", ")", "elif", "structure", "is", "None", ":", "return", "Null", "(", ")", "raise", "ValueError", "(", "'Unsupported Value Type'", ")" ]
Save model outputs in an Excel file .
def save_outputs ( outputs , output_fpath ) : df = pd . DataFrame ( outputs ) with pd . ExcelWriter ( output_fpath ) as writer : df . to_excel ( writer )
4,681
https://github.com/vinci1it2000/schedula/blob/addb9fd685be81544b796c51383ac00a31543ce9/examples/processing_chain/process.py#L51-L65
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "sites", ",", "pga1100", ",", "sigma_pga", ",", "stddev_types", ")", ":", "std_intra", "=", "self", ".", "_compute_intra_event_std", "(", "C", ",", "sites", ".", "vs30", ",", "pga1100", ",", "sigma_pga", ")", "std_inter", "=", "C", "[", "'t_lny'", "]", "*", "np", ".", "ones_like", "(", "sites", ".", "vs30", ")", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "self", ".", "_get_total_sigma", "(", "C", ",", "std_intra", ",", "std_inter", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTRA_EVENT", ":", "stddevs", ".", "append", "(", "std_intra", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTER_EVENT", ":", "stddevs", ".", "append", "(", "std_inter", ")", "return", "stddevs" ]
Precipitation Read from File Method
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : # Set file extension property self . fileExtension = extension # Dictionary of keywords/cards and parse function names KEYWORDS = ( 'EVENT' , ) # Parse file into chunks associated with keywords/cards with open ( path , 'r' ) as f : chunks = pt . chunk ( KEYWORDS , f ) # Parse chunks associated with each key for key , chunkList in iteritems ( chunks ) : # Parse each chunk in the chunk list for chunk in chunkList : result = gak . eventChunk ( key , chunk ) self . _createGsshaPyObjects ( result ) # Add this PrecipFile to the database session session . add ( self )
4,682
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/gag.py#L62-L84
[ "def", "_setup_aggregation", "(", "self", ",", "aggregator", "=", "None", ")", ":", "from", "nefertari", ".", "elasticsearch", "import", "ES", "if", "aggregator", "is", "None", ":", "aggregator", "=", "ESAggregator", "aggregations_enabled", "=", "(", "ES", ".", "settings", "and", "ES", ".", "settings", ".", "asbool", "(", "'enable_aggregations'", ")", ")", "if", "not", "aggregations_enabled", ":", "log", ".", "debug", "(", "'Elasticsearch aggregations are not enabled'", ")", "return", "index", "=", "getattr", "(", "self", ",", "'index'", ",", "None", ")", "index_defined", "=", "index", "and", "index", "!=", "self", ".", "not_allowed_action", "if", "index_defined", ":", "self", ".", "index", "=", "aggregator", "(", "self", ")", ".", "wrap", "(", "self", ".", "index", ")" ]
Precipitation File Write to File Method
def _write ( self , session , openFile , replaceParamFile ) : # Retrieve the events associated with this PrecipFile events = self . precipEvents # Write each event to file for event in events : openFile . write ( 'EVENT "%s"\nNRGAG %s\nNRPDS %s\n' % ( event . description , event . nrGag , event . nrPds ) ) if event . nrGag > 0 : values = event . values valList = [ ] # Convert PrecipValue objects into a list of dictionaries, valList, # so that it is compatible with the pivot function. for value in values : valList . append ( { 'ValueType' : value . valueType , 'DateTime' : value . dateTime , 'Gage' : value . gage . id , 'Value' : value . value } ) # Pivot using the function found at: # code.activestate.com/recipes/334695 pivotedValues = pivot . pivot ( valList , ( 'DateTime' , 'ValueType' ) , ( 'Gage' , ) , 'Value' ) ## TODO: Create custom pivot function that can work with sqlalchemy ## objects explicitly without the costly conversion. # Create an empty set for obtaining a list of unique gages gages = session . query ( PrecipGage ) . filter ( PrecipGage . event == event ) . order_by ( PrecipGage . id ) . all ( ) for gage in gages : openFile . write ( 'COORD %s %s "%s"\n' % ( gage . x , gage . y , gage . description ) ) # Write the value rows out to file for row in pivotedValues : # Extract the PrecipValues valString = '' # Retreive a list of sorted keys. This assumes the values are # read into the database in order keys = sorted ( [ key for key in row if key != 'DateTime' and key != 'ValueType' ] ) # String all of the values together into valString for key in keys : if key != 'DateTime' and key != 'ValueType' : valString = '%s %.3f' % ( valString , row [ key ] ) # Write value line to file with appropriate formatting openFile . write ( '%s %.4d %.2d %.2d %.2d %.2d%s\n' % ( row [ 'ValueType' ] , row [ 'DateTime' ] . year , row [ 'DateTime' ] . month , row [ 'DateTime' ] . day , row [ 'DateTime' ] . hour , row [ 'DateTime' ] . minute , valString ) )
4,683
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/gag.py#L86-L148
[ "def", "green", "(", "fn", "=", "None", ",", "consume_green_mode", "=", "True", ")", ":", "def", "decorator", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "greener", "(", "obj", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "args", "=", "(", "obj", ",", ")", "+", "args", "wait", "=", "kwargs", ".", "pop", "(", "'wait'", ",", "None", ")", "timeout", "=", "kwargs", ".", "pop", "(", "'timeout'", ",", "None", ")", "access", "=", "kwargs", ".", "pop", "if", "consume_green_mode", "else", "kwargs", ".", "get", "green_mode", "=", "access", "(", "'green_mode'", ",", "None", ")", "executor", "=", "get_object_executor", "(", "obj", ",", "green_mode", ")", "return", "executor", ".", "run", "(", "fn", ",", "args", ",", "kwargs", ",", "wait", "=", "wait", ",", "timeout", "=", "timeout", ")", "return", "greener", "if", "fn", "is", "None", ":", "return", "decorator", "return", "decorator", "(", "fn", ")" ]
Create GSSHAPY PrecipEvent PrecipValue and PrecipGage Objects Method
def _createGsshaPyObjects ( self , eventChunk ) : ## TODO: Add Support for RADAR file format type values # Create GSSHAPY PrecipEvent event = PrecipEvent ( description = eventChunk [ 'description' ] , nrGag = eventChunk [ 'nrgag' ] , nrPds = eventChunk [ 'nrpds' ] ) # Associate PrecipEvent with PrecipFile event . precipFile = self gages = [ ] for coord in eventChunk [ 'coords' ] : # Create GSSHAPY PrecipGage object gage = PrecipGage ( description = coord [ 'description' ] , x = coord [ 'x' ] , y = coord [ 'y' ] ) # Associate PrecipGage with PrecipEvent gage . event = event # Append to gages list for association with PrecipValues gages . append ( gage ) for valLine in eventChunk [ 'valLines' ] : for index , value in enumerate ( valLine [ 'values' ] ) : # Create GSSHAPY PrecipValue object val = PrecipValue ( valueType = valLine [ 'type' ] , dateTime = valLine [ 'dateTime' ] , value = value ) # Associate PrecipValue with PrecipEvent and PrecipGage val . event = event val . gage = gages [ index ]
4,684
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/gag.py#L150-L186
[ "def", "decode_modified_utf8", "(", "data", ",", "errors", "=", "\"strict\"", ")", ":", "value", ",", "length", "=", "u\"\"", ",", "0", "it", "=", "iter", "(", "decoder", "(", "data", ")", ")", "while", "True", ":", "try", ":", "value", "+=", "next", "(", "it", ")", "length", "+=", "1", "except", "StopIteration", ":", "break", "except", "UnicodeDecodeError", "as", "e", ":", "if", "errors", "==", "\"strict\"", ":", "raise", "e", "elif", "errors", "==", "\"ignore\"", ":", "pass", "elif", "errors", "==", "\"replace\"", ":", "value", "+=", "u\"\\uFFFD\"", "length", "+=", "1", "return", "value", ",", "length" ]
Look up spatial reference system using the projection file .
def lookupSpatialReferenceID ( cls , directory , filename ) : path = os . path . join ( directory , filename ) with open ( path , 'r' ) as f : srid = lookupSpatialReferenceID ( f . read ( ) ) return srid
4,685
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/pro.py#L61-L78
[ "def", "get_categories", "(", "self", ")", ":", "posts", "=", "self", ".", "get_posts", "(", "include_draft", "=", "True", ")", "result", "=", "{", "}", "for", "post", "in", "posts", ":", "for", "category_name", "in", "set", "(", "post", ".", "categories", ")", ":", "result", "[", "category_name", "]", "=", "result", ".", "setdefault", "(", "category_name", ",", "Pair", "(", "0", ",", "0", ")", ")", "+", "Pair", "(", "1", ",", "0", "if", "post", ".", "is_draft", "else", "1", ")", "return", "list", "(", "result", ".", "items", "(", ")", ")" ]
Projection File Read from File Method
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : # Set file extension property self . fileExtension = extension # Open file and parse into a data structure with io_open ( path , 'r' ) as f : self . projection = f . read ( )
4,686
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/pro.py#L81-L90
[ "def", "syzygyJD", "(", "jd", ")", ":", "sun", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "SUN", ",", "jd", ")", "moon", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "MOON", ",", "jd", ")", "dist", "=", "angle", ".", "distance", "(", "sun", ",", "moon", ")", "# Offset represents the Syzygy type. ", "# Zero is conjunction and 180 is opposition.", "offset", "=", "180", "if", "(", "dist", ">=", "180", ")", "else", "0", "while", "abs", "(", "dist", ")", ">", "MAX_ERROR", ":", "jd", "=", "jd", "-", "dist", "/", "13.1833", "# Moon mean daily motion", "sun", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "SUN", ",", "jd", ")", "moon", "=", "swe", ".", "sweObjectLon", "(", "const", ".", "MOON", ",", "jd", ")", "dist", "=", "angle", ".", "closestdistance", "(", "sun", "-", "offset", ",", "moon", ")", "return", "jd" ]
Projection File Write to File Method
def _write ( self , session , openFile , replaceParamFile ) : # Write lines openFile . write ( text ( self . projection ) )
4,687
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/pro.py#L92-L97
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
Sets a new numerator and generates the ETA . Must be greater than or equal to previous numerator .
def numerator ( self , value ) : # If ETA is every iteration, don't do anything fancy. if self . eta_every <= 1 : self . _eta . numerator = value self . _eta_string = self . _generate_eta ( self . _eta . eta_seconds ) return # If ETA is not every iteration, unstable rate is used. If this bar is undefined, no point in calculating ever. if self . _eta . undefined : self . _eta . set_numerator ( value , calculate = False ) return # Calculate if this iteration is the right one. if self . _eta_count >= self . eta_every : self . _eta_count = 1 self . _eta . numerator = value self . _eta_string = self . _generate_eta ( self . _eta . eta_seconds ) return self . _eta_count += 1 self . _eta . set_numerator ( value , calculate = False )
4,688
https://github.com/Robpol86/etaprogress/blob/224e8a248c2bf820bad218763281914ad3983fff/etaprogress/components/base_progress_bar.py#L40-L61
[ "def", "_ParseStorageMediaOptions", "(", "self", ",", "options", ")", ":", "self", ".", "_ParseStorageMediaImageOptions", "(", "options", ")", "self", ".", "_ParseVSSProcessingOptions", "(", "options", ")", "self", ".", "_ParseCredentialOptions", "(", "options", ")", "self", ".", "_ParseSourcePathOption", "(", "options", ")" ]
Returns the rate of the progress as a float . Selects the unstable rate if eta_every > 1 for performance .
def rate ( self ) : return float ( self . _eta . rate_unstable if self . eta_every > 1 else self . _eta . rate )
4,689
https://github.com/Robpol86/etaprogress/blob/224e8a248c2bf820bad218763281914ad3983fff/etaprogress/components/base_progress_bar.py#L69-L71
[ "def", "do_reset_ids", "(", "concatenated_meta_df", ",", "data_df", ",", "concat_direction", ")", ":", "if", "concat_direction", "==", "\"horiz\"", ":", "# Make sure cids agree between data_df and concatenated_meta_df", "assert", "concatenated_meta_df", ".", "index", ".", "equals", "(", "data_df", ".", "columns", ")", ",", "(", "\"cids in concatenated_meta_df do not agree with cids in data_df.\"", ")", "# Reset cids in concatenated_meta_df", "reset_ids_in_meta_df", "(", "concatenated_meta_df", ")", "# Replace cids in data_df with the new ones from concatenated_meta_df", "# (just an array of unique integers, zero-indexed)", "data_df", ".", "columns", "=", "pd", ".", "Index", "(", "concatenated_meta_df", ".", "index", ".", "values", ")", "elif", "concat_direction", "==", "\"vert\"", ":", "# Make sure rids agree between data_df and concatenated_meta_df", "assert", "concatenated_meta_df", ".", "index", ".", "equals", "(", "data_df", ".", "index", ")", ",", "(", "\"rids in concatenated_meta_df do not agree with rids in data_df.\"", ")", "# Reset rids in concatenated_meta_df", "reset_ids_in_meta_df", "(", "concatenated_meta_df", ")", "# Replace rids in data_df with the new ones from concatenated_meta_df", "# (just an array of unique integers, zero-indexed)", "data_df", ".", "index", "=", "pd", ".", "Index", "(", "concatenated_meta_df", ".", "index", ".", "values", ")" ]
Generates an elevation grid for the GSSHA simulation from an elevation raster
def generateFromRaster ( self , elevation_raster , shapefile_path = None , out_elevation_grid = None , resample_method = gdalconst . GRA_Average , load_raster_to_db = True ) : if not self . projectFile : raise ValueError ( "Must be connected to project file ..." ) # make sure paths are absolute as the working directory changes elevation_raster = os . path . abspath ( elevation_raster ) shapefile_path = os . path . abspath ( shapefile_path ) # must match elevation mask grid mask_grid = self . projectFile . getGrid ( ) if out_elevation_grid is None : out_elevation_grid = '{0}.{1}' . format ( self . projectFile . name , self . fileExtension ) elevation_grid = resample_grid ( elevation_raster , mask_grid , resample_method = resample_method , as_gdal_grid = True ) with tmp_chdir ( self . projectFile . project_directory ) : elevation_grid . to_grass_ascii ( out_elevation_grid , print_nodata = False ) # read raster into object if load_raster_to_db : self . _load_raster_text ( out_elevation_grid ) self . filename = out_elevation_grid self . projectFile . setCard ( "ELEVATION" , out_elevation_grid , add_quotes = True ) # find outlet and add slope self . projectFile . findOutlet ( shapefile_path )
4,690
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/ele.py#L42-L111
[ "def", "addRectAnnot", "(", "self", ",", "rect", ")", ":", "CheckParent", "(", "self", ")", "val", "=", "_fitz", ".", "Page_addRectAnnot", "(", "self", ",", "rect", ")", "if", "not", "val", ":", "return", "val", ".", "thisown", "=", "True", "val", ".", "parent", "=", "weakref", ".", "proxy", "(", "self", ")", "self", ".", "_annot_refs", "[", "id", "(", "val", ")", "]", "=", "val", "return", "val" ]
Storm Pipe Network File Read from File Method
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : # Set file extension property self . fileExtension = extension # Dictionary of keywords/cards and parse function names KEYWORDS = { 'CONNECT' : spc . connectChunk , 'SJUNC' : spc . sjuncChunk , 'SLINK' : spc . slinkChunk } sjuncs = [ ] slinks = [ ] connections = [ ] # Parse file into chunks associated with keywords/cards with open ( path , 'r' ) as f : chunks = pt . chunk ( KEYWORDS , f ) # Parse chunks associated with each key for key , chunkList in iteritems ( chunks ) : # Parse each chunk in the chunk list for chunk in chunkList : # Call chunk specific parsers for each chunk result = KEYWORDS [ key ] ( key , chunk ) # Cases if key == 'CONNECT' : connections . append ( result ) elif key == 'SJUNC' : sjuncs . append ( result ) elif key == 'SLINK' : slinks . append ( result ) # Create GSSHAPY objects self . _createConnection ( connections ) self . _createSjunc ( sjuncs ) self . _createSlink ( slinks )
4,691
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L65-L103
[ "def", "binormal", "(", "obj", ",", "params", ",", "*", "*", "kwargs", ")", ":", "normalize", "=", "kwargs", ".", "get", "(", "'normalize'", ",", "True", ")", "if", "isinstance", "(", "obj", ",", "abstract", ".", "Curve", ")", ":", "if", "isinstance", "(", "params", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "ops", ".", "binormal_curve_single_list", "(", "obj", ",", "params", ",", "normalize", ")", "else", ":", "return", "ops", ".", "binormal_curve_single", "(", "obj", ",", "params", ",", "normalize", ")", "if", "isinstance", "(", "obj", ",", "abstract", ".", "Surface", ")", ":", "raise", "GeomdlException", "(", "\"Binormal vector evaluation for the surfaces is not implemented!\"", ")" ]
Storm Pipe Network File Write to File Method
def _write ( self , session , openFile , replaceParamFile ) : # Retrieve Connection objects and write to file connections = self . connections self . _writeConnections ( connections = connections , fileObject = openFile ) # Retrieve SuperJunction objects and write to file sjuncs = self . superJunctions self . _writeSuperJunctions ( superJunctions = sjuncs , fileObject = openFile ) # Retrieve SuperLink objects and write to file slinks = self . superLinks self . _writeSuperLinks ( superLinks = slinks , fileObject = openFile )
4,692
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L105-L122
[ "def", "reboot", "(", "*", "args", ")", ":", "args", "=", "list", "(", "sys", ".", "argv", ")", "+", "list", "(", "args", ")", "if", "args", "[", "0", "]", "==", "'python'", "or", "not", "args", "[", "0", "]", ":", "args", "[", "0", "]", "=", "BIN_PYTHON", "elif", "os", ".", "path", ".", "basename", "(", "sys", ".", "argv", "[", "0", "]", ")", "in", "[", "'lore'", ",", "'lore.exe'", "]", ":", "args", "[", "0", "]", "=", "BIN_LORE", "try", ":", "os", ".", "execv", "(", "args", "[", "0", "]", ",", "args", ")", "except", "Exception", "as", "e", ":", "if", "args", "[", "0", "]", "==", "BIN_LORE", "and", "args", "[", "1", "]", "==", "'console'", "and", "JUPYTER_KERNEL_PATH", ":", "print", "(", "ansi", ".", "error", "(", ")", "+", "' Your jupyter kernel may be corrupt. Please remove it so lore can reinstall:\\n $ rm '", "+", "JUPYTER_KERNEL_PATH", ")", "raise", "e" ]
Create GSSHAPY Connection Objects Method
def _createConnection ( self , connections ) : for c in connections : # Create GSSHAPY Connection object connection = Connection ( slinkNumber = c [ 'slinkNumber' ] , upSjuncNumber = c [ 'upSjunc' ] , downSjuncNumber = c [ 'downSjunc' ] ) # Associate Connection with StormPipeNetworkFile connection . stormPipeNetworkFile = self
4,693
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L124-L136
[ "def", "execute", "(", "self", ",", "eopatch", ")", ":", "feature_type", ",", "feature_name", "=", "next", "(", "self", ".", "feature", "(", "eopatch", ")", ")", "data", "=", "eopatch", "[", "feature_type", "]", "[", "feature_name", "]", ".", "copy", "(", ")", "# compute band according to compositing method (e.g. blue, maxNDVI, maxNDWI)", "reference_bands", "=", "self", ".", "_get_reference_band", "(", "data", ")", "# find temporal indices corresponding to pre-defined percentile", "indices", "=", "self", ".", "_get_indices", "(", "reference_bands", ")", "# compute composite image selecting values along temporal dimension corresponding to percentile indices", "composite_image", "=", "np", ".", "empty", "(", "(", "data", ".", "shape", "[", "1", ":", "]", ")", ",", "np", ".", "float32", ")", "composite_image", "[", ":", "]", "=", "self", ".", "no_data_value", "for", "scene_id", ",", "scene", "in", "enumerate", "(", "data", ")", ":", "composite_image", "=", "np", ".", "where", "(", "np", ".", "dstack", "(", "[", "indices", "]", ")", "==", "scene_id", ",", "scene", ",", "composite_image", ")", "eopatch", "[", "self", ".", "composite_type", "]", "[", "self", ".", "composite_name", "]", "=", "composite_image", "return", "eopatch" ]
Create GSSHAPY SuperLink Pipe and SuperNode Objects Method
def _createSlink ( self , slinks ) : for slink in slinks : # Create GSSHAPY SuperLink object superLink = SuperLink ( slinkNumber = slink [ 'slinkNumber' ] , numPipes = slink [ 'numPipes' ] ) # Associate SuperLink with StormPipeNetworkFile superLink . stormPipeNetworkFile = self for node in slink [ 'nodes' ] : # Create GSSHAPY SuperNode objects superNode = SuperNode ( nodeNumber = node [ 'nodeNumber' ] , groundSurfaceElev = node [ 'groundSurfaceElev' ] , invertElev = node [ 'invertElev' ] , manholeSA = node [ 'manholeSA' ] , nodeInletCode = node [ 'inletCode' ] , cellI = node [ 'cellI' ] , cellJ = node [ 'cellJ' ] , weirSideLength = node [ 'weirSideLength' ] , orificeDiameter = node [ 'orificeDiameter' ] ) # Associate SuperNode with SuperLink superNode . superLink = superLink for p in slink [ 'pipes' ] : # Create GSSHAPY Pipe objects pipe = Pipe ( pipeNumber = p [ 'pipeNumber' ] , xSecType = p [ 'xSecType' ] , diameterOrHeight = p [ 'diameterOrHeight' ] , width = p [ 'width' ] , slope = p [ 'slope' ] , roughness = p [ 'roughness' ] , length = p [ 'length' ] , conductance = p [ 'conductance' ] , drainSpacing = p [ 'drainSpacing' ] ) # Associate Pipe with SuperLink pipe . superLink = superLink
4,694
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L138-L179
[ "def", "parse_date", "(", "datestring", ")", ":", "datestring", "=", "str", "(", "datestring", ")", ".", "strip", "(", ")", "if", "not", "datestring", "[", "0", "]", ".", "isdigit", "(", ")", ":", "raise", "ParseError", "(", ")", "if", "'W'", "in", "datestring", ".", "upper", "(", ")", ":", "try", ":", "datestring", "=", "datestring", "[", ":", "-", "1", "]", "+", "str", "(", "int", "(", "datestring", "[", "-", "1", ":", "]", ")", "-", "1", ")", "except", ":", "pass", "for", "regex", ",", "pattern", "in", "DATE_FORMATS", ":", "if", "regex", ".", "match", "(", "datestring", ")", ":", "found", "=", "regex", ".", "search", "(", "datestring", ")", ".", "groupdict", "(", ")", "dt", "=", "datetime", ".", "utcnow", "(", ")", ".", "strptime", "(", "found", "[", "'matched'", "]", ",", "pattern", ")", "if", "'fraction'", "in", "found", "and", "found", "[", "'fraction'", "]", "is", "not", "None", ":", "dt", "=", "dt", ".", "replace", "(", "microsecond", "=", "int", "(", "found", "[", "'fraction'", "]", "[", "1", ":", "]", ")", ")", "if", "'timezone'", "in", "found", "and", "found", "[", "'timezone'", "]", "is", "not", "None", ":", "dt", "=", "dt", ".", "replace", "(", "tzinfo", "=", "Timezone", "(", "found", ".", "get", "(", "'timezone'", ",", "''", ")", ")", ")", "return", "dt", "return", "parse_time", "(", "datestring", ")" ]
Create GSSHAPY SuperJunction Objects Method
def _createSjunc ( self , sjuncs ) : for sjunc in sjuncs : # Create GSSHAPY SuperJunction object superJunction = SuperJunction ( sjuncNumber = sjunc [ 'sjuncNumber' ] , groundSurfaceElev = sjunc [ 'groundSurfaceElev' ] , invertElev = sjunc [ 'invertElev' ] , manholeSA = sjunc [ 'manholeSA' ] , inletCode = sjunc [ 'inletCode' ] , linkOrCellI = sjunc [ 'linkOrCellI' ] , nodeOrCellJ = sjunc [ 'nodeOrCellJ' ] , weirSideLength = sjunc [ 'weirSideLength' ] , orificeDiameter = sjunc [ 'orificeDiameter' ] ) # Associate SuperJunction with StormPipeNetworkFile superJunction . stormPipeNetworkFile = self
4,695
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L181-L199
[ "def", "flush", "(", "self", ")", ":", "writer", "=", "self", ".", "writer", "if", "writer", "is", "None", ":", "raise", "GaugedUseAfterFreeError", "self", ".", "flush_writer_position", "(", ")", "keys", "=", "self", ".", "translate_keys", "(", ")", "blocks", "=", "[", "]", "current_block", "=", "self", ".", "current_block", "statistics", "=", "self", ".", "statistics", "driver", "=", "self", ".", "driver", "flags", "=", "0", "# for future extensions, e.g. block compression", "for", "namespace", ",", "key", ",", "block", "in", "self", ".", "pending_blocks", "(", ")", ":", "length", "=", "block", ".", "byte_length", "(", ")", "if", "not", "length", ":", "continue", "key_id", "=", "keys", "[", "(", "namespace", ",", "key", ")", "]", "statistics", "[", "namespace", "]", ".", "byte_count", "+=", "length", "blocks", ".", "append", "(", "(", "namespace", ",", "current_block", ",", "key_id", ",", "block", ".", "buffer", "(", ")", ",", "flags", ")", ")", "if", "self", ".", "config", ".", "overwrite_blocks", ":", "driver", ".", "replace_blocks", "(", "blocks", ")", "else", ":", "driver", ".", "insert_or_append_blocks", "(", "blocks", ")", "if", "not", "Gauged", ".", "writer_flush_maps", "(", "writer", ",", "True", ")", ":", "raise", "MemoryError", "update_namespace", "=", "driver", ".", "add_namespace_statistics", "for", "namespace", ",", "stats", "in", "statistics", ".", "iteritems", "(", ")", ":", "update_namespace", "(", "namespace", ",", "self", ".", "current_block", ",", "stats", ".", "data_points", ",", "stats", ".", "byte_count", ")", "statistics", ".", "clear", "(", ")", "driver", ".", "commit", "(", ")", "self", ".", "flush_now", "=", "False" ]
Write Connections to File Method
def _writeConnections ( self , connections , fileObject ) : for connection in connections : fileObject . write ( 'CONNECT %s %s %s\n' % ( connection . slinkNumber , connection . upSjuncNumber , connection . downSjuncNumber ) )
4,696
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L201-L209
[ "def", "get_changed_devices", "(", "self", ",", "timestamp", ")", ":", "if", "timestamp", "is", "None", ":", "payload", "=", "{", "}", "else", ":", "payload", "=", "{", "'timeout'", ":", "SUBSCRIPTION_WAIT", ",", "'minimumdelay'", ":", "SUBSCRIPTION_MIN_WAIT", "}", "payload", ".", "update", "(", "timestamp", ")", "# double the timeout here so requests doesn't timeout before vera", "payload", ".", "update", "(", "{", "'id'", ":", "'lu_sdata'", ",", "}", ")", "logger", ".", "debug", "(", "\"get_changed_devices() requesting payload %s\"", ",", "str", "(", "payload", ")", ")", "r", "=", "self", ".", "data_request", "(", "payload", ",", "TIMEOUT", "*", "2", ")", "r", ".", "raise_for_status", "(", ")", "# If the Vera disconnects before writing a full response (as lu_sdata", "# will do when interrupted by a Luup reload), the requests module will", "# happily return 200 with an empty string. So, test for empty response,", "# so we don't rely on the JSON parser to throw an exception.", "if", "r", ".", "text", "==", "\"\"", ":", "raise", "PyveraError", "(", "\"Empty response from Vera\"", ")", "# Catch a wide swath of what the JSON parser might throw, within", "# reason. Unfortunately, some parsers don't specifically return", "# json.decode.JSONDecodeError, but so far most seem to derive what", "# they do throw from ValueError, so that's helpful.", "try", ":", "result", "=", "r", ".", "json", "(", ")", "except", "ValueError", "as", "ex", ":", "raise", "PyveraError", "(", "\"JSON decode error: \"", "+", "str", "(", "ex", ")", ")", "if", "not", "(", "type", "(", "result", ")", "is", "dict", "and", "'loadtime'", "in", "result", "and", "'dataversion'", "in", "result", ")", ":", "raise", "PyveraError", "(", "\"Unexpected/garbled response from Vera\"", ")", "# At this point, all good. 
Update timestamp and return change data.", "device_data", "=", "result", ".", "get", "(", "'devices'", ")", "timestamp", "=", "{", "'loadtime'", ":", "result", ".", "get", "(", "'loadtime'", ")", ",", "'dataversion'", ":", "result", ".", "get", "(", "'dataversion'", ")", "}", "return", "[", "device_data", ",", "timestamp", "]" ]
Write SuperJunctions to File Method
def _writeSuperJunctions ( self , superJunctions , fileObject ) : for sjunc in superJunctions : fileObject . write ( 'SJUNC %s %.2f %.2f %.6f %s %s %s %.6f %.6f\n' % ( sjunc . sjuncNumber , sjunc . groundSurfaceElev , sjunc . invertElev , sjunc . manholeSA , sjunc . inletCode , sjunc . linkOrCellI , sjunc . nodeOrCellJ , sjunc . weirSideLength , sjunc . orificeDiameter ) )
4,697
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L211-L225
[ "def", "readout", "(", "self", ")", ":", "elec", "=", "self", ".", "simulate_poisson_variate", "(", ")", "elec_pre", "=", "self", ".", "saturate", "(", "elec", ")", "elec_f", "=", "self", ".", "pre_readout", "(", "elec_pre", ")", "adu_r", "=", "self", ".", "base_readout", "(", "elec_f", ")", "adu_p", "=", "self", ".", "post_readout", "(", "adu_r", ")", "self", ".", "clean_up", "(", ")", "return", "adu_p" ]
Write SuperLinks to File Method
def _writeSuperLinks ( self , superLinks , fileObject ) : for slink in superLinks : fileObject . write ( 'SLINK %s %s\n' % ( slink . slinkNumber , slink . numPipes ) ) for node in slink . superNodes : fileObject . write ( 'NODE %s %.2f %.2f %.6f %s %s %s %.6f %.6f\n' % ( node . nodeNumber , node . groundSurfaceElev , node . invertElev , node . manholeSA , node . nodeInletCode , node . cellI , node . cellJ , node . weirSideLength , node . orificeDiameter ) ) for pipe in slink . pipes : fileObject . write ( 'PIPE %s %s %.6f %.6f %.6f %.6f %.2f %.6f %.6f\n' % ( pipe . pipeNumber , pipe . xSecType , pipe . diameterOrHeight , pipe . width , pipe . slope , pipe . roughness , pipe . length , pipe . conductance , pipe . drainSpacing ) )
4,698
https://github.com/CI-WATER/gsshapy/blob/00fd4af0fd65f1614d75a52fe950a04fb0867f4c/gsshapy/orm/spn.py#L227-L257
[ "def", "get_changed_devices", "(", "self", ",", "timestamp", ")", ":", "if", "timestamp", "is", "None", ":", "payload", "=", "{", "}", "else", ":", "payload", "=", "{", "'timeout'", ":", "SUBSCRIPTION_WAIT", ",", "'minimumdelay'", ":", "SUBSCRIPTION_MIN_WAIT", "}", "payload", ".", "update", "(", "timestamp", ")", "# double the timeout here so requests doesn't timeout before vera", "payload", ".", "update", "(", "{", "'id'", ":", "'lu_sdata'", ",", "}", ")", "logger", ".", "debug", "(", "\"get_changed_devices() requesting payload %s\"", ",", "str", "(", "payload", ")", ")", "r", "=", "self", ".", "data_request", "(", "payload", ",", "TIMEOUT", "*", "2", ")", "r", ".", "raise_for_status", "(", ")", "# If the Vera disconnects before writing a full response (as lu_sdata", "# will do when interrupted by a Luup reload), the requests module will", "# happily return 200 with an empty string. So, test for empty response,", "# so we don't rely on the JSON parser to throw an exception.", "if", "r", ".", "text", "==", "\"\"", ":", "raise", "PyveraError", "(", "\"Empty response from Vera\"", ")", "# Catch a wide swath of what the JSON parser might throw, within", "# reason. Unfortunately, some parsers don't specifically return", "# json.decode.JSONDecodeError, but so far most seem to derive what", "# they do throw from ValueError, so that's helpful.", "try", ":", "result", "=", "r", ".", "json", "(", ")", "except", "ValueError", "as", "ex", ":", "raise", "PyveraError", "(", "\"JSON decode error: \"", "+", "str", "(", "ex", ")", ")", "if", "not", "(", "type", "(", "result", ")", "is", "dict", "and", "'loadtime'", "in", "result", "and", "'dataversion'", "in", "result", ")", ":", "raise", "PyveraError", "(", "\"Unexpected/garbled response from Vera\"", ")", "# At this point, all good. 
Update timestamp and return change data.", "device_data", "=", "result", ".", "get", "(", "'devices'", ")", "timestamp", "=", "{", "'loadtime'", ":", "result", ".", "get", "(", "'loadtime'", ")", ",", "'dataversion'", ":", "result", ".", "get", "(", "'dataversion'", ")", "}", "return", "[", "device_data", ",", "timestamp", "]" ]
ku = ultimate gain tu = period of oscillation at ultimate gain
def ziegler_nichols ( self , ku , tu , control_type = 'pid' ) : converter = dict ( p = lambda ku , tu : ( .5 * ku , 0 , 0 ) , pi = lambda ku , tu : ( .45 * ku , 1.2 * ( .45 * ku ) / tu , 0 ) , pd = lambda ku , tu : ( .8 * ku , 0 , ( .8 * ku ) * tu / 8 ) , pid = lambda ku , tu : ( .6 * ku , 2 * ( .6 * ku ) / tu , ( .6 * ku ) * tu / 8 ) , pessen = lambda ku , tu : ( .7 * ku , 2.5 * ( .7 * ku ) / tu , 3 * ( .7 * ku ) * tu / 20 ) , some_overshoot = lambda ku , tu : ( .33 * ku , 2 * ( .33 * ku ) / tu , ( .33 * ku ) * tu / 3 ) , no_overshoot = lambda ku , tu : ( .2 * ku , 2 * ( .2 * ku ) / tu , ( .2 * ku ) * tu / 3 ) ) self . kp , self . ki , self . kd = converter [ control_type . lower ( ) ] ( ku , tu )
4,699
https://github.com/theodoregoetz/wernher/blob/ef5d3aabe24e532b5eab33cd0212b2dbc2c9022e/wernher/pid_control.py#L110-L124
[ "def", "dict_to_pyxb", "(", "rp_dict", ")", ":", "rp_pyxb", "=", "d1_common", ".", "types", ".", "dataoneTypes", ".", "replicationPolicy", "(", ")", "rp_pyxb", ".", "replicationAllowed", "=", "rp_dict", "[", "'allowed'", "]", "rp_pyxb", ".", "numberReplicas", "=", "rp_dict", "[", "'num'", "]", "rp_pyxb", ".", "blockedMemberNode", "=", "rp_dict", "[", "'block'", "]", "rp_pyxb", ".", "preferredMemberNode", "=", "rp_dict", "[", "'pref'", "]", "normalize", "(", "rp_pyxb", ")", "return", "rp_pyxb" ]