idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
36,900
def optimize_forecasting_method(self, timeSeries, forecastingMethod):
    """Optimize the parameters of forecastingMethod for the given timeSeries.

    Returns the [error, parameterValues] pair with the smallest error over
    the configured [startingPercentage, endPercentage] interval.
    """
    tuneableParameters = forecastingMethod.get_optimizable_parameters()
    # Pair each tuneable parameter with the list of candidate values.
    # (idiom fix: list() instead of a copying list comprehension)
    remainingParameters = [
        [parameter, list(self._generate_next_parameter_value(parameter, forecastingMethod))]
        for parameter in tuneableParameters
    ]
    forecastingResults = self.optimization_loop(timeSeries, forecastingMethod, remainingParameters)
    # The combination with the smallest error wins.
    return min(
        forecastingResults,
        key=lambda result: result[0].get_error(self._startingPercentage, self._endPercentage),
    )
Optimizes the parameters for the given timeSeries and forecastingMethod .
36,901
def optimization_loop(self, timeSeries, forecastingMethod, remainingParameters, currentParameterValues=None):
    """Recursively evaluate every combination of parameter values.

    Returns a list of [errorInstance, parameterValueDict] pairs, one per
    combination that could be evaluated.
    """
    if currentParameterValues is None:
        currentParameterValues = {}
    # Base case: every parameter has a value -- evaluate this combination.
    if not remainingParameters:
        for parameter, value in currentParameterValues.items():
            forecastingMethod.set_parameter(parameter, value)
        forecast = timeSeries.apply(forecastingMethod)
        error = self._errorClass(**self._errorMeasureKWArgs)
        if not error.initialize(timeSeries, forecast):
            return []  # this combination could not be evaluated
        return [[error, dict(currentParameterValues)]]
    localParameterName, localParameterValues = remainingParameters[-1]
    # BUGFIX: compute the reduced parameter list ONCE.  The original
    # reassigned remainingParameters inside the loop, dropping one extra
    # parameter per candidate value after the first, so most combinations
    # were silently skipped.
    reducedParameters = remainingParameters[:-1]
    results = []
    for value in localParameterValues:
        currentParameterValues[localParameterName] = value
        results += self.optimization_loop(timeSeries, forecastingMethod,
                                          reducedParameters, currentParameterValues)
    return results
The optimization loop .
36,902
def energy_data():
    """Load readings from the database and return them as a JSON response.

    NOTE(review): db.cursor().execute() is called without an SQL statement
    here -- presumably the query text was stripped from this copy; confirm
    against the original project.
    """
    cur = db.cursor().execute()
    original = TimeSeries()
    original.initialize_from_sql_cursor(cur)
    # Collapse the readings into daily sums.
    original.normalize("day", fusionMethod="sum")
    return itty.Response(json.dumps(original, cls=PycastEncoder), content_type='application/json')
Connects to the database and loads Readings for device 8 .
36,903
def get(cls, parent=None, id=None, data=None):
    """Build a resource from its parent's context and return it.

    Route and configuration are inherited from parent; when data is given
    it is used directly, otherwise the resource is fetched.
    """
    route = copy(parent.route) if parent is not None else {}
    if id is not None and cls.ID_NAME is not None:
        route[cls.ID_NAME] = id
    # NOTE(review): parent.key below still assumes parent is not None.
    obj = cls(key=parent.key, route=route, config=parent.config)
    if data:
        obj.data = data
    else:
        obj.fetch()
    return obj
Inherit info from parent and return new object
36,904
def _url ( self ) : if self . ID_NAME not in self . route . keys ( ) and "id" in self . data . keys ( ) : self . route [ self . ID_NAME ] = self . data [ "id" ] return self . config . BASE + self . PATH . format ( ** self . route )
Get the URL for the resource
36,905
def _handle_request_exception(request):
    """Raise the exception matching the response's HTTP status code.

    request: a requests.Response object (the historical parameter name is
    kept for backward compatibility).
    """
    try:
        data = request.json()
    except ValueError:  # BUGFIX: was a bare except hiding unrelated errors
        data = {}
    code = request.status_code
    if code == requests.codes.bad:
        raise BadRequestException(response=data)
    if code == requests.codes.unauthorized:
        raise UnauthorizedException(response=data)
    if code == requests.codes.not_found:
        raise NotFoundException(response=data)
    # Anything else non-2xx: let requests raise its own HTTPError.
    request.raise_for_status()
Raise the proper exception based on the response
36,906
def _process_meta ( self , response ) : for key in self . META_ENVELOPES : self . meta [ key ] = response . get ( key )
Process additional data sent in response
36,907
def patch(self, **kwargs):
    """PATCH the given attributes to the item, then re-fetch it.

    Delegates non-success status codes to the shared exception handler.
    """
    started_at = datetime.now()
    response = requests.patch(self._url(), auth=(self.key, ""), data=kwargs)
    self._delay_for_ratelimits(started_at)
    if response.status_code not in self.TRUTHY_CODES:
        return self._handle_request_exception(response)
    self.fetch()
Change attributes of the item
36,908
def create(cls, parent=None, **kwargs):
    """Create a new resource under parent via POST and return it."""
    if parent is None:
        raise Exception("Parent class is required")
    route = copy(parent.route)
    if cls.ID_NAME is not None:
        route[cls.ID_NAME] = ""
    obj = cls(key=parent.key, route=route, config=parent.config)
    started_at = datetime.now()
    response = requests.post(obj._url(), auth=(obj.key, ""), data=kwargs)
    cls._delay_for_ratelimits(started_at)
    if response.status_code not in cls.TRUTHY_CODES:
        return cls._handle_request_exception(response)
    payload = response.json()
    # Prefer the generic "id" field, falling back to the class-specific one.
    obj.route[obj.ID_NAME] = payload.get("id", payload.get(obj.ID_NAME))
    obj.data = payload
    return obj
Create an object and return it
36,909
def _delay_for_ratelimits(cls, start):
    """Sleep so the request takes at least REQUEST_TIME_MICROSECONDS.

    BUGFIX: the original read timedelta.microseconds, which is only the
    sub-second component of the duration; any request longer than one
    second was treated as nearly instantaneous and delayed again.
    """
    elapsed = datetime.now() - start
    # MICROSECONDS_PER_SECOND is the module-level conversion constant.
    elapsed_microseconds = elapsed.total_seconds() * MICROSECONDS_PER_SECOND
    if elapsed_microseconds < cls.REQUEST_TIME_MICROSECONDS:
        time.sleep((cls.REQUEST_TIME_MICROSECONDS - elapsed_microseconds)
                   / MICROSECONDS_PER_SECOND)
If the request was shorter than the max request time, delay .
36,910
def run(self):
    """Submit the configured EC2 query and report the HTTP response.

    Returns the Deferred for the submitted query; the response body (or a
    formatted error) is written to self.output.  (Python 2 code.)
    """
    region = AWSServiceRegion(access_key=self.key, secret_key=self.secret, uri=self.endpoint)
    query = self.query_factory(action=self.action, creds=region.creds,
                               endpoint=region.ec2_endpoint, other_params=self.parameters)

    def write_response(response):
        # Success path: URL, status code, then the raw response body.
        print >> self.output, "URL: %s" % query.client.url
        print >> self.output
        print >> self.output, "HTTP status code: %s" % query.client.status
        print >> self.output
        print >> self.output, response

    def write_error(failure):
        # AWSError carries the original server message; otherwise strip
        # the redundant "Error Message: " prefix from generic failures.
        if failure.check(AWSError):
            message = failure.value.original
        else:
            message = failure.getErrorMessage()
            if message.startswith("Error Message: "):
                message = message[len("Error Message: "):]
        print >> self.output, "URL: %s" % query.client.url
        print >> self.output
        if getattr(query.client, "status", None) is not None:
            print >> self.output, "HTTP status code: %s" % (query.client.status,)
            print >> self.output
        print >> self.output, message
        if getattr(failure.value, "response", None) is not None:
            print >> self.output
            print >> self.output, failure.value.response

    deferred = query.submit()
    deferred.addCallback(write_response)
    deferred.addErrback(write_error)
    return deferred
Run the configured method and write the HTTP response status and text to the output stream .
36,911
def get_canonical_host(self):
    """Return host[:port] in lowercase, as used by the HTTP Host header."""
    canonical = self.host.lower()
    if self.port is not None:
        canonical = "%s:%s" % (canonical, self.port)
    return canonical
Return the canonical host as for the Host HTTP header specification .
36,912
def set_canonical_host(self, canonical_host):
    """Parse a "host[:port]" string into self.host and self.port."""
    parts = canonical_host.lower().split(":")
    self.host = parts[0]
    # A trailing ":" (empty port) counts as no port at all.
    self.port = int(parts[1]) if len(parts) > 1 and parts[1] else None
Set host and port from a canonical host string as for the Host HTTP header specification .
36,913
def get_uri(self):
    """Return scheme://host[:port]/path for this service."""
    return "%s://%s%s" % (self.scheme, self.get_canonical_host(), self.path)
Get a URL representation of the service .
36,914
def optimized(fn):
    """Decorator: dispatch to the optimized C++ (pycastC) implementation.

    When self.optimizationEnabled is set, import the matching function
    from the pycastC mirror of the defining module and call it; fall back
    to the pure-Python fn when that import fails.  (Python 2 code.)
    """
    def _optimized(self, *args, **kwargs):
        if self.optimizationEnabled:
            class_name = self.__class__.__name__
            # pycast.foo.Bar -> pycastC.foo.Bar
            module = self.__module__.replace("pycast", "pycastC")
            try:
                imported = __import__("%s.%s" % (module, class_name),
                                      globals(), locals(), [fn.__name__])
                function = getattr(imported, fn.__name__)
                return function(self, *args, **kwargs)
            except ImportError:
                print "[WARNING] Could not enable optimization for %s, %s" % (fn.__name__, self)
                return fn(self, *args, **kwargs)
        else:
            return fn(self, *args, **kwargs)
    # Make the wrapper look like the wrapped function.
    setattr(_optimized, "__name__", fn.__name__)
    setattr(_optimized, "__repr__", fn.__repr__)
    setattr(_optimized, "__str__", fn.__str__)
    setattr(_optimized, "__doc__", fn.__doc__)
    return _optimized
Decorator that will call the optimized C++ version of a pycast function if available rather than the original pycast function
36,915
def optimize(self, timeSeries, forecastingMethods=None, startingPercentage=0.0, endPercentage=100.0):
    """Run the optimization on the given TimeSeries.

    Raises ValueError when no forecasting methods are supplied.
    NOTE(review): the body appears truncated in this copy -- only the
    argument validation is present.
    """
    if not forecastingMethods:
        raise ValueError("forecastingMethods cannot be empty.")
Runs the optimization on the given TimeSeries .
36,916
def method(method_class):
    """Class decorator marking an API method for registry scanning."""
    def callback(scanner, name, method_class):
        # Default the actions to the class name and the versions to [None].
        actions = method_class.actions if method_class.actions is not None else [name]
        versions = method_class.versions if method_class.versions is not None else [None]
        for action in actions:
            for version in versions:
                scanner.registry.add(method_class, action=action, version=version)
    from venusian import attach
    attach(method_class, callback, category="method")
    return method_class
Decorator to use to mark an API method .
36,917
def hexlify(script, minify=False):
    """Return the Intel-HEX encoding of a Python script's bytes.

    The script is wrapped in the MicroPython 'MP' + length header, padded
    to a 16-byte boundary and emitted as :10 data records starting at
    _SCRIPT_ADDR.  Raises ValueError when minification is requested but
    unavailable, or when the script exceeds the flash budget.
    """
    if not script:
        return ''
    # Normalise line endings to \n.
    script = script.replace(b'\r\n', b'\n')
    script = script.replace(b'\r', b'\n')
    if minify:
        if not can_minify:
            raise ValueError("No minifier is available")
        script = nudatus.mangle(script.decode('utf-8')).encode('utf-8')
    # 'MP' magic + little-endian 16-bit length, padded to 16 bytes.
    data = b'MP' + struct.pack('<H', len(script)) + script
    data = data + (b'\x00' * (16 - len(data) % 16))
    if len(data) > _MAX_SIZE:
        raise ValueError("Python script must be less than 8188 bytes.")
    # Extended linear address record selecting the script area.
    output = [':020000040003F7']
    addr = _SCRIPT_ADDR
    for i in range(0, len(data), 16):
        chunk = data[i:min(i + 16, len(data))]
        # Record header: byte count, 16-bit address, record type 0.
        chunk = struct.pack('>BHB', len(chunk), addr & 0xffff, 0) + chunk
        # Intel-HEX checksum: two's complement of the byte sum.
        checksum = (-(sum(bytearray(chunk)))) & 0xff
        hexline = ':%s%02X' % (strfunc(binascii.hexlify(chunk)).upper(), checksum)
        output.append(hexline)
        addr += 16
    return '\n'.join(output)
Takes the byte content of a Python script and returns a hex encoded version of it .
36,918
def unhexlify(blob):
    """Decode a hexlified MicroPython script back into Python source.

    Returns '' when the blob does not carry the 'MP' magic or the payload
    is not valid UTF-8.  The first line of blob is always skipped.
    """
    lines = blob.split('\n')[1:]
    # Strip the 9-char record header and 2-char checksum of each line.
    chunks = [binascii.unhexlify(line[9:-2]) for line in lines]
    if chunks[0][0:2].decode('utf-8') != u'MP':
        return ''
    # Drop the 'MP' + length header and the trailing zero padding.
    chunks[0] = chunks[0][4:]
    chunks[-1] = chunks[-1].strip(b'\x00')
    script = b''.join(chunks)
    try:
        return script.decode('utf-8')
    except UnicodeDecodeError:
        return ''
Takes a hexlified script and turns it back into a string of Python code .
36,919
def embed_hex(runtime_hex, python_hex=None):
    """Insert a hexlified Python script into a MicroPython runtime hex.

    The script records are spliced in just before the runtime's final
    five records.  Raises ValueError when no runtime hex is supplied;
    returns the runtime unchanged when there is no script to embed.
    """
    if not runtime_hex:
        raise ValueError('MicroPython runtime hex required.')
    if not python_hex:
        return runtime_hex
    runtime_lines = runtime_hex.split()
    script_lines = python_hex.split()
    combined = runtime_lines[:-5] + script_lines + runtime_lines[-5:]
    return '\n'.join(combined) + '\n'
Given a string representing the MicroPython runtime hex will embed a string representing a hex encoded Python script into it .
36,920
def extract_script(embedded_hex):
    """Extract the embedded Python script from a MicroPython hex file.

    Scans for the extended-address record selecting the script's 64K
    bank, then the first data record at _SCRIPT_ADDR, and decodes the
    records up to the 0xFF filler line.  Returns '' when no script is
    found.
    """
    hex_lines = embedded_hex.split('\n')
    # Upper and lower 16 bits of the script start address, as hex text.
    script_addr_high = hex((_SCRIPT_ADDR >> 16) & 0xffff)[2:].upper().zfill(4)
    script_addr_low = hex(_SCRIPT_ADDR & 0xffff)[2:].upper().zfill(4)
    start_script = None
    within_range = False
    for loc, val in enumerate(hex_lines):
        if val[0:9] == ':02000004':
            # Extended linear address record: inside the script bank?
            within_range = val[9:13].upper() == script_addr_high
        elif within_range and val[0:3] == ':10' and val[3:7].upper() == script_addr_low:
            start_script = loc
            break
    if start_script:
        end_script = None
        for loc, val in enumerate(hex_lines[start_script:]):
            if val[9:41] == 'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF':
                end_script = loc + start_script
                break
        # start_script - 1 because unhexlify skips the first line given.
        return unhexlify('\n'.join(
            hex_lines[start_script - 1:end_script if end_script else -6]))
    return ''
Given a hex file containing the MicroPython runtime and an embedded Python script will extract the original Python script .
36,921
def save_hex(hex_file, path):
    """Write a hex-file string to path, flashing any device mounted there.

    Raises ValueError for empty content or a non-.hex target path.
    """
    if not hex_file:
        raise ValueError('Cannot flash an empty .hex file.')
    if not path.endswith('.hex'):
        raise ValueError('The path to flash must be for a .hex file.')
    content = hex_file.encode('ascii')
    with open(path, 'wb') as target:
        target.write(content)
Given a string representation of a hex file this function copies it to the specified path thus causing the device mounted at that point to be flashed .
36,922
def extract(path_to_hex, output_path=None):
    """Extract the embedded Python script from a hex file.

    Writes the script to output_path when given, otherwise prints it to
    stdout.
    """
    with open(path_to_hex, 'r') as hex_file:
        python_script = extract_script(hex_file.read())
    if output_path:
        with open(output_path, 'w') as output_file:
            output_file.write(python_script)
    else:
        print(python_script)
Given a path_to_hex file this function will attempt to extract the embedded script from it and save it either to output_path or stdout
36,923
def main(argv=None):
    """Entry point for the uflash command line tool.

    Parses the arguments and dispatches to extract, watch or flash; any
    failure is reported on stderr and the process exits with status 1.
    """
    if not argv:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(description=_HELP_TEXT)
    parser.add_argument('source', nargs='?', default=None)
    parser.add_argument('target', nargs='*', default=None)
    parser.add_argument('-r', '--runtime', default=None,
                        help="Use the referenced MicroPython runtime.")
    parser.add_argument('-e', '--extract', action='store_true',
                        help=("Extract python source from a hex file"
                              " instead of creating the hex file."),)
    parser.add_argument('-w', '--watch', action='store_true',
                        help='Watch the source file for changes.')
    parser.add_argument('-m', '--minify', action='store_true',
                        help='Minify the source')
    parser.add_argument('--version', action='version',
                        version='%(prog)s ' + get_version())
    args = parser.parse_args(argv)
    if args.extract:
        try:
            extract(args.source, args.target)
        except Exception as ex:
            error_message = "Error extracting {source}: {error!s}"
            print(error_message.format(source=args.source, error=ex),
                  file=sys.stderr)
            sys.exit(1)
    elif args.watch:
        try:
            # Re-flash every time the source file changes.
            watch_file(args.source, flash,
                       path_to_python=args.source,
                       paths_to_microbits=args.target,
                       path_to_runtime=args.runtime)
        except Exception as ex:
            error_message = "Error watching {source}: {error!s}"
            print(error_message.format(source=args.source, error=ex),
                  file=sys.stderr)
            sys.exit(1)
    else:
        try:
            flash(path_to_python=args.source,
                  paths_to_microbits=args.target,
                  path_to_runtime=args.runtime,
                  minify=args.minify)
        except Exception as ex:
            error_message = (
                "Error flashing {source} to {target}{runtime}: {error!s}")
            source = args.source
            target = args.target if args.target else "microbit"
            if args.runtime:
                runtime = "with runtime {runtime}".format(runtime=args.runtime)
            else:
                runtime = ""
            print(error_message.format(source=source, target=target,
                                       runtime=runtime, error=ex),
                  file=sys.stderr)
            sys.exit(1)
Entry point for the command line tool uflash .
36,924
def timing(func):
    """Decorator: print how long each call to func takes, pass result through."""
    @functools.wraps(func)
    def wrap(*args, **kw):
        started = time()
        result = func(*args, **kw)
        elapsed = time() - started
        print('func:%r args:[%r, %r] took: %2.4f sec'
              % (func.__name__, args, kw, elapsed))
        return result
    return wrap
Measure the execution time of a function call and print the result .
36,925
def get_currency(currency_str):
    """Look up a Currency by entity, name or ISO code.

    The bundled units/currencies.csv is scanned row by row; the first row
    whose entity, name or code equals currency_str wins.  Raises
    ValueError when nothing matches.
    """
    path = 'units/currencies.csv'
    filepath = pkg_resources.resource_filename('mpu', path)
    with open(filepath, 'r') as fp:
        reader = csv.reader(fp, delimiter=',', quotechar='"')
        next(reader, None)  # skip the header row
        for row in reader:
            is_currency = currency_str in [row[0], row[1], row[2]]
            if is_currency:
                entity = row[0]
                name = row[1]
                code = row[2]
                numeric_code = row[3]
                symbol = row[4]
                # Columns 5 (exponent) and 6 (withdrawal date) may be empty.
                if len(row[5]) == 0:
                    exponent = None
                else:
                    exponent = int(row[5])
                if len(row[6]) > 0:
                    withdrawal_date = row[6]
                else:
                    withdrawal_date = None
                subunits = row[7]
                return Currency(name=name, code=code,
                                numeric_code=numeric_code, symbol=symbol,
                                exponent=exponent, entities=[entity],
                                withdrawal_date=withdrawal_date,
                                subunits=subunits)
    raise ValueError('Could not find currency \'{}\''.format(currency_str))
Convert an identifier for a currency into a currency object .
36,926
def from_json(cls, json):
    """Build a Currency instance from a parsed JSON dictionary."""
    return cls(
        name=json['name'],
        code=json['code'],
        numeric_code=json['numeric_code'],
        symbol=json['symbol'],
        exponent=json['exponent'],
        entities=json['entities'],
        withdrawal_date=json['withdrawal_date'],
        subunits=json['subunits'],
    )
Create a Currency object from a JSON dump .
36,927
def is_email(potential_email_address):
    """Check whether a string looks like a plain e-mail address.

    Rejects addresses carrying a display name and requires a dot in the
    domain part.
    """
    context, mail = parseaddr(potential_email_address)
    if context or not mail:
        return False
    return ('@' in potential_email_address
            and '.' in potential_email_address.split('@')[1])
Check if potential_email_address is a valid e - mail address .
36,928
def str2bool(string_, default='raise'):
    """Convert a string to a bool.

    Unknown values raise ValueError unless default is falsy, in which
    case they map to False.
    """
    truthy = ['true', 't', '1', 'y', 'yes', 'enabled', 'enable', 'on']
    falsy = ['false', 'f', '0', 'n', 'no', 'disabled', 'disable', 'off']
    lowered = string_.lower()
    if lowered in truthy:
        return True
    if lowered in falsy or not default:
        return False
    raise ValueError('The value \'{}\' cannot be mapped to boolean.'
                     .format(string_))
Convert a string to a bool .
36,929
def str2bool_or_none(string_, default='raise'):
    """Convert a string to a bool, or to None for none-like strings."""
    if is_none(string_, default=False):
        return None
    return str2bool(string_, default)
Convert a string to a bool or to None .
36,930
def is_none(string_, default='raise'):
    """Check whether a string represents None.

    Unknown values raise ValueError unless default is falsy, in which
    case they map to False.
    """
    none_values = ['none', 'undefined', 'unknown', 'null', '']
    if string_.lower() in none_values:
        return True
    if not default:
        return False
    raise ValueError('The value \'{}\' cannot be mapped to none.'
                     .format(string_))
Check if a string is equivalent to None .
36,931
def is_iban(potential_iban):
    """Check if a string is a valid IBAN number.

    Validates length and country prefix against the bundled iban.csv and,
    for German IBANs only, the MOD-97 checksum.  Other countries'
    checksums are not verified.
    """
    path = 'data/iban.csv'
    filepath = pkg_resources.resource_filename('mpu', path)
    data = mpu.io.read(filepath, delimiter=';', format='dicts')
    potential_iban = potential_iban.replace(' ', '')
    # Shorter than the shortest known IBAN: cannot be valid.
    if len(potential_iban) < min([int(el['length']) for el in data]):
        return False
    country = None
    for element in data:
        if element['iban_fields'][:2] == potential_iban[:2]:
            country = element
            break
    if country is None:
        return False
    if len(potential_iban) != int(country['length']):
        return False
    if country['country_en'] == 'Germany':
        # Collect the checksum digits ('k' positions) and compare them
        # with the recomputed value.
        checksum_val = [value
                        for field_type, value in zip(country['iban_fields'],
                                                     potential_iban)
                        if field_type == 'k']
        checksum_val = ''.join(checksum_val)
        checksum_exp = _calculate_german_iban_checksum(potential_iban,
                                                       country['iban_fields'])
        return checksum_val == checksum_exp
    return True
Check if a string is a valid IBAN number .
36,932
def _calculate_german_iban_checksum ( iban , iban_fields = 'DEkkbbbbbbbbcccccccccc' ) : number = [ value for field_type , value in zip ( iban_fields , iban ) if field_type in [ 'b' , 'c' ] ] translate = { '0' : '0' , '1' : '1' , '2' : '2' , '3' : '3' , '4' : '4' , '5' : '5' , '6' : '6' , '7' : '7' , '8' : '8' , '9' : '9' } for i in range ( ord ( 'A' ) , ord ( 'Z' ) + 1 ) : translate [ chr ( i ) ] = str ( i - ord ( 'A' ) + 10 ) for val in 'DE00' : translated = translate [ val ] for char in translated : number . append ( char ) number = sum ( int ( value ) * 10 ** i for i , value in enumerate ( number [ : : - 1 ] ) ) checksum = 98 - ( number % 97 ) return str ( checksum )
Calculate the checksum of the German IBAN format .
36,933
def human_readable_bytes(nb_bytes, suffix='B'):
    """Format a byte count with binary (Ki/Mi/...) unit prefixes."""
    units = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']
    value = nb_bytes
    for unit in units:
        if abs(value) < 1024.0:
            return '%3.1f %s%s' % (value, unit, suffix)
        value /= 1024.0
    # Larger than zebi: fall through to yobi.
    return '%.1f %s%s' % (value, 'Yi', suffix)
Convert a byte number into a human readable format .
36,934
def list_files(bucket, profile_name=None):
    """List up to 1000 object URLs (s3://bucket/key) in an S3 bucket.

    profile_name selects the local AWS profile to authenticate with.
    """
    session = boto3.Session(profile_name=profile_name)
    conn = session.client('s3')
    ret = conn.list_objects(Bucket=bucket)
    if 'Contents' not in ret:
        return []
    # BUGFIX: reuse the first response instead of issuing a second
    # list_objects call, and drop the leftover debug print.
    return ['s3://' + bucket + '/' + obj['Key'] for obj in ret['Contents']]
List up to 1000 files in a bucket .
36,935
def s3_read(source, profile_name=None):
    """Read and return the raw bytes of an S3 object."""
    session = boto3.Session(profile_name=profile_name)
    client = session.client('s3')
    bucket_name, key = _s3_path_split(source)
    response = client.get_object(Bucket=bucket_name, Key=key)
    return response['Body'].read()
Read a file from an S3 source .
36,936
def s3_download(source, destination, exists_strategy=ExistsStrategy.RAISE, profile_name=None):
    """Download an S3 object (s3://bucket/key) to a local file.

    exists_strategy controls what happens when destination already
    exists: RAISE raises RuntimeError, ABORT returns without downloading,
    any other strategy overwrites the file.
    """
    if not isinstance(exists_strategy, ExistsStrategy):
        raise ValueError('exists_strategy \'{}\' is not in {}'
                         .format(exists_strategy, ExistsStrategy))
    session = boto3.Session(profile_name=profile_name)
    s3 = session.resource('s3')
    bucket_name, key = _s3_path_split(source)
    if os.path.isfile(destination):
        if exists_strategy is ExistsStrategy.RAISE:
            raise RuntimeError('File \'{}\' already exists.'
                               .format(destination))
        elif exists_strategy is ExistsStrategy.ABORT:
            return
    s3.Bucket(bucket_name).download_file(key, destination)
Copy a file from an S3 source to a local destination .
36,937
def s3_upload(source, destination, profile_name=None):
    """Upload a local file to an S3 destination (s3://bucket/key)."""
    session = boto3.Session(profile_name=profile_name)
    bucket_name, key = _s3_path_split(destination)
    with open(source, 'rb') as data:
        session.resource('s3').Bucket(bucket_name).put_object(Key=key, Body=data)
Copy a file from a local source to an S3 destination .
36,938
def _s3_path_split ( s3_path ) : if not s3_path . startswith ( 's3://' ) : raise ValueError ( 's3_path is expected to start with \'s3://\', ' 'but was {}' . format ( s3_path ) ) bucket_key = s3_path [ len ( 's3://' ) : ] bucket_name , key = bucket_key . split ( '/' , 1 ) return S3Path ( bucket_name , key )
Split an S3 path into bucket and key .
36,939
def get_meta(filepath):
    """Get meta-information of an image.

    Width/height/channels are only filled in when Pillow is installed;
    the generic file metadata is always present under 'file'.
    """
    meta = {}
    try:
        from PIL import Image
        with Image.open(filepath) as img:
            width, height = img.size
            meta['width'] = width
            meta['height'] = height
            # Number of bands, e.g. len('RGB') == 3.
            meta['channels'] = len(img.mode)
    except ImportError:
        # Pillow is an optional dependency; skip the image attributes.
        pass
    meta['file'] = mpu.io.get_file_meta(filepath)
    return meta
Get meta - information of an image .
36,940
def flatten(iterable, string_flattening=False):
    """Flatten nested iterables into a single flat list.

    Strings are treated as atoms unless string_flattening is True, in
    which case multi-character strings are split into characters.

    BUGFIX: the recursive call now propagates string_flattening (it was
    silently dropped, so nested strings were flattened inconsistently)
    and uses collections.abc, required on Python 3.10+.
    """
    flat_list = []
    for item in iterable:
        is_string = isinstance(item, str)
        # Single-character strings stay atoms even when string flattening
        # is on -- otherwise the recursion would never terminate.
        is_flattenable = (isinstance(item, collections.abc.Iterable)
                          and (not is_string
                               or (string_flattening and len(item) > 1)))
        if is_flattenable:
            flat_list.extend(flatten(item, string_flattening))
        else:
            flat_list.append(item)
    return flat_list
Flatten an given iterable of iterables into one list .
36,941
def dict_merge(dict_left, dict_right, merge_method='take_left_shallow'):
    """Merge two dictionaries.

    merge_method is one of take_left_shallow, take_left_deep,
    take_right_shallow, take_right_deep or sum.
    """
    if merge_method in ['take_right_shallow', 'take_right_deep']:
        return _dict_merge_right(dict_left, dict_right, merge_method)
    if merge_method == 'take_left_shallow':
        # Left-preference is right-preference with the operands swapped.
        return dict_merge(dict_right, dict_left, 'take_right_shallow')
    if merge_method == 'take_left_deep':
        return dict_merge(dict_right, dict_left, 'take_right_deep')
    if merge_method == 'sum':
        merged = deepcopy(dict_left)
        for key, value in dict_right.items():
            if key not in merged:
                merged[key] = value
            elif isinstance(value, dict):
                # Sum nested dictionaries recursively.
                merged[key] = dict_merge(dict_left[key], dict_right[key],
                                         merge_method='sum')
            else:
                merged[key] = dict_left[key] + dict_right[key]
        return merged
    raise NotImplementedError('merge_method=\'{}\' is not known.'
                              .format(merge_method))
Merge two dictionaries .
36,942
def _dict_merge_right ( dict_left , dict_right , merge_method ) : new_dict = deepcopy ( dict_left ) for key , value in dict_right . items ( ) : if key not in new_dict : new_dict [ key ] = value else : recurse = ( merge_method == 'take_right_deep' and isinstance ( dict_left [ key ] , dict ) and isinstance ( dict_right [ key ] , dict ) ) if recurse : new_dict [ key ] = dict_merge ( dict_left [ key ] , dict_right [ key ] , merge_method = 'take_right_deep' ) else : new_dict [ key ] = value return new_dict
See documentation of mpu . datastructures . dict_merge .
36,943
def does_keychain_exist(dict_, list_):
    """Check whether the nested key chain list_ exists within dict_."""
    node = dict_
    for key in list_:
        if key not in node:
            return False
        node = node[key]
    return True
Check if a sequence of keys exist in a nested dictionary .
36,944
def remove_indices(self, indices):
    """Return a new EList without the elements at the given indices."""
    kept = [element for position, element in enumerate(self)
            if position not in indices]
    return EList(kept)
Remove rows by which have the given indices .
36,945
def get_all_files(root, followlinks=False):
    """Return the absolute paths of all files below root."""
    collected = []
    for dirpath, _, filenames in os.walk(root, followlinks=followlinks):
        collected.extend(os.path.abspath(os.path.join(dirpath, name))
                         for name in filenames)
    return collected
Get all files within the given root directory .
36,946
def get_from_package(package_name, path):
    """Return the absolute path to a resource file inside a package."""
    resource = pkg_resources.resource_filename(package_name, path)
    return os.path.abspath(resource)
Get the absolute path to a file in a package .
36,947
def example_df():
    """Create a small example dataframe of countries."""
    data = {
        'country': ['Germany', 'France', 'Indonesia', 'Ireland', 'Spain',
                    'Vatican'],
        'population': [82521653, 66991000, 255461700, 4761865, 46549045,
                       None],
        'population_time': [dt.datetime(2016, 12, 1),
                            dt.datetime(2017, 1, 1),
                            dt.datetime(2017, 1, 1),
                            None,  # Ireland
                            dt.datetime(2017, 6, 1),
                            None],
        'EUR': [True, True, False, True, True, True],
    }
    df = pd.DataFrame(data)
    # Fix the column order explicitly.
    return df[['country', 'population', 'population_time', 'EUR']]
Create an example dataframe .
36,948
def describe(df, dtype=None):
    """Print a description of a Pandas dataframe and return column types.

    dtype maps column names to explicit type labels; everything else is
    classified by _get_column_info.  Returns {column_name: type_string}.
    """
    if dtype is None:
        dtype = {}
    print('Number of datapoints: {datapoints}'.format(datapoints=len(df)))
    column_info, column_info_meta = _get_column_info(df, dtype)
    # Describe each populated column class with its specialised helper.
    if len(column_info['int']) > 0:
        _describe_int(df, column_info)
    if len(column_info['float']) > 0:
        _describe_float(df, column_info)
    if len(column_info['category']) > 0:
        _describe_category(df, column_info, column_info_meta)
    if len(column_info['time']) > 0:
        _describe_time(df, column_info, column_info_meta)
    if len(column_info['other']) > 0:
        _describe_other(df, column_info, column_info_meta)
    column_types = {}
    for column_type, columns in column_info.items():
        for column_name in columns:
            # Report 'other' columns as plain strings.
            if column_type == 'other':
                column_type = 'str'
            column_types[column_name] = column_type
    return column_types
Print a description of a Pandas dataframe .
36,949
def indices2one_hot(indices, nb_classes):
    """Convert class indices into one-hot encoded rows.

    Raises ValueError when nb_classes is not positive.
    """
    if nb_classes < 1:
        raise ValueError('nb_classes={}, but positive number expected'
                         .format(nb_classes))
    rows = []
    for index in indices:
        row = [0] * nb_classes
        row[index] = 1
        rows.append(row)
    return rows
Convert an iterable of indices to one - hot encoded list .
36,950
def one_hot2indices(one_hots):
    """Convert one-hot encoded rows back into a list of class indices."""
    return [argmax(one_hot) for one_hot in one_hots]
Convert an iterable of one - hot encoded targets to a list of indices .
36,951
def factorize(number):
    """Return the prime factorization of an integer (excluding 1).

    Negative numbers get a leading -1 factor; 0 raises ValueError.
    """
    if not isinstance(number, int):
        raise ValueError('integer expected, but type(number)={}'
                         .format(type(number)))
    if number < 0:
        return [-1] + factorize(-number)
    if number == 0:
        raise ValueError('All primes are prime factors of 0.')
    # Trial division up to sqrt(number).
    for candidate in range(2, int(math_stl.ceil(number ** 0.5)) + 1):
        if number % candidate == 0:
            if candidate == number:
                return [candidate]
            return [candidate] + factorize(int(number / candidate))
    return [number]
Get the prime factors of an integer except for 1 .
36,952
def argmax(iterable):
    """Return the index of the first maximal value (None when empty)."""
    best_index = None
    best_value = None
    for index, value in enumerate(iterable):
        if best_value is None or best_value < value:
            best_value = value
            best_index = index
    return best_index
Find the first index of the biggest value in the iterable .
36,953
def round_down(x, decimal_places):
    """Round x down (toward -inf) to the given number of decimal places."""
    from math import floor
    scale = int('1' + ('0' * decimal_places))
    return floor(x * scale) / scale
Round a float down to decimal_places .
36,954
def add_time(datetime_obj, days=0, hours=0, minutes=0, seconds=0):
    """Add a duration to a timezone-aware datetime, keeping its tzinfo."""
    total_seconds = seconds + minutes * 60 + hours * 3600 + days * 86400
    shifted = datetime_obj + dt.timedelta(seconds=total_seconds)
    # Round-trip through UTC to renormalize the timezone offset.
    return shifted.astimezone(pytz.utc).astimezone(shifted.tzinfo)
Add time to a timezone - aware datetime object .
36,955
def generate(minimum, maximum, local_random=random.Random()):
    """Generate a random datetime in [minimum, maximum).

    Raises ValueError when minimum is not strictly before maximum.
    """
    if not (minimum < maximum):
        raise ValueError('{} is not smaller than {}'.format(minimum, maximum))
    span_seconds = (maximum - minimum).total_seconds()
    offset = dt.timedelta(seconds=span_seconds * local_random.random())
    return minimum + offset
Generate a random date .
36,956
def run_init(args):
    """Initialise a new Python project skeleton in args.root.

    Creates the standard directory layout, empty starter files, and
    copies the bundled templates with project metadata substituted.
    """
    root = args.root
    if root is None:
        root = '.'
    root = os.path.abspath(root)
    project_data = _get_package_data()
    project_name = project_data['project_name']
    # Standard package layout.
    directories = [os.path.join(root, 'bin'),
                   os.path.join(root, 'docs'),
                   os.path.join(root, 'tests'),
                   os.path.join(root, project_name),
                   ]
    for dir_path in directories:
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
    # Empty files which only need to exist.
    script_paths = [os.path.join(root, 'README.md'),
                    os.path.join(root, 'tests/__init__.py'),
                    ]
    for script_path in script_paths:
        if not os.path.exists(script_path):
            os.mknod(script_path)
    # (template source, destination) pairs, copied only when missing.
    copy_samples = [(resource_filename('mpu', 'package/templates/tox.ini.txt'),
                     os.path.join(root, 'tox.ini')),
                    (resource_filename('mpu', 'package/templates/setup.cfg.txt'),
                     os.path.join(root, 'setup.cfg')),
                    (resource_filename('mpu', 'package/templates/setup.py.txt'),
                     os.path.join(root, 'setup.py')),
                    (resource_filename('mpu', 'package/templates/_version.py.txt'),
                     os.path.join(root, project_name + '/_version.py')),
                    (resource_filename('mpu', 'package/templates/coveragerc.txt'),
                     os.path.join(root, '.coveragerc')),
                    (resource_filename('mpu', 'package/templates/init.py.txt'),
                     os.path.join(root, project_name + '/__init__.py')),
                    ]
    # Placeholder -> value map applied to every copied template.
    translate = {'[[project_name]]': project_data['project_name'],
                 '[[license]]': project_data['license'],
                 '[[author]]': project_data['author'],
                 '[[email]]': project_data['email'],
                 }
    for source, destination in copy_samples:
        if not os.path.exists(destination):
            copyfile(source, destination)
            _adjust_template(destination, translate)
Run project initialization .
36,957
def _multiple_replace ( text , search_replace_dict ) : regex = re . compile ( "|" . join ( map ( re . escape , search_replace_dict . keys ( ) ) ) ) return regex . sub ( lambda match : search_replace_dict [ match . group ( 0 ) ] , text )
Replace multiple things at once in a text .
36,958
def _adjust_template(filepath, translate):
    """Rewrite filepath in place, applying the translate replacements."""
    with open(filepath, 'r') as handle:
        contents = handle.read()
    contents = _multiple_replace(contents, translate)
    with open(filepath, 'w') as handle:
        handle.write(contents)
Search and replace contents of a filepath .
36,959
def parallel_for(loop_function, parameters, nb_threads=100):
    """Execute the loop body in parallel.

    Parameters
    ----------
    loop_function : callable
        Applied to every element of ``parameters``.
    parameters : iterable
    nb_threads : int
        Size of the thread pool.

    Returns
    -------
    list
        Results in the same order as ``parameters``.
    """
    import multiprocessing.pool
    from contextlib import closing
    pool = multiprocessing.pool.ThreadPool(nb_threads)
    with closing(pool):
        return pool.map(loop_function, parameters)
Execute the loop body in parallel .
36,960
def consistent_shuffle(*lists):
    """Shuffle lists consistently.

    Every list is permuted with the same random permutation, so elements
    that were aligned before stay aligned afterwards.

    Parameters
    ----------
    *lists
        Sequences of equal length.

    Returns
    -------
    tuple of lists
        The shuffled sequences.
    """
    order = list(range(len(lists[0])))
    random.shuffle(order)
    return tuple([sequence[index] for index in order] for sequence in lists)
Shuffle lists consistently .
36,961
def haversine_distance(origin, destination):
    """Calculate the Haversine distance.

    Parameters
    ----------
    origin : tuple of float
        (latitude, longitude)
    destination : tuple of float
        (latitude, longitude)

    Returns
    -------
    distance_in_km : float

    Raises
    ------
    ValueError
        If a latitude is outside [-90, +90] or a longitude is outside
        [-180, +180].
    """
    lat1, lon1 = origin
    lat2, lon2 = destination
    if not (-90.0 <= lat1 <= 90):
        raise ValueError('lat1={:2.2f}, but must be in [-90,+90]'.format(lat1))
    if not (-90.0 <= lat2 <= 90):
        raise ValueError('lat2={:2.2f}, but must be in [-90,+90]'.format(lat2))
    # Bug fix: the original formatted lat1 into both longitude error
    # messages and labelled the second one 'lon1' instead of 'lon2'.
    if not (-180.0 <= lon1 <= 180):
        raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'
                         .format(lon1))
    if not (-180.0 <= lon2 <= 180):
        raise ValueError('lon2={:2.2f}, but must be in [-180,+180]'
                         .format(lon2))
    radius = 6371  # mean Earth radius in km

    dlat = math_stl.radians(lat2 - lat1)
    dlon = math_stl.radians(lon2 - lon1)
    a = (math_stl.sin(dlat / 2) * math_stl.sin(dlat / 2) +
         math_stl.cos(math_stl.radians(lat1)) *
         math_stl.cos(math_stl.radians(lat2)) *
         math_stl.sin(dlon / 2) * math_stl.sin(dlon / 2))
    c = 2 * math_stl.atan2(math_stl.sqrt(a), math_stl.sqrt(1 - a))
    return radius * c
Calculate the Haversine distance .
36,962
def is_in_intervall(value, min_value, max_value, name='variable'):
    """Raise an exception if value is not in an interval.

    Parameters
    ----------
    value : orderable
    min_value : orderable
    max_value : orderable
    name : str
        Name of the variable, used in the error message.

    Raises
    ------
    ValueError
        If ``value`` is outside ``[min_value, max_value]``.
    """
    if min_value <= value <= max_value:
        return
    raise ValueError('{}={} is not in [{}, {}]'
                     .format(name, value, min_value, max_value))
Raise an exception if value is not in an interval .
36,963
def exception_logging(exctype, value, tb):
    """Log exception by using the root logger.

    Intended as a ``sys.excepthook`` replacement.

    Parameters
    ----------
    exctype : type
        Exception class.
    value : BaseException
        Exception instance.
    tb : traceback
        Traceback object (up to 10 frames are formatted).
    """
    payload = {
        'exception_type': str(exctype),
        'message': str(traceback.format_tb(tb, 10)),
    }
    logging.exception(str(payload))
Log exception by using the root logger .
36,964
def latitude(self, latitude):
    """Setter for latitude.

    Parameters
    ----------
    latitude : float

    Raises
    ------
    ValueError
        If ``latitude`` is outside [-90, 90].
    """
    if -90 <= latitude <= 90:
        self._latitude = latitude
    else:
        raise ValueError('latitude was {}, but has to be in [-90, 90]'
                         .format(latitude))
Setter for latitude .
36,965
def longitude(self, longitude):
    """Setter for longitude.

    Parameters
    ----------
    longitude : float

    Raises
    ------
    ValueError
        If ``longitude`` is outside [-180, 180].
    """
    if -180 <= longitude <= 180:
        self._longitude = longitude
    else:
        raise ValueError('longitude was {}, but has to be in [-180, 180]'
                         .format(longitude))
Setter for longitude .
36,966
def distance(self, there):
    """Calculate the distance from this location to there.

    Parameters
    ----------
    there : object
        Object with ``latitude`` and ``longitude`` attributes.

    Returns
    -------
    distance_in_km : float
    """
    here = (self.latitude, self.longitude)
    target = (there.latitude, there.longitude)
    return haversine_distance(here, target)
Calculate the distance from this location to there .
36,967
def main():
    """Command line interface of mpu.

    Parses the arguments and dispatches to the sub-command handler; prints
    the help text when no sub-command was given.
    """
    parser = get_parser()
    args = parser.parse_args()
    handler = getattr(args, 'func', None)
    if handler:
        handler(args)
    else:
        parser.print_help()
Command line interface of mpu .
36,968
def urlread(url, encoding='utf8'):
    """Read the content of an URL.

    Parameters
    ----------
    url : str
    encoding : str
        Encoding used to decode the response body.

    Returns
    -------
    content : str
    """
    try:
        # Python 3
        from urllib.request import urlopen
    except ImportError:
        # Python 2 fallback
        from urllib2 import urlopen
    raw = urlopen(url).read()
    return raw.decode(encoding)
Read the content of an URL .
36,969
def hash(filepath, method='sha1', buffer_size=65536):
    """Calculate a hash of a local file.

    Parameters
    ----------
    filepath : str
    method : str
        'sha1' and 'md5' keep their original fast path; generalized to
        accept any algorithm known to :mod:`hashlib` (e.g. 'sha256').
    buffer_size : int
        Bytes read per chunk, so arbitrarily large files are hashed with
        constant memory.

    Returns
    -------
    hash_hexdigest : str

    Raises
    ------
    NotImplementedError
        If ``method`` is not supported by hashlib.
    """
    if method == 'sha1':
        hash_function = hashlib.sha1()
    elif method == 'md5':
        hash_function = hashlib.md5()
    else:
        # Generalization: delegate every other name to hashlib instead of
        # rejecting everything but md5/sha1.  Keeps the original
        # NotImplementedError contract for unknown algorithms.
        try:
            hash_function = hashlib.new(method)
        except ValueError:
            raise NotImplementedError(
                'Only hashlib-supported hashes are known, but '
                ' \'{}\' was specified.'.format(method))
    with open(filepath, 'rb') as fp:
        # Read chunks until the b'' EOF sentinel.
        for data in iter(lambda: fp.read(buffer_size), b''):
            hash_function.update(data)
    return hash_function.hexdigest()
Calculate a hash of a local file .
36,970
def get_creation_datetime(filepath):
    """Get the date that a file was created.

    Parameters
    ----------
    filepath : str

    Returns
    -------
    creation_datetime : datetime.datetime or None
        None on platforms that do not expose a birth time
        (e.g. most Linux file systems).
    """
    if platform.system() == 'Windows':
        return datetime.fromtimestamp(os.path.getctime(filepath))
    stat = os.stat(filepath)
    # st_birthtime is only available on macOS / some BSDs.
    birthtime = getattr(stat, 'st_birthtime', None)
    if birthtime is None:
        return None
    return datetime.fromtimestamp(birthtime)
Get the date that a file was created .
36,971
def get_modification_datetime(filepath):
    """Get the datetime that a file was last modified.

    Parameters
    ----------
    filepath : str

    Returns
    -------
    modification_datetime : datetime.datetime
        Timezone-aware datetime in the local timezone.
    """
    import tzlocal
    timezone = tzlocal.get_localzone()
    # Bug fix: datetime.replace(tzinfo=...) attaches a pytz timezone
    # without normalization, which can yield historic LMT offsets.
    # Passing tz= to fromtimestamp lets the timezone convert correctly.
    return datetime.fromtimestamp(os.path.getmtime(filepath), tz=timezone)
Get the datetime that a file was last modified .
36,972
def get_access_datetime(filepath):
    """Get the last time filepath was accessed.

    Parameters
    ----------
    filepath : str

    Returns
    -------
    access_datetime : datetime.datetime
        Timezone-aware datetime in the local timezone.
    """
    import tzlocal
    tz = tzlocal.get_localzone()
    # Bug fix: same pytz replace(tzinfo=...) pitfall as in
    # get_modification_datetime; build the aware datetime directly.
    return datetime.fromtimestamp(os.path.getatime(filepath), tz=tz)
Get the last time filepath was accessed .
36,973
def get_file_meta(filepath):
    """Get meta-information about a file.

    Parameters
    ----------
    filepath : str

    Returns
    -------
    meta : dict
        Keys: ``filepath``, ``creation_datetime``,
        ``last_access_datetime``, ``modification_datetime`` and - when
        the optional python-magic package is installed - ``mime`` and
        ``magic-type``.
    """
    abspath = os.path.abspath(filepath)
    meta = {
        'filepath': abspath,
        'creation_datetime': get_creation_datetime(filepath),
        'last_access_datetime': get_access_datetime(filepath),
        'modification_datetime': get_modification_datetime(filepath),
    }
    try:
        # python-magic is optional; silently skip mime detection if it
        # is not installed.
        import magic
        f_mime = magic.Magic(mime=True, uncompress=True)
        f_other = magic.Magic(mime=False, uncompress=True)
        meta['mime'] = f_mime.from_file(meta['filepath'])
        meta['magic-type'] = f_other.from_file(meta['filepath'])
    except ImportError:
        pass
    return meta
Get meta - information about a file .
36,974
def gzip_file(source, sink):
    """Create a GZIP file from a source file.

    Parameters
    ----------
    source : str
        Path of the file to compress.
    sink : str
        Path of the gzip file to create.
    """
    import gzip
    with open(source, 'rb') as f_in:
        with gzip.open(sink, 'wb') as f_out:
            f_out.write(f_in.read())
Create a GZIP file from a source file .
36,975
def start(self):
    """Start the patch.

    Patches ``self.target`` with a mock.  The mocked client's ``model``
    method is wired to delegate to ``self.model`` so model lookups keep
    working while everything else is stubbed out.
    """
    self._patcher = mock.patch(target=self.target)
    MockClient = self._patcher.start()
    # Instantiating the patched class yields this shared mock instance.
    instance = MockClient.return_value
    # Delegate .model(...) calls to self.model.
    # NOTE(review): wrapping self.model in a Mock(side_effect=...) keeps
    # call recording on the delegate - presumably intentional; confirm
    # before simplifying to `instance.model.side_effect = self.model`.
    instance.model.side_effect = mock.Mock(side_effect=self.model)
Start the patch
36,976
def setup ( cls , client_id , client_secret ) : cls . client_id = client_id cls . client_secret = client_secret
Configure client in session
36,977
def read(filelines, mapping=None, wok=False):
    """Parse a ris lines and return a list of entries.

    Parameters
    ----------
    filelines : iterable of str
        Lines of the RIS file.
    mapping : dict, optional
        Tag -> key mapping; a flavour-specific default is used when falsy.
    wok : bool
        Parse the Web-of-Knowledge flavour instead of plain RIS.

    Returns
    -------
    list
        Parsed entries.
    """
    if wok:
        return Wok(filelines, mapping or WOK_TAG_KEY_MAPPING).parse()
    return Ris(filelines, mapping or TAG_KEY_MAPPING).parse()
Parse a ris lines and return a list of entries .
36,978
def refresh_context(self):
    """Get the default context of the user and save it.

    Returns
    -------
    dict
        The freshly fetched user preferences; also stored on
        ``self.context``.
    """
    user_model = self.model('res.user')
    self.context = user_model.get_preferences(True)
    return self.context
Get the default context of the user and save it
36,979
def login(self, login, password, set_auth=False):
    """Attempts a login to the remote server and on success returns
    user id and session or None.

    Parameters
    ----------
    login : str
    password : str
    set_auth : bool
        When True, also store the resulting auth on this client.
    """
    payload = dumps({
        "method": "common.db.login",
        "params": [login, password],
    })
    response = self.session.post(self.host, payload)
    result = loads(response.content)['result']
    if set_auth:
        self.set_auth(SessionAuth(login, *result))
    return result
Attempts a login to the remote server and on success returns user id and session or None
36,980
def is_auth_alive(self):
    "Return true if the auth is not expired, else false"
    model = self.model('ir.model')
    try:
        # Cheapest authenticated call: a search with no filter and a
        # limit of 1, just to see whether the server accepts our auth.
        model.search([], None, 1, None)
    except ClientError as err:
        # 403 means the session/auth has expired.
        # NOTE(review): assumes err.message is a dict carrying a 'code'
        # key - confirm against the ClientError definition.
        if err and err.message['code'] == 403:
            return False
        raise
    except Exception:
        # Any other failure is unexpected; propagate to the caller.
        raise
    else:
        return True
Return true if the auth is not expired else false
36,981
def update(self, data=None, **kwargs):
    """Update the record right away.

    Parameters
    ----------
    data : dict, optional
        Field values to write.  Bug fix: the passed dict is copied first,
        so the caller's dict is no longer mutated by the kwargs merge.
    **kwargs
        Additional field values; take precedence over ``data``.

    Returns
    -------
    The return value of ``model.write``.
    """
    values = dict(data) if data else {}
    values.update(kwargs)
    return self.model.write([self.id], values)
Update the record right away .
36,982
def search_read_all(self, domain, order, fields, batch_size=500,
                    context=None, offset=0, limit=None):
    """An endless iterator that iterates over records.

    Fetches matching records page by page (``batch_size`` per server
    round trip) and yields them one at a time, so arbitrarily large
    result sets can be consumed with bounded memory.

    Parameters
    ----------
    domain : list
        Search domain.
    order : list or None
        Sort specification.
    fields : list
        Fields to read for every record.
    batch_size : int
        Records fetched per request.
    context : dict, optional
    offset : int
        Index of the first record to fetch.
    limit : int, optional
        Maximum number of records; all matching records when None.
    """
    if context is None:
        context = {}
    if limit is None:
        end = self.search_count(domain, context=context) + offset
    else:
        end = limit + offset
    for page_start in range(offset, end, batch_size):
        # Shrink the final page so we never request past `end`.
        page_size = min(batch_size, end - page_start)
        page = self.search_read(
            domain, page_start, page_size, order, fields, context=context
        )
        for record in page:
            yield record
An endless iterator that iterates over records .
36,983
def find(self, filter=None, page=1, per_page=10, fields=None, context=None):
    """Find records that match the filter.

    Parameters
    ----------
    filter : list, optional
        Domain filter; matches everything when falsy.
    page : int
    per_page : int
        Pagination controls.
    fields : list, optional
        Fields to fetch.
    context : dict, optional
        Falls back to the client's default context.

    Returns
    -------
    The HTTP response object.
    """
    if filter is None:
        filter = []
    params = {
        'filter': dumps(filter or []),
        'page': page,
        'per_page': per_page,
        'field': fields,
        'context': dumps(context or self.client.context),
    }
    response = self.client.session.get(self.path, params=params)
    response_received.send(response)
    return response
Find records that match the filter .
36,984
def attach(self, id, filename, url):
    """Add an attachment to the record from an url.

    Parameters
    ----------
    id : int
        Id of the record to attach to.
    filename : str
    url : str
        Location the attachment is fetched from.
    """
    attachment_model = self.client.model('ir.attachment')
    resource = '%s,%s' % (self.model_name, id)
    return attachment_model.add_attachment_from_url(filename, url, resource)
Add an attachment to record from url
36,985
def refresh_if_needed(self):
    """Refresh the status of the task from server if required.

    Only tasks still PENDING/STARTED are refreshed.  On success the
    server-reported ``state`` and ``result`` are stored on self.

    Raises
    ------
    Exception
        If the server response carries no task entry.
    ServerError
        If the server reports an error for the task.
    """
    if self.state not in (self.PENDING, self.STARTED):
        return
    try:
        # ValueError covers a 'tasks' list without exactly one entry.
        response, = self._fetch_result()['tasks']
    except (KeyError, ValueError):
        raise Exception("Unable to find results for task.")
    if 'error' in response:
        # Bug fix: the original used '==' (a no-op comparison) instead
        # of '=', so the task was never actually marked as FAILURE.
        self.state = self.FAILURE
        raise ServerError(response['error'])
    if 'state' in response:
        self.state = response['state']
        self.result = response['result']
Refresh the status of the task from server if required .
36,986
def get_customer(code):
    """Fetch a customer with the code.

    Returns None if the customer is not found.

    Parameters
    ----------
    code : str
    """
    party_model = client.model('party.party')
    matches = party_model.find([('code', '=', code)])
    return matches[0]['id'] if matches else None
Fetch a customer with the code . Returns None if the customer is not found .
36,987
def get_address(customer_id, data):
    """Easier to fetch the addresses of customer and then check one by one.

    Returns the id of the first address of ``customer_id`` whose fields
    match ``data``, or None when nothing matches.
    """
    address_model = client.model('party.address')
    addresses = address_model.find(
        [('party', '=', customer_id)],
        fields=['name', 'street', 'street_bis', 'city', 'zip',
                'subdivision.code', 'country.code'])
    simple_fields = ('name', 'street', 'street_bis', 'city', 'zip')
    for address in addresses:
        fields_match = all(
            address[field] == data[field] for field in simple_fields
        )
        if (fields_match and
                address['subdivision.code'].endswith(data['state']) and
                address['country.code'] == data['country']):
            return address['id']
Easier to fetch the addresses of customer and then check one by one .
36,988
def create_address(customer_id, data):
    """Create an address and return the id.

    Looks up the country and subdivision referenced by ``data`` and
    creates a party.address linked to ``customer_id``.
    """
    address_model = client.model('party.address')
    country_model = client.model('country.country')
    subdivision_model = client.model('country.subdivision')
    country, = country_model.find([('code', '=', data['country'])])
    # Subdivision codes are '<country>-<state>', hence the ilike suffix
    # match.
    state, = subdivision_model.find([
        ('code', 'ilike', '%-' + data['state']),
        ('country', '=', country['id']),
    ])
    values = {
        'party': customer_id,
        'name': data['name'],
        'street': data['street'],
        'street_bis': data['street_bis'],
        'city': data['city'],
        'zip': data['zip'],
        'country': country['id'],
        'subdivision': state['id'],
    }
    address, = address_model.create([values])
    return address['id']
Create an address and return the id
36,989
def create_customer(name, email, phone):
    """Create a customer with the name.

    Then attach the email and phone as contact methods.

    Returns the id of the newly created party.
    """
    party_model = client.model('party.party')
    contact_model = client.model('party.contact_mechanism')
    party, = party_model.create([{'name': name}])
    contacts = [
        {'type': 'email', 'value': email, 'party': party},
        {'type': 'phone', 'value': phone, 'party': party},
    ]
    contact_model.create(contacts)
    return party
Create a customer with the name . Then attach the email and phone as contact methods
36,990
def create_order(order):
    """Create an order on fulfil from order_details.

    See the calling function below for an example of the order_details.

    Finds (or creates) the customer and both addresses, creates the sale
    with one line per item, then moves it through quote and confirm.
    """
    SaleOrder = client.model('sale.sale')
    SaleOrderLine = client.model('sale.line')
    # Find or create the customer.
    customer_id = get_customer(order['customer']['code'])
    if not customer_id:
        customer_id = create_customer(
            order['customer']['name'],
            order['customer']['email'],
            order['customer']['phone'],
        )
    # Find or create both addresses.
    invoice_address = get_address(customer_id, order['invoice_address'])
    if not invoice_address:
        invoice_address = create_address(customer_id, order['invoice_address'])
    shipment_address = get_address(customer_id, order['shipment_address'])
    if not shipment_address:
        shipment_address = create_address(customer_id, order['shipment_address'])
    sale_order_id, = SaleOrder.create([{
        'reference': order['number'],
        'sale_date': order['date'],
        'party': customer_id,
        'invoice_address': invoice_address,
        'shipment_address': shipment_address,
    }])
    warehouses = get_warehouses()
    warehouse_ids = [warehouse['id'] for warehouse in warehouses]
    lines = []
    for item in order['items']:
        product = get_product(item['product'])
        product_inventory = get_product_inventory(product, warehouse_ids)
        # Pick the first warehouse that can cover the quantity.
        # NOTE(review): if no warehouse has enough stock, `location`
        # keeps the last iterated warehouse - presumably an intentional
        # fallback; confirm.
        for location, quantities in product_inventory.items():
            if quantities['quantity_available'] >= item['quantity']:
                break
        lines.append({
            'sale': sale_order_id,
            'product': product,
            'quantity': item['quantity'],
            'unit_price': item['unit_price'],
            'warehouse': location,
        })
    SaleOrderLine.create(lines)
    # Workflow: draft -> quote -> confirm.
    SaleOrder.quote([sale_order_id])
    SaleOrder.confirm([sale_order_id])
Create an order on fulfil from order_details . See the calling function below for an example of the order_details
36,991
def model_base(fulfil_client, cache_backend=None, cache_expire=10 * 60):
    """Return a Base Model class that binds to the fulfil client instance
    and the cache instance.

    Parameters
    ----------
    fulfil_client
        Client instance shared by all derived models.
    cache_backend : optional
    cache_expire : int
        Cache lifetime in seconds.
    """
    attributes = {
        'fulfil_client': fulfil_client,
        'cache_backend': cache_backend,
        'cache_expire': cache_expire,
        '__abstract__': True,
        '__modelregistry__': {},
    }
    return type('BaseModel', (Model,), attributes)
Return a Base Model class that binds to the fulfil client instance and the cache instance .
36,992
def all(self):
    """Return the results represented by this Query as a list."""
    return self.rpc_model.search_read_all(
        self.domain,
        self._order_by,
        self.fields,
        context=self.context,
        offset=self._offset if self._offset else 0,
        limit=self._limit,
    )
Return the results represented by this Query as a list .
36,993
def count(self):
    """Return a count of rows this Query would return."""
    rpc = self.rpc_model
    return rpc.search_count(self.domain, context=self.context)
Return a count of rows this Query would return .
36,994
def exists(self):
    """A convenience method that returns True if a record satisfying the
    query exists.
    """
    matches = self.rpc_model.search_count(self.domain, context=self.context)
    return matches > 0
A convenience method that returns True if a record satisfying the query exists
36,995
def show_active_only(self, state):
    """Set active only to True or False on a copy of this query.

    The original query is left untouched; the modified copy is returned.
    """
    clone = self._copy()
    clone.active_only = state
    return clone
Set active only to true or false on a copy of this query
36,996
def filter_by(self, **kwargs):
    """Apply the given filtering criterion to a copy of this Query using
    keyword expressions.

    Each keyword becomes an equality clause appended to the copy's
    domain; the original query is untouched.
    """
    clone = self._copy()
    clone.domain.extend(
        (field, '=', value) for field, value in kwargs.items()
    )
    return clone
Apply the given filtering criterion to a copy of this Query using keyword expressions .
36,997
def filter_by_domain(self, domain):
    """Apply the given domain to a copy of this query.

    The copy's domain is replaced entirely; the original query is
    untouched.
    """
    clone = self._copy()
    clone.domain = domain
    return clone
Apply the given domain to a copy of this query
36,998
def first(self):
    """Return the first result of this Query or None if the result
    doesn't contain any row.
    """
    matches = self.rpc_model.search_read(
        self.domain, None, 1, self._order_by, self.fields,
        context=self.context
    )
    return matches and matches[0] or None
Return the first result of this Query or None if the result doesn t contain any row .
36,999
def get(self, id):
    """Return an instance based on the given primary key identifier,
    or None if not found.

    Inactive records are included in the lookup (``active_test`` is
    disabled in the search context).
    """
    lookup_context = self.context.copy()
    lookup_context['active_test'] = False
    matches = self.rpc_model.search_read(
        [('id', '=', id)], None, None, None, self.fields,
        context=lookup_context
    )
    return matches and matches[0] or None
Return an instance based on the given primary key identifier or None if not found .