idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
20,000 | def _parse_alignment ( alignment ) : align_flags = None for qt_align in alignment . split ( '|' ) : _ , qt_align = qt_align . split ( '::' ) align = getattr ( QtCore . Qt , qt_align ) if align_flags is None : align_flags = align else : align_flags |= align return align_flags | Convert a C ++ alignment to the corresponding flags . |
20,001 | def any_i18n ( * args ) : for a in args : if a is not None and not isinstance ( a , str ) : return True return False | Return True if any argument appears to be an i18n string . |
20,002 | def createWidgetItem ( self , item_type , elem , getter , * getter_args ) : item = self . factory . createQObject ( item_type , "item" , ( ) , False ) props = self . wprops text = props . getProperty ( elem , 'text' ) status_tip = props . getProperty ( elem , 'statusTip' ) tool_tip = props . getProperty ( elem , 'toolTip' ) whats_this = props . getProperty ( elem , 'whatsThis' ) if self . any_i18n ( text , status_tip , tool_tip , whats_this ) : self . factory . invoke ( "item" , getter , getter_args ) if text : item . setText ( text ) if status_tip : item . setStatusTip ( status_tip ) if tool_tip : item . setToolTip ( tool_tip ) if whats_this : item . setWhatsThis ( whats_this ) text_alignment = props . getProperty ( elem , 'textAlignment' ) if text_alignment : item . setTextAlignment ( text_alignment ) font = props . getProperty ( elem , 'font' ) if font : item . setFont ( font ) icon = props . getProperty ( elem , 'icon' ) if icon : item . setIcon ( icon ) background = props . getProperty ( elem , 'background' ) if background : item . setBackground ( background ) foreground = props . getProperty ( elem , 'foreground' ) if foreground : item . setForeground ( foreground ) flags = props . getProperty ( elem , 'flags' ) if flags : item . setFlags ( flags ) check_state = props . getProperty ( elem , 'checkState' ) if check_state : item . setCheckState ( check_state ) return item | Create a specific type of widget item . |
20,003 | def readResources ( self , elem ) : try : iterator = getattr ( elem , 'iter' ) except AttributeError : iterator = getattr ( elem , 'getiterator' ) for include in iterator ( "include" ) : loc = include . attrib . get ( "location" ) if loc and loc . endswith ( '.qrc' ) : mname = os . path . basename ( loc [ : - 4 ] + self . _resource_suffix ) if mname not in self . resources : self . resources . append ( mname ) | Read a resources tag and add the module to import to the parser s list of them . |
20,004 | def get_icon ( self , iconset ) : theme = iconset . attrib . get ( 'theme' ) if theme is not None : return self . _object_factory . createQObject ( "QIcon.fromTheme" , 'icon' , ( self . _object_factory . asString ( theme ) , ) , is_attribute = False ) if iconset . text is None : return None iset = _IconSet ( iconset , self . _base_dir ) try : idx = self . _cache . index ( iset ) except ValueError : idx = - 1 if idx >= 0 : iset = self . _cache [ idx ] else : name = 'icon' idx = len ( self . _cache ) if idx > 0 : name += str ( idx ) icon = self . _object_factory . createQObject ( "QIcon" , name , ( ) , is_attribute = False ) iset . set_icon ( icon , self . _qtgui_module ) self . _cache . append ( iset ) return iset . icon | Return an icon described by the given iconset tag . |
20,005 | def _file_name ( fname , base_dir ) : fname = fname . replace ( "\\" , "\\\\" ) if base_dir != '' and fname [ 0 ] != ':' and not os . path . isabs ( fname ) : fname = os . path . join ( base_dir , fname ) return fname | Convert a relative filename if we have a base directory . |
20,006 | def set_icon ( self , icon , qtgui_module ) : if self . _use_fallback : icon . addFile ( self . _fallback ) else : for role , pixmap in self . _roles . items ( ) : if role . endswith ( "off" ) : mode = role [ : - 3 ] state = qtgui_module . QIcon . Off elif role . endswith ( "on" ) : mode = role [ : - 2 ] state = qtgui_module . QIcon . On else : continue mode = getattr ( qtgui_module . QIcon , mode . title ( ) ) if pixmap : icon . addPixmap ( qtgui_module . QPixmap ( pixmap ) , mode , state ) else : icon . addPixmap ( qtgui_module . QPixmap ( ) , mode , state ) self . icon = icon | Save the icon and set its attributes . |
20,007 | def get_package_data ( ) : package_data = dict ( ) package_data [ 'PyQt5' ] = list ( ) for subdir in ( "doc/" , "examples/" , "include/" , "mkspecs/" , "plugins/" , "qml/" , "qsci/" , "sip/" , "translations/" , "uic/" ) : abspath = os . path . abspath ( "PyQt5/" + subdir ) for root , dirs , files in os . walk ( abspath ) : for f in files : fpath = os . path . join ( root , f ) relpath = os . path . relpath ( fpath , abspath ) relpath = relpath . replace ( "\\" , "/" ) package_data [ 'PyQt5' ] . append ( subdir + relpath ) package_data [ 'PyQt5' ] . extend ( [ "*.exe" , "*.dll" , "*.pyd" , "*.conf" , "*.api" , "*.qm" , "*.bat" ] ) return package_data | Include all files from all sub - directories |
20,008 | def _preview ( self ) : from PyQt5 import QtWidgets app = QtWidgets . QApplication ( [ self . _ui_file ] ) widget = loadUi ( self . _ui_file ) widget . show ( ) return app . exec_ ( ) | Preview the . ui file . Return the exit status to be passed back to the parent process . |
20,009 | def _generate ( self ) : needs_close = False if sys . hexversion >= 0x03000000 : if self . _opts . output == '-' : from io import TextIOWrapper pyfile = TextIOWrapper ( sys . stdout . buffer , encoding = 'utf8' ) else : pyfile = open ( self . _opts . output , 'wt' , encoding = 'utf8' ) needs_close = True else : if self . _opts . output == '-' : pyfile = sys . stdout else : pyfile = open ( self . _opts . output , 'wt' ) needs_close = True import_from = self . _opts . import_from if import_from : from_imports = True elif self . _opts . from_imports : from_imports = True import_from = '.' else : from_imports = False compileUi ( self . _ui_file , pyfile , self . _opts . execute , self . _opts . indent , from_imports , self . _opts . resource_suffix , import_from ) if needs_close : pyfile . close ( ) | Generate the Python code . |
20,010 | def on_IOError ( self , e ) : sys . stderr . write ( "Error: %s: \"%s\"\n" % ( e . strerror , e . filename ) ) | Handle an IOError exception . |
20,011 | def load_plugin ( filename , plugin_globals , plugin_locals ) : plugin = open ( filename , 'rU' ) try : exec ( plugin . read ( ) , plugin_globals , plugin_locals ) except ImportError : return False except Exception as e : raise WidgetPluginError ( "%s: %s" % ( e . __class__ , str ( e ) ) ) finally : plugin . close ( ) return True | Load the plugin from the given file . Return True if the plugin was loaded or False if it wanted to be ignored . Raise an exception if there was an error . |
20,012 | def _initSymbols ( ptc ) : ptc . am = [ '' , '' ] ptc . pm = [ '' , '' ] for idx , xm in enumerate ( ptc . locale . meridian [ : 2 ] ) : target = [ 'am' , 'pm' ] [ idx ] setattr ( ptc , target , [ xm ] ) target = getattr ( ptc , target ) if xm : lxm = xm . lower ( ) target . extend ( ( xm [ 0 ] , '{0}.{1}.' . format ( * xm ) , lxm , lxm [ 0 ] , '{0}.{1}.' . format ( * lxm ) ) ) | Initialize symbols and single character constants . |
20,013 | def _convertUnitAsWords ( self , unitText ) : word_list , a , b = re . split ( r"[,\s-]+" , unitText ) , 0 , 0 for word in word_list : x = self . ptc . small . get ( word ) if x is not None : a += x elif word == "hundred" : a *= 100 else : x = self . ptc . magnitude . get ( word ) if x is not None : b += a * x a = 0 elif word in self . ptc . ignore : pass else : raise Exception ( "Unknown number: " + word ) return a + b | Converts text units into their number value . |
20,014 | def _quantityToReal ( self , quantity ) : if not quantity : return 1.0 try : return float ( quantity . replace ( ',' , '.' ) ) except ValueError : pass try : return float ( self . ptc . numbers [ quantity ] ) except KeyError : pass return 0.0 | Convert a quantity either spelled - out or numeric to a float |
20,015 | def _evalDT ( self , datetimeString , sourceTime ) : ctx = self . currentContext s = datetimeString . strip ( ) if sourceTime is None : sourceTime = _parse_date_rfc822 ( s ) debug and log . debug ( 'attempt to parse as rfc822 - %s' , str ( sourceTime ) ) if sourceTime is not None : ( yr , mth , dy , hr , mn , sec , wd , yd , isdst , _ ) = sourceTime ctx . updateAccuracy ( ctx . ACU_YEAR , ctx . ACU_MONTH , ctx . ACU_DAY ) if hr != 0 and mn != 0 and sec != 0 : ctx . updateAccuracy ( ctx . ACU_HOUR , ctx . ACU_MIN , ctx . ACU_SEC ) sourceTime = ( yr , mth , dy , hr , mn , sec , wd , yd , isdst ) if sourceTime is None : sourceTime = _parse_date_w3dtf ( s ) if sourceTime is not None : ctx . updateAccuracy ( ctx . ACU_YEAR , ctx . ACU_MONTH , ctx . ACU_DAY , ctx . ACU_HOUR , ctx . ACU_MIN , ctx . ACU_SEC ) if sourceTime is None : sourceTime = time . localtime ( ) return sourceTime | Calculate the datetime from known format like RFC822 or W3CDTF |
20,016 | def updateAccuracy ( self , * accuracy ) : for acc in accuracy : if not isinstance ( acc , int ) : acc = self . _ACCURACY_REVERSE_MAPPING [ acc ] self . accuracy |= acc | Updates current accuracy flag |
20,017 | def indent ( func ) : def wrapper ( self , * args , ** kwds ) : func ( self , * args , ** kwds ) return Indent ( self ) return wrapper | Decorator for allowing to use method as normal method or with context manager for auto - indenting code blocks . |
20,018 | def in_scope ( self , scope : str ) : old_scope = self . resolution_scope self . resolution_scope = urlparse . urljoin ( old_scope , scope ) try : yield finally : self . resolution_scope = old_scope | Context manager to handle current scope . |
20,019 | def get_scope_name ( self ) : name = 'validate_' + unquote ( self . resolution_scope ) . replace ( '~1' , '_' ) . replace ( '~0' , '_' ) name = re . sub ( r'[:/#\.\-\%]' , '_' , name ) name = name . lower ( ) . rstrip ( '_' ) return name | Get current scope and return it as a valid function name . |
20,020 | def generate_type ( self ) : types = enforce_list ( self . _definition [ 'type' ] ) try : python_types = ', ' . join ( JSON_TYPE_TO_PYTHON_TYPE [ t ] for t in types ) except KeyError as exc : raise JsonSchemaDefinitionException ( 'Unknown type: {}' . format ( exc ) ) extra = '' if 'integer' in types : extra += ' and not (isinstance({variable}, float) and {variable}.is_integer())' . format ( variable = self . _variable , ) if ( 'number' in types or 'integer' in types ) and 'boolean' not in types : extra += ' or isinstance({variable}, bool)' . format ( variable = self . _variable ) with self . l ( 'if not isinstance({variable}, ({})){}:' , python_types , extra ) : self . l ( 'raise JsonSchemaException("{name} must be {}")' , ' or ' . join ( types ) ) | Validation of type . Can be one type or list of types . |
20,021 | def generate_property_names ( self ) : property_names_definition = self . _definition . get ( 'propertyNames' , { } ) if property_names_definition is True : pass elif property_names_definition is False : self . create_variable_keys ( ) with self . l ( 'if {variable}_keys:' ) : self . l ( 'raise JsonSchemaException("{name} must not be there")' ) else : self . create_variable_is_dict ( ) with self . l ( 'if {variable}_is_dict:' ) : self . create_variable_with_length ( ) with self . l ( 'if {variable}_len != 0:' ) : self . l ( '{variable}_property_names = True' ) with self . l ( 'for {variable}_key in {variable}:' ) : with self . l ( 'try:' ) : self . generate_func_code_block ( property_names_definition , '{}_key' . format ( self . _variable ) , self . _variable_name , clear_variables = True , ) with self . l ( 'except JsonSchemaException:' ) : self . l ( '{variable}_property_names = False' ) with self . l ( 'if not {variable}_property_names:' ) : self . l ( 'raise JsonSchemaException("{name} must be named by propertyName definition")' ) | Means that keys of object must to follow this definition . |
20,022 | def generate_contains ( self ) : self . create_variable_is_list ( ) with self . l ( 'if {variable}_is_list:' ) : contains_definition = self . _definition [ 'contains' ] if contains_definition is False : self . l ( 'raise JsonSchemaException("{name} is always invalid")' ) elif contains_definition is True : with self . l ( 'if not {variable}:' ) : self . l ( 'raise JsonSchemaException("{name} must not be empty")' ) else : self . l ( '{variable}_contains = False' ) with self . l ( 'for {variable}_key in {variable}:' ) : with self . l ( 'try:' ) : self . generate_func_code_block ( contains_definition , '{}_key' . format ( self . _variable ) , self . _variable_name , clear_variables = True , ) self . l ( '{variable}_contains = True' ) self . l ( 'break' ) self . l ( 'except JsonSchemaException: pass' ) with self . l ( 'if not {variable}_contains:' ) : self . l ( 'raise JsonSchemaException("{name} must contain one of contains definition")' ) | Means that array must contain at least one defined item . |
20,023 | def generate_const ( self ) : const = self . _definition [ 'const' ] if isinstance ( const , str ) : const = '"{}"' . format ( const ) with self . l ( 'if {variable} != {}:' , const ) : self . l ( 'raise JsonSchemaException("{name} must be same as const definition")' ) | Means that value is valid when is equeal to const definition . |
20,024 | def global_state ( self ) : self . _generate_func_code ( ) return dict ( REGEX_PATTERNS = self . _compile_regexps , re = re , JsonSchemaException = JsonSchemaException , ) | Returns global variables for generating function from func_code . Includes compiled regular expressions and imports so it does not have to do it every time when validation function is called . |
20,025 | def global_state_code ( self ) : self . _generate_func_code ( ) if not self . _compile_regexps : return '\n' . join ( [ 'from fastjsonschema import JsonSchemaException' , '' , '' , ] ) regexs = [ '"{}": re.compile(r"{}")' . format ( key , value . pattern ) for key , value in self . _compile_regexps . items ( ) ] return '\n' . join ( [ 'import re' , 'from fastjsonschema import JsonSchemaException' , '' , '' , 'REGEX_PATTERNS = {' , ' ' + ',\n ' . join ( regexs ) , '}' , '' , ] ) | Returns global variables for generating function from func_code as code . Includes compiled regular expressions and imports . |
20,026 | def generate_func_code ( self ) : self . l ( 'NoneType = type(None)' ) while self . _needed_validation_functions : uri , name = self . _needed_validation_functions . popitem ( ) self . generate_validation_function ( uri , name ) | Creates base code of validation function and calls helper for creating code by definition . |
20,027 | def generate_validation_function ( self , uri , name ) : self . _validation_functions_done . add ( uri ) self . l ( '' ) with self . _resolver . resolving ( uri ) as definition : with self . l ( 'def {}(data):' , name ) : self . generate_func_code_block ( definition , 'data' , 'data' , clear_variables = True ) self . l ( 'return data' ) | Generate validation function for given uri with given name |
20,028 | def generate_func_code_block ( self , definition , variable , variable_name , clear_variables = False ) : backup = self . _definition , self . _variable , self . _variable_name self . _definition , self . _variable , self . _variable_name = definition , variable , variable_name if clear_variables : backup_variables = self . _variables self . _variables = set ( ) self . _generate_func_code_block ( definition ) self . _definition , self . _variable , self . _variable_name = backup if clear_variables : self . _variables = backup_variables | Creates validation rules for current definition . |
20,029 | def generate_ref ( self ) : with self . _resolver . in_scope ( self . _definition [ '$ref' ] ) : name = self . _resolver . get_scope_name ( ) uri = self . _resolver . get_uri ( ) if uri not in self . _validation_functions_done : self . _needed_validation_functions [ uri ] = name self . l ( '{}({variable})' , name ) | Ref can be link to remote or local definition . |
20,030 | def generate_if_then_else ( self ) : with self . l ( 'try:' ) : self . generate_func_code_block ( self . _definition [ 'if' ] , self . _variable , self . _variable_name , clear_variables = True ) with self . l ( 'except JsonSchemaException:' ) : if 'else' in self . _definition : self . generate_func_code_block ( self . _definition [ 'else' ] , self . _variable , self . _variable_name , clear_variables = True ) else : self . l ( 'pass' ) if 'then' in self . _definition : with self . l ( 'else:' ) : self . generate_func_code_block ( self . _definition [ 'then' ] , self . _variable , self . _variable_name , clear_variables = True ) | Implementation of if - then - else . |
20,031 | def generate_content_encoding ( self ) : if self . _definition [ 'contentEncoding' ] == 'base64' : with self . l ( 'if isinstance({variable}, str):' ) : with self . l ( 'try:' ) : self . l ( 'import base64' ) self . l ( '{variable} = base64.b64decode({variable})' ) with self . l ( 'except Exception:' ) : self . l ( 'raise JsonSchemaException("{name} must be encoded by base64")' ) with self . l ( 'if {variable} == "":' ) : self . l ( 'raise JsonSchemaException("contentEncoding must be base64")' ) | Means decoding value when it s encoded by base64 . |
20,032 | def generate_content_media_type ( self ) : if self . _definition [ 'contentMediaType' ] == 'application/json' : with self . l ( 'if isinstance({variable}, bytes):' ) : with self . l ( 'try:' ) : self . l ( '{variable} = {variable}.decode("utf-8")' ) with self . l ( 'except Exception:' ) : self . l ( 'raise JsonSchemaException("{name} must encoded by utf8")' ) with self . l ( 'if isinstance({variable}, str):' ) : with self . l ( 'try:' ) : self . l ( 'import json' ) self . l ( '{variable} = json.loads({variable})' ) with self . l ( 'except Exception:' ) : self . l ( 'raise JsonSchemaException("{name} must be valid JSON")' ) | Means loading value when it s specified as JSON . |
20,033 | def generate_enum ( self ) : enum = self . _definition [ 'enum' ] if not isinstance ( enum , ( list , tuple ) ) : raise JsonSchemaDefinitionException ( 'enum must be an array' ) with self . l ( 'if {variable} not in {enum}:' ) : enum = str ( enum ) . replace ( '"' , '\\"' ) self . l ( 'raise JsonSchemaException("{name} must be one of {}")' , enum ) | Means that only value specified in the enum is valid . |
20,034 | def generate_all_of ( self ) : for definition_item in self . _definition [ 'allOf' ] : self . generate_func_code_block ( definition_item , self . _variable , self . _variable_name , clear_variables = True ) | Means that value have to be valid by all of those definitions . It s like put it in one big definition . |
20,035 | def generate_one_of ( self ) : self . l ( '{variable}_one_of_count = 0' ) for definition_item in self . _definition [ 'oneOf' ] : with self . l ( 'if {variable}_one_of_count < 2:' ) : with self . l ( 'try:' ) : self . generate_func_code_block ( definition_item , self . _variable , self . _variable_name , clear_variables = True ) self . l ( '{variable}_one_of_count += 1' ) self . l ( 'except JsonSchemaException: pass' ) with self . l ( 'if {variable}_one_of_count != 1:' ) : self . l ( 'raise JsonSchemaException("{name} must be valid exactly by one of oneOf definition")' ) | Means that value have to be valid by only one of those definitions . It can t be valid by two or more of them . |
20,036 | def generate_not ( self ) : not_definition = self . _definition [ 'not' ] if not_definition is True : self . l ( 'raise JsonSchemaException("{name} must not be there")' ) elif not_definition is False : return elif not not_definition : with self . l ( 'if {}:' , self . _variable ) : self . l ( 'raise JsonSchemaException("{name} must not be valid by not definition")' ) else : with self . l ( 'try:' ) : self . generate_func_code_block ( not_definition , self . _variable , self . _variable_name ) self . l ( 'except JsonSchemaException: pass' ) self . l ( 'else: raise JsonSchemaException("{name} must not be valid by not definition")' ) | Means that value have not to be valid by this definition . |
20,037 | def generate_format ( self ) : with self . l ( 'if isinstance({variable}, str):' ) : format_ = self . _definition [ 'format' ] if format_ in self . FORMAT_REGEXS : format_regex = self . FORMAT_REGEXS [ format_ ] self . _generate_format ( format_ , format_ + '_re_pattern' , format_regex ) elif format_ == 'regex' : with self . l ( 'try:' ) : self . l ( 're.compile({variable})' ) with self . l ( 'except Exception:' ) : self . l ( 'raise JsonSchemaException("{name} must be a valid regex")' ) else : self . l ( 'pass' ) | Means that value have to be in specified format . For example date email or other . |
20,038 | def generate_items ( self ) : items_definition = self . _definition [ 'items' ] if items_definition is True : return self . create_variable_is_list ( ) with self . l ( 'if {variable}_is_list:' ) : self . create_variable_with_length ( ) if items_definition is False : with self . l ( 'if {variable}:' ) : self . l ( 'raise JsonSchemaException("{name} must not be there")' ) elif isinstance ( items_definition , list ) : for idx , item_definition in enumerate ( items_definition ) : with self . l ( 'if {variable}_len > {}:' , idx ) : self . l ( '{variable}__{0} = {variable}[{0}]' , idx ) self . generate_func_code_block ( item_definition , '{}__{}' . format ( self . _variable , idx ) , '{}[{}]' . format ( self . _variable_name , idx ) , ) if isinstance ( item_definition , dict ) and 'default' in item_definition : self . l ( 'else: {variable}.append({})' , repr ( item_definition [ 'default' ] ) ) if 'additionalItems' in self . _definition : if self . _definition [ 'additionalItems' ] is False : self . l ( 'if {variable}_len > {}: raise JsonSchemaException("{name} must contain only specified items")' , len ( items_definition ) ) else : with self . l ( 'for {variable}_x, {variable}_item in enumerate({variable}[{0}:], {0}):' , len ( items_definition ) ) : self . generate_func_code_block ( self . _definition [ 'additionalItems' ] , '{}_item' . format ( self . _variable ) , '{}[{{{}_x}}]' . format ( self . _variable_name , self . _variable ) , ) else : if items_definition : with self . l ( 'for {variable}_x, {variable}_item in enumerate({variable}):' ) : self . generate_func_code_block ( items_definition , '{}_item' . format ( self . _variable ) , '{}[{{{}_x}}]' . format ( self . _variable_name , self . _variable ) , ) | Means array is valid only when all items are valid by this definition . |
20,039 | def generate_properties ( self ) : self . create_variable_is_dict ( ) with self . l ( 'if {variable}_is_dict:' ) : self . create_variable_keys ( ) for key , prop_definition in self . _definition [ 'properties' ] . items ( ) : key_name = re . sub ( r'($[^a-zA-Z]|[^a-zA-Z0-9])' , '' , key ) with self . l ( 'if "{}" in {variable}_keys:' , key ) : self . l ( '{variable}_keys.remove("{}")' , key ) self . l ( '{variable}__{0} = {variable}["{1}"]' , key_name , key ) self . generate_func_code_block ( prop_definition , '{}__{}' . format ( self . _variable , key_name ) , '{}.{}' . format ( self . _variable_name , key ) , ) if isinstance ( prop_definition , dict ) and 'default' in prop_definition : self . l ( 'else: {variable}["{}"] = {}' , key , repr ( prop_definition [ 'default' ] ) ) | Means object with defined keys . |
20,040 | def generate_pattern_properties ( self ) : self . create_variable_is_dict ( ) with self . l ( 'if {variable}_is_dict:' ) : self . create_variable_keys ( ) for pattern , definition in self . _definition [ 'patternProperties' ] . items ( ) : self . _compile_regexps [ pattern ] = re . compile ( pattern ) with self . l ( 'for {variable}_key, {variable}_val in {variable}.items():' ) : for pattern , definition in self . _definition [ 'patternProperties' ] . items ( ) : with self . l ( 'if REGEX_PATTERNS["{}"].search({variable}_key):' , pattern ) : with self . l ( 'if {variable}_key in {variable}_keys:' ) : self . l ( '{variable}_keys.remove({variable}_key)' ) self . generate_func_code_block ( definition , '{}_val' . format ( self . _variable ) , '{}.{{{}_key}}' . format ( self . _variable_name , self . _variable ) , ) | Means object with defined keys as patterns . |
20,041 | def generate_additional_properties ( self ) : self . create_variable_is_dict ( ) with self . l ( 'if {variable}_is_dict:' ) : self . create_variable_keys ( ) add_prop_definition = self . _definition [ "additionalProperties" ] if add_prop_definition : properties_keys = list ( self . _definition . get ( "properties" , { } ) . keys ( ) ) with self . l ( 'for {variable}_key in {variable}_keys:' ) : with self . l ( 'if {variable}_key not in {}:' , properties_keys ) : self . l ( '{variable}_value = {variable}.get({variable}_key)' ) self . generate_func_code_block ( add_prop_definition , '{}_value' . format ( self . _variable ) , '{}.{{{}_key}}' . format ( self . _variable_name , self . _variable ) , ) else : with self . l ( 'if {variable}_keys:' ) : self . l ( 'raise JsonSchemaException("{name} must contain only specified properties")' ) | Means object with keys with values defined by definition . |
20,042 | def generate_dependencies ( self ) : self . create_variable_is_dict ( ) with self . l ( 'if {variable}_is_dict:' ) : self . create_variable_keys ( ) for key , values in self . _definition [ "dependencies" ] . items ( ) : if values == [ ] or values is True : continue with self . l ( 'if "{}" in {variable}_keys:' , key ) : if values is False : self . l ( 'raise JsonSchemaException("{} in {name} must not be there")' , key ) elif isinstance ( values , list ) : for value in values : with self . l ( 'if "{}" not in {variable}_keys:' , value ) : self . l ( 'raise JsonSchemaException("{name} missing dependency {} for {}")' , value , key ) else : self . generate_func_code_block ( values , self . _variable , self . _variable_name , clear_variables = True ) | Means when object has property it needs to have also other property . |
20,043 | def _read_cmap ( self ) : try : i = 0 colormap = { 0 : ( 0 , 0 , 0 ) } with open ( settings . COLORMAP ) as cmap : lines = cmap . readlines ( ) for line in lines : if i == 0 and 'mode = ' in line : i = 1 maxval = float ( line . replace ( 'mode = ' , '' ) ) elif i > 0 : str = line . split ( ) if str == [ ] : break colormap . update ( { i : ( int ( round ( float ( str [ 0 ] ) * 255 / maxval ) ) , int ( round ( float ( str [ 1 ] ) * 255 / maxval ) ) , int ( round ( float ( str [ 2 ] ) * 255 / maxval ) ) ) } ) i += 1 except IOError : pass self . cmap = { k : v [ : 4 ] for k , v in colormap . items ( ) } | reads the colormap from a text file given in settings . py . See colormap_cubehelix . txt . File must contain 256 RGB values |
20,044 | def run ( self ) : self . output ( "* NDVI processing started." , normal = True ) bands = self . _read_bands ( ) image_data = self . _get_image_data ( ) new_bands = [ ] for i in range ( 0 , 2 ) : new_bands . append ( numpy . empty ( image_data [ 'shape' ] , dtype = numpy . float32 ) ) self . _warp ( image_data , bands , new_bands ) del bands calc_band = numpy . true_divide ( ( new_bands [ 1 ] - new_bands [ 0 ] ) , ( new_bands [ 1 ] + new_bands [ 0 ] ) ) output_band = numpy . rint ( ( calc_band + 1 ) * 255 / 2 ) . astype ( numpy . uint8 ) output_file = join ( self . dst_path , self . _filename ( suffix = 'NDVI' ) ) return self . write_band ( output_band , output_file , image_data ) | Executes NDVI processing |
20,045 | def data_collector ( iterable , def_buf_size = 5242880 ) : buf = b'' for data in iterable : buf += data if len ( buf ) >= def_buf_size : output = buf [ : def_buf_size ] buf = buf [ def_buf_size : ] yield output if len ( buf ) > 0 : yield buf | Buffers n bytes of data . |
20,046 | def upload ( bucket , aws_access_key , aws_secret_key , iterable , key , progress_cb = None , threads = 5 , replace = False , secure = True , connection = None ) : if not connection : from boto . s3 . connection import S3Connection as connection c = connection ( aws_access_key , aws_secret_key , is_secure = secure ) else : c = connection b = c . get_bucket ( bucket ) if not replace and b . lookup ( key ) : raise Exception ( 's3 key ' + key + ' already exists' ) multipart_obj = b . initiate_multipart_upload ( key ) err_queue = queue . Queue ( ) lock = threading . Lock ( ) upload . counter = 0 try : tpool = pool . ThreadPool ( processes = threads ) def check_errors ( ) : try : exc = err_queue . get ( block = False ) except queue . Empty : pass else : raise exc def waiter ( ) : while upload . counter >= threads : check_errors ( ) time . sleep ( 0.1 ) def cb ( err ) : if err : err_queue . put ( err ) with lock : upload . counter -= 1 args = [ multipart_obj . upload_part_from_file , progress_cb ] for part_no , part in enumerate ( iterable ) : part_no += 1 tpool . apply_async ( upload_part , args + [ part_no , part ] , callback = cb ) with lock : upload . counter += 1 waiter ( ) tpool . close ( ) tpool . join ( ) check_errors ( ) multipart_obj . complete_upload ( ) except : multipart_obj . cancel_upload ( ) tpool . terminate ( ) raise | Upload data to s3 using the s3 multipart upload API . |
20,047 | def run ( self , bucket_name , filename , path ) : f = open ( path , 'rb' ) self . source_size = os . stat ( path ) . st_size total_dict = { } def cb ( part_no , uploaded , total ) : total_dict [ part_no ] = uploaded params = { 'uploaded' : round ( sum ( total_dict . values ( ) ) / 1048576 , 0 ) , 'size' : round ( self . source_size / 1048576 , 0 ) , } p = ( self . progress_template + '\r' ) % params STREAM . write ( p ) STREAM . flush ( ) self . output ( 'Uploading to S3' , normal = True , arrow = True ) upload ( bucket_name , self . key , self . secret , data_collector ( iter ( f ) ) , filename , cb , threads = 10 , replace = True , secure = True , connection = self . conn ) print ( '\n' ) self . output ( 'Upload Completed' , normal = True , arrow = True ) | Initiate the upload . |
20,048 | def download ( self , scenes , bands = None ) : if isinstance ( scenes , list ) : files = [ ] for scene in scenes : try : if not isinstance ( bands , list ) : raise RemoteFileDoesntExist files . append ( self . amazon_s3 ( scene , bands ) ) except RemoteFileDoesntExist : try : files . append ( self . google_storage ( scene , self . download_dir ) ) except RemoteFileDoesntExist : files . append ( self . usgs_eros ( scene , self . download_dir ) ) return files else : raise Exception ( 'Expected sceneIDs list' ) | Download scenese from Google Storage or Amazon S3 if bands are provided |
20,049 | def usgs_eros ( self , scene , path ) : if self . usgs_user and self . usgs_pass : try : api_key = api . login ( self . usgs_user , self . usgs_pass ) except USGSError as e : error_tree = ElementTree . fromstring ( str ( e . message ) ) error_text = error_tree . find ( "SOAP-ENV:Body/SOAP-ENV:Fault/faultstring" , api . NAMESPACES ) . text raise USGSInventoryAccessMissing ( error_text ) download_url = api . download ( 'LANDSAT_8' , 'EE' , [ scene ] , api_key = api_key ) if download_url : self . output ( 'Source: USGS EarthExplorer' , normal = True , arrow = True ) return self . fetch ( download_url [ 0 ] , path ) raise RemoteFileDoesntExist ( '%s is not available on AWS S3, Google or USGS Earth Explorer' % scene ) raise RemoteFileDoesntExist ( '%s is not available on AWS S3 or Google Storage' % scene ) | Downloads the image from USGS |
20,050 | def google_storage ( self , scene , path ) : sat = self . scene_interpreter ( scene ) url = self . google_storage_url ( sat ) self . remote_file_exists ( url ) self . output ( 'Source: Google Storage' , normal = True , arrow = True ) return self . fetch ( url , path ) | Google Storage Downloader . |
20,051 | def amazon_s3 ( self , scene , bands ) : sat = self . scene_interpreter ( scene ) if 'BQA' not in bands : bands . append ( 'QA' ) if 'MTL' not in bands : bands . append ( 'MTL' ) urls = [ ] for band in bands : url = self . amazon_s3_url ( sat , band ) self . remote_file_exists ( url ) urls . append ( url ) path = check_create_folder ( join ( self . download_dir , scene ) ) self . output ( 'Source: AWS S3' , normal = True , arrow = True ) for url in urls : self . fetch ( url , path ) return path | Amazon S3 downloader |
20,052 | def fetch ( self , url , path ) : segments = url . split ( '/' ) filename = segments [ - 1 ] filename = filename . split ( '?' ) [ 0 ] self . output ( 'Downloading: %s' % filename , normal = True , arrow = True ) if exists ( join ( path , filename ) ) : size = getsize ( join ( path , filename ) ) if size == self . get_remote_file_size ( url ) : self . output ( '%s already exists on your system' % filename , normal = True , color = 'green' , indent = 1 ) else : fetch ( url , path ) self . output ( 'stored at %s' % path , normal = True , color = 'green' , indent = 1 ) return join ( path , filename ) | Downloads the given url . |
20,053 | def google_storage_url ( self , sat ) : filename = sat [ 'scene' ] + '.tar.bz' return url_builder ( [ self . google , sat [ 'sat' ] , sat [ 'path' ] , sat [ 'row' ] , filename ] ) | Returns a google storage url the contains the scene provided . |
20,054 | def amazon_s3_url ( self , sat , band ) : if band != 'MTL' : filename = '%s_B%s.TIF' % ( sat [ 'scene' ] , band ) else : filename = '%s_%s.txt' % ( sat [ 'scene' ] , band ) return url_builder ( [ self . s3 , sat [ 'sat' ] , sat [ 'path' ] , sat [ 'row' ] , sat [ 'scene' ] , filename ] ) | Return an amazon s3 url the contains the scene and band provided . |
20,055 | def remote_file_exists ( self , url ) : status = requests . head ( url ) . status_code if status != 200 : raise RemoteFileDoesntExist | Checks whether the remote file exists . |
20,056 | def get_remote_file_size ( self , url ) : headers = requests . head ( url ) . headers return int ( headers [ 'content-length' ] ) | Gets the filesize of a remote file . |
20,057 | def scene_interpreter ( self , scene ) : anatomy = { 'path' : None , 'row' : None , 'sat' : None , 'scene' : scene } if isinstance ( scene , str ) and len ( scene ) == 21 : anatomy [ 'path' ] = scene [ 3 : 6 ] anatomy [ 'row' ] = scene [ 6 : 9 ] anatomy [ 'sat' ] = 'L' + scene [ 2 : 3 ] return anatomy else : raise IncorrectSceneId ( 'Received incorrect scene' ) | Conver sceneID to rows paths and dates . |
20,058 | def search ( self , paths_rows = None , lat = None , lon = None , address = None , start_date = None , end_date = None , cloud_min = None , cloud_max = None , limit = 1 , geojson = False ) : search_string = self . query_builder ( paths_rows , lat , lon , address , start_date , end_date , cloud_min , cloud_max ) r = requests . get ( '%s?search=%s&limit=%s' % ( self . api_url , search_string , limit ) ) r_dict = json . loads ( r . text ) result = { } if 'error' in r_dict : result [ 'status' ] = u'error' result [ 'code' ] = r_dict [ 'error' ] [ 'code' ] result [ 'message' ] = r_dict [ 'error' ] [ 'message' ] elif 'meta' in r_dict : if geojson : result = { 'type' : 'FeatureCollection' , 'features' : [ ] } for r in r_dict [ 'results' ] : feature = { 'type' : 'Feature' , 'properties' : { 'sceneID' : r [ 'sceneID' ] , 'row' : three_digit ( r [ 'row' ] ) , 'path' : three_digit ( r [ 'path' ] ) , 'thumbnail' : r [ 'browseURL' ] , 'date' : r [ 'acquisitionDate' ] , 'cloud' : r [ 'cloud_coverage' ] } , 'geometry' : { 'type' : 'Polygon' , 'coordinates' : [ [ [ r [ 'upperLeftCornerLongitude' ] , r [ 'upperLeftCornerLatitude' ] ] , [ r [ 'lowerLeftCornerLongitude' ] , r [ 'lowerLeftCornerLatitude' ] ] , [ r [ 'lowerRightCornerLongitude' ] , r [ 'lowerRightCornerLatitude' ] ] , [ r [ 'upperRightCornerLongitude' ] , r [ 'upperRightCornerLatitude' ] ] , [ r [ 'upperLeftCornerLongitude' ] , r [ 'upperLeftCornerLatitude' ] ] ] ] } } result [ 'features' ] . append ( feature ) else : result [ 'status' ] = u'SUCCESS' result [ 'total' ] = r_dict [ 'meta' ] [ 'found' ] result [ 'limit' ] = r_dict [ 'meta' ] [ 'limit' ] result [ 'total_returned' ] = len ( r_dict [ 'results' ] ) result [ 'results' ] = [ { 'sceneID' : i [ 'sceneID' ] , 'sat_type' : u'L8' , 'path' : three_digit ( i [ 'path' ] ) , 'row' : three_digit ( i [ 'row' ] ) , 'thumbnail' : i [ 'browseURL' ] , 'date' : i [ 'acquisitionDate' ] , 'cloud' : i [ 'cloud_coverage' ] } for i in r_dict [ 'results' ] ] return result | The main method of Search class . It searches Development Seed s Landsat API . |
20,059 | def date_range_builder ( self , start = '2013-02-11' , end = None ) : if not end : end = time . strftime ( '%Y-%m-%d' ) return 'acquisitionDate:[%s+TO+%s]' % ( start , end ) | Builds date range query . |
20,060 | def exit ( message , code = 0 ) : v = VerbosityMixin ( ) if code == 0 : v . output ( message , normal = True , arrow = True ) v . output ( 'Done!' , normal = True , arrow = True ) else : v . output ( message , normal = True , error = True ) sys . exit ( code ) | output a message to stdout and terminates the process . |
20,061 | def create_paired_list ( value ) : if isinstance ( value , list ) : value = "," . join ( value ) array = re . split ( '\D+' , value ) if len ( array ) % 2 == 0 : new_array = [ list ( array [ i : i + 2 ] ) for i in range ( 0 , len ( array ) , 2 ) ] return new_array else : raise ValueError ( 'The string should include pairs and be formated. ' 'The format must be 003,003,004,004 (commas with ' 'no space)' ) | Create a list of paired items from a string . |
20,062 | def check_create_folder ( folder_path ) : if not os . path . exists ( folder_path ) : os . makedirs ( folder_path ) return folder_path | Check whether a folder exists if not the folder is created . |
20,063 | def three_digit ( number ) : number = str ( number ) if len ( number ) == 1 : return u'00%s' % number elif len ( number ) == 2 : return u'0%s' % number else : return number | Add 0s to inputs that their length is less than 3 . |
20,064 | def georgian_day ( date ) : try : fmt = '%m/%d/%Y' return datetime . strptime ( date , fmt ) . timetuple ( ) . tm_yday except ( ValueError , TypeError ) : return 0 | Returns the number of days passed since the start of the year . |
20,065 | def year ( date ) : try : fmt = '%m/%d/%Y' return datetime . strptime ( date , fmt ) . timetuple ( ) . tm_year except ValueError : return 0 | Returns the year . |
20,066 | def reformat_date ( date , new_fmt = '%Y-%m-%d' ) : try : if isinstance ( date , datetime ) : return date . strftime ( new_fmt ) else : fmt = '%m/%d/%Y' return datetime . strptime ( date , fmt ) . strftime ( new_fmt ) except ValueError : return date | Returns reformated date . |
20,067 | def geocode ( address , required_precision_km = 1. ) : geocoded = geocoder . google ( address ) precision_km = geocode_confidences [ geocoded . confidence ] if precision_km <= required_precision_km : ( lon , lat ) = geocoded . geometry [ 'coordinates' ] return { 'lat' : lat , 'lon' : lon } else : raise ValueError ( "Address could not be precisely located" ) | Identifies the coordinates of an address |
20,068 | def adjust_bounding_box ( bounds1 , bounds2 ) : if ( ( bounds2 [ 0 ] > bounds1 [ 0 ] and bounds2 [ 2 ] > bounds1 [ 0 ] ) or ( bounds2 [ 2 ] < bounds1 [ 2 ] and bounds2 [ 2 ] < bounds1 [ 0 ] ) ) : return bounds1 if ( ( bounds2 [ 1 ] < bounds1 [ 1 ] and bounds2 [ 3 ] < bounds1 [ 1 ] ) or ( bounds2 [ 3 ] > bounds1 [ 3 ] and bounds2 [ 1 ] > bounds1 [ 3 ] ) ) : return bounds1 new_bounds = list ( bounds2 ) if ( bounds2 [ 0 ] > bounds1 [ 0 ] or bounds2 [ 0 ] < bounds1 [ 3 ] ) : new_bounds [ 0 ] = bounds1 [ 0 ] if ( bounds2 [ 2 ] < bounds1 [ 2 ] or bounds2 [ 2 ] > bounds1 [ 0 ] ) : new_bounds [ 2 ] = bounds1 [ 2 ] if ( bounds2 [ 1 ] < bounds1 [ 1 ] or bounds2 [ 1 ] > bounds1 [ 3 ] ) : new_bounds [ 1 ] = bounds1 [ 1 ] if ( bounds2 [ 3 ] > bounds1 [ 3 ] or bounds2 [ 3 ] < bounds1 [ 1 ] ) : new_bounds [ 3 ] = bounds1 [ 3 ] return tuple ( new_bounds ) | If the bounds 2 corners are outside of bounds1 they will be adjusted to bounds1 corners |
20,069 | def process_image ( path , bands = None , verbose = False , pansharpen = False , ndvi = False , force_unzip = None , ndvigrey = False , bounds = None ) : try : bands = convert_to_integer_list ( bands ) if pansharpen : p = PanSharpen ( path , bands = bands , dst_path = settings . PROCESSED_IMAGE , verbose = verbose , force_unzip = force_unzip , bounds = bounds ) elif ndvigrey : p = NDVI ( path , verbose = verbose , dst_path = settings . PROCESSED_IMAGE , force_unzip = force_unzip , bounds = bounds ) elif ndvi : p = NDVIWithManualColorMap ( path , dst_path = settings . PROCESSED_IMAGE , verbose = verbose , force_unzip = force_unzip , bounds = bounds ) else : p = Simple ( path , bands = bands , dst_path = settings . PROCESSED_IMAGE , verbose = verbose , force_unzip = force_unzip , bounds = bounds ) except IOError as err : exit ( str ( err ) , 1 ) except FileDoesNotExist as err : exit ( str ( err ) , 1 ) return p . run ( ) | Handles constructing and image process . |
20,070 | def _read_bands ( self ) : bands = [ ] try : for i , band in enumerate ( self . bands ) : bands . append ( rasterio . open ( self . bands_path [ i ] ) . read_band ( 1 ) ) except IOError as e : exit ( e . message , 1 ) return bands | Reads a band with rasterio |
20,071 | def _unzip ( self , src , dst , scene , force_unzip = False ) : self . output ( "Unzipping %s - It might take some time" % scene , normal = True , arrow = True ) try : if isdir ( dst ) and not force_unzip : self . output ( '%s is already unzipped.' % scene , normal = True , color = 'green' , indent = 1 ) return else : tar = tarfile . open ( src , 'r' ) tar . extractall ( path = dst ) tar . close ( ) except tarfile . ReadError : check_create_folder ( dst ) subprocess . check_call ( [ 'tar' , '-xf' , src , '-C' , dst ] ) | Unzip tar files |
20,072 | def _filename ( self , name = None , suffix = None , prefix = None ) : filename = '' if prefix : filename += str ( prefix ) + '_' if name : filename += str ( name ) else : filename += str ( self . scene ) if suffix : filename += '_' + str ( suffix ) if self . clipped : bounds = [ tuple ( self . bounds [ 0 : 2 ] ) , tuple ( self . bounds [ 2 : 4 ] ) ] polyline = PolylineCodec ( ) . encode ( bounds ) filename += '_clipped_' + polyline filename += '.TIF' return filename | File name generator for processed images |
20,073 | def run ( self ) : self . output ( 'Image processing started for bands %s' % '-' . join ( map ( str , self . bands ) ) , normal = True , arrow = True ) bands = self . _read_bands ( ) image_data = self . _get_image_data ( ) new_bands = self . _generate_new_bands ( image_data [ 'shape' ] ) self . _warp ( image_data , bands , new_bands ) del bands rasterio_options = { 'driver' : 'GTiff' , 'width' : image_data [ 'shape' ] [ 1 ] , 'height' : image_data [ 'shape' ] [ 0 ] , 'count' : 3 , 'dtype' : numpy . uint8 , 'nodata' : 0 , 'transform' : image_data [ 'dst_transform' ] , 'photometric' : 'RGB' , 'crs' : self . dst_crs } return self . _write_to_file ( new_bands , ** rasterio_options ) | Executes the image processing . |
20,074 | def output ( self , value , normal = False , color = None , error = False , arrow = False , indent = None ) : if error and value and ( normal or self . verbose ) : return self . _print ( value , color = 'red' , indent = indent ) if self . verbose or normal : return self . _print ( value , color , arrow , indent ) return | Handles verbosity of this calls . if priority is set to 1 the value is printed |
20,075 | def subprocess ( self , argv ) : if self . verbose : proc = subprocess . Popen ( argv , stderr = subprocess . PIPE ) else : proc = subprocess . Popen ( argv , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) self . output ( proc . stderr . read ( ) , error = True ) return | Execute subprocess commands with proper ouput . This is no longer used in landsat - util |
20,076 | def exit ( self , message ) : self . output ( message , normal = True , color = "green" ) sys . exit ( ) | outputs an exit message and exits |
20,077 | def _print ( self , msg , color = None , arrow = False , indent = None ) : if color : msg = colored ( msg , color ) if arrow : msg = colored ( '===> ' , 'blue' ) + msg if indent : msg = ( ' ' * indent ) + msg print ( msg ) return msg | Print the msg with the color provided . |
20,078 | def nn_symbols ( ) : "query the names and values of nanomsg symbols" value = ctypes . c_int ( ) name_value_pairs = [ ] i = 0 while True : name = _nn_symbol ( i , ctypes . byref ( value ) ) if name is None : break i += 1 name_value_pairs . append ( ( name . decode ( 'ascii' ) , value . value ) ) return name_value_pairs | query the names and values of nanomsg symbols |
20,079 | def nn_setsockopt ( socket , level , option , value ) : try : return _nn_setsockopt ( socket , level , option , ctypes . addressof ( value ) , len ( value ) ) except ( TypeError , AttributeError ) : buf_value = ctypes . create_string_buffer ( value ) return _nn_setsockopt ( socket , level , option , ctypes . addressof ( buf_value ) , len ( value ) ) | set a socket option |
20,080 | def nn_getsockopt ( socket , level , option , value ) : if memoryview ( value ) . readonly : raise TypeError ( 'Writable buffer is required' ) size_t_size = ctypes . c_size_t ( len ( value ) ) rtn = _nn_getsockopt ( socket , level , option , ctypes . addressof ( value ) , ctypes . byref ( size_t_size ) ) return ( rtn , size_t_size . value ) | retrieve a socket option |
20,081 | def nn_send ( socket , msg , flags ) : "send a message" try : return _nn_send ( socket , ctypes . addressof ( msg ) , len ( buffer ( msg ) ) , flags ) except ( TypeError , AttributeError ) : buf_msg = ctypes . create_string_buffer ( msg ) return _nn_send ( socket , ctypes . addressof ( buf_msg ) , len ( msg ) , flags ) | send a message |
20,082 | def nn_allocmsg ( size , type ) : "allocate a message" pointer = _nn_allocmsg ( size , type ) if pointer is None : return None return _create_message ( pointer , size ) | allocate a message |
20,083 | def nn_recv ( socket , * args ) : "receive a message" if len ( args ) == 1 : flags , = args pointer = ctypes . c_void_p ( ) rtn = _nn_recv ( socket , ctypes . byref ( pointer ) , ctypes . c_size_t ( - 1 ) , flags ) if rtn < 0 : return rtn , None else : return rtn , _create_message ( pointer . value , rtn ) elif len ( args ) == 2 : msg_buf , flags = args mv_buf = memoryview ( msg_buf ) if mv_buf . readonly : raise TypeError ( 'Writable buffer is required' ) rtn = _nn_recv ( socket , ctypes . addressof ( msg_buf ) , len ( mv_buf ) , flags ) return rtn , msg_buf | receive a message |
20,084 | def create_message_buffer ( size , type ) : rtn = wrapper . nn_allocmsg ( size , type ) if rtn is None : raise NanoMsgAPIError ( ) return rtn | Create a message buffer |
20,085 | def bind ( self , address ) : if self . uses_nanoconfig : raise ValueError ( "Nanoconfig address must be sole endpoint" ) endpoint_id = _nn_check_positive_rtn ( wrapper . nn_bind ( self . _fd , address ) ) ep = Socket . BindEndpoint ( self , endpoint_id , address ) self . _endpoints . append ( ep ) return ep | Add a local endpoint to the socket |
20,086 | def connect ( self , address ) : if self . uses_nanoconfig : raise ValueError ( "Nanoconfig address must be sole endpoint" ) endpoint_id = _nn_check_positive_rtn ( wrapper . nn_connect ( self . fd , address ) ) ep = Socket . ConnectEndpoint ( self , endpoint_id , address ) self . _endpoints . append ( ep ) return ep | Add a remote endpoint to the socket |
20,087 | def configure ( self , address ) : global nanoconfig_started if len ( self . _endpoints ) : raise ValueError ( "Nanoconfig address must be sole endpoint" ) endpoint_id = _nn_check_positive_rtn ( wrapper . nc_configure ( self . fd , address ) ) if not nanoconfig_started : nanoconfig_started = True ep = Socket . NanoconfigEndpoint ( self , endpoint_id , address ) self . _endpoints . append ( ep ) return ep | Configure socket s addresses with nanoconfig |
20,088 | def close ( self ) : if self . is_open ( ) : fd = self . _fd self . _fd = - 1 if self . uses_nanoconfig : wrapper . nc_close ( fd ) else : _nn_check_positive_rtn ( wrapper . nn_close ( fd ) ) | Close the socket |
20,089 | def recv ( self , buf = None , flags = 0 ) : if buf is None : rtn , out_buf = wrapper . nn_recv ( self . fd , flags ) else : rtn , out_buf = wrapper . nn_recv ( self . fd , buf , flags ) _nn_check_positive_rtn ( rtn ) return bytes ( buffer ( out_buf ) ) [ : rtn ] | Recieve a message . |
20,090 | def bounter ( size_mb = None , need_iteration = True , need_counts = True , log_counting = None ) : if not need_counts : return CardinalityEstimator ( ) if size_mb is None : raise ValueError ( "Max size in MB must be provided." ) if need_iteration : if log_counting : raise ValueError ( "Log counting is only supported with CMS implementation (need_iteration=False)." ) return HashTable ( size_mb = size_mb ) else : return CountMinSketch ( size_mb = size_mb , log_counting = log_counting ) | Factory method for bounter implementation . |
20,091 | def to_dict ( obj , classkey = None ) : if isinstance ( obj , dict ) : data = { } for ( k , v ) in obj . items ( ) : data [ k ] = to_dict ( v , classkey ) return data elif hasattr ( obj , "_ast" ) : return to_dict ( obj . _ast ( ) ) elif hasattr ( obj , "__iter__" ) and not isinstance ( obj , str ) : return [ to_dict ( v , classkey ) for v in obj ] elif hasattr ( obj , "__dict__" ) : if six . PY2 : data = dict ( [ ( key , to_dict ( value , classkey ) ) for key , value in obj . __dict__ . iteritems ( ) if not callable ( value ) and not key . startswith ( '_' ) ] ) else : data = dict ( [ ( key , to_dict ( value , classkey ) ) for key , value in obj . __dict__ . items ( ) if not callable ( value ) and not key . startswith ( '_' ) ] ) if classkey is not None and hasattr ( obj , "__class__" ) : data [ classkey ] = obj . __class__ . __name__ return data else : return obj | Recursively converts Python object into a dictionary |
20,092 | def json_filter ( self ) : return lambda obj : dict ( ( k , v ) for k , v in obj . __dict__ . items ( ) if not k . startswith ( '_' ) and getattr ( obj , k ) is not None ) | filter out properties that have names starting with _ or properties that have a value of None |
20,093 | def get_current_user ( self ) : url = self . current_user_url result = self . get ( url ) return result | Get data from the current user endpoint |
20,094 | def get_report ( self , report_type , qs = None ) : if qs is None : qs = { } url = self . api_url + "/company/{0}/reports/{1}" . format ( self . company_id , report_type ) result = self . get ( url , params = qs ) return result | Get data from the report endpoint |
20,095 | def _meters_per_pixel ( zoom , lat = 0.0 , tilesize = 256 ) : return ( math . cos ( lat * math . pi / 180.0 ) * 2 * math . pi * 6378137 ) / ( tilesize * 2 ** zoom ) | Return the pixel resolution for a given mercator tile zoom and lattitude . |
20,096 | def zoom_for_pixelsize ( pixel_size , max_z = 24 , tilesize = 256 ) : for z in range ( max_z ) : if pixel_size > _meters_per_pixel ( z , 0 , tilesize = tilesize ) : return max ( 0 , z - 1 ) return max_z - 1 | Get mercator zoom level corresponding to a pixel resolution . |
20,097 | def metadata ( address , pmin = 2 , pmax = 98 , ** kwargs ) : info = { "address" : address } info . update ( utils . raster_get_stats ( address , percentiles = ( pmin , pmax ) , ** kwargs ) ) return info | Return image bounds and band statistics . |
20,098 | def tile ( address , tile_x , tile_y , tile_z , tilesize = 256 , ** kwargs ) : with rasterio . open ( address ) as src : wgs_bounds = transform_bounds ( * [ src . crs , "epsg:4326" ] + list ( src . bounds ) , densify_pts = 21 ) if not utils . tile_exists ( wgs_bounds , tile_z , tile_x , tile_y ) : raise TileOutsideBounds ( "Tile {}/{}/{} is outside image bounds" . format ( tile_z , tile_x , tile_y ) ) mercator_tile = mercantile . Tile ( x = tile_x , y = tile_y , z = tile_z ) tile_bounds = mercantile . xy_bounds ( mercator_tile ) return utils . tile_read ( src , tile_bounds , tilesize , ** kwargs ) | Create mercator tile from any images . |
20,099 | def _stats ( arr , percentiles = ( 2 , 98 ) , ** kwargs ) : sample , edges = np . histogram ( arr [ ~ arr . mask ] , ** kwargs ) return { "pc" : np . percentile ( arr [ ~ arr . mask ] , percentiles ) . astype ( arr . dtype ) . tolist ( ) , "min" : arr . min ( ) . item ( ) , "max" : arr . max ( ) . item ( ) , "std" : arr . std ( ) . item ( ) , "histogram" : [ sample . tolist ( ) , edges . tolist ( ) ] , } | Calculate array statistics . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.