idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
40,900 | def _initialize_id ( self ) : self . id = str ( self . db . incr ( self . _key [ 'id' ] ) ) | Initializes the id of the instance . |
40,901 | def _write ( self , _new = False ) : pipeline = self . db . pipeline ( ) self . _create_membership ( pipeline ) self . _update_indices ( pipeline ) h = { } for k , v in self . attributes . iteritems ( ) : if isinstance ( v , DateTimeField ) : if v . auto_now : setattr ( self , k , datetime . now ( ) ) if v . auto_now_add and _new : setattr ( self , k , datetime . now ( ) ) elif isinstance ( v , DateField ) : if v . auto_now : setattr ( self , k , date . today ( ) ) if v . auto_now_add and _new : setattr ( self , k , date . today ( ) ) for_storage = getattr ( self , k ) if for_storage is not None : h [ k ] = v . typecast_for_storage ( for_storage ) for index in self . indices : if index not in self . lists and index not in self . attributes : v = getattr ( self , index ) if callable ( v ) : v = v ( ) if v : try : h [ index ] = unicode ( v ) except UnicodeError : h [ index ] = unicode ( v . decode ( 'utf-8' ) ) pipeline . delete ( self . key ( ) ) if h : pipeline . hmset ( self . key ( ) , h ) for k , v in self . lists . iteritems ( ) : l = List ( self . key ( ) [ k ] , pipeline = pipeline ) l . clear ( ) values = getattr ( self , k ) if values : if v . _redisco_model : l . extend ( [ item . id for item in values ] ) else : l . extend ( values ) pipeline . execute ( ) | Writes the values of the attributes to the datastore . |
40,902 | def _create_membership ( self , pipeline = None ) : Set ( self . _key [ 'all' ] , pipeline = pipeline ) . add ( self . id ) | Adds the id of the object to the set of all objects of the same class . |
40,903 | def _delete_membership ( self , pipeline = None ) : Set ( self . _key [ 'all' ] , pipeline = pipeline ) . remove ( self . id ) | Removes the id of the object to the set of all objects of the same class . |
40,904 | def _add_to_indices ( self , pipeline ) : for att in self . indices : self . _add_to_index ( att , pipeline = pipeline ) | Adds the base64 encoded values of the indices . |
40,905 | def _add_to_index ( self , att , val = None , pipeline = None ) : index = self . _index_key_for ( att ) if index is None : return t , index = index if t == 'attribute' : pipeline . sadd ( index , self . id ) pipeline . sadd ( self . key ( ) [ '_indices' ] , index ) elif t == 'list' : for i in index : pipeline . sadd ( i , self . id ) pipeline . sadd ( self . key ( ) [ '_indices' ] , i ) elif t == 'sortedset' : zindex , index = index pipeline . sadd ( index , self . id ) pipeline . sadd ( self . key ( ) [ '_indices' ] , index ) descriptor = self . attributes [ att ] score = descriptor . typecast_for_storage ( getattr ( self , att ) ) pipeline . zadd ( zindex , self . id , score ) pipeline . sadd ( self . key ( ) [ '_zindices' ] , zindex ) | Adds the id to the index . |
40,906 | def _index_key_for ( self , att , value = None ) : if value is None : value = getattr ( self , att ) if callable ( value ) : value = value ( ) if value is None : return None if att not in self . lists : return self . _get_index_key_for_non_list_attr ( att , value ) else : return self . _tuple_for_index_key_attr_list ( att , value ) | Returns a key based on the attribute and its value . |
40,907 | def isdisjoint ( self , other ) : return not bool ( self . db . sinter ( [ self . key , other . key ] ) ) | Return True if the set has no elements in common with other . |
40,908 | def union ( self , key , * others ) : if not isinstance ( key , str ) : raise ValueError ( "String expected." ) self . db . sunionstore ( key , [ self . key ] + [ o . key for o in others ] ) return Set ( key ) | Return a new set with elements from the set and all others . |
40,909 | def intersection ( self , key , * others ) : if not isinstance ( key , str ) : raise ValueError ( "String expected." ) self . db . sinterstore ( key , [ self . key ] + [ o . key for o in others ] ) return Set ( key ) | Return a new set with elements common to the set and all others . |
40,910 | def difference ( self , key , * others ) : if not isinstance ( key , str ) : raise ValueError ( "String expected." ) self . db . sdiffstore ( key , [ self . key ] + [ o . key for o in others ] ) return Set ( key ) | Return a new set with elements in the set that are not in the others . |
40,911 | def update ( self , * others ) : self . db . sunionstore ( self . key , [ self . key ] + [ o . key for o in others ] ) | Update the set adding elements from all others . |
40,912 | def intersection_update ( self , * others ) : self . db . sinterstore ( self . key , [ o . key for o in [ self . key ] + others ] ) | Update the set keeping only elements found in it and all others . |
40,913 | def difference_update ( self , * others ) : self . db . sdiffstore ( self . key , [ o . key for o in [ self . key ] + others ] ) | Update the set removing elements found in others . |
40,914 | def copy ( self , key ) : copy = Set ( key = key , db = self . db ) copy . clear ( ) copy |= self return copy | Copy the set to another key and return the new Set . |
40,915 | def sinter ( self , * other_sets ) : return self . db . sinter ( [ self . key ] + [ s . key for s in other_sets ] ) | Performs an intersection between Sets . |
40,916 | def reverse ( self ) : r = self [ : ] r . reverse ( ) self . clear ( ) self . extend ( r ) | Reverse in place . |
40,917 | def copy ( self , key ) : copy = List ( key , self . db ) copy . clear ( ) copy . extend ( self ) return copy | Copy the list to a new list . |
40,918 | def lt ( self , v , limit = None , offset = None ) : if limit is not None and offset is None : offset = 0 return self . zrangebyscore ( self . _min_score , "(%f" % v , start = offset , num = limit ) | Returns the list of the members of the set that have scores less than v . |
40,919 | def gt ( self , v , limit = None , offset = None ) : if limit is not None and offset is None : offset = 0 return self . zrangebyscore ( "(%f" % v , self . _max_score , start = offset , num = limit ) | Returns the list of the members of the set that have scores greater than v . |
40,920 | def between ( self , min , max , limit = None , offset = None ) : if limit is not None and offset is None : offset = 0 return self . zrangebyscore ( min , max , start = offset , num = limit ) | Returns the list of the members of the set that have scores between min and max . |
40,921 | def hex_to_bytes ( s ) : if len ( s ) % 2 : s = b'0' + s ia = [ int ( s [ i : i + 2 ] , 16 ) for i in range ( 0 , len ( s ) , 2 ) ] return bs ( ia ) if PYTHON_MAJOR_VER == 3 else b'' . join ( [ chr ( c ) for c in ia ] ) | convert hex string to bytes |
40,922 | def decimal128_to_decimal ( b ) : "decimal128 bytes to Decimal" v = decimal128_to_sign_digits_exponent ( b ) if isinstance ( v , Decimal ) : return v sign , digits , exponent = v return Decimal ( ( sign , Decimal ( digits ) . as_tuple ( ) [ 1 ] , exponent ) ) | decimal128 bytes to Decimal |
40,923 | def error_response ( response ) : if response . status_code >= 500 : raise exceptions . GeocodioServerError elif response . status_code == 403 : raise exceptions . GeocodioAuthError elif response . status_code == 422 : raise exceptions . GeocodioDataError ( response . json ( ) [ "error" ] ) else : raise exceptions . GeocodioError ( "Unknown service error (HTTP {0})" . format ( response . status_code ) ) | Raises errors matching the response code |
40,924 | def _req ( self , method = "get" , verb = None , headers = { } , params = { } , data = { } ) : url = self . BASE_URL . format ( verb = verb ) request_headers = { "content-type" : "application/json" } request_params = { "api_key" : self . API_KEY } request_headers . update ( headers ) request_params . update ( params ) return getattr ( requests , method ) ( url , params = request_params , headers = request_headers , data = data ) | Method to wrap all request building |
40,925 | def geocode_address ( self , address , ** kwargs ) : fields = "," . join ( kwargs . pop ( "fields" , [ ] ) ) response = self . _req ( verb = "geocode" , params = { "q" : address , "fields" : fields } ) if response . status_code != 200 : return error_response ( response ) return Location ( response . json ( ) ) | Returns a Location dictionary with the components of the queried address and the geocoded location . |
40,926 | def geocode ( self , address_data , ** kwargs ) : if isinstance ( address_data , list ) : return self . batch_geocode ( address_data , ** kwargs ) return self . geocode_address ( address_data , ** kwargs ) | Returns geocoding data for either a list of addresses or a single address represented as a string . |
40,927 | def reverse_point ( self , latitude , longitude , ** kwargs ) : fields = "," . join ( kwargs . pop ( "fields" , [ ] ) ) point_param = "{0},{1}" . format ( latitude , longitude ) response = self . _req ( verb = "reverse" , params = { "q" : point_param , "fields" : fields } ) if response . status_code != 200 : return error_response ( response ) return Location ( response . json ( ) ) | Method for identifying an address from a geographic point |
40,928 | def reverse ( self , points , ** kwargs ) : if isinstance ( points , list ) : return self . batch_reverse ( points , ** kwargs ) if self . order == "lat" : x , y = points else : y , x = points return self . reverse_point ( x , y , ** kwargs ) | General method for reversing addresses either a single address or multiple . |
40,929 | def str_to_bytes ( self , s ) : "convert str to bytes" if ( PYTHON_MAJOR_VER == 3 or ( PYTHON_MAJOR_VER == 2 and type ( s ) == unicode ) ) : return s . encode ( charset_map . get ( self . charset , self . charset ) ) return s | convert str to bytes |
40,930 | def bytes_to_str ( self , b ) : "convert bytes array to raw string" if PYTHON_MAJOR_VER == 3 : return b . decode ( charset_map . get ( self . charset , self . charset ) ) return b | convert bytes array to raw string |
40,931 | def bytes_to_ustr ( self , b ) : "convert bytes array to unicode string" return b . decode ( charset_map . get ( self . charset , self . charset ) ) | convert bytes array to unicode string |
40,932 | def calc_blr ( xsqlda ) : "Calculate BLR from XSQLVAR array." ln = len ( xsqlda ) * 2 blr = [ 5 , 2 , 4 , 0 , ln & 255 , ln >> 8 ] for x in xsqlda : sqltype = x . sqltype if sqltype == SQL_TYPE_VARYING : blr += [ 37 , x . sqllen & 255 , x . sqllen >> 8 ] elif sqltype == SQL_TYPE_TEXT : blr += [ 14 , x . sqllen & 255 , x . sqllen >> 8 ] elif sqltype == SQL_TYPE_LONG : blr += [ 8 , x . sqlscale ] elif sqltype == SQL_TYPE_SHORT : blr += [ 7 , x . sqlscale ] elif sqltype == SQL_TYPE_INT64 : blr += [ 16 , x . sqlscale ] elif sqltype == SQL_TYPE_QUAD : blr += [ 9 , x . sqlscale ] elif sqltype == SQL_TYPE_DEC_FIXED : blr += [ 26 , x . sqlscale ] else : blr += sqltype2blr [ sqltype ] blr += [ 7 , 0 ] blr += [ 255 , 76 ] return bs ( 256 + b if b < 0 else b for b in blr ) | Calculate BLR from XSQLVAR array . |
40,933 | def _parse_date ( self , raw_value ) : "Convert raw data to datetime.date" nday = bytes_to_bint ( raw_value ) + 678882 century = ( 4 * nday - 1 ) // 146097 nday = 4 * nday - 1 - 146097 * century day = nday // 4 nday = ( 4 * day + 3 ) // 1461 day = 4 * day + 3 - 1461 * nday day = ( day + 4 ) // 4 month = ( 5 * day - 3 ) // 153 day = 5 * day - 3 - 153 * month day = ( day + 5 ) // 5 year = 100 * century + nday if month < 10 : month += 3 else : month -= 9 year += 1 return year , month , day | Convert raw data to datetime . date |
40,934 | def _parse_time ( self , raw_value ) : "Convert raw data to datetime.time" n = bytes_to_bint ( raw_value ) s = n // 10000 m = s // 60 h = m // 60 m = m % 60 s = s % 60 return ( h , m , s , ( n % 10000 ) * 100 ) | Convert raw data to datetime . time |
40,935 | def execute_update ( args ) : provider_class = getattr ( dnsupdater , dnsupdater . AVAILABLE_PLUGINS . get ( args . provider ) ) updater_options = { } process_message = None auth = None if args . store : if provider_class . auth_type == 'T' : user_arg = args . usertoken or utils . read_input ( "Paste your auth token: " ) auth = authinfo . ApiAuth ( usertoken = user_arg ) else : user_arg = args . usertoken or utils . read_input ( "Type your username: " ) pass_arg = args . password or getpass . getpass ( "Type your password: " ) auth = authinfo . ApiAuth ( user_arg , pass_arg ) authinfo . store ( auth , args . provider , args . config ) exec_result = EXECUTION_RESULT_OK if not args . hostname : update_ddns = False process_message = "Auth info stored." else : update_ddns = True elif args . usertoken and args . hostname : if provider_class . auth_type == 'T' : auth = authinfo . ApiAuth ( args . usertoken ) else : auth = authinfo . ApiAuth ( args . usertoken , args . password ) update_ddns = True exec_result = EXECUTION_RESULT_OK elif args . hostname : if authinfo . exists ( args . provider , args . config ) : auth = authinfo . load ( args . provider , args . config ) update_ddns = True exec_result = EXECUTION_RESULT_OK else : update_ddns = False exec_result = EXECUTION_RESULT_NOK process_message = "No stored auth information found for " "provider: '%s'" % args . provider else : update_ddns = False exec_result = EXECUTION_RESULT_NOK process_message = "Warning: The hostname to be updated must be " "provided.\nUsertoken and password can be either " "provided via command line or stored with --store " "option.\nExecute noipy --help for more details." if update_ddns and args . provider == 'generic' : if args . url : if not URL_RE . match ( args . url ) : process_message = "Malformed URL." exec_result = EXECUTION_RESULT_NOK update_ddns = False else : updater_options [ 'url' ] = args . 
url else : process_message = "Must use --url if --provider is 'generic' " "(default)" exec_result = EXECUTION_RESULT_NOK update_ddns = False response_code = None response_text = None if update_ddns : ip_address = args . ip if args . ip else utils . get_ip ( ) if not ip_address : process_message = "Unable to get IP address. Check connection." exec_result = EXECUTION_RESULT_NOK elif ip_address == utils . get_dns_ip ( args . hostname ) : process_message = "No update required." else : updater = provider_class ( auth , args . hostname , updater_options ) print ( "Updating hostname '%s' with IP address %s " "[provider: '%s']..." % ( args . hostname , ip_address , args . provider ) ) response_code , response_text = updater . update_dns ( ip_address ) process_message = updater . status_message proc_result = { 'exec_result' : exec_result , 'response_code' : response_code , 'response_text' : response_text , 'process_message' : process_message , } return proc_result | Execute the update based on command line args and returns a dictionary with execution result response code response info and process friendly message . |
40,936 | def update_dns ( self , new_ip ) : headers = None if self . auth_type == 'T' : api_call_url = self . _base_url . format ( hostname = self . hostname , token = self . auth . token , ip = new_ip ) else : api_call_url = self . _base_url . format ( hostname = self . hostname , ip = new_ip ) headers = { 'Authorization' : "Basic %s" % self . auth . base64key . decode ( 'utf-8' ) , 'User-Agent' : "%s/%s %s" % ( __title__ , __version__ , __email__ ) } r = requests . get ( api_call_url , headers = headers ) self . last_ddns_response = str ( r . text ) . strip ( ) return r . status_code , r . text | Call No - IP API based on dict login_info and return the status code . |
40,937 | def status_message ( self ) : msg = None if self . last_ddns_response in response_messages . keys ( ) : return response_messages . get ( self . last_ddns_response ) if 'good' in self . last_ddns_response : ip = re . search ( r'(\d{1,3}\.?){4}' , self . last_ddns_response ) . group ( ) msg = "SUCCESS: DNS hostname IP (%s) successfully updated." % ip elif 'nochg' in self . last_ddns_response : ip = re . search ( r'(\d{1,3}\.?){4}' , self . last_ddns_response ) . group ( ) msg = "SUCCESS: IP address (%s) is up to date, nothing was changed. " "Additional 'nochg' updates may be considered abusive." % ip else : msg = "ERROR: Ooops! Something went wrong !!!" return msg | Return friendly response from API based on response code . |
40,938 | def get_ip ( ) : try : r = requests . get ( HTTPBIN_URL ) ip , _ = r . json ( ) [ 'origin' ] . split ( ',' ) return ip if r . status_code == 200 else None except requests . exceptions . ConnectionError : return None | Return machine s origin IP address . |
40,939 | def store ( auth , provider , config_location = DEFAULT_CONFIG_DIR ) : auth_file = None try : _create_config_dir ( config_location , "Creating custom config directory [%s]... " ) config_dir = os . path . join ( config_location , NOIPY_CONFIG ) _create_config_dir ( config_dir , "Creating directory [%s]... " ) auth_file = os . path . join ( config_dir , provider ) print ( "Creating auth info file [%s]... " % auth_file , end = "" ) with open ( auth_file , 'w' ) as f : buff = auth . base64key . decode ( 'utf-8' ) f . write ( buff ) print ( "OK." ) except IOError as e : print ( '{0}: "{1}"' . format ( e . strerror , auth_file ) ) raise e | Store auth info in file for specified provider |
40,940 | def load ( provider , config_location = DEFAULT_CONFIG_DIR ) : auth = None auth_file = None try : config_dir = os . path . join ( config_location , NOIPY_CONFIG ) print ( "Loading stored auth info [%s]... " % config_dir , end = "" ) auth_file = os . path . join ( config_dir , provider ) with open ( auth_file ) as f : auth_key = f . read ( ) auth = ApiAuth . get_instance ( auth_key . encode ( 'utf-8' ) ) print ( "OK." ) except IOError as e : print ( '{0}: "{1}"' . format ( e . strerror , auth_file ) ) raise e return auth | Load provider specific auth info from file |
40,941 | def exists ( provider , config_location = DEFAULT_CONFIG_DIR ) : config_dir = os . path . join ( config_location , NOIPY_CONFIG ) auth_file = os . path . join ( config_dir , provider ) return os . path . exists ( auth_file ) | Check whether provider info is already stored |
40,942 | def get_instance ( cls , encoded_key ) : login_str = base64 . b64decode ( encoded_key ) . decode ( 'utf-8' ) usertoken , password = login_str . strip ( ) . split ( ':' , 1 ) instance = cls ( usertoken , password ) return instance | Return an ApiAuth instance from an encoded key |
40,943 | def create_indexed_document ( index_instance , model_items , action ) : data = [ ] if action == 'delete' : for pk in model_items : data . append ( { '_id' : pk , '_op_type' : action } ) else : for doc in model_items : if index_instance . matches_indexing_condition ( doc ) : data . append ( index_instance . serialize_object ( doc ) ) return data | Creates the document that will be passed into the bulk index function . Either a list of serialized objects to index or a a dictionary specifying the primary keys of items to be delete . |
40,944 | def filter_model_items ( index_instance , model_items , model_name , start_date , end_date ) : if index_instance . updated_field is None : logger . warning ( "No updated date field found for {} - not restricting with start and end date" . format ( model_name ) ) else : if start_date : model_items = model_items . filter ( ** { '{}__gte' . format ( index_instance . updated_field ) : __str_to_tzdate__ ( start_date ) } ) if end_date : model_items = model_items . filter ( ** { '{}__lte' . format ( index_instance . updated_field ) : __str_to_tzdate__ ( end_date ) } ) return model_items | Filters the model items queryset based on start and end date . |
40,945 | def extract_version ( txt ) : words = txt . replace ( ',' , ' ' ) . split ( ) version = None for x in reversed ( words ) : if len ( x ) > 2 : if x [ 0 ] . lower ( ) == 'v' : x = x [ 1 : ] if '.' in x and x [ 0 ] . isdigit ( ) : version = x break return version | This function tries to extract the version from the help text of any program . |
40,946 | def check ( self , return_code = 0 ) : ret = self . call ( ) . return_code ok = ret == return_code if not ok : raise EasyProcessError ( self , 'check error, return code is not {0}!' . format ( return_code ) ) return self | Run command with arguments . Wait for command to complete . If the exit code was as expected and there is no exception then return otherwise raise EasyProcessError . |
40,947 | def call ( self , timeout = None ) : self . start ( ) . wait ( timeout = timeout ) if self . is_alive ( ) : self . stop ( ) return self | Run command with arguments . Wait for command to complete . |
40,948 | def start ( self ) : if self . is_started : raise EasyProcessError ( self , 'process was started twice!' ) if self . use_temp_files : self . _stdout_file = tempfile . TemporaryFile ( prefix = 'stdout_' ) self . _stderr_file = tempfile . TemporaryFile ( prefix = 'stderr_' ) stdout = self . _stdout_file stderr = self . _stderr_file else : stdout = subprocess . PIPE stderr = subprocess . PIPE cmd = list ( map ( uniencode , self . cmd ) ) try : self . popen = subprocess . Popen ( cmd , stdout = stdout , stderr = stderr , cwd = self . cwd , env = self . env , ) except OSError as oserror : log . debug ( 'OSError exception: %s' , oserror ) self . oserror = oserror raise EasyProcessError ( self , 'start error' ) self . is_started = True log . debug ( 'process was started (pid=%s)' , self . pid ) return self | start command in background and does not wait for it . |
40,949 | def wait ( self , timeout = None ) : if timeout is not None : if not self . _thread : self . _thread = threading . Thread ( target = self . _wait4process ) self . _thread . daemon = 1 self . _thread . start ( ) if self . _thread : self . _thread . join ( timeout = timeout ) self . timeout_happened = self . timeout_happened or self . _thread . isAlive ( ) else : self . _wait4process ( ) return self | Wait for command to complete . |
40,950 | def send_message ( self , message , room_id , ** kwargs ) : return SendMessage ( settings = self . settings , ** kwargs ) . call ( message = message , room_id = room_id , ** kwargs ) | Send a message to a given room |
40,951 | def get_private_rooms ( self , ** kwargs ) : return GetPrivateRooms ( settings = self . settings , ** kwargs ) . call ( ** kwargs ) | Get a listing of all private rooms with their names and IDs |
40,952 | def get_private_room_history ( self , room_id , oldest = None , ** kwargs ) : return GetPrivateRoomHistory ( settings = self . settings , ** kwargs ) . call ( room_id = room_id , oldest = oldest , ** kwargs ) | Get various history of specific private group in this case private |
40,953 | def get_public_rooms ( self , ** kwargs ) : return GetPublicRooms ( settings = self . settings , ** kwargs ) . call ( ** kwargs ) | Get a listing of all public rooms with their names and IDs |
40,954 | def get_private_room_info ( self , room_id , ** kwargs ) : return GetPrivateRoomInfo ( settings = self . settings , ** kwargs ) . call ( room_id = room_id , ** kwargs ) | Get various information about a specific private group |
40,955 | def _clone ( self ) : instance = super ( Bungiesearch , self ) . _clone ( ) instance . _raw_results_only = self . _raw_results_only return instance | Must clone additional fields to those cloned by elasticsearch - dsl - py . |
40,956 | def execute ( self , return_results = True ) : if self . results : return self . results if return_results else None self . execute_raw ( ) if self . _raw_results_only : self . results = self . raw_results else : self . map_results ( ) if return_results : return self . results | Executes the query and attempts to create model objects from results . |
40,957 | def hook_alias ( self , alias , model_obj = None ) : try : search_alias = self . _alias_hooks [ alias ] except KeyError : raise AttributeError ( 'Could not find search alias named {}. Is this alias defined in BUNGIESEARCH["ALIASES"]?' . format ( alias ) ) else : if search_alias . _applicable_models and ( ( model_obj and model_obj not in search_alias . _applicable_models ) or not any ( [ app_model_obj . __name__ in self . _doc_type for app_model_obj in search_alias . _applicable_models ] ) ) : raise ValueError ( 'Search alias {} is not applicable to model/doc_types {}.' . format ( alias , model_obj if model_obj else self . _doc_type ) ) return search_alias . prepare ( self , model_obj ) . alias_for | Returns the alias function if it exists and if it can be applied to this model . |
40,958 | def custom_search ( self , index , doc_type ) : from bungiesearch import Bungiesearch return Bungiesearch ( raw_results = True ) . index ( index ) . doc_type ( doc_type ) | Performs a search on a custom elasticsearch index and mapping . Will not attempt to map result objects . |
40,959 | def contribute_to_class ( self , cls , name ) : super ( BungiesearchManager , self ) . contribute_to_class ( cls , name ) from . import Bungiesearch from . signals import get_signal_processor settings = Bungiesearch . BUNGIE if 'SIGNALS' in settings : self . signal_processor = get_signal_processor ( ) self . signal_processor . setup ( self . model ) | Sets up the signal processor . Since self . model is not available in the constructor we perform this operation here . |
40,960 | def django_field_to_index ( field , ** attr ) : dj_type = field . get_internal_type ( ) if dj_type in ( 'DateField' , 'DateTimeField' ) : return DateField ( ** attr ) elif dj_type in ( 'BooleanField' , 'NullBooleanField' ) : return BooleanField ( ** attr ) elif dj_type in ( 'DecimalField' , 'FloatField' ) : return NumberField ( coretype = 'float' , ** attr ) elif dj_type in ( 'PositiveSmallIntegerField' , 'SmallIntegerField' ) : return NumberField ( coretype = 'short' , ** attr ) elif dj_type in ( 'IntegerField' , 'PositiveIntegerField' , 'AutoField' ) : return NumberField ( coretype = 'integer' , ** attr ) elif dj_type in ( 'BigIntegerField' ) : return NumberField ( coretype = 'long' , ** attr ) return StringField ( ** attr ) | Returns the index field type that would likely be associated with each Django type . |
40,961 | def split_command ( cmd , posix = None ) : if not isinstance ( cmd , string_types ) : pass else : if not PY3 : if isinstance ( cmd , unicode ) : try : cmd = unicodedata . normalize ( 'NFKD' , cmd ) . encode ( 'ascii' , 'strict' ) except UnicodeEncodeError : raise EasyProcessUnicodeError ( 'unicode command "%s" can not be processed.' % cmd + 'Use string list instead of string' ) log . debug ( 'unicode is normalized' ) if posix is None : posix = 'win' not in sys . platform cmd = shlex . split ( cmd , posix = posix ) return cmd | - cmd is string list - > nothing to do - cmd is string - > split it using shlex |
40,962 | def get_mapping ( self , meta_fields = True ) : return { 'properties' : dict ( ( name , field . json ( ) ) for name , field in iteritems ( self . fields ) if meta_fields or name not in AbstractField . meta_fields ) } | Returns the mapping for the index as a dictionary . |
40,963 | def serialize_object ( self , obj , obj_pk = None ) : if not obj : try : obj = self . model . objects . filter ( pk = obj_pk ) . values ( * self . fields_to_fetch ) [ 0 ] except Exception as e : raise ValueError ( 'Could not find object of primary key = {} in model {} (model index class {}). (Original exception: {}.)' . format ( obj_pk , self . model , self . __class__ . __name__ , e ) ) serialized_object = { } for name , field in iteritems ( self . fields ) : if hasattr ( self , "prepare_%s" % name ) : value = getattr ( self , "prepare_%s" % name ) ( obj ) else : value = field . value ( obj ) serialized_object [ name ] = value return serialized_object | Serializes an object for it to be added to the index . |
40,964 | def _get_fields ( self , fields , excludes , hotfixes ) : final_fields = { } fields = fields or [ ] excludes = excludes or [ ] for f in self . model . _meta . fields : if f . name in self . fields : continue if fields and f . name not in fields : continue if excludes and f . name in excludes : continue if getattr ( f , 'rel' ) : continue attr = { 'model_attr' : f . name } if f . has_default ( ) : attr [ 'null_value' ] = f . default if f . name in hotfixes : attr . update ( hotfixes [ f . name ] ) final_fields [ f . name ] = django_field_to_index ( f , ** attr ) return final_fields | Given any explicit fields to include and fields to exclude add additional fields based on the associated model . If the field needs a hotfix apply it . |
40,965 | def validate_items ( self ) : logger . debug ( fmt ( "Validating {}" , self ) ) from python_jsonschema_objects import classbuilder if self . __itemtype__ is None : return type_checks = self . __itemtype__ if not isinstance ( type_checks , ( tuple , list ) ) : type_checks = [ type_checks ] * len ( self . data ) elif len ( type_checks ) > len ( self . data ) : raise ValidationError ( "{1} does not have sufficient elements to validate against {0}" . format ( self . __itemtype__ , self . data ) ) typed_elems = [ ] for elem , typ in zip ( self . data , type_checks ) : if isinstance ( typ , dict ) : for param , paramval in six . iteritems ( typ ) : validator = registry ( param ) if validator is not None : validator ( paramval , elem , typ ) typed_elems . append ( elem ) elif util . safe_issubclass ( typ , classbuilder . LiteralValue ) : val = typ ( elem ) val . validate ( ) typed_elems . append ( val ) elif util . safe_issubclass ( typ , classbuilder . ProtocolBase ) : if not isinstance ( elem , typ ) : try : if isinstance ( elem , ( six . string_types , six . integer_types , float ) ) : val = typ ( elem ) else : val = typ ( ** util . coerce_for_expansion ( elem ) ) except TypeError as e : raise ValidationError ( "'{0}' is not a valid value for '{1}': {2}" . format ( elem , typ , e ) ) else : val = elem val . validate ( ) typed_elems . append ( val ) elif util . safe_issubclass ( typ , ArrayWrapper ) : val = typ ( elem ) val . validate ( ) typed_elems . append ( val ) elif isinstance ( typ , ( classbuilder . TypeProxy , classbuilder . TypeRef ) ) : try : if isinstance ( elem , ( six . string_types , six . integer_types , float ) ) : val = typ ( elem ) else : val = typ ( ** util . coerce_for_expansion ( elem ) ) except TypeError as e : raise ValidationError ( "'{0}' is not a valid value for '{1}': {2}" . format ( elem , typ , e ) ) else : val . validate ( ) typed_elems . append ( val ) self . _dirty = False self . 
_typed = typed_elems return typed_elems | Validates the items in the backing array including performing type validation . |
40,966 | def extendMarkdown ( self , md , md_globals ) : md . registerExtension ( self ) md . preprocessors . add ( 'fenced_code_block' , SpecialFencePreprocessor ( md ) , ">normalize_whitespace" ) | Add FencedBlockPreprocessor to the Markdown instance . |
40,967 | def propmerge ( into , data_from ) : newprops = copy . deepcopy ( into ) for prop , propval in six . iteritems ( data_from ) : if prop not in newprops : newprops [ prop ] = propval continue new_sp = newprops [ prop ] for subprop , spval in six . iteritems ( propval ) : if subprop not in new_sp : new_sp [ subprop ] = spval elif subprop == 'enum' : new_sp [ subprop ] = set ( spval ) & set ( new_sp [ subprop ] ) elif subprop == 'type' : if spval != new_sp [ subprop ] : raise TypeError ( "Type cannot conflict in allOf'" ) elif subprop in ( 'minLength' , 'minimum' ) : new_sp [ subprop ] = ( new_sp [ subprop ] if new_sp [ subprop ] > spval else spval ) elif subprop in ( 'maxLength' , 'maximum' ) : new_sp [ subprop ] = ( new_sp [ subprop ] if new_sp [ subprop ] < spval else spval ) elif subprop == 'multipleOf' : if new_sp [ subprop ] % spval == 0 : new_sp [ subprop ] = spval else : raise AttributeError ( "Cannot set conflicting multipleOf values" ) else : new_sp [ subprop ] = spval newprops [ prop ] = new_sp return newprops | Merge JSON schema requirements into a dictionary |
40,968 | def as_dict ( self ) : out = { } for prop in self : propval = getattr ( self , prop ) if hasattr ( propval , 'for_json' ) : out [ prop ] = propval . for_json ( ) elif isinstance ( propval , list ) : out [ prop ] = [ getattr ( x , 'for_json' , lambda : x ) ( ) for x in propval ] elif isinstance ( propval , ( ProtocolBase , LiteralValue ) ) : out [ prop ] = propval . as_dict ( ) elif propval is not None : out [ prop ] = propval return out | Return a dictionary containing the current values of the object . |
def from_json(cls, jsonmsg):
    """Create an object directly from a JSON string.

    The parsed message is expanded as keyword arguments to the class
    constructor, and the resulting object is validated before return.
    """
    import json
    parsed = json.loads(jsonmsg)
    instance = cls(**parsed)
    instance.validate()
    return instance
def validate(self):
    """Applies all defined validation to the current state of the object, and raises an error if they are not all met.

    :returns: True on success.
    :raises validators.ValidationError: when required properties are
        missing or a property value fails validation.
    """
    missing = self.missing_property_names()
    if len(missing) > 0:
        raise validators.ValidationError(
            "'{0}' are required attributes for {1}"
            .format(missing, self.__class__.__name__))
    for prop, val in six.iteritems(self._properties):
        if val is None:
            continue
        if isinstance(val, ProtocolBase):
            val.validate()
        elif getattr(val, 'isLiteralClass', None) is True:
            val.validate()
        elif isinstance(val, list):
            for subval in val:
                subval.validate()
        else:
            # Not a wrapped type yet: re-assigning through the property
            # descriptor triggers coercion/validation as a side effect.
            setattr(self, prop, val)
    return True
def missing_property_names(self):
    """Returns a list of raw schema names of properties which are required and missing.

    A required property counts as missing when it is present but set to
    None while null is not an allowed type, or when it is absent (see
    review note below).
    """
    propname = lambda x: self.__prop_names__[x]
    missing = []
    for x in self.__required__:
        propinfo = self.propinfo(propname(x))
        null_type = False
        if 'type' in propinfo:
            type_info = propinfo['type']
            # True when the declared type is 'null' or a union containing it.
            null_type = (type_info == 'null'
                         or isinstance(type_info, (list, tuple))
                         and 'null' in type_info)
        elif 'oneOf' in propinfo:
            for o in propinfo['oneOf']:
                type_info = o.get('type')
                # Relies on and/or precedence:
                # (type_info and type_info == 'null')
                #   or (isinstance(...) and 'null' in type_info)
                if type_info and type_info == 'null' or isinstance(type_info, (list, tuple)) and 'null' in type_info:
                    null_type = True
                    break
        # NOTE(review): the first clause reports an *absent* property as
        # missing only when null IS an allowed type — that reads inverted;
        # confirm against the upstream project before changing.
        if (propname(x) not in self._properties and null_type) or (
                self._properties[propname(x)] is None and not null_type):
            missing.append(x)
    return missing
def construct(self, uri, *args, **kw):
    """Wrapper to debug things.

    Returns the cached class for *uri* when available (and no override
    is requested); otherwise delegates to ``_construct``.
    """
    logger.debug(util.lazy_format("Constructing {0}", uri))
    use_cached = (('override' not in kw or kw['override'] is False)
                  and uri in self.resolved)
    if use_cached:
        logger.debug(util.lazy_format("Using existing {0}", uri))
        return self.resolved[uri]
    built = self._construct(uri, *args, **kw)
    logger.debug(util.lazy_format("Constructed {0}", built))
    return built
def build_classes(self, strict=False, named_only=False, standardize_names=True):
    """Build all of the classes named in the JSONSchema.

    :param strict: forwarded to the class builder.
    :param named_only: when True, only classes carrying a ``__title__``
        are included in the returned namespace.
    :param standardize_names: when True, names are camelized via
        inflection before being used as namespace keys.
    :returns: a Namespace mapping class names to the built classes.
    """
    kw = {"strict": strict}
    builder = classbuilder.ClassBuilder(self.resolver)
    # Build every schema under #/definitions first so references resolve.
    for nm, defn in iteritems(self.schema.get('definitions', {})):
        uri = python_jsonschema_objects.util.resolve_ref_uri(
            self.resolver.resolution_scope, "#/definitions/" + nm)
        builder.construct(uri, defn, **kw)
    if standardize_names:
        name_transform = lambda t: inflection.camelize(
            inflection.parameterize(six.text_type(t), '_'))
    else:
        name_transform = lambda t: t
    # The root schema is named from its title, falling back to its id.
    nm = self.schema['title'] if 'title' in self.schema else self.schema['id']
    nm = inflection.parameterize(six.text_type(nm), '_')
    builder.construct(nm, self.schema, **kw)
    self._resolved = builder.resolved
    classes = {}
    for uri, klass in six.iteritems(builder.resolved):
        title = getattr(klass, '__title__', None)
        if title is not None:
            classes[name_transform(title)] = klass
        elif not named_only:
            # Untitled classes are named after the tail of their URI.
            classes[name_transform(uri.split('/')[-1])] = klass
    return python_jsonschema_objects.util.Namespace.from_mapping(classes)
def interpolate(self):
    """Do the interpolation, and return resulting longitudes and latitudes.

    Caches the results on ``self.latitude`` / ``self.longitude`` and
    returns them as a ``(latitude, longitude)`` pair.
    """
    lats = self._interp(self.lat_tiepoint)
    lons = self._interp(self.lon_tiepoint)
    self.latitude = lats
    self.longitude = lons
    return lats, lons
def get(self, key, default=None, reraise=False):
    """Get the given key from the cache, if present.

    A default value can be provided in case the requested key is not
    present, otherwise None will be returned.  With ``reraise`` set,
    a disabled cache or a cache miss raises instead of returning the
    default.
    """
    if self.enabled:
        try:
            return self._get(key)
        except exceptions.NotInCache:
            if not reraise:
                return default
            raise
    if reraise:
        raise exceptions.DisabledCache()
    return default
def resolve_attr(obj, name):
    """A custom attrgetter that operates both on dictionaries and objects.

    Resolution order: mapping key, instance ``__dict__`` entry, regular
    attribute, then — for iterables — a lazy per-element accessor.

    :raises exceptions.MissingField: when *name* cannot be resolved.
    """
    try:
        return obj[name]
    except TypeError:
        # obj does not support item access; fall through to attributes.
        pass
    except KeyError:
        raise exceptions.MissingField(
            'Dict {0} has no attribute or key "{1}"'.format(obj, name))
    try:
        return obj.__dict__[name]
    except (KeyError, AttributeError):
        pass
    try:
        return getattr(obj, name)
    except AttributeError:
        pass
    # FIX: collections.Iterable was removed in Python 3.10; use
    # collections.abc when available while remaining compatible with
    # interpreters that only expose it on the top-level module.
    iterable_abc = getattr(collections, 'abc', collections).Iterable
    if isinstance(obj, iterable_abc):
        return IterableAttr(obj, name)
    raise exceptions.MissingField(
        'Object {0} has no attribute or key "{1}"'.format(obj, name))
def build_filter_from_kwargs(self, **kwargs):
    """Convert django's like lookup to SQLAlchemy ones.

    Each keyword such as ``name__contains=value`` is split on ``__``; a
    trailing segment naming a registered lookup becomes that lookup,
    otherwise plain equality is used.  Individual filters are AND-ed
    together into a single query node (None when kwargs is empty).
    """
    query = None
    for path_to_convert, value in kwargs.items():
        path_parts = path_to_convert.split('__')
        lookup_class = None
        try:
            # The last segment may name a registered lookup (e.g. "contains").
            lookup_class = lookups.registry[path_parts[-1]]
            path_to_convert = '__'.join(path_parts[:-1])
        except KeyError:
            # Not a lookup name: treat the whole string as an attribute path.
            pass
        path = lookup_to_path(path_to_convert)
        if lookup_class:
            q = QueryNode(path, lookup=lookup_class(value))
        else:
            q = path == value
        if query:
            query = query & q
        else:
            query = q
    return query
def get_scene_splits(nlines_swath, nlines_scan, n_cpus):
    """Calculate the line numbers where the swath will be split in smaller
    granules for parallel processing.

    Returns a range of split positions; empty when a single chunk covers
    the whole swath.
    """
    total_scans = nlines_swath // nlines_scan
    # Each worker gets at least one scan.
    scans_per_chunk = 1 if total_scans < n_cpus else total_scans // n_cpus
    lines_per_chunk = scans_per_chunk * nlines_scan
    return range(lines_per_chunk, nlines_swath, lines_per_chunk)
def metop20kmto1km(lons20km, lats20km):
    """Getting 1km geolocation for metop avhrr from 20km tiepoints."""
    # Tiepoint columns: edges plus every 20th pixel starting at 4.
    cols20km = np.array([0] + list(range(4, 2048, 20)) + [2047])
    cols1km = np.arange(2048)
    nb_lines = lons20km.shape[0]
    rows20km = np.arange(nb_lines)
    rows1km = np.arange(nb_lines)

    interpolator = SatelliteInterpolator((lons20km, lats20km),
                                         (rows20km, cols20km),
                                         (rows1km, cols1km),
                                         1,   # along-track order
                                         3)   # cross-track order
    return interpolator.interpolate()
def modis5kmto1km(lons5km, lats5km):
    """Getting 1km geolocation for modis from 5km tiepoints."""
    # Tiepoint/target grids expressed in 5km units.
    cols5km = np.arange(2, 1354, 5) / 5.0
    cols1km = np.arange(1354) / 5.0
    nb_lines = lons5km.shape[0] * 5
    rows5km = np.arange(2, nb_lines, 5) / 5.0
    rows1km = np.arange(nb_lines) / 5.0

    interpolator = SatelliteInterpolator((lons5km, lats5km),
                                         (rows5km, cols5km),
                                         (rows1km, cols1km),
                                         1,   # along-track order
                                         3,   # cross-track order
                                         chunk_size=10)
    interpolator.fill_borders("y", "x")
    return interpolator.interpolate()
def _multi(fun, lons, lats, chunk_size, cores=1):
    """Work on multiple cores.

    Splits the swath into per-core granules, applies *fun* to each in a
    process pool and stacks the results back together.
    """
    pool = Pool(processes=cores)
    splits = get_scene_splits(lons.shape[0], chunk_size, cores)

    lon_chunks = np.vsplit(lons, splits)
    lat_chunks = np.vsplit(lats, splits)
    async_results = [pool.apply_async(fun, (lon_chunk, lat_chunk))
                     for lon_chunk, lat_chunk in zip(lon_chunks, lat_chunks)]
    pool.close()
    pool.join()

    lons_out, lats_out = zip(*(res.get() for res in async_results))
    return np.vstack(lons_out), np.vstack(lats_out)
def modis1kmto500m(lons1km, lats1km, cores=1):
    """Getting 500m geolocation for modis from 1km tiepoints."""
    if cores > 1:
        # Fan out over processes, one swath granule per core.
        return _multi(modis1kmto500m, lons1km, lats1km, 10, cores)

    cols1km = np.arange(1354)
    cols500m = np.arange(1354 * 2) / 2.0
    nb_lines = lons1km.shape[0]
    rows1km = np.arange(nb_lines)
    rows500m = (np.arange(nb_lines * 2) - 0.5) / 2.

    interpolator = SatelliteInterpolator((lons1km, lats1km),
                                         (rows1km, cols1km),
                                         (rows500m, cols500m),
                                         1,   # along-track order
                                         3,   # cross-track order
                                         chunk_size=20)
    interpolator.fill_borders("y", "x")
    return interpolator.interpolate()
def modis1kmto250m(lons1km, lats1km, cores=1):
    """Getting 250m geolocation for modis from 1km tiepoints."""
    if cores > 1:
        # Fan out over processes, one swath granule per core.
        return _multi(modis1kmto250m, lons1km, lats1km, 10, cores)

    cols1km = np.arange(1354)
    cols250m = np.arange(1354 * 4) / 4.0
    nb_lines = lons1km.shape[0]
    rows1km = np.arange(nb_lines)
    rows250m = (np.arange(nb_lines * 4) - 1.5) / 4.0

    interpolator = SatelliteInterpolator((lons1km, lats1km),
                                         (rows1km, cols1km),
                                         (rows250m, cols250m),
                                         1,   # along-track order
                                         3,   # cross-track order
                                         chunk_size=40)
    interpolator.fill_borders("y", "x")
    return interpolator.interpolate()
def generic_modis5kmto1km(*data5km):
    """Getting 1km data for modis from 5km tiepoints.

    Accepts any number of co-registered 5km datasets and interpolates
    them all onto the 1km grid.
    """
    cols5km = np.arange(2, 1354, 5)
    cols1km = np.arange(1354)
    nb_lines = data5km[0].shape[0] * 5
    rows5km = np.arange(2, nb_lines, 5)
    rows1km = np.arange(nb_lines)

    interpolator = Interpolator(list(data5km),
                                (rows5km, cols5km),
                                (rows1km, cols1km),
                                1,   # along-track order
                                3,   # cross-track order
                                chunk_size=10)
    interpolator.fill_borders("y", "x")
    return interpolator.interpolate()
def fill_borders(self, *args):
    """Extrapolate tiepoint lons and lats to fill in the border of the chunks.

    Accepted dimension names are "y" (rows) and "x" (columns); handlers
    run in the order the dimensions are given.

    :raises NameError: on an unrecognized dimension name.
    """
    handlers = {"y": self._fill_row_borders,
                "x": self._fill_col_borders}
    selected = []
    # Validate every requested dimension before running any handler.
    for dim in args:
        try:
            selected.append(handlers[dim])
        except KeyError:
            raise NameError("Unrecognized dimension: " + str(dim))
    for handler in selected:
        handler()
def _extrapolate_cols(self, data, first=True, last=True):
    """Extrapolate the column of data, to get the first and last together
    with the data.

    Linearly extrapolates from the two outermost tiepoint columns on each
    requested side and returns the data widened accordingly.
    """
    pieces = []
    if first:
        leading = _linear_extrapolate(self.col_indices[:2],
                                      (data[:, 0], data[:, 1]),
                                      self.hcol_indices[0])
        pieces.append(np.expand_dims(leading, 1))
    pieces.append(data)
    if last:
        trailing = _linear_extrapolate(self.col_indices[-2:],
                                       (data[:, -2], data[:, -1]),
                                       self.hcol_indices[-1])
        pieces.append(np.expand_dims(trailing, 1))
    if len(pieces) == 1:
        # Nothing to add on either side.
        return data
    return np.hstack(pieces)
def _fill_col_borders(self):
    """Add the first and last columns to the data by extrapolation."""
    first = True
    last = True
    # Skip extrapolation on a side where a tiepoint already sits on the border.
    if self.col_indices[0] == self.hcol_indices[0]:
        first = False
    if self.col_indices[-1] == self.hcol_indices[-1]:
        last = False
    for num, data in enumerate(self.tie_data):
        self.tie_data[num] = self._extrapolate_cols(data, first, last)
    # Keep col_indices consistent with the (possibly) widened data.
    if first and last:
        self.col_indices = np.concatenate(
            (np.array([self.hcol_indices[0]]),
             self.col_indices,
             np.array([self.hcol_indices[-1]])))
    elif first:
        self.col_indices = np.concatenate(
            (np.array([self.hcol_indices[0]]), self.col_indices))
    elif last:
        self.col_indices = np.concatenate(
            (self.col_indices, np.array([self.hcol_indices[-1]])))
def _extrapolate_rows(self, data, row_indices, first_index, last_index):
    """Extrapolate the rows of data, to get the first and last together
    with the data.

    Linearly extrapolates from the two outermost tiepoint rows on each
    side and returns the data with one extra row at top and bottom.
    """
    top_row = _linear_extrapolate(row_indices[:2],
                                  (data[0, :], data[1, :]),
                                  first_index)
    bottom_row = _linear_extrapolate(row_indices[-2:],
                                     (data[-2, :], data[-1, :]),
                                     last_index)
    return np.vstack((np.expand_dims(top_row, 0),
                      data,
                      np.expand_dims(bottom_row, 0)))
def _fill_row_borders(self):
    """Add the first and last rows to the data by extrapolation.

    The swath is processed chunk by chunk (``self.chunk_size`` output
    lines at a time; the whole swath when chunk_size is falsy), and each
    chunk gets an extrapolated first and last row so the interpolation
    covers its full extent.
    """
    lines = len(self.hrow_indices)
    chunk_size = self.chunk_size or lines
    # Ratio between output and tiepoint row resolutions.
    factor = len(self.hrow_indices) / len(self.row_indices)
    tmp_data = []
    for num in range(len(self.tie_data)):
        tmp_data.append([])
    row_indices = []
    for index in range(0, lines, chunk_size):
        # Tiepoint rows falling inside this chunk (tiepoint coordinates).
        indices = np.logical_and(
            self.row_indices >= index / factor,
            self.row_indices < (index + chunk_size) / factor)
        ties = np.argwhere(indices).squeeze()
        tiepos = self.row_indices[indices].squeeze()
        for num, data in enumerate(self.tie_data):
            to_extrapolate = data[ties, :]
            if len(to_extrapolate) > 0:
                extrapolated = self._extrapolate_rows(
                    to_extrapolate,
                    tiepos,
                    self.hrow_indices[index],
                    self.hrow_indices[index + chunk_size - 1])
                tmp_data[num].append(extrapolated)
        # Record the extended row index list: chunk start, tiepoints, chunk end.
        row_indices.append(np.array([self.hrow_indices[index]]))
        row_indices.append(tiepos)
        row_indices.append(np.array([self.hrow_indices[index + chunk_size - 1]]))
    for num in range(len(self.tie_data)):
        self.tie_data[num] = np.vstack(tmp_data[num])
    self.row_indices = np.concatenate(row_indices)
def _interp(self):
    """Interpolate the cartesian coordinates.

    Falls back to 1-D (along-column) interpolation when the requested
    rows coincide with the tiepoint rows; otherwise fits a rectangular
    bivariate spline per dataset and evaluates it on the output grid.
    """
    if np.all(self.hrow_indices == self.row_indices):
        return self._interp1d()
    xpoints, ypoints = np.meshgrid(self.hrow_indices, self.hcol_indices)
    for num, data in enumerate(self.tie_data):
        spl = RectBivariateSpline(self.row_indices,
                                  self.col_indices,
                                  data,
                                  s=0,
                                  kx=self.kx_,
                                  ky=self.ky_)
        new_data_ = spl.ev(xpoints.ravel(), ypoints.ravel())
        # Reshape to (rows, cols); copy to get a C-contiguous array.
        self.new_data[num] = new_data_.reshape(
            xpoints.shape).T.copy(order='C')
40,991 | def _interp1d ( self ) : lines = len ( self . hrow_indices ) for num , data in enumerate ( self . tie_data ) : self . new_data [ num ] = np . empty ( ( len ( self . hrow_indices ) , len ( self . hcol_indices ) ) , data . dtype ) for cnt in range ( lines ) : tck = splrep ( self . col_indices , data [ cnt , : ] , k = self . ky_ , s = 0 ) self . new_data [ num ] [ cnt , : ] = splev ( self . hcol_indices , tck , der = 0 ) | Interpolate in one dimension . |
def get_lats_from_cartesian(x__, y__, z__, thr=0.8):
    """Get latitudes from cartesian coordinates.

    Where |z| < thr * EARTH_RADIUS the latitude is derived from the z
    component (arccos form); elsewhere it is derived from the equatorial
    distance sqrt(x**2 + y**2) (arcsin form), signed by z — presumably
    for numerical accuracy near the poles (TODO confirm).
    """
    lats = np.where(
        np.logical_and(np.less(z__, thr * EARTH_RADIUS),
                       np.greater(z__, -1. * thr * EARTH_RADIUS)),
        # Away from the poles: latitude from the z component.
        90 - rad2deg(arccos(z__ / EARTH_RADIUS)),
        # Near the poles: latitude from the equatorial distance, signed by z.
        sign(z__) * (90 - rad2deg(arcsin(sqrt(x__ ** 2 + y__ ** 2) / EARTH_RADIUS))))
    return lats
def set_tiepoints(self, lon, lat):
    """Defines the lon,lat tie points used for the interpolation."""
    self.lat_tiepoint = lat
    self.lon_tiepoint = lon
def compute_expansion_alignment(satz_a, satz_b, satz_c, satz_d):
    """All angles in radians.

    Computes along-scan expansion and alignment coefficients from the
    satellite zenith angles of two neighbouring tiepoints (``satz_c``
    and ``satz_d`` are accepted but unused in this body).  Relies on
    module-level constants ``R``, ``H`` and ``scan_width`` —
    presumably earth radius, satellite altitude and scan half-width;
    confirm their definitions elsewhere in the module.
    """
    zeta_a = satz_a
    zeta_b = satz_b
    phi_a = compute_phi(zeta_a)
    phi_b = compute_phi(zeta_b)
    theta_a = compute_theta(zeta_a, phi_a)
    theta_b = compute_theta(zeta_b, phi_b)
    # Mid-point geometry between the two tiepoints.
    phi = (phi_a + phi_b) / 2
    zeta = compute_zeta(phi)
    theta = compute_theta(zeta, phi)
    # Deviation of the mid-point angle from the average, normalized by the span.
    c_expansion = 4 * (((theta_a + theta_b) / 2 - theta) / (theta_a - theta_b))
    sin_beta_2 = scan_width / (2 * H)
    d = ((R + H) / R * np.cos(phi) - np.cos(zeta)) * sin_beta_2
    e = np.cos(zeta) - np.sqrt(np.cos(zeta) ** 2 - d ** 2)
    c_alignment = 4 * e * np.sin(zeta) / (theta_a - theta_b)
    return c_expansion, c_alignment
def lonlat2xyz(lons, lats):
    """Convert lons and lats to cartesian coordinates.

    Angles are in degrees; the sphere radius used is 6370997.0 metres.
    """
    R = 6370997.0
    lons_rad = da.deg2rad(lons)
    lats_rad = da.deg2rad(lats)
    cos_lats = da.cos(lats_rad)
    x_coords = R * cos_lats * da.cos(lons_rad)
    y_coords = R * cos_lats * da.sin(lons_rad)
    z_coords = R * da.sin(lats_rad)
    return x_coords, y_coords, z_coords
def setup_fields(attrs):
    """Collect all fields declared on the class, and remove them from attrs.

    Returns a dict mapping attribute name to Field instance; *attrs* is
    mutated in place.
    """
    fields = {}
    # Iterate over a snapshot since we delete from attrs as we go.
    for key, value in list(attrs.items()):
        if isinstance(value, Field):
            fields[key] = value
            del attrs[key]
    return fields
def _parse_jing_line(line):
    """Parse a line of jing output to a list of line, column, type and message.

    Fatal messages are remapped through KNOWN_FATAL_MESSAGES_MAPPING when
    a friendlier wording exists.
    """
    filename, lineno, column, type_, message = [
        part.strip() for part in line.split(':', 4)]
    if type_ == 'fatal':
        message = KNOWN_FATAL_MESSAGES_MAPPING.get(message, message)
    return ErrorLine(filename, lineno, column, type_, message)
def _parse_jing_output(output):
    """Parse the jing output into a tuple of line, column, type and message.

    Blank lines are ignored.
    """
    stripped = output.strip()
    return tuple(_parse_jing_line(line)
                 for line in stripped.split('\n') if line)
def jing(rng_filepath, *xml_filepaths):
    """Run jing.jar using the RNG file against the given XML file(s).

    Returns the parsed validation errors as a tuple of ErrorLine tuples.
    """
    cmd = ['java', '-jar', str(JING_JAR), str(rng_filepath)]
    cmd.extend(str(path) for path in xml_filepaths)
    proc = subprocess.Popen(cmd,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            close_fds=True)
    out, err = proc.communicate()
    return _parse_jing_output(out.decode('utf-8'))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.