idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
233,000
def hashi(self, key, replica=0):
    """Return a ketama-compatible hash of *key* for the given replica slot."""
    digest_bytes = self._listbytes(md5(str(key).encode('utf-8')).digest())
    offset = replica * 4
    # assemble a little-endian 32-bit value from 4 digest bytes
    value = 0
    for shift, pos in ((24, 3), (16, 2), (8, 1), (0, 0)):
        value |= digest_bytes[pos + offset] << shift
    return value
Returns a ketama compatible hash from the given key .
95
11
233,001
def _hashi_weight_generator(self, node_name, node_conf):
    """Yield a hash key for every weighted virtual node and configured replica."""
    factor = (node_conf['vnodes'] * len(self._nodes)
              * node_conf['weight']) // self._weight_sum
    for vnode in range(factor):
        vnode_name = '%s-%s' % (node_name, vnode)
        for replica in range(self._replicas):
            yield self.hashi(vnode_name, replica=replica)
Calculate the weight factor of the given node and yield its hash key for every configured replica .
123
20
233,002
def lapmod(n, cc, ii, kk, fast=True, return_cost=True, fp_version=FP_DYNAMIC):
    """Solve a sparse linear assignment problem (Jonker-Volgenant).

    n:  problem size; cc/ii/kk: CSR-style sparse cost matrix.
    fast=True delegates to the compiled _lapmod; otherwise runs the
    pure-Python pipeline (column reduction, augmenting row reduction,
    augmentation).  Returns (cost, x, y) or (x, y).
    """
    # log = logging.getLogger('lapmod')
    check_cost(n, cc, ii, kk)
    if fast is True:
        # log.debug('[----CR & RT & ARR & augmentation ----]')
        x, y = _lapmod(n, cc, ii, kk, fp_version=fp_version)
    else:
        # normalize dtypes/layout for the pure-Python solvers
        cc = np.ascontiguousarray(cc, dtype=np.float64)
        ii = np.ascontiguousarray(ii, dtype=np.int32)
        kk = np.ascontiguousarray(kk, dtype=np.int32)
        x = np.empty((n,), dtype=np.int32)
        y = np.empty((n,), dtype=np.int32)
        v = np.empty((n,), dtype=np.float64)
        free_rows = np.empty((n,), dtype=np.int32)
        # log.debug('[----Column reduction & reduction transfer----]')
        n_free_rows = _pycrrt(n, cc, ii, kk, free_rows, x, y, v)
        # log.debug(
        #     'free, x, y, v: %s %s %s %s', free_rows[:n_free_rows], x, y, v)
        if n_free_rows == 0:
            # log.info('Reduction solved it.')
            if return_cost is True:
                return get_cost(n, cc, ii, kk, x), x, y
            else:
                return x, y
        for it in range(2):
            # log.debug('[---Augmenting row reduction (iteration: %d)---]', it)
            n_free_rows = _pyarr(n, cc, ii, kk, n_free_rows, free_rows, x, y, v)
            # log.debug(
            #     'free, x, y, v: %s %s %s %s', free_rows[:n_free_rows], x, y, v)
            if n_free_rows == 0:
                # log.info('Augmenting row reduction solved it.')
                if return_cost is True:
                    return get_cost(n, cc, ii, kk, x), x, y
                else:
                    return x, y
        # log.info('[----Augmentation----]')
        _pya(n, cc, ii, kk, n_free_rows, free_rows, x, y, v)
        # log.debug('x, y, v: %s %s %s', x, y, v)
    if return_cost is True:
        return get_cost(n, cc, ii, kk, x), x, y
    else:
        return x, y
Solve sparse linear assignment problem using Jonker - Volgenant algorithm .
652
15
233,003
def register_provider(cls, provider):
    """Return a class decorator that registers its target under *provider*."""
    def decorator(subclass):
        """Register as decorator function."""
        subclass.name = provider
        cls._providers[provider] = subclass
        return subclass
    return decorator
Register method to keep list of providers .
48
8
233,004
def tar_to_bigfile(self, fname, outfile):
    """Convert a tar archive of multiple FASTA files into one file.

    Extracts *fname* into a temporary directory, concatenates every
    extracted file into *outfile*, then removes the temporary data.
    """
    tmpdir = mkdtemp()
    try:
        # Extract files to temporary directory
        with tarfile.open(fname) as tar:
            tar.extractall(path=tmpdir)
        fnames = []
        for root, _, files in os.walk(tmpdir):
            # do not shadow the *fname* argument (old code reused the name)
            fnames += [os.path.join(root, f) for f in files]
        # Concatenate; close each input file instead of leaking handles
        with open(outfile, "w") as out:
            for infile in fnames:
                with open(infile) as fin:
                    for line in fin:
                        out.write(line)
                os.unlink(infile)
    finally:
        # Remove temp dir even if extraction/concatenation fails
        shutil.rmtree(tmpdir)
Convert tar of multiple FASTAs to one file .
156
12
233,005
def find_plugins():
    """Locate the plugins directory next to this file and import each module."""
    base = os.path.dirname(os.path.realpath(__file__))
    plugin_dir = os.path.join(base, "plugins")
    module_names = [f[:-3] for f in os.listdir(plugin_dir) if f.endswith(".py")]
    sys.path.insert(0, plugin_dir)
    for module in module_names:
        __import__(module)
Locate and initialize all available plugins .
108
8
233,006
def convert(name):
    """Convert a CamelCase identifier to snake_case."""
    step = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', step).lower()
Convert CamelCase to underscore
76
6
233,007
def init_plugins():
    """Instantiate all Plugin subclasses; activate configured ones; return by name."""
    find_plugins()
    registry = {}
    for plugin_cls in Plugin.__subclasses__():
        instance = plugin_cls()
        if instance.name() in config.get("plugin", []):
            instance.activate()
        registry[instance.name()] = instance
    return registry
Return dictionary of available plugins
69
5
233,008
def activate(name):
    """Activate the plugin registered under *name*.

    Raises Exception when no such plugin is known.
    """
    if name not in plugins:
        raise Exception("plugin {} not found".format(name))
    plugins[name].activate()
Activate plugin .
36
4
233,009
def deactivate(name):
    """Deactivate the plugin registered under *name*.

    Raises Exception when no such plugin is known.
    """
    if name not in plugins:
        raise Exception("plugin {} not found".format(name))
    plugins[name].deactivate()
Deactivate plugin .
38
4
233,010
def manage_config(cmd, *args):
    """Manage the genomepy config file.

    cmd is one of: "file" (print config path), "show" (print contents),
    "generate" (copy the current config into the user config dir).
    """
    if cmd == "file":
        print(config.config_file)
    elif cmd == "show":
        with open(config.config_file) as f:
            print(f.read())
    elif cmd == "generate":
        fname = os.path.join(
            user_config_dir("genomepy"), "{}.yaml".format("genomepy"))
        # create the user config dir on first use
        if not os.path.exists(user_config_dir("genomepy")):
            os.makedirs(user_config_dir("genomepy"))
        with open(fname, "w") as fout:
            with open(config.config_file) as fin:
                fout.write(fin.read())
        print("Created config file {}".format(fname))
Manage genomepy config file .
193
7
233,011
def search(term, provider=None):
    """Search one provider (or all) for *term*; yield latin-1 encoded rows."""
    if provider:
        providers = [ProviderBase.create(provider)]
    else:
        # if provider is not specified search all providers
        providers = [ProviderBase.create(p)
                     for p in ProviderBase.list_providers()]
    for p in providers:
        for row in p.search(term):
            yield [field.encode('latin-1') for field in [p.name] + list(row)]
Search for a genome .
100
5
233,012
def install_genome(name, provider, version=None, genome_dir=None,
                   localname=None, mask="soft", regex=None,
                   invert_match=False, annotation=False):
    """Install genome *name* from *provider* into *genome_dir*.

    Falls back to the configured genome_dir; raises ConfigError when none
    is available.  Optionally downloads annotation, then runs all active
    plugins on the downloaded genome and regenerates the exports file.
    """
    if not genome_dir:
        genome_dir = config.get("genome_dir", None)
    if not genome_dir:
        raise norns.exceptions.ConfigError(
            "Please provide or configure a genome_dir")
    genome_dir = os.path.expanduser(genome_dir)
    localname = get_localname(name, localname)
    # Download genome from provider
    p = ProviderBase.create(provider)
    p.download_genome(
        name, genome_dir, version=version, mask=mask, localname=localname,
        regex=regex, invert_match=invert_match)
    if annotation:
        # Download annotation from provider
        p.download_annotation(name, genome_dir, localname=localname,
                              version=version)
    g = Genome(localname, genome_dir=genome_dir)
    # give plugins (indexers etc.) a chance to post-process the download
    for plugin in get_active_plugins():
        plugin.after_genome_download(g)
    generate_env()
Install a genome .
253
4
233,013
def generate_exports():
    """Return shell export commands, one per installed genome.

    Genomes that fail to load are skipped (best effort).
    """
    env = []
    for name in list_installed_genomes():
        try:
            g = Genome(name)
            env_name = re.sub(r'[^\w]+', "_", name).upper()
            env.append("export {}={}".format(env_name, g.filename))
        except Exception:
            # narrowed from a bare except: still best-effort, but no longer
            # swallows SystemExit/KeyboardInterrupt
            pass
    return env
Print export commands for setting environment variables .
85
8
233,014
def generate_env(fname=None):
    """Write export statements for installed genomes to *fname*.

    When *fname* is not given, writes to ``exports.txt`` in the genomepy
    user config directory, creating the directory if needed.
    """
    if fname is None:
        config_dir = user_config_dir("genomepy")
        if not os.path.exists(config_dir):
            # previously a missing config dir left fname as None and
            # open(None) raised TypeError; create the directory instead
            os.makedirs(config_dir)
        fname = os.path.join(config_dir, "exports.txt")
    with open(fname, "w") as fout:
        for env in generate_exports():
            fout.write("{}\n".format(env))
Generate file with exports .
98
6
233,015
def manage_plugins(command, plugin_names=None):
    """Enable, disable or list plugins and persist the result to the config.

    command: "enable", "disable" or "list".
    Raises ValueError on an unknown plugin or command.
    """
    if plugin_names is None:
        plugin_names = []
    active_plugins = config.get("plugin", [])
    plugins = init_plugins()
    if command == "enable":
        for name in plugin_names:
            if name not in plugins:
                raise ValueError("Unknown plugin: {}".format(name))
            if name not in active_plugins:
                active_plugins.append(name)
    elif command == "disable":
        for name in plugin_names:
            if name in active_plugins:
                active_plugins.remove(name)
    elif command == "list":
        print("{:20}{}".format("plugin", "enabled"))
        for plugin in sorted(plugins):
            # '*' marks an enabled plugin
            print("{:20}{}".format(
                plugin, {False: "", True: "*"}[plugin in active_plugins]))
    else:
        raise ValueError("Invalid plugin command")
    config["plugin"] = active_plugins
    config.save()
    if command in ["enable", "disable"]:
        print("Enabled plugins: {}".format(
            ", ".join(sorted(active_plugins))))
Enable or disable plugins .
263
5
233,016
def get_random_sequences(self, n=10, length=200, chroms=None, max_n=0.1):
    """Return coordinates of *n* random genomic sequences.

    Sequences are drawn from chromosomes weighted by their non-gap size;
    candidates with more than length*max_n Ns are retried up to 100 times.
    Returns a list of [chrom, start, end]; raises ValueError when no
    suitable sequence is found for a chromosome.
    """
    retries = 100
    cutoff = length * max_n
    if not chroms:
        chroms = self.keys()
    try:
        gap_sizes = self.gap_sizes()
    except Exception:
        # best effort: fall back to "no gap information"
        # (narrowed from a bare except)
        gap_sizes = {}
    # effective (non-gap) size per chromosome
    sizes = {chrom: len(self[chrom]) - gap_sizes.get(chrom, 0)
             for chrom in chroms}
    # only use chromosomes that are mostly non-gap and long enough
    weighted = [(sizes[chrom], chrom) for chrom in chroms
                if sizes[chrom] / len(self[chrom]) > 0.1
                and sizes[chrom] > 10 * length]
    chroms = _weighted_selection(weighted, n)
    coords = []
    # NOTE: the old code also built an unused per-chromosome counter here;
    # it was dead code and has been removed.
    for chrom in chroms:
        for _ in range(retries):
            start = int(random.random() * (sizes[chrom] - length))
            end = start + length
            count_n = self[chrom][start:end].seq.upper().count("N")
            if count_n <= cutoff:
                break
        if count_n > cutoff:
            raise ValueError(
                "Failed to find suitable non-N sequence for {}".format(chrom))
        coords.append([chrom, start, end])
    return coords
Return random genomic sequences .
324
5
233,017
def search(term, provider=None):
    """Print genomes whose name or description contains TERM."""
    for row in genomepy.search(term, provider):
        fields = [field.decode('utf-8', 'ignore') for field in row]
        print("\t".join(fields))
Search for genomes that contain TERM in their name or description .
53
13
233,018
def install(name, provider, genome_dir, localname, mask, regex, match,
            annotation):
    """Install genome NAME from provider PROVIDER in directory GENOME_DIR."""
    genomepy.install_genome(
        name,
        provider,
        genome_dir=genome_dir,
        localname=localname,
        mask=mask,
        regex=regex,
        invert_match=not match,
        annotation=annotation,
    )
Install genome NAME from provider PROVIDER in directory GENOME_DIR .
71
14
233,019
def generate_gap_bed(fname, outname):
    """Write a BED file listing every run of Ns (gap) in a FASTA file."""
    fasta = Fasta(fname)
    with open(outname, "w") as bed:
        for chrom in fasta.keys():
            sequence = fasta[chrom][:].seq
            for match in re.finditer(r'N+', sequence):
                bed.write("{}\t{}\t{}\n".format(
                    chrom, match.start(0), match.end(0)))
Generate a BED file with gap locations .
103
10
233,020
def generate_sizes(name, genome_dir):
    """Write a <name>.fa.sizes file with the length of every sequence."""
    fa = os.path.join(genome_dir, name, "{}.fa".format(name))
    genome = Fasta(fa)
    with open(fa + ".sizes", "w") as sizes_file:
        for seqname in genome.keys():
            sizes_file.write(
                "{}\t{}\n".format(seqname, len(genome[seqname])))
Generate a sizes file with length of sequences in FASTA file .
101
15
233,021
def filter_fasta(infa, outfa, regex=".*", v=False, force=False):
    """Filter sequences of a FASTA file by a regex on their names.

    v=True inverts the match; force=True overwrites an existing output
    (and its .fai index).  Returns a Fasta object for the output file.
    Raises ValueError for same in/out path, an existing output without
    force, or when no sequences survive the filter.
    """
    if infa == outfa:
        raise ValueError("Input and output FASTA are the same file.")
    if os.path.exists(outfa):
        if force:
            os.unlink(outfa)
            # also drop the stale faidx index
            if os.path.exists(outfa + ".fai"):
                os.unlink(outfa + ".fai")
        else:
            raise ValueError(
                "{} already exists, set force to True to overwrite".format(outfa))
    filt_function = re.compile(regex).search
    fa = Fasta(infa, filt_function=filt_function)
    seqs = fa.keys()
    if v:
        # inverted match: keep only sequences the filter excluded
        original_fa = Fasta(infa)
        seqs = [s for s in original_fa.keys() if s not in seqs]
        fa = original_fa
    if len(seqs) == 0:
        raise ValueError("No sequences left after filtering!")
    with open(outfa, "w") as out:
        for chrom in seqs:
            out.write(">{}\n".format(fa[chrom].name))
            out.write("{}\n".format(fa[chrom][:].seq))
    return Fasta(outfa)
Filter fasta file based on regex .
296
8
233,022
def cmd_ok(cmd):
    """Return True if *cmd* can be executed, False when it is not installed."""
    try:
        sp.check_call(cmd, stderr=sp.PIPE, stdout=sp.PIPE)
    except sp.CalledProcessError:
        # bwa gives return code of 1 with no argument
        pass
    except OSError:
        # narrowed from a bare except: only "command not found"-style
        # launch failures mean the tool is unavailable
        sys.stderr.write("{} not found, skipping\n".format(cmd))
        return False
    return True
Returns True if cmd can be run .
85
8
233,023
def run_index_cmd(name, cmd):
    """Run an index command through the shell; report errors on failure."""
    sys.stderr.write("Creating {} index...\n".format(name))
    # Create index
    p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        sys.stderr.write("Index for {} failed\n".format(name))
        # Popen without text mode returns bytes; decode before writing to
        # the text-mode sys.stderr (the old code raised TypeError on Py3)
        sys.stderr.write(stdout.decode(errors="replace"))
        sys.stderr.write(stderr.decode(errors="replace"))
Run a command; show errors if the return code is non-zero.
134
13
233,024
def scan_cgroups(subsys_name, filters=None):
    """Return the control group hierarchy belonging to *subsys_name*.

    *filters* are applied while collecting cgroups (see
    CGroup.apply_filters).

    Raises:
        NoSuchSubsystemError: unknown subsystem name.
        EnvironmentError: subsystem disabled in the kernel or not enabled.
    """
    if filters is None:
        # avoid a shared mutable default argument
        filters = []
    status = SubsystemStatus()
    if subsys_name not in status.get_all():
        raise NoSuchSubsystemError("No such subsystem found: " + subsys_name)
    if subsys_name not in status.get_available():
        raise EnvironmentError("Disabled in the kernel: " + subsys_name)
    if subsys_name not in status.get_enabled():
        raise EnvironmentError("Not enabled in the system: " + subsys_name)
    subsystem = _get_subsystem(subsys_name)
    mount_point = status.get_path(subsys_name)
    return _scan_cgroups_recursive(subsystem, mount_point, mount_point, filters)
Returns the control group hierarchy which belongs to subsys_name. When collecting cgroups, the filters are applied to them. See the pydoc of CGroup's apply_filters method for more information about the filters.
176
47
233,025
def walk_cgroups(cgroup, action, opaque):
    """Apply action(node, opaque) to *cgroup* and every descendant, pre-order."""
    pending = [cgroup]
    while pending:
        node = pending.pop(0)
        action(node, opaque)
        # visit children before later siblings (same order as recursion)
        pending = list(node.childs) + pending
The function applies the action function with the opaque object to each control group under the cgroup recursively .
41
22
233,026
def get_cgroup(fullpath):
    """Return a CGroup object for the cgroup directory at *fullpath*."""
    # Canonicalize symbolic links
    fullpath = os.path.realpath(fullpath)
    status = SubsystemStatus()
    matched = None
    for subsys_name, mount_path in status.paths.items():
        # NOTE(review): substring containment, as in the original logic
        if mount_path in fullpath:
            matched = subsys_name
            break
    if matched is None:
        raise Exception('Invalid path: ' + fullpath)
    return CGroup(_get_subsystem(matched), fullpath)
It returns a CGroup object which is pointed by the fullpath .
94
14
233,027
def parse(content):
    """Parse the contents of rdma.current / rdma.max into a dict.

    Returns {device_name: {"hca_handle": int, "hca_object": int or "max"}}.
    """
    ret = {}
    lines = content.split('\n')
    for line in lines:
        m = RdmaStat._RE.match(line)
        if m is None:
            continue
        name = m.group('name')
        # NOTE: ``long`` implies this code targets Python 2
        hca_handle = long(m.group('hca_handle'))
        hca_object = m.group('hca_object')
        # hca_object may be the literal string "max"
        if hca_object != "max":
            hca_object = long(hca_object)
        ret[name] = {"hca_handle": hca_handle, "hca_object": hca_object}
    return ret
Parse rdma.current and rdma.max
144
14
233,028
def apply_filters(self, filters):
    """Restrict self.configs and self.stats to the given filter names.

    Raises NoSuchControlFileError for a filter matching neither group.
    """
    old_configs, old_stats = self.configs, self.stats
    self.configs, self.stats = {}, {}
    for name in filters:
        if name in old_configs:
            self.configs[name] = old_configs[name]
        elif name in old_stats:
            self.stats[name] = old_stats[name]
        else:
            raise NoSuchControlFileError(
                "%s for %s" % (name, self.subsystem.name))
Applies the specified filters. The filters are used to reduce the control files which are accessed by the get_configs, get_stats, and get_defaults methods.
109
33
233,029
def get_configs(self):
    """Return {name: parsed current value} for files in the configs group."""
    configs = {}
    for name, default in self.configs.items():
        # the default value's class selects the parser for this file
        cls = default.__class__
        path = self.paths[name]
        if os.path.exists(path):
            try:
                configs[name] = self._PARSERS[cls](fileops.read(path))
            except IOError as e:
                if e.errno == errno.EOPNOTSUPP:
                    # Since 3.5 memory.memsw.* are always created even if disabled.
                    # If disabled we will get EOPNOTSUPP when read or write them.
                    # See commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5 of the kernel.
                    pass
                else:
                    raise
    return configs
It returns a name and a current value pairs of control files which are categorised in the configs group .
177
22
233,030
def get_stats(self):
    """Return {name: parsed value} for control files in the stats group.

    Expected, ignorable read errors:
    - EOPNOTSUPP: since kernel 3.5 memory.memsw.* files exist even when
      the feature is disabled and reads then fail with this errno
      (kernel commit af36f906c0f4c2ffa0482ecdf856a33dc88ae8c5).
    - EIO: memory.kmem.slabinfo raises EIO until limit_in_bytes is set.
    """
    stats = {}
    for name, cls in self.stats.items():
        path = self.paths[name]
        if not os.path.exists(path):
            continue
        try:
            stats[name] = self._PARSERS[cls](fileops.read(path))
        except IOError as e:
            if e.errno == errno.EOPNOTSUPP:
                # BUGFIX: the old code used two separate ``if`` statements,
                # so EOPNOTSUPP fell into the second statement's ``else``
                # and was re-raised instead of ignored.
                pass
            elif e.errno == errno.EIO:
                pass
            else:
                raise
    return stats
It returns a name and a value pairs of control files which are categorised in the stats group .
212
20
233,031
def update(self):
    """Refresh the cgroup's pid list and process count from cgroup.procs."""
    raw_lines = fileops.readlines(self.paths['cgroup.procs'])
    self.pids = [int(pid) for pid in raw_lines if pid != '']
    # NOTE(review): n_procs counts raw lines (possibly including an empty
    # trailing line), matching the original behavior — confirm intended
    self.n_procs = len(raw_lines)
It updates process information of the cgroup .
60
9
233,032
def wait(self):
    """Block until an event configured via set_threshold happens.

    Reads one 64-bit counter value from the eventfd and returns it as a
    1-tuple (struct.unpack result).
    """
    # 64 // 8: os.read requires an int size; the old ``64 / 8`` produced
    # a float under Python 3 and made os.read raise TypeError.
    data = os.read(self.event_fd, 64 // 8)
    return struct.unpack('Q', data)
It returns when an event which we have configured by set_threshold happens . Note that it blocks until then .
34
23
233,033
def dumpf(obj, path):
    """Serialize *obj* as nginx configuration into *path*; return *path*."""
    with open(path, 'w') as handle:
        dump(obj, handle)
    return path
Write an nginx configuration to file .
29
8
233,034
def as_strings(self):
    """Return the entire Conf as a list of nginx config strings."""
    rendered = []
    for child in self.children:
        if isinstance(child, (Key, Comment)):
            rendered.append(child.as_strings)
        else:
            rendered.extend(child.as_strings)
    if rendered:
        # collapse extra blank lines after the final closing brace
        rendered[-1] = re.sub('}\n+$', '}\n', rendered[-1])
    return rendered
Return the entire Conf as nginx config strings .
92
10
233,035
def as_list(self):
    """Return [name, value, children] with children as nested lists."""
    children = [child.as_list for child in self.children]
    return [self.name, self.value, children]
Return all child objects in nested lists of strings .
32
10
233,036
def as_dict(self):
    """Return the container and its children as a nested dict."""
    children = [child.as_dict for child in self.children]
    key = '{0} {1}'.format(self.name, self.value)
    return {key: children}
Return all child objects in nested dict .
49
8
233,037
def as_strings(self):
    """Return the entire Container as a list of nginx config strings."""
    ret = []
    # opening line: "<indent><name> [<value>] {"
    container_title = (INDENT * self._depth)
    container_title += '{0}{1} {{\n'.format(
        self.name, (' {0}'.format(self.value) if self.value else ''))
    ret.append(container_title)
    for x in self.children:
        if isinstance(x, Key):
            ret.append(INDENT + x.as_strings)
        elif isinstance(x, Comment):
            if x.inline and len(ret) >= 1:
                # append inline comments to the previous rendered line
                ret[-1] = ret[-1].rstrip('\n') + ' ' + x.as_strings
            else:
                ret.append(INDENT + x.as_strings)
        elif isinstance(x, Container):
            # nested containers get a blank line before and extra indent
            y = x.as_strings
            ret.append('\n' + y[0])
            for z in y[1:]:
                ret.append(INDENT + z)
        else:
            y = x.as_strings
            ret.append(INDENT + y)
    # collapse extra blank lines before our own closing brace
    ret[-1] = re.sub('}\n+$', '}\n', ret[-1])
    ret.append('}\n\n')
    return ret
Return the entire Container as nginx config strings .
278
10
233,038
def as_strings(self):
    """Render the key as an nginx config line, quoting the value if needed."""
    if self.value == '' or self.value is None:
        # bare directive without a value
        return '{0};\n'.format(self.name)
    needs_quotes = ('"' not in self.value
                    and (';' in self.value or '#' in self.value))
    if needs_quotes:
        return '{0} "{1}";\n'.format(self.name, self.value)
    return '{0} {1};\n'.format(self.name, self.value)
Return key as nginx config string .
108
8
233,039
def convert_aws_args(aws_args):
    """Translate legacy "aws" options into boto3.session.Session kwargs."""
    if not isinstance(aws_args, dict):
        raise errors.InvalidConfiguration(
            'Elastic DocManager config option "aws" must be a dict')
    # mapping of old option names to boto3 Session keyword names
    renames = {
        "region": "region_name",
        "access_id": "aws_access_key_id",
        "secret_key": "aws_secret_access_key",
    }
    return {renames.get(key, key): value for key, value in aws_args.items()}
Convert old style options into arguments to boto3 . session . Session .
164
16
233,040
def _index_and_mapping ( self , namespace ) : index , doc_type = namespace . split ( "." , 1 ) return index . lower ( ) , doc_type
Helper method for getting the index and type from a namespace .
39
12
233,041
def _stream_search(self, *args, **kwargs):
    """Yield the _source of every search hit, with the hit's _id injected."""
    query = kwargs.pop("body", None)
    for hit in scan(self.elastic, query=query, scroll="10m", **kwargs):
        source = hit["_source"]
        source["_id"] = hit["_id"]
        yield source
Helper method for iterating over ES search results .
81
10
233,042
def search(self, start_ts, end_ts):
    """Stream documents whose _ts lies within [start_ts, end_ts]."""
    range_query = {
        "query": {"range": {"_ts": {"gte": start_ts, "lte": end_ts}}}}
    return self._stream_search(index=self.meta_index_name, body=range_query)
Query Elasticsearch for documents in a time range .
73
10
233,043
def commit(self):
    """Send buffered requests, then refresh all indexes."""
    self.send_buffered_operations()
    # index="" refreshes every index; retried until Elasticsearch responds
    retry_until_ok(self.elastic.indices.refresh, index="")
Send buffered requests and refresh all indexes .
36
9
233,044
def add_upsert(self, action, meta_action, doc_source, update_spec):
    """Buffer an insert/update and decide where its source will come from."""
    # Whenever update_spec is provided to this method
    # it means that doc source needs to be retrieved
    # from Elasticsearch. It means also that source
    # is not stored in local buffer
    if update_spec:
        self.bulk_index(action, meta_action)

        # -1 -> to get latest index number
        # -1 -> to get action instead of meta_action
        # Update document based on source retrieved from ES
        self.add_doc_to_update(action, update_spec,
                               len(self.action_buffer) - 2)
    else:
        # Insert and update operations provide source
        # Store it in local buffer and use for coming updates
        # inside same buffer
        # add_to_sources will not be called for delete operation
        # as it does not provide doc_source
        if doc_source:
            self.add_to_sources(action, doc_source)
        self.bulk_index(action, meta_action)
Stores sources for insert actions and decides, for update actions, whether documents must be added to the source-retrieval buffer.
214
21
233,045
def add_doc_to_update(self, action, update_spec, action_buffer_index):
    """Queue a document for update, flagging whether its source must come from ES."""
    doc = {key: action[key] for key in ("_index", "_type", "_id")}
    # True -> the document's source has to be fetched from Elasticsearch
    fetch_from_es = self.should_get_id(action)
    self.doc_to_update.append(
        (doc, update_spec, action_buffer_index, fetch_from_es))
Prepare document for update based on Elasticsearch response . Set flag if document needs to be retrieved from Elasticsearch
124
22
233,046
def get_docs_sources_from_ES(self):
    """Fetch sources for ES-flagged documents via the mget API; return iterator."""
    docs = [doc for doc, _, _, fetch in self.doc_to_update if fetch]
    if not docs:
        return iter([])
    response = self.docman.elastic.mget(body={"docs": docs}, realtime=True)
    return iter(response["docs"])
Get document sources using MGET elasticsearch API
94
9
233,047
def update_sources(self):
    """Apply pending update specs, taking sources from ES or the local cache."""
    ES_documents = self.get_docs_sources_from_ES()
    for doc, update_spec, action_buffer_index, get_from_ES in self.doc_to_update:
        if get_from_ES:
            # Update source based on response from ES
            ES_doc = next(ES_documents)
            if ES_doc["found"]:
                source = ES_doc["_source"]
            else:
                # Document not found in elasticsearch,
                # Seems like something went wrong during replication
                LOG.error(
                    "mGET: Document id: %s has not been found "
                    "in Elasticsearch. Due to that "
                    "following update failed: %s",
                    doc["_id"],
                    update_spec,
                )
                self.reset_action(action_buffer_index)
                continue
        else:
            # Get source stored locally before applying update
            # as it is up-to-date
            source = self.get_from_sources(doc["_index"], doc["_type"], doc["_id"])
            if not source:
                LOG.error(
                    "mGET: Document id: %s has not been found "
                    "in local sources. Due to that following "
                    "update failed: %s",
                    doc["_id"],
                    update_spec,
                )
                self.reset_action(action_buffer_index)
                continue
        updated = self.docman.apply_update(source, update_spec)
        # Remove _id field from source
        if "_id" in updated:
            del updated["_id"]
        # Everytime update locally stored sources to keep them up-to-date
        self.add_to_sources(doc, updated)
        self.action_buffer[action_buffer_index][
            "_source"] = self.docman._formatter.format_document(updated)
    # Remove empty actions if there were errors
    self.action_buffer = [
        each_action for each_action in self.action_buffer if each_action]
Update local sources based on response from Elasticsearch
429
9
233,048
def add_to_sources(self, action, doc_source):
    """Cache *doc_source* locally under index -> type -> id."""
    by_type = self.sources.setdefault(action["_index"], {})
    by_id = by_type.setdefault(action["_type"], {})
    by_id[action["_id"]] = doc_source
Store sources locally
63
3
233,049
def get_from_sources(self, index, doc_type, document_id):
    """Return the locally cached source, or {} when not present."""
    by_type = self.sources.get(index, {})
    by_id = by_type.get(doc_type, {})
    return by_id.get(document_id, {})
Get source stored locally
53
4
233,050
def clean_up(self):
    """Reset all internal buffers before handing the buffer to the caller."""
    self.action_buffer = []
    self.sources = {}
    self.doc_to_get = {}
    self.doc_to_update = []
Do clean - up before returning buffer
42
7
233,051
def get_buffer(self):
    """Return the action buffer ready for bulking, resetting internal state."""
    # Get sources for documents which are in Elasticsearch
    # and not in the local buffer
    if self.doc_to_update:
        self.update_sources()
    pending = self.action_buffer
    self.clean_up()
    return pending
Get buffer which needs to be bulked to elasticsearch
63
11
233,052
def run(self):
    """Continuously scan for BLE advertisements until self.keep_going is cleared."""
    self.socket = self.bluez.hci_open_dev(self.bt_device_id)
    # accept all HCI events
    filtr = self.bluez.hci_filter_new()
    self.bluez.hci_filter_all_events(filtr)
    self.bluez.hci_filter_set_ptype(filtr, self.bluez.HCI_EVENT_PKT)
    self.socket.setsockopt(self.bluez.SOL_HCI, self.bluez.HCI_FILTER, filtr)
    self.set_scan_parameters()
    self.toggle_scan(True)
    while self.keep_going:
        pkt = self.socket.recv(255)
        event = to_int(pkt[1])
        subevent = to_int(pkt[3])
        if event == LE_META_EVENT and subevent == EVT_LE_ADVERTISING_REPORT:
            # we have a BLE advertisement
            self.process_packet(pkt)
    self.socket.close()
Continuously scan for BLE advertisements.
240
8
233,053
def set_scan_parameters(self, scan_type=ScanType.ACTIVE, interval_ms=10,
                        window_ms=10,
                        address_type=BluetoothAddressType.RANDOM,
                        filter_type=ScanFilter.ALL):
    """Set the BLE scan parameters via an HCI command.

    interval_ms and window_ms must lie within 2.5 ms .. 10240 ms.

    Raises:
        ValueError: when the interval or window is out of range.
    """
    interval_fractions = interval_ms / MS_FRACTION_DIVIDER
    if interval_fractions < 0x0004 or interval_fractions > 0x4000:
        # BUGFIX: report the user-supplied milliseconds, not the internal
        # fraction count, in the error message
        raise ValueError(
            "Invalid interval given {}, must be in range of 2.5ms to 10240ms!".format(
                interval_ms))
    window_fractions = window_ms / MS_FRACTION_DIVIDER
    if window_fractions < 0x0004 or window_fractions > 0x4000:
        raise ValueError(
            "Invalid window given {}, must be in range of 2.5ms to 10240ms!".format(
                window_ms))
    interval_fractions, window_fractions = \
        int(interval_fractions), int(window_fractions)
    scan_parameter_pkg = struct.pack(
        ">BHHBB", scan_type, interval_fractions, window_fractions,
        address_type, filter_type)
    self.bluez.hci_send_cmd(self.socket, OGF_LE_CTL,
                            OCF_LE_SET_SCAN_PARAMETERS, scan_parameter_pkg)
sets the le scan parameters
302
5
233,054
def toggle_scan(self, enable, filter_duplicates=False):
    """Enable or disable BLE scanning.

    Args:
        enable: True to start scanning, False to stop.
        filter_duplicates: ask the controller to suppress duplicate reports.
    """
    command = struct.pack(">BB", enable, filter_duplicates)
    self.bluez.hci_send_cmd(self.socket, OGF_LE_CTL, OCF_LE_SET_SCAN_ENABLE, command)
Enables or disables BLE scanning
76
8
233,055
def process_packet(self, pkt):
    """Parse *pkt* and invoke the callback when the configured filters match."""
    # check if this could be a valid packet before parsing
    # this reduces the CPU load significantly
    if not (
            ((self.mode & ScannerMode.MODE_IBEACON) and (pkt[19:23] == b"\x4c\x00\x02\x15")) or
            ((self.mode & ScannerMode.MODE_EDDYSTONE) and (pkt[19:21] == b"\xaa\xfe")) or
            ((self.mode & ScannerMode.MODE_ESTIMOTE) and (pkt[19:21] == b"\x9a\xfe"))):
        return

    bt_addr = bt_addr_to_string(pkt[7:13])
    rssi = bin_to_int(pkt[-1])
    # strip bluetooth address and parse packet
    packet = parse_packet(pkt[14:-1])

    # return if packet was not a beacon advertisement
    if not packet:
        return

    # we need to remember which eddystone beacon has which bt address
    # because the TLM and URL frames do not contain the namespace and instance
    self.save_bt_addr(packet, bt_addr)
    # properties holds the identifying information for a beacon
    # e.g. instance and namespace for eddystone; uuid, major, minor for iBeacon
    properties = self.get_properties(packet, bt_addr)

    if self.device_filter is None and self.packet_filter is None:
        # no filters selected
        self.callback(bt_addr, rssi, packet, properties)
    elif self.device_filter is None:
        # filter by packet type
        if is_one_of(packet, self.packet_filter):
            self.callback(bt_addr, rssi, packet, properties)
    else:
        # filter by device and packet type
        if self.packet_filter and not is_one_of(packet, self.packet_filter):
            # return if packet filter does not match
            return
        # iterate over filters and call .matches() on each
        for filtr in self.device_filter:
            if isinstance(filtr, BtAddrFilter):
                if filtr.matches({'bt_addr': bt_addr}):
                    self.callback(bt_addr, rssi, packet, properties)
                    return
            elif filtr.matches(properties):
                self.callback(bt_addr, rssi, packet, properties)
                return
Parse the packet and call callback if one of the filters matches .
564
14
233,056
def save_bt_addr(self, packet, bt_addr):
    """Remember which Eddystone beacon (UID frame) uses which BT address."""
    if not isinstance(packet, EddystoneUIDFrame):
        return
    # drop any stale mapping for this address, then record the fresh one
    mappings = [m for m in self.eddystone_mappings if m[0] != bt_addr]
    mappings.append((bt_addr, packet.properties))
    self.eddystone_mappings = mappings
Add to the list of mappings .
95
8
233,057
def get_properties(self, packet, bt_addr):
    """Return the identifying properties of a beacon, depending on its type."""
    if is_one_of(packet, [EddystoneTLMFrame, EddystoneURLFrame,
                          EddystoneEncryptedTLMFrame, EddystoneEIDFrame]):
        # here we retrieve the namespace and instance which corresponds to the
        # eddystone beacon with this bt address
        return self.properties_from_mapping(bt_addr)
    else:
        return packet.properties
Get properties of beacon depending on type .
99
8
233,058
def terminate(self):
    """Signal the runner to stop scanning and wait for the thread to finish."""
    self.toggle_scan(False)
    self.keep_going = False
    # block until run() has exited
    self.join()
Signal runner to stop and join thread .
26
9
233,059
def data_to_uuid(data):
    """Format binary beacon data as an iBeacon UUID string (8-4-4-4-12)."""
    hexstr = data_to_hexstring(data)
    groups = (hexstr[0:8], hexstr[8:12], hexstr[12:16],
              hexstr[16:20], hexstr[20:32])
    return '-'.join(groups)
Convert an array of binary data to the iBeacon uuid format .
69
16
233,060
def bt_addr_to_string(addr):
    """Convert a little-endian binary BT address to a colon-separated hex string."""
    addr_bytes = array.array('B', addr)
    addr_bytes.reverse()
    # array.tostring() was removed in Python 3.9; tobytes() is the replacement
    hex_str = hexlify(addr_bytes.tobytes()).decode('ascii')
    # insert ":" separator between the bytes
    return ':'.join(a + b for a, b in zip(hex_str[::2], hex_str[1::2]))
Convert a binary string to the hex representation .
106
10
233,061
def is_one_of(obj, types):
    """Return True iff *obj* is an instance of at least one of *types*."""
    return any(isinstance(obj, type_) for type_ in types)
Return true iff obj is an instance of one of the types .
32
14
233,062
def is_packet_type(cls):
    """Return True if *cls* is one of the known beacon packet classes."""
    from .packet_types import (
        EddystoneUIDFrame, EddystoneURLFrame, EddystoneEncryptedTLMFrame,
        EddystoneTLMFrame, EddystoneEIDFrame, IBeaconAdvertisement,
        EstimoteTelemetryFrameA, EstimoteTelemetryFrameB)
    known = (EddystoneURLFrame, EddystoneUIDFrame, EddystoneEncryptedTLMFrame,
             EddystoneTLMFrame, EddystoneEIDFrame, IBeaconAdvertisement,
             EstimoteTelemetryFrameA, EstimoteTelemetryFrameB)
    return cls in known
Check if class is one the packet types .
136
9
233,063
def bin_to_int(string):
    """Convert one byte (str on Python 2, int on Python 3) to a signed int."""
    if isinstance(string, str):
        # Python 2 path: a one-character byte string
        return struct.unpack("b", string)[0]
    # Python 3 path: indexing bytes yields an int
    return struct.unpack("b", bytes([string]))[0]
Convert a one element byte string to signed int for python 2 support .
56
15
233,064
def get_mode(device_filter):
    """Determine which beacon families the scanner should look for.

    With no filters every mode is enabled; a BtAddrFilter forces
    MODE_ALL since any packet type may match an address.
    """
    from .device_filters import IBeaconFilter, EddystoneFilter, \
        BtAddrFilter, EstimoteFilter
    if device_filter is None or len(device_filter) == 0:
        return ScannerMode.MODE_ALL
    mode = ScannerMode.MODE_NONE
    for flt in device_filter:
        # isinstance checks kept in the original priority order
        if isinstance(flt, IBeaconFilter):
            mode |= ScannerMode.MODE_IBEACON
        elif isinstance(flt, EddystoneFilter):
            mode |= ScannerMode.MODE_EDDYSTONE
        elif isinstance(flt, EstimoteFilter):
            mode |= ScannerMode.MODE_ESTIMOTE
        elif isinstance(flt, BtAddrFilter):
            mode |= ScannerMode.MODE_ALL
            break
    return mode
Determine which beacons the scanner should look for .
193
12
233,065
def matches(self, filter_props):
    """Check whether the supplied filter properties match this filter.

    Returns False for a None filter, for any disagreeing value, or
    when no supplied key matched at all; keys absent from
    ``self.properties`` are ignored.
    """
    if filter_props is None:
        return False
    matched_any = False
    for key, expected in filter_props.items():
        if key not in self.properties:
            continue
        if self.properties[key] != expected:
            return False
        matched_any = True
    return matched_any
Check if the filter matches the supplied properties .
84
9
233,066
def parse_packet(packet):
    """Parse a beacon advertisement packet, trying the LTV format first."""
    frame = parse_ltv_packet(packet)
    if frame is not None:
        return frame
    # fall back to the fixed-layout iBeacon format
    return parse_ibeacon_packet(packet)
Parse a beacon advertisement packet .
40
7
233,067
def parse_ltv_packet(packet):
    """Parse a tag-length-value style beacon packet.

    Returns the frame object for the first recognised Eddystone or
    Estimote service-data entry, or None when parsing fails or no
    known service is present.
    """
    try:
        for ltv in LTVFrame.parse(packet):
            if ltv['type'] != SERVICE_DATA_TYPE:
                continue
            data = ltv['value']
            if data["service_identifier"] == EDDYSTONE_UUID:
                return parse_eddystone_service_data(data)
            if data["service_identifier"] == ESTIMOTE_UUID:
                return parse_estimote_service_data(data)
    except ConstructError:
        # malformed packet: report "not an LTV beacon packet"
        return None
    return None
Parse a tag - length - value style beacon packet .
126
12
233,068
def parse_eddystone_service_data(data):
    """Build the matching Eddystone frame object from parsed service data.

    Returns None for unknown frame types or unknown TLM versions.
    """
    frame_type = data['frame_type']
    frame = data['frame']
    if frame_type == EDDYSTONE_UID_FRAME:
        return EddystoneUIDFrame(frame)
    if frame_type == EDDYSTONE_TLM_FRAME:
        version = frame['tlm_version']
        if version == EDDYSTONE_TLM_ENCRYPTED:
            return EddystoneEncryptedTLMFrame(frame['data'])
        if version == EDDYSTONE_TLM_UNENCRYPTED:
            return EddystoneTLMFrame(frame['data'])
        return None
    if frame_type == EDDYSTONE_URL_FRAME:
        return EddystoneURLFrame(frame)
    if frame_type == EDDYSTONE_EID_FRAME:
        return EddystoneEIDFrame(frame)
    return None
Parse Eddystone service data .
255
8
233,069
def parse_estimote_service_data(data):
    """Build an Estimote telemetry frame object from parsed service data.

    The low nibble of ``frame_type`` selects the telemetry frame and the
    high nibble carries the protocol version.
    """
    if data['frame_type'] & 0x0F != ESTIMOTE_TELEMETRY_FRAME:
        return None
    protocol_version = (data['frame_type'] & 0xF0) >> 4
    subframe = data['frame']['subframe_type']
    if subframe == ESTIMOTE_TELEMETRY_SUBFRAME_A:
        return EstimoteTelemetryFrameA(data['frame'], protocol_version)
    if subframe == ESTIMOTE_TELEMETRY_SUBFRAME_B:
        return EstimoteTelemetryFrameB(data['frame'], protocol_version)
    return None
Parse Estimote service data .
168
8
233,070
def parse_motion_state(val):
    """Convert a motion-state byte to a duration in seconds.

    Bits 0-5 hold the count; bits 6-7 select the unit: 0 = seconds,
    1 = minutes, 2 = hours, 3 = days (count < 32) or weeks
    (count - 32 otherwise).
    """
    count = val & 0x3F
    unit = (val & 0xC0) >> 6
    if unit == 1:
        return count * 60
    if unit == 2:
        return count * 60 * 60
    if unit == 3:
        if count < 32:
            return count * 60 * 60 * 24
        return (count - 32) * 60 * 60 * 24 * 7
    return count
Convert motion state byte to seconds .
99
8
233,071
def monkey_patch(cls):
    """Install ``cls`` as the zbarlight C extension when building on Read The Docs."""
    if os.environ.get('READTHEDOCS', False):
        # RTD cannot build the real C extension, substitute the stub
        sys.modules['zbarlight._zbarlight'] = cls
Monkey patch zbarlight C extension on Read The Docs
60
13
233,072
def set_pixel(framebuf, x, y, color):
    """Set the pixel at (x, y) on or off.

    Layout packs 8 vertical pixels per byte: byte index is
    (y // 8) * stride + x, bit index is y % 8.
    """
    byte_index = (y >> 3) * framebuf.stride + x
    bit = y & 0x07
    cleared = framebuf.buf[byte_index] & ~(0x01 << bit)
    framebuf.buf[byte_index] = cleared | ((color != 0) << bit)
Set a given pixel to a color .
72
8
233,073
def get_pixel(framebuf, x, y):
    """Return 0 or 1 for the pixel at (x, y) (8 vertical pixels per byte)."""
    byte_index = (y >> 3) * framebuf.stride + x
    return (framebuf.buf[byte_index] >> (y & 0x07)) & 0x01
Get the color of a given pixel
50
7
233,074
def pixel(self, x, y, color=None):
    """Get or set a pixel, honouring the display rotation.

    With ``color`` None the pixel's value is returned; otherwise the
    pixel is set to ``color``. Out-of-range coordinates are ignored
    (return None without touching the buffer).
    """
    # map logical (x, y) into physical framebuffer coordinates
    if self.rotation == 1:
        x, y = y, x
        x = self.width - x - 1
    elif self.rotation == 2:
        x = self.width - x - 1
        y = self.height - y - 1
    elif self.rotation == 3:
        x, y = y, x
        y = self.height - y - 1
    if not (0 <= x < self.width and 0 <= y < self.height):
        return None
    if color is None:
        return self.format.get_pixel(self, x, y)
    self.format.set_pixel(self, x, y, color)
    return None
If color is not given get the color value of the specified pixel . If color is given set the specified pixel to the given color .
146
27
233,075
def hline(self, x, y, width, color):
    """Draw a horizontal line of the given length.

    Implemented as a 1-pixel-high filled rectangle.
    """
    self.rect(x, y, width, 1, color, fill=True)
Draw a horizontal line up to a given length .
33
10
233,076
def vline(self, x, y, height, color):
    """Draw a vertical line of the given length.

    Implemented as a 1-pixel-wide filled rectangle.
    """
    self.rect(x, y, 1, height, color, fill=True)
Draw a vertical line up to a given length .
33
10
233,077
def rect(self, x, y, width, height, color, *, fill=False):
    """Draw a rectangle at the given location, size and color.

    Rotation is applied first, the rectangle is then clipped to the
    framebuffer. With ``fill`` False only a 1-pixel outline is drawn
    (four filled strips: top, left, bottom, right).
    """
    # pylint: disable=too-many-arguments
    # map logical coordinates into physical framebuffer coordinates
    if self.rotation == 1:
        x, y = y, x
        width, height = height, width
        x = self.width - x - width
    elif self.rotation == 2:
        x = self.width - x - width
        y = self.height - y - height
    elif self.rotation == 3:
        x, y = y, x
        width, height = height, width
        y = self.height - y - height
    # reject empty or fully off-screen rectangles
    # pylint: disable=too-many-boolean-expressions
    if (width < 1 or height < 1 or (x + width) <= 0 or (y + height) <= 0
            or y >= self.height or x >= self.width):
        return
    # clip to the framebuffer bounds
    x_end = min(self.width - 1, x + width - 1)
    y_end = min(self.height - 1, y + height - 1)
    x = max(x, 0)
    y = max(y, 0)
    if fill:
        self.format.fill_rect(self, x, y, x_end - x + 1, y_end - y + 1, color)
    else:
        self.format.fill_rect(self, x, y, x_end - x + 1, 1, color)      # top
        self.format.fill_rect(self, x, y, 1, y_end - y + 1, color)      # left
        self.format.fill_rect(self, x, y_end, x_end - x + 1, 1, color)  # bottom
        self.format.fill_rect(self, x_end, y, 1, y_end - y + 1, color)  # right
Draw a rectangle at the given location size and color . The rect method draws only a 1 pixel outline .
371
21
233,078
def line(self, x_0, y_0, x_1, y_1, color):
    """Draw a line from (x_0, y_0) to (x_1, y_1) with Bresenham's algorithm."""
    # pylint: disable=too-many-arguments
    dx = abs(x_1 - x_0)
    dy = abs(y_1 - y_0)
    step_x = 1 if x_0 <= x_1 else -1
    step_y = 1 if y_0 <= y_1 else -1
    x, y = x_0, y_0
    if dx > dy:
        # x is the driving axis
        error = dx / 2.0
        while x != x_1:
            self.pixel(x, y, color)
            error -= dy
            if error < 0:
                y += step_y
                error += dx
            x += step_x
    else:
        # y is the driving axis
        error = dy / 2.0
        while y != y_1:
            self.pixel(x, y, color)
            error -= dx
            if error < 0:
                x += step_x
                error += dy
            y += step_y
    # draw the final endpoint
    self.pixel(x, y, color)
Bresenham s line algorithm
231
7
233,079
def scroll(self, delta_x, delta_y):
    """Shift the framebuffer contents by (delta_x, delta_y) pixels.

    Pixels are traversed in the direction opposite the shift so each
    source pixel is read before being overwritten; the vacated edge
    keeps its previous contents.
    """
    if delta_x < 0:
        x_start, x_stop, x_step = 0, self.width + delta_x, 1
    else:
        x_start, x_stop, x_step = self.width - 1, delta_x - 1, -1
    if delta_y < 0:
        y, y_stop, y_step = 0, self.height + delta_y, 1
    else:
        y, y_stop, y_step = self.height - 1, delta_y - 1, -1
    while y != y_stop:
        x = x_start
        while x != x_stop:
            self.format.set_pixel(
                self, x, y,
                self.format.get_pixel(self, x - delta_x, y - delta_y))
            x += x_step
        y += y_step
shifts framebuf in x and y direction
184
9
233,080
def text(self, string, x, y, color, *, font_name="font5x8.bin"):
    """Draw ``string`` at (x, y), one character at a time with the bitmap font."""
    if not self._font or self._font.font_name != font_name:
        # load the font!
        # NOTE(review): BitmapFont() is constructed without ``font_name``,
        # so requesting a non-default font may never actually load it —
        # confirm against the BitmapFont constructor.
        self._font = BitmapFont()
    char_width = self._font.font_width
    for index, char in enumerate(string):
        # one pixel of spacing between glyphs
        self._font.draw_char(char, x + index * (char_width + 1), y, self, color)
text is not yet implemented
114
5
233,081
def parse_auth_token_from_request(self, auth_header):
    """Parse and return the auth token from the Authorization header.

    Raises falcon.HTTPUnauthorized with a descriptive message when the
    header is missing, uses the wrong prefix, or is malformed.
    """
    if not auth_header:
        raise falcon.HTTPUnauthorized(
            description='Missing Authorization Header')
    parts = auth_header.split()
    if parts[0].lower() != self.auth_header_prefix.lower():
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: '
                        'Must start with {0}'.format(self.auth_header_prefix))
    if len(parts) == 1:
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: Token Missing')
    if len(parts) > 2:
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: Contains extra content')
    return parts[1]
Parses and returns Auth token from the request header . Raises falcon . HTTPUnauthorized exception with proper error message
181
28
233,082
def authenticate(self, req, resp, resource):
    """Decode the request's JWT and return the matching user object.

    Raises falcon.HTTPUnauthorized when no user matches the decoded
    payload.
    """
    payload = self._decode_jwt_token(req)
    user = self.user_loader(payload)
    if user:
        return user
    raise falcon.HTTPUnauthorized(description='Invalid JWT Credentials')
Extract auth token from request authorization header decode jwt token verify configured claims and return either a user object if successful else raise an falcon . HTTPUnauthorized exception
66
36
233,083
def get_auth_token(self, user_payload):
    """Create a signed JWT carrying ``user_payload`` plus the configured claims."""
    now = datetime.utcnow()
    payload = {'user': user_payload}
    # only claims listed in verify_claims are stamped into the token
    if 'iat' in self.verify_claims:
        payload['iat'] = now
    if 'nbf' in self.verify_claims:
        # NOTE(review): nbf is set *after* now by ``leeway`` — confirm this
        # matches the intended not-before semantics.
        payload['nbf'] = now + self.leeway
    if 'exp' in self.verify_claims:
        payload['exp'] = now + self.expiration_delta
    if self.audience is not None:
        payload['aud'] = self.audience
    if self.issuer is not None:
        payload['iss'] = self.issuer
    token = jwt.encode(payload, self.secret_key, algorithm=self.algorithm,
                       json_encoder=ExtendedJSONEncoder)
    return token.decode('utf-8')
Create a JWT authentication token from user_payload
186
11
233,084
def get_auth_token(self, user_payload):
    """Build the "<prefix> <token>" Authorization value from ``user_payload``.

    Raises ValueError when the payload does not carry an api token.
    """
    token = user_payload.get('token') or None
    if not token:
        raise ValueError('`user_payload` must provide api token')
    return '{auth_header_prefix} {token}'.format(
        auth_header_prefix=self.auth_header_prefix, token=token)
Extracts token from the user_payload
85
10
233,085
def parse_auth_token_from_request(self, auth_header):
    """Validate the Hawk Authorization header and return it unchanged.

    Raises falcon.HTTPUnauthorized when the header is missing, lacks a
    scheme/parameter split, or uses a scheme other than the configured
    prefix.
    """
    if not auth_header:
        raise falcon.HTTPUnauthorized(
            description='Missing Authorization Header')
    try:
        scheme, _ = auth_header.split(' ', 1)
    except ValueError:
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: Missing Scheme or Parameters')
    if scheme.lower() != self.auth_header_prefix.lower():
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: '
                        'Must start with {0}'.format(self.auth_header_prefix))
    return auth_header
Parses and returns the Hawk Authorization header if it is present and well - formed . Raises falcon . HTTPUnauthorized exception with proper error message
158
34
233,086
def _apply_base_theme(app):
    """Apply the base widget style and shared stylesheet to the application."""
    # Qt4 has no 'Fusion' style, fall back to 'plastique'
    style = 'plastique' if QT_VERSION < (5,) else 'Fusion'
    app.setStyle(style)
    with open(_STYLESHEET) as stylesheet:
        app.setStyleSheet(stylesheet.read())
Apply base theme to the application .
73
7
233,087
def dark(app):
    """Apply the dark theme to the Qt application instance."""
    _apply_base_theme(app)
    palette = QPalette()
    # base colour roles (order preserved from the original assignments)
    base_roles = [
        (QPalette.WindowText, (180, 180, 180)),
        (QPalette.Button, (53, 53, 53)),
        (QPalette.Light, (180, 180, 180)),
        (QPalette.Midlight, (90, 90, 90)),
        (QPalette.Dark, (35, 35, 35)),
        (QPalette.Text, (180, 180, 180)),
        (QPalette.BrightText, (180, 180, 180)),
        (QPalette.ButtonText, (180, 180, 180)),
        (QPalette.Base, (42, 42, 42)),
        (QPalette.Window, (53, 53, 53)),
        (QPalette.Shadow, (20, 20, 20)),
        (QPalette.Highlight, (42, 130, 218)),
        (QPalette.HighlightedText, (180, 180, 180)),
        (QPalette.Link, (56, 252, 196)),
        (QPalette.AlternateBase, (66, 66, 66)),
        (QPalette.ToolTipBase, (53, 53, 53)),
        (QPalette.ToolTipText, (180, 180, 180)),
    ]
    for role, rgb in base_roles:
        palette.setColor(role, QColor(*rgb))
    # colours used when a widget is disabled
    disabled_roles = [
        (QPalette.WindowText, (127, 127, 127)),
        (QPalette.Text, (127, 127, 127)),
        (QPalette.ButtonText, (127, 127, 127)),
        (QPalette.Highlight, (80, 80, 80)),
        (QPalette.HighlightedText, (127, 127, 127)),
    ]
    for role, rgb in disabled_roles:
        palette.setColor(QPalette.Disabled, role, QColor(*rgb))
    app.setPalette(palette)
Apply Dark Theme to the Qt application instance .
593
9
233,088
def inheritance_diagram_directive(name, arguments, options, content, lineno,
                                  content_offset, block_text, state,
                                  state_machine):
    """Run when the inheritance_diagram directive is first encountered."""
    node = inheritance_diagram()
    class_names = arguments
    # Build the inheritance graph starting from the requested classes.
    graph = InheritanceGraph(class_names)
    # Create xref nodes for each target of the graph's image map and
    # attach them to the doctree so Sphinx can resolve the references
    # to real URLs later; they are removed again once the image map is
    # generated. (Loop variable renamed so it no longer shadows the
    # ``name`` parameter.)
    for class_name in graph.get_all_class_names():
        refnodes, _ = xfileref_role(
            'class', ':class:`%s`' % class_name, class_name, 0, state)
        node.extend(refnodes)
    # Keep the graph for dot-file generation, and the raw content to
    # use as a hash key.
    node['graph'] = graph
    node['parts'] = options.get('parts', 0)
    node['content'] = " ".join(class_names)
    return [node]
Run when the inheritance_diagram directive is first encountered .
251
12
233,089
def run_dot(self, args, name, parts=0, urls=None,
            graph_options=None, node_options=None, edge_options=None):
    """Run graphviz ``dot`` over this graph, returning its stdout bytes.

    Raises DotException when dot cannot be executed, receives invalid
    arguments, fails unexpectedly, or exits with a non-zero code.
    """
    # Mutable default arguments ({}) replaced with None sentinels;
    # missing mappings still behave as empty dicts.
    urls = {} if urls is None else urls
    graph_options = {} if graph_options is None else graph_options
    node_options = {} if node_options is None else node_options
    edge_options = {} if edge_options is None else edge_options
    try:
        dot = subprocess.Popen(['dot'] + list(args),
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               close_fds=True)
    except OSError:
        raise DotException(
            "Could not execute 'dot'. Are you sure you have 'graphviz' installed?")
    except ValueError:
        raise DotException("'dot' called with invalid arguments")
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        raise DotException("Unexpected error calling 'dot'")
    self.generate_dot(dot.stdin, name, parts, urls,
                      graph_options, node_options, edge_options)
    dot.stdin.close()
    result = dot.stdout.read()
    returncode = dot.wait()
    if returncode != 0:
        raise DotException("'dot' returned the errorcode %d" % returncode)
    return result
Run graphviz dot over this graph returning whatever dot writes to stdout .
235
16
233,090
def parse_md_to_rst(file):
    """Read a Markdown file and convert it to ReStructuredText.

    Falls back to returning the raw file contents when m2r is not
    installed in the user's environment.
    """
    try:
        from m2r import parse_from_file
        return parse_from_file(file).replace(
            "artwork/", "http://198.27.119.65/")
    except ImportError:
        # m2r may not be installed in user environment
        return read(file)
Read Markdown file and convert to ReStructured Text .
74
12
233,091
def deleted_count(self):
    """The number of documents deleted.

    List results report their length; any other raw result is returned
    as-is.
    """
    raw = self.raw_result
    return len(raw) if isinstance(raw, list) else raw
The number of documents deleted .
38
6
233,092
def generate_id():
    """Generate a new UUID as a 32-char hex string (py2/py3 compatible)."""
    # TODO: Use six.string_type to Py3 compat
    try:
        # Python 2: produce a unicode string
        return unicode(uuid1()).replace(u"-", u"")
    except NameError:
        # Python 3: ``unicode`` is gone, ``str`` is already unicode
        return str(uuid1()).replace(u"-", u"")
Generate new UUID
66
5
233,093
def insert(self, docs, *args, **kwargs):
    """Backwards-compatible insert: lists go to insert_many, else insert_one."""
    handler = self.insert_many if isinstance(docs, list) else self.insert_one
    return handler(docs, *args, **kwargs)
Backwards compatibility with insert
64
5
233,094
def update(self, query, doc, *args, **kwargs):
    """Backwards-compatible update.

    A list of docs is applied item by item via update_one and the
    results collected; a single doc is forwarded directly.
    """
    if not isinstance(doc, list):
        return self.update_one(query, doc, *args, **kwargs)
    return [self.update_one(query, item, *args, **kwargs) for item in doc]
Backwards compatibility with update
76
6
233,095
def update_one(self, query, doc):
    """Update the first matching element of the collection.

    A pymongo-style ``$set`` wrapper is unwrapped before applying the
    update. Failures from the underlying table are swallowed and
    reported as a None raw result (see TODO).
    """
    if self.table is None:
        self.build_table()
    if u"$set" in doc:
        doc = doc[u"$set"]
    allcond = self.parse_query(query)
    try:
        result = self.table.update(doc, allcond)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        # TODO: check table.update result
        # check what pymongo does in that case
        result = None
    return UpdateResult(raw_result=result)
Updates one element of the collection
105
7
233,096
def find(self, filter=None, sort=None, skip=None, limit=None,
         *args, **kwargs):
    """Find all documents matching ``filter`` and wrap them in a cursor.

    With no filter every document is returned; search failures from the
    underlying table yield an empty result set.
    """
    if self.table is None:
        self.build_table()
    if filter is None:
        found = self.table.all()
    else:
        # parse_query errors intentionally propagate to the caller
        allcond = self.parse_query(filter)
        try:
            found = self.table.search(allcond)
        except (AttributeError, TypeError):
            found = []
    return TinyMongoCursor(found, sort=sort, skip=skip, limit=limit)
Finds all matching results
123
5
233,097
def find_one(self, filter=None):
    """Return the first document matching ``filter`` from the table."""
    if self.table is None:
        self.build_table()
    return self.table.get(self.parse_query(filter))
Finds one matching query element
47
6
233,098
def remove(self, spec_or_id, multi=True, *args, **kwargs):
    """Backwards-compatible remove: delete many by default, else one."""
    delete = self.delete_many if multi else self.delete_one
    return delete(spec_or_id)
Backwards compatibility with remove
54
5
233,099
def delete_one(self, query):
    """Delete the first document matching ``query`` from the collection.

    Returns a DeleteResult whose deleted count is 0 when nothing
    matched; previously a non-matching query raised TypeError because
    ``find_one`` returned None and was then subscripted.
    """
    item = self.find_one(query)
    if item is None:
        # no matching document: report zero deletions instead of crashing
        return DeleteResult(raw_result=[])
    result = self.table.remove(where(u'_id') == item[u'_id'])
    return DeleteResult(raw_result=result)
Deletes one document from the collection
56
7