Dataset schema: idx (int64, 0–63k) · question (string, lengths 61–4.03k) · target (string, lengths 6–1.23k)
23,200
def import_attribute(path):
    module_name, attribute_name = path.rsplit('.', 1)
    module = import_module(module_name)
    attribute = getattr(module, attribute_name)
    return attribute
Import an attribute by dotted path, e.g. package.module.attribute.
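A minimal usage sketch (the dotted path is illustrative; import_attribute assumes `from importlib import import_module` is in scope):

    join = import_attribute('os.path.join')  # getattr(import_module('os.path'), 'join')
    print(join('a', 'b'))                    # -> 'a/b' on POSIX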
23,201
def extract_options(options, names):
    result = {}
    for name, value in copy(options).items():
        if name in names:
            result[name] = value
            del options[name]
    return result
Return the options matching names, removing them from the given options in-place.
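A brief usage sketch (keys and values are illustrative; assumes `from copy import copy`):

    options = {'delimiter': ';', 'encoding': 'utf-8'}
    writer_options = extract_options(options, ['delimiter'])
    # writer_options == {'delimiter': ';'}; options is now {'encoding': 'utf-8'}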
23,202
def stringify_value(value):
    if value is None:
        return u''
    isoformat = getattr(value, 'isoformat', None)
    if isoformat is not None:
        value = isoformat()
    return type(u'')(value)
Convert any value to a string.
23,203
def reset(self):
    if self.__row_number > self.__sample_size:
        self.__parser.reset()
        self.__extract_sample()
        self.__extract_headers()
    self.__row_number = 0
Resets the stream pointer to the beginning of the file.
23,204
def sample(self):
    sample = []
    iterator = iter(self.__sample_extended_rows)
    iterator = self.__apply_processors(iterator)
    for row_number, headers, row in iterator:
        sample.append(row)
    return sample
Returns the stream's rows used as the sample.
23,205
def iter(self, keyed=False, extended=False):
    if self.closed:
        message = 'Stream is closed. Please call "stream.open()" first.'
        raise exceptions.TabulatorException(message)
    iterator = chain(self.__sample_extended_rows, self.__parser.extended_rows)
    iterator = self.__apply_processors(iterator)
    for row_number, headers, row in iterator:
        if row_number > self.__row_number:
            self.__row_number = row_number
        if extended:
            yield (row_number, headers, row)
        elif keyed:
            yield dict(zip(headers, row))
        else:
            yield row
Iterate over the rows.
23,206
def save(self, target, format=None, encoding=None, **options):
    if encoding is None:
        encoding = config.DEFAULT_ENCODING
    if format is None:
        _, format = helpers.detect_scheme_and_format(target)
    writer_class = self.__custom_writers.get(format)
    if writer_class is None:
        if format not in config.WRITERS:
            message = 'Format "%s" is not supported' % format
            raise exceptions.FormatError(message)
        writer_class = helpers.import_attribute(config.WRITERS[format])
    writer_options = helpers.extract_options(options, writer_class.options)
    if options:
        message = 'Not supported options "%s" for format "%s"'
        message = message % (', '.join(options), format)
        raise exceptions.TabulatorException(message)
    writer = writer_class(**writer_options)
    writer.write(self.iter(), target, headers=self.headers, encoding=encoding)
Save the stream to the local filesystem.
23,207
def validate(source, scheme=None, format=None):
    detected_scheme, detected_format = helpers.detect_scheme_and_format(source)
    scheme = scheme or detected_scheme
    format = format or detected_format
    if scheme is not None:
        if scheme not in config.LOADERS:
            raise exceptions.SchemeError('Scheme "%s" is not supported' % scheme)
    if format not in config.PARSERS:
        raise exceptions.FormatError('Format "%s" is not supported' % format)
    return True
Check if tabulator is able to load the source.
23,208
def require_axis(f):
    @wraps(f)
    def _wrapper(self, *args, **kwargs):
        if None in (self.axis, self.sel_axis):
            # '%(func_name)s' needs the trailing 's' conversion; without it the
            # format string is invalid and raises at interpolation time
            raise ValueError('%(func_name)s requires the node %(node)s '
                             'to have an axis and a sel_axis function' %
                             dict(func_name=f.__name__, node=repr(self)))
        return f(self, *args, **kwargs)
    return _wrapper
Check that the function's object has axis and sel_axis members.
23,209
def create(point_list=None, dimensions=None, axis=0, sel_axis=None):
    if not point_list and not dimensions:
        raise ValueError('either point_list or dimensions must be provided')
    elif point_list:
        dimensions = check_dimensionality(point_list, dimensions)
    sel_axis = sel_axis or (lambda prev_axis: (prev_axis + 1) % dimensions)
    if not point_list:
        return KDNode(sel_axis=sel_axis, axis=axis, dimensions=dimensions)
    point_list = list(point_list)
    point_list.sort(key=lambda point: point[axis])
    median = len(point_list) // 2
    loc = point_list[median]
    left = create(point_list[:median], dimensions, sel_axis(axis))
    right = create(point_list[median + 1:], dimensions, sel_axis(axis))
    return KDNode(loc, left, right, axis=axis, sel_axis=sel_axis, dimensions=dimensions)
Creates a kd-tree from a list of points.
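A short usage sketch (the points are illustrative):

    tree = create([(1, 2), (3, 1), (5, 4)])  # dimensions inferred from the points
    empty = create(dimensions=3)             # an empty 3-dimensional tree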
23,210
def level_order(tree, include_all=False):
    q = deque()
    q.append(tree)
    while q:
        node = q.popleft()
        yield node
        if include_all or node.left:
            q.append(node.left or node.__class__())
        if include_all or node.right:
            q.append(node.right or node.__class__())
Returns an iterator over the tree in level-order.
23,211
def visualize(tree, max_level=100, node_width=10, left_padding=5):
    height = min(max_level, tree.height() - 1)
    max_width = pow(2, height)
    per_level = 1
    in_level = 0
    level = 0
    for node in level_order(tree, include_all=True):
        if in_level == 0:
            print()
            print()
            print(' ' * left_padding, end=' ')
        width = int(max_width * node_width / per_level)
        node_str = (str(node.data) if node else '').center(width)
        print(node_str, end=' ')
        in_level += 1
        if in_level == per_level:
            in_level = 0
            per_level *= 2
            level += 1
            if level > height:
                break
    print()
    print()
Prints the tree to stdout.
23,212
def is_leaf(self):
    return (not self.data) or (all(not bool(c) for c, p in self.children))
Returns True if a Node has no subnodes.
23,213
def children(self):
    if self.left and self.left.data is not None:
        yield self.left, 0
    if self.right and self.right.data is not None:
        yield self.right, 1
Returns an iterator for the non-empty children of the Node.
23,214
def set_child(self, index, child):
    if index == 0:
        self.left = child
    else:
        self.right = child
Sets one of the node's children.
23,215
def get_child_pos(self, child):
    for c, pos in self.children:
        if child == c:
            return pos
Returns the position of the given child.
23,216
def add(self, point):
    current = self
    while True:
        check_dimensionality([point], dimensions=current.dimensions)
        if current.data is None:
            current.data = point
            return current
        if point[current.axis] < current.data[current.axis]:
            if current.left is None:
                current.left = current.create_subnode(point)
                return current.left
            else:
                current = current.left
        else:
            if current.right is None:
                current.right = current.create_subnode(point)
                return current.right
            else:
                current = current.right
Adds a point to the current node or iteratively descends to one of its children.
23,217
def create_subnode(self, data):
    return self.__class__(data,
                          axis=self.sel_axis(self.axis),
                          sel_axis=self.sel_axis,
                          dimensions=self.dimensions)
Creates a subnode for the current node.
23,218
def find_replacement(self):
    if self.right:
        child, parent = self.right.extreme_child(min, self.axis)
    else:
        child, parent = self.left.extreme_child(max, self.axis)
    return (child, parent if parent is not None else self)
Finds a replacement for the current node.
23,219
def remove(self, point, node=None):
    if not self:
        return
    if self.should_remove(point, node):
        return self._remove(point)
    if self.left and self.left.should_remove(point, node):
        self.left = self.left._remove(point)
    elif self.right and self.right.should_remove(point, node):
        self.right = self.right._remove(point)
    if point[self.axis] <= self.data[self.axis]:
        if self.left:
            self.left = self.left.remove(point, node)
    if point[self.axis] >= self.data[self.axis]:
        if self.right:
            self.right = self.right.remove(point, node)
    return self
Removes the node with the given point from the tree.
23,220
def axis_dist(self, point, axis):
    return math.pow(self.data[axis] - point[axis], 2)
Squared distance at the given axis between the current Node and the given point.
23,221
def dist(self, point):
    r = range(self.dimensions)
    return sum([self.axis_dist(point, i) for i in r])
Squared distance between the current Node and the given point.
23,222
def search_knn(self, point, k, dist=None):
    if k < 1:
        raise ValueError("k must be greater than 0.")
    if dist is None:
        get_dist = lambda n: n.dist(point)
    else:
        get_dist = lambda n: dist(n.data, point)
    results = []
    self._search_node(point, k, results, get_dist, itertools.count())
    return [(node, -d) for d, _, node in sorted(results, reverse=True)]
Return the k nearest neighbors of point and their distances.
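A brief usage sketch building on create() above (the points are illustrative; with the default dist the reported distances are squared):

    tree = create([(1, 2), (3, 1), (5, 4)])
    for node, squared_dist in tree.search_knn((2, 2), k=2):
        print(node.data, squared_dist)  # the two nearest points, closest first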
23,223
def search_nn(self, point, dist=None):
    return next(iter(self.search_knn(point, 1, dist)), None)
Search for the node nearest to the given point.
23,224
def search_nn_dist(self, point, distance, best=None):
    results = []
    get_dist = lambda n: n.dist(point)
    self._search_nn_dist(point, distance, results, get_dist)
    return results
Search for the nearest nodes of the given point that are within the given distance.
23,225
def is_valid(self):
    if not self:
        return True
    if self.left and self.data[self.axis] < self.left.data[self.axis]:
        return False
    if self.right and self.data[self.axis] > self.right.data[self.axis]:
        return False
    return all(c.is_valid() for c, _ in self.children) or self.is_leaf
Checks recursively if the tree is valid.
23,226
def extreme_child(self, sel_func, axis):
    max_key = lambda child_parent: child_parent[0].data[axis]
    me = [(self, None)] if self else []
    child_max = [c.extreme_child(sel_func, axis) for c, _ in self.children]
    child_max = [(c, p if p is not None else self) for c, p in child_max]
    candidates = me + child_max
    if not candidates:
        return None, None
    return sel_func(candidates, key=max_key)
Returns a child of the subtree and its parent.
23,227
def dump_raw(self, text, stream=None):
    encrypted = self.vault.encrypt(text)
    if stream:
        stream.write(encrypted)
    else:
        return encrypted
Encrypt raw data and write to stream.
23,228
def dump(self, data, stream=None):
    yaml_text = yaml.dump(data, default_flow_style=False, allow_unicode=True)
    return self.dump_raw(yaml_text, stream=stream)
Encrypt data and print to stdout or write to the stream.
23,229
def add_item(c, name, item):
    if isinstance(item, MenuItem):
        if name not in c.items:
            c.items[name] = []
        c.items[name].append(item)
        c.sorted[name] = False
add_item adds MenuItems to the menu identified by name.
23,230
def load_menus(c):
    if c.loaded:
        return
    app_names = settings.INSTALLED_APPS
    if apps:
        app_names = [app_config.name for app_config in apps.get_app_configs()]
    for app in app_names:
        if app.startswith("django."):
            continue
        menu_module = '%s.menus' % app
        try:
            __import__(menu_module, fromlist=["menu", ])
        except ImportError:
            pass
    c.loaded = True
load_menus loops through INSTALLED_APPS and loads each app's menus.py module.
23,231
def sort_menus(c):
    for name in c.items:
        if not c.sorted[name]:
            c.items[name].sort(key=lambda x: x.weight)
            c.sorted[name] = True
sort_menus goes through the items and sorts them based on their weight.
23,232
def process(c, request, name=None):
    c.load_menus()
    c.sort_menus()
    if name is None:
        items = {}
        for name in c.items:
            items[name] = c.process(request, name)
        return items
    if name not in c.items:
        return []
    items = copy.deepcopy(c.items[name])
    curitem = None
    for item in items:
        item.process(request)
        if item.visible:
            item.selected = False
            if item.match_url(request):
                if curitem is None or len(curitem.url) < len(item.url):
                    curitem = item
    if curitem is not None:
        curitem.selected = True
    visible = [item for item in items if item.visible]
    if getattr(settings, 'MENU_SELECT_PARENTS', False):
        def is_child_selected(item):
            for child in item.children:
                if child.selected or is_child_selected(child):
                    return True
        for item in visible:
            if is_child_selected(item):
                item.selected = True
    return visible
process uses the current request to determine which menus should be visible, which are selected, etc.
23,233
def check(self, request):
    if callable(self.check_func):
        self.visible = self.check_func(request)
Evaluate if we should be visible for this request.
23,234
def process(self, request):
    self.check(request)
    if not self.visible:
        return
    if callable(self.title):
        self.title = self.title(request)
    if self.slug is None:
        if sys.version_info > (3, 0):
            self.slug = slugify(self.title)
        else:
            self.slug = slugify(unicode(self.title))
    if callable(self.children):
        children = list(self.children(request))
    else:
        children = list(self.children)
    for child in children:
        child.parent = self
        child.process(request)
    self.children = [child for child in children if child.visible]
    self.children.sort(key=lambda child: child.weight)
    hide_empty = getattr(settings, 'MENU_HIDE_EMPTY', False)
    if hide_empty and len(self.children) == 0:
        self.visible = False
        return
    curitem = None
    for item in self.children:
        item.selected = False
        if item.match_url(request):
            if curitem is None or len(curitem.url) < len(item.url):
                curitem = item
    if curitem is not None:
        curitem.selected = True
process determines if this item should be visible, if it's selected, etc.
23,235
def match_url(self, request):
    matched = False
    if self.exact_url:
        if re.match("%s$" % (self.url,), request.path):
            matched = True
    elif re.match("%s" % self.url, request.path):
        matched = True
    return matched
match_url determines if this item is selected for the current request path.
23,236
def configure(self, proto):
    if not proto:
        defer.returnValue(None)
    reply = yield self.__send_ismaster(proto, timeout=self.initialDelay)
    if len(reply.documents) != 1:
        raise OperationFailure("TxMongo: invalid document length.")
    config = reply.documents[0].decode()
    if not config.get("ok"):
        code = config.get("code")
        msg = "TxMongo: " + config.get("err", "Unknown error")
        raise OperationFailure(msg, code)
    set_name = config.get("setName")
    expected_set_name = self.uri["options"].get("replicaset")
    if expected_set_name and (expected_set_name != set_name):
        msg = "TxMongo: Mongo instance does not match requested replicaSet."
        raise ConfigurationError(msg)
    proto.max_bson_size = config.get("maxBsonObjectSize", DEFAULT_MAX_BSON_SIZE)
    proto.max_write_batch_size = config.get("maxWriteBatchSize", DEFAULT_MAX_WRITE_BATCH_SIZE)
    proto.set_wire_versions(config.get("minWireVersion", 0),
                            config.get("maxWireVersion", 0))
    hosts = config.get("hosts")
    if isinstance(hosts, list) and hosts:
        for host in hosts:
            if ':' not in host:
                host = (host, 27017)
            else:
                host = host.split(':', 1)
                host[1] = int(host[1])
                host = tuple(host)
            if host not in self.__allnodes:
                self.__allnodes.append(host)
    ismaster = config.get("ismaster")
    if not ismaster:
        msg = "TxMongo: MongoDB host `%s` is not master." % config.get('me')
        raise AutoReconnect(msg)
Configures the protocol using the information gathered from the remote Mongo instance. Such information may contain the max BSON document size, the replica set configuration, and the master status of the instance.
23,237
def notifyReady(self):
    if self.instance:
        return defer.succeed(self.instance)

    def on_cancel(d):
        self.__notify_ready.remove(d)

    df = defer.Deferred(on_cancel)
    self.__notify_ready.append(df)
    return df
Returns a deferred that will fire when the factory has created a protocol that can be used to communicate with a Mongo server.
23,238
def retryNextHost(self, connector=None):
    if not self.continueTrying:
        msg = "TxMongo: Abandoning {0} on explicit request.".format(connector)
        log.msg(msg)
        return
    if connector is None:
        if self.connector is None:
            raise ValueError("TxMongo: No additional connector to retry.")
        else:
            connector = self.connector
    delay = False
    self.__index += 1
    if self.__index >= len(self.__allnodes):
        self.__index = 0
        delay = True
    connector.host, connector.port = self.__allnodes[self.__index]
    if delay:
        self.retry(connector)
    else:
        connector.connect()
Have this connector connect again, to the next host in the configured list of hosts.
23,239
def indexes_created(self):
    d = defer.Deferred()
    self.__indexes_created_defer.chainDeferred(d)
    return d
Returns a deferred that fires on the creation of this GridFS instance's indexes.
23,240
def get_last_version(self, filename):
    def ok(doc):
        if doc is None:
            raise NoFile("TxMongo: no file in gridfs with filename {0}".format(repr(filename)))
        return GridOut(self.__collection, doc)

    return self.__files.find_one({"filename": filename},
                                 filter=filter.sort(DESCENDING("uploadDate"))).addCallback(ok)
Get a file from GridFS by filename.
23,241
def authenticate(self, name, password, mechanism="DEFAULT"):
    if not isinstance(name, (bytes, unicode)):
        raise TypeError("TxMongo: name must be an instance of basestring.")
    if not isinstance(password, (bytes, unicode)):
        raise TypeError("TxMongo: password must be an instance of basestring.")
    return self.connection.authenticate(self, name, password, mechanism)
Send an authentication command for this database. Mostly stolen from pymongo.
23,242
def timeout(func):
    @wraps(func)
    def _timeout(*args, **kwargs):
        now = time()
        deadline = kwargs.pop("deadline", None)
        seconds = kwargs.pop("timeout", None)
        if deadline is None and seconds is not None:
            deadline = now + seconds
        if deadline is not None and deadline < now:
            raise TimeExceeded("TxMongo: run time exceeded by {0}s.".format(now - deadline))
        kwargs['_deadline'] = deadline
        raw_d = func(*args, **kwargs)
        if deadline is None:
            return raw_d
        if seconds is None and deadline is not None and deadline - now > 0:
            seconds = deadline - now
        timeout_d = defer.Deferred()
        times_up = reactor.callLater(seconds, timeout_d.callback, None)

        def on_ok(result):
            if timeout_d.called:
                raw_d.cancel()
                raise TimeExceeded("TxMongo: run time of {0}s exceeded.".format(seconds))
            else:
                times_up.cancel()
                return result[0]

        def on_fail(failure):
            failure.trap(defer.FirstError)
            assert failure.value.index == 0
            times_up.cancel()
            failure.value.subFailure.raiseException()

        return defer.DeferredList([raw_d, timeout_d],
                                  fireOnOneCallback=True,
                                  fireOnOneErrback=True,
                                  consumeErrors=True).addCallbacks(on_ok, on_fail)

    return _timeout
Decorator to add a timeout to Deferred calls.
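A hedged usage sketch (the function, collection, and delay are illustrative; the wrapped function must accept the injected _deadline kwarg and a Twisted reactor must be running):

    @timeout
    def slow_query(collection, _deadline=None):
        return collection.find({})  # returns a Deferred

    d = slow_query(some_collection, timeout=5)  # errbacks with TimeExceeded after ~5s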
23,243
def writelines(self, sequence):
    iterator = iter(sequence)

    def iterate(_=None):
        try:
            return self.write(next(iterator)).addCallback(iterate)
        except StopIteration:
            return

    return defer.maybeDeferred(iterate)
Write a sequence of strings to the file.
23,244
def get_new_requests(self):
    content_type = self.__queue_item.response.headers.get('content-type')
    scrapers = self.__get_all_scrapers()
    new_requests = []
    for scraper in scrapers:
        instance = scraper(self.__options, self.__queue_item)
        if self.__content_type_matches(content_type, instance.content_types):
            new_requests.extend(instance.get_requests())
    return new_requests
Retrieve all the new requests that were found in this request.
23,245
def __make_request(self, url, method, data, auth, cookies, headers, proxies, timeout, verify):
    request_by_method = getattr(requests, method)
    return request_by_method(
        url=url,
        data=data,
        auth=auth,
        cookies=cookies,
        headers=headers,
        proxies=proxies,
        timeout=timeout,
        verify=verify,
        allow_redirects=True,
        stream=False
    )
Execute a request with the given data.
23,246
def __get_all_scrapers(self):
    modules_strings = self.__get_all_scrapers_modules()
    modules = []
    for module_string in modules_strings:
        module = importlib.import_module("nyawc.scrapers." + module_string)
        modules.append(getattr(module, module_string))
    return modules
Find all available scraper references.
23,247
def __get_all_scrapers_modules(self):
    modules = []
    file = os.path.realpath(__file__)
    folder = os.path.dirname(file)
    for filename in os.listdir(folder + "/../scrapers"):
        if filename.endswith("Scraper.py") and not filename.startswith("Base"):
            modules.append(filename[:-3])
    return modules
Find all available scraper modules.
23,248
def __content_type_matches(self, content_type, available_content_types):
    if content_type is None:
        return False
    if content_type in available_content_types:
        return True
    for available_content_type in available_content_types:
        if available_content_type in content_type:
            return True
    return False
Check if the given content type matches one of the available content types.
23,249
def increase_route_count(self, crawled_request):
    for route in self.__routing_options.routes:
        if re.compile(route).match(crawled_request.url):
            count_key = str(route) + crawled_request.method
            if count_key in self.__routing_count.keys():
                self.__routing_count[count_key] += 1
            else:
                self.__routing_count[count_key] = 1
            break
Increase the count that determines how many times a URL of a certain route has been crawled.
23,250
def is_treshold_reached(self, scraped_request):
    for route in self.__routing_options.routes:
        if re.compile(route).match(scraped_request.url):
            count_key = str(route) + scraped_request.method
            if count_key in self.__routing_count.keys():
                return self.__routing_count[count_key] >= self.__routing_options.minimum_threshold
    return False
Check if requests similar to the given request have already been crawled X times, where X is the minimum threshold amount from the options.
23,251
def add_request(self, request):
    queue_item = QueueItem(request, Response(request.url))
    self.add(queue_item)
    return queue_item
Add a request to the queue.
23,252
def has_request(self, request):
    queue_item = QueueItem(request, Response(request.url))
    key = queue_item.get_hash()
    for status in QueueItem.STATUSES:
        if key in self.__get_var("items_" + status).keys():
            return True
    return False
Check if the given request already exists in the queue.
23,253
def get_first(self, status):
    items = self.get_all(status)
    if items:
        return list(items.items())[0][1]
    return None
Get the first item in the queue that has the given status.
23,254
def get_for_type(input_type="text"):
    if input_type in RandomInputHelper.cache:
        return RandomInputHelper.cache[input_type]
    types = {
        "text": RandomInputHelper.get_random_value,
        "hidden": RandomInputHelper.get_random_value,
        "search": RandomInputHelper.get_random_value,
        "color": RandomInputHelper.get_random_color,
        "week": {
            "function": RandomInputHelper.get_random_value,
            "params": [2, ["1234"]]
        },
        "password": RandomInputHelper.get_random_password,
        "number": RandomInputHelper.get_random_number,
        "tel": RandomInputHelper.get_random_telephonenumber,
        "url": RandomInputHelper.get_random_url,
        "textarea": RandomInputHelper.get_random_text,
        "email": RandomInputHelper.get_random_email
    }
    if types.get(input_type) is None:
        return ""
    if type(types.get(input_type)) is dict:
        generator = types.get(input_type)
        value = generator.get("function")(*generator.get("params"))
    else:
        value = types.get(input_type)()
    RandomInputHelper.cache[input_type] = value
    return value
Get a random string for the given HTML input type.
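A short usage sketch (the sample output is illustrative; values are cached per input type):

    email = RandomInputHelper.get_for_type('email')  # e.g. 'abcdef@ghijkl.com'
    same = RandomInputHelper.get_for_type('email')   # cached, so the same value again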
23,255
def get_random_value(length=10, character_sets=[string.ascii_uppercase, string.ascii_lowercase]):
    return "".join(random.choice("".join(character_sets)) for i in range(length))
Get a random string with the given length.
23,256
def get_random_email(ltd="com"):
    email = [
        RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
        "@",
        RandomInputHelper.get_random_value(6, [string.ascii_lowercase]),
        ".",
        ltd
    ]
    return "".join(email)
Get a random email address with the given ltd.
23,257
def get_random_password():
    password = []
    password.append(RandomInputHelper.get_random_value(4, [string.ascii_lowercase]))
    password.append(RandomInputHelper.get_random_value(2, [string.digits]))
    password.append(RandomInputHelper.get_random_value(2, ["$&*@!"]))
    password.append(RandomInputHelper.get_random_value(4, [string.ascii_uppercase]))
    return "".join(password)
Get a random password that complies with most of the requirements.
23,258
def get_random_url(ltd="com"):
    url = [
        "https://",
        RandomInputHelper.get_random_value(8, [string.ascii_lowercase]),
        ".",
        ltd
    ]
    return "".join(url)
Get a random URL with the given ltd.
23,259
def get_random_telephonenumber():
    phone = [
        RandomInputHelper.get_random_value(3, "123456789"),
        RandomInputHelper.get_random_value(3, "12345678"),
        "".join(map(str, random.sample(range(10), 4)))
    ]
    return "-".join(phone)
Get a random 10-digit phone number that complies with most of the requirements.
23,260
def complies_with_scope(queue_item, new_request, scope):
    if not URLHelper.is_parsable(queue_item.request.url):
        return False
    if not URLHelper.is_parsable(new_request.url):
        return False
    if scope.request_methods:
        if queue_item.request.method not in scope.request_methods:
            return False
    if scope.protocol_must_match:
        if URLHelper.get_protocol(queue_item.request.url) != URLHelper.get_protocol(new_request.url):
            return False
    if scope.subdomain_must_match:
        current_subdomain = URLHelper.get_subdomain(queue_item.request.url)
        new_subdomain = URLHelper.get_subdomain(new_request.url)
        www_matches = False
        if current_subdomain == "www" and new_subdomain == "":
            www_matches = True
        if new_subdomain == "www" and current_subdomain == "":
            www_matches = True
        if not www_matches and current_subdomain != new_subdomain:
            return False
    if scope.hostname_must_match:
        if URLHelper.get_hostname(queue_item.request.url) != URLHelper.get_hostname(new_request.url):
            return False
    if scope.tld_must_match:
        if URLHelper.get_tld(queue_item.request.url) != URLHelper.get_tld(new_request.url):
            return False
    return True
Check if the new request complies with the crawling scope.
23,261
def get_cookie_header(queue_item):
    header = []
    path = URLHelper.get_path(queue_item.request.url)
    for cookie in queue_item.request.cookies:
        root_path = cookie.path == "" or cookie.path == "/"
        if path.startswith(cookie.path) or root_path:
            header.append(cookie.name + "=" + cookie.value)
    # cookie pairs in a Cookie header are separated by "; " (RFC 6265),
    # not "&" as the original code had
    return "; ".join(header)
Convert a requests cookie jar to an HTTP request cookie header value.
23,262
def get_soup_response(self):
    if self.response is not None:
        if self.__response_soup is None:
            result = BeautifulSoup(self.response.text, "lxml")
            if self.decomposed:
                return result
            else:
                self.__response_soup = BeautifulSoup(self.response.text, "lxml")
    return self.__response_soup
Get the response as a cached BeautifulSoup container.
23,263
def get_hash(self):
    if self.__index_hash:
        return self.__index_hash
    key = self.request.method
    key += URLHelper.get_protocol(self.request.url)
    key += URLHelper.get_subdomain(self.request.url)
    key += URLHelper.get_hostname(self.request.url)
    key += URLHelper.get_tld(self.request.url)
    key += URLHelper.get_path(self.request.url)
    key += str(URLHelper.get_ordered_params(self.request.url))
    if self.request.data is not None:
        key += str(self.request.data.keys())
    self.__index_hash = key
    return self.__index_hash
Generate and return the dict index hash of the given queue item.
23,264
def __get_request(self, host, soup):
    url = URLHelper.make_absolute(host, self.__trim_grave_accent(soup["action"])) if soup.has_attr("action") else host
    method_original = soup["method"] if soup.has_attr("method") else "get"
    method = "post" if method_original.lower() == "post" else "get"
    data = self.__get_form_data(soup)
    return Request(url, method, data)
Build a request from the given soup form.
23,265
def __get_form_data(self, soup):
    elements = self.__get_valid_form_data_elements(soup)
    form_data = self.__get_default_form_data_input(elements)
    callback = self.options.callbacks.form_before_autofill
    action = callback(self.queue_item, elements, form_data)
    if action == CrawlerActions.DO_AUTOFILL_FORM:
        self.__autofill_form_data(form_data, elements)
    return form_data
Build a form data dict from the given form.
23,266
def __get_valid_form_data_elements(self, soup):
    elements = []
    for element in soup.find_all(["input", "button", "textarea", "select"]):
        if element.has_attr("name"):
            elements.append(element)
    return elements
Get all valid form input elements.
23,267
def __autofill_form_data(self, form_data, elements):
    for element in elements:
        if element["name"] not in form_data:
            continue
        # skip values that are already filled in; the original `not len(...) is 0`
        # relied on identity comparison with an int, which is unreliable
        if len(form_data[element["name"]]) != 0:
            continue
        if element.name == "textarea":
            form_data[element["name"]] = RandomInputHelper.get_for_type("textarea")
            continue
        if element.has_attr("type"):
            form_data[element["name"]] = RandomInputHelper.get_for_type(element["type"])
Autofill empty form data with random data.
23,268
def __get_default_value_from_element(self, element):
    if element.name == "select":
        options = element.find_all("option")
        is_multiple = element.has_attr("multiple")
        selected_options = [option for option in options if option.has_attr("selected")]
        if not selected_options and options:
            selected_options = [options[0]]
        selected_values = []
        if is_multiple:
            for option in selected_options:
                value = option["value"] if option.has_attr("value") else option.string
                selected_values.append(value)
            return selected_values
        elif len(selected_options) >= 1:
            if selected_options[0].has_attr("value"):
                return selected_options[0]["value"]
            else:
                return selected_options[0].string
        return ""
    if element.name == "textarea":
        return element.string if element.string is not None else ""
    if element.name == "input" and element.has_attr("type"):
        if element["type"] in ("checkbox", "radio"):
            if not element.has_attr("checked"):
                return False
            if element.has_attr("value"):
                return element["value"]
            else:
                return "on"
    if element.has_attr("value"):
        return element["value"]
    return ""
Get the default value of a form element.
23,269
def append_with_data(url, data):
    if data is None:
        return url
    url_parts = list(urlparse(url))
    query = OrderedDict(parse_qsl(url_parts[4], keep_blank_values=True))
    query.update(data)
    url_parts[4] = URLHelper.query_dict_to_string(query)
    return urlunparse(url_parts)
Append the given data OrderedDict to the given URL's query string.
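A short usage sketch (the URL and data are illustrative; assumes `from collections import OrderedDict`):

    url = URLHelper.append_with_data('http://example.com/?a=1', OrderedDict([('b', '2')]))
    # -> 'http://example.com/?a=1&b=2'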
23,270
def get_subdomain(url):
    if url not in URLHelper.__cache:
        URLHelper.__cache[url] = urlparse(url)
    return ".".join(URLHelper.__cache[url].netloc.split(".")[:-2])
Get the subdomain of the given URL.
23,271
def get_hostname(url):
    if url not in URLHelper.__cache:
        URLHelper.__cache[url] = urlparse(url)
    parts = URLHelper.__cache[url].netloc.split(".")
    if len(parts) == 1:
        return parts[0]
    else:
        return ".".join(parts[-2:-1])
Get the hostname of the given URL.
23,272
def get_tld(url):
    if url not in URLHelper.__cache:
        URLHelper.__cache[url] = urlparse(url)
    parts = URLHelper.__cache[url].netloc.split(".")
    if len(parts) == 1:
        return ""
    else:
        return parts[-1]
Get the TLD of the given URL.
23,273
def get_ordered_params(url):
    if url not in URLHelper.__cache:
        URLHelper.__cache[url] = urlparse(url)
    params = URLHelper.query_string_to_dict(URLHelper.__cache[url].query)
    return OrderedDict(sorted(params.items()))
Get the query parameters of the given URL in alphabetical order.
23,274
def query_dict_to_string(query):
    query_params = []
    for key, value in query.items():
        query_params.append(key + "=" + value)
    return "&".join(query_params)
Convert an OrderedDict to a query string.
23,275
def query_string_to_dict(query):
    query_params = {}
    for key_value in query.split("&"):
        key_value_pair = key_value.split("=", 1)
        key = key_value_pair[0] if len(key_value_pair) >= 1 else ""
        value = key_value_pair[1] if len(key_value_pair) == 2 else ""
        query_params[key] = value
    return query_params
Convert a query string to a query dict.
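A short round-trip sketch (the values are illustrative; assumes `from collections import OrderedDict`):

    params = URLHelper.query_string_to_dict('a=1&b=2')                      # {'a': '1', 'b': '2'}
    URLHelper.query_dict_to_string(OrderedDict(sorted(params.items())))     # 'a=1&b=2'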
23,276
def get_version():
    if PackageHelper.__version:
        return PackageHelper.__version
    PackageHelper.__version = "Unknown"
    file = os.path.realpath(__file__)
    folder = os.path.dirname(file)
    try:
        semver = open(folder + "/../../.semver", "r")
        PackageHelper.__version = semver.read().rstrip()
        semver.close()
        return PackageHelper.__version
    except Exception:
        pass
    try:
        distribution = pkg_resources.get_distribution(PackageHelper.get_alias())
        if distribution.version:
            PackageHelper.__version = distribution.version
            return PackageHelper.__version
    except Exception:
        pass
    return PackageHelper.__version
Get the version number of this package.
23,277
def run(self):
    try:
        self.__options.callbacks.request_in_thread_before_start(self.__queue_item)
    except Exception as e:
        print(e)
    new_requests = []
    failed = False
    try:
        handler = Handler(self.__options, self.__queue_item)
        new_requests = handler.get_new_requests()
        try:
            self.__queue_item.response.raise_for_status()
        except Exception:
            if self.__queue_item.request.parent_raised_error:
                failed = True
            else:
                for new_request in new_requests:
                    new_request.parent_raised_error = True
    except Exception as e:
        failed = True
        error_message = "Setting status of '{}' to '{}' because of an HTTP error.".format(
            self.__queue_item.request.url, QueueItem.STATUS_ERRORED)
        DebugHelper.output(self.__options, error_message)
        DebugHelper.output(self.__options, e)
        try:
            self.__options.callbacks.request_on_error(self.__queue_item, str(e))
        except Exception as e:
            print(e)
    for new_request in new_requests:
        new_request.parent_url = self.__queue_item.request.url
    try:
        self.__options.callbacks.request_in_thread_after_finish(self.__queue_item)
    except Exception as e:
        print(e)
    with self.__callback_lock:
        self.__callback(self.__queue_item, new_requests, failed)
Executes the HTTP call.
23,278
def start_with(self, request):
    HTTPRequestHelper.patch_with_options(request, self.__options)
    self.queue.add_request(request)
    self.__crawler_start()
Start the crawler using the given request.
23,279
def __spawn_new_requests(self):
    self.__should_spawn_new_requests = False
    in_progress_count = len(self.queue.get_all(QueueItem.STATUS_IN_PROGRESS))
    while in_progress_count < self.__options.performance.max_threads:
        if self.__spawn_new_request():
            in_progress_count += 1
        else:
            break
    if in_progress_count == 0:
        self.__crawler_stop()
Spawn new requests until the max threads option value is reached.
23,280
def __spawn_new_request(self):
    first_in_line = self.queue.get_first(QueueItem.STATUS_QUEUED)
    if first_in_line is None:
        return False
    while self.routing.is_treshold_reached(first_in_line.request):
        self.queue.move(first_in_line, QueueItem.STATUS_CANCELLED)
        first_in_line = self.queue.get_first(QueueItem.STATUS_QUEUED)
        if first_in_line is None:
            return False
    self.__request_start(first_in_line)
    return True
Spawn the first queued request if there is one available.
23,281
def __crawler_start(self):
    try:
        self.__options.callbacks.crawler_before_start()
    except Exception as e:
        print(e)
        print(traceback.format_exc())
    self.__spawn_new_requests()
    while not self.__stopped:
        if self.__should_stop:
            self.__crawler_stop()
        if self.__should_spawn_new_requests:
            self.__spawn_new_requests()
        time.sleep(0.1)
Spawn the first X queued requests, where X is the max threads option.
23,282
def __crawler_stop(self):
    if self.__stopping:
        return
    self.__stopping = True
    self.__wait_for_current_threads()
    self.queue.move_bulk([QueueItem.STATUS_QUEUED, QueueItem.STATUS_IN_PROGRESS],
                         QueueItem.STATUS_CANCELLED)
    self.__crawler_finish()
    self.__stopped = True
Mark the crawler as stopped.
23,283
def __crawler_finish(self):
    try:
        self.__options.callbacks.crawler_after_finish(self.queue)
    except Exception as e:
        print(e)
        print(traceback.format_exc())
Called when the crawler is finished, because there are no queued requests left or it was stopped.
23,284
def __request_start(self, queue_item):
    try:
        action = self.__options.callbacks.request_before_start(self.queue, queue_item)
    except Exception as e:
        action = None
        print(e)
        print(traceback.format_exc())
    if action == CrawlerActions.DO_STOP_CRAWLING:
        self.__should_stop = True
    if action == CrawlerActions.DO_SKIP_TO_NEXT:
        self.queue.move(queue_item, QueueItem.STATUS_FINISHED)
        self.__should_spawn_new_requests = True
    if action == CrawlerActions.DO_CONTINUE_CRAWLING or action is None:
        self.queue.move(queue_item, QueueItem.STATUS_IN_PROGRESS)
        thread = CrawlerThread(self.__request_finish, self.__lock, self.__options, queue_item)
        self.__threads[queue_item.get_hash()] = thread
        thread.daemon = True
        thread.start()
Execute the request in the given queue item.
23,285
def __request_finish(self, queue_item, new_requests, request_failed=False):
    if self.__stopping:
        return
    del self.__threads[queue_item.get_hash()]
    if request_failed:
        new_queue_items = []
        self.queue.move(queue_item, QueueItem.STATUS_ERRORED)
    else:
        self.routing.increase_route_count(queue_item.request)
        new_queue_items = self.__add_scraped_requests_to_queue(queue_item, new_requests)
        self.queue.move(queue_item, QueueItem.STATUS_FINISHED)
    try:
        action = self.__options.callbacks.request_after_finish(self.queue, queue_item, new_queue_items)
    except Exception as e:
        action = None
        print(e)
        print(traceback.format_exc())
    queue_item.decompose()
    if action == CrawlerActions.DO_STOP_CRAWLING:
        self.__should_stop = True
    if action == CrawlerActions.DO_CONTINUE_CRAWLING or action is None:
        self.__should_spawn_new_requests = True
Called when the crawler finished the given queue item.
23,286
def __add_scraped_requests_to_queue(self, queue_item, scraped_requests):
    new_queue_items = []
    for scraped_request in scraped_requests:
        HTTPRequestHelper.patch_with_options(scraped_request, self.__options, queue_item)
        if not HTTPRequestHelper.complies_with_scope(queue_item, scraped_request, self.__options.scope):
            continue
        if self.queue.has_request(scraped_request):
            continue
        scraped_request.depth = queue_item.request.depth + 1
        if self.__options.scope.max_depth is not None:
            if scraped_request.depth > self.__options.scope.max_depth:
                continue
        new_queue_item = self.queue.add_request(scraped_request)
        new_queue_items.append(new_queue_item)
    return new_queue_items
Convert the scraped requests to queue items, return them, and also add them to the queue.
23,287
def fit(self, train_set, test_set):
    with tf.Graph().as_default(), tf.Session() as self.tf_session:
        self.build_model()
        tf.global_variables_initializer().run()
        third = self.num_epochs // 3
        for i in range(self.num_epochs):
            lr_decay = self.lr_decay ** max(i - third, 0.0)
            self.tf_session.run(tf.assign(self.lr_var, tf.multiply(self.learning_rate, lr_decay)))
            train_perplexity = self._run_train_step(train_set, 'train')
            print("Epoch: %d Train Perplexity: %.3f" % (i + 1, train_perplexity))
        test_perplexity = self._run_train_step(test_set, 'test')
        print("Test Perplexity: %.3f" % test_perplexity)
Fit the model to the given data.
23,288
def _run_train_step(self, data, mode='train'):
    epoch_size = ((len(data) // self.batch_size) - 1) // self.num_steps
    costs = 0.0
    iters = 0
    step = 0
    state = self._init_state.eval()
    op = self._train_op if mode == 'train' else tf.no_op()
    for step, (x, y) in enumerate(utilities.seq_data_iterator(data, self.batch_size, self.num_steps)):
        cost, state, _ = self.tf_session.run(
            [self.cost, self.final_state, op],
            {self.input_data: x, self.input_labels: y, self._init_state: state})
        costs += cost
        iters += self.num_steps
        if step % (epoch_size // 10) == 10:
            print("%.3f perplexity" % (step * 1.0 / epoch_size))
    return np.exp(costs / iters)
Run a single training step.
23,289
def build_model(self):
    with tf.variable_scope("model", reuse=None, initializer=self.initializer):
        self._create_placeholders()
        self._create_rnn_cells()
        self._create_initstate_and_embeddings()
        self._create_rnn_architecture()
        self._create_optimizer_node()
Build the model's computational graph.
23,290
def _create_placeholders(self):
    self.input_data = tf.placeholder(tf.int32, [self.batch_size, self.num_steps])
    self.input_labels = tf.placeholder(tf.int32, [self.batch_size, self.num_steps])
Create the computational graph's placeholders.
23,291
def _create_rnn_cells(self):
    lstm_cell = tf.nn.rnn_cell.LSTMCell(self.num_hidden, forget_bias=0.0)
    lstm_cell = tf.nn.rnn_cell.DropoutWrapper(lstm_cell, output_keep_prob=self.dropout)
    self.cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * self.num_layers)
Create the LSTM cells.
23,292
def _create_initstate_and_embeddings(self):
    self._init_state = self.cell.zero_state(self.batch_size, tf.float32)
    embedding = tf.get_variable("embedding", [self.vocab_size, self.num_hidden])
    inputs = tf.nn.embedding_lookup(embedding, self.input_data)
    self.inputs = tf.nn.dropout(inputs, self.dropout)
Create the initial state for the cell and the data embeddings.
23,293
def _create_rnn_architecture(self):
    self.inputs = [tf.squeeze(i, [1])
                   for i in tf.split(axis=1, num_or_size_splits=self.num_steps, value=self.inputs)]
    outputs, state = tf.nn.rnn(self.cell, self.inputs, initial_state=self._init_state)
    output = tf.reshape(tf.concat(axis=1, values=outputs), [-1, self.num_hidden])
    softmax_w = tf.get_variable("softmax_w", [self.num_hidden, self.vocab_size])
    softmax_b = tf.get_variable("softmax_b", [self.vocab_size])
    logits = tf.add(tf.matmul(output, softmax_w), softmax_b)
    loss = tf.nn.seq2seq.sequence_loss_by_example(
        [logits],
        [tf.reshape(self.input_labels, [-1])],
        [tf.ones([self.batch_size * self.num_steps])])
    self.cost = tf.div(tf.reduce_sum(loss), self.batch_size)
    self.final_state = state
Create the training architecture and the last layer of the LSTM.
23,294
def _create_optimizer_node(self):
    self.lr_var = tf.Variable(0.0, trainable=False)
    tvars = tf.trainable_variables()
    grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars), self.max_grad_norm)
    optimizer = tf.train.GradientDescentOptimizer(self.lr_var)
    self._train_op = optimizer.apply_gradients(zip(grads, tvars))
Create the optimizer node of the graph.
23,295
def _create_encoding_layers(self):
    next_train = self.input_data
    self.layer_nodes = []
    for l, layer in enumerate(self.layers):
        with tf.name_scope("encode-{}".format(l)):
            y_act = tf.add(tf.matmul(next_train, self.encoding_w_[l]), self.encoding_b_[l])
            if self.finetune_enc_act_func[l] is not None:
                layer_y = self.finetune_enc_act_func[l](y_act)
            else:
                layer_y = None
            next_train = tf.nn.dropout(layer_y, self.keep_prob)
        self.layer_nodes.append(next_train)
    self.encode = next_train
Create the encoding layers for supervised finetuning.
23,296
def _create_decoding_layers(self):
    next_decode = self.encode
    for l, layer in reversed(list(enumerate(self.layers))):
        with tf.name_scope("decode-{}".format(l)):
            if self.tied_weights:
                dec_w = tf.transpose(self.encoding_w_[l])
            else:
                dec_w = tf.Variable(tf.transpose(self.encoding_w_[l].initialized_value()))
            dec_b = tf.Variable(tf.constant(0.1, shape=[dec_w.get_shape().dims[1].value]))
            self.decoding_w.append(dec_w)
            self.decoding_b.append(dec_b)
            y_act = tf.add(tf.matmul(next_decode, dec_w), dec_b)
            if self.finetune_dec_act_func[l] is not None:
                layer_y = self.finetune_dec_act_func[l](y_act)
            else:
                layer_y = None
            next_decode = tf.nn.dropout(layer_y, self.keep_prob)
        self.layer_nodes.append(next_decode)
    self.reconstruction = next_decode
Create the decoding layers for reconstruction finetuning.
23,297
def load_mnist_dataset(mode='supervised', one_hot=True):
    mnist = input_data.read_data_sets("MNIST_data/", one_hot=one_hot)
    trX = mnist.train.images
    trY = mnist.train.labels
    vlX = mnist.validation.images
    vlY = mnist.validation.labels
    teX = mnist.test.images
    teY = mnist.test.labels
    if mode == 'supervised':
        return trX, trY, vlX, vlY, teX, teY
    elif mode == 'unsupervised':
        return trX, vlX, teX
Load the MNIST handwritten digits dataset.
23,298
def load_cifar10_dataset(cifar_dir, mode='supervised'):
    trX = None
    trY = np.array([])
    teX = np.array([])
    teY = np.array([])
    for fn in os.listdir(cifar_dir):
        if not fn.startswith('batches') and not fn.startswith('readme'):
            fo = open(os.path.join(cifar_dir, fn), 'rb')
            data_batch = pickle.load(fo)
            fo.close()
            if fn.startswith('data'):
                if trX is None:
                    trX = data_batch['data']
                    trY = data_batch['labels']
                else:
                    trX = np.concatenate((trX, data_batch['data']), axis=0)
                    trY = np.concatenate((trY, data_batch['labels']), axis=0)
            if fn.startswith('test'):
                teX = data_batch['data']
                teY = data_batch['labels']
    trX = trX.astype(np.float32) / 255.
    teX = teX.astype(np.float32) / 255.
    if mode == 'supervised':
        return trX, trY, teX, teY
    elif mode == 'unsupervised':
        return trX, teX
Load the CIFAR-10 dataset.
23,299
def linear(prev_layer, out_dim, name="linear"):
    with tf.name_scope(name):
        in_dim = prev_layer.get_shape()[1].value
        W = tf.Variable(tf.truncated_normal([in_dim, out_dim], stddev=0.1))
        b = tf.Variable(tf.constant(0.1, shape=[out_dim]))
        out = tf.add(tf.matmul(prev_layer, W), b)
        return (out, W, b)
Create a linear fully-connected layer.
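A brief usage sketch for the TF1-style graph API (the shapes are illustrative):

    x = tf.placeholder(tf.float32, [None, 784])
    out, W, b = linear(x, 10, name='logits')  # out has shape [None, 10]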