idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
45,200
def headers(self):
    """Return the uploader's headers: the defaults merged with the client's headers.

    Client headers take precedence over DEFAULT_HEADERS on key collisions.
    """
    merged = dict(self.DEFAULT_HEADERS)
    merged.update(getattr(self.client, 'headers', {}))
    return merged
Return headers of the uploader instance . This would include the headers of the client instance .
45,201
def headers_as_list(self):
    """Return the uploader's headers as a list of "key: value" strings."""
    return [
        '{}: {}'.format(header, content)
        for header, content in iteritems(self.headers)
    ]
Does the same as headers except it is returned as a list .
45,202
def get_offset(self):
    """Return the current upload offset reported by the tus server.

    Raises:
        TusCommunicationError: when the HEAD response carries no
            upload-offset header.
    """
    response = requests.head(self.url, headers=self.headers)
    offset = response.headers.get('upload-offset')
    if offset is None:
        msg = 'Attempt to retrieve offset fails with status {}'.format(response.status_code)
        raise TusCommunicationError(msg, response.status_code, response.content)
    return int(offset)
Return offset from tus server .
45,203
def encode_metadata(self):
    """Return the upload metadata encoded per the tus protocol.

    Each entry is "<key> <base64(value)>"; keys must be non-empty and may
    not contain spaces or commas.
    """
    encoded = []
    for key, value in iteritems(self.metadata):
        key_str = str(key)
        if re.search(r'^$|[\s,]+', key_str):
            msg = 'Upload-metadata key "{}" cannot be empty nor contain spaces or commas.'
            raise ValueError(msg.format(key_str))
        encoded.append('{} {}'.format(key_str, b64encode(b(value)).decode('ascii')))
    return encoded
Return list of encoded metadata as defined by the Tus protocol .
45,204
def get_url(self):
    """Return the tus upload url, consulting the url storage when enabled.

    A stored url is reused when found; otherwise a new upload is created
    and (when storage is configured) remembered under the file fingerprint.
    """
    if not (self.store_url and self.url_storage):
        return self.create_url()
    key = self.fingerprinter.get_fingerprint(self.get_file_stream())
    url = self.url_storage.get_item(key)
    if not url:
        url = self.create_url()
        self.url_storage.set_item(key, url)
    return url
Return the tus upload url .
45,205
def create_url(self):
    """Create the upload resource on the tus server and return its url.

    Raises:
        TusCommunicationError: when the POST response has no location header.
    """
    headers = self.headers
    headers['upload-length'] = str(self.file_size)
    headers['upload-metadata'] = ','.join(self.encode_metadata())
    response = requests.post(self.client.url, headers=headers)
    url = response.headers.get("location")
    if url is None:
        msg = 'Attempt to retrieve create file url with status {}'.format(response.status_code)
        raise TusCommunicationError(msg, response.status_code, response.content)
    # The server may return a relative location; resolve it against the client url.
    return urljoin(self.client.url, url)
Return upload url .
45,206
def request_length(self):
    """Return the byte count of the next chunk: the chunk size, capped by what remains."""
    return min(self.chunk_size, self.stop_at - self.offset)
Return length of next chunk upload .
45,207
def verify_upload(self):
    """Return True when the last chunk request got a 204; raise TusUploadFailed otherwise."""
    if self.request.status_code != 204:
        raise TusUploadFailed('', self.request.status_code, self.request.response_content)
    return True
Confirm that the last upload was successful . Raises TusUploadFailed exception if the upload was not successful .
45,208
def get_file_stream(self):
    """Return a readable binary stream for the upload.

    An explicit file_stream is rewound and reused; otherwise file_path is
    opened. Raises ValueError when neither is usable.
    """
    if self.file_stream:
        self.file_stream.seek(0)
        return self.file_stream
    if os.path.isfile(self.file_path):
        return open(self.file_path, 'rb')
    raise ValueError("invalid file {}".format(self.file_path))
Return a file stream instance of the upload .
45,209
def file_size(self):
    """Return the size of the file in bytes (leaves the stream positioned at EOF)."""
    fs = self.get_file_stream()
    fs.seek(0, os.SEEK_END)
    return fs.tell()
Return size of the file .
45,210
def upload(self, stop_at=None):
    """Upload the file in chunks until ``stop_at`` bytes (defaults to the full size)."""
    self.stop_at = stop_at or self.file_size
    while self.offset < self.stop_at:
        self.upload_chunk()
    # The loop above contains no break, so the original while/else body
    # always ran once uploading stopped; preserve that here.
    if self.log_func:
        self.log_func("maximum upload specified({} bytes) has been reached".format(self.stop_at))
Perform file upload .
45,211
def upload_chunk(self):
    """Send one chunk and advance the offset from the server's upload-offset header."""
    self._retried = 0
    self._do_request()
    self.offset = int(self.request.response_headers.get('upload-offset'))
    if self.log_func:
        self.log_func('{} bytes uploaded ...'.format(self.offset))
Upload chunk of file .
45,212
def get_item(self, key):
    """Return the stored tus url for ``key``, or None when no record exists."""
    matches = self._db.search(self._urls.key == key)
    if not matches:
        return None
    return matches[0].get('url')
Return the tus url of a file identified by the key specified .
45,213
def set_item(self, key, url):
    """Upsert the url stored under the unique ``key``."""
    exists = self._db.search(self._urls.key == key)
    if exists:
        self._db.update({'url': url}, self._urls.key == key)
    else:
        self._db.insert({'key': key, 'url': url})
Store the url value under the unique key .
45,214
def perform(self):
    """Perform actual request.

    Sends one chunk of the file to the tus server via an HTTP PATCH and
    records the status code and (lower-cased) response headers on self.
    Connection-level failures are wrapped in TusUploadFailed; other
    OSErrors are re-raised untouched.
    """
    try:
        # Split the upload url into host and path for http.client's request API.
        host = '{}://{}'.format(self._url.scheme, self._url.netloc)
        path = self._url.geturl().replace(host, '', 1)
        chunk = self.file.read(self._content_length)
        if self._upload_checksum:
            # Upload-Checksum header: "<algorithm name> <base64 digest of this chunk>".
            self._request_headers["upload-checksum"] = " ".join((
                self._checksum_algorithm_name,
                base64.b64encode(self._checksum_algorithm(chunk).digest()).decode("ascii"),
            ))
        self.handle.request("PATCH", path, chunk, self._request_headers)
        self._response = self.handle.getresponse()
        self.status_code = self._response.status
        # Normalise header names to lower case for case-insensitive lookup.
        self.response_headers = {k.lower(): v for k, v in self._response.getheaders()}
    except http.client.HTTPException as e:
        raise TusUploadFailed(e)
    # Wrap connection-related socket errors only; anything else propagates.
    except OSError as e:
        if e.errno in (errno.EPIPE, errno.ESHUTDOWN, errno.ECONNABORTED, errno.ECONNREFUSED, errno.ECONNRESET):
            raise TusUploadFailed(e)
        raise e
Perform actual request .
45,215
def get_fingerprint(self, fs):
    """Return a unique md5-based fingerprint for the given file stream.

    The first block is passed through self._encode_data before hashing,
    matching the original behavior; subsequent blocks are hashed raw.
    """
    digest = hashlib.md5()
    block = self._encode_data(fs.read(self.BLOCK_SIZE))
    while block:
        digest.update(block)
        block = fs.read(self.BLOCK_SIZE)
    return 'md5:' + digest.hexdigest()
Return a unique fingerprint string value based on the file stream received
45,216
def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
    """Called to initialize the HTTPAdapter when no proxy is used.

    Pins the ssl protocol version, falling back to PROTOCOL_SSLv23 on
    older ssl modules that lack PROTOCOL_TLS (both auto-negotiate).
    """
    pool_kwargs['ssl_version'] = getattr(ssl, 'PROTOCOL_TLS', ssl.PROTOCOL_SSLv23)
    return super(SSLAdapter, self).init_poolmanager(connections, maxsize, block, **pool_kwargs)
Called to initialize the HTTPAdapter when no proxy is used .
45,217
def proxy_manager_for(self, proxy, **proxy_kwargs):
    """Called to initialize the HTTPAdapter when a proxy is used.

    Pins the ssl protocol version, falling back to PROTOCOL_SSLv23 on
    older ssl modules that lack PROTOCOL_TLS (both auto-negotiate).
    """
    proxy_kwargs['ssl_version'] = getattr(ssl, 'PROTOCOL_TLS', ssl.PROTOCOL_SSLv23)
    return super(SSLAdapter, self).proxy_manager_for(proxy, **proxy_kwargs)
Called to initialize the HTTPAdapter when a proxy is used .
45,218
def make_calls(self, num_calls=1):
    """Block until ``num_calls`` fit under the per-second limit, then record them."""
    self._cull()
    while True:
        if self._outstanding_calls + num_calls <= self._max_calls_per_second:
            break
        # Yield to other threads while waiting for old call slots to expire.
        time.sleep(0)
        self._cull()
    self._call_times.append(self.CallRecord(time=time.time(), num_calls=num_calls))
    self._outstanding_calls += num_calls
Adds appropriate sleep to avoid making too many calls .
45,219
def _cull ( self ) : right_now = time . time ( ) cull_from = - 1 for index in range ( len ( self . _call_times ) ) : if right_now - self . _call_times [ index ] . time >= 1.0 : cull_from = index self . _outstanding_calls -= self . _call_times [ index ] . num_calls else : break if cull_from > - 1 : self . _call_times = self . _call_times [ cull_from + 1 : ]
Remove calls more than 1 second old from the queue .
45,220
def map_with_retries(self, requests, responses_for_requests):
    """Provides session-based retry functionality.

    Resolves each request's future and stores successful responses in
    ``responses_for_requests`` keyed by request. Requests whose future
    raises RequestException are retried recursively while
    ``self.total_retries`` remains positive. A 403 response raises
    InvalidRequestError (whether that is itself retried depends on its
    relationship to RequestException -- not visible from here).
    """
    retries = []
    response_futures = [preq.callable() for preq in requests]

    for request, response_future in zip(requests, response_futures):
        try:
            response = response_future.result()
            if response is not None and response.status_code == 403:
                logging.warning('Request to {} caused a 403 response status code.'.format(request.url))
                raise InvalidRequestError('Access forbidden')
            if response is not None:
                responses_for_requests[request] = response
        except RequestException as re:
            logging.error('An exception was raised for {}: {}'.format(request.url, re))
            if self.total_retries > 0:
                self.total_retries -= 1
                retries.append(request)

    # Recurse on the failed subset until success or retries are exhausted.
    if retries:
        self.map_with_retries(retries, responses_for_requests)
Provides session - based retry functionality
45,221
def multi_get(self, urls, query_params=None, to_json=True):
    """Issue multiple GET requests and return their (optionally JSON-decoded) responses."""
    return self._multi_request(
        MultiRequest._VERB_GET, urls, query_params,
        data=None, to_json=to_json,
    )
Issue multiple GET requests .
45,222
def multi_post(self, urls, query_params=None, data=None, to_json=True, send_as_file=False):
    """Issue multiple POST requests and return their (optionally JSON-decoded) responses."""
    return self._multi_request(
        MultiRequest._VERB_POST, urls, query_params, data,
        to_json=to_json, send_as_file=send_as_file,
    )
Issue multiple POST requests .
45,223
def _zip_request_params ( self , urls , query_params , data ) : if not isinstance ( urls , list ) : urls = [ urls ] if not isinstance ( query_params , list ) : query_params = [ query_params ] if not isinstance ( data , list ) : data = [ data ] url_count = len ( urls ) query_param_count = len ( query_params ) data_count = len ( data ) max_count = max ( url_count , query_param_count , data_count ) if ( max_count > url_count > 1 or max_count > query_param_count > 1 or max_count > data_count > 1 ) : raise InvalidRequestError ( 'Mismatched parameter count url_count:{0} query_param_count:{1} data_count:{2} max_count:{3}' , url_count , query_param_count , data_count , max_count , ) if url_count < max_count : urls = urls * max_count if query_param_count < max_count : query_params = query_params * max_count if data_count < max_count : data = data * max_count return list ( zip ( urls , query_params , data ) )
Massages inputs and returns a list of 3 - tuples zipping them up .
45,224
def _wait_for_response(self, requests):
    """Issues a batch of requests and waits for the responses.

    If some of the requests fail it will retry the failed ones up to
    ``self._max_retry`` times. Returns the responses in request order,
    with None in place of requests that never succeeded.
    """
    failed_requests = []
    responses_for_requests = OrderedDict.fromkeys(requests)

    for retry in range(self._max_retry):
        try:
            logging.debug('Try #{0}'.format(retry + 1))
            self._availability_limiter.map_with_retries(requests, responses_for_requests)

            failed_requests = []
            for request, response in responses_for_requests.items():
                if self._drop_404s and response is not None and response.status_code == 404:
                    # 404s are deliberately dropped, not retried.
                    logging.warning('Request to {0} failed with status code 404, dropping.'.format(request.url))
                elif not response:
                    failed_requests.append((request, response))

            if not failed_requests:
                break

            logging.warning('Try #{0}. Expected {1} successful response(s) but only got {2}.'.format(
                retry + 1, len(requests), len(requests) - len(failed_requests),
            ))

            # Retry only the subset that failed.
            requests = [fr[0] for fr in failed_requests]
        except InvalidRequestError:
            raise
        except Exception as e:
            # Unexpected errors: log and fall through to the next retry round.
            logging.exception('Try #{0}. Exception occured: {1}. Retrying.'.format(retry + 1, e))
            pass

    if failed_requests:
        logging.warning('Still {0} failed request(s) after {1} retries:'.format(
            len(failed_requests), self._max_retry,
        ))
        for failed_request, failed_response in failed_requests:
            if failed_response is not None:
                # Escape non-ascii response text so it is loggable anywhere.
                failed_response_text = failed_response.text.encode('ascii', 'xmlcharrefreplace')
                logging.warning('Request to {0} failed with status code {1}. Response text: {2}'.format(
                    failed_request.url, failed_response.status_code, failed_response_text,
                ))
            else:
                logging.warning('Request to {0} failed with None response.'.format(failed_request.url))

    return list(responses_for_requests.values())
Issues a batch of requests and waits for the responses . If some of the requests fail it will retry the failed ones up to _max_retry times .
45,225
def _convert_to_json ( self , response ) : try : return response . json ( ) except ValueError : logging . warning ( 'Expected response in JSON format from {0} but the actual response text is: {1}' . format ( response . request . url , response . text , ) ) return None
Converts response to JSON . If the response cannot be converted to JSON then None is returned .
45,226
def _multi_request(self, verb, urls, query_params, data, to_json=True, send_as_file=False):
    """Issues multiple batches of simultaneous HTTP requests and waits for responses.

    Requests are zipped with their params, split into batches of at most
    ``self._max_requests``, rate-limited when a limiter is configured, and
    issued concurrently. Returns one entry per request (None on failure),
    JSON-decoded when ``to_json`` is set.
    """
    if not urls:
        raise InvalidRequestError('No URL supplied')

    # Pair each url with its query params and payload, then batch them.
    request_params = self._zip_request_params(urls, query_params, data)
    batch_of_params = [
        request_params[pos:pos + self._max_requests]
        for pos in range(0, len(request_params), self._max_requests)
    ]

    all_responses = []
    for param_batch in batch_of_params:
        if self._rate_limiter:
            self._rate_limiter.make_calls(num_calls=len(param_batch))
        prepared_requests = [
            self._create_request(
                verb, url, query_params=query_param, data=datum, send_as_file=send_as_file,
            ) for url, query_param, datum in param_batch
        ]
        responses = self._wait_for_response(prepared_requests)
        for response in responses:
            if response:
                all_responses.append(self._convert_to_json(response) if to_json else response)
            else:
                # Keep placeholder Nones so results stay aligned with inputs.
                all_responses.append(None)
    return all_responses
Issues multiple batches of simultaneous HTTP requests and waits for responses .
45,227
def error_handling(cls, fn):
    """Decorator that logs InvalidRequestError details (request/response) before re-raising."""
    def wrapper(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except InvalidRequestError as e:
            write_exception(e)
            if hasattr(e, 'request'):
                write_error_message('request {0}'.format(repr(e.request)))
            if hasattr(e, 'response'):
                write_error_message('response {0}'.format(repr(e.response)))
            raise e
    return wrapper
Decorator to handle errors
45,228
def acquire_node(self, node):
    """Try to take the lock on a single redis node; False on connection problems."""
    try:
        return node.set(self.resource, self.lock_key, nx=True, px=self.ttl)
    except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError):
        # Treat an unreachable node as an unacquired lock.
        return False
acquire a single redis node
45,229
def release_node(self, node):
    """Best-effort release of the lock on a single redis node; connection errors are ignored."""
    try:
        node._release_script(keys=[self.resource], args=[self.lock_key])
    except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError):
        pass
release a single redis node
45,230
def _extract_response_xml(self, domain, response):
    """Extract XML content of an HTTP response into dictionary format.

    Collects Alexa POPULARITY/REACH/RANK values into an attributes dict;
    unparsable XML yields only the domain key.
    """
    attributes = {}
    # Maps each interesting tag to the XML attribute that carries its value.
    alexa_keys = {'POPULARITY': 'TEXT', 'REACH': 'RANK', 'RANK': 'DELTA'}
    try:
        xml_root = ET.fromstring(response._content)
        # NOTE(review): 'SD//' uses ElementPath syntax with a trailing '//',
        # which is deprecated in newer Pythons ('SD//*' is the modern form) --
        # confirm the target Python version before changing.
        for xml_child in xml_root.findall('SD//'):
            if xml_child.tag in alexa_keys and alexa_keys[xml_child.tag] in xml_child.attrib:
                attributes[xml_child.tag.lower()] = xml_child.attrib[alexa_keys[xml_child.tag]]
    except ParseError:
        # Malformed XML: fall through with whatever was collected so far.
        pass
    attributes['domain'] = domain
    return {'attributes': attributes}
Extract XML content of an HTTP response into dictionary format .
45,231
def _bulk_cache_lookup ( self , api_name , keys ) : if self . _cache : responses = self . _cache . bulk_lookup ( api_name , keys ) missing_keys = [ key for key in keys if key not in responses . keys ( ) ] return ( responses , missing_keys ) return ( { } , keys )
Performs a bulk cache lookup and returns a tuple with the results found and the keys missing in the cache . If the cache is not configured it will return an empty dictionary of found results and the initial list of keys .
45,232
def _write_cache_to_file(self):
    """Persist the in-memory cache to disk as JSON."""
    with open(self._cache_file_name, 'w') as fp:
        fp.write(simplejson.dumps(self._cache))
Write the contents of the cache to a file on disk .
45,233
def _read_cache_from_file(self):
    """Load the cache from disk; missing or corrupt files yield an empty cache."""
    try:
        with open(self._cache_file_name, 'r') as fp:
            return simplejson.loads(fp.read())
    except (IOError, JSONDecodeError):
        return {}
Read the contents of the cache from a file on disk .
45,234
def bulk_lookup(self, api_name, keys):
    """Return a dict of cached values found for ``keys`` (misses are omitted)."""
    found = {}
    for key in keys:
        hit = self.lookup_value(api_name, key)
        if hit is not None:
            found[key] = hit
    return found
Perform lookup on an enumerable of keys .
45,235
def _cached_by_domain ( api_name ) : def wrapped ( func ) : def decorated ( self , domains ) : if not self . _cache : return func ( self , domains ) all_responses = self . _cache . bulk_lookup ( api_name , domains ) domains = list ( set ( domains ) - set ( all_responses ) ) if domains : response = func ( self , domains ) if not response : raise ResponseError ( "No response for uncached domains" ) for domain in response : self . _cache . cache_value ( api_name , domain , response [ domain ] ) all_responses [ domain ] = response [ domain ] return all_responses return decorated return wrapped
A caching wrapper for functions that take a list of domains as parameters .
45,236
def domain_score(self, domains):
    """Calls the (deprecated) domain scores endpoint for the given domains."""
    warn(
        'OpenDNS Domain Scores endpoint is deprecated. Use '
        'InvestigateApi.categorization() instead',
        DeprecationWarning,
    )
    return self._multi_post('domains/score/', domains)
Calls domain scores endpoint .
45,237
def _multi_get ( self , cache_api_name , fmt_url_path , url_params , query_params = None ) : all_responses = { } if self . _cache : all_responses = self . _cache . bulk_lookup ( cache_api_name , url_params ) url_params = [ key for key in url_params if key not in all_responses . keys ( ) ] if len ( url_params ) : urls = self . _to_urls ( fmt_url_path , url_params ) responses = self . _requests . multi_get ( urls , query_params ) for url_param , response in zip ( url_params , responses ) : if self . _cache : self . _cache . cache_value ( cache_api_name , url_param , response ) all_responses [ url_param ] = response return all_responses
Makes multiple GETs to an OpenDNS endpoint .
45,238
def security(self, domains):
    """Calls the security endpoint for each domain."""
    return self._multi_get('opendns-security', u'security/name/{0}.json', domains)
Calls security end point and adds an is_suspicious key to each response .
45,239
def whois_emails(self, emails):
    """Calls the WHOIS email endpoint for each email address."""
    return self._multi_get('opendns-whois-emails', u'whois/emails/{0}', emails)
Calls WHOIS Email end point
45,240
def whois_nameservers(self, nameservers):
    """Calls the WHOIS nameserver endpoint for each nameserver."""
    return self._multi_get('opendns-whois-nameservers', u'whois/nameservers/{0}', nameservers)
Calls WHOIS Nameserver end point
45,241
def whois_domains(self, domains):
    """Calls the WHOIS domain endpoint for each domain."""
    return self._multi_get('opendns-whois-domain', u'whois/{0}', domains)
Calls WHOIS domain end point
45,242
def whois_domains_history(self, domains):
    """Calls the WHOIS domain history endpoint for each domain."""
    return self._multi_get('opendns-whois-domain-history', u'whois/{0}/history', domains)
Calls WHOIS domain history end point
45,243
def domain_tag(self, domains):
    """Fetches the latest tags (block-list date ranges) for each domain."""
    return self._multi_get('opendns-domain_tag', u'domains/{0}/latest_tags', domains)
Get the date range when a domain is part of the OpenDNS block list .
45,244
def rr_history(self, ips):
    """Fetches the domains related to each input ip (A-record history)."""
    return self._multi_get('opendns-rr_history', u'dnsdb/ip/a/{0}.json', ips)
Get the domains related to input ips .
45,245
def sample(self, hashes):
    """Fetches sample information for each hash."""
    return self._multi_get('opendns-sample', u'sample/{0}', hashes)
Get the information about a sample based on its hash .
45,246
def search(self, patterns, start=30, limit=1000, include_category=False):
    """Performs pattern searches against the Investigate database.

    ``start`` is a look-back window in days; ``include_category`` is sent
    to the API as a lower-case string flag.
    """
    query_params = {
        'start': '-{0}days'.format(start),
        'limit': limit,
        'includecategory': str(include_category).lower(),
    }
    return self._multi_get('opendns-patterns', u'search/{0}', patterns, query_params)
Performs pattern searches against the Investigate database .
45,247
def risk_score(self, domains):
    """Performs Umbrella risk score analysis on the input domains."""
    return self._multi_get('opendns-risk_score', u'domains/risk-score/{0}', domains)
Performs Umbrella risk score analysis on the input domains
45,248
def _extract_all_responses ( self , resources , api_endpoint , api_name ) : all_responses , resources = self . _bulk_cache_lookup ( api_name , resources ) resource_chunks = self . _prepare_resource_chunks ( resources ) response_chunks = self . _request_reports ( "resource" , resource_chunks , api_endpoint ) self . _extract_response_chunks ( all_responses , response_chunks , api_name ) return all_responses
Aux function to extract all the API endpoint responses .
45,249
def get_url_distribution(self, params=None):
    """Retrieves a live feed with the latest URLs submitted to VT (never cached)."""
    params = params or {}
    all_responses = {}
    response_chunks = self._request_reports(
        list(params.keys()), list(params.values()), 'url/distribution')
    self._extract_response_chunks(all_responses, response_chunks, 'virustotal-url-distribution')
    return all_responses
Retrieves a live feed with the latest URLs submitted to VT .
45,250
def get_url_reports(self, resources):
    """Retrieves scan reports for the given URLs (batched newline-delimited)."""
    api_name = 'virustotal-url-reports'
    all_responses, resources = self._bulk_cache_lookup(api_name, resources)
    response_chunks = self._request_reports(
        "resource", self._prepare_resource_chunks(resources, '\n'), 'url/report')
    self._extract_response_chunks(all_responses, response_chunks, api_name)
    return all_responses
Retrieves a scan report on a given URL .
45,251
def get_ip_reports(self, ips):
    """Retrieves the most recent VT info for a set of ips, caching fresh results."""
    api_name = 'virustotal-ip-address-reports'
    all_responses, ips = self._bulk_cache_lookup(api_name, ips)
    responses = self._request_reports("ip", ips, 'ip-address/report')
    for ip, response in zip(ips, responses):
        if self._cache:
            self._cache.cache_value(api_name, ip, response)
        all_responses[ip] = response
    return all_responses
Retrieves the most recent VT info for a set of ips .
45,252
def get_file_clusters(self, date):
    """Retrieves file similarity clusters for a given time frame.

    NOTE(review): the second value returned by the cache lookup
    (``resources``) is never used -- the request is always issued with the
    original ``date``, so cached entries may be refetched. Confirm whether
    this is intended before relying on the caching here.
    """
    api_name = 'virustotal-file-clusters'
    (all_responses, resources) = self._bulk_cache_lookup(api_name, date)
    response = self._request_reports("date", date, 'file/clusters')
    self._extract_response_chunks(all_responses, response, api_name)
    return all_responses
Retrieves file similarity clusters for a given time frame .
45,253
def _prepare_resource_chunks ( self , resources , resource_delim = ',' ) : return [ self . _prepare_resource_chunk ( resources , resource_delim , pos ) for pos in range ( 0 , len ( resources ) , self . _resources_per_req ) ]
As in some VirusTotal API methods the call can be made for multiple resources at once this method prepares a list of concatenated resources according to the maximum number of resources per requests .
45,254
def _extract_response_chunks ( self , all_responses , response_chunks , api_name ) : for response_chunk in response_chunks : if not isinstance ( response_chunk , list ) : response_chunk = [ response_chunk ] for response in response_chunk : if not response : continue if self . _cache : self . _cache . cache_value ( api_name , response [ 'resource' ] , response ) all_responses [ response [ 'resource' ] ] = response
Extracts and caches the responses from the response chunks in case of the responses for the requests containing multiple concatenated resources . Extracted responses are added to the already cached responses passed in the all_responses parameter .
45,255
def get_editor_widget(self, request, plugins, plugin):
    """Returns the Django form Widget to be used for the text area.

    Builds a TextEditorWidget wired to the admin urls used for rendering
    child plugins and for cancelling an edit session.
    """
    cancel_url_name = self.get_admin_url_name('delete_on_cancel')
    cancel_url = reverse('admin:%s' % cancel_url_name)

    render_plugin_url_name = self.get_admin_url_name('render_plugin')
    render_plugin_url = reverse('admin:%s' % render_plugin_url_name)

    action_token = self.get_action_token(request, plugin)

    # Only delete the text plugin on cancel when explicitly requested and
    # the plugin has no saved instance yet (i.e. it was just created).
    delete_text_on_cancel = (
        'delete-on-cancel' in request.GET and
        not plugin.get_plugin_instance()[0]
    )

    widget = TextEditorWidget(
        installed_plugins=plugins, pk=plugin.pk,
        placeholder=plugin.placeholder,
        plugin_language=plugin.language,
        configuration=self.ckeditor_configuration,
        render_plugin_url=render_plugin_url,
        cancel_url=cancel_url,
        action_token=action_token,
        delete_on_cancel=delete_text_on_cancel,
    )
    return widget
Returns the Django form Widget to be used for the text area
45,256
def get_form_class(self, request, plugins, plugin):
    """Returns a subclass of Form to be used by this plugin.

    The form's ``body`` field uses the CKEditor widget; when the plugin
    already has a saved instance, its body is pre-rendered to admin HTML
    and injected as the field's initial value.
    """
    widget = self.get_editor_widget(
        request=request,
        plugins=plugins,
        plugin=plugin,
    )

    instance = plugin.get_plugin_instance()[0]

    if instance:
        context = RequestContext(request)
        context['request'] = request
        rendered_text = plugin_tags_to_admin_html(
            text=instance.body,
            context=context,
        )
    else:
        rendered_text = None

    class TextPluginForm(self.form):
        body = CharField(widget=widget, required=False)

        def __init__(self, *args, **kwargs):
            initial = kwargs.pop('initial', {})

            if rendered_text:
                # Show the rendered plugin markup instead of raw tags.
                initial['body'] = rendered_text

            super(TextPluginForm, self).__init__(*args, initial=initial, **kwargs)

    return TextPluginForm
Returns a subclass of Form to be used by this plugin
45,257
def _plugin_tags_to_html(text, output_func):
    """Convert plugin object tags in ``text`` into markup via ``output_func``.

    Tags referencing unknown plugin ids are dropped entirely.
    """
    plugins_by_id = get_plugins_from_text(text)

    def _render_tag(match):
        try:
            plugin = plugins_by_id[int(match.groupdict()['pk'])]
        except KeyError:
            # Unknown plugin id: remove the tag from the output.
            return u''
        plugin._render_meta.text_enabled = True
        return output_func(plugin, match)

    return OBJ_ADMIN_RE.sub(_render_tag, text)
Convert plugin object tags into the form for public site .
45,258
def extract_images(data, plugin):
    """Extracts base64 encoded images from drag and drop actions in browser
    and saves those images as plugins.

    Returns the html with each inline data-uri <img> replaced by a plugin
    tag; the input is returned unchanged when saving is disabled or no
    inline image was found.
    """
    if not settings.TEXT_SAVE_IMAGE_FUNCTION:
        return data
    tree_builder = html5lib.treebuilders.getTreeBuilder('dom')
    parser = html5lib.html5parser.HTMLParser(tree=tree_builder)
    dom = parser.parse(data)
    found = False
    for img in dom.getElementsByTagName('img'):
        src = img.getAttribute('src')
        if not src.startswith('data:'):
            # Only inline data-uri images are extracted.
            continue
        width = img.getAttribute('width')
        height = img.getAttribute('height')
        # Parse the data uri into mime type, encoding and payload.
        data_re = re.compile(r'data:(?P<mime_type>[^"]*);(?P<encoding>[^"]*),(?P<data>[^"]*)')
        m = data_re.search(src)
        dr = m.groupdict()
        mime_type = dr['mime_type']
        image_data = dr['data']
        # NOTE(review): str.find() returns -1 (truthy) when ';' is absent,
        # so this branch is also taken in that case. Harmless here because
        # split(';')[0] of a semicolon-free string is the string itself,
        # but "!= -1" was probably intended.
        if mime_type.find(';'):
            mime_type = mime_type.split(';')[0]
        try:
            image_data = base64.b64decode(image_data)
        except Exception:
            # Fall back to the urlsafe alphabet before giving up.
            image_data = base64.urlsafe_b64decode(image_data)
        try:
            image_type = mime_type.split('/')[1]
        except IndexError:
            image_type = ''
        image = BytesIO(image_data)
        if image_type == 'jpg' or image_type == 'jpeg':
            file_ending = 'jpg'
        elif image_type == 'png':
            file_ending = 'png'
        elif image_type == 'gif':
            file_ending = 'gif'
        else:
            # Unknown type: re-encode through PIL as JPEG.
            im = Image.open(image)
            new_image = BytesIO()
            file_ending = 'jpg'
            im.save(new_image, 'JPEG')
            new_image.seek(0)
            image = new_image
        filename = u'%s.%s' % (uuid.uuid4(), file_ending)
        # Store the decoded image as a child plugin and swap the <img> node
        # for the plugin's placeholder tag.
        image_plugin = img_data_to_plugin(filename, image, parent_plugin=plugin, width=width, height=height)
        new_img_html = plugin_to_tag(image_plugin)
        img.parentNode.replaceChild(parser.parseFragment(new_img_html).childNodes[0], img)
        found = True
    if found:
        # Serialise only the <body> contents back to html.
        return u''.join([y.toxml() for y in dom.getElementsByTagName('body')[0].childNodes])
    else:
        return data
extracts base64 encoded images from drag and drop actions in browser and saves those images as plugins
45,259
def default_config_filename(root_dir=None):
    """Returns the default path of the configuration file.

    Prefers ``<root>/locale``; falls back to ``<root>/conf/locale`` when
    the former does not exist.
    """
    base = Path(root_dir) if root_dir else Path('.').abspath()
    locale_dir = base / 'locale'
    if not os.path.exists(locale_dir):
        locale_dir = base / 'conf' / 'locale'
    return locale_dir / BASE_CONFIG_FILENAME
Returns the default name of the configuration file .
45,260
def rtl_langs(self):
    """Returns the sorted, de-duplicated translated locale codes that are right-to-left."""
    base_rtl = ('ar', 'fa', 'he', 'ur')
    return sorted({lang for lang in self.translated_locales if lang.startswith(base_rtl)})
Returns the set of translated RTL language codes present in self . locales . Ignores source locale .
45,261
def clean_conf_folder(self, locale):
    """Remove the configuration directory for ``locale``."""
    self.configuration.get_messages_dir(locale).removedirs_p()
Remove the configuration directory for locale
45,262
def segment_pofiles(configuration, locale):
    """Segment all the pofiles for ``locale``; returns the set of files written."""
    files_written = set()
    for filename, segments in configuration.segment.items():
        path = configuration.get_messages_dir(locale) / filename
        files_written.update(segment_pofile(path, segments))
    return files_written
Segment all the pofiles for locale .
45,263
def segment_pofile(filename, segments):
    """Segment a .po file using patterns in ``segments``.

    Each message is routed to the first segment file whose glob pattern
    matches one of its occurrence files; messages matching several
    segments (or none) stay in the original file. Returns the set of
    output files written.
    """
    reading_msg = "Reading {num} entries from {file}"
    writing_msg = "Writing {num} entries to {file}"

    source_po = polib.pofile(filename)
    LOG.info(reading_msg.format(file=filename, num=len(source_po)))

    # Copy the source file then empty it: this preserves the metadata and
    # header while the entries get redistributed below.
    remaining_po = copy.deepcopy(source_po)
    remaining_po[:] = []

    segment_po_files = {filename: remaining_po}
    segment_patterns = []
    for segmentfile, patterns in segments.items():
        segment_po_files[segmentfile] = copy.deepcopy(remaining_po)
        segment_patterns.extend((pat, segmentfile) for pat in patterns)

    for msg in source_po:
        msg_segments = set()
        for occ_file, _ in msg.occurrences:
            for pat, segment_file in segment_patterns:
                if fnmatch.fnmatch(occ_file, pat):
                    msg_segments.add(segment_file)
                    break
            else:
                # No pattern matched: the occurrence belongs to the original file.
                msg_segments.add(filename)

        assert msg_segments
        if len(msg_segments) == 1:
            # Unambiguous: move the message into its single segment.
            segment_file = msg_segments.pop()
            segment_po_files[segment_file].append(msg)
        else:
            # Ambiguous across segments: keep the message in the original file.
            remaining_po.append(msg)

    files_written = set()
    for segment_file, pofile in segment_po_files.items():
        out_file = filename.dirname() / segment_file
        if not pofile:
            LOG.error("No messages to write to %s, did you run segment twice?", out_file)
        else:
            LOG.info(writing_msg.format(file=out_file, num=len(pofile)))
            pofile.save(out_file)
            files_written.add(out_file)

    return files_written
Segment a . po file using patterns in segments .
45,264
def fix_header(pofile):
    """Replace default gettext boilerplate headers with edX-specific values in place."""
    # Drop the fuzzy flag from the metadata entry.
    pofile.metadata_is_fuzzy = []
    header = pofile.header
    fixes = (
        ('SOME DESCRIPTIVE TITLE', EDX_MARKER),
        ('Translations template for PROJECT.', EDX_MARKER),
        ('YEAR', str(datetime.utcnow().year)),
        ('ORGANIZATION', 'edX'),
        ("THE PACKAGE'S COPYRIGHT HOLDER", "EdX"),
        (
            'This file is distributed under the same license as the PROJECT project.',
            'This file is distributed under the GNU AFFERO GENERAL PUBLIC LICENSE.',
        ),
        (
            'This file is distributed under the same license as the PACKAGE package.',
            'This file is distributed under the GNU AFFERO GENERAL PUBLIC LICENSE.',
        ),
        ('FIRST AUTHOR <EMAIL@ADDRESS>', 'EdX Team <info@edx.org>'),
    )
    for src, dest in fixes:
        header = header.replace(src, dest)
    pofile.header = header
Replace default headers with edX headers
45,265
def strip_key_strings(pofile):
    """Remove all key-string entries from ``pofile`` in place.

    Key strings belong only in messages.po, not in any other po file.
    """
    kept = [entry for entry in pofile if not is_key_string(entry.msgid)]
    del pofile[:]
    pofile += kept
Removes all entries in PO which are key strings . These entries should appear only in messages . po not in any other po files .
45,266
def rename_source_file(self, src, dst):
    """Rename ``src`` to ``dst`` inside the source messages directory, ignoring OS errors."""
    try:
        os.rename(self.source_msgs_dir.joinpath(src), self.source_msgs_dir.joinpath(dst))
    except OSError:
        # Best-effort: a missing source file is not an error here.
        pass
Rename a file in the source directory .
45,267
def get_valid_commands():
    """Returns the i18n subcommand module names that expose a ``main`` entry point."""
    module_names = [
        m.basename().split('.')[0]
        for m in Path(__file__).dirname().files('*.py')
    ]
    commands = []
    for modname in module_names:
        if modname == 'main':
            # The dispatcher itself is not a subcommand.
            continue
        mod = importlib.import_module('i18n.%s' % modname)
        if hasattr(mod, 'main'):
            commands.append(modname)
    return commands
Returns valid commands .
45,268
def error_message():
    """Write the valid commands to stderr; returns -1 for use as an exit status."""
    sys.stderr.write('valid commands:\n')
    for cmd in get_valid_commands():
        sys.stderr.write('\t%s\n' % cmd)
    return -1
Writes out error message specifying the valid commands .
45,269
def main():
    """Run the i18n subcommand named in sys.argv[1].

    Returns the subcommand's result, or error_message()'s exit code when
    the command is missing or unknown.
    """
    if len(sys.argv) < 2:
        return error_message()
    command = sys.argv[1]
    try:
        module = importlib.import_module('i18n.%s' % command)
        # Stash the remaining CLI args where the subcommand expects them.
        module.main.args = sys.argv[2:]
    except (ImportError, AttributeError):
        return error_message()
    return module.main()
Executes the given command . Returns error_message if command is not valid .
45,270
def validate_po_files(configuration, locale_dir, root_dir=None, report_empty=False, check_all=False):
    """Validate the .po files found under `root_dir` (default: `locale_dir`).

    Files that are the product of a merge (per configuration.generate_merge)
    are skipped unless `check_all` is true.  Returns True if any problem was
    found in any file.

    Args:
        configuration: project configuration object; supplies generate_merge.
        locale_dir: base directory used for msgfmt's working directory.
        root_dir: directory tree to walk; falls back to locale_dir.
        report_empty (bool): forwarded to check_messages to flag empty
            translations as problems.
        check_all (bool): also validate merged files.
    """
    found_problems = False
    merged_files = configuration.generate_merge.keys()
    for dirpath, __, filenames in os.walk(root_dir if root_dir else locale_dir):
        for name in filenames:
            __, ext = os.path.splitext(name)
            filename = os.path.join(dirpath, name)
            if ext.lower() == '.po' and (check_all or os.path.basename(filename) not in merged_files):
                # First validate the file's format with GNU msgfmt.
                if msgfmt_check_po_file(locale_dir, filename):
                    found_problems = True
                # English is the source language, so its messages are not
                # checked for translation problems.
                if "/locale/en/" not in filename:
                    problems = check_messages(filename, report_empty)
                    if problems:
                        report_problems(filename, problems)
                        found_problems = True
                    # A companion .dup file records duplicate msgids found
                    # during an earlier merge step.
                    dup_filename = filename.replace('.po', '.dup')
                    has_duplicates = os.path.exists(dup_filename)
                    if has_duplicates:
                        log.warning(" Duplicates found in %s, details in .dup file", dup_filename)
                        found_problems = True
                    if not (problems or has_duplicates):
                        log.info(" No problems found in %s", filename)
    return found_problems
Validate all of the po files found in the root directory that are not product of a merge .
45,271
def msgfmt_check_po_file(locale_dir, filename):
    """Run `msgfmt -c` on one .po file; return True if msgfmt complained.

    msgfmt's diagnostics (if any) are surfaced through the module logger.
    """
    rfile = os.path.relpath(filename, locale_dir)
    out, err = call('msgfmt -c -o /dev/null {}'.format(rfile), working_directory=locale_dir)
    if not err:
        return False
    # msgfmt writes diagnostics to stderr; log both streams for context.
    log.info(u'\n' + out.decode('utf8'))
    log.warning(u'\n' + err.decode('utf8'))
    return True
Call GNU msgfmt - c on each . po file to validate its format . Any errors caught by msgfmt are logged to log .
45,272
def tags_in_string(msg):
    """Return the set of non-linguistic tags/entities found in `msg`."""
    def is_linguistic_tag(tag):
        # Entities (&amp; etc.) and <abbr> markup are part of the prose
        # itself, not formatting, so they are excluded from the result.
        return tag.startswith("&") or any(
            marker in tag for marker in ("<abbr>", "<abbr ", "</abbr>")
        )

    __, tags = Converter().detag_string(msg)
    return {tag for tag in tags if not is_linguistic_tag(tag)}
Return the set of tags in a message string .
45,273
def astral(msg):
    """Return True if `msg` has characters outside the Basic Multilingual
    Plane (code points above U+FFFF).
    """
    # Encode as UTF-32 and drop the 4-byte BOM, leaving one 32-bit
    # unsigned integer per character.
    utf32 = msg.encode("utf32")[4:]
    # BUG FIX: use integer division. On Python 3, len(utf32) / 4 is a
    # float, which struct.unpack rejects as a repeat count (TypeError).
    code_points = struct.unpack("%dI" % (len(utf32) // 4), utf32)
    return any(cp > 0xFFFF for cp in code_points)
Does msg have characters outside the Basic Multilingual Plane?
45,274
def report_problems(filename, problems):
    """Write the problems found in `filename` to a companion .prob file.

    Each problem is a tuple of (description, msgid, *translations).  The
    report is also echoed through the module logger, and a summary error
    is logged at the end.
    """
    problem_file = filename.replace(".po", ".prob")
    id_filler = textwrap.TextWrapper(width=79, initial_indent=" msgid: ", subsequent_indent=" " * 9)
    # BUG FIX: the translation wrapper's initial_indent was a mangled,
    # unterminated string literal (a syntax error); restore the
    # "----->" marker prefix used to flag translation lines.
    tx_filler = textwrap.TextWrapper(width=79, initial_indent=" -----> ", subsequent_indent=" " * 9)
    with codecs.open(problem_file, "w", encoding="utf8") as prob_file:
        for problem in problems:
            desc, msgid = problem[:2]
            prob_file.write(u"{}\n{}\n".format(desc, id_filler.fill(msgid)))
            info = u"{}\n{}\n".format(desc, id_filler.fill(msgid))
            for translation in problem[2:]:
                prob_file.write(u"{}\n".format(tx_filler.fill(translation)))
                info += u"{}\n".format(tx_filler.fill(translation))
            log.info(info)
            prob_file.write(u"\n")
    log.error(" %s problems in %s, details in .prob file", len(problems), filename)
Report on the problems found in filename .
45,275
def merge(configuration, locale, target='django.po', sources=('django-partial.po',), fail_if_missing=True):
    """For the given locale, merge the `sources` files into the `target` file.

    Note that the target file may itself be one of the sources.  Merging is
    done with GNU msgcat into an intermediate merged.po, which is cleaned
    (see clean_pofile) and then renamed to the target.  Duplicate msgids
    found during cleaning are recorded in a companion .dup file.

    Args:
        configuration: project configuration object (supplies message dirs).
        locale (str): locale code whose files are merged.
        target (str): filename to produce within the locale's messages dir.
        sources: iterable of filenames to merge, in order.
        fail_if_missing (bool): if False, return silently when a source
            file is missing; otherwise re-raise the validation error.
    """
    LOG.info('Merging %s locale %s', target, locale)
    locale_directory = configuration.get_messages_dir(locale)
    try:
        validate_files(locale_directory, sources)
    except Exception:
        if not fail_if_missing:
            return
        raise
    # Merge into an intermediate file so a failed merge never clobbers
    # an existing target.
    merge_cmd = 'msgcat -o merged.po ' + ' '.join(sources)
    execute(merge_cmd, working_directory=locale_directory)
    merged_filename = locale_directory.joinpath('merged.po')
    duplicate_entries = clean_pofile(merged_filename)
    target_filename = locale_directory.joinpath(target)
    os.rename(merged_filename, target_filename)
    # Record any duplicate entries so validate_po_files can report them.
    if duplicate_entries:
        dup_file = target_filename.replace(".po", ".dup")
        with codecs.open(dup_file, "w", encoding="utf8") as dfile:
            for (entry, translations) in duplicate_entries:
                dfile.write(u"{}\n".format(entry))
                dfile.write(u"Translations found were:\n\t{}\n\n".format(translations))
        LOG.warning(" %s duplicates in %s, details in .dup file", len(duplicate_entries), target_filename)
For the given locale merge the sources files to become the target file . Note that the target file might also be one of the sources .
45,276
def merge_files(configuration, locale, fail_if_missing=True):
    """Merge every file group configured in config.yaml for `locale`."""
    for target_name, source_names in configuration.generate_merge.items():
        merge(configuration, locale, target_name, source_names, fail_if_missing)
Merge all the files in locale as specified in config . yaml .
45,277
def clean_pofile(pofile_path):
    """Clean various aspects of a .po file.

    - Clears the fuzzy flag msgcat puts on the metadata entry.
    - Drops line numbers from occurrence references (they churn needlessly
      between extractions).
    - For entries msgcat marked fuzzy (merge conflicts), removes the flag,
      records the conflict, and keeps the first non-empty translation.
    - Rejects msgids that start or end with a newline.

    Saves the file in place and returns a list of
    (description, original_msgstr) tuples for the duplicates found.
    """
    pomsgs = pofile(pofile_path)
    pomsgs.metadata_is_fuzzy = False
    duplicate_entries = []
    for entry in pomsgs:
        # Drop line numbers; keep only the source filenames.
        entry.occurrences = [(filename, None) for filename, __ in entry.occurrences]
        # msgcat flags entries with conflicting translations as fuzzy.
        if 'fuzzy' in entry.flags:
            entry.flags = [f for f in entry.flags if f != 'fuzzy']
            dup_msg = 'Multiple translations found for single string.\n\tString "{0}"\n\tPresent in files {1}'.format(
                entry.msgid,
                [f for (f, __) in entry.occurrences]
            )
            duplicate_entries.append((dup_msg, entry.msgstr))
            # Keep only the first non-empty translation segment.
            for msgstr in DUPLICATE_ENTRY_PATTERN.split(entry.msgstr):
                if msgstr:
                    entry.msgstr = msgstr.strip()
                    if entry.msgid.startswith('\n') or entry.msgid.endswith('\n'):
                        raise ValueError(
                            u'{} starts or ends with a new line character, which is not allowed. '
                            'Please fix before continuing. Source string is found in {}'.format(
                                entry.msgid, entry.occurrences
                            ).encode('utf-8')
                        )
                    break
    pomsgs.save()
    return duplicate_entries
Clean various aspect of a . po file .
45,278
def new_filename(original_filename, new_locale):
    """Return `original_filename` re-rooted under locale `new_locale`.

    e.g. conf/locale/en/LC_MESSAGES/django.po ->
         conf/locale/<new_locale>/LC_MESSAGES/django.po
    """
    original = Path(original_filename)
    # Climb past <messages-dir>/<locale> to the locale root, then descend
    # through the new locale and the same messages directory name.
    locale_root = original.parent.parent.parent
    relocated = locale_root / new_locale / original.parent.name / original.name
    return relocated.abspath()
Returns a filename derived from original_filename using new_locale as the locale
45,279
def run(self, args):
    """Generate dummy-translation strings for all source .po files.

    Each configured dummy locale is paired positionally with a converter
    (Dummy, Dummy2, ArabicDummy); every .po file under the source messages
    directory is converted via make_dummy().

    Args:
        args: parsed command-line namespace; only `args.verbose` is read.
    """
    configuration = self.configuration
    source_messages_dir = configuration.source_messages_dir
    for locale, converter in zip(configuration.dummy_locales, [Dummy(), Dummy2(), ArabicDummy()]):
        print('Processing source language files into dummy strings, locale "{}"'.format(locale))
        for source_file in configuration.source_messages_dir.walkfiles('*.po'):
            if args.verbose:
                print(' ', source_file.relpath())
            make_dummy(source_messages_dir.joinpath(source_file), locale, converter)
        if args.verbose:
            print()
Generate dummy strings for all source po files .
45,280
def execute(command, working_directory=config.BASE_DIR, stderr=sp.STDOUT):
    """Execute a shell command in `working_directory`; output is ignored.

    Args:
        command (str): the command line to pass to the shell.
        working_directory: directory to run in.  NOTE: the default is
            bound to config.BASE_DIR once, at import time.
        stderr: destination for the child's stderr (defaults to stdout).

    Raises:
        subprocess.CalledProcessError: if the command exits non-zero
            (via check_call).
    """
    LOG.info("Executing in %s ...", working_directory)
    LOG.info(command)
    sp.check_call(command, cwd=working_directory, stderr=stderr, shell=True)
Executes shell command in a given working_directory . Command is a string to pass to the shell . Output is ignored .
45,281
def remove_file(filename, verbose=True):
    """Delete `filename`; log a warning if it does not exist.

    If `verbose` is true the removal is logged.  Logged paths are shown
    relative to config.BASE_DIR to cut down on noise in output.
    """
    display_name = os.path.relpath(filename, config.BASE_DIR)
    if verbose:
        LOG.info('Deleting file %s', display_name)
    if os.path.exists(filename):
        os.remove(filename)
    else:
        LOG.warning("File does not exist: %s", display_name)
Attempt to delete filename. If verbose is true, the removal is logged. A warning is logged if the file does not exist. Logged filenames are relative to config.BASE_DIR to cut down on noise in output.
45,282
def push(*resources):
    """Push source (English) translation files to Transifex.

    With no arguments every resource is pushed in one call; otherwise
    each named resource is pushed individually.
    """
    base_cmd = 'tx push -s'
    if not resources:
        execute(base_cmd)
        return
    for resource in resources:
        execute('{} -r {}'.format(base_cmd, resource))
Push translation source English files to Transifex .
45,283
def pull_all_ltr(configuration):
    """Pull every translation — reviewed or not — for all LTR languages."""
    print("Pulling all translated LTR languages from transifex...")
    for lang_code in configuration.ltr_langs:
        # Remove any stale local copy before pulling a fresh one.
        removal_cmd = 'rm -rf conf/locale/' + lang_code
        print(removal_cmd)
        execute(removal_cmd)
        execute('tx pull -l ' + lang_code)
    clean_translated_locales(configuration, langs=configuration.ltr_langs)
Pulls all translations - reviewed or not - for LTR languages
45,284
def pull_all_rtl(configuration):
    """Pull every translation — reviewed or not — for all RTL languages."""
    print("Pulling all translated RTL languages from transifex...")
    for lang_code in configuration.rtl_langs:
        # Remove any stale local copy before pulling a fresh one.
        removal_cmd = 'rm -rf conf/locale/' + lang_code
        print(removal_cmd)
        execute(removal_cmd)
        execute('tx pull -l ' + lang_code)
    clean_translated_locales(configuration, langs=configuration.rtl_langs)
Pulls all translations - reviewed or not - for RTL languages
45,285
def clean_translated_locales(configuration, langs=None):
    """Strip the English-source warning header from translated .po files.

    Operates on `langs`, defaulting to every translated locale.
    """
    for locale in (langs or configuration.translated_locales):
        clean_locale(configuration, locale)
Strips out the warning from all translated po files about being an English source file .
45,286
def clean_locale(configuration, locale):
    """Strip the English-source warning from each of `locale`'s .po files.

    Iterates over machine-generated files; locales that have not been
    pulled yet are skipped.
    """
    messages_dir = configuration.get_messages_dir(locale)
    if not messages_dir.exists():
        # Nothing to clean for a locale with no local files.
        return
    for po_path in messages_dir.files('*.po'):
        clean_file(configuration, messages_dir.joinpath(po_path))
Strips out the warning from all of a locale s translated po files about being an English source file . Iterates over machine - generated files .
45,287
def clean_file(configuration, filename):
    """Replace the English-source warning in one translated .po file with
    a note that the file came from Transifex."""
    pofile = polib.pofile(filename)
    if pofile.header.find(EDX_MARKER) == -1:
        # Marker absent: already cleaned, or never had the warning.
        return
    replacement = get_new_header(configuration, pofile)
    pofile.header = pofile.header.replace(EDX_MARKER, replacement)
    pofile.save()
Strips out the warning from a translated po file about being an English source file . Replaces warning with a note about coming from Transifex .
45,288
def get_new_header(configuration, pofile):
    """Build the Transifex attribution header text for `pofile`.

    Uses the file's Language-Team metadata when present, otherwise the
    project-wide Transifex URL.
    """
    team = pofile.metadata.get('Language-Team')
    target = team if team else configuration.TRANSIFEX_URL
    return TRANSIFEX_HEADER.format(target)
Insert info about edX into the po file headers
45,289
def detag_string(self, string):
    """Replace each tag in `string` with a numbered placeholder.

    Returns (detagged_string, tags) where tags[i] is the original text
    of placeholder <i>.  Raises if the two extraction passes disagree.
    """
    counter = itertools.count(0)

    def number_tag(match):
        # Tags are replaced by '<0>', '<1>', ... in encounter order.
        return '<%s>' % next(counter)

    found = [''.join(groups) for groups in self.tag_pattern.findall(string)]
    detagged, substitutions = self.tag_pattern.subn(number_tag, string)
    if len(found) != substitutions:
        raise Exception('tags dont match:' + string)
    return (detagged, found)
Extracts tags from string .
45,290
def options(self):
    """Return the train-query option letters taken from argument 0.

    A leading '-' (when not asking for help) means explicit options and
    everything after the dash is returned verbatim; otherwise only the
    recognized letters 'dgktz' are kept, in their original order.
    """
    first_arg = self.get(0)
    if first_arg.startswith('-') and not self.is_asking_for_help:
        return first_arg[1:]
    return ''.join(ch for ch in first_arg if ch in 'dgktz')
Train tickets query options .
45,291
def trains(self):
    """Yield one display row per train, filtered by the query options.

    A train is included when no option letters were given, or when its
    train-number initial (lowercased) is among self._opts.  Each yielded
    row lines up with the table headers: train number, from/to stations,
    start/arrive times, duration, then the seat-availability fields
    (swz, zy, ze, rw, yw, rz, yz, wz — presumably seat classes from the
    upstream 12306 API; confirm against the headers definition).
    """
    for row in self._rows:
        train_no = row.get('station_train_code')
        initial = train_no[0].lower()
        if not self._opts or initial in self._opts:
            train = [
                train_no,
                # Stations: green = origin, red = destination.
                '\n'.join([
                    colored.green(row.get('from_station_name')),
                    colored.red(row.get('to_station_name')),
                ]),
                # Times: green = departure, red = arrival.
                '\n'.join([
                    colored.green(row.get('start_time')),
                    colored.red(row.get('arrive_time')),
                ]),
                self._get_duration(row),
                row.get('swz_num'),
                row.get('zy_num'),
                row.get('ze_num'),
                row.get('rw_num'),
                row.get('yw_num'),
                row.get('rz_num'),
                row.get('yz_num'),
                row.get('wz_num'),
            ]
            yield train
Filter rows according to headers
45,292
def pretty_print(self):
    """Render the collection as a formatted table on stdout.

    An empty collection is shown as a single 'not found' row.
    """
    table = PrettyTable()
    if len(self):
        table._set_field_names(self.headers)
        for row in self.trains:
            table.add_row(row)
    else:
        table._set_field_names(['Sorry,'])
        table.add_row([TRAIN_NOT_FOUND])
    print(table)
Use PrettyTable to perform formatted outprint .
45,293
def _valid_date(self):
    """Validate the user-supplied query date; return it as 'YYYY-MM-DD'.

    Exits (via exit_after_echo) with INVALID_DATE when the input cannot
    be parsed or falls outside the allowed window.
    """
    normalized = self._parse_date(self.date)
    if not normalized:
        exit_after_echo(INVALID_DATE)
    try:
        when = datetime.strptime(normalized, '%Y%m%d')
    except ValueError:
        exit_after_echo(INVALID_DATE)
    # Allowed window: from yesterday up to 49 days ahead.
    if (when - datetime.today()).days not in range(-1, 50):
        exit_after_echo(INVALID_DATE)
    return datetime.strftime(when, '%Y-%m-%d')
Check and return a valid query date .
45,294
def _parse_date ( date ) : result = '' . join ( re . findall ( '\d' , date ) ) l = len ( result ) if l in ( 2 , 3 , 4 ) : year = str ( datetime . today ( ) . year ) return year + result if l in ( 6 , 7 , 8 ) : return result return ''
Parse from the user input date .
45,295
def _build_params ( self ) : d = OrderedDict ( ) d [ 'purpose_codes' ] = 'ADULT' d [ 'queryDate' ] = self . _valid_date d [ 'from_station' ] = self . _from_station_telecode d [ 'to_station' ] = self . _to_station_telecode return d
The remote query API rejects requests whose parameters arrive in the wrong order, so an OrderedDict is used here to preserve insertion order.
45,296
def date_range(self):
    """Return (start, end) date strings spanning the next `self.days` days.

    Exits (via exit_after_echo) with QUERY_DAYS_INVALID when the value
    is not a positive integer.
    """
    try:
        day_count = int(self.days)
    except ValueError:
        exit_after_echo(QUERY_DAYS_INVALID)
    if day_count < 1:
        exit_after_echo(QUERY_DAYS_INVALID)
    start_date = datetime.today()
    end_date = start_date + timedelta(days=day_count)
    fmt = '%Y-%m-%d'
    return (
        datetime.strftime(start_date, fmt),
        datetime.strftime(end_date, fmt),
    )
Generate date range according to the days user input .
45,297
def query(params):
    """Fetch the Putian-hospital listing and wrap it for display.

    `params` is a city name, optionally followed by a hospital name.
    """
    response = requests_get(QUERY_URL, verify=True)
    return HospitalCollection(response.json(), params)
params is a city name or a city name + hospital name .
45,298
def cli():
    """Various information query via command line.

    NOTE: this docstring doubles as the help text — the -h branch below
    echoes cli.__doc__ to the user before exiting.
    """
    if args.is_asking_for_help:
        exit_after_echo(cli.__doc__, color=None)
    elif args.is_querying_lottery:
        from .lottery import query
        result = query()
    elif args.is_querying_movie:
        from .movies import query
        result = query()
    elif args.is_querying_lyric:
        from .lyrics import query
        result = query(args.as_lyric_query_params)
    elif args.is_querying_show:
        from .showes import query
        result = query(args.as_show_query_params)
    elif args.is_querying_putian_hospital:
        from .hospitals import query
        result = query(args.as_hospital_query_params)
    elif args.is_querying_train:
        from .trains import query
        result = query(args.as_train_query_params)
    else:
        # Unknown arguments: show usage and exit (never reaches the
        # pretty_print below).
        exit_after_echo(show_usage.__doc__, color=None)
    result.pretty_print()
Various information query via command line .
45,299
def query():
    """Query hot movie information from douban.

    Returns a MoviesCollection; falls back to an empty collection when
    the response payload does not have the expected shape.
    """
    r = requests_get(QUERY_URL)
    try:
        rows = r.json()['subject_collection_items']
    # BUG FIX: a dict response that lacks the key raises KeyError, which
    # the original (IndexError, TypeError) pair did not catch, crashing
    # instead of falling back to an empty result.
    except (IndexError, KeyError, TypeError):
        rows = []
    return MoviesCollection(rows)
Query hot movie information from douban.