idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
57,500
def authenticate(self, request):
    """Authenticate a request using the HTTP Basic authentication protocol.

    Returns the authenticated user; raises Forbidden for inactive users and
    Unauthorized when no valid credentials are supplied.
    """
    # Already authenticated by an earlier layer (old-style Django callable).
    if request.user and request.user.is_authenticated():
        return request.user
    if 'HTTP_AUTHORIZATION' in request.META:
        auth = request.META['HTTP_AUTHORIZATION'].split()
        if len(auth) == 2:
            if auth[0].lower() == "basic":
                # Split only on the FIRST ':' -- per RFC 7617 the user-id
                # cannot contain ':' but the password may; the original
                # split(':') crashed on such passwords.
                uname, passwd = base64.b64decode(auth[1]).split(':', 1)
                user = authenticate(username=uname, password=passwd)
                if user is not None:
                    if user.is_active:
                        request.user = user
                        return user
                    else:
                        raise Forbidden()
    raise Unauthorized()
Authenticate request using the HTTP Basic authentication protocol.
57,501
def clean():
    """Clean up previous garbage left by earlier builds."""
    docs_dir = os.path.join(project_root, 'docs')
    os.chdir(docs_dir)
    sh("make clean")
    os.chdir(project_root)
    sh("rm -rf pyoauth2.egg-info")
Clean up previous garbage
57,502
def average():
    """Generator that yields a rolling average of the values sent into it."""
    count = 0
    # Bind the running-total generator to a FRESH name: the original code
    # assigned to ``total`` itself, which made ``total`` a local variable and
    # raised UnboundLocalError before the global ``total()`` factory could
    # ever be called.
    running = total()
    i = 0
    while 1:
        i = yield ((running.send(i) * 1.0) / count if count else 0)
        count += 1
generator that holds a rolling average
57,503
def args(self):
    """Parsed command-line arguments (built lazily and cached)."""
    if self._args is None:
        self._args = self._build_parser().parse_args()
    return self._args
Parsed command - line arguments .
57,504
def build_pypackage_basename(self, pytree, base):
    """Build the string representing the parsed package basename."""
    parent = os.path.dirname(pytree)
    return base.replace(parent, '').strip('/')
Build the string representing the parsed package basename .
57,505
def _build_parser(self):
    """Build the needed command-line parser."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--pytree', required=True, type=self._valid_directory,
        help='This is the path, absolute or relative, of the Python package '
             'that is to be parsed.')
    parser.add_argument(
        '--doctree', required=True, type=self._valid_directory,
        help='This is the path, absolute or relative, of the documentation '
             'package that is to be parsed.')
    parser.add_argument(
        '--no-fail', action='store_true',
        help='Using this option will cause this program to return an exit '
             'code of 0 even when the given trees do not match.')
    parser.add_argument(
        '--doc-ignores', action=AddDocIgnores,
        help='A comma separated list of additional doc files to ignore')
    return parser
Build the needed command - line parser .
57,506
def build_pyfile_path_from_docname(self, docfile):
    """Build the expected Python file path from a documentation file name."""
    stem, _ext = os.path.splitext(docfile)
    return stem.replace('.', '/') + '.py'
Build the expected Python file name based on the given documentation file name .
57,507
def calculate_tree_differences(self, pytree, doctree):
    """Calculate the differences between the given trees.

    Returns (missing_pys, missing_docs): doc entries with no matching
    Python file, and the doc names expected for undocumented Python files.
    """
    pykeys = set(pytree)
    dockeys = set(doctree)
    missing_docs = {pytree[key] for key in pykeys - dockeys}
    missing_pys = set(dockeys - pykeys)
    return missing_pys, missing_docs
Calculate the differences between the given trees .
57,508
def compare_trees(self, parsed_pytree, parsed_doctree):
    """Compare the given parsed trees; return a process exit code."""
    if parsed_pytree == parsed_doctree:
        return 0
    missing_pys, missing_docs = self.calculate_tree_differences(
        pytree=parsed_pytree, doctree=parsed_doctree)
    self.pprint_tree_differences(missing_pys=missing_pys,
                                 missing_docs=missing_docs)
    # --no-fail forces a zero exit code even on mismatch.
    return 0 if self.args.no_fail else 1
Compare the given parsed trees .
57,509
def parse_doc_tree(self, doctree, pypackages):
    """Parse the given documentation tree.

    Maps expected Python file paths to doc file names, excluding docs that
    name a package itself rather than a module.
    """
    parsed = {}
    for filename in os.listdir(doctree):
        if self._ignore_docfile(filename):
            continue
        parsed[self.build_pyfile_path_from_docname(filename)] = filename
    package_files = {name + '.py' for name in pypackages}
    return {path: doc for path, doc in parsed.items()
            if path not in package_files}
Parse the given documentation tree .
57,510
def parse_py_tree(self, pytree):
    """Parse the given Python package tree.

    Returns (parsed_pytree, pypackages): a mapping of parsed file paths to
    expected rst names, and the set of package basenames found.
    """
    parsed_pytree = {}
    pypackages = set()
    for base, _dirs, files in os.walk(pytree):
        if self._ignore_pydir(os.path.basename(base)):
            continue
        # Only directories with __init__.py are real packages.
        if '__init__.py' not in files:
            continue
        package_basename = self.build_pypackage_basename(pytree=pytree,
                                                         base=base)
        pypackages.add(package_basename)
        for filename in files:
            if self._ignore_pyfile(filename):
                continue
            parsed_path = os.path.join(package_basename, filename)
            parsed_pytree[parsed_path] = self.build_rst_name_from_pypath(parsed_path)
    return parsed_pytree, pypackages
Parse the given Python package tree .
57,511
def pprint_tree_differences(self, missing_pys, missing_docs):
    """Pretty-print the missing files of each given set."""
    if missing_pys:
        print('The following Python files appear to be missing:')
        for pyfile in missing_pys:
            print(pyfile)
        print('\n')
    if missing_docs:
        print('The following documentation files appear to be missing:')
        for docfile in missing_docs:
            print(docfile)
        print('\n')
Pprint the missing files of each given set .
57,512
def _valid_directory ( self , path ) : abspath = os . path . abspath ( path ) if not os . path . isdir ( abspath ) : raise argparse . ArgumentTypeError ( 'Not a valid directory: {}' . format ( abspath ) ) return abspath
Ensure that the given path is valid .
57,513
def main(self):
    """Parse package trees and report on any discrepancies."""
    args = self.args
    parsed_pytree, pypackages = self.parse_py_tree(pytree=args.pytree)
    parsed_doctree = self.parse_doc_tree(doctree=args.doctree,
                                         pypackages=pypackages)
    return self.compare_trees(parsed_pytree=parsed_pytree,
                              parsed_doctree=parsed_doctree)
Parse package trees and report on any discrepancies .
57,514
def where_unique(cls, ip, object_id, location):
    """Get the first db model matching ip, object_id and location."""
    return cls.query.filter_by(ip=ip,
                               object_id=object_id,
                               location=location).first()
Get db model by ip, object id and location.
57,515
def delete_where_unique(cls, ip, object_id, location):
    """Delete the record matching ip, object_id and location.

    Returns True on success, or None when no matching record exists.
    """
    record = cls.where_unique(ip, object_id, location)
    if record is None:
        return None
    record.delete()
    return True
delete by ip and object id
57,516
def do_req(self, method, url, body=None, headers=None, status=None):
    """Send a request to the API (used internally, left public).

    Raises MapiError on unexpected status codes; returns a MapiResponse.
    """
    payload = '' if body is None else json.dumps(body)
    res = self.backend.dispatch_request(method=method,
                                        url=url,
                                        body=payload,
                                        headers=self.get_headers(headers),
                                        auth=self.auth)
    if not isinstance(res, MapiResponse):
        res = MapiResponse(*res)
    if status is None:
        # Any 2xx counts as success when no explicit status is expected.
        if res.status // 100 != 2:
            raise MapiError(*res)
    elif res.status != status:
        raise MapiError(*res)
    return res
Used internally to send a request to the API; left public so it can be used to talk to the API more directly.
57,517
def _depaginate_all ( self , url ) : items = [ ] for x in self . _depagination_generator ( url ) : items += x return items
GETs the url provided and traverses the next url that's returned while storing the data in a list. Returns a single list of all items.
57,518
def create_user(self, user_id, roles=None, netmask=None,
                secret=None, pubkey=None):
    """Create user for the Merchant given in the X-Mcash-Merchant header."""
    arguments = {'id': user_id,
                 'roles': roles,
                 'netmask': netmask,
                 'secret': secret,
                 'pubkey': pubkey}
    return self.do_req('POST',
                       self.merchant_api_base_url + '/user/',
                       arguments).json()
Create user for the Merchant given in the X-Mcash-Merchant header.
57,519
def update_user(self, user_id, roles=None, netmask=None,
                secret=None, pubkey=None):
    """Update user. Returns the raw response object."""
    arguments = {'roles': roles,
                 'netmask': netmask,
                 'secret': secret,
                 'pubkey': pubkey}
    return self.do_req('PUT',
                       self.merchant_api_base_url + '/user/' + user_id + '/',
                       arguments)
Update user . Returns the raw response object .
57,520
def create_pos(self, name, pos_type, pos_id, location=None):
    """Create POS resource."""
    arguments = {'name': name,
                 'type': pos_type,
                 'id': pos_id,
                 'location': location}
    return self.do_req('POST',
                       self.merchant_api_base_url + '/pos/',
                       arguments).json()
Create POS resource
57,521
def update_pos(self, pos_id, name, pos_type, location=None):
    """Update POS resource. Returns the raw response object."""
    arguments = {'name': name, 'type': pos_type, 'location': location}
    return self.do_req('PUT',
                       self.merchant_api_base_url + '/pos/' + pos_id + '/',
                       arguments)
Update POS resource . Returns the raw response object .
57,522
def create_payment_request(self, customer, currency, amount, allow_credit,
                           pos_id, pos_tid, action, ledger=None,
                           display_message_uri=None, callback_uri=None,
                           additional_amount=None, additional_edit=None,
                           text=None, expires_in=None, required_scope=None,
                           required_scope_text=None, links=None,
                           line_items=None):
    """Post payment request.

    The call is idempotent: posting the same pos_id and pos_tid twice
    creates only one payment request.
    """
    arguments = {'customer': customer,
                 'currency': currency,
                 'amount': amount,
                 'allow_credit': allow_credit,
                 'pos_id': pos_id,
                 'pos_tid': pos_tid,
                 'action': action,
                 'ledger': ledger,
                 'display_message_uri': display_message_uri,
                 'callback_uri': callback_uri,
                 'additional_amount': additional_amount,
                 'additional_edit': additional_edit,
                 'text': text,
                 'expires_in': expires_in}
    # Optional blocks are only sent when provided.
    if required_scope:
        arguments['required_scope'] = required_scope
        arguments['required_scope_text'] = required_scope_text
    if links:
        arguments['links'] = links
    if line_items:
        arguments['line_items'] = line_items
    return self.do_req('POST',
                       self.merchant_api_base_url + '/payment_request/',
                       arguments).json()
Post payment request . The call is idempotent ; that is if one posts the same pos_id and pos_tid twice only one payment request is created .
57,523
def update_payment_request(self, tid, currency=None, amount=None, action=None,
                           ledger=None, callback_uri=None,
                           display_message_uri=None, capture_id=None,
                           additional_amount=None, text=None, refund_id=None,
                           required_scope=None, required_scope_text=None,
                           line_items=None):
    """Update payment request: reauthorize, capture, release or abort."""
    arguments = {'ledger': ledger,
                 'display_message_uri': display_message_uri,
                 'callback_uri': callback_uri,
                 'currency': currency,
                 'amount': amount,
                 'additional_amount': additional_amount,
                 'capture_id': capture_id,
                 'action': action,
                 'text': text,
                 'refund_id': refund_id}
    if required_scope:
        arguments['required_scope'] = required_scope
        arguments['required_scope_text'] = required_scope_text
    if line_items:
        arguments['line_items'] = line_items
    # Drop unset fields so the PUT only touches provided attributes.
    arguments = {k: v for k, v in arguments.items() if v is not None}
    return self.do_req('PUT',
                       self.merchant_api_base_url + '/payment_request/' + tid + '/',
                       arguments)
Update payment request reauthorize capture release or abort
57,524
def post_chat_message(self, merchant_id, channel_id, message):
    """Post a chat message."""
    url = self.base_url + '/chat/v1/merchant/%s/channel/%s/message/' % (
        merchant_id, channel_id)
    return self.do_req('POST', url, message)
post a chat message
57,525
def create_shortlink(self, callback_uri=None, description=None,
                     serial_number=None):
    """Register new shortlink."""
    arguments = {'callback_uri': callback_uri,
                 'description': description,
                 'serial_number': serial_number}
    return self.do_req('POST',
                       self.merchant_api_base_url + '/shortlink/',
                       arguments).json()
Register new shortlink
57,526
def update_shortlink(self, shortlink_id, callback_uri=None, description=None):
    """Update existing shortlink registration."""
    arguments = {'callback_uri': callback_uri, 'description': description}
    return self.do_req('PUT',
                       self.merchant_api_base_url + '/shortlink/' + shortlink_id + '/',
                       arguments)
Update existing shortlink registration
57,527
def get_shortlink(self, shortlink_id_or_url):
    """Retrieve registered shortlink info by id or by full URL."""
    target = shortlink_id_or_url
    if "://" not in target:
        # A bare id was given; build the full resource URL from it.
        target = self.merchant_api_base_url + '/shortlink/' + target + '/'
    return self.do_req('GET', target).json()
Retrieve registered shortlink info
57,528
def create_ledger(self, currency, description=None):
    """Create a ledger."""
    arguments = {'currency': currency, 'description': description}
    return self.do_req('POST',
                       self.merchant_api_base_url + '/ledger/',
                       arguments).json()
Create a ledger
57,529
def update_ledger(self, ledger_id, description=None):
    """Update ledger info."""
    arguments = {'description': description}
    return self.do_req('PUT',
                       self.merchant_api_base_url + '/ledger/' + ledger_id + '/',
                       arguments)
Update ledger info
57,530
def close_report(self, ledger_id, report_id, callback_uri=None):
    """Close report."""
    arguments = {'callback_uri': callback_uri}
    url = (self.merchant_api_base_url + '/ledger/' + ledger_id +
           '/report/' + report_id + '/')
    return self.do_req('PUT', url, arguments)
Close report.
57,531
def get_report(self, ledger_id, report_id):
    """Get report info."""
    url = (self.merchant_api_base_url + '/ledger/' + ledger_id +
           '/report/' + report_id + '/')
    return self.do_req('GET', url).json()
Get report info
57,532
def create_permission_request(self, customer, pos_id, pos_tid, scope,
                              ledger=None, text=None, callback_uri=None,
                              expires_in=None):
    """Create permission request."""
    arguments = {'customer': customer,
                 'pos_id': pos_id,
                 'pos_tid': pos_tid,
                 'scope': scope,
                 'ledger': ledger,
                 'text': text,
                 'callback_uri': callback_uri,
                 'expires_in': expires_in}
    return self.do_req('POST',
                       self.merchant_api_base_url + '/permission_request/',
                       arguments).json()
Create permission request
57,533
def upload_receipt(self, url, data):
    """Upload a receipt to the given url."""
    return self.upload_attachment(
        url=url, data=data,
        mime_type='application/vnd.mcash.receipt.v1+json')
Upload a receipt to the given url.
57,534
def safe_twitter_request_handler(twitter_api_func, call_rate_limit,
                                 call_counter, time_window_start,
                                 max_retries, wait_period, *args, **kw):
    """Safe handler for any twitter request.

    Throttles to call_rate_limit calls per 15-minute window and retries
    transient errors up to max_retries times.
    """
    error_count = 0
    while True:
        try:
            if call_counter >= call_rate_limit:
                # Window exhausted: sleep out whatever remains of it.
                call_counter = 0
                elapsed_time = time.perf_counter() - time_window_start
                sleep_time = max(15 * 60 - elapsed_time, 0.1)
                time.sleep(sleep_time)
                time_window_start = time.perf_counter()
            else:
                call_counter += 1
            result = twitter_api_func(*args, **kw)
            return result, call_counter, time_window_start
        except twython.TwythonError as e:
            (error_count, call_counter,
             time_window_start, wait_period) = handle_twitter_http_error(
                e, error_count, call_counter, time_window_start, wait_period)
            if error_count > max_retries:
                print("Max error count reached. Abandoning effort.")
                raise e
        except (URLError, BadStatusLine) as e:
            # Network-level failures share the same retry accounting.
            error_count += 1
            if error_count > max_retries:
                print("Max error count reached. Abandoning effort.")
                raise e
This is a safe function handler for any twitter request .
57,535
def handle_twitter_http_error(e, error_count, call_counter,
                              time_window_start, wait_period):
    """Handle a twitter request HTTP error.

    Returns updated (error_count, call_counter, time_window_start,
    wait_period); re-raises errors that cannot be retried.
    """
    if e.error_code in (401, 404):
        # Authentication failure / missing resource: not retryable.
        raise e
    elif e.error_code == 429:
        # Rate limited: wait out the 15-minute window before retrying.
        error_count += 0.5
        call_counter = 0
        wait_period = 2
        time.sleep(60 * 15 + 5)
        time_window_start = time.perf_counter()
        return error_count, call_counter, time_window_start, wait_period
    elif e.error_code in (500, 502, 503, 504):
        # Transient server error: back off exponentially.
        error_count += 1
        time.sleep(wait_period)
        wait_period *= 1.5
        return error_count, call_counter, time_window_start, wait_period
    else:
        raise e
This function handles the twitter request in case of an HTTP error .
57,536
def make_bundle(bundle, fixed_version=None):
    """Process a bundle, write it to disk and return its hash version."""
    tmp_output_file_name = '%s.%s.%s' % (
        os.path.join(bundle.bundle_file_root, bundle.bundle_filename),
        'temp', bundle.bundle_type)
    iter_input = iter_bundle_files(bundle)
    output_pipeline = processor_pipeline(bundle.processors, iter_input)
    digest = md5()
    # Stream chunks to a temp file while hashing them in one pass.
    with open(tmp_output_file_name, 'wb') as output_file:
        for chunk in output_pipeline:
            digest.update(chunk)
            output_file.write(chunk)
    # Use the content hash unless an explicit version was requested.
    hash_version = fixed_version or digest.hexdigest()
    output_file_name = bundle.get_path(hash_version)
    os.rename(tmp_output_file_name, output_file_name)
    return hash_version
Does all of the processing required to create a bundle and write it to disk returning its hash version
57,537
def html_to_text(html_string):
    """Return a plain-text rendering of an HTML string.

    Handles a, p, h1-h6 and br tags, inserting newlines and href
    annotations to create space in the resulting text.
    """
    html_tree = html.document_fromstring(html_string)
    for h in html_tree.cssselect("h1, h2, h3, h4, h5, h6"):
        # h.text is None for element-only headings (e.g. <h1><b>x</b></h1>);
        # the original crashed with 'NoneType + str' there.
        h.text = (h.text or '') + '\n\n'
    for a in html_tree.xpath("//a"):
        # .get avoids KeyError on anchors without an href attribute.
        href = a.attrib.get('href', '')
        a.text = (a.text or '') + " (" + href + ")"
    for p in html_tree.xpath("//p"):
        p.tail = p.tail if p.tail else "\n\n"
    for br in html_tree.xpath("//br"):
        br.tail = "\n" + br.tail if br.tail else "\n"
    return html_tree.text_content()
Returns a plain text string when given an HTML string; handles a, p, h1 to h6 and br tags, inserting newline chars to create space in the string.
57,538
def random_string(**kwargs):
    """Generate a random string (default: 10 chars of digits + lowercase).

    Length and character pool can be overridden via the ``length`` and
    ``pool`` keyword arguments.
    """
    length = kwargs.get('length', 10)
    pool = kwargs.get('pool') or string.digits + string.ascii_lowercase
    rng = random.SystemRandom()
    return ''.join(rng.choice(pool) for _ in range(length))
By default generates a random string of 10 chars composed of digits and ascii lowercase letters. String length and pool can be overridden by using kwargs. Pool must be a string of candidate characters.
57,539
def _run_parallel_process_with_profiling(self, start_path, stop_path,
                                         queue, filename):
    """Wrapper adding cProfile profiling around _run_parallel_process."""
    runctx('Engine._run_parallel_process(self, start_path, stop_path, queue)',
           globals(), locals(), filename)
wrapper for usage of profiling
57,540
def _run_parallel_process(self, start_path, stop_path, queue):
    """Run the given paths and put the consumer's results into the queue."""
    # Worker processes are named 'Name-N'; use N as the process number.
    process_num = int(current_process().name.split('-', 2)[1])
    self._run_process(start_path, stop_path, process_num)
    queue.put(self.consumer.put())
The function calls _run_process and puts results produced by consumer at observations of top most consumer in to the queue
57,541
def _run_process(self, start_path, stop_path, process_num=0):
    """Call _run_path for the given range of paths on one worker."""
    self.producer.initialize_worker(process_num)
    self.consumer.initialize_worker(process_num)
    for path in range(start_path, stop_path):
        self._run_path(path)
    self.consumer.finalize_worker(process_num)
The function calls _run_path for given set of paths
57,542
def _run_path ( self , path_num ) : self . producer . initialize_path ( path_num ) self . consumer . initialize_path ( path_num ) for new_date in self . grid : state = self . producer . evolve ( new_date ) self . consumer . consume ( state ) self . consumer . finalize_path ( path_num )
standalone function implementing a single loop of Monte Carlo It returns list produced by consumer at observation dates
57,543
def initialize_worker(self, process_num=None):
    """Reinitialize the consumer for a worker process in multiprocessing."""
    self.initialize(self.grid, self.num_of_paths, self.seed)
Reinitialize consumer for a worker process in multiprocessing.
57,544
def initialize_path(self, path_num=None):
    """Initialize the consumer for the next path with a fresh state copy."""
    self.state = copy(self.initial_state)
    return self.state
initialize consumer for next path
57,545
def consume(self, state):
    """Consume a new producer state: append func(state) to the path state."""
    self.state.append(self.func(state))
    return self.state
consume new producer state
57,546
def get(self, queue_get):
    """Fold states pulled from a multiprocessing queue into the result list."""
    # Only sequence payloads are merged; anything else is silently ignored.
    if isinstance(queue_get, (tuple, list)):
        self.result.extend(queue_get)
to get states from multiprocessing . queue
57,547
def walk_revctrl(dirname='', ff=''):
    """Return files found by the file-finder ``ff``."""
    file_finder = None
    items = []
    if not ff:
        distutils.log.error('No file-finder passed to walk_revctrl')
        sys.exit(1)
    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
        if ff != ep.name:
            continue
        distutils.log.info('using %s file-finder', ep.name)
        file_finder = ep.load()
        finder_items = []
        with pythonpath_off():
            for item in file_finder(dirname):
                # Skip VCS bookkeeping entries.
                if not basename(item).startswith(('.svn', '.hg', '.git')):
                    finder_items.append(item)
        distutils.log.info('%d files found', len(finder_items))
        items.extend(finder_items)
    if file_finder is None:
        distutils.log.error(
            'Failed to load %s file-finder; setuptools-%s extension missing?',
            ff, 'subversion' if ff == 'svn' else ff)
        sys.exit(1)
    return items or ['']
Return files found by the file - finder ff .
57,548
def cleanup_pycache():
    """Remove .pyc files we leave around because of import."""
    try:
        for leftover in glob.glob('setup.py[co]'):
            os.remove(leftover)
        if isdir('__pycache__'):
            for leftover in glob.glob(join('__pycache__', 'setup.*.py[co]')):
                os.remove(leftover)
            # Only remove the directory once it is empty.
            if not glob.glob(join('__pycache__', '*')):
                os.rmdir('__pycache__')
    except (IOError, OSError):
        # Best-effort cleanup; failures are deliberately ignored.
        pass
Remove . pyc files we leave around because of import .
57,549
def run(args, ff=''):
    """Run setup.py with monkey patches applied."""
    import setuptools.command.egg_info
    # Replace setuptools' walk_revctrl with ours (or a no-op for 'none').
    if ff == 'none':
        setuptools.command.egg_info.walk_revctrl = no_walk_revctrl
    else:
        setuptools.command.egg_info.walk_revctrl = partial(walk_revctrl, ff=ff)
    sys.argv = ['setup.py'] + args
    # Importing setup executes it under the patched environment.
    import setup
    cleanup_pycache()
Run setup . py with monkey patches applied .
57,550
def _get_sorted_iterator ( self , iterator ) : lines = list ( next ( iterator ) ) if len ( lines ) < self . max_lines : return iter ( sorted ( lines , key = self . key ) ) import tempfile tmp_dir = tempfile . mkdtemp ( ) fnames = self . _split ( chain ( [ lines ] , iterator ) , tmp_dir ) return SortedIteratorMerger ( [ unpickle_iter ( open ( fname , 'rb' ) ) for fname in fnames ] , self . key )
Get the iterator over the sorted items .
57,551
def _split ( self , iterator , tmp_dir ) : fnames = [ ] for i , lines in enumerate ( iterator ) : lines = list ( lines ) out_fname = os . path . join ( tmp_dir , self . TMP_FNAME . format ( i + 1 ) ) self . _write ( lines , out_fname ) fnames . append ( out_fname ) if len ( lines ) < self . max_lines : break return fnames
Splits the file into several chunks .
57,552
def _write ( self , lines , fname ) : with open ( fname , 'wb' ) as out_fhndl : for line in sorted ( lines , key = self . key ) : pickle . dump ( line , out_fhndl )
Writes a intermediate temporary sorted file
57,553
def get_iso_time(date_part, time_part):
    """Combine date and time into an ISO 8601 datetime string.

    date_part uses '%m/%d/%Y', time_part uses '%I:%M %p'; the result
    carries a fixed UTC-7 offset.
    """
    str_date = datetime.datetime.strptime(date_part, '%m/%d/%Y').strftime('%Y-%m-%d')
    str_time = datetime.datetime.strptime(time_part, '%I:%M %p').strftime('%H:%M:%S')
    # ISO 8601 requires a two-digit offset hour ('-07:00'); the original
    # '-7:00' is rejected by strict parsers such as datetime.fromisoformat.
    return str_date + "T" + str_time + "-07:00"
Combine date and time into an ISO datetime string.
57,554
def get_user_list(host_name, client_name, client_pass):
    """Pull the list of users in a client."""
    request = construct_request(model_type="pers",
                                client_name=client_name,
                                client_pass=client_pass,
                                command="getusrs",
                                values="whr=*")
    request_result = send_request(host_name, request)
    user_id_list = []
    if request_result is not None:
        tree = etree.parse(StringIO(request_result.text))
        root = tree.getroot()
        for xml_row in root.findall("./result/row/usr"):
            user_id_list.append(xml_row.text)
    return user_id_list
Pulls the list of users in a client .
57,555
def add_features(host_name, client_name, client_pass, feature_names):
    """Add a number of numerical features in the client."""
    # Build 'name1=0&name2=0&...' so every feature starts at zero.
    init_feats = "&".join("%s=0" % name for name in feature_names)
    features_req = construct_request("pers", client_name, client_pass,
                                     "addftr", init_feats)
    send_request(host_name, features_req)
Add a number of numerical features in the client .
57,556
def delete_features(host_name, client_name, client_pass, feature_names=None):
    """Remove numerical features; with no list given, remove all features."""
    if feature_names is None:
        feature_names = get_feature_names(host_name, client_name, client_pass)
    feature_to_be_removed = "&".join("ftr=%s" % name for name in feature_names)
    features_req = construct_request("pers", client_name, client_pass,
                                     'remftr', feature_to_be_removed)
    send_request(host_name, features_req)
Remove a number of numerical features in the client . If a list is not provided remove all features .
57,557
def get_feature_names(host_name, client_name, client_pass):
    """Get the names of all features in a PServer client."""
    request = construct_request(model_type="pers",
                                client_name=client_name,
                                client_pass=client_pass,
                                command="getftrdef",
                                values="ftr=*")
    request_result = send_request(host_name, request)
    feature_names = []
    if request_result is not None:
        tree = etree.parse(StringIO(request_result.text))
        root = tree.getroot()
        for xml_row in root.findall("row/ftr"):
            feature_names.append(xml_row.text)
    return feature_names
Get the names of all features in a PServer client .
57,558
def construct_request(model_type, client_name, client_pass, command, values):
    """Construct the request url (path + query string, without the host)."""
    template = "{model_type}?clnt={client_name}|{client_pass}&com={command}&{values}"
    return template.format(model_type=model_type,
                           client_name=client_name,
                           client_pass=client_pass,
                           command=command,
                           values=values)
Construct the request url .
57,559
def send_request(host_name, request):
    """Send a PServer url request.

    Returns the response object on HTTP 200; raises Exception otherwise.
    Connection errors from requests propagate unchanged (the original
    wrapped them in a no-op ``except Exception as e: raise e``).
    """
    url = "%s%s" % (host_name, request)
    result = requests.get(url)
    if result.status_code == 200:
        return result
    # Give the caller context instead of the original bare ``raise Exception``.
    raise Exception("PServer request failed with status %d: %s"
                    % (result.status_code, url))
Sends a PServer url request .
57,560
def update_feature_value(host_name, client_name, client_pass,
                         user_twitter_id, feature_name, feature_score):
    """Update a single topic score for a single user."""
    username = str(user_twitter_id)
    # Scores are stored with two-decimal precision.
    feature_value = "{0:.2f}".format(feature_score)
    joined_ftr_value = "ftr_" + feature_name + "=" + str(feature_value)
    values = "usr=%s&%s" % (username, joined_ftr_value)
    request = construct_request(model_type="pers",
                                client_name=client_name,
                                client_pass=client_pass,
                                command="setusr",
                                values=values)
    send_request(host_name, request)
Updates a single topic score for a single user .
57,561
def init_app(self, app):
    """Initialize the extension once the app configuration has been loaded."""
    self.app = app
    self.log = app.logger.getChild('compass')
    self.log.debug("Initializing compass integration")
    config = self.app.config
    self.compass_path = config.get('COMPASS_PATH', 'compass')
    self.config_files = config.get('COMPASS_CONFIGS', None)
    self.requestcheck_debug_only = config.get('COMPASS_REQUESTCHECK_DEBUG_ONLY', True)
    self.skip_mtime_check = config.get('COMPASS_SKIP_MTIME_CHECK', False)
    self.debug_only = config.get('COMPASS_DEBUG_ONLY', False)
    self.disabled = config.get('COMPASS_DISABLED', False)
    if not self.debug_only:
        # Compile eagerly at startup when not restricted to debug mode.
        self.compile()
    if (not self.debug_only) and (not self.requestcheck_debug_only or self.app.debug):
        self.app.after_request(self.after_request)
Initialize the application once the configuration has been loaded there .
57,562
def compile(self):
    """Compile all the specified or found compass projects."""
    if self.disabled:
        return
    self._check_configs()
    # NOTE: iteritems is kept -- the surrounding file targets Python 2.
    for _, cfg in self.configs.iteritems():
        cfg.parse()
        if cfg.changes_found() or self.skip_mtime_check:
            self.log.debug("Changes found for " + cfg.path +
                           " or checks disabled. Compiling...")
            cfg.compile(self)
Main entry point that compiles all the specified or found compass projects .
57,563
def after_request(self, response):
    """after_request handler compiling the compass projects per request."""
    if response is not None and request is not None:
        # Never recompile for static files or unmatched endpoints.
        if request.endpoint in [None, "static"]:
            return response
    self.compile()
    return response
after_request handler for compiling the compass projects with each request .
57,564
def _check_configs(self):
    """Reload the set of known configuration files."""
    found = set(self._find_configs())
    known = set(self.configs.keys())
    for cfg in known - found:
        self.log.debug("Compass configuration has been removed: " + cfg)
        del self.configs[cfg]
    for cfg in found - known:
        self.log.debug("Found new compass configuration: " + cfg)
        self.configs[cfg] = CompassConfig(cfg)
Reloads the configuration files .
57,565
def _find_configs ( self ) : if self . config_files is not None : return self . config_files result = [ ] for path , _ , files in os . walk ( self . app . root_path ) : if "config.rb" in files : result . append ( os . path . join ( path , "config.rb" ) ) return result
Scans the project directory for config files or returns the explicitly specified list of files .
57,566
def parse(self, replace=False):
    """Parse the compass config file, skipping unchanged files by mtime."""
    if (self.last_parsed is not None and
            self.last_parsed > os.path.getmtime(self.path) and not replace):
        return
    self.last_parsed = time.time()
    with open(self.path, 'r') as file_:
        for line in file_:
            match = CONFIG_LINE_RE.match(line.rstrip())
            if not match:
                continue
            key, value = match.group(1), match.group(2)
            # Values are quoted in config.rb; strip the quote characters.
            if key == 'sass_dir':
                self.src = os.path.join(self.base_dir, value[1:-1])
            elif key == 'css_dir':
                self.dest = os.path.join(self.base_dir, value[1:-1])
Parse the given compass config file
57,567
def changes_found(self):
    """Return True if the target folder is older than the source folder."""
    if self.dest is None:
        warnings.warn("dest directory not found!")
    if self.src is None:
        warnings.warn("src directory not found!")
    if self.src is None or self.dest is None:
        return False
    dest_mtime = -1
    src_mtime = os.path.getmtime(self.src)
    if os.path.exists(self.dest):
        dest_mtime = os.path.getmtime(self.dest)
    if src_mtime >= dest_mtime:
        return True
    # Fall back to checking every .scss file individually.
    for folder, _subdirs, files in os.walk(self.src):
        for filename in fnmatch.filter(files, '*.scss'):
            if os.path.getmtime(os.path.join(folder, filename)) >= dest_mtime:
                return True
    return False
Returns True if the target folder is older than the source folder .
57,568
def compile(self, compass):
    """Run 'compass compile' using the path from the compass extension.

    Disables compilation when the compass binary cannot be found.
    """
    try:
        output = subprocess.check_output(
            [compass.compass_path, 'compile', '-q'], cwd=self.base_dir)
        # Touch the destination so the mtime check sees it as fresh.
        os.utime(self.dest, None)
        compass.log.debug(output)
    except OSError as e:  # was 'except OSError, e' -- Python-2-only syntax
        if e.errno == errno.ENOENT:
            compass.log.error("Compass could not be found in the PATH " +
                              "and/or in the COMPASS_PATH setting! " +
                              "Disabling compilation.")
            compass.disabled = True
        else:
            # Bare raise preserves the original traceback ('raise e' did not).
            raise
Calls the compass script specified in the compass extension with the paths provided by the config . rb .
57,569
def _remove_otiose ( lst ) : listtype = type ( [ ] ) while type ( lst ) == listtype and len ( lst ) == 1 : lst = lst [ 0 ] return lst
lift deeply nested expressions out of redundant parentheses
57,570
def get_work_commits(repo_addr, ascending=True, tz='US/Eastern', correct_times=True):
    """Retrieve work commits from a git repository.

    Returns (work, repo): a DataFrame indexed by tz-converted commit time
    with 'message' and 'hash' columns, and the git.Repo object.
    """
    repo = git.Repo(repo_addr)
    # The original also built an unused ``commits = list(repo.iter_commits())``,
    # materializing every commit a second time for nothing.
    logs = [(c.authored_datetime, c.message.strip('\n'), str(c))
            for c in repo.iter_commits()]
    work = pd.DataFrame.from_records(logs, columns=['time', 'message', 'hash'])
    work.time = pd.DatetimeIndex([pd.Timestamp(i).tz_convert(tz) for i in work.time])
    work.set_index('time', inplace=True)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        work = work.sort_index(ascending=ascending)
    if correct_times:
        work = adjust_time(work)
    return work, repo
Retrieves work commits from repo.
57,571
def get_topic_set(file_path):
    """Open a topic-set resource file and return the set of topics."""
    # The first comma-separated column of each row is the topic name.
    return {row[0] for row in get_file_row_generator(file_path, ",")}
Opens one of the topic set resource files and returns a set of topics .
57,572
def get_reveal_set():
    """Return the set of all topics interesting for REVEAL use-cases."""
    base = get_package_path() + "/twitter/res/topics/"
    categories = ("story_set.txt", "theme_set.txt", "attribute_set.txt",
                  "stance_set.txt", "geographical_set.txt")
    topics = set()
    for name in categories:
        topics |= get_topic_set(base + name)
    return topics
Returns a set of all the topics that are interesting for REVEAL use - cases .
57,573
def get_topic_keyword_dictionary():
    """Load the topic-to-keywords mapping resource file.

    :return: dict mapping each topic (first field) to a set of keywords
        (remaining fields).
    """
    mapping_path = (get_package_path()
                    + "/twitter/res/topics/topic_keyword_mapping" + ".txt")
    rows = get_file_row_generator(mapping_path, ",", "utf-8")
    return {row[0]: set(row[1:]) for row in rows}
Opens the topic - keyword map resource file and returns the corresponding python dictionary .
57,574
def get_input(self, name, ds):
    """Return the named input as a column selection over a DataSource.

    The input acts like a filter over the outputs of the DataSource;
    columns the source lacks are created and filled with their default.
    """
    wanted = self.inputs.get(name)
    frame = ds.get_dataframe()
    # Backfill any column the source is missing with its configured default.
    missing = [col for col in wanted if col not in frame.columns]
    for col in missing:
        frame[col] = self.defaults.get(col)
    return frame[wanted]
Retrieves the content of an input given a DataSource . The input acts like a filter over the outputs of the DataSource .
57,575
def domain_relationship(self):
    """Return the domain relationship equivalent to this resource
    relationship, building and caching it on first access."""
    if self.__domain_relationship is None:
        entity = self.relator.get_entity()
        self.__domain_relationship = self.descriptor.make_relationship(entity)
    return self.__domain_relationship
Returns a domain relationship equivalent to this resource relationship.
57,576
def fit(self):
    """Fit the MCMC age-depth model and cache the derived arrays."""
    # Run the MCMC sampler and discard the configured burn-in portion.
    self._mcmcfit = self.mcmcsetup.run()
    self._mcmcfit.burnin(self.burnin)
    # Depth span covered by the model's segment boundaries.
    dmin = min(self._mcmcfit.depth_segments)
    dmax = max(self._mcmcfit.depth_segments)
    # Mean segment thickness.  NOTE(review): dividing by len(segments)
    # rather than len(segments) - 1 assumes depth_segments holds one entry
    # per segment, not per boundary — confirm.
    self._thick = (dmax - dmin) / len(self.mcmcfit.depth_segments)
    # Depth grid; the +0.001 nudges arange to include dmax itself.
    self._depth = np.arange(dmin, dmax + 0.001)
    # Ensemble of ages evaluated at every grid depth.
    self._age_ensemble = np.array([self.agedepth(d=dx) for dx in self.depth])
Fit MCMC AgeDepthModel
57,577
def date(self, proxy, how='median', n=500):
    """Date a proxy record against this age-depth model.

    :param proxy: proxy record with a ``data.depth`` series.
    :param how: 'median' for point ages, 'ensemble' for ``n`` sampled
        ensemble members per depth.
    :param n: ensemble sample size (only used when how == 'ensemble').
    :return: a DatedProxyRecord wrapping a copy of the proxy data.
    """
    assert how in ['median', 'ensemble']
    member_count = self.mcmcfit.n_members()
    if how == 'ensemble':
        # Sample (with replacement) which ensemble members to keep.
        picks = np.random.choice(range(member_count), size=n, replace=True)

    def _age_at(depth_value):
        ages = self.agedepth(depth_value)
        if how == 'median':
            return np.median(ages)
        return ages[picks]

    dated = [_age_at(dv) for dv in proxy.data.depth.values]
    return DatedProxyRecord(proxy.data.copy(), dated)
Date a proxy record
57,578
def plot(self, agebins=50, p=(2.5, 97.5), ax=None):
    """Age-depth plot: 2-D histogram of the age ensemble with the median
    and a percentile envelope overlaid.

    :param agebins: number of age bins for the 2-D histogram.
    :param p: (lower, upper) percentiles for the envelope.
    :param ax: matplotlib Axes to draw on (current axes when None).
    :return: the Axes drawn on.
    """
    if ax is None:
        ax = plt.gca()
    # Density of ensemble members per (depth, age) cell; cmin=1 leaves
    # empty cells transparent.
    ax.hist2d(np.repeat(self.depth, self.age_ensemble.shape[1]), self.age_ensemble.flatten(), (len(self.depth), agebins), cmin=1)
    # Median age curve plus dotted percentile bounds.
    ax.step(self.depth, self.age_median(), where='mid', color='red')
    ax.step(self.depth, self.age_percentile(p[0]), where='mid', color='red', linestyle=':')
    ax.step(self.depth, self.age_percentile(p[1]), where='mid', color='red', linestyle=':')
    ax.set_ylabel('Age (cal yr BP)')
    ax.set_xlabel('Depth (cm)')
    ax.grid(True)
    return ax
Age - depth plot
57,579
def agedepth(self, d):
    """Return the calendar-age ensemble for depth ``d``.

    Integrates the per-segment sediment accumulation rates from the top
    of the modelled section down to ``d``.
    """
    # x: accumulation rate per segment; theta0: age at the section top.
    x = self.mcmcfit.sediment_rate
    theta0 = self.mcmcfit.headage
    deltac = self.thick
    c0 = min(self.depth)
    # d must lie at or below the top of the modelled section (with a
    # small tolerance for floating-point depth grids).
    assert d > c0 or np.isclose(c0, d, atol=1e-4)
    out = theta0.astype(float)
    # Number of whole segments lying entirely above d.
    i = int(np.floor((d - c0) / deltac))
    for j in range(i):
        out += x[j] * deltac
    # ci: depth at the top of the partial segment containing d.
    ci = c0 + i * deltac
    assert ci < d or np.isclose(ci, d, atol=1e-4)
    # When d sits exactly on the lowest boundary, i indexes one past the
    # last rate; fall back to the final segment's rate.
    try:
        next_x = x[i]
    except IndexError:
        next_x = x[i - 1]
    # Add the partial-segment contribution down to d.
    out += next_x * (d - ci)
    return out
Get calendar age for a depth
57,580
def plot_prior_dates(self, dwidth=30, ax=None):
    """Overlay prior chronology dates as violin-like patches on an
    age-depth plot.

    :param dwidth: horizontal width (depth units) allotted to each
        date's probability density.
    :param ax: matplotlib Axes to draw on (current axes when None).
    :return: the Axes drawn on.
    """
    if ax is None:
        ax = plt.gca()
    depth, probs = self.prior_dates()
    pat = []
    for i, d in enumerate(depth):
        p = probs[i]
        # Row 0: age values; row 1: density normalized so each date's
        # patch has total width dwidth.
        z = np.array([p[:, 0], dwidth * p[:, 1] / np.sum(p[:, 1])])
        # Sort by age so the interpolation below is well-defined.
        z = z[:, z[0].argsort(kind='mergesort')]
        zy = np.linspace(np.min(z[0]), np.max(z[0]), num=200)
        zp = np.interp(x=zy, xp=z[0], fp=z[1])
        # Mirror the density left/right of the date's depth to build a
        # closed violin-shaped polygon.
        pol = np.vstack([np.concatenate([d + zp, d - zp[::-1]]), np.concatenate([zy, zy[::-1]])])
        pat.append(Polygon(pol.T))
    p = PatchCollection(pat)
    p.set_label('Prior dates')
    ax.add_collection(p)
    ax.autoscale_view()
    ax.set_ylabel('Age (cal yr BP)')
    ax.set_xlabel('Depth (cm)')
    ax.grid(True)
    return ax
Plot prior chronology dates in age - depth plot
57,581
def plot_sediment_rate(self, ax=None):
    """Plot prior vs. posterior distributions of sediment accumulation rate.

    :param ax: matplotlib Axes to draw on (current axes when None).
    :return: the Axes drawn on.
    """
    if ax is None:
        ax = plt.gca()
    y_prior, x_prior = self.prior_sediment_rate()
    ax.plot(x_prior, y_prior, label='Prior')
    y_posterior = self.mcmcfit.sediment_rate
    # KDE of posterior samples.  HACK: overriding covariance_factor
    # monkey-patches scipy's gaussian_kde to force a fixed bandwidth.
    density = scipy.stats.gaussian_kde(y_posterior.flat)
    density.covariance_factor = lambda: 0.25
    density._compute_covariance()
    ax.plot(x_prior, density(x_prior), label='Posterior')
    # Annotate with the prior configuration used for the run.
    acc_shape = self.mcmcsetup.mcmc_kws['acc_shape']
    acc_mean = self.mcmcsetup.mcmc_kws['acc_mean']
    annotstr_template = 'acc_shape: {0}\nacc_mean: {1}'
    annotstr = annotstr_template.format(acc_shape, acc_mean)
    ax.annotate(annotstr, xy=(0.9, 0.9), xycoords='axes fraction', horizontalalignment='right', verticalalignment='top')
    ax.set_ylabel('Density')
    ax.set_xlabel('Acc. rate (yr/cm)')
    ax.grid(True)
    return ax
Plot sediment accumulation rate prior and posterior distributions
57,582
def plot_sediment_memory(self, ax=None):
    """Plot prior vs. posterior distributions of sediment memory.

    :param ax: matplotlib Axes to draw on (current axes when None).
    :return: the Axes drawn on.
    """
    if ax is None:
        ax = plt.gca()
    y_prior, x_prior = self.prior_sediment_memory()
    ax.plot(x_prior, y_prior, label='Prior')
    y_posterior = self.mcmcfit.sediment_memory
    # Posterior rescaled by the 1/thick exponent — presumably converting
    # per-segment memory to a per-cm ratio; TODO confirm.
    # HACK: covariance_factor override forces a fixed KDE bandwidth.
    density = scipy.stats.gaussian_kde(y_posterior ** (1 / self.thick))
    density.covariance_factor = lambda: 0.25
    density._compute_covariance()
    ax.plot(x_prior, density(x_prior), label='Posterior')
    # Annotate with the prior configuration used for the run.
    mem_mean = self.mcmcsetup.mcmc_kws['mem_mean']
    mem_strength = self.mcmcsetup.mcmc_kws['mem_strength']
    annotstr_template = 'mem_strength: {0}\nmem_mean: {1}\nthick: {2} cm'
    annotstr = annotstr_template.format(mem_strength, mem_mean, self.thick)
    ax.annotate(annotstr, xy=(0.9, 0.9), xycoords='axes fraction', horizontalalignment='right', verticalalignment='top')
    ax.set_ylabel('Density')
    ax.set_xlabel('Memory (ratio)')
    ax.grid(True)
    return ax
Plot sediment memory prior and posterior distributions
57,583
def qpinfo():
    """Command-line entry point: print a summary of a quantitative phase
    imaging dataset (type, image count, metadata)."""
    args = qpinfo_parser().parse_args()
    path = pathlib.Path(args.path).resolve()
    try:
        ds = load_data(path)
    except UnknownFileFormatError:
        print("Unknown file format: {}".format(path))
        return
    print(f"{ds.__class__.__doc__} ({ds.__class__.__name__})")
    print(f"- number of images: {len(ds)}")
    for key in ds.meta_data:
        print(f"- {key}: {ds.meta_data[key]}")
Print information of a quantitative phase imaging dataset
57,584
def get_node_meta_type(manager, handle_id):
    """Return the meta type of the supplied node as a string.

    :raises exceptions.NoMetaLabelFound: when no label is a meta type.
    """
    node = get_node(manager=manager, handle_id=handle_id, legacy=False)
    meta_type = next((label for label in node.labels if label in META_TYPES), None)
    if meta_type is None:
        raise exceptions.NoMetaLabelFound(handle_id)
    return meta_type
Returns the meta type of the supplied node as a string .
57,585
def create_relationship(manager, handle_id, other_handle_id, rel_type):
    """Create a relationship from one node to another according to the
    source node's meta type.

    :return: the created relationship.
    :raises exceptions.NoRelationshipPossible: for unsupported meta types.
    """
    # Dispatch table replaces the if/elif chain; behavior is unchanged.
    dispatch = {
        'Location': create_location_relationship,
        'Logical': create_logical_relationship,
        'Relation': create_relation_relationship,
        'Physical': create_physical_relationship,
    }
    meta_type = get_node_meta_type(manager, handle_id)
    maker = dispatch.get(meta_type)
    if maker is not None:
        return maker(manager, handle_id, other_handle_id, rel_type)
    other_meta_type = get_node_meta_type(manager, other_handle_id)
    raise exceptions.NoRelationshipPossible(handle_id, meta_type, other_handle_id, other_meta_type, rel_type)
Makes a relationship from node to other_node depending on which meta_type the nodes are . Returns the relationship or raises NoRelationshipPossible exception .
57,586
def parse_code(self, url, html):
    """Parse the code details and table of contents from Legifrance HTML.

    :param url: page URL (used to build absolute links).
    :param html: raw HTML of the code's TOC page.
    :return: a populated Code object with nested Section children.
    """
    soup = BeautifulSoup(html, 'html5lib', from_encoding='utf-8')
    div = (soup.find('div', id='content_false').find('div', attrs={'class': 'data'}))
    code = Code(self.id_code, date_pub=self.date_pub, url_code=cleanup_url(url))
    div_title = div.find('div', id='titreTexte')
    span_subtitle = div_title.find('span', attrs={'class': 'sousTitreTexte'})
    if span_subtitle:
        # Title text minus the subtitle span it contains.
        code.title = div_title.text.replace(span_subtitle.text, '')
        code.subtitle = span_subtitle.text.strip()
        # Extract the consolidation date from the subtitle, when present.
        regex = r'Version consolidée au (\d{1,2}(?:er)?\s+[^\s]+\s+\d{4})'
        m = re.search(regex, code.subtitle)
        if m:
            code.date_pub = parse_date(m.group(1))
    # NOTE(review): nesting reconstructed from a flattened source; the
    # strip below is assumed to run unconditionally — confirm.
    code.title = code.title.strip()
    # Recurse into the top-level <ul> TOC entries.
    code.children = [self.parse_code_ul(url, child) for child in div.find_all('ul', recursive=False)]
    return code
Parse the code details and TOC from the given HTML content
57,587
def parse_code_ul(self, url, ul):
    """Fill one TOC Section from a <ul> level of the code's table of
    contents, recursing into nested levels."""
    li_list = ul.find_all('li', recursive=False)
    # Each <ul> level carries a single top-level <li> describing the section.
    li = li_list[0]
    span_title = li.find('span', attrs={'class': re.compile(r'TM\d+Code')}, recursive=False)
    section = Section(span_title.attrs['id'], span_title.text.strip())
    # Optional introductory text for the section.
    div_italic = li.find('div', attrs={'class': 'italic'}, recursive=False)
    if div_italic:
        section.content = div_italic.text.strip()
    # Link to the section's articles, when present.
    span_link = li.find('span', attrs={'class': 'codeLienArt'}, recursive=False)
    if span_link:
        a_link = span_link.find('a', recursive=False)
        if self.with_articles:
            # Fetch full article objects through the section service.
            service = self.section_service
            section.articles = service.articles(self.id_code, section.id_section, self.date_pub)
        else:
            # Otherwise keep only the link text (e.g. the article range).
            section.articles = a_link.text.strip()
        section.url_section = cleanup_url(urljoin(url, a_link.attrs['href']))
    # Recurse into nested <ul> children of this item.
    section.children = [self.parse_code_ul(url, child) for child in li.find_all('ul', recursive=False)]
    return section
Fill the toc item
57,588
def add(self, interval, offset):
    """Record ``interval``/``offset`` in sorted order.

    The added interval must be overlapping or beyond the last stored
    interval, i.e. added in sorted order.
    """
    start, stop = self.get_start_stop(interval)
    if len(self.starts) > 0:
        # Enforce sorted insertion of both intervals and offsets.
        if start < self.starts[-1] or offset <= self.offsets[-1][1]:
            raise ValueError('intervals and offsets must be added in-order')
        # Extend the current run: bump its end offset and its entry count.
        # NOTE(review): reconstructed from a flattened source — as written,
        # stops[-1] is never updated after the first interval; confirm the
        # original nesting (an append branch for non-overlapping intervals
        # may have existed).
        self.offsets[-1][1] = offset
        self.offsets[-1][2] += 1
    else:
        # First interval: start a new run as [first_offset, last_offset, count].
        self.starts.append(start)
        self.stops.append(stop)
        self.offsets.append([offset, offset, 1])
The added interval must be overlapping or beyond the last stored interval ie . added in sorted order .
57,589
def get_sum(qs, field):
    """Return the sum of ``field`` over the queryset, 0 when empty/None."""
    total = qs.aggregate(Sum(field))['%s__sum' % field]
    return total or 0
get sum for queryset .
57,590
def get_max(qs, field):
    """Return the max of ``field`` over the queryset, 0 when empty/None."""
    value = qs.aggregate(Max(field))['%s__max' % field]
    return value or 0
get max for queryset .
57,591
def do_filter(qs, qdata, quick_query_fields=[], int_quick_query_fields=[]):
    """Auto-filter a queryset from a request-style dict.

    Applies a quick-search filter plus generated query params.  Filtering
    is best-effort: on any error the traceback is printed and the
    (possibly partially filtered) queryset is returned unchanged.
    """
    try:
        qs = qs.filter(__gen_quick_query_params(qdata.get('q_quick_search_kw'),
                                                quick_query_fields,
                                                int_quick_query_fields))
        q, kw_query_params = __gen_query_params(qdata)
        qs = qs.filter(q, **kw_query_params)
    except Exception:
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit;
        # Exception keeps the deliberate best-effort behavior without that.
        import traceback
        traceback.print_exc()
    return qs
auto filter queryset by dict .
57,592
def read_gcvs(filename):
    """Yield variable stars parsed from a GCVS-format file."""
    with open(filename, 'r') as fp:
        yield from GcvsParser(fp)
Reads variable star data in GCVS format.
57,593
def dict_to_body(star_dict):
    """Convert a dictionary of variable star data into a PyEphem
    FixedBody at epoch J2000.

    :raises NotImplementedError: when PyEphem is not installed.
    """
    if ephem is None:
        raise NotImplementedError("Please install PyEphem in order to use dict_to_body.")
    ra_text = str(star_dict['ra'])
    dec_text = str(star_dict['dec'])
    body = ephem.FixedBody()
    body.name = star_dict['name']
    body._ra = ephem.hours(ra_text)
    body._dec = ephem.degrees(dec_text)
    body._epoch = ephem.J2000
    return body
Converts a dictionary of variable star data to a Body instance .
57,594
def tempfile(self):
    """Copy the docx to a fresh temporary file and return its filename.

    The caller owns the returned file and is responsible for deleting it.
    """
    # Local import: the method name shadows the module name in some scopes.
    from tempfile import mkstemp
    # mkstemp creates the file atomically; the old create/close/remove/copy
    # dance left a window in which another process could claim the name.
    handle, tfn = mkstemp()
    os.close(handle)
    shutil.copy(self.fn, tfn)
    return tfn
write the docx to a named tmpfile and return the tmpfile filename
57,595
def sheets(self):
    """Return the sheets of data: parsed XML per worksheet, keyed by the
    sheet file's basename."""
    result = Dict()
    sheet_sources = [n for n in self.zipfile.namelist() if 'xl/worksheets/' in n]
    for source in sheet_sources:
        sheet_name = os.path.splitext(os.path.basename(source))[0]
        result[sheet_name] = self.xml(source)
    return result
return the sheets of data .
57,596
def workbook_data(self):
    """Return a readable XML document combining all worksheets.

    Shared strings are resolved from xl/sharedStrings.xml and handed to
    the XT transform so cell references become literal text.
    """
    document = XML(fn=os.path.splitext(self.fn)[0] + '.xml', root=Element.workbook())
    # All <t> nodes from the shared-strings table, in index order.
    shared_strings = [str(t.text) for t in self.xml('xl/sharedStrings.xml').root.xpath(".//xl:t", namespaces=self.NS)]
    for key in self.sheets.keys():
        # Transform each sheet, resolving its shared-string references.
        worksheet = self.sheets[key].transform(XT, shared_strings=shared_strings)
        document.root.append(worksheet.root)
    return document
return a readable XML form of the data .
57,597
def process(self, event):
    """Enqueue the basename of the file referenced by a watcher event."""
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    logger.info("%s: put %s", self, event.src_path)
    self.queue.put(os.path.basename(event.src_path))
Put and process tasks in queue .
57,598
def main():
    """Scrape Apple's iCal feed for Australian public holidays and write
    one .ics file per state plus a nationwide file."""
    print("Downloading Holidays from Apple's server...")
    r = requests.get('http://files.apple.com/calendars/Australian32Holidays.ics')
    cal = Calendar.from_ical(r.text)
    print("Processing calendar data...")
    valid_states = ['ACT', 'NSW', 'NT', 'QLD', 'SA', 'TAS', 'VIC', 'WA']
    state_cal = {}
    all_cal = make_calendar()
    for state in valid_states:
        state_cal[state] = make_calendar()
    for event in cal.walk('VEVENT'):
        event_name = event.decoded('SUMMARY')
        # decoded() may return bytes on Python 3.
        if isinstance(event_name, bytes):
            event_name = event_name.decode('utf-8')
        event_name = event_name.lower()
        # Skip ignored events.  (The original tested the truthiness of
        # filter(), which is always True on Python 3 — a latent bug.)
        if any(ignored in event_name for ignored in IGNORED_EVENTS):
            continue
        if '(' in event_name:
            # Events scoped to specific states, e.g. "name (nsw, vic)".
            states = event_name.split('(', 2)[1].split(')')[0].split(',')
            if states == ['day in lieu']:
                all_cal.add_component(event)
                continue
            for state in states:
                state = state.strip().upper()
                assert state in valid_states, 'state=%r' % state
                state_cal[state].add_component(event)
        else:
            # Nationwide holiday.
            all_cal.add_component(event)
    print("Writing to disk...")
    with open('au_holidays.ics', 'wb') as f:
        f.write(all_cal.to_ical())
    for state in state_cal.keys():
        with open('%s_holidays.ics' % state.lower(), 'wb') as f:
            f.write(state_cal[state].to_ical())
    print("All done!")
Scrapes Apple's iCal feed for Australian public holidays and generates per-state listings.
57,599
def init_editing_mode(self, e):
    """Initialize vi editing mode: reset all vi state and install the
    vi key bindings."""
    self.show_all_if_ambiguous = 'on'
    self.key_dispatch = {}
    # vi mode state: insert/command mode flag, pending command, find-char
    # state, yank buffer, numeric multipliers, undo stack, search text.
    self.__vi_insert_mode = None
    self._vi_command = None
    self._vi_command_edit = None
    self._vi_key_find_char = None
    self._vi_key_find_direction = True
    self._vi_yank_buffer = None
    self._vi_multiplier1 = ''
    self._vi_multiplier2 = ''
    self._vi_undo_stack = []
    self._vi_undo_cursor = -1
    self._vi_current = None
    self._vi_search_text = ''
    # Seed the undo stack with the current line and start in insert mode.
    self.vi_save_line()
    self.vi_set_insert_mode(True)
    # Route every printable ASCII key through the vi key handler.
    for c in range(ord(' '), 127):
        self._bind_key('%s' % chr(c), self.vi_key)
    # Editing and mode-switch keys.
    self._bind_key('BackSpace', self.vi_backspace)
    self._bind_key('Escape', self.vi_escape)
    self._bind_key('Return', self.vi_accept_line)
    # Cursor movement.
    self._bind_key('Left', self.backward_char)
    self._bind_key('Right', self.forward_char)
    self._bind_key('Home', self.beginning_of_line)
    self._bind_key('End', self.end_of_line)
    self._bind_key('Delete', self.delete_char)
    # EOF, redo, history navigation and completion.
    self._bind_key('Control-d', self.vi_eof)
    self._bind_key('Control-z', self.vi_eof)
    self._bind_key('Control-r', self.vi_redo)
    self._bind_key('Up', self.vi_arrow_up)
    self._bind_key('Control-p', self.vi_up)
    self._bind_key('Down', self.vi_arrow_down)
    self._bind_key('Control-n', self.vi_down)
    self._bind_key('Tab', self.vi_complete)
Initialize vi editing mode.