idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
42,500
def skip(type_name, filename):
    """Provide reporting statistics for a skipped file."""
    report = ['Skipping {} file: {}'.format(type_name, filename)]
    return ReportStats(filename, report=report)
Provide reporting statistics for a skipped file .
42,501
def gifsicle(ext_args):
    """Run the external program gifsicle on the new file."""
    extern.run_ext(_GIFSICLE_ARGS + [ext_args.new_filename])
    return _GIF_FORMAT
Run the EXTERNAL program gifsicle .
42,502
def _optimize_image_external(filename, func, image_format, new_ext):
    """Optimize the file with the external program wrapper ``func``.

    Copies the file to a temporary name, runs the optimizer on it, and
    lets the files helper decide which version to keep.
    """
    new_filename = os.path.normpath(filename + TMP_SUFFIX + new_ext)
    shutil.copy2(filename, new_filename)
    new_image_format = func(ExtArgs(filename, new_filename))
    report_stats = files.cleanup_after_optimize(
        filename, new_filename, image_format, new_image_format)
    percent = stats.new_percent_saved(report_stats)
    # only record a per-program report line when something was saved
    report = '{}: {}'.format(func.__name__, percent) if percent != 0 else ''
    report_stats.report_list.append(report)
    return report_stats
Optimize the file with the external function .
42,503
def _optimize_with_progs(format_module, filename, image_format):
    """Run the enabled optimizer programs for this format in sequence."""
    filesize_in = os.stat(filename).st_size
    report_stats = None
    for func in format_module.PROGRAMS:
        if not getattr(Settings, func.__name__):
            continue  # this program is disabled in the settings
        report_stats = _optimize_image_external(
            filename, func, image_format, format_module.OUT_EXT)
        # chain the output of one optimizer into the next
        filename = report_stats.final_filename
        if format_module.BEST_ONLY:
            break
    if report_stats is None:
        # nothing ran at all -- record the file as skipped
        report_stats = stats.skip(image_format, filename)
    else:
        report_stats.bytes_in = filesize_in
    return report_stats
Use the correct optimizing functions in sequence .
42,504
def _get_format_module(image_format):
    """Pick the optimizer module for the given image format.

    Returns ``(module_or_None, nag_about_gifs)``.
    """
    nag_about_gifs = False
    if detect_format.is_format_selected(
            image_format, Settings.to_png_formats, png.PROGRAMS):
        format_module = png
    elif detect_format.is_format_selected(
            image_format, jpeg.FORMATS, jpeg.PROGRAMS):
        format_module = jpeg
    elif detect_format.is_format_selected(
            image_format, gif.FORMATS, gif.PROGRAMS):
        format_module = gif
        nag_about_gifs = True  # animated gifs deserve a warning later
    else:
        format_module = None
    return format_module, nag_about_gifs
Get the format module to use for optimizing the image .
42,505
def optimize_image(arg):
    """Optimize a given image from a filename.

    ``arg`` is a ``(filename, image_format, settings)`` tuple.  Returns the
    ReportStats for the file, ``None`` when the format is not selected, or
    an error ReportStats when optimization raised.
    """
    # fix: `filename` was assigned inside the try, so the except handler
    # raised NameError if unpacking (or Settings.update) failed first
    filename = '<unknown>'
    try:
        filename, image_format, settings = arg
        Settings.update(settings)
        format_module, nag_about_gifs = _get_format_module(image_format)
        if format_module is None:
            if Settings.verbose > 1:
                print(filename, image_format)
                print("\tFile format not selected.")
            return None
        report_stats = _optimize_with_progs(format_module, filename,
                                            image_format)
        report_stats.nag_about_gifs = nag_about_gifs
        stats.report_saved(report_stats)
        return report_stats
    except Exception as exc:
        print(exc)
        traceback.print_exc()
        return stats.ReportStats(filename, error="Optimizing Image")
Optimize a given image from a filename .
42,506
def _is_program_selected(progs):
    """Return True when at least one of the programs is enabled in Settings."""
    return any(getattr(Settings, prog.__name__) for prog in progs)
Determine if the program is enabled in the settings .
42,507
def is_format_selected(image_format, formats, progs):
    """Determine if the image format is selected by command-line arguments."""
    selected_formats = formats & Settings.formats
    return image_format in selected_formats and _is_program_selected(progs)
Determine if the image format is selected by command line arguments .
42,508
def _is_image_sequenced ( image ) : try : image . seek ( 1 ) image . seek ( 0 ) result = True except EOFError : result = False return result
Determine if the image is a sequenced image .
42,509
def get_image_format(filename):
    """Get the image (or comic archive) format of a file.

    Returns the PIL format name, a sequenced-gif variant, a comic archive
    format, or ERROR_FORMAT when the file is unrecognizable.
    """
    image = None
    bad_image = 1
    image_format = NONE_FORMAT
    sequenced = False
    try:
        # verify() returns None on success; any truthy value marks trouble
        bad_image = Image.open(filename).verify()
        image = Image.open(filename)
        image_format = image.format
        sequenced = _is_image_sequenced(image)
    except (OSError, IOError, AttributeError):
        pass
    if sequenced:
        image_format = gif.SEQUENCED_TEMPLATE.format(image_format)
    elif image is None or bad_image or image_format == NONE_FORMAT:
        # not a plain image -- maybe it is a comic archive
        image_format = ERROR_FORMAT
        comic_format = comic.get_comic_format(filename)
        if comic_format:
            image_format = comic_format
        if (Settings.verbose > 1) and image_format == ERROR_FORMAT and \
                (not Settings.list_only):
            print(filename, "doesn't look like an image or comic archive.")
    return image_format
Get the image format .
42,510
def detect_file(filename):
    """Decide what to do with the file: return its format or None."""
    image_format = get_image_format(filename)
    if image_format in Settings.formats:
        return image_format
    # silently ignore unreadable files; optionally report disabled types
    if image_format not in (NONE_FORMAT, ERROR_FORMAT):
        if Settings.verbose > 1 and not Settings.list_only:
            print(filename, image_format, 'is not a enabled image or '
                  'comic archive type.')
    return None
Decide what to do with the file .
42,511
def update(cls, settings):
    """Copy all public attributes of ``settings`` onto the class."""
    for key, val in settings.__dict__.items():
        if not key.startswith('_'):
            setattr(cls, key, val)
Update settings with a dict .
42,512
def _set_program_defaults(cls, programs):
    """Disable any enabled program whose external binary does not run."""
    for program in programs:
        name = program.__name__
        runnable = getattr(cls, name) and \
            extern.does_external_program_run(name, Settings.verbose)
        setattr(cls, name, runnable)
Run the external program tester on the required binaries .
42,513
def config_program_reqs(cls, programs):
    """Test the external binaries and abort when nothing can be done."""
    cls._set_program_defaults(programs)
    do_png = cls.optipng or cls.pngout or cls.advpng
    do_jpeg = cls.mozjpeg or cls.jpegrescan or cls.jpegtran
    do_comics = cls.comics
    if not (do_png or do_jpeg or do_comics):
        print("All optimizers are not available or disabled.")
        exit(1)
Run the program tester and determine if we can do anything .
42,514
def optipng(ext_args):
    """Run the external program optipng on the file."""
    extern.run_ext(_OPTIPNG_ARGS + [ext_args.new_filename])
    return _PNG_FORMAT
Run the external program optipng on the file .
42,515
def advpng(ext_args):
    """Run the external program advpng on the file."""
    extern.run_ext(_ADVPNG_ARGS + [ext_args.new_filename])
    return _PNG_FORMAT
Run the external program advpng on the file .
42,516
def pngout(ext_args):
    """Run the external program pngout on the file."""
    extern.run_ext(_PNGOUT_ARGS + [ext_args.old_filename,
                                   ext_args.new_filename])
    return _PNG_FORMAT
Run the external program pngout on the file .
42,517
def _nested_transactional(fn):
    """In a transactional method create a nested transaction.

    When the inner call violates the transactional policy, the held
    transaction is rolled back and the stored result is returned instead.
    """
    @wraps(fn)
    def wrapped(self, *args, **kwargs):
        try:
            result = fn(self, *args, **kwargs)
        except _TransactionalPolicyViolationError as e:
            getattr(self, _TX_HOLDER_ATTRIBUTE).rollback()
            result = e.result
        return result
    return wrapped
In a transactional method create a nested transaction .
42,518
def check_file(
    state,
    fname,
    missing_msg="Did you create a file named `{}`?",
    is_dir_msg="Want to check a file named `{}`, but found a directory.",
    parse=True,
    use_fs=True,
    use_solution=False,
):
    """Test whether a file exists and make its contents the student code."""
    if use_fs:
        path = Path(fname)
        if not path.exists():
            state.report(Feedback(missing_msg.format(fname)))
        if path.is_dir():
            state.report(Feedback(is_dir_msg.format(fname)))
        code = path.read_text()
    else:
        # pull the code from the state's in-memory file map instead
        code = _get_fname(state, "student_code", fname)
        if code is None:
            state.report(Feedback(missing_msg.format(fname)))
    sol_kwargs = {"solution_code": None, "solution_ast": None}
    if use_solution:
        sol_code = _get_fname(state, "solution_code", fname)
        if sol_code is None:
            raise Exception("Solution code does not have file named: %s" % fname)
        sol_kwargs["solution_code"] = sol_code
        sol_kwargs["solution_ast"] = (
            state.parse(sol_code, test=False) if parse else None
        )
    return state.to_child(
        student_code=code,
        student_ast=state.parse(code) if parse else None,
        fname=fname,
        **sol_kwargs
    )
Test whether file exists and make its contents the student code .
42,519
def has_dir(state, fname,
            incorrect_msg="Did you create a directory named `{}`?"):
    """Test whether a directory exists; report feedback when it does not."""
    if not Path(fname).is_dir():
        state.report(Feedback(incorrect_msg.format(fname)))
    return state
Test whether a directory exists .
42,520
def exec_task(task_path, data):
    """Execute the task at ``task_path`` with ``data`` as its payload."""
    if not data:
        data = {'data': None, 'path': task_path}
    elif not isinstance(data, (str, bytes)):
        data = {'data': json.dumps(data, cls=RequestJSONEncoder),
                'path': task_path}
    else:
        # a "file://" payload is replaced by the content of that file
        if data is not None and data.startswith("file://"):
            with open(data[len("file://"):]) as f:
                data = f.read()
        data = {'data': data, 'path': task_path}
    job = Job(data)
    task, task_callable = create_task(task_path)
    with delegating_job_context(job, task, task_callable) as jc:
        return jc.task_callable(jc.task_data)
Execute task .
42,521
def dump(node, config):
    """Convert a node tree to a simple nested dict structure.

    ``config`` decides what counts as a node, a list, or a leaf and how
    to read values out of each.
    """
    if config.is_node(node):
        fields = OrderedDict()
        for name in config.fields_iter(node):
            attr = config.field_val(node, name)
            if attr is not None:  # absent fields are omitted entirely
                fields[name] = dump(attr, config)
        return {"type": config.node_type(node), "data": fields}
    if config.is_list(node):
        return [dump(item, config) for item in config.list_iter(node)]
    return config.leaf_val(node)
Convert a node tree to a simple nested dict
42,522
def cluster(seqs, threshold=0.975, out_file=None, temp_dir=None, make_db=True,
            quiet=False, threads=0, return_just_seq_ids=False, max_memory=800,
            debug=False):
    """Perform sequence clustering with CD-HIT and parse the results."""
    if make_db:
        ofile, cfile, seq_db, db_path = cdhit(
            seqs, out_file=out_file, temp_dir=temp_dir, threshold=threshold,
            make_db=True, quiet=quiet, threads=threads,
            max_memory=max_memory, debug=debug)
        return parse_clusters(ofile, cfile, seq_db=seq_db, db_path=db_path,
                              return_just_seq_ids=return_just_seq_ids)
    # without a database, keep an in-memory dict of the sequences instead
    seqs = [Sequence(s) for s in seqs]
    seq_dict = {s.id: s for s in seqs}
    ofile, cfile = cdhit(
        seqs, out_file=out_file, temp_dir=temp_dir, threads=threads,
        threshold=threshold, make_db=False, quiet=quiet,
        max_memory=max_memory, debug=debug)
    return parse_clusters(ofile, cfile, seq_dict=seq_dict,
                          return_just_seq_ids=return_just_seq_ids)
Perform sequence clustering with CD - HIT .
42,523
def cdhit(seqs, out_file=None, temp_dir=None, threshold=0.975, make_db=True,
          quiet=False, threads=0, max_memory=800, retries=5, debug=False):
    """Run CD-HIT on the given sequences.

    Returns ``(out_file, clust_file, seq_db, db_path)`` when ``make_db``
    is True, otherwise ``(out_file, clust_file)``.
    """
    start_time = time.time()
    seqs = [Sequence(s) for s in seqs]
    if not quiet:
        print('CD-HIT: clustering {} sequences'.format(len(seqs)))
    if out_file is None:
        out_file = tempfile.NamedTemporaryFile(dir=temp_dir, delete=False)
        out_file.close()
        ofile = out_file.name
    else:
        ofile = os.path.expanduser(out_file)
    cfile = ofile + '.clstr'
    # truncate both output files so the retry loop below starts clean
    with open(ofile, 'w') as f:
        f.write('')
    with open(cfile, 'w') as f:
        f.write('')
    ifile = _make_cdhit_input(seqs, temp_dir)
    cdhit_cmd = 'cdhit -i {} -o {} -c {} -n 5 -d 0 -T {} -M {}'.format(
        ifile, ofile, threshold, threads, max_memory)
    # retry while either output file is still empty
    # fix: the original tested cfile twice, so an empty ofile never retried
    while not all([os.path.getsize(ofile), os.path.getsize(cfile)]):
        cluster = sp.Popen(cdhit_cmd, shell=True,
                           stdout=sp.PIPE, stderr=sp.PIPE)
        stdout, stderr = cluster.communicate()
        if not retries:
            break
        retries -= 1
    end_time = time.time()
    if debug:
        print(stdout)
        print(stderr)
    else:
        os.unlink(ifile)
    if not quiet:
        print('CD-HIT: clustering took {:.2f} seconds'.format(
            end_time - start_time))
    if make_db:
        if not quiet:
            print('CD-HIT: building a SQLite3 database')
        seq_db, db_path = _build_seq_db(seqs, direc=temp_dir)
        return ofile, cfile, seq_db, db_path
    return ofile, cfile
Run CD - HIT .
42,524
def parse_clusters(out_file, clust_file, seq_db=None, db_path=None,
                   seq_dict=None, return_just_seq_ids=False):
    """Parse CD-HIT output into clusters (or bare sequence-ID lists)."""
    with open(clust_file, 'r') as f:
        raw_clusters = [c.split('\n') for c in f.read().split('\n>')]
    if return_just_seq_ids:
        ids = []
        for rc in raw_clusters:
            # first line is the cluster header; member lines follow
            members = [line.split()[2][1:-3] for line in rc[1:] if line]
            ids.append(members)
        os.unlink(out_file)
        os.unlink(clust_file)
        return ids
    os.unlink(out_file)
    os.unlink(clust_file)
    clusters = [Cluster(rc, seq_db, db_path, seq_dict) for rc in raw_clusters]
    return CDHITResult(clusters, seq_db=seq_db, db_path=db_path,
                       seq_dict=seq_dict)
Parses CD - HIT output .
42,525
def log_output(f):
    """Decorator that logs the wrapped function's return value."""
    @wraps(f)
    def wrapper_fn(*args, **kwargs):
        result = f(*args, **kwargs)
        logging.debug("Logging result %s.", result)
        return result
    return wrapper_fn
Logs the output value .
42,526
def save_to_file(destination_filename, append=False):
    """Decorator factory: write the wrapped function's result to a file."""
    def decorator_fn(f):
        @wraps(f)
        def wrapper_fn(*args, **kwargs):
            result = f(*args, **kwargs)
            # make sure the target directory exists before writing
            makedirs(os.path.dirname(destination_filename))
            with open(destination_filename,
                      "a" if append else "w") as text_file:
                text_file.write(result)
            return result
        return wrapper_fn
    return decorator_fn
Save the output value to file .
42,527
def json_input(f):
    """Decorator that parses the task's payload as JSON before calling ``f``."""
    @wraps(f)
    def json_input_decorator(*args, **kwargs):
        task_data = _get_data_from_args(args)
        if task_data is None:
            logging.error("Task data is empty during JSON decoding.")
        if task_data.payload:
            try:
                # inside a web request the framework may have decoded it already
                is_transformed = request.get_json()
                if not is_transformed:
                    task_data.transform_payload(json.loads)
            except (ValueError, RuntimeError):
                # no request context (or undecodable body) -- decode directly
                logging.error("Exception while processing JSON input decorator.")
                task_data.transform_payload(json.loads)
        else:
            task_data.transform_payload(lambda _: {})
        return f(*args, **kwargs)
    return json_input_decorator
Expects task input data in json format and parse this data .
42,528
def jsonp_wrap(callback_key='callback'):
    """Decorator factory that wraps a task's JSON output in a JSONP callback."""
    def decorator_fn(f):
        @wraps(f)
        def jsonp_output_decorator(*args, **kwargs):
            task_data = _get_data_from_args(args)
            data = task_data.get_data()
            if callback_key not in data:
                raise KeyError('Missing required parameter "{0}" for task.'
                               .format(callback_key))
            callback = data[callback_key]
            jsonp = f(*args, **kwargs)
            context = JobContext.get_current_context()
            if isinstance(context, WebJobContext):
                # web responses get the javascript mime type set
                context.add_responder(
                    MimeSetterWebTaskResponder('application/javascript'))
            return "{callback}({data})".format(callback=callback, data=jsonp)
        return jsonp_output_decorator
    return decorator_fn
Format response to jsonp and add a callback to JSON data - a jsonp request
42,529
def append_get_parameters(accept_only_web=True):
    """Decorator factory that merges GET query parameters into the task data."""
    def wrapper(f):
        @wraps(f)
        def append_get_parameters_wrapper_fn(*args, **kwargs):
            jc = JobContext.get_current_context()
            if isinstance(jc, WebJobContext):
                web_request = jc.get_web_request()
                task_data = _get_data_from_args(args)
                task_data.get_data().update(
                    web_request.args.to_dict(flat=True))
            elif accept_only_web:
                raise Exception("append_get_parameters decorator may be used "
                                "with GET requests only.")
            return f(*args, **kwargs)
        return append_get_parameters_wrapper_fn
    return wrapper
Task decorator which appends the GET data to the task data .
42,530
def secured_task(f):
    """Decorator that verifies the security token before running the task."""
    @wraps(f)
    def secured_task_decorator(*args, **kwargs):
        task_data = _get_data_from_args(args)
        assert isinstance(task_data, TaskData)
        security = task_data.get_data()['security']
        if not verify_security_data(security):
            raise SecurityException(security['hashed_token'])
        # strip the security envelope, keeping only the real payload
        task_data.transform_payload(lambda x: x['data'])
        return f(*args, **kwargs)
    return secured_task_decorator
Secured task decorator .
42,531
def file_upload(f):
    """Decorator that replaces the payload with the uploaded 'file' list."""
    @wraps(f)
    def file_upload_decorator(*args, **kwargs):
        task_data = _get_data_from_args(args)
        if task_data is None:
            logging.error("Task data is empty during FilesUploadDecorator.")
        task_data.transform_payload(
            lambda _: request.files.getlist('file'))
        return f(*args, **kwargs)
    return file_upload_decorator
Return a list of werkzeug.datastructures.FileStorage objects — the files to be uploaded.
42,532
def forbid_web_access(f):
    """Decorator that forbids running the task through an HTTP request."""
    @wraps(f)
    def wrapper_fn(*args, **kwargs):
        if isinstance(JobContext.get_current_context(), WebJobContext):
            raise ForbiddenError('Access forbidden from web.')
        return f(*args, **kwargs)
    return wrapper_fn
Forbids running task using http request .
42,533
# Parse the filter URL argument ``arg`` -- a comma-separated list of
# ``column<op>value`` expressions, where ``__`` in a column name walks model
# relationships -- and apply the resulting expressions to ``query`` combined
# with ``query_operator`` (default AND).  ``arg_types`` optionally maps column
# names to conversion callables for the raw string value.
# NOTE(review): this source line is flattened, so the nesting of the
# relationship branch vs. the hasattr/raise branch is ambiguous; the code is
# kept byte-identical rather than restyled.
def filter_from_url_arg ( model_cls , query , arg , query_operator = and_ , arg_types = None ) : fields = arg . split ( ',' ) mapper = class_mapper ( model_cls ) if not arg_types : arg_types = { } exprs = [ ] joins = set ( ) for expr in fields : if expr == "" : continue e_mapper = mapper e_model_cls = model_cls operator = None method = None for op in operator_order : if op in expr : operator = op method = operator_to_method [ op ] break if operator is None : raise Exception ( 'No operator in expression "{0}".' . format ( expr ) ) ( column_names , value ) = expr . split ( operator ) column_names = column_names . split ( '__' ) value = value . strip ( ) for column_name in column_names : if column_name in arg_types : typed_value = arg_types [ column_name ] ( value ) else : typed_value = value if column_name in e_mapper . relationships : joins . add ( column_name ) e_model_cls = e_mapper . attrs [ column_name ] . mapper . class_ e_mapper = class_mapper ( e_model_cls ) if hasattr ( e_model_cls , column_name ) : column = getattr ( e_model_cls , column_name ) exprs . append ( getattr ( column , method ) ( typed_value ) ) else : raise Exception ( 'Invalid property {0} in class {1}.' . format ( column_name , e_model_cls ) ) return query . join ( * joins ) . filter ( query_operator ( * exprs ) )
Parse filter URL argument arg and apply to query
42,534
def model_tree(name, model_cls, visited=None):
    """Create a simple tree of the model's columns and its related models.

    ``visited`` tracks model classes already expanded so cyclic
    relationships do not recurse forever.
    """
    # fix: `if not visited` also reset a caller-supplied *empty* set,
    # breaking cycle tracking shared across sibling calls
    if visited is None:
        visited = set()
    visited.add(model_cls)
    mapper = class_mapper(model_cls)
    columns = [column.key for column in mapper.column_attrs]
    related = [
        model_tree(rel.key, rel.mapper.entity, visited)
        for rel in mapper.relationships
        if rel.mapper.entity not in visited
    ]
    return {name: columns + related}
Create a simple tree of the model's properties and its related models.
42,535
def flat_model(tree):
    """Flatten the tree into a list of property names, prefixing parents."""
    names = []
    for columns in viewvalues(tree):
        for col in columns:
            if isinstance(col, dict):
                # nested related model: recurse and prefix with its name
                parent = list(col)[0]
                names.extend(parent + '__' + child
                             for child in flat_model(col))
            else:
                names.append(col)
    return names
Flatten the tree into a list of properties adding parents as prefixes .
42,536
def execute_job(job, app=Injected, task_router=Injected):
    """Execute a job: route its path to a task and run the task callable."""
    app.logger.info("Job fetched, preparing the task '{0}'.".format(job.path))
    task, task_callable = task_router.route(job.path)
    jc = JobContext(job, task, task_callable)
    app.logger.info("Executing task.")
    result = jc.task_callable(jc.task_data)
    app.logger.info("Task {0} executed successfully.".format(job.path))
    return {'task_name': job.path, 'data': result}
Execute a job .
42,537
def handle_exception(self, e, task_path):
    """Log an exception raised during task execution and wrap it as a result."""
    self._app.logger.error(str(e) + "\n" + traceback.format_exc())
    return {'task_name': task_path, 'data': None, 'error': str(e)}
Handle exception raised during task execution .
42,538
def execute_job(self, job):
    """Execute a job from the task queue, translating errors into results."""
    try:
        # delegates to the module-level execute_job
        return execute_job(job)
    except KillWorkerException:
        self._app.logger.info("Stopping Gearman worker on demand flag set.")
        self.stop_worker()
    except Exception as e:
        return self.handle_exception(e, job.path)
Execute job given by the task queue .
42,539
def legacy_signature(**kwargs_mapping):
    """Allow calling a function with legacy keyword-argument names.

    ``kwargs_mapping`` maps old argument names to new ones; any matching
    keyword arguments are renamed before the call.
    """
    def signature_decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            renamed = {kwargs_mapping.get(k, k): v for k, v in kwargs.items()}
            return f(*args, **renamed)
        return wrapper
    return signature_decorator
This decorator makes it possible to call a function using old argument names when they are passed as keyword arguments .
42,540
def _bind_service(package_name, cls_name, binder=Injected):
    """Bind a service class to the application injector as a singleton."""
    module = importlib.import_module(package_name)
    cls = getattr(module, cls_name)
    binder.bind(cls, to=binder.injector.create_object(cls), scope=singleton)
    logging.debug("Created {0} binding.".format(cls))
Bind service to application injector .
42,541
def initialize(config):
    """Bind every service listed in the SERVICE_INJECTION config entry."""
    service_injection_config = config.get('SERVICE_INJECTION', ())
    if not isinstance(service_injection_config, (tuple, list)):
        service_injection_config = (service_injection_config,)
    for si_conf in service_injection_config:
        if isinstance(si_conf, str):
            # fully qualified class name, e.g. "package.module.ClassName"
            package_name, cls_name = si_conf.rsplit('.', 1)
            ServiceInitializer._bind_service(package_name, cls_name)
        elif isinstance(si_conf, dict):
            # {'package': ..., 'list': [ClassName, ...]} form
            service_package = si_conf['package']
            for cls_name in si_conf['list']:
                module_name = camelcase_to_underscore(cls_name)
                package_name = "{0}.{1}".format(service_package, module_name)
                ServiceInitializer._bind_service(package_name, cls_name)
Initialize method .
42,542
def _get_link_pages(page, per_page, count, page_url):
    """Create prev/next/first/last pagination links for the Link header."""
    current_page = _page_arg(page)
    links = {}
    end = page * per_page
    if page > 1:
        links['prev'] = page_url.replace(current_page, _page_arg(page - 1))
    if end < count:
        links['next'] = page_url.replace(current_page, _page_arg(page + 1))
    if per_page < count:
        links['first'] = page_url.replace(current_page, _page_arg(1))
        last_page = (count + per_page - 1) // per_page  # ceiling division
        links['last'] = page_url.replace(current_page, _page_arg(last_page))
    return links
Create link header for page metadata .
42,543
def to_filter(self, query, arg):
    """Apply the json-server filter argument using the OR operator."""
    return filter_from_url_arg(self.model_cls, query, arg,
                               query_operator=or_)
Json - server filter using the _or_ operator .
42,544
def create(self, *args, **kwargs):
    """Create the resource, adding the 201 status and a Location link."""
    resource = super(JsonServerResource, self).create(*args, **kwargs)
    return ResourceResult(
        body=resource,
        status=get_http_status_code_value(http.client.CREATED),
        location="{}/{}".format(request.url, resource.get_id()),
    )
Adds created http status response and location link .
42,545
def _create_filter_by(self):
    """Transform json-server filter arguments into model-resource ones."""
    filter_by = []
    for name, values in request.args.copy().lists():
        if name in _SKIPPED_ARGUMENTS:
            continue
        # presumably the regex strips the _ne/_lte/... suffix -- verify
        column = _re_column_name.search(name).group(1)
        if column not in self._model_columns:
            continue
        for value in values:
            if name.endswith('_ne'):
                filter_by.append(name[:-3] + '!=' + value)
            elif name.endswith('_lte'):
                filter_by.append(name[:-4] + '<=' + value)
            elif name.endswith('_gte'):
                filter_by.append(name[:-4] + '>=' + value)
            elif name.endswith('_like'):
                filter_by.append(name[:-5] + '::like::%' + value + '%')
            else:
                filter_by.append(name.replace('__', '.') + '==' + value)
    filter_by += self._create_fulltext_query()
    return ','.join(filter_by)
Transform the json - server filter arguments to model - resource ones .
42,546
def _create_related(args):
    """Create the related field from json-server ``_embed`` arguments."""
    if '_embed' in request.args:
        args['related'] = ','.join(request.args.getlist('_embed'))
        del args['_embed']
Create related field from _embed arguments .
42,547
def _create_fulltext_query(self):
    """Support the json-server fulltext search (``q``) with broad LIKE filters."""
    filter_by = []
    if 'q' in request.args:
        columns = flat_model(
            model_tree(self.__class__.__name__, self.model_cls))
        for q in request.args.getlist('q'):
            filter_by += ['{col}::like::%{q}%'.format(col=col, q=q)
                          for col in columns]
    return filter_by
Support the json - server fulltext search with a broad LIKE filter .
42,548
# Transform json-server list arguments (_limit, _page, _start/_end, _sort,
# _order) into the model-resource equivalents (limit, page, offset, order_by,
# filter_by) in place on ``args``.  A '-' prefix on order_by marks descending
# order; filter_by is built by self._create_filter_by().
# NOTE(review): this source line is flattened, so the exact nesting of the
# inner conditionals (e.g. the default limit under _page, the _order handling
# under _sort) is ambiguous; the code is kept byte-identical.
def _transform_list_args ( self , args ) : if '_limit' in args : args [ 'limit' ] = int ( args [ '_limit' ] ) del args [ '_limit' ] if '_page' in args : page = int ( args [ '_page' ] ) if page < 0 : page = 1 args [ 'page' ] = page del args [ '_page' ] if 'limit' not in args : args [ 'limit' ] = 10 if '_end' in args : end = int ( args [ '_end' ] ) args [ 'limit' ] = end - int ( args . get ( '_start' , 0 ) ) if '_start' in args : args [ 'offset' ] = args [ '_start' ] del args [ '_start' ] if '_sort' in args : args [ 'order_by' ] = args [ '_sort' ] . replace ( '__' , '.' ) del args [ '_sort' ] if args . get ( '_order' , 'ASC' ) == 'DESC' : args [ 'order_by' ] = '-' + args [ 'order_by' ] if '_order' in args : del args [ '_order' ] filter_by = self . _create_filter_by ( ) if filter_by : args [ 'filter_by' ] = filter_by
Transforms all list arguments from json - server to model - resource ones .
42,549
# Read one row (when the context yields a row id) or a collection.  For
# collections the json-server list arguments are transformed, a count and
# pagination links are computed when paging is requested, and the result is
# wrapped in a ResourceResult; a missing single row yields NOT_FOUND.
# NOTE(review): this source line is flattened, so the exact nesting of the
# paging conditionals and of self._create_related relative to the row_id
# branch is ambiguous; the code is kept byte-identical.
def read ( self , params , args , data ) : result_count = None result_links = None if params is None : params = [ ] if args : args = args . copy ( ) else : args = { } ctx = self . _create_context ( params , args , data ) row_id = ctx . get_row_id ( ) if not row_id : self . _transform_list_args ( args ) if 'page' in args or 'limit' in args : ctx = self . _create_context ( params , args , data ) result_count = self . _get_collection_count ( ctx ) if 'page' in args : result_links = _get_link_pages ( page = args [ 'page' ] , per_page = int ( args [ 'limit' ] ) , count = result_count , page_url = request . url ) if 'limit' not in args : args [ 'limit' ] = 'unlimited' self . _create_related ( args ) try : return ResourceResult ( body = super ( JsonServerResource , self ) . read ( params , args , data ) , count = result_count , links = result_links ) except NoResultFound : return NOT_FOUND
Modifies the parameters and adds metadata for read results .
42,550
def update(self, *args, **kwargs):
    """Update the resource; warn on PUT and return NOT_FOUND when nothing matched."""
    if request.method == 'PUT':
        logging.warning("Called not implemented resource method PUT")
    resource = super(JsonServerResource, self).update(*args, **kwargs)
    if resource:
        return resource
    return NOT_FOUND
Modifies the parameters and adds metadata for update results .
42,551
def delete(self, params, args, data):
    """Delete a single row; return an empty body or NOT_FOUND."""
    ctx = self._create_context(params, args, data)
    row_id = ctx.get_row_id()
    if not row_id:
        return NOT_FOUND  # collection-wide delete is not supported
    if self._delete_one(row_id, ctx):
        return ResourceResult(body={})
    return NOT_FOUND
Supports only singular delete and adds proper http status .
42,552
def _aggregate ( data , norm = True , sort_by = 'value' , keys = None ) : if keys : vdict = { k : 0 for k in keys } for d in data : if d in keys : vdict [ d ] += 1 else : vdict = { } for d in data : vdict [ d ] = vdict [ d ] + 1 if d in vdict else 1 vals = [ ( k , v ) for k , v in vdict . items ( ) ] if sort_by == 'value' : vals . sort ( key = lambda x : x [ 0 ] ) else : vals . sort ( key = lambda x : x [ 1 ] ) xs = [ v [ 0 ] for v in vals ] if norm : raw_y = [ v [ 1 ] for v in vals ] total_y = sum ( raw_y ) ys = [ 100. * y / total_y for y in raw_y ] else : ys = [ v [ 1 ] for v in vals ] return xs , ys
Counts the number of occurrences of each item in data.
42,553
def generate_apiary_doc(task_router):
    """Generate Apiary documentation for all task and method packages."""
    generator = ApiaryDoc()
    for package in task_router.get_task_packages() + get_method_packages():
        generator.docmodule(importlib.import_module(package))
    return generator
Generate apiary documentation .
42,554
def setup_logging(logfile, print_log_location=True, debug=False):
    """Set up file logging (plus a stream handler) via the logging package."""
    make_dir(os.path.dirname(logfile))
    fmt = '[%(levelname)s] %(name)s %(asctime)s %(message)s'
    level = logging.DEBUG if debug else logging.INFO
    logging.basicConfig(filename=logfile, filemode='w',
                        format=fmt, level=level)
    logger = add_stream_handler(logging.getLogger('log'))
    if print_log_location:
        logger.info('LOG LOCATION: {}'.format(logfile))
Set up logging using the built - in logging package .
42,555
def get_logger(name=None):
    """Get a logging handle, attaching a stream handler exactly once."""
    logger = logging.getLogger(name)
    if not logger.handlers:
        logger = add_stream_handler(logger)
    return logger
Get a logging handle .
42,556
# Decorator for injecting parameters for ASL objects.  Plain functions get
# their bindings resolved through the current app's injector and merged with
# any explicit kwargs; bound methods are routed through injector.inject after
# installing the injector into the receiver.  Note that ``bindings`` is
# mutated in place (values wrapped in BindingKey) when decorating a function.
# NOTE(review): flattened source -- the nesting of the inner wrappers and the
# ismethod dispatch is too intricate to restyle safely; kept byte-identical.
def inject ( ** bindings ) : def outer_wrapper ( f ) : def function_wrapper ( ff ) : for key , value in viewitems ( bindings ) : bindings [ key ] = BindingKey ( value ) @ functools . wraps ( ff ) def _inject ( * args , ** kwargs ) : inj = get_current_app ( ) . injector dependencies = inj . args_to_inject ( function = ff , bindings = bindings , owner_key = ff ) dependencies . update ( kwargs ) try : return ff ( * args , ** dependencies ) except TypeError as e : reraise ( e , CallError ( ff , args , dependencies , e ) ) return _inject def method_or_class_wrapper ( * a , ** kwargs ) : inj = get_current_app ( ) . injector inj . install_into ( a [ 0 ] ) inject_f = injector . inject ( ** bindings ) return inject_f ( f ) ( * a , ** kwargs ) if inspect . ismethod ( f ) : return method_or_class_wrapper else : return function_wrapper ( f ) return outer_wrapper
Decorator for injecting parameters for ASL objects .
42,557
def igphyml(input_file=None, tree_file=None, root=None, verbose=False):
    """Compute a phylogenetic tree using IgPhyML.

    Runs a GY94 pass first, then an HLP17 pass rooted at ``root`` seeded
    with the GY94 tree.  Returns the final tree file path.  Raises
    RuntimeError when the igphyml binary is not installed.
    """
    if shutil.which('igphyml') is None:
        raise RuntimeError('It appears that IgPhyML is not installed.\nPlease install and try again.')
    # fix: the original referenced an undefined name (aln_file)
    igphyml_cmd1 = 'igphyml -i {} -m GY -w M0 -t e --run_id gy94'.format(
        input_file)
    # fix: shell=True is required since the command is a single string
    p1 = sp.Popen(igphyml_cmd1, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)
    stdout1, stderr1 = p1.communicate()
    if verbose:
        # fix: communicate() yields bytes; decode before concatenating
        print(stdout1.decode() + '\n')
        print(stderr1.decode() + '\n\n')
    # fix: the format string mixed numbered ({0}) and automatic ({}) fields,
    # which raises ValueError; the -u argument is the GY94 intermediate tree
    igphyml_cmd2 = ('igphyml -i {0} -m HLP17 --root {1} -o lr '
                    '-u {0}_igphyml_tree.txt_gy94 -o {2}').format(
        input_file, root, tree_file)
    p2 = sp.Popen(igphyml_cmd2, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)
    stdout2, stderr2 = p2.communicate()
    if verbose:
        print(stdout2.decode() + '\n')
        print(stderr2.decode() + '\n')
    return tree_file + '_igphyml_tree.txt'
Computes a phylogenetic tree using IgPhyML .
42,558
def integrate_to_file(what, filename, start_line, end_line):
    """Integrate ``what`` into a file between ``start_line`` and ``end_line``.

    Content before the first ``start_line`` and after the first ``end_line``
    is preserved; whatever lay between the marks is replaced by ``what``.
    The file is created when missing.
    """
    try:
        with open(filename) as f:
            lines = f.readlines()
    except IOError:
        lines = []
    # fix: open in text mode -- the original wrote str into the default
    # binary-mode temporary file (TypeError on Python 3)
    tmp_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
    lines.reverse()
    # copy everything up to (and excluding) the old start mark
    while lines:
        line = lines.pop()
        if line == start_line:
            break
        tmp_file.write(line)
    tmp_file.write(start_line)
    tmp_file.write(what)
    tmp_file.write(end_line)
    # skip the old content up to (and including) the old end mark
    while lines:
        line = lines.pop()
        if line == end_line:
            break
    # fix: the remaining lines are still in reversed order, so writing them
    # directly corrupted the tail on every other run
    tmp_file.writelines(reversed(lines))
    tmp_file.close()
    # NOTE(review): os.rename fails across filesystems -- confirm the temp
    # dir and target share a device, or switch to shutil.move
    os.rename(tmp_file.name, filename)
WARNING: this currently works only every second run — a serious bug. Integrate content into a file within line marks.
42,559
def update_model(raw_model, app_model, forbidden_keys=None, inverse=False):
    """Update ``raw_model`` attributes from the values in ``app_model``.

    ``forbidden_keys`` are never copied.  With ``inverse`` the key set is
    taken from ``app_model``; otherwise from ``raw_model.__dict__``.
    Non-dict ``app_model`` objects are read through their ``__dict__``.
    """
    if forbidden_keys is None:
        forbidden_keys = []
    if type(app_model) != dict:
        app_model = app_model.__dict__
    if inverse:
        for k in app_model:
            logging.debug("Considering property {0}.".format(k))
            if hasattr(raw_model, k) and k not in forbidden_keys:
                logging.debug(
                    "Setting property {0} to value '{1}'.".format(k, app_model[k]))
                setattr(raw_model, k, app_model[k])
    else:
        for k in raw_model.__dict__:
            logging.debug("Considering property {0}.".format(k))
            if k in app_model and k not in forbidden_keys:
                logging.debug(
                    "Setting property {0} to value '{1}'.".format(k, app_model[k]))
                setattr(raw_model, k, app_model[k])
Updates the raw_model according to the values in the app_model .
42,560
def progress_bar(finished, total, start_time=None, extra_info=None,
                 autocomplete=True, completion_string='\n'):
    """Prints an ASCII progress bar to stdout.

    Args:
        finished: number of completed items.
        total: total number of items.
        start_time: optional ``datetime``; when given, elapsed mm:ss is shown.
        extra_info: optional text appended after the bar.
        autocomplete: if True, append ``completion_string`` when finished.
        completion_string: text printed once ``finished == total``.
            BUGFIX: the default was the literal two characters '/n' --
            a trailing newline ('\\n') was clearly intended.
    """
    pct = int(100. * finished / total)
    ticks = int(pct / 2)
    spaces = int(50 - ticks)
    if start_time is not None:
        elapsed = (datetime.now() - start_time).seconds
        minutes = int(elapsed / 60)
        seconds = int(elapsed % 60)
        # Zero-pad minutes/seconds to two digits.
        minute_str = '0' * (2 - len(str(minutes))) + str(minutes)
        second_str = '0' * (2 - len(str(seconds))) + str(seconds)
        prog_bar = '\r({}/{}) |{}{}| {}% ({}:{}) '.format(
            finished, total, '|' * ticks, ' ' * spaces, pct, minute_str, second_str)
    else:
        prog_bar = '\r({}/{}) |{}{}| {}% '.format(
            finished, total, '|' * ticks, ' ' * spaces, pct)
    if extra_info is not None:
        prog_bar += str(extra_info)
    if autocomplete and finished == total:
        prog_bar += completion_string
    sys.stdout.write(prog_bar)
    sys.stdout.flush()
Prints an ASCII progress bar .
42,561
def update(self, app_model, forbidden_keys=None, inverse=False):
    """Updates the raw model. Consult zsl.utils.model_helper.update_model."""
    keys = [] if forbidden_keys is None else forbidden_keys
    update_model(self, app_model, keys, inverse)
Updates the raw model . Consult zsl . utils . model_helper . update_model .
42,562
def _parse_dict(element, definition):
    """Parse an xml element by a definition given in dict format.

    Each key may carry a trailing ``*`` (required marker, handled by
    ``_parse_name``); values are sub-definitions parsed recursively.
    """
    result = {}
    for raw_name, subdef in viewitems(definition):
        key, required = _parse_name(raw_name)
        result[key] = xml_to_json(element, subdef, required)
    return result
Parse xml element by a definition given in dict format .
42,563
def _parse_tuple(element, definition, required):
    """Parse an xml element by a definition given in tuple format.

    A 1-tuple is parsed directly; otherwise the first item is either a
    callable (applied to the parsed tail definitions) or a child tag name
    whose element is parsed with the second item.
    """
    if not definition:
        return None
    if len(definition) == 1:
        return xml_to_json(element, definition[0], required)
    head = definition[0]
    if callable(head):
        # head aggregates the values produced by the remaining definitions
        return head(*[xml_to_json(element, d) for d in definition[1:]])
    if not isinstance(head, str):
        raise XmlToJsonException('Tuple definition must start with function or string')
    if head[0] == '@':
        raise XmlToJsonException('Tuple definition must not start with attribute')
    child = element.find(head)
    if child is None:
        if required:
            raise NotCompleteXmlException('Expecting {0} in element {1}'.format(head, element.tag))
        return None
    return xml_to_json(child, definition[1], required)
Parse xml element by a definition given in tuple format .
42,564
def _parse_list(element, definition):
    """Parse an xml element by a definition given as a list.

    ``definition[0]`` is the tag to collect; the optional second item is
    the sub-definition applied to each matching child.
    """
    if not definition:
        raise XmlToJsonException('List definition needs some definition')
    tag = definition[0]
    tag_def = definition[1] if len(definition) > 1 else None
    return [xml_to_json(child, tag_def) for child in element.findall(tag)]
Parse xml element by definition given by list .
42,565
def _parse_name ( name ) : required = False if name [ - 1 ] == '*' : name = name [ 0 : - 1 ] required = True return name , required
Parse name in complex dict definition .
42,566
def name(self):
    """Returns the lineage name or None if the name cannot be found."""
    ids = [pair.heavy['clonify']['id']
           for pair in self.heavies
           if 'clonify' in pair.heavy]
    return ids[0] if ids else None
Returns the lineage name or None if the name cannot be found .
42,567
def verified_pairs(self):
    """Returns all lineage Pair objects that contain verified pairings."""
    # Run light-chain verification lazily on first access.
    if not hasattr(self.just_pairs[0], 'verified'):
        self.verify_light_chains()
    return list(filter(lambda pair: pair.verified, self.just_pairs))
Returns all lineage Pair objects that contain verified pairings .
42,568
def size(self, pairs_only=False):
    """Calculate the size of the lineage.

    Args:
        pairs_only: if True, count only paired sequences; otherwise count
            all heavy chains.
    """
    source = self.just_pairs if pairs_only else self.heavies
    return len(source)
Calculate the size of the lineage .
42,569
def dot_alignment(self, seq_field='vdj_nt', name_field='seq_id', uca=None, chain='heavy', uca_name='UCA', as_fasta=False, just_alignment=False):
    """Returns a multiple sequence alignment of all lineage sequences with
    the UCA, where matches to the UCA are shown as dots and mismatches are
    shown as the mismatched residue.

    Args:
        seq_field: key of the sequence field to align.
        name_field: key used for sequence names in the output.
        uca: optional UCA sequence; defaults to the lineage's own UCA for
            the requested chain.
        chain: 'heavy' or anything else for light chain.
        uca_name: display name for the UCA row.
        as_fasta: if True, format each row as a FASTA record.
        just_alignment: if True, return only the dot-masked strings.
    """
    # Default to the lineage's own UCA for the requested chain.
    if uca is None:
        uca = self.uca.heavy if chain == 'heavy' else self.uca.light
    uca.id = 'UCA'
    if chain == 'heavy':
        sequences = [p.heavy for p in self.heavies if seq_field in p.heavy]
        # Mirror seq_id into the custom name field so the UCA row is named too.
        if name_field != 'seq_id':
            uca[name_field] = uca['seq_id']
        sequences.append(uca)
        seqs = [(s[name_field], s[seq_field]) for s in sequences]
    else:
        sequences = [p.light for p in self.lights if seq_field in p.light]
        if name_field != 'seq_id':
            uca[name_field] = uca['seq_id']
        sequences.append(uca)
        seqs = [(s[name_field], s[seq_field]) for s in sequences]
    # Align with MUSCLE; the UCA row serves as the dot-masking reference.
    aln = muscle(seqs)
    g_aln = [a for a in aln if a.id == 'UCA'][0]
    dots = [(uca_name, str(g_aln.seq)), ]
    for seq in [a for a in aln if a.id != 'UCA']:
        s_aln = ''
        # '-' where both have gaps, '.' where identical, residue otherwise.
        for g, q in zip(str(g_aln.seq), str(seq.seq)):
            if g == q == '-':
                s_aln += '-'
            elif g == q:
                s_aln += '.'
            else:
                s_aln += q
        dots.append((seq.id, s_aln))
    if just_alignment:
        return [d[1] for d in dots]
    # Pad names so the aligned rows line up in plain-text output.
    name_len = max([len(d[0]) for d in dots]) + 2
    dot_aln = []
    for d in dots:
        if as_fasta:
            dot_aln.append('>{}\n{}'.format(d[0], d[1]))
        else:
            spaces = name_len - len(d[0])
            dot_aln.append(d[0] + ' ' * spaces + d[1])
    return '\n'.join(dot_aln)
Returns a multiple sequence alignment of all lineage sequence with the UCA where matches to the UCA are shown as dots and mismatches are shown as the mismatched residue .
42,570
def to_child(self, append_message="", **kwargs):
    """Basic implementation of returning a child state.

    Copies this state, applies the keyword overrides (validated against
    ``_child_params``), links the child to its parent, and appends the
    given message to the message history.
    """
    invalid = set(kwargs) - set(self._child_params)
    if invalid:
        raise KeyError("Invalid init params for State: %s" % ", ".join(invalid))
    child = copy(self)
    for key, value in kwargs.items():
        setattr(child, key, value)
    child.parent = self
    if not isinstance(append_message, dict):
        append_message = {"msg": append_message, "kwargs": {}}
    child.messages = [*self.messages, append_message]
    return child
Basic implementation of returning a child state
42,571
def complex_el_from_dict(parent, data, key):
    """Create an element from a dict definition and add it to parent.

    ``data[key]`` may be a plain text value, or a dict with optional
    ``_attr`` (attribute mapping) and ``_text`` keys.
    """
    node = ET.SubElement(parent, key)
    value = data[key]
    if not isinstance(value, dict):
        node.text = value
        return node
    if '_attr' in value:
        for attr_name, attr_value in viewitems(value['_attr']):
            node.set(attr_name, attr_value)
    if '_text' in value:
        node.text = value['_text']
    return node
Create element from a dict definition and add it to parent .
42,572
def rss_create(channel, articles):
    """Create RSS xml feed.

    Args:
        channel: dict with channel metadata (title, link, description,
            language).
        articles: iterable of article dicts.

    Returns:
        An ``ElementTree`` rooted at the <rss> element.
    """
    channel = channel.copy()
    articles = list(articles)
    rss = ET.Element('rss')
    rss.set('version', '2.0')
    channel_node = ET.SubElement(rss, 'channel')
    for field in ('title', 'link', 'description', 'language'):
        element_from_dict(channel_node, channel, field)
    for article in articles:
        item = ET.SubElement(channel_node, 'item')
        for field in ('title', 'description', 'link'):
            element_from_dict(item, article, field)
        # NOTE(review): this loops over *all* remaining keys, including the
        # three handled above -- presumably element_from_dict removes the
        # handled keys from the dict; verify to rule out duplicate elements.
        for key in article:
            complex_el_from_dict(item, article, key)
    return ET.ElementTree(rss)
Create RSS xml feed .
42,573
def compute_token(random_token, config):
    """Compute a hash of the given token with a preconfigured secret.

    Args:
        random_token: the client-supplied random token.
        config: configuration mapping holding the secret under
            ``TOKEN_SERVICE_SECURITY_CONFIG``.

    Returns:
        Upper-cased hex digest of SHA-1(random_token + secret).
    """
    secret = config[TOKEN_SERVICE_SECURITY_CONFIG]
    digest = hashlib.sha1()
    # NOTE(review): on Python 3, update() requires bytes -- this assumes the
    # tokens are byte strings already (or Python 2); confirm with callers.
    digest.update(random_token + secret)
    return digest.hexdigest().upper()
Compute a hash of the given token with a preconfigured secret .
42,574
def verify_security_data(security):
    """Verify an untrusted security token.

    Checks that the client-supplied hashed token equals the hash recomputed
    from the client-supplied random token.
    """
    random_token = security[TOKEN_RANDOM]
    hashed_token = security[TOKEN_HASHED]
    # BUG(review): compute_token requires a second ``config`` argument, so
    # this call raises TypeError as written. The security config must be
    # supplied (or fetched) here -- confirm the intended source of config.
    return str(hashed_token) == str(compute_token(random_token))
Verify an untrusted security token .
42,575
def initialize(log_file, project_dir=None, debug=False):
    """Initializes an AbTools pipeline.

    Prints the splash screen, configures logging, optionally creates the
    project directory, and returns the pipeline logger.
    """
    print_splash()
    log.setup_logging(log_file, print_log_location=False, debug=debug)
    logger = log.get_logger('pipeline')
    if project_dir is not None:
        make_dir(os.path.normpath(project_dir))
        logger.info('PROJECT DIRECTORY: {}'.format(project_dir))
        logger.info('')
    logger.info('LOG LOCATION: {}'.format(log_file))
    print('')
    return logger
Initializes an AbTools pipeline .
42,576
def list_files(d, extension=None):
    """Lists files in a given directory.

    Args:
        d: directory path (``~`` expanded, contents globbed) or a single
            file path.
        extension: optional extension (or list of extensions) to keep;
            the final suffix is matched as-is, upper- and lower-cased.

    Returns:
        Sorted list of matching file paths.
    """
    if os.path.isdir(d):
        files = sorted(glob.glob(os.path.expanduser(d) + '/*'))
    else:
        files = [d, ]
    if extension is None:
        return files
    if type(extension) in STR_TYPES:
        extension = [extension, ]
    matches = []
    for f in files:
        suffix = f.split('.')[-1]
        if suffix in extension or suffix.upper() in extension or suffix.lower() in extension:
            matches.append(f)
    return matches
Lists files in a given directory .
42,577
def _get_version(ctx, _, value):
    """Click callback for option to show current ZSL version."""
    # Do nothing during completion / when the flag is absent.
    if not value or ctx.resilient_parsing:
        return
    info = {'version': version, 'python_version': sys.version}
    click.echo('Zsl %(version)s\nPython %(python_version)s' % info, color=ctx.color)
    ctx.exit()
Click callback for option to show current ZSL version .
42,578
def dict_pick(dictionary, allowed_keys):
    """Return a dictionary only with keys found in allowed_keys."""
    picked = {}
    for key, value in viewitems(dictionary):
        if key in allowed_keys:
            picked[key] = value
    return picked
Return a dictionary only with keys found in allowed_keys
42,579
def _create_one(self, ctx):
    """Creates an instance to be saved when a model is created."""
    assert isinstance(ctx, ResourceQueryContext)
    # Only columns actually declared on the model may be set from the payload.
    fields = dict_pick(ctx.data, self._model_columns)
    return self.model_cls(**fields)
Creates an instance to be saved when a model is created .
42,580
def _save_one(self, model, ctx):
    """Saves the created instance -- adds it to the session and flushes."""
    assert isinstance(ctx, ResourceQueryContext)
    orm = self._orm
    orm.add(model)
    orm.flush()
Saves the created instance .
42,581
def _create_delete_one_query(self, row_id, ctx):
    """Creates the delete-row-by-id query."""
    assert isinstance(ctx, ResourceQueryContext)
    query = self._orm.query(self.model_cls)
    return query.filter(self._model_pk == row_id)
Delete row by id query creation .
42,582
def _delete_collection(self, ctx):
    """Delete a collection from DB, optionally filtered by ``filter_by``.

    Returns:
        The number of deleted rows (result of ``Query.delete``).
    """
    assert isinstance(ctx, ResourceQueryContext)
    query = self._orm.query(self.model_cls)
    filter_by = ctx.get_filter_by()
    if filter_by is not None:
        query = self.to_filter(query, filter_by)
    return query.delete()
Delete a collection from DB optionally filtered by filter_by
42,583
def route(self, path):
    """Returns the task handling the given request path.

    The first strategy that can route the path wins.

    Raises:
        RoutingError: if no registered strategy can route ``path``.
    """
    logging.getLogger(__name__).debug("Routing path '%s'.", path)
    for strategy in self._strategies:
        if strategy.can_route(path):
            return self._create_result(strategy.route(path))
    raise RoutingError(path)
Returns the task handling the given request path .
42,584
def abi_to_fasta(input, output):
    """Converts ABI or AB1 trace files to FASTA format.

    Args:
        input: directory of .ab1/.abi files (zip archives found there are
            extracted and processed too) or a single file path.
        output: directory into which one FASTA file per input directory
            is written.
    """
    direcs = [input, ]
    zip_files = list_files(input, ['zip'])
    if zip_files:
        direcs.extend(_process_zip_files(zip_files))
    for d in direcs:
        fastas = []
        for f in list_files(d, ['ab1', 'abi']):
            # BUGFIX: close trace files after reading -- the original
            # leaked one open handle per input file.
            with open(f, 'rb') as handle:
                seq = SeqIO.read(handle, 'abi')
            fastas.append('>{}\n{}'.format(seq.id, str(seq.seq)))
        ofile = os.path.basename(os.path.normpath(d)) + '.fasta'
        opath = os.path.join(output, ofile)
        # BUGFIX: the output file handle was never closed either.
        with open(opath, 'w') as out_handle:
            out_handle.write('\n'.join(fastas))
Converts ABI or AB1 files to FASTA format .
42,585
def extend(instance, new_class):
    """Adds new_class to the ancestors of instance.

    Replaces the instance's class with a dynamically created subclass of
    both ``new_class`` and the instance's current class.
    """
    combined_name = '%s_extended_with_%s' % (instance.__class__.__name__,
                                             new_class.__name__)
    bases = (new_class, instance.__class__,)
    instance.__class__ = type(combined_name, bases, {})
Adds new_class to the ancestors of instance .
42,586
def generate_js_models(module, models, collection_prefix, model_prefix, model_fn, collection_fn, marker, integrate, js_file):
    """Generate models for Backbone Javascript applications.

    Args:
        module: module holding the source models.
        models: model specification string (parsed by ``parse_model_arg``).
        collection_prefix / model_prefix / model_fn / collection_fn:
            optional generator settings (None values are skipped).
        marker: marker string for the autogenerated block; derived from an
            md5 of module+models when falsy.
        integrate: if True, write the result into ``js_file`` between
            marker comments; otherwise return the generated source.
    """
    options = {'model_prefix': model_prefix, 'collection_prefix': collection_prefix, 'model_fn': model_fn, 'collection_fn': collection_fn}
    generator = ModelGenerator(module, **{o: options[o] for o in options if options[o] is not None})
    models = generator.generate_models(parse_model_arg(models))
    if integrate:
        sys.stderr.write("Integrate is really experimental")
        if not marker:
            # BUGFIX: hashlib.md5 requires bytes on Python 3; encode first.
            marker = hashlib.md5("{0}{1}".format(module, models).encode('utf-8')).hexdigest()
        start = "// * -- START AUTOGENERATED %s -- * //\n" % marker
        end = "// * -- END AUTOGENERATED %s -- * //\n" % marker
        return integrate_to_file("\n".join(models), js_file, start, end)
    else:
        return "\n".join(models)
Generate models for Backbone Javascript applications .
42,587
def _map_table_name(self, model_names):
    """Builds the table-name -> model-class-name mapping.

    Needed for foreign keys, where the model class must be determined from
    a table name.
    """
    for model in model_names:
        if isinstance(model, tuple):
            model = model[0]
        try:
            model_cls = getattr(self.models, model)
            table_name = class_mapper(model_cls).tables[0].name
            self.table_to_class[table_name] = model
        except AttributeError:
            # Name not present in the models module -- skip it.
            pass
For foreign keys we need to determine the class from the table name, so map the table names to model classes here.
42,588
def push_msg(self, channel_id, msg):
    """Push msg for the given channel_id. If msg is not a string it will
    be urlencoded.

    Args:
        channel_id: target channel identifier.
        msg: message string, or a mapping/sequence of pairs to urlencode.

    Returns:
        Whatever ``push`` returns (the decoded JSON response).
    """
    # Idiom fix: isinstance instead of an exact type() check, so str
    # subclasses are sent as-is instead of being passed to urlencode
    # (which would fail on a plain string).
    if not isinstance(msg, str):
        msg = urlencode(msg)
    return self.push(channel_id, msg)
Push msg for the given channel_id. If msg is not a string, it will be urlencoded.
42,589
def push_object(self, channel_id, obj):
    """Push obj for channel_id. obj will be encoded as JSON in the request
    (double quotes are backslash-escaped)."""
    payload = json.dumps(obj).replace('"', '\\"')
    return self.push(channel_id, payload)
Push obj for channel_id . obj will be encoded as JSON in the request .
42,590
def push(self, channel_id, data):
    """Push message with POST data for channel_id.

    Returns:
        Decoded JSON response of the push server.
    """
    url = self.channel_path(channel_id)
    return requests.post(url, data).json()
Push message with POST data for channel_id
42,591
def initialize():
    """Import in this form is necessary so that we avoid the unwanted
    behavior and immediate initialization of the application objects. This
    makes the initialization procedure run at the time when it is necessary
    and has every required resource."""
    # Imports are deliberately function-local -- see docstring.
    from zsl.interface.web.performers.default import create_not_found_mapping
    create_not_found_mapping()
    from zsl.interface.web.performers.resource import create_resource_mapping
    create_resource_mapping()
Import in this form is necessary so that we avoid the unwanted behavior and immediate initialization of the application objects . This makes the initialization procedure run in the time when it is necessary and has every required resources .
42,592
def run_web(self, flask, host='127.0.0.1', port=5000, **options):
    """Alias for Flask.run, with host/port/debug taken from the Flask
    config when present (falling back to the given defaults)."""
    cfg = flask.config
    return flask.run(host=cfg.get('FLASK_HOST', host),
                     port=cfg.get('FLASK_PORT', port),
                     debug=cfg.get('DEBUG', False),
                     **options)
Alias for Flask . run
42,593
def bound(self, instance):
    """Return a new dispatcher which will switch all command functions with
    bound methods of the given instance, matched by name. Only regular
    (instance) methods are matched."""
    dispatcher = CommandDispatcher()
    dispatcher.commands = self.commands.copy()
    for name in self.commands:
        candidate = getattr(instance, name, None)
        is_own_method = (candidate is not None
                         and inspect.ismethod(candidate)
                         and candidate.__self__ == instance)
        if is_own_method:
            dispatcher.commands[name] = candidate
    return dispatcher
Return a new dispatcher which will switch all command functions with bounded methods of given instance matched by name . It will match only regular methods .
42,594
def compress_and_upload(data, compressed_file, s3_path, multipart_chunk_size_mb=500,
                        method='gz', delete=False, access_key=None, secret_key=None):
    """Compresses data and uploads it to S3.

    Args:
        data: input data/file to compress.
        compressed_file: path of the compressed output file.
        s3_path: S3 destination path.
        multipart_chunk_size_mb: multipart chunk size passed to s3cmd.
        method: compression format ('gz' by default).
        delete: if True, remove the local compressed file after upload.
        access_key / secret_key: optional AWS credentials; when both are
            supplied, s3cmd is (re)configured before uploading.
    """
    logger = log.get_logger('s3')
    if access_key and secret_key:
        configure(access_key=access_key, secret_key=secret_key, logger=logger)
    compress(data, compressed_file, fmt=method, logger=logger)
    put(compressed_file, s3_path, multipart_chunk_size_mb=multipart_chunk_size_mb, logger=logger)
    if delete:
        os.unlink(compressed_file)
Compresses data and uploads to S3 .
42,595
def put(f, s3_path, multipart_chunk_size_mb=500, logger=None):
    """Uploads a single file to S3 using s3cmd."""
    if not logger:
        logger = log.get_logger('s3')
    fname = os.path.basename(f)
    target = os.path.join(s3_path, fname)
    # SECURITY(review): the command is built by string interpolation and
    # executed with shell=True; file names containing shell metacharacters
    # would be interpreted by the shell. Consider an argv list, shell=False.
    s3cmd_cline = 's3cmd put {} {} --multipart-chunk-size-mb {}'.format(f, target, multipart_chunk_size_mb)
    print_put_info(fname, target, logger)
    s3cmd = sp.Popen(s3cmd_cline, stdout=sp.PIPE, stderr=sp.PIPE, shell=True)
    stdout, stderr = s3cmd.communicate()
Uploads a single file to S3 using s3cmd .
42,596
def configure(access_key=None, secret_key=None, logger=None):
    """Configures s3cmd prior to first use.

    Interactively prompts for any credentials that were not supplied, then
    writes the s3cmd config file.
    """
    if not logger:
        logger = log.get_logger('s3')
    if not (access_key and secret_key):
        logger.info('')
        access_key = input('AWS Access Key: ')
        secret_key = input('AWS Secret Key: ')
    _write_config(access_key, secret_key)
    logger.info('')
    logger.info('Completed writing S3 config file.')
    logger.info('')
Configures s3cmd prior to first use .
42,597
def required_params(data, *r_params):
    """Check that the given parameters are present in the given dict; if
    not, raise an exception.

    Args:
        data: mapping to check.
        *r_params: names of required parameters.

    Raises:
        RequestException: if any required parameter is missing.
    """
    # Idiom fix: all() with a generator replaces the reduce()-based fold
    # and short-circuits on the first missing parameter.
    if not all(param in data for param in r_params):
        raise RequestException(msg_err_missing_params(*r_params))
Check if given parameters are in the given dict if not raise an exception .
42,598
def safe_args(fn, args):
    """Check that ``args`` (a dict) contains the required parameters of
    ``fn`` and filter out any extraneous parameters so ``fn`` can be safely
    called with them.

    Args:
        fn: the target callable.
        args: dict of candidate keyword arguments.

    Returns:
        ``args`` filtered to the parameters ``fn`` accepts, or ``args``
        unchanged when ``fn`` takes ``**kwargs``.

    Raises:
        RequestException: when a required parameter is missing.
    """
    # NOTE(review): inspect.getargspec was removed in Python 3.11+;
    # migrate to inspect.getfullargspec when dropping older support.
    fn_args = inspect.getargspec(fn)
    # BUGFIX: required_params expects the names as *varargs; the original
    # passed the whole list (or even the ArgSpec object itself) as a single
    # parameter, which could never match a key in ``args``.
    if fn_args.defaults:
        required_params(args, *fn_args.args[:-len(fn_args.defaults)])
    else:
        required_params(args, *fn_args.args)
    if not fn_args.keywords:
        return {key: value for key, value in viewitems(args) if key in fn_args.args}
    else:
        return args
Check if args as a dictionary has the required parameters of fn function and filter any waste parameters so fn can be safely called with them .
42,599
def get_db(db, ip='localhost', port=27017, user=None, password=None):
    """Returns a pymongo Database object.

    Args:
        db: database name.
        ip: MongoDB host.
        port: MongoDB port.
        user, password: optional credentials; when both are given a URI
            with the URL-quoted password is used.
    """
    # NOTE(review): connect=False on macOS presumably works around
    # connect-on-init issues with pymongo there -- confirm the rationale.
    connect = platform.system().lower() != 'darwin'
    if user and password:
        # BUGFIX: quote_plus lives in urllib.parse on Python 3; the bare
        # ``import urllib; urllib.quote_plus`` only works on Python 2.
        try:
            from urllib.parse import quote_plus
        except ImportError:
            from urllib import quote_plus
        pwd = quote_plus(password)
        uri = 'mongodb://{}:{}@{}:{}'.format(user, pwd, ip, port)
        conn = MongoClient(uri, connect=connect)
    else:
        conn = MongoClient(ip, port, connect=connect)
    return conn[db]
Returns a pymongo Database object .