idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
42,400 | def render_build_args ( options , ns ) : build_args = options . get ( 'buildArgs' , { } ) for key , value in build_args . items ( ) : build_args [ key ] = value . format ( ** ns ) return build_args | Get docker build args dict rendering any templated args . |
42,401 | def build_image ( image_path , image_name , build_args = None , dockerfile_path = None ) : cmd = [ 'docker' , 'build' , '-t' , image_name , image_path ] if dockerfile_path : cmd . extend ( [ '-f' , dockerfile_path ] ) for k , v in ( build_args or { } ) . items ( ) : cmd += [ '--build-arg' , '{}={}' . format ( k , v ) ] check_call ( cmd ) | Build an image |
42,402 | def image_needs_pushing ( image ) : d = docker_client ( ) try : d . images . get_registry_data ( image ) except docker . errors . APIError : return True else : return False | Return whether an image needs pushing |
42,403 | def image_needs_building ( image ) : d = docker_client ( ) try : d . images . get ( image ) except docker . errors . ImageNotFound : pass else : return False return image_needs_pushing ( image ) | Return whether an image needs building |
42,404 | def build_images ( prefix , images , tag = None , commit_range = None , push = False , chart_version = None ) : value_modifications = { } for name , options in images . items ( ) : image_path = options . get ( 'contextPath' , os . path . join ( 'images' , name ) ) image_tag = tag paths = list ( options . get ( 'paths' , [ ] ) ) + [ image_path , 'chartpress.yaml' ] last_commit = last_modified_commit ( * paths ) if tag is None : if chart_version : image_tag = "{}-{}" . format ( chart_version , last_commit ) else : image_tag = last_commit image_name = prefix + name image_spec = '{}:{}' . format ( image_name , image_tag ) value_modifications [ options [ 'valuesPath' ] ] = { 'repository' : image_name , 'tag' : SingleQuotedScalarString ( image_tag ) , } template_namespace = { 'LAST_COMMIT' : last_commit , 'TAG' : image_tag , } if tag or image_needs_building ( image_spec ) : build_args = render_build_args ( options , template_namespace ) build_image ( image_path , image_spec , build_args , options . get ( 'dockerfilePath' ) ) else : print ( f"Skipping build for {image_spec}, it already exists" ) if push : if tag or image_needs_pushing ( image_spec ) : check_call ( [ 'docker' , 'push' , image_spec ] ) else : print ( f"Skipping push for {image_spec}, already on registry" ) return value_modifications | Build a collection of docker images |
42,405 | def publish_pages ( name , paths , git_repo , published_repo , extra_message = '' ) : version = last_modified_commit ( * paths ) checkout_dir = '{}-{}' . format ( name , version ) check_call ( [ 'git' , 'clone' , '--no-checkout' , git_remote ( git_repo ) , checkout_dir ] , echo = False , ) check_call ( [ 'git' , 'checkout' , 'gh-pages' ] , cwd = checkout_dir ) with TemporaryDirectory ( ) as td : check_call ( [ 'helm' , 'package' , name , '--destination' , td + '/' , ] ) check_call ( [ 'helm' , 'repo' , 'index' , td , '--url' , published_repo , '--merge' , os . path . join ( checkout_dir , 'index.yaml' ) , ] ) for f in os . listdir ( td ) : shutil . copy2 ( os . path . join ( td , f ) , os . path . join ( checkout_dir , f ) ) check_call ( [ 'git' , 'add' , '.' ] , cwd = checkout_dir ) if extra_message : extra_message = '\n\n%s' % extra_message else : extra_message = '' check_call ( [ 'git' , 'commit' , '-m' , '[{}] Automatic update for commit {}{}' . format ( name , version , extra_message ) ] , cwd = checkout_dir ) check_call ( [ 'git' , 'push' , 'origin' , 'gh-pages' ] , cwd = checkout_dir , ) | Publish helm chart index to github pages |
42,406 | def add_values_to_bundle_safe ( connection , bundle , values ) : for value in values : try : connection . addValueToBundle ( bundle , value ) except YouTrackException as e : if e . response . status == 409 : print ( "Value with name [ %s ] already exists in bundle [ %s ]" % ( utf8encode ( value . name ) , utf8encode ( bundle . name ) ) ) else : raise e | Adds values to specified bundle . Checks whether each value already contains in bundle . If yes it is not added . |
42,407 | def parse ( self , stream , media_type = None , parser_context = None ) : assert yaml , 'YAMLParser requires pyyaml to be installed' parser_context = parser_context or { } encoding = parser_context . get ( 'encoding' , settings . DEFAULT_CHARSET ) try : data = stream . read ( ) . decode ( encoding ) return yaml . safe_load ( data ) except ( ValueError , yaml . parser . ParserError ) as exc : raise ParseError ( 'YAML parse error - %s' % six . text_type ( exc ) ) | Parses the incoming bytestream as YAML and returns the resulting data . |
42,408 | def render ( self , data , accepted_media_type = None , renderer_context = None ) : assert yaml , 'YAMLRenderer requires pyyaml to be installed' if data is None : return '' return yaml . dump ( data , stream = None , encoding = self . charset , Dumper = self . encoder , allow_unicode = not self . ensure_ascii , default_flow_style = self . default_flow_style ) | Renders data into serialized YAML . |
42,409 | def _toplevel ( cls ) : superclasses = ( list ( set ( ClosureModel . __subclasses__ ( ) ) & set ( cls . _meta . get_parent_list ( ) ) ) ) return next ( iter ( superclasses ) ) if superclasses else cls | Find the top level of the chain we re in . |
42,410 | def rebuildtable ( cls ) : cls . _closure_model . objects . all ( ) . delete ( ) cls . _closure_model . objects . bulk_create ( [ cls . _closure_model ( parent_id = x [ 'pk' ] , child_id = x [ 'pk' ] , depth = 0 ) for x in cls . objects . values ( "pk" ) ] ) for node in cls . objects . all ( ) : node . _closure_createlink ( ) | Regenerate the entire closuretree . |
42,411 | def _closure_parent_pk ( self ) : if hasattr ( self , "%s_id" % self . _closure_parent_attr ) : return getattr ( self , "%s_id" % self . _closure_parent_attr ) else : parent = getattr ( self , self . _closure_parent_attr ) return parent . pk if parent else None | What our parent pk is in the closure tree . |
42,412 | def _closure_deletelink ( self , oldparentpk ) : self . _closure_model . objects . filter ( ** { "parent__%s__child" % self . _closure_parentref ( ) : oldparentpk , "child__%s__parent" % self . _closure_childref ( ) : self . pk } ) . delete ( ) | Remove incorrect links from the closure tree . |
42,413 | def _closure_createlink ( self ) : linkparents = self . _closure_model . objects . filter ( child__pk = self . _closure_parent_pk ) . values ( "parent" , "depth" ) linkchildren = self . _closure_model . objects . filter ( parent__pk = self . pk ) . values ( "child" , "depth" ) newlinks = [ self . _closure_model ( parent_id = p [ 'parent' ] , child_id = c [ 'child' ] , depth = p [ 'depth' ] + c [ 'depth' ] + 1 ) for p in linkparents for c in linkchildren ] self . _closure_model . objects . bulk_create ( newlinks ) | Create a link in the closure tree . |
42,414 | def get_ancestors ( self , include_self = False , depth = None ) : if self . is_root_node ( ) : if not include_self : return self . _toplevel ( ) . objects . none ( ) else : return self . _toplevel ( ) . objects . filter ( pk = self . pk ) params = { "%s__child" % self . _closure_parentref ( ) : self . pk } if depth is not None : params [ "%s__depth__lte" % self . _closure_parentref ( ) ] = depth ancestors = self . _toplevel ( ) . objects . filter ( ** params ) if not include_self : ancestors = ancestors . exclude ( pk = self . pk ) return ancestors . order_by ( "%s__depth" % self . _closure_parentref ( ) ) | Return all the ancestors of this object . |
42,415 | def get_descendants ( self , include_self = False , depth = None ) : params = { "%s__parent" % self . _closure_childref ( ) : self . pk } if depth is not None : params [ "%s__depth__lte" % self . _closure_childref ( ) ] = depth descendants = self . _toplevel ( ) . objects . filter ( ** params ) if not include_self : descendants = descendants . exclude ( pk = self . pk ) return descendants . order_by ( "%s__depth" % self . _closure_childref ( ) ) | Return all the descendants of this object . |
42,416 | def get_children ( self ) : if hasattr ( self , '_cached_children' ) : children = self . _toplevel ( ) . objects . filter ( pk__in = [ n . pk for n in self . _cached_children ] ) children . _result_cache = self . _cached_children return children else : return self . get_descendants ( include_self = False , depth = 1 ) | Return all the children of this object . |
42,417 | def get_root ( self ) : if self . is_root_node ( ) : return self return self . get_ancestors ( ) . order_by ( "-%s__depth" % self . _closure_parentref ( ) ) [ 0 ] | Return the furthest ancestor of this node . |
42,418 | def is_descendant_of ( self , other , include_self = False ) : if other . pk == self . pk : return include_self return self . _closure_model . objects . filter ( parent = other , child = self ) . exclude ( pk = self . pk ) . exists ( ) | Is this node a descendant of other ? |
42,419 | def is_ancestor_of ( self , other , include_self = False ) : return other . is_descendant_of ( self , include_self = include_self ) | Is this node an ancestor of other ? |
42,420 | def quantize ( number , digits = 0 , q = builtins . round ) : base , fraction = split ( digits ) if fraction * 10 % 1 > 0 : digits = base + 2 else : digits = base + 1 multiplier = 10 ** base * invert ( fraction , default = 1 ) quantized = q ( number * multiplier ) / multiplier return builtins . round ( quantized , digits ) | Quantize to somewhere in between a magnitude . |
42,421 | def vectorize ( fn ) : @ functools . wraps ( fn ) def vectorized_function ( values , * vargs , ** kwargs ) : return [ fn ( value , * vargs , ** kwargs ) for value in values ] return vectorized_function | Allows a method to accept a list argument but internally deal only with a single item of that list . |
42,422 | def engineering ( value , precision = 3 , prefix = False , prefixes = SI ) : display = decimal . Context ( prec = precision ) value = decimal . Decimal ( value ) . normalize ( context = display ) string = value . to_eng_string ( ) if prefix : prefixes = { e ( exponent ) : prefix for exponent , prefix in prefixes . items ( ) } return replace ( string , prefixes ) else : return string | Convert a number to engineering notation . |
42,423 | def chunked ( iterator , chunksize ) : chunk = [ ] for idx , item in enumerate ( iterator , 1 ) : chunk . append ( item ) if idx % chunksize == 0 : yield chunk chunk = [ ] if chunk : yield chunk | Yields items from iterator in chunks of size chunksize . |
42,424 | def pre_start_check ( self ) : try : sock = socket . socket ( ) sock . connect ( ( self . host , self . port ) ) return True except ( socket . error , socket . timeout ) : return False finally : sock . close ( ) | Check if process accepts connections . |
42,425 | def after_start_check ( self ) : try : conn = HTTPConnection ( self . host , self . port ) conn . request ( 'HEAD' , self . url . path ) status = str ( conn . getresponse ( ) . status ) if status == self . status or self . status_re . match ( status ) : conn . close ( ) return True except ( HTTPException , socket . timeout , socket . error ) : return False | Check if defined URL returns expected status to a HEAD request . |
42,426 | def _wait_for_output ( self ) : poll_result = self . poll_obj . poll ( 0 ) if poll_result : line = self . output ( ) . readline ( ) if self . _banner . match ( line ) : return True return False | Check if output matches banner . |
42,427 | def start ( self ) : if self . process is None : command = self . command if not self . _shell : command = self . command_parts env = os . environ . copy ( ) env [ ENV_UUID ] = self . _uuid popen_kwargs = { 'shell' : self . _shell , 'stdin' : subprocess . PIPE , 'stdout' : subprocess . PIPE , 'universal_newlines' : True , 'env' : env , } if platform . system ( ) != 'Windows' : popen_kwargs [ 'preexec_fn' ] = os . setsid self . process = subprocess . Popen ( command , ** popen_kwargs ) self . _set_timeout ( ) return self | Start defined process . |
42,428 | def kill ( self , wait = True , sig = None ) : if sig is None : sig = self . _sig_kill if self . running ( ) : os . killpg ( self . process . pid , sig ) if wait : self . process . wait ( ) self . _kill_all_kids ( sig ) self . _clear_process ( ) return self | Kill the process if running . |
42,429 | def wait_for ( self , wait_for ) : while self . check_timeout ( ) : if wait_for ( ) : return self time . sleep ( self . _sleep ) self . kill ( ) raise TimeoutExpired ( self , timeout = self . _timeout ) | Wait for callback to return True . |
42,430 | def start ( self ) : if self . pre_start_check ( ) : raise AlreadyRunning ( self ) super ( Executor , self ) . start ( ) self . wait_for ( self . check_subprocess ) return self | Start executor with additional checks . |
42,431 | def check_subprocess ( self ) : exit_code = self . process . poll ( ) if exit_code is not None and exit_code != 0 : self . _kill_all_kids ( self . _sig_kill ) self . _clear_process ( ) raise ProcessExitedWithError ( self , exit_code ) return self . after_start_check ( ) | Make sure the process didn t exit with an error and run the checks . |
42,432 | def _ncc_c_2dim ( x , y ) : den = np . array ( norm ( x , axis = 1 ) * norm ( y ) ) den [ den == 0 ] = np . Inf x_len = x . shape [ - 1 ] fft_size = 1 << ( 2 * x_len - 1 ) . bit_length ( ) cc = ifft ( fft ( x , fft_size ) * np . conj ( fft ( y , fft_size ) ) ) cc = np . concatenate ( ( cc [ : , - ( x_len - 1 ) : ] , cc [ : , : x_len ] ) , axis = 1 ) return np . real ( cc ) / den [ : , np . newaxis ] | Variant of NCCc that operates with 2 dimensional X arrays and 1 dimensional y vector |
42,433 | def _ncc_c_3dim ( x , y ) : den = norm ( x , axis = 1 ) [ : , None ] * norm ( y , axis = 1 ) den [ den == 0 ] = np . Inf x_len = x . shape [ - 1 ] fft_size = 1 << ( 2 * x_len - 1 ) . bit_length ( ) cc = ifft ( fft ( x , fft_size ) * np . conj ( fft ( y , fft_size ) ) [ : , None ] ) cc = np . concatenate ( ( cc [ : , : , - ( x_len - 1 ) : ] , cc [ : , : , : x_len ] ) , axis = 2 ) return np . real ( cc ) / den . T [ : , : , None ] | Variant of NCCc that operates with 2 dimensional X arrays and 2 dimensional y vector |
42,434 | def get_version_by_version_id ( version_id ) : for ver in registry . version_info : if ver . version_id == version_id : return ver . id return None | Get the internal version ID be the version . |
42,435 | def get_version_name ( version_id ) : ver = registry . version_info . get ( version_id ) if ver : return ver . name return 'unknown' | Get the name of a protocol version by the internal version ID . |
42,436 | def get_version_id ( protocol_version ) : ver = registry . version_info . get ( protocol_version ) if ver : return ver . version_id | Get a tuple with major and minor version number |
42,437 | def end ( self ) : with self . __lock : if self . __write : self . __write ( compress_end ( self . __ctx ) ) else : return compress_end ( self . __ctx ) | Finalise lz4 frame outputting any remaining as return from this function or by writing to fp ) |
42,438 | def get_value_name ( self , pretty = False ) : if pretty : return "%s (%x)" % ( self . enums . get ( self . _value , "n/a" ) , self . _value ) return self . enums . get ( self . _value , "n/a" ) | Get the name of the value |
42,439 | def set_value ( self , value , force = False ) : if force : self . _value = value return if value is None : self . _value = value return if isinstance ( value , six . integer_types ) : self . _value = value return if isinstance ( value , six . string_types ) : for v , n in self . enums . items ( ) : if n == value : self . _value = v return raise ValueError ( "Unable to find value name in enum list" ) raise TypeError ( "Value for '%s' must by of type String or Integer not '%s'" % ( self . name , type ( value ) ) ) | Set the value . |
42,440 | def dissect ( self , data ) : size = struct . calcsize ( "B" ) if len ( data ) < size : raise NotEnoughData ( "Not enough data to decode field '%s' value" % self . name ) curve_type = struct . unpack ( "B" , data [ : size ] ) [ 0 ] if curve_type == 0x03 : self . _value = ECParametersNamedCurveField ( "none" ) data = self . _value . dissect ( data ) else : raise NotImplementedError ( "Decoding of KeyExchange message for curve 0x%.2X not implemented" % curve_type ) return data | Dissect the field . |
42,441 | def _register_procedure ( self , procedure_name , invocation_policy = "single" ) : options = { "invoke" : invocation_policy } message = Register ( procedure = procedure_name , options = options ) request_id = message . request_id try : self . send_message ( message ) except ValueError : raise WampProtocolError ( "failed to register callee: %s" , procedure_name ) self . request_ids [ request_id ] = procedure_name | Register a procedure on a Client as callable over the Router . |
42,442 | def start ( self ) : if self . started is True : raise WampyError ( "Router already started" ) crossbar_config_path = self . config_path cbdir = self . crossbar_directory cmd = [ 'crossbar' , 'start' , '--cbdir' , cbdir , '--config' , crossbar_config_path , ] self . proc = subprocess . Popen ( cmd , preexec_fn = os . setsid ) self . _wait_until_ready ( ) logger . info ( "Crosbar.io is ready for connections on %s (IPV%s)" , self . url , self . ipv ) self . started = True | Start Crossbar . io in a subprocess . |
42,443 | def _get_handshake_headers ( self , upgrade ) : headers = [ ] headers . append ( "GET {} HTTP/1.1" . format ( self . websocket_location ) ) headers . append ( "Host: {}:{}" . format ( self . host , self . port ) ) headers . append ( "Upgrade: websocket" ) headers . append ( "Connection: Upgrade" ) headers . append ( "Sec-WebSocket-Key: {}" . format ( self . key ) ) headers . append ( "Origin: ws://{}:{}" . format ( self . host , self . port ) ) headers . append ( "Sec-WebSocket-Version: {}" . format ( WEBSOCKET_VERSION ) ) if upgrade : headers . append ( "Sec-WebSocket-Protocol: {}" . format ( WEBSOCKET_SUBPROTOCOLS ) ) logger . debug ( "connection headers: %s" , headers ) return headers | Do an HTTP upgrade handshake with the server . |
42,444 | def generate_mask ( cls , mask_key , data ) : if data is None : data = "" data = bytearray ( data , 'utf-8' ) _m = array . array ( "B" , mask_key ) _d = array . array ( "B" , data ) for i in range ( len ( _d ) ) : _d [ i ] ^= _m [ i % 4 ] return _d . tostring ( ) | Mask data . |
42,445 | def edges ( self ) : canonical_edges = set ( ) for v1 , neighbours in self . _vertices . items ( ) : for v2 in neighbours : edge = self . canonical_order ( ( v1 , v2 ) ) canonical_edges . add ( edge ) return canonical_edges | Edges of this graph in canonical order . |
42,446 | def ordered_deduplicate ( sequence ) : seen = set ( ) seen_add = seen . add return tuple ( x for x in sequence if not ( x in seen or seen_add ( x ) ) ) | Returns the sequence as a tuple with the duplicates removed preserving input order . Any duplicates following the first occurrence are removed . |
42,447 | def hash_parameters ( words , minimize_indices = False ) : words = tuple ( words ) return CzechHashBuilder ( words ) . hash_info | Gives hash parameters for the given set of words . |
42,448 | def make_pickable_hash ( words , * args , ** kwargs ) : return PickableHash ( CzechHashBuilder ( words , * args , ** kwargs ) ) . czech_hash | Creates an ordered minimal perfect hash function for the given sequence of words . |
42,449 | def hash_function ( self ) : assert hasattr ( self , 'f1' ) and hasattr ( self , 'f2' ) f1 , f2 , g = self . f1 , self . f2 , self . g def czech_hash ( word ) : v1 = f1 ( word ) v2 = f2 ( word ) return g [ v1 ] + g [ v2 ] return czech_hash | Returns the hash function proper . Ensures that self is not bound to the returned closure . |
42,450 | def generate_acyclic_graph ( self ) : self . n = 3 * len ( self . words ) max_tries = len ( self . words ) ** 2 for trial in range ( max_tries ) : try : self . generate_or_fail ( ) except forest . InvariantError : continue else : self . trials_taken = trial + 1 return raise RuntimeError ( "Could not generate graph in " "{} tries" . format ( max_tries ) ) | Generates an acyclic graph for the given words . Adds the graph and a list of edge - word associations to the object . |
42,451 | def generate_random_table ( self ) : table = list ( range ( 0 , self . n ) ) random . shuffle ( table ) return table | Generates random tables for given word lists . |
42,452 | def generate_or_fail ( self ) : t1 = self . generate_random_table ( ) t2 = self . generate_random_table ( ) f1 = self . generate_func ( t1 ) f2 = self . generate_func ( t2 ) edges = [ ( f1 ( word ) , f2 ( word ) ) for word in self . words ] graph = forest . ForestGraph ( edges = edges ) associations = { } for num in range ( len ( self . words ) ) : edge = edges [ num ] word = self . words [ num ] associations [ graph . canonical_order ( edge ) ] = ( num , word ) for name in ( 't1' , 't2' , 'f1' , 'f2' , 'graph' , 'associations' ) : self . __dict__ [ name ] = locals ( ) [ name ] | Attempts to generate a random acyclic graph raising an InvariantError if unable to . |
42,453 | def generate_func ( self , table ) : n = self . n def func ( word ) : return sum ( x * ord ( c ) for x , c in zip ( table , word ) ) % n return func | Generates a random table based mini - hashing function . |
42,454 | def create_dict_subclass ( name , hash_func , slots , doc ) : hash_length = len ( slots ) def index_or_key_error ( key ) : index = hash_func ( key ) if key != slots [ index ] : raise KeyError ( key ) return index def init ( self , * args , ** kwargs ) : self . _arr = [ None ] * hash_length self . _len = 0 self . update ( * args , ** kwargs ) def getitem ( self , key ) : index = index_or_key_error ( key ) if self . _arr [ index ] is None : raise KeyError ( key ) return self . _arr [ index ] [ 1 ] def setitem ( self , key , value ) : index = index_or_key_error ( key ) self . _arr [ index ] = ( key , value ) def delitem ( self , key ) : index = index_or_key_error ( key ) if self . _arr [ index ] is None : raise KeyError ( key ) self . _arr [ index ] = None def dict_iter ( self ) : return ( pair [ 0 ] for pair in self . _arr if pair is not None ) def dict_len ( self ) : return sum ( 1 for _ in self ) def dict_repr ( self ) : arr_repr = ( repr ( pair ) for pair in self . _arr if pair is not None ) return '' . join ( ( name , '([' , ', ' . join ( arr_repr ) , '])' ) ) bases = ( collections . MutableMapping , ) return type ( name , bases , { '__init__' : init , '__doc__' : doc , '__getitem__' : getitem , '__setitem__' : setitem , '__delitem__' : delitem , '__iter__' : dict_iter , '__len__' : dict_len , '__repr__' : dict_repr , } ) | Creates a dict subclass named name using the hash_function to index hash_length items . Doc should be any additional documentation added to the class . |
42,455 | def validate ( data , skiperrors = False , fixerrors = True ) : if not "type" in data : if fixerrors : data [ "type" ] = "FeatureCollection" else : raise ValueError ( "The geojson data needs to have a type key" ) if not data [ "type" ] == "FeatureCollection" : if fixerrors : data [ "type" ] = "FeatureCollection" else : raise ValueError ( "The geojson data needs to be a feature collection" ) if "features" in data : if not isinstance ( data [ "features" ] , list ) : raise ValueError ( "The features property needs to be a list" ) else : raise ValueError ( "The FeatureCollection needs to contain a 'features' property" ) if skiperrors : for featuredict in data [ "features" ] : feat = Feature ( featuredict ) try : feat . validate ( fixerrors ) except : data [ "features" ] . remove ( featuredict ) else : for featuredict in data [ "features" ] : feat = Feature ( featuredict ) feat . validate ( fixerrors ) return True | Checks that the geojson data is a feature collection that it contains a proper features attribute and that all features are valid too . Returns True if all goes well . |
42,456 | def validate ( self , fixerrors = True ) : if not self . _data : return True elif "type" not in self . _data or "coordinates" not in self . _data : raise Exception ( "A geometry dictionary or instance must have the type and coordinates entries" ) if not self . type in ( "Point" , "MultiPoint" , "LineString" , "MultiLineString" , "Polygon" , "MultiPolygon" ) : if fixerrors : coretype = self . type . lower ( ) . replace ( "multi" , "" ) if coretype == "point" : newtype = "Point" elif coretype == "linestring" : newtype = "LineString" elif coretype == "polygon" : newtype = "Polygon" else : raise Exception ( 'Invalid geometry type. Must be one of: "Point","MultiPoint","LineString","MultiLineString","Polygon","MultiPolygon"' ) if self . type . lower ( ) . startswith ( "multi" ) : newtype = "Multi" + newtype self . type = newtype else : raise Exception ( 'Invalid geometry type. Must be one of: "Point","MultiPoint","LineString","MultiLineString","Polygon","MultiPolygon"' ) coords = self . _data [ "coordinates" ] if not isinstance ( coords , ( list , tuple ) ) : raise Exception ( "Coordinates must be a list or tuple type" ) if self . type == "Point" : if not len ( coords ) == 2 : raise Exception ( "Point must be one coordinate pair" ) elif self . type in ( "MultiPoint" , "LineString" ) : if not len ( coords ) > 1 : raise Exception ( "MultiPoint and LineString must have more than one coordinates" ) elif self . type == "MultiLineString" : for line in coords : if not len ( line ) > 1 : raise Exception ( "All LineStrings in a MultiLineString must have more than one coordinate" ) elif self . type == "Polygon" : for exterior_or_holes in coords : if not len ( exterior_or_holes ) >= 3 : raise Exception ( "The exterior and all holes in a Polygon must have at least 3 coordinates" ) elif self . 
type == "MultiPolygon" : for eachmulti in coords : for exterior_or_holes in eachmulti : if not len ( exterior_or_holes ) >= 3 : raise Exception ( "The exterior and all holes in all Polygons of a MultiPolygon must have at least 3 coordinates" ) return True | Validates that the geometry is correctly formatted according to the geometry type . |
42,457 | def validate ( self , fixerrors = True ) : if not "type" in self . _data or self . _data [ "type" ] != "Feature" : if fixerrors : self . _data [ "type" ] = "Feature" else : raise Exception ( "A geojson feature dictionary must contain a type key and it must be named 'Feature'." ) if not "geometry" in self . _data : if fixerrors : self . geometry = Geometry ( ) else : raise Exception ( "A geojson feature dictionary must contain a geometry key." ) if not "properties" in self . _data or not isinstance ( self . properties , dict ) : if fixerrors : self . _data [ "properties" ] = dict ( ) else : raise Exception ( "A geojson feature dictionary must contain a properties key and it must be a dictionary type." ) self . geometry . validate ( fixerrors ) return True | Validates that the feature is correctly formatted . |
42,458 | def add_feature ( self , obj = None , geometry = None , properties = None ) : properties = properties or { } if isinstance ( obj , Feature ) : feat = obj . _data elif isinstance ( obj , dict ) : feat = obj . copy ( ) else : feat = Feature ( geometry = geometry , properties = properties ) . __geo_interface__ self . _data [ "features" ] . append ( feat ) | Adds a given feature . If obj isn t specified geometry and properties can be set as arguments directly . |
42,459 | def add_unique_id ( self ) : uid = 0 for feature in self . _data [ "features" ] : if feature [ "properties" ] . get ( "id" ) : raise Exception ( "one of the features already had an id field" ) feature [ "properties" ] [ "id" ] = uid uid += 1 | Adds a unique id property to each feature . |
42,460 | def add_all_bboxes ( self ) : for feature in self : if feature . geometry . type != "Null" : feature . geometry . _data [ "bbox" ] = Feature ( feature ) . geometry . bbox | Calculates and adds a bbox attribute to the geojson entry of all feature geometries updating any existing ones . |
42,461 | def save ( self , savepath , ** kwargs ) : self . update_bbox ( ) tempfile = open ( savepath , "w" ) json . dump ( self . _data , tempfile , ** kwargs ) tempfile . close ( ) | Saves the geojson instance to file . To save with a different text encoding use the encoding argument . |
42,462 | def _prepdata ( self ) : if not self . _data . get ( "bbox" ) : self . update_bbox ( ) if not self . _data . get ( "crs" ) : self . _data [ "crs" ] = { "type" : "name" , "properties" : { "name" : "urn:ogc:def:crs:OGC:2:84" } } | Adds potentially missing items to the geojson dictionary |
42,463 | def place_items_in_square ( items , t ) : rows = [ ( t , y , [ ] ) for y in range ( t ) ] for item in items : x = item % t y = item // t inverse_length , _ , row_contents = rows [ y ] heapq . heappush ( row_contents , ( x , item ) ) rows [ y ] = inverse_length - 1 , y , row_contents assert all ( inv_len == t - len ( rows ) for inv_len , _ , rows in rows ) heapq . heapify ( rows ) return [ row for row in rows if row [ 2 ] ] | Returns a list of rows that are stored as a priority queue to be used with heapq functions . |
42,464 | def find_first_fit ( unoccupied_columns , row , row_length ) : for free_col in unoccupied_columns : first_item_x = row [ 0 ] [ 0 ] offset = free_col - first_item_x if check_columns_fit ( unoccupied_columns , row , offset , row_length ) : return offset raise ValueError ( "Row cannot bossily fit in %r: %r" % ( list ( unoccupied_columns . keys ( ) ) , row ) ) | Finds the first index that the row s items can fit . |
42,465 | def check_columns_fit ( unoccupied_columns , row , offset , row_length ) : for index , item in row : adjusted_index = ( index + offset ) % row_length if adjusted_index not in unoccupied_columns : return False return True | Checks if all the occupied columns in the row fit in the indices given by free columns . |
42,466 | def print_square ( row_queue , t ) : occupied_rows = { y : row for _ , y , row in row_queue } empty_row = ', ' . join ( '...' for _ in range ( t ) ) for y in range ( t ) : print ( '|' , end = ' ' ) if y not in occupied_rows : print ( empty_row , end = ' ' ) else : row = dict ( occupied_rows [ y ] ) all_cols = ( '%3d' % row [ x ] if x in row else '...' for x in range ( t ) ) print ( ', ' . join ( all_cols ) , end = ' ' ) print ( "|" ) | Prints a row queue as its conceptual square array . |
42,467 | def trim_nones_from_right ( xs ) : for i , item in enumerate ( reversed ( xs ) ) : if item is not None : break return xs [ : - i ] | Returns the list without all the Nones at the right end . |
42,468 | def _get_timestamp ( dirname_full , remove ) : record_filename = os . path . join ( dirname_full , RECORD_FILENAME ) if not os . path . exists ( record_filename ) : return None mtime = os . stat ( record_filename ) . st_mtime mtime_str = datetime . fromtimestamp ( mtime ) print ( 'Found timestamp {}:{}' . format ( dirname_full , mtime_str ) ) if Settings . record_timestamp and remove : OLD_TIMESTAMPS . add ( record_filename ) return mtime | Get the timestamp from the timestamp file . |
42,469 | def _get_timestamp_cached ( dirname_full , remove ) : if dirname_full not in TIMESTAMP_CACHE : mtime = _get_timestamp ( dirname_full , remove ) TIMESTAMP_CACHE [ dirname_full ] = mtime return TIMESTAMP_CACHE [ dirname_full ] | Get the timestamp from the cache or fill the cache Much quicker than reading the same files over and over |
42,470 | def _max_timestamps ( dirname_full , remove , compare_tstamp ) : tstamp = _get_timestamp_cached ( dirname_full , remove ) return max_none ( ( tstamp , compare_tstamp ) ) | Compare a timestamp file to one passed in . Get the max . |
42,471 | def _get_parent_timestamp ( dirname , mtime ) : parent_pathname = os . path . dirname ( dirname ) mtime = _max_timestamps ( parent_pathname , False , mtime ) if dirname != os . path . dirname ( parent_pathname ) : mtime = _get_parent_timestamp ( parent_pathname , mtime ) return mtime | Get the timestamps up the directory tree . All the way to root . |
def get_walk_after(filename, optimize_after=None):
    """Figure out which mtime files should be compared against.

    An explicit Settings.optimize_after always wins; otherwise the
    ancestor record timestamps are combined with this directory's own
    record file (which gets queued for removal).
    """
    if Settings.optimize_after is not None:
        return Settings.optimize_after

    dirname = os.path.dirname(filename)
    if optimize_after is None:
        optimize_after = _get_parent_timestamp(dirname, None)
    return _max_timestamps(dirname, True, optimize_after)
def record_timestamp(pathname_full):
    """Write a fresh timestamp record file into a directory.

    Skipped for test/list-only runs, when record keeping is disabled,
    for symlinks when not following them, and for non-directories.
    Stale record files queued under this directory are removed after
    the new one is written.
    """
    if Settings.test or Settings.list_only or not Settings.record_timestamp:
        return
    if not Settings.follow_symlinks and os.path.islink(pathname_full):
        if Settings.verbose:
            print('Not setting timestamp because not following symlinks')
        return
    if not os.path.isdir(pathname_full):
        if Settings.verbose:
            print('Not setting timestamp for a non-directory')
        return

    record_path = os.path.join(pathname_full, RECORD_FILENAME)
    try:
        # Touch the record file; opening creates it, utime bumps mtime.
        with open(record_path, 'w'):
            os.utime(record_path, None)
        if Settings.verbose:
            print("Set timestamp: {}".format(record_path))
        for old_name in OLD_TIMESTAMPS:
            if old_name.startswith(pathname_full) and old_name != record_path:
                # Remove timestamps superseded by this new one.
                os.remove(old_name)
                if Settings.verbose:
                    print('Removed old timestamp: {}'.format(old_name))
    except IOError:
        print("Could not set timestamp in {}".format(pathname_full))
def walk_comic_archive(filename_full, image_format, optimize_after):
    """Optimize every image inside a comic archive, then recompress it.

    Returns the async result of the final recompression task (or of the
    skip report when extraction was skipped).
    """
    tmp_dir, report_stats = comic.comic_archive_uncompress(
        filename_full, image_format)
    if tmp_dir is None and report_stats:
        # Extraction was skipped; just report why through the pool.
        return Settings.pool.apply_async(
            _comic_archive_skip, args=report_stats)

    # Optimize contents of the extracted archive, gated on its own mtime.
    archive_mtime = os.stat(filename_full).st_mtime
    results = walk_dir(tmp_dir, optimize_after, True, archive_mtime)

    # Wait for every inner optimization and accumulate the GIF nag flag.
    nag_about_gifs = False
    for async_result in results:
        res = async_result.get()
        nag_about_gifs = nag_about_gifs or res.nag_about_gifs

    compress_args = (filename_full, image_format, Settings, nag_about_gifs)
    return Settings.pool.apply_async(
        comic.comic_archive_compress, args=(compress_args,))
def _is_skippable(filename_full):
    """Return True for paths that should not be optimized at all.

    Skips unfollowed symlinks, the timestamp record file itself, and
    paths that no longer exist.
    """
    if not Settings.follow_symlinks and os.path.islink(filename_full):
        return True
    if os.path.basename(filename_full) == timestamp.RECORD_FILENAME:
        return True
    if os.path.exists(filename_full):
        return False
    if Settings.verbose:
        print(filename_full, 'was not found.')
    return True
def walk_file(filename, walk_after, recurse=None, archive_mtime=None):
    """Optimize an individual file.

    Dispatches directories back to walk_dir(), comic archives to
    walk_comic_archive(), and plain images to the optimizer pool.
    Returns a set of multiprocessing async results to collect later.
    """
    filename = os.path.normpath(filename)
    result_set = set()
    if _is_skippable(filename):
        return result_set
    walk_after = timestamp.get_walk_after(filename, walk_after)
    # Directories recurse; the whole result set comes from walk_dir.
    if os.path.isdir(filename):
        return walk_dir(filename, walk_after, recurse, archive_mtime)
    if _is_older_than_timestamp(filename, walk_after, archive_mtime):
        return result_set
    # Detect the image format; report detection failures through the pool
    # so they show up with the other results.
    try:
        image_format = detect_format.detect_file(filename)
    except Exception:
        res = Settings.pool.apply_async(
            stats.ReportStats, (filename,), {'error': "Detect Format"})
        result_set.add(res)
        image_format = False
    if not image_format:
        return result_set
    if Settings.list_only:
        # List-only mode: print what would be done, queue nothing.
        print("{}: {}".format(filename, image_format))
        return result_set
    if detect_format.is_format_selected(image_format, comic.FORMATS,
                                        comic.PROGRAMS):
        # Comic archives are extracted and walked recursively.
        result = walk_comic_archive(filename, image_format, walk_after)
    else:
        # Regular image: optimize in a worker process.
        args = [filename, image_format, Settings]
        result = Settings.pool.apply_async(
            optimize.optimize_image, args=(args,))
    result_set.add(result)
    return result_set
def walk_dir(dir_path, walk_after, recurse=None, archive_mtime=None):
    """Recursively optimize every file under a directory.

    Returns the union of async-result sets produced by walk_file().
    """
    if recurse is None:
        recurse = Settings.recurse
    result_set = set()
    if not recurse:
        return result_set

    for root, _, filenames in os.walk(dir_path):
        for name in filenames:
            full_path = os.path.join(root, name)
            try:
                result_set |= walk_file(
                    full_path, walk_after, recurse, archive_mtime)
            except Exception:
                # Identify the offending file before propagating.
                print("Error with file: {}".format(full_path))
                raise
    return result_set
def _walk_all_files():
    """Optimize the files from the arguments list in two batches.

    First queue all asynchronous work for every configured path, then
    gather the results. Returns a tuple of
    (record_dirs, bytes_in, bytes_out, nag_about_gifs, errors).
    """
    record_dirs = set()
    result_set = set()
    for filename in Settings.paths:
        filename_full = os.path.abspath(filename)
        # Remember top-level dirs so timestamps can be recorded later.
        if Settings.recurse and os.path.isdir(filename_full):
            record_dirs.add(filename_full)
        walk_after = timestamp.get_walk_after(filename_full)
        results = walk_file(filename_full, walk_after, Settings.recurse)
        result_set = result_set.union(results)
    # Collect the asynchronous results and accumulate totals.
    bytes_in = 0
    bytes_out = 0
    nag_about_gifs = False
    errors = []
    for result in result_set:
        res = result.get()
        if res.error:
            errors += [(res.final_filename, res.error)]
            continue
        bytes_in += res.bytes_in
        bytes_out += res.bytes_out
        nag_about_gifs = nag_about_gifs or res.nag_about_gifs
    return record_dirs, bytes_in, bytes_out, nag_about_gifs, errors
def run():
    """Use preconfigured settings to optimize files.

    Spins up the worker pool, walks everything, then records the
    per-directory timestamps and prints the totals.
    """
    Settings.pool = multiprocessing.Pool(Settings.jobs)
    totals = _walk_all_files()
    record_dirs, bytes_in, bytes_out, nag_about_gifs, errors = totals
    Settings.pool.close()
    Settings.pool.join()
    for dirname in record_dirs:
        timestamp.record_timestamp(dirname)
    stats.report_totals(bytes_in, bytes_out, nag_about_gifs, errors)
def replace_ext(filename, new_ext):
    """Swap a filename's extension for *new_ext* (given without a dot)."""
    root = os.path.splitext(filename)[0]
    return root + '.' + new_ext
def parse_reqs(filename):
    """Parse setup requirements from a requirements.txt file.

    Returns the requirements as a list of strings.
    """
    parsed = parse_requirements(filename, session=False)
    return [str(entry.req) for entry in parsed]
def get_req_list():
    """Build the requirement list, adding dev extras for 'develop' builds."""
    reqs = parse_reqs(REQUIREMENTS['prod'])
    is_develop = len(sys.argv) > 2 and sys.argv[2] == 'develop'
    if is_develop:
        reqs += parse_reqs(REQUIREMENTS['dev'])
    return reqs
def get_comic_format(filename):
    """Return the comic archive format of *filename*, or None.

    The extension must be a known comic extension AND the file contents
    must actually be a zip (cbz) or rar (cbr) archive.
    """
    ext = os.path.splitext(filename)[-1].lower()
    if ext not in _COMIC_EXTS:
        return None
    if zipfile.is_zipfile(filename):
        return _CBZ_FORMAT
    if rarfile.is_rarfile(filename):
        return _CBR_FORMAT
    return None
def _get_archive_tmp_dir(filename):
    """Name of the scratch directory used while recompressing *filename*.

    Lives next to the archive itself.
    """
    parent, base = os.path.split(filename)
    return os.path.join(parent, _ARCHIVE_TMP_DIR_TEMPLATE.format(base))
def comic_archive_uncompress(filename, image_format):
    """Uncompress comic archives.

    Extracts the archive into a scratch directory next to the file.
    Returns (tmp_dir, None) on success, or (None, ReportStats) when the
    archive was skipped or the format is unsupported.
    """
    if not Settings.comics:
        report = ['Skipping archive file: {}'.format(filename)]
        return None, ReportStats(filename, report=report)
    if Settings.verbose:
        truncated_filename = stats.truncate_cwd(filename)
        print("Extracting {}...".format(truncated_filename), end='')
    # Ensure a clean scratch directory before extracting.
    tmp_dir = _get_archive_tmp_dir(filename)
    if os.path.isdir(tmp_dir):
        shutil.rmtree(tmp_dir)
    os.mkdir(tmp_dir)
    # Extract by container type (zip for cbz, rar for cbr).
    if image_format == _CBZ_FORMAT:
        with zipfile.ZipFile(filename, 'r') as zfile:
            zfile.extractall(tmp_dir)
    elif image_format == _CBR_FORMAT:
        with rarfile.RarFile(filename, 'r') as rfile:
            rfile.extractall(tmp_dir)
    else:
        report = '{} {} is not a good format'.format(filename, image_format)
        return None, ReportStats(filename, report=report)
    if Settings.verbose:
        print('done')
    return tmp_dir, None
def _comic_archive_write_zipfile(new_filename, tmp_dir):
    """Zip the contents of *tmp_dir* into *new_filename* (deflated)."""
    if Settings.verbose:
        print('Rezipping archive', end='')
    prefix_len = len(os.path.abspath(tmp_dir))
    with zipfile.ZipFile(new_filename, 'w',
                         compression=zipfile.ZIP_DEFLATED) as zout:
        for root, _dirs, names in os.walk(tmp_dir):
            # Archive paths are relative to the tmp dir root.
            rel_root = os.path.abspath(root)[prefix_len:]
            for name in names:
                if Settings.verbose:
                    print('.', end='')
                zout.write(os.path.join(root, name),
                           os.path.join(rel_root, name),
                           zipfile.ZIP_DEFLATED)
def comic_archive_compress(args):
    """Called back by every optimization inside a comic archive.

    Repackages the optimized scratch dir as a cbz zip, removes the
    scratch dir, and reports savings. *args* is the tuple
    (filename, old_format, settings, nag_about_gifs) because it crosses
    a multiprocessing boundary.
    """
    try:
        filename, old_format, settings, nag_about_gifs = args
        Settings.update(settings)
        tmp_dir = _get_archive_tmp_dir(filename)

        # Archive into a new filename; cleanup decides which survives.
        new_filename = files.replace_ext(filename, _NEW_ARCHIVE_SUFFIX)
        _comic_archive_write_zipfile(new_filename, tmp_dir)

        # Remove the scratch directory.
        if os.path.isdir(tmp_dir):
            if Settings.verbose:
                print('.', end='')
            shutil.rmtree(tmp_dir)
        if Settings.verbose:
            print('done.')

        report_stats = files.cleanup_after_optimize(
            filename, new_filename, old_format, _CBZ_FORMAT)
        report_stats.nag_about_gifs = nag_about_gifs
        stats.report_saved(report_stats)
        return report_stats
    except Exception as exc:
        print(exc)
        # BUG FIX: traceback.print_exc() takes an optional *limit*
        # argument, not an exception instance; the original
        # print_exc(exc) itself blew up while reporting the error.
        traceback.print_exc()
        raise
def mozjpeg(ext_args):
    """Create the argument list for mozjpeg and run it."""
    args = list(_MOZJPEG_ARGS)
    # Metadata is either stripped entirely or copied verbatim.
    copy_mode = 'none' if Settings.destroy_metadata else 'all'
    args += ['-copy', copy_mode, '-outfile',
             ext_args.new_filename, ext_args.old_filename]
    extern.run_ext(args)
    return _JPEG_FORMAT
def jpegtran(ext_args):
    """Create the argument list for jpegtran and run it."""
    args = list(_JPEGTRAN_ARGS)
    args += ['-copy', 'none' if Settings.destroy_metadata else 'all']
    if Settings.jpegtran_prog:
        args.append('-progressive')
    args += ['-outfile', ext_args.new_filename, ext_args.old_filename]
    extern.run_ext(args)
    return _JPEG_FORMAT
def jpegrescan(ext_args):
    """Run the EXTERNAL program jpegrescan."""
    args = list(_JPEGRESCAN_ARGS)
    if Settings.jpegrescan_multithread:
        args.append('-t')
    if Settings.destroy_metadata:
        args.append('-s')
    args.extend([ext_args.old_filename, ext_args.new_filename])
    extern.run_ext(args)
    return _JPEG_FORMAT
def process_arguments(arguments):
    """Recompute special cases for input arguments.

    Mutates both the Settings singleton and the parsed *arguments*
    namespace, then returns the namespace.
    """
    Settings.update(arguments)
    Settings.config_program_reqs(PROGRAMS)
    # Internal verbosity is one level above the CLI count.
    Settings.verbose = arguments.verbose + 1
    Settings.paths = set(arguments.paths)
    if arguments.formats == DEFAULT_FORMATS:
        Settings.formats = arguments.to_png_formats | \
            jpeg.FORMATS | gif.FORMATS
    else:
        Settings.formats = set(
            arguments.formats.upper().split(FORMAT_DELIMETER))
    if arguments.comics:
        Settings.formats = Settings.formats | comic.FORMATS
    if arguments.optimize_after is not None:
        # Parse the CLI date string into an epoch timestamp.
        try:
            after_dt = dateutil.parser.parse(arguments.optimize_after)
            arguments.optimize_after = time.mktime(after_dt.timetuple())
        except Exception as ex:
            print(ex)
            print('Could not parse date to optimize after.')
            exit(1)
    if arguments.jobs < 1:
        Settings.jobs = 1
    # Allow jpegrescan's own multithreading only when the paths are all
    # plain files and few enough not to saturate the worker pool.
    files_in_paths = 0
    non_file_in_paths = False
    for filename in arguments.paths:
        if os.path.isfile(filename):
            files_in_paths += 1
        else:
            non_file_in_paths = True
    Settings.jpegrescan_multithread = not non_file_in_paths and \
        Settings.jobs - (files_in_paths * 3) > -1
    return arguments
def run(args):
    """Process command line arguments and walk inputs.

    *args* is the full argv; the program name is dropped.
    """
    parsed = get_arguments(args[1:])
    process_arguments(parsed)
    walk.run()
    return True
def does_external_program_run(prog, verbose):
    """Test whether the external program *prog* can be run.

    Tries ``prog -h`` with all output discarded; an OSError (e.g.
    FileNotFoundError) means the program is unavailable.

    Bug fix: the original opened '/dev/null' READ-ONLY and handed it to
    the child as stdout/stderr, so any output write failed on the
    read-only descriptor (and the path is non-portable).
    subprocess.DEVNULL is the correct, portable sink.
    """
    try:
        subprocess.call([prog, '-h'],
                        stdout=subprocess.DEVNULL,
                        stderr=subprocess.DEVNULL)
        result = True
    except OSError:
        if verbose > 1:
            print("couldn't run {}".format(prog))
        result = False
    return result
def run_ext(args):
    """Run an EXTERNAL program, dumping details on failure.

    Re-raises CalledProcessError after printing the command, return
    code, and any captured output.
    """
    try:
        subprocess.check_call(args)
    except subprocess.CalledProcessError as exc:
        for detail in (exc, exc.cmd, exc.returncode, exc.output):
            print(detail)
        raise
def _humanize_bytes(num_bytes, precision=1):
    """Return a humanized string representation of a number of bytes.

    Uses the (factor, suffix) pairs in ABBREVS, largest factor first.
    """
    if num_bytes == 0:
        return 'no bytes'
    if num_bytes == 1:
        return '1 byte'

    scaled = 0
    unit = 'bytes'
    for factor, suffix in ABBREVS:
        if num_bytes >= factor:
            scaled = num_bytes / factor
            unit = suffix
            break
    # "1.0 kiB" reads better as "1 kiB".
    if scaled == 1:
        precision = 0
    return '{:.{prec}f} {}'.format(scaled, unit, prec=precision)
def new_percent_saved(report_stats):
    """Spit out how much space the optimization saved."""
    size_in = report_stats.bytes_in
    if size_in > 0:
        size_out = report_stats.bytes_out
        ratio = size_out / size_in
        kb_saved = _humanize_bytes(size_in - size_out)
    else:
        # Nothing went in; report zero savings.
        ratio = 0
        kb_saved = 0
    return '{:.{prec}f}% ({})'.format((1 - ratio) * 100, kb_saved, prec=2)
def truncate_cwd(full_filename):
    """Strip the current-working-directory prefix from a path."""
    cwd = os.getcwd()
    if not full_filename.startswith(cwd):
        return full_filename
    remainder = full_filename.split(cwd, 1)[1]
    # Drop the leading separator left over after removing the cwd.
    return remainder.split(os.sep, 1)[1]
def report_saved(report_stats):
    """Record the percent saved & print it (verbose mode only)."""
    if not Settings.verbose:
        return
    report = '{}: '.format(truncate_cwd(report_stats.final_filename))
    report += new_percent_saved(report_stats) or '0%'
    if Settings.test:
        report += ' could be saved.'
    if Settings.verbose > 1:
        # Extra-verbose: list the individual tool reports too.
        tools_report = ', '.join(report_stats.report_list)
        if tools_report:
            report += '\n\t' + tools_report
    print(report)
def report_totals(bytes_in, bytes_out, nag_about_gifs, errors):
    """Report the total number and percent of bytes saved.

    Also prints a GIF-conversion nag and any per-file errors collected
    during the run.
    """
    if bytes_in:
        bytes_saved = bytes_in - bytes_out
        percent_bytes_saved = bytes_saved / bytes_in * 100
        msg = ''
        # Phrase the totals hypothetically in test (dry-run) mode.
        if Settings.test:
            if percent_bytes_saved > 0:
                msg += "Could save"
            elif percent_bytes_saved == 0:
                msg += "Could even out for"
            else:
                msg += "Could lose"
        else:
            if percent_bytes_saved > 0:
                msg += "Saved"
            elif percent_bytes_saved == 0:
                msg += "Evened out"
            else:
                msg = "Lost"
        msg += " a total of {} or {:.{prec}f}%".format(
            _humanize_bytes(bytes_saved), percent_bytes_saved, prec=2)
        if Settings.verbose:
            print(msg)
        if Settings.test:
            print("Test run did not change any files.")
    else:
        if Settings.verbose:
            print("Didn't optimize any files.")
    if nag_about_gifs and Settings.verbose:
        print("Most animated GIFS would be better off converted to"
              " HTML5 video")
    if not errors:
        return
    print("Errors with the following files:")
    for error in errors:
        print("{}: {}".format(error[0], error[1]))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.