idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
2,000
def parse(self, words):
    """Parse a word representation of a number to a float.

    Supports integers and floats. Handles a trailing ordinal ('third')
    or fraction word ('quarter') by substituting its numeric value
    before delegating to ``parseFloat``.

    :param words: a numeric literal or a space-separated word phrase.
    :returns: the parsed value as a float.
    """
    def exact(words):
        """If already represented as float or int, convert."""
        try:
            return float(words)
        except (ValueError, TypeError):
            # was a bare `except:` -- narrow to conversion failures so
            # KeyboardInterrupt/SystemExit are not swallowed
            return None

    guess = exact(words)
    if guess is not None:
        return guess

    split = words.split(' ')

    # Replace final ordinal/fraction with number
    if split[-1] in self.__fractions__:
        split[-1] = self.__fractions__[split[-1]]
    elif split[-1] in self.__ordinals__:
        split[-1] = self.__ordinals__[split[-1]]

    parsed_ordinals = ' '.join(split)
    return self.parseFloat(parsed_ordinals)
A general method for parsing word - representations of numbers . Supports floats and integers .
160
16
2,001
def parseFloat(self, words):
    """Convert a floating-point number described in words to a float.

    Tries "one point two five"-style parsing, then "one and a
    quarter"-style parsing, then falls back to integer parsing.
    """
    def pointFloat(words):
        # "X point D1 D2 ..." -> parseInt(X) + 0.1*D1 + 0.01*D2 + ...
        m = re.search(r'(.*) point (.*)', words)
        if m:
            whole = m.group(1)
            frac = m.group(2)
            total = 0.0
            coeff = 0.10
            for digit in frac.split(' '):
                total += coeff * self.parse(digit)
                coeff /= 10.0
            return self.parseInt(whole) + total
        return None

    def fractionFloat(words):
        # "X and N D" -> parseInt(X) + N/D, e.g. "one and three quarters"
        m = re.search(r'(.*) and (.*)', words)
        if m:
            whole = self.parseInt(m.group(1))
            frac = m.group(2)
            # Replace plurals (raw strings: '\g' is an invalid escape
            # in non-raw literals and warns on modern Python)
            frac = re.sub(r'(\w+)s(\b)', r'\g<1>\g<2>', frac)
            # Convert 'a' to 'one' (e.g., 'a third' to 'one third')
            frac = re.sub(r'(\b)a(\b)', r'\g<1>one\g<2>', frac)
            split = frac.split(' ')
            # Split fraction into num (regular integer), denom (ordinal)
            num = split[:1]
            denom = split[1:]
            while denom:
                try:
                    # Test for valid num, denom
                    num_value = self.parse(' '.join(num))
                    denom_value = self.parse(' '.join(denom))
                    return whole + float(num_value) / denom_value
                except Exception:
                    # Not parseable yet: shift a word from denom to num.
                    # (was a bare `except:`)
                    num += denom[:1]
                    denom = denom[1:]
        return None

    # Extract "one point two five"-type float
    result = pointFloat(words)
    if result is not None:  # 0.0 is a valid parse; `if result:` dropped it
        return result

    # Extract "one and a quarter"-type float
    result = fractionFloat(words)
    if result is not None:
        return result

    # Parse as integer
    return self.parseInt(words)
Convert a floating - point number described in words to a double .
438
14
2,002
def parseInt(self, words):
    """Parse a word description of an integer (e.g. "two hundred six").

    :param words: the phrase to parse.
    :returns: the described integer.
    :raises NumberService.NumberException: for unrecognized words.
    """
    # Remove 'and', case-sensitivity
    words = words.replace(" and ", " ").lower()
    # 'a' -> 'one' (raw strings: '\g' is an invalid escape sequence in
    # non-raw literals and triggers SyntaxWarning on Python 3.12+)
    words = re.sub(r'(\b)a(\b)', r'\g<1>one\g<2>', words)

    def textToNumber(s):
        """Converts raw number string to an integer.

        Based on text2num.py by Greg Hewill.
        """
        a = re.split(r"[\s-]+", s)
        n = 0  # accumulated total across magnitude groups
        g = 0  # current group (below the next magnitude word)
        for w in a:
            x = NumberService.__small__.get(w, None)
            if x is not None:
                g += x
            elif w == "hundred":
                g *= 100
            else:
                x = NumberService.__magnitude__.get(w, None)
                if x is not None:
                    n += g * x
                    g = 0
                else:
                    raise NumberService.NumberException("Unknown number: " + w)
        return n + g

    return textToNumber(words)
Parses words to the integer they describe .
231
10
2,003
def parseMagnitude(m):
    """Parse a number m into a human-ready string representation.

    For example, crops off floats if they're too accurate and expands
    scientific notation into words.
    """
    m = NumberService().parse(m)

    def toDecimalPrecision(n, k):
        return float("%.*f" % (k, round(n, k)))

    # Cast to two digits of precision
    digits = 2
    magnitude = toDecimalPrecision(m, digits)

    # If value is really small, keep going.
    # Guard m == 0 explicitly: without it this loop never terminates,
    # because rounding zero stays zero at every precision.
    while not magnitude and m != 0:
        digits += 1
        magnitude = toDecimalPrecision(m, digits)

    # If item is less than one, go one beyond 'necessary' number of digits
    if m < 1.0:
        magnitude = toDecimalPrecision(m, digits + 1)

    # Ignore decimal accuracy if irrelevant
    if int(magnitude) == magnitude:
        magnitude = int(magnitude)

    # Adjust for scientific notation (raw strings avoid invalid '\g'
    # escape warnings on modern Python)
    magString = str(magnitude)
    magString = re.sub(r'(\d)e-(\d+)',
                       r'\g<1> times ten to the negative \g<2>', magString)
    magString = re.sub(r'(\d)e\+(\d+)',
                       r'\g<1> times ten to the \g<2>', magString)
    magString = re.sub(r'-(\d+)', r'negative \g<1>', magString)
    magString = re.sub(r'\b0(\d+)', r'\g<1>', magString)
    return magString
Parses a number m into a human-ready string representation. For example, crops off floats if they're too accurate.
308
25
2,004
def serialize(self, raw=False):
    """Encode the private part of the key.

    By default the key is returned base64-encoded; when *raw* is True
    the key's raw encoded bytes are returned instead.
    """
    if raw:
        return self._key.encode()
    return self._key.encode(nacl.encoding.Base64Encoder)
Encode the private part of the key in a base64 format by default but when raw is True it will return hex encoded bytes .
41
27
2,005
def _do_get(self, url, **kwargs):
    """Convenience method for HTTP GET requests.

    :param url: full URL to fetch.
    :returns: the requests Response on HTTP 200 OK.
    :raises RuntimeError: on any HTTP error status or communication
        failure.
    """
    #TODO:
    # Add error handling. Check for HTTP status here would be much more conveinent than in each calling method
    try:
        response = self._session.get(url)
    except Exception as e:
        self.conn.logger.error("_do_get() - Unhandled Error Occurred: %s" % str(e))
        raise RuntimeError("_do_get() - Communication error with ScaleIO gateway")
    if response.status_code == requests.codes.ok:
        self.conn.logger.debug('_do_get() - HTTP response OK, data: %s', response.text)
        return response
    self.conn.logger.error('_do_get() - HTTP response error: %s', response.status_code)
    self.conn.logger.error('_do_get() - HTTP response error, data: %s', response.text)
    # str() is required: concatenating an int status code to a str raised
    # TypeError in the original, which its own broad `except` then
    # re-wrapped as a generic communication error, hiding the status.
    raise RuntimeError("_do_get() - HTTP response error " + str(response.status_code))
Convenience method for GET requests. Returns the HTTP response from a GET request.
294
15
2,006
def _do_post(self, url, **kwargs):
    """Convenience method for HTTP POST requests.

    :param url: full URL to post to.
    :returns: the requests Response on HTTP 200 OK.
    :raises RuntimeError: on any HTTP error status or communication
        failure.
    """
    #TODO:
    # Add error handling. Check for HTTP status here would be much more conveinent than in each calling method
    scaleioapi_post_headers = {'Content-type': 'application/json', 'Version': '1.0'}
    try:
        response = self._session.post(url, headers=scaleioapi_post_headers, **kwargs)
        self.conn.logger.debug('_do_post() - HTTP response: %s', response.text)
    except Exception as e:
        self.conn.logger.error("_do_post() - Unhandled Error Occurred: %s" % str(e))
        raise RuntimeError("_do_post() - Communication error with ScaleIO gateway")
    if response.status_code == requests.codes.ok:
        self.conn.logger.debug('_do_post() - HTTP response OK, data: %s', response.text)
        return response
    self.conn.logger.error('_do_post() - HTTP response error: %s', response.status_code)
    self.conn.logger.error('_do_post() - HTTP response error, data: %s', response.text)
    # str() is required: concatenating an int status code to a str raised
    # TypeError in the original, masking the real HTTP error.
    raise RuntimeError("_do_post() - HTTP response error " + str(response.status_code))
Convenient method for POST requests Returns http request status value from a POST request
308
15
2,007
def discharge_required_response(macaroon, path, cookie_suffix_name,
                                message=None):
    """Build (content, headers) for a discharge-required macaroon error.

    :param macaroon: the macaroon to include (serialized via to_dict).
    :param path: path the discharged macaroon cookie applies to.
    :param cookie_suffix_name: suffix for the cookie name.
    :param message: optional human-readable message.
    :returns: a (bytes, dict) pair of JSON body and response headers.
    """
    if message is None:
        message = 'discharge required'
    payload = {
        'Code': 'macaroon discharge required',
        'Message': message,
        'Info': {
            'Macaroon': macaroon.to_dict(),
            'MacaroonPath': path,
            'CookieNameSuffix': cookie_suffix_name,
        },
    }
    content = json.dumps(payload).encode('utf-8')
    headers = {
        'WWW-Authenticate': 'Macaroon',
        'Content-Type': 'application/json',
    }
    return content, headers
Get response content and headers from a discharge macaroons error .
150
13
2,008
def request_version(req_headers):
    """Determine the bakery protocol version from a client request.

    Falls back to VERSION_1 when the header is missing or malformed,
    and caps at LATEST_VERSION for clients newer than we know about
    (versions are backwardly compatible).
    """
    raw = req_headers.get(BAKERY_PROTOCOL_HEADER)
    if raw is None:
        # No header - use backward compatibility mode.
        return bakery.VERSION_1
    try:
        version = int(raw)
    except ValueError:
        # Badly formed header - use backward compatibility mode.
        return bakery.VERSION_1
    # Cap later versions at the latest one we can speak.
    return min(version, bakery.LATEST_VERSION)
Determines the bakery protocol version from a client request . If the protocol cannot be determined or is invalid the original version of the protocol is used . If a later version is found the latest known version is used which is OK because versions are backwardly compatible .
117
52
2,009
def from_dict(cls, serialized):
    """Create an Error from a JSON-deserialized object.

    Some servers return lower-case field names for message and code;
    like the Go client, tolerate both spellings.
    """
    lookup = lambda name: serialized.get(name) or serialized.get(name.lower())
    return Error(
        code=lookup('Code'),
        message=lookup('Message'),
        info=ErrorInfo.from_dict(lookup('Info')),
        version=bakery.LATEST_VERSION,
    )
Create an error from a JSON - deserialized object
114
11
2,010
def interaction_method(self, kind, x):
    """Return interaction-method data for *kind*, deserialized via x.

    :param kind: name of the interaction method to look up.
    :param x: a type with a ``from_dict`` classmethod used to
        unmarshal the method-specific data.
    :raises InteractionError: when this is not an interaction-required
        error.
    :raises InteractionMethodNotFound: when *kind* is not offered.
    """
    if self.info is None or self.code != ERR_INTERACTION_REQUIRED:
        message = 'not an interaction-required error (code {})'.format(
            self.code)
        raise InteractionError(message)
    entry = self.info.interaction_methods.get(kind)
    if entry is None:
        raise InteractionMethodNotFound(
            'interaction method {} not found'.format(kind))
    return x.from_dict(entry)
Checks whether the error is an InteractionRequired error that implements the method with the given name and JSON - unmarshals the method - specific data into x by calling its from_dict method with the deserialized JSON object .
107
47
2,011
def from_dict(cls, serialized):
    """Create a new ErrorInfo object from a JSON-deserialized dict."""
    if serialized is None:
        return None
    macaroon = serialized.get('Macaroon')
    if macaroon is not None:
        macaroon = bakery.Macaroon.from_dict(macaroon)
    return ErrorInfo(
        macaroon=macaroon,
        macaroon_path=serialized.get('MacaroonPath'),
        cookie_name_suffix=serialized.get('CookieNameSuffix'),
        visit_url=serialized.get('VisitURL'),
        wait_url=serialized.get('WaitURL'),
        interaction_methods=serialized.get('InteractionMethods'),
    )
Create a new ErrorInfo object from a JSON deserialized dictionary
200
13
2,012
async def dump_blob(elem, elem_type=None):
    """Dump a blob message to base16. Supports blob objects and raw values."""
    if isinstance(elem, x.BlobType):
        data = getattr(elem, x.BlobType.DATA_ATTR)
    else:
        data = elem
    if data is None or len(data) == 0:
        return b''
    if not isinstance(data, (bytes, bytearray, list)):
        raise ValueError('Unknown blob type')
    return base64.b16encode(bytes(data))
Dumps blob message . Supports both blob and raw value .
128
12
2,013
async def dump_container(obj, container, container_type, params=None,
                         field_archiver=None):
    """Serialize *container* as a plain list of dumped elements."""
    archiver = field_archiver if field_archiver else dump_field
    elem_type = params[0] if params else None
    if elem_type is None:
        elem_type = container_type.ELEM_TYPE
    obj = [] if obj is None else get_elem(obj)
    if container is None:
        return None
    sub_params = params[1:] if params else None
    for elem in container:
        obj.append(await archiver(None, elem, elem_type, sub_params))
    return obj
Serializes container as popo
146
6
2,014
async def load_container(obj, container_type, params=None, container=None,
                         field_archiver=None):
    """Load a container of elements from its object representation.

    Supports loading into an existing *container* reference; returns
    the loaded container.
    """
    archiver = field_archiver if field_archiver else load_field
    if obj is None:
        return None
    elem_type = params[0] if params else None
    if elem_type is None:
        elem_type = container_type.ELEM_TYPE
    res = container if container else []
    sub_params = params[1:] if params else None
    for i in range(len(obj)):
        ref = eref(res, i) if container else None
        fvalue = await archiver(obj[i], elem_type, sub_params, ref)
        if not container:
            res.append(fvalue)
    return res
Loads container of elements from the object representation . Supports the container ref . Returns loaded container .
168
19
2,015
async def dump_message_field(obj, msg, field, field_archiver=None):
    """Dump one message field into *obj* per the field specification."""
    archiver = field_archiver if field_archiver else dump_field
    fname, ftype, params = field[0], field[1], field[2:]
    fvalue = getattr(msg, fname, None)
    target = eref(obj, fname, True)
    return await archiver(target, fvalue, ftype, params)
Dumps a message field to the object . Field is defined by the message field specification .
103
18
2,016
async def load_message_field(obj, msg, field, field_archiver=None):
    """Load one message field from *obj* into *msg* via the field spec."""
    archiver = field_archiver if field_archiver else load_field
    fname, ftype, params = field[0], field[1], field[2:]
    await archiver(obj[fname], ftype, params, eref(msg, fname))
Loads message field from the object . Field is defined by the message field specification . Returns loaded value supports field reference .
90
24
2,017
async def dump_message(obj, msg, field_archiver=None):
    """Dump *msg* to the object; returns its popo representation."""
    specs = msg.__class__.f_specs()
    obj = collections.OrderedDict() if obj is None else get_elem(obj)
    for field in specs:
        await dump_message_field(obj, msg=msg, field=field,
                                 field_archiver=field_archiver)
    return obj
Dumps message to the object . Returns message popo representation .
94
13
2,018
async def load_message(obj, msg_type, msg=None, field_archiver=None):
    """Load a message of *msg_type* from *obj*.

    Supports reading directly into an existing *msg* instance.
    """
    msg = msg_type() if msg is None else msg
    specs = msg_type.f_specs() if msg_type else msg.__class__.f_specs()
    for field in specs:
        await load_message_field(obj, msg, field,
                                 field_archiver=field_archiver)
    return msg
Loads a message of the given type from the object. Supports reading directly into an existing message.
97
18
2,019
async def dump_variant(obj, elem, elem_type=None, params=None,
                       field_archiver=None):
    """Dump a variant to its popo representation ({name: value})."""
    archiver = field_archiver if field_archiver else dump_field
    if isinstance(elem, x.VariantType) or elem_type.WRAPS_VALUE:
        value = await archiver(None, getattr(elem, elem.variant_elem),
                               elem.variant_elem_type)
        return {elem.variant_elem: value}
    fdef = elem_type.find_fdef(elem_type.f_specs(), elem)
    return {fdef[0]: await archiver(None, elem, fdef[1])}
Transform variant to the popo object representation .
173
9
2,020
async def dump_field(obj, elem, elem_type, params=None):
    """Dump a generic field to its popo representation (multiplexer)."""
    if (isinstance(elem, (int, bool))
            or issubclass(elem_type, x.UVarintType)
            or issubclass(elem_type, x.IntType)):
        value = elem
    elif (issubclass(elem_type, x.BlobType)
            or isinstance(obj, bytes) or isinstance(obj, bytearray)):
        value = await dump_blob(elem)
    elif issubclass(elem_type, x.UnicodeType) or isinstance(elem, str):
        value = elem
    elif issubclass(elem_type, x.VariantType):
        value = await dump_variant(None, elem, elem_type, params)
    elif issubclass(elem_type, x.ContainerType):
        # container ~ simple list
        value = await dump_container(None, elem, elem_type, params)
    elif issubclass(elem_type, x.MessageType):
        value = await dump_message(None, elem)
    else:
        raise TypeError
    return set_elem(obj, value)
Dumps generic field to the popo object representation according to the element specification . General multiplexer .
304
21
2,021
async def load_field(obj, elem_type, params=None, elem=None):
    """Load a field per the field-type specification (demultiplexer)."""
    if (issubclass(elem_type, x.UVarintType)
            or issubclass(elem_type, x.IntType)
            or isinstance(obj, (int, bool))):
        fvalue = obj
    elif issubclass(elem_type, x.BlobType):
        fvalue = await load_blob(obj, elem_type)
    elif issubclass(elem_type, x.UnicodeType) or isinstance(elem, str):
        fvalue = obj
    elif issubclass(elem_type, x.VariantType):
        fvalue = await load_variant(obj, elem=get_elem(elem),
                                    elem_type=elem_type, params=params)
    elif issubclass(elem_type, x.ContainerType):
        # container ~ simple list
        fvalue = await load_container(obj, elem_type, params=params,
                                      container=get_elem(elem))
    elif issubclass(elem_type, x.MessageType):
        fvalue = await load_message(obj, msg_type=elem_type,
                                    msg=get_elem(elem))
    else:
        raise TypeError
    return set_elem(elem, fvalue)
Loads a field from the reader based on the field type specification . Demultiplexer .
357
20
2,022
def instantiate(data, blueprint):
    """Instantiate *data* using a blueprinter built for *blueprint*."""
    validator_cls = jsonschema.validators.validator_for(blueprint)
    blueprinter = extend(validator_cls)(blueprint)
    return blueprinter.instantiate(data)
Instantiate the given data using the blueprinter .
49
11
2,023
def main(argv=None):
    """Entry point: run the vsgen suite from command-line arguments."""
    from vsgen import VSGSuite
    from vsgen import VSGLogger

    # Special case to use sys.argv when main is called without a list.
    argv = sys.argv if argv is None else argv

    # Initialize the application logger (kept alive for the run).
    pylogger = VSGLogger()

    # Construct a command line parser and parse the command line.
    parser = VSGSuite.make_parser(
        description='Executes the vsgen package as an application.')
    args = parser.parse_args(argv[1:])
    for suite in VSGSuite.from_args(**vars(args)):
        suite.write(False)
    return 0
The entry point of the script .
149
7
2,024
def parse_fields(attributes):
    """Collect model fields from an attributes mapping.

    Returns a tuple of fields, each bound to its attribute name.
    """
    bound = []
    for name, attr in six.iteritems(attributes):
        if isinstance(attr, fields.Field):
            bound.append(attr.bind_name(name))
    return tuple(bound)
Parse model fields .
42
5
2,025
def prepare_fields_attribute(attribute_name, attributes, class_name):
    """Normalize a model-fields attribute to a tuple.

    :raises errors.Error: when the attribute is set but not iterable.
    """
    attribute = attributes.get(attribute_name)
    if not attribute:
        return tuple()
    if isinstance(attribute, std_collections.Iterable):
        return tuple(attribute)
    raise errors.Error('{0}.{1} is supposed to be a list of {2}, '
                       'instead {3} given',
                       class_name, attribute_name, fields.Field, attribute)
Prepare model fields attribute .
108
6
2,026
def bind_fields_to_model_cls(cls, model_fields):
    """Bind each field to the model class, keyed by field name."""
    return {field.name: field.bind_model_cls(cls) for field in model_fields}
Bind fields to model class .
48
6
2,027
def bind_collection_to_model_cls(cls):
    """Rebind cls.Collection to a subclass whose value_type is cls."""
    name = '{0}.Collection'.format(cls.__name__)
    collection_cls = type(name, (cls.Collection,), {'value_type': cls})
    collection_cls.__module__ = cls.__module__
    cls.Collection = collection_cls
Bind collection to the model's class.
74
7
2,028
def checklist(ctx):
    """Print the pre-/post-release checklist for this project."""
    template = """PRE-RELEASE CHECKLIST:
[ ] Everything is checked in
[ ] All tests pass w/ tox

RELEASE CHECKLIST:
[{x1}] Bump version to new-version and tag repository (via bump_version)
[{x2}] Build packages (sdist, bdist_wheel via prepare)
[{x3}] Register and upload packages to testpypi repository (first)
[{x4}] Verify release is OK and packages from testpypi are usable
[{x5}] Register and upload packages to pypi repository
[{x6}] Push last changes to Github repository

POST-RELEASE CHECKLIST:
[ ] Bump version to new-develop-version (via bump_version)
[ ] Adapt CHANGES (if necessary)
[ ] Commit latest changes to Github repository
"""
    # No steps are tracked programmatically yet; every slot is undecided.
    steps = dict(x1=None, x2=None, x3=None, x4=None, x5=None, x6=None)
    yesno_map = {True: "x", False: "_", None: " "}
    answers = {name: yesno_map[value] for name, value in steps.items()}
    print(template.format(**answers))
Checklist for releasing this project .
277
7
2,029
def build_packages(ctx, hide=False):
    """Build sdist and wheel packages for this release."""
    print("build_packages:")
    command = "python setup.py sdist bdist_wheel"
    ctx.run(command, echo=True, hide=hide)
Build packages for this release .
47
6
2,030
def register(self, name, function, description=None):
    """Register a new thread with the application's thread manager."""
    manager = self.__app.threads
    return manager.register(name, function, self._plugin, description)
Register a new thread .
35
5
2,031
def unregister(self, thread):
    """Unregister an existing thread so it is no longer available.

    :param thread: key of the thread in ``self.threads``.
    """
    if thread not in self.threads.keys():
        # Unknown thread: warn and leave the registry untouched.
        self.log.warning("Can not unregister thread %s" % thread)
    else:
        del(self.threads[thread])
        # NOTE(review): this branch logs via self.__log while the branch
        # above uses self.log -- looks inconsistent; confirm which logger
        # the owning class actually defines.
        self.__log.debug("Thread %s got unregistered" % thread)
Unregisters an existing thread so that this thread is no longer available .
68
15
2,032
def get(self, thread=None, plugin=None):
    """Get one or more threads.

    :param thread: optional thread key; when omitted a dict is returned.
    :param plugin: optional plugin filter.
    :returns: a dict of threads when no key is given, the matching
        thread when a key (and optional plugin filter) matches,
        otherwise None.
    """
    if plugin is None:
        if thread is None:
            return self.threads
        if thread in self.threads.keys():
            return self.threads[thread]
        return None
    if thread is None:
        # All threads belonging to the given plugin.
        return {
            key: registered
            for key, registered in self.threads.items()
            if registered.plugin == plugin
        }
    if thread in self.threads.keys() and self.threads[thread].plugin == plugin:
        return self.threads[thread]
    return None
Get one or more threads .
146
6
2,033
def create_schema_from_xsd_directory(directory, version):
    """Create and fill a Schema from every xsd file in *directory*.

    Delegates to fill_schema_from_xsd_file for each file found.
    """
    schema = Schema(version)
    for xsd_file in _get_xsd_from_directory(directory):
        logger.info("Loading schema %s" % xsd_file)
        fill_schema_from_xsd_file(xsd_file, schema)
    return schema
Create and fill the schema from a directory which contains xsd files . It calls fill_schema_from_xsd_file for each xsd file found .
72
34
2,034
def fill_schema_from_xsd_file(filename, schema):
    """Fill *schema* with Resources parsed from one xsd file.

    Uses the generateDS idl_parser output: Link statements become
    parent/child ('has') and reference ('ref') relations between
    resources; Property statements attach a property to one resource,
    or to every resource when the target is the special name 'all'.
    """
    ifmap_statements = _parse_xsd_file(filename)
    properties_all = []
    for v in ifmap_statements.values():
        if (isinstance(v[0], IDLParser.Link)):
            # Statement tuple appears to be (Link, src, target, spec)
            # -- inferred from the indexing below.
            src_name = v[1]
            target_name = v[2]
            src = schema._get_or_add_resource(src_name)
            target = schema._get_or_add_resource(target_name)
            if "has" in v[3]:
                # Containment: src is the parent of target.
                src.children.append(target_name)
                target.parent = src_name
            if "ref" in v[3]:
                # Cross-reference, recorded on both sides.
                src.refs.append(target_name)
                target.back_refs.append(src_name)
        elif isinstance(v[0], IDLParser.Property):
            target_name = v[1][0]
            prop = ResourceProperty(v[0].name, is_list=v[0].is_list,
                                    is_map=v[0].is_map)
            if target_name != 'all':
                target = schema._get_or_add_resource(target_name)
                target.properties.append(prop)
            else:
                # Deferred: applied to every resource once all
                # resources are known.
                properties_all.append(prop)
    # Apply 'all'-targeted properties to every known resource.
    for r in schema.all_resources():
        schema.resource(r).properties += properties_all
From an xsd file it fills the schema by creating needed Resource . The generateds idl_parser is used to parse ifmap statements in the xsd file .
315
34
2,035
def split_ls(func):
    """Decorator that splits a file list into chunks that fit within the
    Windows command-line length limit, calling *func* once per chunk and
    concatenating the results.

    :param func: method taking (self, files, silent, exclude_deleted).
    :returns: the wrapped method.
    """
    @wraps(func)
    def wrapper(self, files, silent=True, exclude_deleted=False):
        if not isinstance(files, (tuple, list)):
            files = [files]

        counter = 0
        index = 0
        results = []
        while files:
            if index >= len(files):
                # Remainder fits under the limit; process it and stop.
                results += func(self, files, silent, exclude_deleted)
                break

            length = len(str(files[index]))
            if length + counter > CHAR_LIMIT:
                # -- at our limit: flush everything before this file.
                # Always take at least one file, otherwise a single path
                # longer than CHAR_LIMIT made files[:0] an empty chunk
                # and the loop never terminated (original bug).
                split_at = index or 1
                runfiles = files[:split_at]
                files = files[split_at:]
                counter = 0
                index = 0
                results += func(self, runfiles, silent, exclude_deleted)
            else:
                index += 1
                counter += length

        return results
    return wrapper
Decorator to split files into manageable chunks as not to exceed the windows cmd limit
172
17
2,036
def __getVariables(self):
    """Parse P4 environment variables using `p4 set`.

    Populates self._port/_user/_client, preferring already-set values,
    then real environment variables, then `p4 set` output.
    """
    try:
        startupinfo = None
        if os.name == 'nt':
            # Prevent a console window from flashing up on Windows.
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        output = subprocess.check_output(['p4', 'set'], startupinfo=startupinfo)
        if six.PY3:
            output = str(output, 'utf8')
    except subprocess.CalledProcessError as err:
        LOGGER.error(err)
        return
    p4vars = {}
    for line in output.splitlines():
        if not line:
            continue
        try:
            k, v = line.split('=', 1)
        except ValueError:
            continue
        # `p4 set` suffixes values with their origin, e.g. " (config)";
        # keep only the value part.
        p4vars[k.strip()] = v.strip().split(' (')[0]
        if p4vars[k.strip()].startswith('(config'):
            # The value was only the origin marker; drop the entry.
            del p4vars[k.strip()]
    self._port = self._port or os.getenv('P4PORT', p4vars.get('P4PORT'))
    self._user = self._user or os.getenv('P4USER', p4vars.get('P4USER'))
    self._client = self._client or os.getenv('P4CLIENT', p4vars.get('P4CLIENT'))
Parses the P4 env vars using set p4
324
13
2,037
def client(self):
    """The client used in perforce queries, wrapped lazily in Client."""
    current = self._client
    if isinstance(current, six.string_types):
        current = Client(current, self)
        self._client = current
    return current
The client used in perforce queries
41
7
2,038
def status(self):
    """The status of the connection to perforce.

    Returns a ConnectionStatus value: INVALID_CLIENT when the server
    does not recognize the client spec, NO_AUTH when a login is
    required, OFFLINE when the server is unreachable, otherwise OK.
    """
    try:
        # -- Check client
        res = self.run(['info'])
        if res[0]['clientName'] == '*unknown*':
            return ConnectionStatus.INVALID_CLIENT
        # -- Trigger an auth error if not logged in
        self.run(['user', '-o'])
    except errors.CommandError as err:
        if 'password (P4PASSWD) invalid or unset' in str(err.args[0]):
            return ConnectionStatus.NO_AUTH
        if 'Connect to server failed' in str(err.args[0]):
            return ConnectionStatus.OFFLINE
    # NOTE(review): an unrecognized CommandError also falls through to
    # OK here -- confirm that is the intended behavior.
    return ConnectionStatus.OK
The status of the connection to perforce
148
8
2,039
def run(self, cmd, stdin=None, marshal_output=True, **kwargs):
    """Run a p4 command and return a list of dictionary records.

    :param cmd: argv list for p4 (string commands are rejected).
    :param stdin: optional text piped to the subprocess.
    :param marshal_output: when True, request marshalled (-G) python
        dicts from p4 and decode them; otherwise return raw output.
    :raises ValueError: when *cmd* is a string.
    :raises errors.CommandError: on p4 error records or stderr output.
    """
    records = []
    args = [self._executable, "-u", self._user, "-p", self._port]
    if self._client:
        args += ["-c", str(self._client)]
    if marshal_output:
        args.append('-G')
    if isinstance(cmd, six.string_types):
        raise ValueError('String commands are not supported, please use a list')
    args += cmd
    # Human-readable form, used in error reporting only.
    command = ' '.join(args)
    startupinfo = None
    if os.name == 'nt':
        # Prevent a console window from appearing on Windows.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    proc = subprocess.Popen(
        args,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        startupinfo=startupinfo,
        **kwargs
    )
    if stdin:
        proc.stdin.write(six.b(stdin))
    if marshal_output:
        try:
            while True:
                # p4 -G emits a stream of marshalled dicts until EOF.
                record = marshal.load(proc.stdout)
                if record.get(b'code', '') == b'error' and record[b'severity'] >= self._level:
                    proc.stdin.close()
                    proc.stdout.close()
                    raise errors.CommandError(record[b'data'], record, command)
                if isinstance(record, dict):
                    if six.PY2:
                        records.append(record)
                    else:
                        # Decode bytes keys/values to str on python 3.
                        records.append({str(k, 'utf8'): str(v) if isinstance(v, int) else str(v, 'utf8', errors='ignore') for k, v in record.items()})
        except EOFError:
            pass
        stdout, stderr = proc.communicate()
    else:
        records, stderr = proc.communicate()
    if stderr:
        raise errors.CommandError(stderr, command)
    return records
Runs a p4 command and returns a list of dictionary objects
467
13
2,040
def findChangelist(self, description=None):
    """Get or create a Changelist object matching *description*.

    Accepts None (the default changelist), a changelist number, or a
    description string, which is matched against pending changelists
    and created fresh when no match exists.
    """
    if description is None:
        change = Default(self)
    else:
        if isinstance(description, six.integer_types):
            # A changelist number was passed in; wrap it directly.
            change = Changelist(description, self)
        else:
            pending = self.run(['changes', '-l', '-s', 'pending', '-c', str(self._client), '-u', self._user])
            for cl in pending:
                if cl['desc'].strip() == description.strip():
                    LOGGER.debug('Changelist found: {}'.format(cl['change']))
                    change = Changelist(int(cl['change']), self)
                    break
            else:
                # for/else: no pending changelist matched the description.
                LOGGER.debug('No changelist found, creating one')
                change = Changelist.create(description, self)
            # NOTE(review): assumed to apply to both found and newly
            # created changelists -- confirm indentation upstream.
            change.client = self._client
            change.save()
    return change
Gets or creates a Changelist object with a description
211
12
2,041
def add(self, filename, change=None):
    """Open *filename* for add, optionally into *change*.

    :returns: the Revision for the newly opened file.
    :raises errors.RevisionError: when the file is outside the client
        path or the p4 command fails.
    """
    try:
        if not self.canAdd(filename):
            raise errors.RevisionError('File is not under client path')
        cmd = ['add']
        if change is not None:
            cmd += ['-c', str(change.change)]
        cmd.append(filename)
        self.run(cmd)
        data = self.run(['fstat', filename])[0]
    except errors.CommandError as err:
        LOGGER.debug(err)
        raise errors.RevisionError('File is not under client path')
    rev = Revision(data, self)
    if isinstance(change, Changelist):
        change.append(rev)
    return rev
Adds a new file to a changelist
157
8
2,042
def canAdd(self, filename):
    """Return True when *filename* can be added under the current client."""
    try:
        result = self.run(['add', '-n', '-t', 'text', filename])[0]
    except errors.CommandError as err:
        LOGGER.debug(err)
        return False
    if result.get('code') not in ('error', 'info'):
        return True
    LOGGER.warn('Unable to add {}: {}'.format(filename, result['data']))
    return False
Determines if a filename can be added to the depot under the current client
111
16
2,043
def query(self, files=True):
    """Query the depot for the current status of this changelist.

    Refreshes ``self._p4dict`` and, when *files* is True, rebuilds
    ``self._files`` as a list of Revision objects.
    """
    if self._change:
        cl = str(self._change)
        self._p4dict = {camel_case(k): v for k, v in six.iteritems(self._connection.run(['change', '-o', cl])[0])}
    if files:
        self._files = []
        if self._p4dict.get('status') == 'pending' or self._change == 0:
            # Pending (or default) changelist: list currently opened files.
            change = self._change or 'default'
            data = self._connection.run(['opened', '-c', str(change)])
            self._files = [Revision(r, self._connection) for r in data]
        else:
            # Submitted changelist: describe it and stat the depot files.
            data = self._connection.run(['describe', str(self._change)])[0]
            depotfiles = []
            for k, v in six.iteritems(data):
                if k.startswith('depotFile'):
                    depotfiles.append(v)
            self._files = self._connection.ls(depotfiles)
Queries the depot to get the current status of the changelist
251
13
2,044
def remove(self, rev, permanent=False):
    """Remove a revision from this changelist.

    Unless *permanent*, the revision is moved back to the default
    changelist.

    :raises TypeError: when *rev* is not a Revision.
    :raises ValueError: when *rev* is not part of this changelist.
    """
    if not isinstance(rev, Revision):
        raise TypeError('argument needs to be an instance of Revision')
    if rev not in self:
        raise ValueError('{} not in changelist'.format(rev))
    self._files.remove(rev)
    if not permanent:
        rev.changelist = self._connection.default
Removes a revision from this changelist
85
8
2,045
def revert(self, unchanged_only=False):
    """Revert every file in this changelist.

    :param unchanged_only: when True, only revert unchanged files (-a).
    :raises errors.ChangelistError: when already reverted.
    """
    if self._reverted:
        raise errors.ChangelistError('This changelist has been reverted')
    change = 'default' if self._change == 0 else self._change
    cmd = ['revert', '-c', str(change)]
    if unchanged_only:
        cmd.append('-a')
    depot_paths = [f.depotFile for f in self._files]
    if depot_paths:
        cmd += depot_paths
    self._connection.run(cmd)
    self._files = []
    self._reverted = True
Revert all files in this changelist
131
9
2,046
def submit(self):
    """Save any pending edits, then submit this changelist to the depot."""
    if self._dirty:
        self.save()
    cmd = ['submit', '-c', str(self._change)]
    self._connection.run(cmd, marshal_output=False)
Submits a changelist to the depot.
50
9
2,047
def delete(self):
    """Revert all files in this changelist, then delete it from perforce."""
    try:
        self.revert()
    except errors.ChangelistError:
        # Already reverted: nothing left to revert.
        pass
    self._connection.run(['change', '-d', str(self._change)])
Reverts all files in this changelist then deletes the changelist from perforce
48
18
2,048
def create(description='<Created by Python>', connection=None):
    """Create a new changelist and return its Changelist wrapper."""
    connection = connection or Connection()
    # Indent continuation lines so the change form keeps the whole
    # description in one field.
    description = description.replace('\n', '\n\t')
    form = NEW_FORMAT.format(client=str(connection.client),
                             description=description)
    result = connection.run(['change', '-i'], stdin=form,
                            marshal_output=False)
    return Changelist(int(result.split()[1]), connection)
Creates a new changelist
113
6
2,049
def query(self):
    """Re-run fstat for this file and repopulate the revision data."""
    depot_path = self._p4dict['depotFile']
    self._p4dict = self._connection.run(
        ['fstat', '-m', '1', depot_path])[0]
    self._head = HeadRevision(self._p4dict)
    self._filename = self.depotFile
Runs an fstat for this file and repopulates the data
78
14
2,050
def edit(self, changelist=0):
    """Check out the file (open for edit, or reopen into a changelist).

    :param changelist: 0 for the default changelist, a Changelist
        object, or a plain changelist number.
    """
    # Files already open for add/edit must be reopened instead.
    command = 'reopen' if self.action in ('add', 'edit') else 'edit'
    if int(changelist):
        # Accept either a Changelist object or a bare number: the
        # original unconditionally read `.change`, which raised
        # AttributeError when a plain int was passed.
        number = getattr(changelist, 'change', changelist)
        self._connection.run([command, '-c', str(number), self.depotFile])
    else:
        self._connection.run([command, self.depotFile])
    self.query()
Checks out the file
94
5
2,051
def lock(self, lock=True, changelist=0):
    """Lock or unlock the file, optionally within *changelist*.

    :param lock: True to lock, False to unlock.
    :param changelist: optional changelist number to operate in.
    """
    cmd = 'lock' if lock else 'unlock'
    if changelist:
        # str() the changelist: Connection.run joins argv with ' ' for
        # error reporting, which fails on raw ints (every sibling
        # method stringifies its changelist argument).
        self._connection.run([cmd, '-c', str(changelist), self.depotFile])
    else:
        self._connection.run([cmd, self.depotFile])
    self.query()
Locks or unlocks the file
78
6
2,052
def sync(self, force=False, safe=True, revision=0, changelist=0):
    """Sync the file, optionally to a specific revision or changelist.

    :param force: pass -f to force the sync.
    :param safe: pass -s for a safe sync.
    :param revision: sync to this revision number, when non-zero.
    :param changelist: sync to this changelist, when non-zero.
    """
    cmd = ['sync']
    if force:
        cmd.append('-f')
    if safe:
        cmd.append('-s')
    if revision:
        target = '{}#{}'.format(self.depotFile, revision)
    elif changelist:
        target = '{}@{}'.format(self.depotFile, changelist)
    else:
        target = self.depotFile
    cmd.append(target)
    self._connection.run(cmd)
    self.query()
Syncs the file at the current revision
132
8
2,053
def revert(self, unchanged=False):
    """Revert this file's changes (-a reverts only unchanged files)."""
    args = ['revert']
    if unchanged:
        args.append('-a')
    # Remember before the revert: reverted adds vanish from the depot.
    was_add = self.action == 'add'
    args.append(self.depotFile)
    self._connection.run(args)
    # A reverted move gets its original depot path back.
    if 'movedFile' in self._p4dict:
        self._p4dict['depotFile'] = self._p4dict['movedFile']
    if not was_add:
        self.query()
    if self._changelist:
        self._changelist.remove(self, permanent=True)
Reverts any file changes
132
6
2,054
def shelve(self, changelist=None):
    """Shelve the file; it must live in a numbered (non-default) changelist."""
    if changelist is None and self.changelist.description == 'default':
        raise errors.ShelveError(
            'Unabled to shelve files in the default changelist')
    args = ['shelve']
    if changelist:
        args += ['-c', str(changelist)]
    args.append(self.depotFile)
    self._connection.run(args)
    self.query()
Shelves the file if it is in a changelist
100
12
2,055
def delete(self, changelist=0):
    """Mark the file for delete, optionally within *changelist*."""
    args = ['delete']
    if changelist:
        args += ['-c', str(changelist)]
    args.append(self.depotFile)
    self._connection.run(args)
    self.query()
Marks the file for delete
59
6
2,056
def hash(self):
    """Digest of the current revision, fetched lazily via fstat -Ol."""
    if 'digest' not in self._p4dict:
        fstat = self._connection.run(
            ['fstat', '-m', '1', '-Ol', self.depotFile])
        self._p4dict = fstat[0]
    return self._p4dict['digest']
The hash value of the current revision
76
7
2,057
def view(self):
    """Parse the client's view lines into a list of FileSpec pairs."""
    specs = []
    for key, value in six.iteritems(self._p4dict):
        if not key.startswith('view'):
            continue
        match = RE_FILESPEC.search(value)
        if match:
            # Split "//depot/... //client/..." around the matched boundary.
            left = value[:match.end() - 1]
            right = value[match.end():]
            specs.append(FileSpec(left, right))
    return specs
A list of view specs
88
5
2,058
def stream(self):
    """Return the Stream this client is under, or None when not set."""
    name = self._p4dict.get('stream')
    if name:
        return Stream(name, self._connection)
Which stream if any the client is under
34
8
2,059
async def set_version(self, tp, params, version=None, elem=None):
    """Store the version for (tp, params) to the stream if not stored yet.

    Returns the effective version number recorded for the wrapped type,
    or TypeWrapper.ELEMENTARY_RES for unversioned elementary types.
    """
    self.registry.set_tr(None)
    tw = TypeWrapper(tp, params)
    if not tw.is_versioned():
        # Elementary types carry no version information at all.
        return TypeWrapper.ELEMENTARY_RES
    # If not in the DB, store to the archive at the current position
    if not self.version_db.is_versioned(tw):
        if version is None:
            version = self._cur_version(tw, elem)
        # Write a (0, version) varint pair into the output stream.
        await dump_uvarint(self.iobj, 0)
        await dump_uvarint(self.iobj, version)
        self.version_db.set_version(tw, 0, version)
    # get_version returns a pair; index 1 is the version number.
    return self.version_db.get_version(tw)[1]
Stores version to the stream if not stored yet
176
10
2,060
async def version(self, tp, params, version=None, elem=None):
    """Symmetric version handling: store when writing, fetch when reading."""
    if not self.writing:
        return await self.get_version(tp, params)
    return await self.set_version(tp, params, version, elem)
Symmetric version management
60
5
2,061
async def root_message(self, msg, msg_type=None):
    """Process archive headers, then the first (root) message entry."""
    # Archive header handling must happen before any message payload.
    await self.root()
    await self.message(msg, msg_type)
Root - level message . First entry in the archive . Archive headers processing
35
14
2,062
async def dump_message(self, msg, msg_type=None):
    """Write every field of *msg*, typed as *msg_type* (default: its class)."""
    spec_source = msg.__class__ if msg_type is None else msg_type
    for fld in spec_source.f_specs():
        await self.message_field(msg=msg, field=fld)
Dumps message to the writer .
67
7
2,063
async def load_message(self, msg_type, msg=None):
    """Read a message of *msg_type*, optionally into an existing *msg*."""
    if msg is None:
        msg = msg_type()
    # Prefer the explicit type's field specs; fall back to the instance's.
    spec = msg_type.f_specs() if msg_type else msg.__class__.f_specs()
    for fld in spec:
        await self.message_field(msg, fld)
    return msg
Loads message if the given type from the reader . Supports reading directly to existing message .
78
18
2,064
def contrail_error_handler(f):
    """Decorator that rewrites HttpError from the API server into a readable message."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except HttpError as e:
            # The server puts the useful text in `details`; swap it into
            # `message` so the rendered exception is meaningful.
            if e.details:
                e.message, e.details = e.details, e.message
            e.args = ("%s (HTTP %s)" % (e.message, e.http_status),)
            raise
    return wrapper
Handle HTTP errors returned by the API server
110
8
2,065
def make(self, host="localhost", port=8082, protocol="http",
         base_uri="", os_auth_type="http", **kwargs):
    """Build a session to the Contrail API server using a keystone auth plugin."""
    loader = loading.base.get_plugin_loader(os_auth_type)
    # Pull the plugin's own options (prefixed "os_") out of kwargs.
    plugin_opts = {}
    for opt in loader.get_options():
        key = 'os_%s' % opt.dest
        if key in kwargs:
            plugin_opts[opt.dest] = kwargs.pop(key)
    plugin = loader.load_from_options(**plugin_opts)
    return self.load_from_argparse_arguments(
        Namespace(**kwargs),
        host=host,
        port=port,
        protocol=protocol,
        base_uri=base_uri,
        auth=plugin)
Initialize a session to Contrail API server
178
9
2,066
def post_json(self, url, data, cls=None, **kwargs):
    """POST *data* as JSON to the api-server and return the decoded reply."""
    kwargs['data'] = to_json(data, cls=cls)
    kwargs['headers'] = self.default_headers
    response = self.post(url, **kwargs)
    return response.json()
POST data to the api - server
75
7
2,067
def put_json(self, url, data, cls=None, **kwargs):
    """PUT *data* as JSON to the api-server and return the decoded reply."""
    kwargs['data'] = to_json(data, cls=cls)
    kwargs['headers'] = self.default_headers
    response = self.put(url, **kwargs)
    return response.json()
PUT data to the api - server
75
7
2,068
def fqname_to_id(self, fq_name, type):
    """Resolve *fq_name* of resource *type* to its uuid."""
    payload = {"type": type, "fq_name": list(fq_name)}
    reply = self.post_json(self.make_url("/fqname-to-id"), payload)
    return reply["uuid"]
Return uuid for fq_name
75
8
2,069
def id_to_fqname(self, uuid, type=None):
    """Resolve *uuid* to its fq_name and type, validating *type* if given."""
    reply = self.post_json(self.make_url("/id-to-fqname"), {"uuid": uuid})
    reply['fq_name'] = FQName(reply['fq_name'])
    # API resource types are dash-separated; python-side names use '_'.
    if type is not None and not reply['type'].replace('_', '-') == type:
        raise HttpError('uuid %s not found for type %s' % (uuid, type),
                        http_status=404)
    return reply
Return fq_name and type for uuid
140
10
2,070
def add_kv_store(self, key, value):
    """Store *value* under *key* in the useragent key-value store."""
    payload = to_json({'operation': 'STORE', 'key': key, 'value': value})
    response = self.post(self.make_url("/useragent-kv"),
                         data=payload,
                         headers=self.default_headers)
    return response.text
Add a key - value store entry .
78
8
2,071
def remove_kv_store(self, key):
    """Delete the entry stored under *key* in the useragent key-value store."""
    payload = to_json({'operation': 'DELETE', 'key': key})
    response = self.post(self.make_url("/useragent-kv"),
                         data=payload,
                         headers=self.default_headers)
    return response.text
Remove a key - value store entry .
71
8
2,072
def canonical_ops(ops):
    """Return *ops* with duplicates removed, sorted by (entity, action)."""
    return sorted(set(ops), key=lambda op: (op.entity, op.action))
Returns the given operations array sorted with duplicates removed .
38
11
2,073
def _macaroon_id_ops(ops):
    """Group canonical *ops* by entity into protobuf Op messages for a MacaroonId."""
    grouped = []
    # Assumes ops are already canonicalized, so equal entities are adjacent.
    for entity, entity_group in itertools.groupby(ops, lambda op: op.entity):
        actions = [op.action for op in entity_group]
        grouped.append(id_pb2.Op(entity=entity, actions=actions))
    return grouped
Return operations suitable for serializing as part of a MacaroonId .
84
15
2,074
def macaroon(self, version, expiry, caveats, ops):
    """Mint a macaroon of *version* authorizing *ops*.

    At least one operation is required. A time-before first-party caveat
    for *expiry* is attached, followed by the given *caveats*.

    Raises ValueError when *ops* is empty.
    """
    if len(ops) == 0:
        raise ValueError('cannot mint a macaroon associated '
                         'with no operations')
    # Canonical order so equal op sets always produce the same id.
    ops = canonical_ops(ops)
    root_key, storage_id = self.root_keystore_for_ops(ops).root_key()
    id = self._new_macaroon_id(storage_id, expiry, ops)
    # Prefix the serialized id with the id-format version byte.
    id_bytes = six.int2byte(LATEST_VERSION) + id.SerializeToString()
    if macaroon_version(version) < MACAROON_V2:
        # The old macaroon format required valid text for the macaroon id,
        # so base64-encode it.
        id_bytes = raw_urlsafe_b64encode(id_bytes)
    m = Macaroon(
        root_key,
        id_bytes,
        self.location,
        version,
        self.namespace,
    )
    m.add_caveat(checkers.time_before_caveat(expiry), self.key, self.locator)
    m.add_caveats(caveats, self.key, self.locator)
    return m
Takes a macaroon with the given version from the oven associates it with the given operations and attaches the given caveats . There must be at least one operation specified . The macaroon will expire at the given time - a time_before first party caveat will be added with that time .
266
59
2,075
def ops_entity(self, ops):
    """Return a 'multi-' entity name representing all the given operations.

    The result is the same regardless of op ordering; assumes *ops* are
    canonicalized and non-empty.
    """
    # Hash the operations, removing duplicates as we go.
    digest = hashlib.sha256()
    for op in ops:
        digest.update('{}\n{}\n'.format(op.action, op.entity).encode())
    encoded = base64.urlsafe_b64encode(digest.digest()).decode('utf-8')
    return 'multi-' + encoded.rstrip('=')
Returns a new multi - op entity name string that represents all the given operations and caveats . It returns the same value regardless of the ordering of the operations . It assumes that the operations have been canonicalized and that there s at least one operation .
119
49
2,076
def macaroon_ops(self, macaroons):
    """Verify *macaroons* and return (ops, conditions).

    Satisfies the MacaroonOpStore protocol required by the Checker class:
    verifies the first macaroon's signature (using the rest as discharges),
    collects all caveat conditions without checking them, and resolves
    multi-op entities back to their stored operation lists.

    Raises ValueError for an empty list and VerificationError when the
    root key is missing or signature verification fails.
    """
    if len(macaroons) == 0:
        raise ValueError('no macaroons provided')
    storage_id, ops = _decode_macaroon_id(macaroons[0].identifier_bytes)
    root_key = self.root_keystore_for_ops(ops).get(storage_id)
    if root_key is None:
        raise VerificationError(
            'macaroon key not found in storage')
    v = Verifier()
    conditions = []

    def validator(condition):
        # Verify the macaroon's signature only. Don't check any of the
        # caveats yet but save them so that we can return them.
        conditions.append(condition)
        return True
    v.satisfy_general(validator)
    try:
        v.verify(macaroons[0], root_key, macaroons[1:])
    except Exception as exc:
        # Unfortunately pymacaroons doesn't control
        # the set of exceptions that can be raised here.
        # Possible candidates are:
        # pymacaroons.exceptions.MacaroonUnmetCaveatException
        # pymacaroons.exceptions.MacaroonInvalidSignatureException
        # ValueError
        # nacl.exceptions.CryptoError
        #
        # There may be others too, so just catch everything.
        raise six.raise_from(
            VerificationError(
                'verification failed: {}'.format(str(exc))),
            exc,
        )
    if (self.ops_store is not None
            and len(ops) == 1
            and ops[0].entity.startswith('multi-')):
        # It's a multi-op entity, so retrieve the actual operations
        # it's associated with.
        ops = self.ops_store.get_ops(ops[0].entity)
    return ops, conditions
This method makes the oven satisfy the MacaroonOpStore protocol required by the Checker class .
400
20
2,077
def extend(self, iterable):
    """Validate every item of *iterable*, then append them all to the list."""
    checked = self._ensure_iterable_is_valid(iterable)
    return super(Collection, self).extend(checked)
Extend the list by appending all the items in the given list .
36
15
2,078
def insert(self, index, value):
    """Validate *value*, then insert it at *index*."""
    checked = self._ensure_value_is_valid(value)
    return super(Collection, self).insert(index, checked)
Insert an item at a given position .
37
8
2,079
def _ensure_value_is_valid(self, value):
    """Return *value* if it matches the collection's value_type, else raise TypeError."""
    expected = self.__class__.value_type
    if not isinstance(value, expected):
        raise TypeError('{0} is not valid collection value, instance '
                        'of {1} required'.format(value, expected))
    return value
Ensure that value is a valid collection s value .
75
11
2,080
def container_elem_type(container_type, params):
    """Return the container's element type: params[0] when given, else the class default."""
    if params and params[0] is not None:
        return params[0]
    return container_type.ELEM_TYPE
Returns container element type
54
4
2,081
def is_valid(doi):
    """Return True iff *doi* is a canonical DOI (the regex matches it entirely)."""
    match = REGEX.match(doi)
    return match is not None and match.group(0) == doi
Check that a given DOI is a valid canonical DOI .
35
11
2,082
def get_oa_version(doi):
    """Return an open-access pdf URL for *doi* via the Dissemin API, or None."""
    try:
        response = requests.get("%s%s" % (DISSEMIN_API, doi))
        response.raise_for_status()
        payload = response.json()
        assert payload["status"] == "ok"
        return payload["paper"]["pdf_url"]
    except (AssertionError, ValueError, KeyError, RequestException):
        # Any network / schema problem means "no OA version found".
        return None
Get an OA version for a given DOI .
95
10
2,083
def get_oa_policy(doi):
    """Return the OA policy of the publication matching *doi*, or None."""
    try:
        response = requests.get("%s%s" % (DISSEMIN_API, doi))
        response.raise_for_status()
        payload = response.json()
        assert payload["status"] == "ok"
        matching = [pub for pub in payload["paper"]["publications"]
                    if pub["doi"] == doi]
        return matching[0]["policy"]
    except (AssertionError, ValueError, KeyError, RequestException,
            IndexError):
        # IndexError covers "no publication with this exact DOI".
        return None
Get OA policy for a given DOI .
122
9
2,084
def get_linked_version(doi):
    """Return the URL the DOI redirects to (from the Location header), or None."""
    try:
        response = requests.head(to_url(doi))
        return response.headers.get("location")
    except RequestException:
        return None
Get the original link behind the DOI .
42
8
2,085
def get_bibtex(doi):
    """Fetch a BibTeX entry for *doi* via DOI content negotiation, or None."""
    try:
        response = requests.get(to_url(doi),
                                headers={"accept": "application/x-bibtex"})
        response.raise_for_status()
        # Some resolvers ignore the accept header; verify the payload type.
        assert response.headers.get("content-type") == "application/x-bibtex"
        return response.text
    except (RequestException, AssertionError):
        return None
Get a BibTeX entry for a given DOI .
92
10
2,086
def _configure_logging(self, logger_dict=None):
    """Configure ``self.log``.

    With *logger_dict* (typically loaded from a configuration file) the
    dict is handed to ``logging.config.dictConfig``; otherwise a default
    WARNING-level stdout handler is installed.
    """
    self.log.debug("Configure logging")
    # Let's be sure, that for our log no handlers are registered anymore.
    # Iterate over a copy: removeHandler() mutates self.log.handlers, and
    # removing while iterating the live list silently skips handlers.
    for handler in list(self.log.handlers):
        self.log.removeHandler(handler)
    if logger_dict is None:
        self.log.debug("No logger dictionary defined. Doing default logger configuration")
        formatter = logging.Formatter(
            "%(name)s - %(asctime)s - [%(levelname)s] - %(module)s - %(message)s")
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setLevel(logging.WARNING)
        stream_handler.setFormatter(formatter)
        self.log.addHandler(stream_handler)
        self.log.setLevel(logging.WARNING)
    else:
        self.log.debug("Logger dictionary defined. Loading dictConfig for logging")
        logging.config.dictConfig(logger_dict)
        self.log.debug("dictConfig loaded")
Configures the logging module with a given dictionary which in most cases was loaded from a configuration file .
233
20
2,087
async def _dump_message_field(self, writer, msg, field, fvalue=None):
    """Dump one field of *msg* per its spec; *fvalue* overrides the attribute."""
    fname, ftype, params = field[0], field[1], field[2:]
    if fvalue is None:
        fvalue = getattr(msg, fname, None)
    await self.dump_field(writer, fvalue, ftype, params)
Dumps a message field to the writer . Field is defined by the message field specification .
85
18
2,088
async def _load_message_field(self, reader, msg, field):
    """Load one field per its spec directly into *msg* via a field reference."""
    fname, ftype, params = field[0], field[1], field[2:]
    # eref(msg, fname) lets load_field write straight into the message.
    await self.load_field(reader, ftype, params, eref(msg, fname))
Loads message field from the reader . Field is defined by the message field specification . Returns loaded value supports field reference .
65
24
2,089
def start(self, message):
    """Start the timer and log *message*.

    Uses time.perf_counter(): time.clock() was deprecated in 3.3 and
    removed in Python 3.8, and perf_counter() is the documented
    replacement for interval timing.
    """
    self._start = time.perf_counter()
    VSGLogger.info("{0:<20} - Started".format(message))
Manually starts timer with the message .
40
8
2,090
def stop(self, message):
    """Stop the timer and log *message* with the elapsed seconds.

    Uses time.perf_counter() to match start(); time.clock() was removed
    in Python 3.8.
    """
    self._stop = time.perf_counter()
    elapsed = self._stop - self._start
    VSGLogger.info("{0:<20} - Finished [{1}s]".format(message, self.pprint(elapsed)))
Manually stops timer with the message .
61
8
2,091
def get_bibtex(identifier):
    """Dispatch BibTeX fetching to the libbmc submodule for the identifier type.

    *identifier* is a (type, id) pair; returns None for unknown types or
    when the submodule is not loaded.
    """
    identifier_type, identifier_id = identifier
    if identifier_type not in __valid_identifiers__:
        return None
    # Dynamically call the ``get_bibtex`` method from the associated module.
    module = sys.modules.get("libbmc.%s" % (identifier_type,), None)
    if module is None:
        return None
    return getattr(module, "get_bibtex")(identifier_id)
Try to fetch BibTeX from a found identifier .
104
10
2,092
def initialize(self, *args, **kwargs):
    """Parse the request body as JSON only when the Content-Type header says so."""
    super(JSONHandler, self).initialize(*args, **kwargs)
    content_type = self.request.headers.get('Content-Type', '')
    if 'application/json' in content_type.lower():
        self._parse_json_body_arguments()
Only try to parse as JSON if the JSON content type header is set .
82
15
2,093
def get_plaintext_citations(bibtex):
    """Parse BibTeX (a file path or raw text) into plaintext citation strings."""
    parser = BibTexParser()
    parser.customization = convert_to_unicode
    # Accept either a path to a .bib file or the BibTeX content itself.
    if os.path.isfile(bibtex):
        with open(bibtex) as fh:
            database = bibtexparser.load(fh, parser=parser)
    else:
        database = bibtexparser.loads(bibtex, parser=parser)
    # Render every entry as a plaintext citation.
    return [bibentry_as_plaintext(entry) for entry in database.entries]
Parse a BibTeX file to get a clean list of plaintext citations .
149
16
2,094
def init(filename=ConfigPath):
    """Load INI configuration from *filename* into this module's attributes.

    *filename* may carry an optional ``path:section`` suffix selecting the
    section to read (default DEFAULT). Values are parsed as JSON where
    possible, falling back to raw text. Errors are logged, not raised.

    NOTE(review): uses ``StringIO.StringIO`` and ``parser.readfp`` -- this
    is Python 2 code; confirm before running under Python 3.
    """
    section, parts = "DEFAULT", filename.rsplit(":", 1)
    # "path:section" form: only honored when the path part is a real file.
    if len(parts) > 1 and os.path.isfile(parts[0]):
        filename, section = parts
    if not os.path.isfile(filename):
        return
    vardict, parser = globals(), configparser.RawConfigParser()
    parser.optionxform = str  # Force case-sensitivity on names
    try:
        def parse_value(raw):
            try:
                return json.loads(raw)  # Try to interpret as JSON
            except ValueError:
                return raw  # JSON failed, fall back to raw
        txt = open(filename).read()
        # Add DEFAULT section if none present
        if not re.search("\\[\\w+\\]", txt):
            txt = "[DEFAULT]\n" + txt
        parser.readfp(StringIO.StringIO(txt), filename)
        # Copy every parsed option straight into the module namespace.
        for k, v in parser.items(section):
            vardict[k] = parse_value(v)
    except Exception:
        logging.warn("Error reading config from %s.", filename, exc_info=True)
Loads INI configuration into this module s attributes .
257
11
2,095
def save(filename=ConfigPath):
    """Write this module's changed attributes to *filename* as INI.

    Only JSON-serializable module attributes that differ from their
    defaults are written; when nothing changed, any existing config file
    is deleted instead. Errors are logged, not raised.

    NOTE(review): writes str to a file opened "wb" and relies on
    ``basestring`` -- Python 2 code; confirm before running under 3.
    """
    default_values = defaults()
    parser = configparser.RawConfigParser()
    parser.optionxform = str  # Force case-sensitivity on names
    try:
        save_types = basestring, int, float, tuple, list, dict, type(None)
        for k, v in sorted(globals().items()):
            # Skip non-storable, private, and unchanged-from-default values.
            if not isinstance(v, save_types) or k.startswith("_") \
                    or default_values.get(k, parser) == v:
                continue  # for k, v
            try:
                parser.set("DEFAULT", k, json.dumps(v))
            except Exception:
                pass
        if parser.defaults():
            with open(filename, "wb") as f:
                f.write("# %s %s configuration written on %s.\n" %
                        (Title, Version,
                         datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
                parser.write(f)
        else:  # Nothing to write: delete configuration file
            try:
                os.unlink(filename)
            except Exception:
                pass
    except Exception:
        logging.warn("Error writing config to %s.", filename, exc_info=True)
Saves this module s changed attributes to INI configuration .
276
12
2,096
def defaults(values={}):
    """Return this module's storable attributes, assembled once.

    The mutable default argument is used deliberately as a call-level
    cache: the dict is filled on the first call and returned as-is after.
    """
    if values:
        return values
    save_types = basestring, int, float, tuple, list, dict, type(None)
    for name, value in globals().items():
        if isinstance(value, save_types) and not name.startswith("_"):
            values[name] = value
    return values
Returns a once - assembled dict of this module s storable attributes .
80
14
2,097
def fix_pdf(pdf_file, destination):
    """Fix a malformed PDF by dropping any data after the first %%EOF marker.

    The truncated copy is built in a temporary file and then copied to
    *destination*, so *destination* may safely equal *pdf_file*.

    The original left the NamedTemporaryFile handle open (fd released only
    at GC) and reopened it by name outside the object's lifetime; the
    ``with`` block below scopes the temp file properly and guarantees
    cleanup.
    """
    with tempfile.NamedTemporaryFile() as tmp:
        with open(tmp.name, 'wb') as output, open(pdf_file, "rb") as fh:
            for line in fh:
                output.write(line)
                if b'%%EOF' in line:
                    # Anything past the first EOF marker is garbage.
                    break
        shutil.copy(tmp.name, destination)
Fix malformed pdf files when data are present after %%EOF
86
13
2,098
def tearpage_backend(filename, teared_pages=None):
    """Rewrite *filename* in place, dropping the pages listed in *teared_pages*.

    *teared_pages* is a list of 0-based page indices (default: [0], the
    first page). The pdf is copied to a temp file first; malformed pdfs
    are repaired via fix_pdf() before a second read attempt.
    """
    # Handle default argument
    if teared_pages is None:
        teared_pages = [0]
    # Copy the pdf to a tmp file
    with tempfile.NamedTemporaryFile() as tmp:
        # Copy the input file to tmp
        shutil.copy(filename, tmp.name)
        # Read the copied pdf
        # TODO: Use with syntax
        try:
            input_file = PdfFileReader(open(tmp.name, 'rb'))
        except PdfReadError:
            # Repair data-after-%%EOF corruption, then retry.
            fix_pdf(filename, tmp.name)
            input_file = PdfFileReader(open(tmp.name, 'rb'))
        # Seek for the number of pages
        num_pages = input_file.getNumPages()
        # Write pages excepted the first one
        output_file = PdfFileWriter()
        for i in range(num_pages):
            if i in teared_pages:
                continue
            output_file.addPage(input_file.getPage(i))
        tmp.close()
        outputStream = open(filename, "wb")
        output_file.write(outputStream)
Copy filename to a tempfile write pages to filename except the teared one .
245
16
2,099
def tearpage_needed(bibtex):
    """Return the list of pages to tear for *bibtex*'s journal ([] if none)."""
    journal = bibtex.get("journal", "").lower()
    for publisher in BAD_JOURNALS:
        if publisher in journal:
            # Known bad journal: these pages should be removed.
            return BAD_JOURNALS[publisher]
    # No bad journal matched: nothing to tear.
    return []
Check whether a given paper needs some pages to be teared or not .
76
15