idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
238,200
def getSolutions ( self ) : domains , constraints , vconstraints = self . _getArgs ( ) if not domains : return [ ] return self . _solver . getSolutions ( domains , constraints , vconstraints )
Find and return all solutions to the problem
51
8
238,201
def getSolutionIter ( self ) : domains , constraints , vconstraints = self . _getArgs ( ) if not domains : return iter ( ( ) ) return self . _solver . getSolutionIter ( domains , constraints , vconstraints )
Return an iterator to the solutions of the problem
54
9
238,202
def getSolution ( self , domains , constraints , vconstraints ) : msg = "%s is an abstract class" % self . __class__ . __name__ raise NotImplementedError ( msg )
Return one solution for the given problem
44
7
238,203
def resetState ( self ) : self . extend ( self . _hidden ) del self . _hidden [ : ] del self . _states [ : ]
Reset to the original domain state including all possible values
32
11
238,204
def popState ( self ) : diff = self . _states . pop ( ) - len ( self ) if diff : self . extend ( self . _hidden [ - diff : ] ) del self . _hidden [ - diff : ]
Restore domain state from the top of the stack
49
10
238,205
def hideValue ( self , value ) : list . remove ( self , value ) self . _hidden . append ( value )
Hide the given value from the domain
26
7
238,206
def preProcess ( self , variables , domains , constraints , vconstraints ) : if len ( variables ) == 1 : variable = variables [ 0 ] domain = domains [ variable ] for value in domain [ : ] : if not self ( variables , domains , { variable : value } ) : domain . remove ( value ) constraints . remove ( ( self , variables ) ) vconstraints [ variable ] . remove ( ( self , variables ) )
Preprocess variable domains
93
4
238,207
def forwardCheck ( self , variables , domains , assignments , _unassigned = Unassigned ) : unassignedvariable = _unassigned for variable in variables : if variable not in assignments : if unassignedvariable is _unassigned : unassignedvariable = variable else : break else : if unassignedvariable is not _unassigned : # Remove from the unassigned variable domain's all # values which break our variable's constraints. domain = domains [ unassignedvariable ] if domain : for value in domain [ : ] : assignments [ unassignedvariable ] = value if not self ( variables , domains , assignments ) : domain . hideValue ( value ) del assignments [ unassignedvariable ] if not domain : return False return True
Helper method for generic forward checking
159
6
238,208
def block_view ( request ) : blocked_ip_list = get_blocked_ips ( ) blocked_username_list = get_blocked_usernames ( ) context = { 'blocked_ip_list' : blocked_ip_list , 'blocked_username_list' : blocked_username_list } return render ( request , 'defender/admin/blocks.html' , context )
List the blocked IP and Usernames
89
8
238,209
def get_ip ( request ) : if config . BEHIND_REVERSE_PROXY : ip_address = request . META . get ( config . REVERSE_PROXY_HEADER , '' ) ip_address = ip_address . split ( "," , 1 ) [ 0 ] . strip ( ) if ip_address == '' : ip_address = get_ip_address_from_request ( request ) else : ip_address = get_ip_address_from_request ( request ) return ip_address
get the ip address from the request
114
7
238,210
def get_blocked_ips ( ) : if config . DISABLE_IP_LOCKOUT : # There are no blocked IP's since we disabled them. return [ ] key = get_ip_blocked_cache_key ( "*" ) key_list = [ redis_key . decode ( 'utf-8' ) for redis_key in REDIS_SERVER . keys ( key ) ] return strip_keys ( key_list )
get a list of blocked ips from redis
97
10
238,211
def get_blocked_usernames ( ) : if config . DISABLE_USERNAME_LOCKOUT : # There are no blocked usernames since we disabled them. return [ ] key = get_username_blocked_cache_key ( "*" ) key_list = [ redis_key . decode ( 'utf-8' ) for redis_key in REDIS_SERVER . keys ( key ) ] return strip_keys ( key_list )
get a list of blocked usernames from redis
101
11
238,212
def increment_key ( key ) : pipe = REDIS_SERVER . pipeline ( ) pipe . incr ( key , 1 ) if config . COOLOFF_TIME : pipe . expire ( key , config . COOLOFF_TIME ) new_value = pipe . execute ( ) [ 0 ] return new_value
given a key increment the value
67
6
238,213
def username_from_request ( request ) : if config . USERNAME_FORM_FIELD in request . POST : return request . POST [ config . USERNAME_FORM_FIELD ] [ : 255 ] return None
unloads username from default POST request
46
7
238,214
def get_user_attempts ( request , get_username = get_username_from_request , username = None ) : ip_address = get_ip ( request ) username = lower_username ( username or get_username ( request ) ) # get by IP ip_count = REDIS_SERVER . get ( get_ip_attempt_cache_key ( ip_address ) ) if not ip_count : ip_count = 0 ip_count = int ( ip_count ) # get by username username_count = REDIS_SERVER . get ( get_username_attempt_cache_key ( username ) ) if not username_count : username_count = 0 username_count = int ( username_count ) # return the larger of the two. return max ( ip_count , username_count )
Returns number of access attempts for this ip username
176
9
238,215
def block_ip ( ip_address ) : if not ip_address : # no reason to continue when there is no ip return if config . DISABLE_IP_LOCKOUT : # no need to block, we disabled it. return key = get_ip_blocked_cache_key ( ip_address ) if config . COOLOFF_TIME : REDIS_SERVER . set ( key , 'blocked' , config . COOLOFF_TIME ) else : REDIS_SERVER . set ( key , 'blocked' ) send_ip_block_signal ( ip_address )
given the ip block it
128
5
238,216
def block_username ( username ) : if not username : # no reason to continue when there is no username return if config . DISABLE_USERNAME_LOCKOUT : # no need to block, we disabled it. return key = get_username_blocked_cache_key ( username ) if config . COOLOFF_TIME : REDIS_SERVER . set ( key , 'blocked' , config . COOLOFF_TIME ) else : REDIS_SERVER . set ( key , 'blocked' ) send_username_block_signal ( username )
given the username block it .
121
6
238,217
def record_failed_attempt ( ip_address , username ) : # increment the failed count, and get current number ip_block = False if not config . DISABLE_IP_LOCKOUT : # we only want to increment the IP if this is disabled. ip_count = increment_key ( get_ip_attempt_cache_key ( ip_address ) ) # if over the limit, add to block if ip_count > config . IP_FAILURE_LIMIT : block_ip ( ip_address ) ip_block = True user_block = False if username and not config . DISABLE_USERNAME_LOCKOUT : user_count = increment_key ( get_username_attempt_cache_key ( username ) ) # if over the limit, add to block if user_count > config . USERNAME_FAILURE_LIMIT : block_username ( username ) user_block = True # if we have this turned on, then there is no reason to look at ip_block # we will just look at user_block, and short circut the result since # we don't need to continue. if config . DISABLE_IP_LOCKOUT : # if user_block is True, it means it was blocked # we need to return False return not user_block if config . DISABLE_USERNAME_LOCKOUT : # The same as DISABLE_IP_LOCKOUT return not ip_block # we want to make sure both the IP and user is blocked before we # return False # this is mostly used when a lot of your users are using proxies, # and you don't want one user to block everyone on that one IP. if config . LOCKOUT_BY_IP_USERNAME : # both ip_block and user_block need to be True in order # to return a False. return not ( ip_block and user_block ) # if any blocks return False, no blocks. return True return not ( ip_block or user_block )
record the failed login attempt if over limit return False if not over limit return True
424
16
238,218
def unblock_ip ( ip_address , pipe = None ) : do_commit = False if not pipe : pipe = REDIS_SERVER . pipeline ( ) do_commit = True if ip_address : pipe . delete ( get_ip_attempt_cache_key ( ip_address ) ) pipe . delete ( get_ip_blocked_cache_key ( ip_address ) ) if do_commit : pipe . execute ( )
unblock the given IP
95
5
238,219
def unblock_username ( username , pipe = None ) : do_commit = False if not pipe : pipe = REDIS_SERVER . pipeline ( ) do_commit = True if username : pipe . delete ( get_username_attempt_cache_key ( username ) ) pipe . delete ( get_username_blocked_cache_key ( username ) ) if do_commit : pipe . execute ( )
unblock the given Username
87
5
238,220
def reset_failed_attempts ( ip_address = None , username = None ) : pipe = REDIS_SERVER . pipeline ( ) unblock_ip ( ip_address , pipe = pipe ) unblock_username ( username , pipe = pipe ) pipe . execute ( )
reset the failed attempts for these ip s and usernames
60
12
238,221
def lockout_response ( request ) : if config . LOCKOUT_TEMPLATE : context = { 'cooloff_time_seconds' : config . COOLOFF_TIME , 'cooloff_time_minutes' : config . COOLOFF_TIME / 60 , 'failure_limit' : config . FAILURE_LIMIT , } return render ( request , config . LOCKOUT_TEMPLATE , context ) if config . LOCKOUT_URL : return HttpResponseRedirect ( config . LOCKOUT_URL ) if config . COOLOFF_TIME : return HttpResponse ( "Account locked: too many login attempts. " "Please try again later." ) else : return HttpResponse ( "Account locked: too many login attempts. " "Contact an admin to unlock your account." )
if we are locked out here is the response
178
9
238,222
def is_user_already_locked ( username ) : if username is None : return False if config . DISABLE_USERNAME_LOCKOUT : return False return REDIS_SERVER . get ( get_username_blocked_cache_key ( username ) )
Is this username already locked?
57
6
238,223
def is_source_ip_already_locked ( ip_address ) : if ip_address is None : return False if config . DISABLE_IP_LOCKOUT : return False return REDIS_SERVER . get ( get_ip_blocked_cache_key ( ip_address ) )
Is this IP already locked?
64
6
238,224
def is_already_locked ( request , get_username = get_username_from_request , username = None ) : user_blocked = is_user_already_locked ( username or get_username ( request ) ) ip_blocked = is_source_ip_already_locked ( get_ip ( request ) ) if config . LOCKOUT_BY_IP_USERNAME : # if both this IP and this username are present the request is blocked return ip_blocked and user_blocked return ip_blocked or user_blocked
Parse the username & IP from the request and see if it s already locked .
121
17
238,225
def check_request ( request , login_unsuccessful , get_username = get_username_from_request , username = None ) : ip_address = get_ip ( request ) username = username or get_username ( request ) if not login_unsuccessful : # user logged in -- forget the failed attempts reset_failed_attempts ( ip_address = ip_address , username = username ) return True else : # add a failed attempt for this user return record_failed_attempt ( ip_address , username )
check the request and process results
111
6
238,226
def add_login_attempt_to_db ( request , login_valid , get_username = get_username_from_request , username = None ) : if not config . STORE_ACCESS_ATTEMPTS : # If we don't want to store in the database, then don't proceed. return username = username or get_username ( request ) user_agent = request . META . get ( 'HTTP_USER_AGENT' , '<unknown>' ) [ : 255 ] ip_address = get_ip ( request ) http_accept = request . META . get ( 'HTTP_ACCEPT' , '<unknown>' ) path_info = request . META . get ( 'PATH_INFO' , '<unknown>' ) if config . USE_CELERY : from . tasks import add_login_attempt_task add_login_attempt_task . delay ( user_agent , ip_address , username , http_accept , path_info , login_valid ) else : store_login_attempt ( user_agent , ip_address , username , http_accept , path_info , login_valid )
Create a record for the login attempt If using celery call celery task if not call the method normally
249
21
238,227
def add_login_attempt_task ( user_agent , ip_address , username , http_accept , path_info , login_valid ) : store_login_attempt ( user_agent , ip_address , username , http_accept , path_info , login_valid )
Create a record for the login attempt
62
7
238,228
def store_login_attempt ( user_agent , ip_address , username , http_accept , path_info , login_valid ) : AccessAttempt . objects . create ( user_agent = user_agent , ip_address = ip_address , username = username , http_accept = http_accept , path_info = path_info , login_valid = login_valid , )
Store the login attempt to the db .
83
8
238,229
def get_redis_connection ( ) : if config . MOCK_REDIS : # pragma: no cover import mockredis return mockredis . mock_strict_redis_client ( ) # pragma: no cover elif config . DEFENDER_REDIS_NAME : # pragma: no cover try : cache = caches [ config . DEFENDER_REDIS_NAME ] except InvalidCacheBackendError : raise KeyError ( INVALID_CACHE_ERROR_MSG . format ( config . DEFENDER_REDIS_NAME ) ) # every redis backend implement it own way to get the low level client try : # redis_cache.RedisCache case (django-redis-cache package) return cache . get_master_client ( ) except AttributeError : # django_redis.cache.RedisCache case (django-redis package) return cache . client . get_client ( True ) else : # pragma: no cover redis_config = parse_redis_url ( config . DEFENDER_REDIS_URL ) return redis . StrictRedis ( host = redis_config . get ( 'HOST' ) , port = redis_config . get ( 'PORT' ) , db = redis_config . get ( 'DB' ) , password = redis_config . get ( 'PASSWORD' ) , ssl = redis_config . get ( 'SSL' ) )
Get the redis connection if not using mock
323
9
238,230
def parse_redis_url ( url ) : # create config with some sane defaults redis_config = { "DB" : 0 , "PASSWORD" : None , "HOST" : "localhost" , "PORT" : 6379 , "SSL" : False } if not url : return redis_config url = urlparse . urlparse ( url ) # Remove query strings. path = url . path [ 1 : ] path = path . split ( '?' , 2 ) [ 0 ] if path : redis_config . update ( { "DB" : int ( path ) } ) if url . password : redis_config . update ( { "PASSWORD" : url . password } ) if url . hostname : redis_config . update ( { "HOST" : url . hostname } ) if url . port : redis_config . update ( { "PORT" : int ( url . port ) } ) if url . scheme in [ 'https' , 'rediss' ] : redis_config . update ( { "SSL" : True } ) return redis_config
Parses a redis URL .
238
8
238,231
def handle ( self , * * options ) : print ( "Starting clean up of django-defender table" ) now = timezone . now ( ) cleanup_delta = timedelta ( hours = config . ACCESS_ATTEMPT_EXPIRATION ) min_attempt_time = now - cleanup_delta attempts_to_clean = AccessAttempt . objects . filter ( attempt_time__lt = min_attempt_time , ) attempts_to_clean_count = attempts_to_clean . count ( ) attempts_to_clean . delete ( ) print ( "Finished. Removed {0} AccessAttempt entries." . format ( attempts_to_clean_count ) )
Removes any entries in the AccessAttempt that are older than your DEFENDER_ACCESS_ATTEMPT_EXPIRATION config default 24 HOURS .
148
33
238,232
def connection ( self , shareable = True ) : if shareable and self . _maxshared : self . _lock . acquire ( ) try : while ( not self . _shared_cache and self . _maxconnections and self . _connections >= self . _maxconnections ) : self . _wait_lock ( ) if len ( self . _shared_cache ) < self . _maxshared : # shared cache is not full, get a dedicated connection try : # first try to get it from the idle cache con = self . _idle_cache . pop ( 0 ) except IndexError : # else get a fresh connection con = self . steady_connection ( ) else : con . _ping_check ( ) # check this connection con = SharedDBConnection ( con ) self . _connections += 1 else : # shared cache full or no more connections allowed self . _shared_cache . sort ( ) # least shared connection first con = self . _shared_cache . pop ( 0 ) # get it while con . con . _transaction : # do not share connections which are in a transaction self . _shared_cache . insert ( 0 , con ) self . _wait_lock ( ) self . _shared_cache . sort ( ) con = self . _shared_cache . pop ( 0 ) con . con . _ping_check ( ) # check the underlying connection con . share ( ) # increase share of this connection # put the connection (back) into the shared cache self . _shared_cache . append ( con ) self . _lock . notify ( ) finally : self . _lock . release ( ) con = PooledSharedDBConnection ( self , con ) else : # try to get a dedicated connection self . _lock . acquire ( ) try : while ( self . _maxconnections and self . _connections >= self . _maxconnections ) : self . _wait_lock ( ) # connection limit not reached, get a dedicated connection try : # first try to get it from the idle cache con = self . _idle_cache . pop ( 0 ) except IndexError : # else get a fresh connection con = self . steady_connection ( ) else : con . _ping_check ( ) # check connection con = PooledDedicatedDBConnection ( self , con ) self . _connections += 1 finally : self . _lock . release ( ) return con
Get a steady cached DB - API 2 connection from the pool .
516
13
238,233
def unshare ( self , con ) : self . _lock . acquire ( ) try : con . unshare ( ) shared = con . shared if not shared : # connection is idle, try : # so try to remove it self . _shared_cache . remove ( con ) # from shared cache except ValueError : pass # pool has already been closed finally : self . _lock . release ( ) if not shared : # connection has become idle, self . cache ( con . con )
Decrease the share of a connection in the shared cache .
101
12
238,234
def close ( self ) : # Instead of actually closing the connection, # unshare it and/or return it to the pool. if self . _con : self . _pool . unshare ( self . _shared_con ) self . _shared_con = self . _con = None
Close the pooled shared connection .
61
6
238,235
def steady_connection ( self ) : return connect ( self . _creator , self . _maxusage , self . _setsession , self . _failures , self . _ping , self . _closeable , * self . _args , * * self . _kwargs )
Get a steady non - persistent DB - API 2 connection .
59
12
238,236
def connection ( self , shareable = False ) : try : con = self . thread . connection except AttributeError : con = self . steady_connection ( ) if not con . threadsafety ( ) : raise NotSupportedError ( "Database module is not thread-safe." ) self . thread . connection = con con . _ping_check ( ) return con
Get a steady persistent DB - API 2 connection .
75
10
238,237
def steady_connection ( self ) : return SteadyPgConnection ( self . _maxusage , self . _setsession , self . _closeable , * self . _args , * * self . _kwargs )
Get a steady non - persistent PyGreSQL connection .
46
11
238,238
def connection ( self ) : try : con = self . thread . connection except AttributeError : con = self . steady_connection ( ) self . thread . connection = con return con
Get a steady persistent PyGreSQL connection .
38
9
238,239
def versionString ( version ) : ver = list ( map ( str , version ) ) numbers , rest = ver [ : 2 if ver [ 2 ] == '0' else 3 ] , ver [ 3 : ] return '.' . join ( numbers ) + '-' . join ( rest )
Create version string .
60
4
238,240
def steady_connection ( self ) : return SteadyPgConnection ( self . _maxusage , self . _setsession , True , * self . _args , * * self . _kwargs )
Get a steady unpooled PostgreSQL connection .
42
10
238,241
def connection ( self ) : if self . _connections : if not self . _connections . acquire ( self . _blocking ) : raise TooManyConnections try : con = self . _cache . get ( 0 ) except Empty : con = self . steady_connection ( ) return PooledPgConnection ( self , con )
Get a steady cached PostgreSQL connection from the pool .
69
11
238,242
def cache ( self , con ) : try : if self . _reset == 2 : con . reset ( ) # reset the connection completely else : if self . _reset or con . _transaction : try : con . rollback ( ) # rollback a possible transaction except Exception : pass self . _cache . put ( con , 0 ) # and then put it back into the cache except Full : con . close ( ) if self . _connections : self . _connections . release ( )
Put a connection back into the pool cache .
104
9
238,243
def reopen ( self ) : # If the connection is already back in the pool, # get another connection from the pool, # otherwise reopen the underlying connection. if self . _con : self . _con . reopen ( ) else : self . _con = self . _pool . connection ( )
Reopen the pooled connection .
61
6
238,244
def reopen ( self ) : try : self . _con . reopen ( ) except Exception : if self . _transcation : self . _transaction = False try : self . _con . query ( 'rollback' ) except Exception : pass else : self . _transaction = False self . _closed = False self . _setsession ( ) self . _usage = 0
Reopen the tough connection .
79
6
238,245
def reset ( self ) : try : self . _con . reset ( ) self . _transaction = False self . _setsession ( ) self . _usage = 0 except Exception : try : self . reopen ( ) except Exception : try : self . rollback ( ) except Exception : pass
Reset the tough connection .
61
6
238,246
def _get_tough_method ( self , method ) : def tough_method ( * args , * * kwargs ) : transaction = self . _transaction if not transaction : try : # check whether connection status is bad if not self . _con . db . status : raise AttributeError if self . _maxusage : # or connection used too often if self . _usage >= self . _maxusage : raise AttributeError except Exception : self . reset ( ) # then reset the connection try : result = method ( * args , * * kwargs ) # try connection method except Exception : # error in query if transaction : # inside a transaction self . _transaction = False raise # propagate the error elif self . _con . db . status : # if it was not a connection problem raise # then propagate the error else : # otherwise self . reset ( ) # reset the connection result = method ( * args , * * kwargs ) # and try one more time self . _usage += 1 return result return tough_method
Return a tough version of a connection class method .
220
10
238,247
def connect ( creator , maxusage = None , setsession = None , failures = None , ping = 1 , closeable = True , * args , * * kwargs ) : return SteadyDBConnection ( creator , maxusage , setsession , failures , ping , closeable , * args , * * kwargs )
A tough version of the connection constructor of a DB - API 2 module .
71
15
238,248
def _create ( self ) : con = self . _creator ( * self . _args , * * self . _kwargs ) try : try : if self . _dbapi . connect != self . _creator : raise AttributeError except AttributeError : # try finding the DB-API 2 module via the connection itself try : mod = con . __module__ except AttributeError : mod = None while mod : try : self . _dbapi = sys . modules [ mod ] if not callable ( self . _dbapi . connect ) : raise AttributeError except ( AttributeError , KeyError ) : pass else : break i = mod . rfind ( '.' ) if i < 0 : mod = None else : mod = mod [ : i ] else : try : mod = con . OperationalError . __module__ except AttributeError : mod = None while mod : try : self . _dbapi = sys . modules [ mod ] if not callable ( self . _dbapi . connect ) : raise AttributeError except ( AttributeError , KeyError ) : pass else : break i = mod . rfind ( '.' ) if i < 0 : mod = None else : mod = mod [ : i ] else : self . _dbapi = None if self . _threadsafety is None : try : self . _threadsafety = self . _dbapi . threadsafety except AttributeError : try : self . _threadsafety = con . threadsafety except AttributeError : pass if self . _failures is None : try : self . _failures = ( self . _dbapi . OperationalError , self . _dbapi . InternalError ) except AttributeError : try : self . _failures = ( self . _creator . OperationalError , self . _creator . InternalError ) except AttributeError : try : self . _failures = ( con . OperationalError , con . InternalError ) except AttributeError : raise AttributeError ( "Could not determine failure exceptions" " (please set failures or creator.dbapi)." ) if isinstance ( self . _failures , tuple ) : self . _failure = self . _failures [ 0 ] else : self . _failure = self . _failures self . _setsession ( con ) except Exception as error : # the database module could not be determined # or the session could not be prepared try : # close the connection first con . 
close ( ) except Exception : pass raise error # re-raise the original error again return con
Create a new connection using the creator function .
536
9
238,249
def _store ( self , con ) : self . _con = con self . _transaction = False self . _closed = False self . _usage = 0
Store a database connection for subsequent use .
34
8
238,250
def _reset ( self , force = False ) : if not self . _closed and ( force or self . _transaction ) : try : self . rollback ( ) except Exception : pass
Reset a tough connection .
40
6
238,251
def begin ( self , * args , * * kwargs ) : self . _transaction = True try : begin = self . _con . begin except AttributeError : pass else : begin ( * args , * * kwargs )
Indicate the beginning of a transaction .
51
8
238,252
def commit ( self ) : self . _transaction = False try : self . _con . commit ( ) except self . _failures as error : # cannot commit try : # try to reopen the connection con = self . _create ( ) except Exception : pass else : self . _close ( ) self . _store ( con ) raise error
Commit any pending transaction .
72
6
238,253
def cancel ( self ) : self . _transaction = False try : cancel = self . _con . cancel except AttributeError : pass else : cancel ( )
Cancel a long - running transaction .
34
8
238,254
def _setsizes ( self , cursor = None ) : if cursor is None : cursor = self . _cursor if self . _inputsizes : cursor . setinputsizes ( self . _inputsizes ) for column , size in self . _outputsizes . items ( ) : if column is None : cursor . setoutputsize ( size ) else : cursor . setoutputsize ( size , column )
Set stored input and output sizes for cursor execution .
87
10
238,255
def close ( self ) : if not self . _closed : try : self . _cursor . close ( ) except Exception : pass self . _closed = True
Close the tough cursor .
34
5
238,256
def _get_tough_method ( self , name ) : def tough_method ( * args , * * kwargs ) : execute = name . startswith ( 'execute' ) con = self . _con transaction = con . _transaction if not transaction : con . _ping_check ( 4 ) try : if con . _maxusage : if con . _usage >= con . _maxusage : # the connection was used too often raise con . _failure if execute : self . _setsizes ( ) method = getattr ( self . _cursor , name ) result = method ( * args , * * kwargs ) # try to execute if execute : self . _clearsizes ( ) except con . _failures as error : # execution error if not transaction : try : cursor2 = con . _cursor ( * self . _args , * * self . _kwargs ) # open new cursor except Exception : pass else : try : # and try one more time to execute if execute : self . _setsizes ( cursor2 ) method = getattr ( cursor2 , name ) result = method ( * args , * * kwargs ) if execute : self . _clearsizes ( ) except Exception : pass else : self . close ( ) self . _cursor = cursor2 con . _usage += 1 return result try : cursor2 . close ( ) except Exception : pass try : # try to reopen the connection con2 = con . _create ( ) except Exception : pass else : try : cursor2 = con2 . cursor ( * self . _args , * * self . _kwargs ) # open new cursor except Exception : pass else : if transaction : self . close ( ) con . _close ( ) con . _store ( con2 ) self . _cursor = cursor2 raise error # raise the original error again error2 = None try : # try one more time to execute if execute : self . _setsizes ( cursor2 ) method2 = getattr ( cursor2 , name ) result = method2 ( * args , * * kwargs ) if execute : self . _clearsizes ( ) except error . __class__ : # same execution error use2 = False error2 = error except Exception as error : # other execution errors use2 = True error2 = error else : use2 = True if use2 : self . close ( ) con . _close ( ) con . _store ( con2 ) self . _cursor = cursor2 con . _usage += 1 if error2 : raise error2 # raise the other error return result try : cursor2 . 
close ( ) except Exception : pass try : con2 . close ( ) except Exception : pass if transaction : self . _transaction = False raise error # re-raise the original error again else : con . _usage += 1 return result return tough_method
Return a tough version of the given cursor method .
605
10
238,257
def train_punkt ( ctx , input , output , abbr , colloc ) : click . echo ( 'chemdataextractor.tokenize.train_punkt' ) import pickle from nltk . tokenize . punkt import PunktSentenceTokenizer , PunktTrainer punkt = PunktTrainer ( ) # Set these to true to include collocations more leniently, then increase MIN_COLLOC_FREQ to restrict again # punkt.INCLUDE_ALL_COLLOCS = False # punkt.INCLUDE_ABBREV_COLLOCS = False # punkt.MIN_COLLOC_FREQ = 1 # Don't train on titles. They may contain abbreviations, but basically never have actual sentence boundaries. for fin in input : click . echo ( 'Training on %s' % fin . name ) sentences = fin . read ( ) #.replace('.\n', '. \n\n') punkt . train ( sentences , finalize = False , verbose = True ) punkt . finalize_training ( verbose = True ) if abbr : abbreviations = abbr . read ( ) . strip ( ) . split ( '\n' ) click . echo ( 'Manually adding abbreviations: %s' % abbreviations ) punkt . _params . abbrev_types . update ( abbreviations ) if colloc : collocations = [ tuple ( l . split ( '. ' , 1 ) ) for l in colloc . read ( ) . strip ( ) . split ( '\n' ) ] click . echo ( 'Manually adding collocs: %s' % collocations ) punkt . _params . collocations . update ( collocations ) model = PunktSentenceTokenizer ( punkt . get_params ( ) ) pickle . dump ( model , output , protocol = pickle . HIGHEST_PROTOCOL )
Train Punkt sentence splitter using sentences in input .
418
11
238,258
def sentences ( ctx , input , output ) : log . info ( 'chemdataextractor.read.elements' ) log . info ( 'Reading %s' % input . name ) doc = Document . from_file ( input ) for element in doc . elements : if isinstance ( element , Text ) : for raw_sentence in element . raw_sentences : output . write ( raw_sentence . strip ( ) ) output . write ( u'\n' )
Read input document and output sentences .
103
7
238,259
def words ( ctx , input , output ) : log . info ( 'chemdataextractor.read.elements' ) log . info ( 'Reading %s' % input . name ) doc = Document . from_file ( input ) for element in doc . elements : if isinstance ( element , Text ) : for sentence in element . sentences : output . write ( u' ' . join ( sentence . raw_tokens ) ) output . write ( u'\n' )
Read input document and output words .
103
7
238,260
def _in_stoplist ( self , entity ) : start = 0 end = len ( entity ) # Adjust boundaries to exclude disallowed prefixes/suffixes for prefix in IGNORE_PREFIX : if entity . startswith ( prefix ) : # print('%s removing %s' % (currenttext, prefix)) start += len ( prefix ) break for suffix in IGNORE_SUFFIX : if entity . endswith ( suffix ) : # print('%s removing %s' % (currenttext, suffix)) end -= len ( suffix ) break # Return True if entity has been reduced to nothing by adjusting boundaries if start >= end : return True # Return True if adjusted entity is in the literal stoplist entity = entity [ start : end ] if entity in STOPLIST : return True # log.debug('Entity: %s', entity) for stop_re in STOP_RES : if re . search ( stop_re , entity ) : log . debug ( 'Killed: %s' , entity ) return True
Return True if the entity is in the stoplist .
217
11
238,261
def _process_name(name):
    """Fix issues with Jochem names.

    Strips markup artifacts and junk from both ends, then un-inverts
    CAS-style inverted names (e.g. "acetic acid, chloro-").
    """
    # Unescape HTML entities
    name = unescape(name)
    # Remove bracketed stuff on the end
    name = NG_RE.sub('', name).strip()  # Nomenclature groups
    name = END_RE.sub('', name).strip(', ')  # Words
    name = RATIO_RE.sub('', name).strip(', ')  # Ratios
    # Remove stuff off start
    name = START_RE.sub('', name).strip()
    # Remove balanced start and end brackets if none in between
    name = BRACKET_RE.sub('\g<1>', name)
    # Un-invert CAS style names
    comps = name.split(', ')
    if len(comps) == 2:
        if comps[1].endswith('-'):
            # "parent, prefix-" -> "prefix-parent"
            name = comps[0]
            name = '%s%s' % (comps[1], name)
    elif len(comps) > 2:
        name = comps[0]
        for i in range(1, len(comps)):
            # Hyphen-terminated components are prefixes: prepend; others append
            if comps[i].endswith('-'):
                name = '%s%s' % (comps[i], name)
            else:
                name = '%s %s' % (name, comps[i])
    return name
Fix issues with Jochem names .
299
7
238,262
def _get_variants(name):
    """Return variants of a chemical name.

    Generates spellings with Greek letters as words and as unicode symbols,
    plus hyphen/space variants for trailing numeric or alphanumeric parts.
    """
    names = [name]
    oldname = name
    # Map greek words to unicode characters
    if DOT_GREEK_RE.search(name):
        # ".alpha."-style markup: emit both a word variant and a symbol variant
        wordname = name
        while True:
            m = DOT_GREEK_RE.search(wordname)
            if m:
                wordname = wordname[:m.start(1) - 1] + m.group(1) + wordname[m.end(1) + 1:]
            else:
                break
        symbolname = name
        while True:
            m = DOT_GREEK_RE.search(symbolname)
            if m:
                symbolname = symbolname[:m.start(1) - 1] + GREEK_WORDS[m.group(1)] + symbolname[m.end(1) + 1:]
            else:
                break
        names = [wordname, symbolname]
    else:
        # Replace greek words with unicode symbols in place, repeatedly
        while True:
            m = GREEK_RE.search(name)
            if m:
                name = name[:m.start(2)] + GREEK_WORDS[m.group(2)] + name[m.end(2):]
            else:
                break
        while True:
            m = UNAMBIGUOUS_GREEK_RE.search(name)
            if m:
                name = name[:m.start(1)] + GREEK_WORDS[m.group(1)] + name[m.end(1):]
            else:
                break
        if not name == oldname:
            names.append(name)
    newnames = []
    for name in names:
        # If last word \d+, add variants with hyphen and no space preceding
        if NUM_END_RE.search(name):
            newnames.append(NUM_END_RE.sub('-\g<1>', name))
            newnames.append(NUM_END_RE.sub('\g<1>', name))
        # If last word [A-Za-z]\d* add variants with hyphen preceding.
        if ALPHANUM_END_RE.search(name):
            newnames.append(ALPHANUM_END_RE.sub('-\g<1>', name))
    names.extend(newnames)
    return names
Return variants of chemical name .
489
6
238,263
def prepare_jochem(ctx, jochem, output, csoutput):
    """Process and filter a Jochem dictionary file into token lists.

    Case-insensitive terms go to ``output``; others go to ``csoutput``.
    """
    click.echo('chemdataextractor.dict.prepare_jochem')
    for i, line in enumerate(jochem):
        print('JC%s' % i)
        if not line.startswith('TM '):
            continue
        if line.endswith(' @match=ci\n'):
            # Case-insensitive term: strip the "TM " prefix and the match flag
            for tokens in _make_tokens(line[3:-11]):
                output.write(' '.join(tokens))
                output.write('\n')
        else:
            # Case-sensitive term: strip the "TM " prefix and trailing newline
            for tokens in _make_tokens(line[3:-1]):
                csoutput.write(' '.join(tokens))
                csoutput.write('\n')
Process and filter jochem file to produce list of names for dictionary .
172
14
238,264
def prepare_include(ctx, include, output):
    """Process and filter an include file to produce a list of names for the dictionary."""
    click.echo('chemdataextractor.dict.prepare_include')
    for i, line in enumerate(include):
        print('IN%s' % i)
        token_lists = _make_tokens(line.strip())
        for tokens in token_lists:
            output.write(u' '.join(tokens))
            output.write(u'\n')
Process and filter include file to produce list of names for dictionary .
92
13
238,265
def build(ctx, inputs, output, cs):
    """Build a chemical name dictionary from prepared token files."""
    click.echo('chemdataextractor.dict.build')
    tagger = DictionaryTagger(lexicon=ChemLexicon(), case_sensitive=cs)
    # One token list per input line, across all input files
    names = [line.split() for input in inputs for line in input]
    tagger.build(words=names)
    tagger.save(output)
Build chemical name dictionary .
93
5
238,266
def _parse_table_footnotes(self, fns, refs, specials):
    """Override to account for awkward RSC table footnotes."""
    parsed = []
    for fn in fns:
        fnote = self._parse_text(fn, refs=refs, specials=specials, element_cls=Footnote)[0]
        # Attach the id of the preceding element to the footnote
        fnote += Footnote('', id=fn.getprevious().get('id'))
        parsed.append(fnote)
    return parsed
Override to account for awkward RSC table footnotes .
97
11
238,267
def extract():
    """Extract melting points from patents.

    Walks ../examples/mp/grants for XML patent grants, processes them smallest
    first, and writes records with exactly one melting point to JSON files.
    """
    Paragraph.parsers = [CompoundParser(), ChemicalLabelParser(), MpParser()]
    Table.parsers = []
    patents = []
    for root, dirs, files in os.walk('../examples/mp/grants'):
        for filename in files:
            if not filename.endswith('.xml'):
                continue
            path = os.path.abspath(os.path.join(root, filename))
            size = os.path.getsize(path)
            patents.append((path, filename, size))
    # Process smallest files first
    patents = sorted(patents, key=lambda p: p[2])
    for path, filename, size in patents:
        print(path)
        shutil.copyfile(path, '../examples/mp/used/%s' % filename)
        with open(path) as f:
            d = Document.from_file(f)
        # Skip patents that already have results
        if os.path.isfile('../examples/mp/results/%s.json' % filename):
            continue
        # Only keep records with exactly one melting point
        records = [r.serialize() for r in d.records if len(r.melting_points) == 1]
        with open(path := '../examples/mp/results/%s.json' % filename, 'w') as fout:
            fout.write(json.dumps(records, ensure_ascii=False, indent=2).encode('utf8'))
Extract melting points from patents .
311
7
238,268
def cli(ctx, verbose):
    """ChemDataExtractor command line interface."""
    log.debug('ChemDataExtractor v%s' % __version__)
    level = logging.DEBUG if verbose else logging.INFO
    logging.basicConfig(level=level)
    # Silence noisy HTTP logging from requests
    logging.getLogger('requests').setLevel(logging.WARN)
    ctx.obj = {}
ChemDataExtractor command line interface .
73
8
238,269
def extract(ctx, input, output):
    """Run ChemDataExtractor on a document and write serialized records as JSON."""
    log.info('chemdataextractor.extract')
    log.info('Reading %s' % input.name)
    doc = Document.from_file(input, fname=input.name)
    serialized = []
    for record in doc.records:
        serialized.append(record.serialize(primitive=True))
    output.write(json.dumps(serialized, indent=2, ensure_ascii=False))
Run ChemDataExtractor on a document .
103
9
238,270
def read(ctx, input, output):
    """Output processed document elements, one header-and-text section each."""
    log.info('chemdataextractor.read')
    log.info('Reading %s' % input.name)
    doc = Document.from_file(input)
    for element in doc.elements:
        section = u'%s : %s\n=====\n' % (element.__class__.__name__, six.text_type(element))
        output.write(section)
Output processed document elements .
95
5
238,271
def extract_smiles(s):
    """Return a list of SMILES identifiers extracted from the string."""
    # TODO: This still gets a lot of false positives.
    return [
        t for t in s.split()
        if len(t) > 2 and SMILES_RE.match(t) and not t.endswith('.') and bracket_level(t) == 0
    ]
Return a list of SMILES identifiers extracted from the string .
82
13
238,272
def could_be(self, other):
    """Return True if the other PersonName is not explicitly inconsistent.

    Two names are compatible when every attribute they both define is equal
    after punctuation/case normalization, or when one side's name parts are
    exactly the initials of the other's.
    """
    # TODO: Some suffix and title differences should be allowed
    if type(other) is not type(self):
        return NotImplemented
    if self == other:
        return True
    for attr in ['title', 'firstname', 'middlename', 'nickname', 'prefix', 'lastname', 'suffix']:
        if attr not in self or attr not in other:
            continue
        # Compare case-insensitively with all punctuation stripped
        puncmap = dict((ord(char), None) for char in string.punctuation)
        s = self[attr].lower().translate(puncmap)
        o = other[attr].lower().translate(puncmap)
        if s == o:
            continue
        if attr in {'firstname', 'middlename', 'lastname'}:
            # Allow initials to match the full name, e.g. "J R" vs "John Robert"
            if (({len(comp) for comp in s.split()} == {1} and [el[0] for el in o.split()] == s.split()) or
                    ({len(comp) for comp in o.split()} == {1} and [el[0] for el in s.split()] == o.split())):
                continue
        return False
    return True
Return True if the other PersonName is not explicitly inconsistent .
280
12
238,273
def _is_suffix(self, t):
    """Return True if token t is a recognized name suffix."""
    if t in NOT_SUFFIX:
        return False
    # Compare without dots so "Jr." and "Jr" both match
    bare = t.replace('.', '')
    return bare in SUFFIXES or bare in SUFFIXES_LOWER
Return true if t is a suffix .
56
8
238,274
def _tokenize ( self , comps ) : ps = [ ] for comp in comps : ps . extend ( [ c . strip ( ' ,' ) for c in re . split ( r'\s+(?=[^{}]*(?:\{|$))' , comp ) ] ) return [ p for p in ps if p ]
Split name on spaces unless inside curly brackets or quotes .
75
11
238,275
def _clean ( self , t , capitalize = None ) : if self . _from_bibtex : t = latex_to_unicode ( t , capitalize = capitalize ) t = ' ' . join ( [ el . rstrip ( '.' ) if el . count ( '.' ) == 1 else el for el in t . split ( ) ] ) return t
Convert to normalized unicode and strip trailing full stops .
78
12
238,276
def _strip(self, tokens, criteria, prop, rev=False):
    """Strip contiguous tokens meeting ``criteria`` from one end of the list.

    Mutates ``tokens`` in place, stores the cleaned stripped text under
    ``self[prop]``, and returns the remaining tokens.

    :param tokens: List of name tokens (modified in place).
    :param criteria: Predicate deciding whether a token should be stripped.
    :param prop: Key under which the stripped text is stored on self.
    :param rev: If True, strip from the end of the list instead of the start.
    """
    num = len(tokens)
    res = []
    for i, token in enumerate(reversed(tokens) if rev else tokens):
        # num > i + 1 guarantees at least one token is always left behind
        if criteria(token) and num > i + 1:
            res.insert(0, tokens.pop()) if rev else res.append(tokens.pop(0))
        else:
            break
    if res:
        self[prop] = self._clean(' '.join(res))
    return tokens
Strip off contiguous tokens from the start or end of the list that meet the criteria .
109
18
238,277
def _parse_text(self, el, refs=None, specials=None, element_cls=Paragraph):
    """Like _parse_element, but always return exactly one element.

    Multiple parsed elements are merged into a single ``element_cls``
    instance separated by spaces; a self-closing input yields one empty
    element.
    """
    if specials is None:
        specials = {}
    if refs is None:
        refs = {}
    elements = self._parse_element_r(el, specials=specials, refs=refs, element_cls=element_cls)
    # This occurs if the input element is self-closing... (some table td in NLM XML)
    if not elements:
        return [element_cls('')]
    element = elements[0]
    for next_element in elements[1:]:
        element += element_cls(' ') + next_element
    return [element]
Like _parse_element but ensure a single element .
152
11
238,278
def _parse_reference ( self , el ) : if '#' in el . get ( 'href' , '' ) : return [ el . get ( 'href' ) . split ( '#' , 1 ) [ 1 ] ] elif 'rid' in el . attrib : return [ el . attrib [ 'rid' ] ] elif 'idref' in el . attrib : return [ el . attrib [ 'idref' ] ] else : return [ '' . join ( el . itertext ( ) ) . strip ( ) ]
Return reference ID from href or text content .
118
9
238,279
def _is_inline(self, element):
    """Return True if an element is inline."""
    # Comments and processing instructions are never inline
    if element.tag in {etree.Comment, etree.ProcessingInstruction}:
        return False
    return element.tag.lower() in self.inline_elements
Return True if an element is inline .
50
8
238,280
def _next_token ( self , skipws = True ) : self . _token = next ( self . _tokens ) . group ( 0 ) return self . _next_token ( ) if skipws and self . _token . isspace ( ) else self . _token
Increment _token to the next token and return it .
60
12
238,281
def _parse_entry(self):
    """Parse a single BibTeX entry, dispatching on its type."""
    entry_type = self._next_token().lower()
    if entry_type == 'string':
        # @string defines a macro for later substitution
        self._parse_string()
    elif entry_type in ['comment', 'preamble']:
        # @comment and @preamble entries are ignored
        pass
    else:
        self._parse_record(entry_type)
Parse an entry .
72
5
238,282
def _parse_string(self):
    """Parse a @string entry and store the macro definition."""
    opener = self._next_token()
    if opener in ['{', '(']:
        field = self._parse_field()
        if field:
            name, value = field
            self.definitions[name] = value
Parse a string entry and store the definition .
55
10
238,283
def _parse_record(self, record_type):
    """Parse a BibTeX record of the given type into ``self.records``.

    Field keys are normalized via ``self.keynorms``; page ranges, author and
    editor name lists, and LaTeX escapes are cleaned up along the way.
    """
    if self._next_token() in ['{', '(']:
        key = self._next_token()
        self.records[key] = {u'id': key, u'type': record_type.lower()}
        if self._next_token() == ',':
            while True:
                field = self._parse_field()
                if field:
                    k, v = field[0], field[1]
                    # Normalize alternative key spellings
                    if k in self.keynorms:
                        k = self.keynorms[k]
                    if k == 'pages':
                        # "12 -- 34" -> "12-34"
                        v = v.replace(' ', '').replace('--', '-')
                    if k == 'author' or k == 'editor':
                        v = self.parse_names(v)
                    # Recapitalizing the title generally causes more problems than it solves
                    # elif k == 'title':
                    #     v = latex_to_unicode(v, capitalize='title')
                    else:
                        v = latex_to_unicode(v)
                    self.records[key][k] = v
                # Stop once fields are no longer comma-separated
                if self._token != ',':
                    break
Parse a record .
253
5
238,284
def _parse_field(self):
    """Parse a single field; return a (name, value) pair, or None if malformed."""
    name = self._next_token()
    if self._next_token() == '=':
        return name, self._parse_value()
Parse a Field .
46
5
238,285
def _parse_value(self):
    """Parse a value: digits, @string definitions, and the contents of
    double quotes or curly brackets, concatenated with normalized whitespace.
    """
    val = []
    while True:
        t = self._next_token()
        if t == '"':
            # Double-quoted value; braces may nest inside it
            brac_counter = 0
            while True:
                t = self._next_token(skipws=False)
                if t == '{':
                    brac_counter += 1
                if t == '}':
                    brac_counter -= 1
                if t == '"' and brac_counter <= 0:
                    break
                else:
                    val.append(t)
        elif t == '{':
            # Brace-delimited value; track nesting until the outer brace closes
            brac_counter = 0
            while True:
                t = self._next_token(skipws=False)
                if t == '{':
                    brac_counter += 1
                if t == '}':
                    brac_counter -= 1
                if brac_counter < 0:
                    break
                else:
                    val.append(t)
        elif re.match(r'\w', t):
            # Bare word: substitute an @string definition if one exists
            val.extend([self.definitions.get(t, t), ' '])
        elif t.isdigit():
            # BUG FIX: the original did `val.append([t, ' '])`, appending a
            # list that would crash ''.join(val) below. (The branch is
            # currently unreachable because \w also matches digits, but fix
            # it defensively rather than leave a latent TypeError.)
            val.extend([t, ' '])
        elif t == '#':
            # Concatenation operator: adjacent parts are joined anyway
            pass
        else:
            break
    value = ' '.join(''.join(val).split())
    return value
Parse a value . Digits definitions and the contents of double quotes or curly brackets .
264
18
238,286
def parse_names(cls, names):
    """Parse a string of names separated by "and", as in a BibTeX authors field."""
    # Split on "and" only when it appears outside curly brackets
    parts = re.split(r'\sand\s(?=[^{}]*(?:\{|$))', names)
    return [latex_to_unicode(n) for n in parts if n]
Parse a string of names separated by and like in a BibTeX authors field .
59
17
238,287
def metadata(self):
    """Return metadata for the parsed collection of records."""
    # Start with the automatic record count; explicit meta entries override it
    combined = {u'records': self.size}
    combined.update(self.meta)
    return combined
Return metadata for the parsed collection of records .
29
9
238,288
def json(self):
    """Return the records as a JSON string, following the BibJSON convention."""
    payload = OrderedDict([('metadata', self.metadata), ('records', self.records.values())])
    return json.dumps(payload)
Return a list of records as a JSON string . Follows the BibJSON convention .
44
17
238,289
def _flush(self):
    """Persist the in-memory data to the YAML file on disk.

    You should not need to call this manually.
    """
    parent = os.path.dirname(self.path)
    if not os.path.isdir(parent):
        # Create intermediate directories on first save
        os.makedirs(parent)
    with io.open(self.path, 'w', encoding='utf8') as f:
        yaml.safe_dump(self._data, f, default_flow_style=False, encoding=None)
Save the contents of data to the file on disk . You should not need to call this manually .
89
20
238,290
def _is_allowed_abbr(self, tokens):
    """Return True if the token sequence is an allowed abbreviation."""
    if len(tokens) > 2:
        return False
    abbr_text = ''.join(tokens)
    # Length must be within configured bounds and brackets must be balanced
    if not (self.abbr_min <= len(abbr_text) <= self.abbr_max and bracket_level(abbr_text) == 0):
        return False
    # Must start alphanumeric and contain at least one letter
    if not (abbr_text[0].isalnum() and any(c.isalpha() for c in abbr_text)):
        return False
    # Disallow property values such as "5g", "10mL", "3.5cm"
    # (FIX: raw string corrects the invalid '\d' escape in the original pattern)
    if re.match(r'^\d+(\.\d+)?(g|m[lL]|cm)$', abbr_text):
        return False
    return True
Return True if text is an allowed abbreviation .
146
10
238,291
def name(self):
    """A unique snake_case name for this scraper, derived from the class name."""
    chars = []
    for c in self.__class__.__name__:
        # Prefix every uppercase letter with an underscore, then normalize
        if c.isupper():
            chars.append('_')
        chars.append(c)
    return ''.join(chars).strip('_').lower()
A unique name for this scraper .
51
8
238,292
def _post_scrape(self, value, processor=None):
    """Apply cleaning, filtering and processors to scraped values.

    :param value: List of raw scraped values.
    :param processor: Optional extra callable applied to each cleaned value.
    :returns: The processed list if ``self.all``, otherwise the first value or None.
    """
    # Pass each value through the field's clean method
    value = [self.process(v) for v in value]
    # Filter None values
    value = [v for v in value if v is not None]
    # Pass each value through processors defined on the entity
    if processor:
        value = [processor(v) for v in value]
        value = [v for v in value if v is not None]
    # Take first unless all is specified
    if not self.all:
        value = value[0] if value else None
    log.debug('Scraped %s: %s from %s' % (self.name, value, self.selection))
    return value
Apply processing to the scraped value .
151
8
238,293
def scrape(cls, selector, root, xpath=False):
    """Return an EntityList of entities scraped from each match of ``root``."""
    log.debug('Called scrape classmethod with root: %s' % root)
    # Select matching subtrees by XPath or CSS, as requested
    if xpath:
        roots = selector.xpath(root)
    else:
        roots = selector.css(root)
    entities = [cls(r) for r in roots]
    return EntityList(*entities)
Return EntityList for the given selector .
75
8
238,294
def serialize(self):
    """Convert Entity to a python dictionary, skipping empty non-null fields."""
    # Serialize fields to a dict
    data = {}
    for field_name in self:
        value = self._values.get(field_name)
        field = self.fields.get(field_name)
        if value is not None:
            # "all" fields hold a list of values; serialize each one
            if field.all:
                value = [field.serialize(v) for v in value]
            else:
                value = field.serialize(value)
        # Skip empty fields unless field.null
        if not field.null and ((field.all and value == []) or (not field.all and value in {None, ''})):
            continue
        data[field.name] = value
    return data
Convert Entity to python dictionary .
140
7
238,295
def to_json(self, *args, **kwargs):
    """Convert Entity to JSON; extra arguments pass through to json.dumps."""
    serialized = self.serialize()
    return json.dumps(serialized, *args, **kwargs)
Convert Entity to JSON .
38
6
238,296
def from_file(cls, f, fname=None, readers=None):
    """Create a Document from a file or file path.

    :param f: A binary file-like object, or a path string to open.
    :param string fname: Optional filename hint; defaults to the file's name.
    :param list readers: Optional list of readers to try.
    """
    if isinstance(f, six.string_types):
        # BUG FIX: the original opened the path but never closed the handle;
        # use a context manager so the file is always released.
        with io.open(f, 'rb') as fileobj:
            return cls.from_string(fileobj.read(), fname=fname or fileobj.name, readers=readers)
    # Caller-supplied file object: read it but leave closing to the caller
    if not fname and hasattr(f, 'name'):
        fname = f.name
    return cls.from_string(f.read(), fname=fname, readers=readers)
Create a Document from a file .
90
7
238,297
def from_string(cls, fstring, fname=None, readers=None):
    """Create a Document from a byte string containing the contents of a file.

    Tries each reader in turn until one succeeds.

    :param fstring: Byte string file contents.
    :param fname: Optional filename, used as a hint for reader detection.
    :param readers: Optional list of readers; defaults to DEFAULT_READERS.
    :raises ReaderError: If a unicode string is passed or no reader can parse the input.
    """
    if readers is None:
        from ..reader import DEFAULT_READERS
        readers = DEFAULT_READERS
    if isinstance(fstring, six.text_type):
        raise ReaderError('from_string expects a byte string, not a unicode string')
    for reader in readers:
        # Skip reader if we don't think it can read file
        if not reader.detect(fstring, fname=fname):
            continue
        try:
            d = reader.readstring(fstring)
            log.debug('Parsed document with %s' % reader.__class__.__name__)
            return d
        except ReaderError:
            # This reader could not parse the file; try the next one
            pass
    raise ReaderError('Unable to read document')
Create a Document from a byte string containing the contents of a file .
166
14
238,298
def get_element_with_id(self, id):
    """Return the first element with the specified ID, or None if absent."""
    # Should we maintain a hashmap of ids to make this more efficient? Probably overkill.
    # TODO: Elements can contain nested elements (captions, footnotes, table cells, etc.)
    for el in self.elements:
        if el.id == id:
            return el
    return None
Return the element with the specified ID .
76
8
238,299
def serialize(self):
    """Convert the Document to a python dictionary."""
    return {
        'type': 'document',
        'elements': [el.serialize() for el in self.elements],
    }
Convert Document to python dictionary .
56
7