idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
37,300 | def get_app ( ) : from bottle import default_app default_app . push ( ) for module in ( "mongo_orchestration.apps.servers" , "mongo_orchestration.apps.replica_sets" , "mongo_orchestration.apps.sharded_clusters" ) : __import__ ( module ) app = default_app . pop ( ) return app | return bottle app that includes all sub - apps |
37,301 | def await_connection ( host , port ) : for i in range ( CONNECT_ATTEMPTS ) : try : conn = socket . create_connection ( ( host , port ) , CONNECT_TIMEOUT ) conn . close ( ) return True except ( IOError , socket . error ) : time . sleep ( 1 ) return False | Wait for the mongo - orchestration server to accept connections . |
37,302 | def __init_config_params ( self , config ) : if self . version >= ( 2 , 4 ) : params = config . get ( 'setParameter' , { } ) params . setdefault ( 'enableTestCommands' , 1 ) if self . version >= ( 4 , 1 ) and not self . is_mongos : params . setdefault ( 'transactionLifetimeLimitSeconds' , 3 ) if self . version >= ( 4 , 0 ) and not self . is_mongos : params . setdefault ( 'maxTransactionLockRequestTimeoutMillis' , 25 ) config [ 'setParameter' ] = params compressors = config . get ( 'networkMessageCompressors' ) if compressors is None : if self . version >= ( 4 , 1 , 7 ) : config [ 'networkMessageCompressors' ] = 'zstd,zlib,snappy,noop' elif self . version >= ( 3 , 5 , 9 ) : config [ 'networkMessageCompressors' ] = 'zlib,snappy,noop' elif self . version >= ( 3 , 4 ) : config [ 'networkMessageCompressors' ] = 'snappy,noop' | Conditionally enable options in the Server s config file . |
37,303 | def connection ( self ) : c = pymongo . MongoClient ( self . hostname , fsync = True , socketTimeoutMS = self . socket_timeout , ** self . kwargs ) connected ( c ) if not self . is_mongos and self . login and not self . restart_required : db = c [ self . auth_source ] if self . x509_extra_user : auth_dict = { 'name' : DEFAULT_SUBJECT , 'mechanism' : 'MONGODB-X509' } else : auth_dict = { 'name' : self . login , 'password' : self . password } try : db . authenticate ( ** auth_dict ) except : logger . exception ( "Could not authenticate to %s with %r" % ( self . hostname , auth_dict ) ) raise return c | return authenticated connection |
37,304 | def version ( self ) : if not self . __version : command = ( self . name , '--version' ) logger . debug ( command ) stdout , _ = subprocess . Popen ( command , stdout = subprocess . PIPE ) . communicate ( ) version_output = str ( stdout ) match = re . search ( self . version_patt , version_output ) if match is None : raise ServersError ( 'Could not determine version of %s from string: %s' % ( self . name , version_output ) ) version_string = match . group ( 'version' ) self . __version = tuple ( map ( int , version_string . split ( '.' ) ) ) return self . __version | Get the version of MongoDB that this Server runs as a tuple . |
37,305 | def run_command ( self , command , arg = None , is_eval = False ) : mode = is_eval and 'eval' or 'command' if isinstance ( arg , tuple ) : name , d = arg else : name , d = arg , { } result = getattr ( self . connection . admin , mode ) ( command , name , ** d ) return result | run command on the server |
37,306 | def info ( self ) : proc_info = { "name" : self . name , "params" : self . cfg , "alive" : self . is_alive , "optfile" : self . config_path } if self . is_alive : proc_info [ 'pid' ] = self . proc . pid logger . debug ( "proc_info: {proc_info}" . format ( ** locals ( ) ) ) mongodb_uri = '' server_info = { } status_info = { } if self . hostname and self . cfg . get ( 'port' , None ) : try : c = self . connection server_info = c . server_info ( ) logger . debug ( "server_info: {server_info}" . format ( ** locals ( ) ) ) mongodb_uri = 'mongodb://' + self . hostname status_info = { "primary" : c . is_primary , "mongos" : c . is_mongos } logger . debug ( "status_info: {status_info}" . format ( ** locals ( ) ) ) except ( pymongo . errors . AutoReconnect , pymongo . errors . OperationFailure , pymongo . errors . ConnectionFailure ) : server_info = { } status_info = { } result = { "mongodb_uri" : mongodb_uri , "statuses" : status_info , "serverInfo" : server_info , "procInfo" : proc_info , "orchestration" : 'servers' } if self . login : result [ 'mongodb_auth_uri' ] = self . mongodb_auth_uri ( self . hostname ) logger . debug ( "return {result}" . format ( result = result ) ) return result | return info about server as dict object |
37,307 | def start ( self , timeout = 300 ) : if self . is_alive : return True try : dbpath = self . cfg . get ( 'dbpath' ) if dbpath and self . _is_locked : logger . info ( "Performing repair on locked dbpath %s" , dbpath ) process . repair_mongo ( self . name , self . cfg [ 'dbpath' ] ) self . proc , self . hostname = process . mprocess ( self . name , self . config_path , self . cfg . get ( 'port' , None ) , timeout , self . silence_stdout ) self . pid = self . proc . pid logger . debug ( "pid={pid}, hostname={hostname}" . format ( pid = self . pid , hostname = self . hostname ) ) self . host = self . hostname . split ( ':' ) [ 0 ] self . port = int ( self . hostname . split ( ':' ) [ 1 ] ) max_attempts = 6 for i in range ( max_attempts ) : try : self . run_command ( 'isMaster' ) break except pymongo . errors . ConnectionFailure : logger . exception ( 'isMaster command failed:' ) else : raise TimeoutError ( "Server did not respond to 'isMaster' after %d attempts." % max_attempts ) except ( OSError , TimeoutError ) : logpath = self . cfg . get ( 'logpath' ) if logpath : logger . error ( "Could not start Server. Please find server log below.\n" "=====================================================" ) with open ( logpath ) as lp : logger . error ( lp . read ( ) ) else : logger . exception ( 'Could not start Server, and no logpath was provided!' ) reraise ( TimeoutError , 'Could not start Server. ' 'Please check server log located in ' + self . cfg . get ( 'logpath' , '<no logpath given>' ) + ' or the mongo-orchestration log in ' + LOG_FILE + ' for more details.' ) if self . restart_required : if self . login : self . _add_users ( ) self . stop ( ) if self . is_mongos : self . config_path , self . cfg = self . __init_mongos ( self . cfg ) else : self . config_path , self . cfg = self . __init_mongod ( self . cfg , add_auth = True ) self . restart_required = False self . start ( ) return True | start server return True of False |
37,308 | def shutdown ( self ) : if not process . proc_alive ( self . proc ) : return logger . info ( "Attempting to connect to %s" , self . hostname ) client = self . connection attempts = 2 for i in range ( attempts ) : logger . info ( "Attempting to send shutdown command to %s" , self . hostname ) try : client . admin . command ( "shutdown" , force = True ) except ConnectionFailure : pass try : return process . wait_mprocess ( self . proc , 5 ) except TimeoutError as exc : logger . info ( "Timed out waiting on process: %s" , exc ) continue raise ServersError ( "Server %s failed to shutdown after %s attempts" % ( self . hostname , attempts ) ) | Send shutdown command and wait for the process to exit . |
37,309 | def bin_path ( self , release = None ) : if release : for r in self . releases : if release in r : return self . releases [ r ] raise MongoOrchestrationError ( "No such release '%s' in %r" % ( release , self . releases ) ) if self . default_release : return self . releases [ self . default_release ] if self . releases : return list ( self . releases . values ( ) ) [ 0 ] return '' | Get the bin path for a particular release . |
37,310 | def __init_configrs ( self , rs_cfg ) : rs_cfg [ 'id' ] = rs_cfg . pop ( 'rs_id' , None ) for member in rs_cfg . setdefault ( 'members' , [ { } ] ) : member [ 'procParams' ] = self . _strip_auth ( member . get ( 'procParams' , { } ) ) member [ 'procParams' ] [ 'configsvr' ] = True if self . enable_ipv6 : common . enable_ipv6_single ( member [ 'procParams' ] ) rs_cfg [ 'sslParams' ] = self . sslParams self . _configsvrs . append ( ReplicaSets ( ) . create ( rs_cfg ) ) | Create and start a config replica set . |
37,311 | def __init_configsvrs ( self , params ) : self . _configsvrs = [ ] for cfg in params : cfg = self . _strip_auth ( cfg ) server_id = cfg . pop ( 'server_id' , None ) version = cfg . pop ( 'version' , self . _version ) cfg . update ( { 'configsvr' : True } ) if self . enable_ipv6 : common . enable_ipv6_single ( cfg ) self . _configsvrs . append ( Servers ( ) . create ( 'mongod' , cfg , sslParams = self . sslParams , autostart = True , version = version , server_id = server_id ) ) | create and start config servers |
37,312 | def configsvrs ( self ) : if self . uses_rs_configdb : rs_id = self . _configsvrs [ 0 ] mongodb_uri = ReplicaSets ( ) . info ( rs_id ) [ 'mongodb_uri' ] return [ { 'id' : rs_id , 'mongodb_uri' : mongodb_uri } ] return [ { 'id' : h_id , 'hostname' : Servers ( ) . hostname ( h_id ) } for h_id in self . _configsvrs ] | return list of config servers |
37,313 | def router ( self ) : for server in self . _routers : info = Servers ( ) . info ( server ) if info [ 'procInfo' ] . get ( 'alive' , False ) : return { 'id' : server , 'hostname' : Servers ( ) . hostname ( server ) } | return first available router |
37,314 | def router_connections ( self ) : clients = [ ] for server in self . _routers : if Servers ( ) . is_alive ( server ) : client = self . create_connection ( Servers ( ) . hostname ( server ) ) clients . append ( client ) return clients | Return a list of MongoClients one for each mongos . |
37,315 | def _add ( self , shard_uri , name ) : return self . router_command ( "addShard" , ( shard_uri , { "name" : name } ) , is_eval = False ) | execute addShard command |
37,316 | def member_add ( self , member_id = None , params = None ) : member_id = member_id or str ( uuid4 ( ) ) if self . enable_ipv6 : common . enable_ipv6_repl ( params ) if 'members' in params : for member in params [ 'members' ] : if not member . get ( 'rsParams' , { } ) . get ( 'arbiterOnly' , False ) : member . setdefault ( 'procParams' , { } ) [ 'shardsvr' ] = True rs_params = params . copy ( ) rs_params [ 'id' ] = rs_params . pop ( 'rs_id' , None ) rs_params . update ( { 'sslParams' : self . sslParams } ) rs_params [ 'version' ] = params . pop ( 'version' , self . _version ) rs_params [ 'members' ] = [ self . _strip_auth ( params ) for params in rs_params [ 'members' ] ] rs_id = ReplicaSets ( ) . create ( rs_params ) members = ReplicaSets ( ) . members ( rs_id ) cfgs = rs_id + r"/" + ',' . join ( [ item [ 'host' ] for item in members ] ) result = self . _add ( cfgs , member_id ) if result . get ( 'ok' , 0 ) == 1 : self . _shards [ result [ 'shardAdded' ] ] = { 'isReplicaSet' : True , '_id' : rs_id } return self . member_info ( member_id ) else : params . setdefault ( 'procParams' , { } ) [ 'shardsvr' ] = True params . update ( { 'autostart' : True , 'sslParams' : self . sslParams } ) params = params . copy ( ) params [ 'procParams' ] = self . _strip_auth ( params . get ( 'procParams' , { } ) ) params . setdefault ( 'version' , self . _version ) logger . debug ( "servers create params: {params}" . format ( ** locals ( ) ) ) server_id = Servers ( ) . create ( 'mongod' , ** params ) result = self . _add ( Servers ( ) . hostname ( server_id ) , member_id ) if result . get ( 'ok' , 0 ) == 1 : self . _shards [ result [ 'shardAdded' ] ] = { 'isServer' : True , '_id' : server_id } return self . member_info ( member_id ) | add new member into existing configuration |
37,317 | def _remove ( self , shard_name ) : result = self . router_command ( "removeShard" , shard_name , is_eval = False ) if result [ 'ok' ] == 1 and result [ 'state' ] == 'completed' : shard = self . _shards . pop ( shard_name ) if shard . get ( 'isServer' , False ) : Servers ( ) . remove ( shard [ '_id' ] ) if shard . get ( 'isReplicaSet' , False ) : ReplicaSets ( ) . remove ( shard [ '_id' ] ) return result | remove member from configuration |
37,318 | def reset ( self ) : for shard_id in self . _shards : if self . _shards [ shard_id ] . get ( 'isReplicaSet' ) : singleton = ReplicaSets ( ) elif self . _shards [ shard_id ] . get ( 'isServer' ) : singleton = Servers ( ) singleton . command ( self . _shards [ shard_id ] [ '_id' ] , 'reset' ) for config_id in self . _configsvrs : self . configdb_singleton . command ( config_id , 'reset' ) for router_id in self . _routers : Servers ( ) . command ( router_id , 'reset' ) return self . info ( ) | Ensure all shards configs and routers are running and available . |
37,319 | def info ( self ) : uri = ',' . join ( x [ 'hostname' ] for x in self . routers ) mongodb_uri = 'mongodb://' + uri result = { 'id' : self . id , 'shards' : self . members , 'configsvrs' : self . configsvrs , 'routers' : self . routers , 'mongodb_uri' : mongodb_uri , 'orchestration' : 'sharded_clusters' } if self . login : result [ 'mongodb_auth_uri' ] = self . mongodb_auth_uri ( uri ) return result | return info about configuration |
37,320 | def router_add ( self , cluster_id , params ) : cluster = self . _storage [ cluster_id ] result = cluster . router_add ( params ) self . _storage [ cluster_id ] = cluster return result | add new router |
37,321 | def router_del ( self , cluster_id , router_id ) : cluster = self . _storage [ cluster_id ] result = cluster . router_remove ( router_id ) self . _storage [ cluster_id ] = cluster return result | remove router from the ShardedCluster |
37,322 | def command ( self , cluster_id , command , * args ) : cluster = self . _storage [ cluster_id ] try : return getattr ( cluster , command ) ( * args ) except AttributeError : raise ValueError ( "Cannot issue the command %r to ShardedCluster %s" % ( command , cluster_id ) ) | Call a ShardedCluster method . |
37,323 | def member_del ( self , cluster_id , member_id ) : cluster = self . _storage [ cluster_id ] result = cluster . member_remove ( member_id ) self . _storage [ cluster_id ] = cluster return result | remove member from cluster cluster |
37,324 | def member_add ( self , cluster_id , params ) : cluster = self . _storage [ cluster_id ] result = cluster . member_add ( params . get ( 'id' , None ) , params . get ( 'shardParams' , { } ) ) self . _storage [ cluster_id ] = cluster return result | add new member into configuration |
37,325 | def expand_dir ( _dir , cwd = os . getcwd ( ) ) : _dir = os . path . expanduser ( os . path . expandvars ( _dir ) ) if not os . path . isabs ( _dir ) : _dir = os . path . normpath ( os . path . join ( cwd , _dir ) ) return _dir | Return path with environmental variables and tilde ~ expanded . |
37,326 | def extract_repos ( config , cwd = os . getcwd ( ) ) : configs = [ ] for directory , repos in config . items ( ) : for repo , repo_data in repos . items ( ) : conf = { } if isinstance ( repo_data , string_types ) : conf [ 'url' ] = repo_data else : conf = update_dict ( conf , repo_data ) if 'repo' in conf : if 'url' not in conf : conf [ 'url' ] = conf . pop ( 'repo' ) else : conf . pop ( 'repo' , None ) if 'shell_command_after' in conf : if isinstance ( conf [ 'shell_command_after' ] , string_types ) : conf [ 'shell_command_after' ] = [ conf [ 'shell_command_after' ] ] if 'name' not in conf : conf [ 'name' ] = repo if 'parent_dir' not in conf : conf [ 'parent_dir' ] = expand_dir ( directory , cwd ) if 'repo_dir' not in conf : conf [ 'repo_dir' ] = expand_dir ( os . path . join ( conf [ 'parent_dir' ] , conf [ 'name' ] ) , cwd ) if 'remotes' in conf : remotes = [ ] for remote_name , url in conf [ 'remotes' ] . items ( ) : remotes . append ( { 'remote_name' : remote_name , 'url' : url } ) conf [ 'remotes' ] = sorted ( remotes , key = lambda x : sorted ( x . get ( 'remote_name' ) ) ) configs . append ( conf ) return configs | Return expanded configuration . |
37,327 | def find_config_files ( path = [ '~/.vcspull' ] , match = [ '*' ] , filetype = [ 'json' , 'yaml' ] , include_home = False ) : configs = [ ] if include_home is True : configs . extend ( find_home_config_files ( ) ) if isinstance ( path , list ) : for p in path : configs . extend ( find_config_files ( p , match , filetype ) ) return configs else : path = os . path . expanduser ( path ) if isinstance ( match , list ) : for m in match : configs . extend ( find_config_files ( path , m , filetype ) ) else : if isinstance ( filetype , list ) : for f in filetype : configs . extend ( find_config_files ( path , match , f ) ) else : match = os . path . join ( path , match ) match += ".{filetype}" . format ( filetype = filetype ) configs = glob . glob ( match ) return configs | Return repos from a directory and match . Not recursive . |
37,328 | def load_configs ( files , cwd = os . getcwd ( ) ) : repos = [ ] for f in files : _ , ext = os . path . splitext ( f ) conf = kaptan . Kaptan ( handler = ext . lstrip ( '.' ) ) . import_config ( f ) newrepos = extract_repos ( conf . export ( 'dict' ) , cwd ) if not repos : repos . extend ( newrepos ) continue dupes = detect_duplicate_repos ( repos , newrepos ) if dupes : msg = ( 'repos with same path + different VCS detected!' , dupes ) raise exc . VCSPullException ( msg ) repos . extend ( newrepos ) return repos | Return repos from a list of files . |
37,329 | def detect_duplicate_repos ( repos1 , repos2 ) : dupes = [ ] path_dupe_repos = [ ] curpaths = [ r [ 'repo_dir' ] for r in repos1 ] newpaths = [ r [ 'repo_dir' ] for r in repos2 ] path_duplicates = list ( set ( curpaths ) . intersection ( newpaths ) ) if not path_duplicates : return None path_dupe_repos . extend ( [ r for r in repos2 if any ( r [ 'repo_dir' ] == p for p in path_duplicates ) ] ) if not path_dupe_repos : return None for n in path_dupe_repos : currepo = next ( ( r for r in repos1 if r [ 'repo_dir' ] == n [ 'repo_dir' ] ) , None ) if n [ 'url' ] != currepo [ 'url' ] : dupes += ( n , currepo ) return dupes | Return duplicate repos dict if repo_dir same and vcs different . |
37,330 | def copy_node_info ( src , dest ) : for attr in [ 'lineno' , 'fromlineno' , 'tolineno' , 'col_offset' , 'parent' ] : if hasattr ( src , attr ) : setattr ( dest , attr , getattr ( src , attr ) ) | Copy information from src to dest |
37,331 | def make_non_magical_flask_import ( flask_ext_name ) : match = re . match ( r'flask\.ext\.(.*)' , flask_ext_name ) if match is None : raise LookupError ( "Module name `{}` doesn't match" "`flask.ext` style import." ) from_name = match . group ( 1 ) actual_module_name = 'flask_{}' . format ( from_name ) return actual_module_name | Convert a flask . ext . admin into flask_admin . |
37,332 | def transform_flask_from_import ( node ) : new_names = [ ] for ( name , as_name ) in node . names : actual_module_name = 'flask_{}' . format ( name ) new_names . append ( ( actual_module_name , as_name or name ) ) new_node = nodes . Import ( ) copy_node_info ( node , new_node ) new_node . names = new_names mark_transformed ( new_node ) return new_node | Translates a flask . ext from - style import into a non - magical import . |
37,333 | def transform_flask_from_long ( node ) : actual_module_name = make_non_magical_flask_import ( node . modname ) new_node = nodes . ImportFrom ( actual_module_name , node . names , node . level ) copy_node_info ( node , new_node ) mark_transformed ( new_node ) return new_node | Translates a flask . ext . wtf from - style import into a non - magical import . |
37,334 | def transform_flask_bare_import ( node ) : new_names = [ ] for ( name , as_name ) in node . names : match = re . match ( r'flask\.ext\.(.*)' , name ) from_name = match . group ( 1 ) actual_module_name = 'flask_{}' . format ( from_name ) new_names . append ( ( actual_module_name , as_name ) ) new_node = nodes . Import ( ) copy_node_info ( node , new_node ) new_node . names = new_names mark_transformed ( new_node ) return new_node | Translates a flask . ext . wtf bare import into a non - magical import . |
37,335 | def main ( ) : alarm = XBeeAlarm ( '/dev/ttyUSB0' , '\x56\x78' ) routine = SimpleWakeupRoutine ( alarm ) from time import sleep while True : try : print "Waiting 5 seconds..." sleep ( 5 ) print "Firing" routine . trigger ( ) except KeyboardInterrupt : break | Run through simple demonstration of alarm concept |
37,336 | def _parse_IS_at_response ( self , packet_info ) : if packet_info [ 'id' ] in ( 'at_response' , 'remote_at_response' ) and packet_info [ 'command' ] . lower ( ) == b'is' and packet_info [ 'status' ] == b'\x00' : return self . _parse_samples ( packet_info [ 'parameter' ] ) else : return packet_info [ 'parameter' ] | If the given packet is a successful remote AT response for an IS command parse the parameter field as IO data . |
37,337 | def _parse_ND_at_response ( self , packet_info ) : if packet_info [ 'id' ] == 'at_response' and packet_info [ 'command' ] . lower ( ) == b'nd' and packet_info [ 'status' ] == b'\x00' : result = { } result [ 'source_addr' ] = packet_info [ 'parameter' ] [ 0 : 2 ] result [ 'source_addr_long' ] = packet_info [ 'parameter' ] [ 2 : 10 ] null_terminator_index = 10 while packet_info [ 'parameter' ] [ null_terminator_index : null_terminator_index + 1 ] != b'\x00' : null_terminator_index += 1 result [ 'node_identifier' ] = packet_info [ 'parameter' ] [ 10 : null_terminator_index ] result [ 'parent_address' ] = packet_info [ 'parameter' ] [ null_terminator_index + 1 : null_terminator_index + 3 ] result [ 'device_type' ] = packet_info [ 'parameter' ] [ null_terminator_index + 3 : null_terminator_index + 4 ] result [ 'status' ] = packet_info [ 'parameter' ] [ null_terminator_index + 4 : null_terminator_index + 5 ] result [ 'profile_id' ] = packet_info [ 'parameter' ] [ null_terminator_index + 5 : null_terminator_index + 7 ] result [ 'manufacturer' ] = packet_info [ 'parameter' ] [ null_terminator_index + 7 : null_terminator_index + 9 ] if null_terminator_index + 9 != len ( packet_info [ 'parameter' ] ) : raise ValueError ( "Improper ND response length: expected {0}, " "read {1} bytes" . format ( len ( packet_info [ 'parameter' ] ) , null_terminator_index + 9 ) ) return result else : return packet_info [ 'parameter' ] | If the given packet is a successful AT response for an ND command parse the parameter field . |
37,338 | def main ( ) : try : ser = serial . Serial ( '/dev/ttyUSB0' , 9600 ) xbee = XBee ( ser ) xbee . send ( 'at' , frame_id = 'A' , command = 'DH' ) response = xbee . wait_read_frame ( ) print response xbee . send ( 'at' , frame_id = 'B' , command = 'DL' ) response = xbee . wait_read_frame ( ) print response xbee . send ( 'at' , frame_id = 'C' , command = 'MY' ) response = xbee . wait_read_frame ( ) print response xbee . send ( 'at' , frame_id = 'D' , command = 'CE' ) response = xbee . wait_read_frame ( ) print response except KeyboardInterrupt : pass finally : ser . close ( ) | Sends an API AT command to read the lower - order address bits from an XBee Series 1 and looks for a response |
37,339 | def byteToInt ( byte ) : if hasattr ( byte , 'bit_length' ) : return byte return ord ( byte ) if hasattr ( byte , 'encode' ) else byte [ 0 ] | byte - > int |
37,340 | def list_conversions ( api_key , api_secret , video_key , ** kwargs ) : jwplatform_client = jwplatform . Client ( api_key , api_secret ) logging . info ( "Querying for video conversions." ) try : response = jwplatform_client . videos . conversions . list ( video_key = video_key , ** kwargs ) except jwplatform . errors . JWPlatformError as e : logging . error ( "Encountered an error querying for video conversions.\n{}" . format ( e ) ) sys . exit ( e . message ) return response | Function which retrieves a list of a video object s conversions . |
37,341 | def _build_request ( self , path , params = None ) : _url = '{scheme}://{host}{port}/{version}{path}' . format ( scheme = self . _scheme , host = self . _host , port = ':{}' . format ( self . _port ) if self . _port != 80 else '' , version = self . _api_version , path = path ) if params is not None : _params = params . copy ( ) else : _params = dict ( ) _params [ 'api_nonce' ] = str ( random . randint ( 0 , 999999999 ) ) . zfill ( 9 ) _params [ 'api_timestamp' ] = int ( time . time ( ) ) _params [ 'api_key' ] = self . __key _params [ 'api_format' ] = 'json' _params [ 'api_kit' ] = 'py-{}{}' . format ( __version__ , '-{}' . format ( self . _agent ) if self . _agent else '' ) sbs = '&' . join ( [ '{}={}' . format ( quote ( ( unicode ( key ) . encode ( 'utf-8' ) ) , safe = '~' ) , quote ( ( unicode ( value ) . encode ( 'utf-8' ) ) , safe = '~' ) ) for key , value in sorted ( _params . items ( ) ) ] ) _params [ 'api_signature' ] = hashlib . sha1 ( '{}{}' . format ( sbs , self . __secret ) . encode ( 'utf-8' ) ) . hexdigest ( ) return _url , _params | Build API request |
37,342 | def create_video ( api_key , api_secret , local_video_path , api_format = 'json' , ** kwargs ) : jwplatform_client = jwplatform . Client ( api_key , api_secret ) logging . info ( "Registering new Video-Object" ) try : response = jwplatform_client . videos . create ( upload_method = 'single' , ** kwargs ) except jwplatform . errors . JWPlatformError as e : logging . error ( "Encountered an error creating a video\n{}" . format ( e ) ) logging . info ( response ) upload_url = '{}://{}{}' . format ( response [ 'link' ] [ 'protocol' ] , response [ 'link' ] [ 'address' ] , response [ 'link' ] [ 'path' ] ) query_parameters = response [ 'link' ] [ 'query' ] query_parameters [ 'api_format' ] = api_format with open ( local_video_path , 'rb' ) as f : files = { 'file' : f } r = requests . post ( upload_url , params = query_parameters , files = files ) logging . info ( 'uploading file {} to url {}' . format ( local_video_path , r . url ) ) logging . info ( 'upload response: {}' . format ( r . text ) ) logging . info ( r ) | Function which creates new video object via singlefile upload method . |
37,343 | def replace_video ( api_key , api_secret , local_video_path , video_key , ** kwargs ) : filename = os . path . basename ( local_video_path ) jwplatform_client = jwplatform . Client ( api_key , api_secret ) logging . info ( "Updating Video" ) try : response = jwplatform_client . videos . update ( video_key = video_key , upload_method = 's3' , update_file = 'True' , ** kwargs ) except jwplatform . errors . JWPlatformError as e : logging . error ( "Encountered an error updating the video\n{}" . format ( e ) ) sys . exit ( e . message ) logging . info ( response ) upload_url = '{}://{}{}' . format ( response [ 'link' ] [ 'protocol' ] , response [ 'link' ] [ 'address' ] , response [ 'link' ] [ 'path' ] ) query_parameters = response [ 'link' ] [ 'query' ] headers = { 'Content-Disposition' : 'attachment; filename="{}"' . format ( filename ) } with open ( local_video_path , 'rb' ) as f : r = requests . put ( upload_url , params = query_parameters , headers = headers , data = f ) logging . info ( 'uploading file {} to url {}' . format ( local_video_path , r . url ) ) logging . info ( 'upload response: {}' . format ( r . text ) ) logging . info ( r ) | Function which allows to replace the content of an EXISTING video object . |
37,344 | def update_thumbnail ( api_key , api_secret , video_key , position = 7.0 , ** kwargs ) : jwplatform_client = jwplatform . Client ( api_key , api_secret ) logging . info ( "Updating video thumbnail." ) try : response = jwplatform_client . videos . thumbnails . update ( video_key = video_key , position = position , ** kwargs ) except jwplatform . errors . JWPlatformError as e : logging . error ( "Encountered an error updating thumbnail.\n{}" . format ( e ) ) sys . exit ( e . message ) return response | Function which updates the thumbnail for an EXISTING video utilizing position parameter . This function is useful for selecting a new thumbnail from with the already existing video content . Instead of position parameter user may opt to utilize thumbnail_index parameter . Please eee documentation for further information . |
37,345 | def update_thumbnail_via_upload ( api_key , api_secret , video_key , local_video_image_path = '' , api_format = 'json' , ** kwargs ) : jwplatform_client = jwplatform . Client ( api_key , api_secret ) logging . info ( "Updating video thumbnail." ) try : response = jwplatform_client . videos . thumbnails . update ( video_key = video_key , ** kwargs ) except jwplatform . errors . JWPlatformError as e : logging . error ( "Encountered an error updating thumbnail.\n{}" . format ( e ) ) sys . exit ( e . message ) logging . info ( response ) upload_url = '{}://{}{}' . format ( response [ 'link' ] [ 'protocol' ] , response [ 'link' ] [ 'address' ] , response [ 'link' ] [ 'path' ] ) query_parameters = response [ 'link' ] [ 'query' ] query_parameters [ 'api_format' ] = api_format with open ( local_video_image_path , 'rb' ) as f : files = { 'file' : f } r = requests . post ( upload_url , params = query_parameters , files = files ) logging . info ( 'uploading file {} to url {}' . format ( local_video_image_path , r . url ) ) logging . info ( 'upload response: {}' . format ( r . text ) ) | Function which updates the thumbnail for a particular video object with a locally saved image . |
37,346 | def run_upload ( video_file_path ) : upload_parameters = { 'file_path' : video_file_path , 'file_size' : os . stat ( video_file_path ) . st_size , 'file_name' : os . path . basename ( video_file_path ) } try : jwplatform_client = Client ( JW_API_KEY , JW_API_SECRET ) jwplatform_video_create_response = jwplatform_client . videos . create ( upload_method = 'multipart' , title = upload_parameters [ 'file_name' ] ) except JWPlatformError : logging . exception ( 'An error occurred during the uploader setup. Check that your API keys are properly ' 'set up in your environment, and ensure that the video file path exists.' ) return upload_parameters [ 'upload_url' ] = '{protocol}://{address}{path}' . format ( ** jwplatform_video_create_response [ 'link' ] ) logging . info ( 'Upload URL to be used: {}' . format ( upload_parameters [ 'upload_url' ] ) ) upload_parameters [ 'query_parameters' ] = jwplatform_video_create_response [ 'link' ] [ 'query' ] upload_parameters [ 'query_parameters' ] [ 'api_format' ] = 'json' upload_parameters [ 'headers' ] = { 'X-Session-ID' : jwplatform_video_create_response [ 'session_id' ] } upload_parameters [ 'chunk_offset' ] = 0 with open ( upload_parameters [ 'file_path' ] , 'rb' ) as file_to_upload : while True : chunk = file_to_upload . read ( BYTES_TO_BUFFER ) if len ( chunk ) <= 0 : break try : upload_chunk ( chunk , upload_parameters ) except requests . exceptions . RequestException : logging . exception ( 'Error posting data, stopping upload...' ) break | Configures all of the needed upload_parameters and sets up all information pertinent to the video to be uploaded . |
37,347 | def make_csv ( api_key , api_secret , path_to_csv = None , result_limit = 1000 , ** kwargs ) : path_to_csv = path_to_csv or os . path . join ( os . getcwd ( ) , 'video_list.csv' ) timeout_in_seconds = 2 max_retries = 3 retries = 0 offset = 0 videos = list ( ) jwplatform_client = jwplatform . Client ( api_key , api_secret ) logging . info ( "Querying for video list." ) while True : try : response = jwplatform_client . videos . list ( result_limit = result_limit , result_offset = offset , ** kwargs ) except jwplatform . errors . JWPlatformRateLimitExceededError : logging . error ( "Encountered rate limiting error. Backing off on request time." ) if retries == max_retries : raise jwplatform . errors . JWPlatformRateLimitExceededError ( ) timeout_in_seconds *= timeout_in_seconds retries += 1 time . sleep ( timeout_in_seconds ) continue except jwplatform . errors . JWPlatformError as e : logging . error ( "Encountered an error querying for videos list.\n{}" . format ( e ) ) raise e retries = 0 timeout_in_seconds = 2 next_videos = response . get ( 'videos' , [ ] ) last_query_total = response . get ( 'total' , 0 ) videos . extend ( next_videos ) offset += len ( next_videos ) logging . info ( "Accumulated {} videos." . format ( offset ) ) if offset >= last_query_total : break desired_fields = [ 'key' , 'title' , 'description' , 'tags' , 'date' , 'link' ] should_write_header = not os . path . isfile ( path_to_csv ) with open ( path_to_csv , 'a+' ) as path_to_csv : writer = csv . DictWriter ( path_to_csv , fieldnames = desired_fields , extrasaction = 'ignore' ) if should_write_header : writer . writeheader ( ) writer . writerows ( videos ) | Function which fetches a video library and writes each video_objects Metadata to CSV . Useful for CMS systems . |
37,348 | def image ( self ) : r text = self . text w , h = self . font . getsize ( text ) margin_x = round ( self . margin_x * w / self . w ) margin_y = round ( self . margin_y * h / self . h ) image = Image . new ( 'RGB' , ( w + 2 * margin_x , h + 2 * margin_y ) , ( 255 , 255 , 255 ) ) self . _writeText ( image , text , pos = ( margin_x , margin_y ) ) self . _drawLine ( image ) noise = self . _whiteNoise ( image . size ) if noise is not None : image = Image . blend ( image , noise , 0.5 ) image = image . resize ( self . size , resample = self . resample ) return ( text , image ) | r Tuple with a CAPTCHA text and a Image object . |
def bytes(self):
    """Return a ``(text, stream)`` pair for a freshly rendered CAPTCHA.

    ``stream`` is a ``BytesIO`` holding the CAPTCHA image encoded with
    ``self.format``, rewound to position 0 so callers can read it
    immediately.
    """
    text, image = self.image
    stream = BytesIO()
    image.save(stream, format=self.format)
    stream.seek(0)
    return (text, stream)
def write(self, file):
    """Render a CAPTCHA and save its image to *file*.

    *file* may be a filesystem path or a writable file object, as
    accepted by ``Image.save``.  Returns the ``(text, file)`` pair for
    the generated CAPTCHA.
    """
    text, rendered = self.image
    rendered.save(file, format=self.format)
    return (text, file)
def text(self):
    """Text received from ``self.source``.

    ``source`` may be either a plain string or a zero-argument
    callable producing one; both cases yield a string here.
    """
    src = self.source
    return src if isinstance(src, str) else src()
37,352 | def _writeText ( self , image , text , pos ) : offset = 0 x , y = pos for c in text : c_size = self . font . getsize ( c ) c_image = Image . new ( 'RGBA' , c_size , ( 0 , 0 , 0 , 0 ) ) c_draw = ImageDraw . Draw ( c_image ) c_draw . text ( ( 0 , 0 ) , c , font = self . font , fill = ( 0 , 0 , 0 , 255 ) ) c_image = self . _rndLetterTransform ( c_image ) image . paste ( c_image , ( x + offset , y ) , c_image ) offset += c_size [ 0 ] | Write morphed text in Image object . |
37,353 | def _drawLine ( self , image ) : w , h = image . size w *= 5 h *= 5 l_image = Image . new ( 'RGBA' , ( w , h ) , ( 0 , 0 , 0 , 0 ) ) l_draw = ImageDraw . Draw ( l_image ) x1 = int ( w * random . uniform ( 0 , 0.1 ) ) y1 = int ( h * random . uniform ( 0 , 1 ) ) x2 = int ( w * random . uniform ( 0.9 , 1 ) ) y2 = int ( h * random . uniform ( 0 , 1 ) ) l_width = round ( ( w * h ) ** 0.5 * 2.284e-2 ) l_draw . line ( ( ( x1 , y1 ) , ( x2 , y2 ) ) , fill = ( 0 , 0 , 0 , 255 ) , width = l_width ) l_image = self . _rndLineTransform ( l_image ) l_image = l_image . resize ( image . size , resample = self . resample ) image . paste ( l_image , ( 0 , 0 ) , l_image ) | Draw morphed line in Image object . |
37,354 | def _whiteNoise ( self , size ) : if self . noise > 0.003921569 : w , h = size pixel = ( lambda noise : round ( 255 * random . uniform ( 1 - noise , 1 ) ) ) n_image = Image . new ( 'RGB' , size , ( 0 , 0 , 0 , 0 ) ) rnd_grid = map ( lambda _ : tuple ( [ pixel ( self . noise ) ] ) * 3 , [ 0 ] * w * h ) n_image . putdata ( list ( rnd_grid ) ) return n_image else : return None | Generate white noise and merge it with given Image object . |
37,355 | def _rndLetterTransform ( self , image ) : w , h = image . size dx = w * random . uniform ( 0.2 , 0.7 ) dy = h * random . uniform ( 0.2 , 0.7 ) x1 , y1 = self . __class__ . _rndPointDisposition ( dx , dy ) x2 , y2 = self . __class__ . _rndPointDisposition ( dx , dy ) w += abs ( x1 ) + abs ( x2 ) h += abs ( x1 ) + abs ( x2 ) quad = self . __class__ . _quadPoints ( ( w , h ) , ( x1 , y1 ) , ( x2 , y2 ) ) return image . transform ( image . size , Image . QUAD , data = quad , resample = self . resample ) | Randomly morph a single character . |
37,356 | def _rndPointDisposition ( dx , dy ) : x = int ( random . uniform ( - dx , dx ) ) y = int ( random . uniform ( - dy , dy ) ) return ( x , y ) | Return random disposition point . |
37,357 | def _quadPoints ( size , disp1 , disp2 ) : w , h = size x1 , y1 = disp1 x2 , y2 = disp2 return ( x1 , - y1 , - x1 , h + y2 , w + x2 , h - y2 , w - x2 , y1 ) | Return points for QUAD transformation . |
37,358 | def path_helper ( self , operations , view , ** kwargs ) : operations . update ( yaml_utils . load_operations_from_docstring ( view . __doc__ ) ) app = kwargs . get ( 'app' , _default_app ) route = self . _route_for_view ( app , view ) return self . bottle_path_to_openapi ( route . rule ) | Path helper that allows passing a bottle view function . |
37,359 | def path_helper ( self , operations , view , app = None , ** kwargs ) : rule = self . _rule_for_view ( view , app = app ) operations . update ( yaml_utils . load_operations_from_docstring ( view . __doc__ ) ) if hasattr ( view , 'view_class' ) and issubclass ( view . view_class , MethodView ) : for method in view . methods : if method in rule . methods : method_name = method . lower ( ) method = getattr ( view . view_class , method_name ) operations [ method_name ] = yaml_utils . load_yaml_from_docstring ( method . __doc__ ) return self . flaskpath2openapi ( rule . rule ) | Path helper that allows passing a Flask view function . |
37,360 | def _operations_from_methods ( handler_class ) : for httpmethod in yaml_utils . PATH_KEYS : method = getattr ( handler_class , httpmethod ) operation_data = yaml_utils . load_yaml_from_docstring ( method . __doc__ ) if operation_data : operation = { httpmethod : operation_data } yield operation | Generator of operations described in handler s http methods |
37,361 | def tornadopath2openapi ( urlspec , method ) : if sys . version_info >= ( 3 , 3 ) : args = list ( inspect . signature ( method ) . parameters . keys ( ) ) [ 1 : ] else : if getattr ( method , '__tornado_coroutine__' , False ) : method = method . __wrapped__ args = inspect . getargspec ( method ) . args [ 1 : ] params = tuple ( '{{{}}}' . format ( arg ) for arg in args ) try : path_tpl = urlspec . matcher . _path except AttributeError : path_tpl = urlspec . _path path = ( path_tpl % params ) if path . count ( '/' ) > 1 : path = path . rstrip ( '/?*' ) return path | Convert Tornado URLSpec to OpenAPI - compliant path . |
37,362 | def path_helper ( self , operations , urlspec , ** kwargs ) : if not isinstance ( urlspec , URLSpec ) : urlspec = URLSpec ( * urlspec ) for operation in self . _operations_from_methods ( urlspec . handler_class ) : operations . update ( operation ) if not operations : raise APISpecError ( 'Could not find endpoint for urlspec {0}' . format ( urlspec ) , ) params_method = getattr ( urlspec . handler_class , list ( operations . keys ( ) ) [ 0 ] ) operations . update ( self . _extensions_from_handler ( urlspec . handler_class ) ) return self . tornadopath2openapi ( urlspec , params_method ) | Path helper that allows passing a Tornado URLSpec or tuple . |
37,363 | def generate_mfa_token ( self , user_id , expires_in = 259200 , reusable = False ) : self . clean_error ( ) try : url = self . get_url ( Constants . GENERATE_MFA_TOKEN_URL , user_id ) data = { 'expires_in' : expires_in , 'reusable' : reusable } response = self . execute_call ( 'post' , url , json = data ) if response . status_code == 201 : json_data = response . json ( ) if json_data : return MFAToken ( json_data ) else : self . error = self . extract_status_code_from_response ( response ) self . error_description = self . extract_error_message_from_response ( response ) except Exception as e : self . error = 500 self . error_description = e . args [ 0 ] | Use to generate a temporary MFA token that can be used in place of other MFA tokens for a set time period . For example use this token for account recovery . |
def camel_to_snake(camel):
    """Convert a camelCase identifier to snake_case.

    An underscore is inserted only at a lower-to-upper transition, so
    runs of capitals (e.g. acronyms) are lowered without separators.
    """
    pieces = []
    prev_was_lower = False
    for ch in camel:
        is_upper = ch == ch.upper()
        if is_upper and prev_was_lower:
            pieces.append("_")
        pieces.append(ch.lower())
        prev_was_lower = not is_upper
    return "".join(pieces)
def register_id(self, id_string):
    """Record a manually assigned id as used, to avoid future collisions.

    Only ids of the form ``<prefix>_<int>`` with a matching prefix bump
    the counter; anything else is silently ignored.
    """
    try:
        prefix, suffix = id_string.rsplit("_", 1)
        numeric = int(suffix)
    except ValueError:
        # Not of the "<prefix>_<int>" form -- nothing to track.
        return
    if prefix == self.prefix:
        self.counter = max(self.counter, numeric)
37,366 | def parse ( cls , root ) : if root . tag != utils . lxmlns ( "mets" ) + "amdSec" : raise exceptions . ParseError ( "AMDSec can only parse amdSec elements with METS namespace." ) section_id = root . get ( "ID" ) subsections = [ ] for child in root : subsection = SubSection . parse ( child ) subsections . append ( subsection ) return cls ( section_id , subsections ) | Create a new AMDSec by parsing root . |
37,367 | def serialize ( self , now = None ) : if self . _tree is not None : return self . _tree el = etree . Element ( utils . lxmlns ( "mets" ) + self . tag , ID = self . id_string ) self . subsections . sort ( ) for child in self . subsections : el . append ( child . serialize ( now ) ) return el | Serialize this amdSec and all children to lxml Element and return it . |
37,368 | def parse ( cls , element ) : if element . tag != cls . ALT_RECORD_ID_TAG : raise exceptions . ParseError ( u"AltRecordID got unexpected tag {}; expected {}" . format ( element . tag , cls . ALT_RECORD_ID_TAG ) ) return cls ( element . text , id = element . get ( u"ID" ) , type = element . get ( u"TYPE" ) ) | Create a new AltRecordID by parsing root . |
37,369 | def parse ( cls , element ) : if element . tag != cls . AGENT_TAG : raise exceptions . ParseError ( u"Agent got unexpected tag {}; expected {}" . format ( element . tag , cls . AGENT_TAG ) ) role = element . get ( u"ROLE" ) if not role : raise exceptions . ParseError ( u"Agent must have a ROLE attribute." ) if role == u"OTHER" : role = element . get ( u"OTHERROLE" ) or role agent_type = element . get ( u"TYPE" ) if agent_type == u"OTHER" : agent_type = element . get ( u"OTHERTYPE" ) or agent_type agent_id = element . get ( u"ID" ) try : name = element . find ( cls . NAME_TAG ) . text except AttributeError : name = None notes = [ note . text for note in element . findall ( cls . NOTE_TAG ) ] return cls ( role , id = agent_id , type = agent_type , name = name , notes = notes ) | Create a new Agent by parsing root . |
def get_status(self):
    """Return the STATUS value to use when serializing.

    An explicitly assigned status wins.  Otherwise dmdSec entries are
    "original"/"updated" depending on whether an older version exists,
    and techMD/rightsMD entries are "current"/"superseded" depending on
    whether a newer version exists.  Other subsections have no status.
    """
    if self.status is not None:
        return self.status
    if self.subsection == "dmdSec":
        return "updated" if self.older is not None else "original"
    if self.subsection in ("techMD", "rightsMD"):
        return "superseded" if self.newer is not None else "current"
    return None
def replace_with(self, new_subsection):
    """Mark this SubSection as superseded by *new_subsection*.

    Both sections must be of the same subsection type.  The two objects
    are linked via ``newer``/``older``, and any explicit status on the
    old section is cleared so it is recomputed on serialization.
    """
    if new_subsection.subsection != self.subsection:
        raise exceptions.MetsError(
            "Must replace a SubSection with one of the same type.")
    self.newer, new_subsection.older = new_subsection, self
    self.status = None
37,372 | def parse ( cls , root ) : subsection = root . tag . replace ( utils . lxmlns ( "mets" ) , "" , 1 ) if subsection not in cls . ALLOWED_SUBSECTIONS : raise exceptions . ParseError ( "SubSection can only parse elements with tag in %s with METS namespace" % ( cls . ALLOWED_SUBSECTIONS , ) ) section_id = root . get ( "ID" ) created = root . get ( "CREATED" , "" ) status = root . get ( "STATUS" , "" ) child = root [ 0 ] if child . tag == utils . lxmlns ( "mets" ) + "mdWrap" : mdwrap = MDWrap . parse ( child ) obj = cls ( subsection , mdwrap , section_id ) elif child . tag == utils . lxmlns ( "mets" ) + "mdRef" : mdref = MDRef . parse ( child ) obj = cls ( subsection , mdref , section_id ) else : raise exceptions . ParseError ( "Child of %s must be mdWrap or mdRef" % subsection ) obj . created = created obj . status = status return obj | Create a new SubSection by parsing root . |
37,373 | def serialize ( self , now = None ) : created = self . created if self . created is not None else now el = etree . Element ( utils . lxmlns ( "mets" ) + self . subsection , ID = self . id_string ) if created : el . set ( "CREATED" , created ) status = self . get_status ( ) if status : el . set ( "STATUS" , status ) if self . contents : el . append ( self . contents . serialize ( ) ) return el | Serialize this SubSection and all children to lxml Element and return it . |
37,374 | def _get_el_attributes ( lxml_el , ns = None , nsmap = None ) : attrs = { } for attr , val in lxml_el . items ( ) : attr = _to_colon_ns ( attr , default_ns = ns , nsmap = nsmap ) attrs [ attr ] = val return attrs | Return the XML attributes of lxml Element instance lxml_el as a dict where namespaced attributes are represented via colon - delimiting and using snake case . |
37,375 | def _lxml_el_to_data ( lxml_el , ns , nsmap , snake = True ) : tag_name = _to_colon_ns ( lxml_el . tag , default_ns = ns , nsmap = nsmap ) ret = [ tag_name ] attributes = _get_el_attributes ( lxml_el , ns = ns , nsmap = nsmap ) if attributes : ret . append ( attributes ) for sub_el in lxml_el : ret . append ( _lxml_el_to_data ( sub_el , ns , nsmap , snake = snake ) ) text = lxml_el . text if text : ret . append ( text ) return tuple ( ret ) | Convert an lxml . _Element instance to a Python tuple . |
37,376 | def premis_to_data ( premis_lxml_el ) : premis_version = premis_lxml_el . get ( "version" , utils . PREMIS_VERSION ) nsmap = utils . PREMIS_VERSIONS_MAP [ premis_version ] [ "namespaces" ] return _lxml_el_to_data ( premis_lxml_el , "premis" , nsmap ) | Transform a PREMIS lxml . _Element instance to a Python tuple . |
def data_find(data, path):
    """Find the first element-as-tuple in *data* matching *path*.

    *path* is a "/"-separated sequence of tag names, resolved one level
    at a time relative to *data* (an element-as-tuple whose tuple/list
    members are its child elements).  Returns the matched child tuple,
    or None when no child matches.
    """
    parts = path.split("/")
    match = next(
        (child for child in data
         if isinstance(child, (tuple, list)) and child[0] == parts[0]),
        None,
    )
    if match is None:
        return None
    if len(parts) > 1:
        return data_find(match, "/".join(parts[1:]))
    return match
def tuple_to_schema(tuple_):
    """Derive a schema tuple from an XML-as-tuple data structure.

    Leaf elements (those whose first child is text, or that have no
    children at all) collapse to a 1-tuple of their tag name; elements
    with element children recurse.  Non-tuple members (the tag name,
    attribute dicts, text) are kept as-is.

    Uses a plain ``str`` check instead of the original third-party
    ``six.string_types`` — equivalent on Python 3 and drops the
    dependency.
    """
    schema = []
    for element in tuple_:
        if not isinstance(element, (tuple, list)):
            schema.append(element)
            continue
        try:
            first_child = element[1]
        except IndexError:
            # No children at all: a bare tag becomes a leaf.
            schema.append((element[0],))
            continue
        if isinstance(first_child, str):
            # First child is text: a text leaf becomes a bare tag.
            schema.append((element[0],))
        else:
            schema.append(tuple_to_schema(element))
    return tuple(schema)
37,379 | def generate_element_class ( tuple_instance ) : schema = tuple_to_schema ( tuple_instance ) def defaults ( self ) : return { } def schema_getter ( self ) : return schema new_class_name = "PREMIS{}Element" . format ( schema [ 0 ] . capitalize ( ) ) return type ( new_class_name , ( PREMISElement , ) , { "defaults" : property ( defaults ) , "schema" : property ( schema_getter ) } , ) | Dynamically create a sub - class of PREMISElement given tuple_instance which is a tuple representing an XML data structure . |
def data_find_all(data, path, dyn_cls=False):
    """Find all element-as-tuples in *data* matching "/"-separated *path*.

    When *dyn_cls* is true and matches exist, each match is wrapped in a
    dynamically generated element class instead of being returned as a
    raw tuple.  Returns a tuple of matches (possibly empty).
    """
    parts = path.split("/")
    try:
        matches = tuple(
            child for child in data
            if isinstance(child, (tuple, list)) and child[0] == parts[0]
        )
    except IndexError:
        return None
    if len(parts) > 1:
        remainder = "/".join(parts[1:])
        found = []
        for match in matches:
            found.extend(data_find_all(match, remainder))
        found = tuple(found)
    else:
        found = matches
    if found and dyn_cls:
        element_cls = generate_element_class(found[0])
        return tuple(element_cls(data=item) for item in found)
    return found
def data_find_text(data, path):
    """Return the text content of the element at *path*, or None.

    Text is every non-element, non-attribute child of the matched
    element, joined with single spaces.  ``bytes`` children are decoded
    as UTF-8 (strict) — matching the original ``six.ensure_text`` call
    on Python 3, but without the third-party dependency.
    """
    element = data_find(data, path)
    if not isinstance(element, (list, tuple)):
        return None
    texts = [
        child for child in element[1:]
        if not isinstance(child, (tuple, list, dict))
    ]
    if not texts:
        return None
    return " ".join(
        chunk.decode("utf-8", "strict") if isinstance(chunk, bytes) else chunk
        for chunk in texts
    )
37,382 | def _generate_data ( schema , elements , attributes = None , path = None ) : path = path or [ ] attributes = attributes or { } tag_name = schema [ 0 ] data = [ tag_name ] if attributes : data . append ( attributes ) new_path = path [ : ] new_path . append ( tag_name ) root = new_path [ 0 ] possible_paths = [ "__" . join ( new_path ) , tag_name ] if root != tag_name and tag_name . startswith ( root ) : possible_paths . append ( tag_name . lstrip ( root ) [ 1 : ] ) for possible_path in possible_paths : val = elements . get ( possible_path ) if val : if isinstance ( val , ( tuple , list ) ) : data = tuple ( val ) else : if attributes : data = ( tag_name , attributes , val ) else : data = ( tag_name , val ) return tuple ( data ) for subschema in schema [ 1 : ] : subel = _generate_data ( subschema , elements , path = new_path ) if ( not subel ) or ( subel == subschema ) : continue if all ( map ( lambda x : isinstance ( x , tuple ) , subel ) ) : for subsubel in subel : data . append ( subsubel ) elif not el_is_empty ( subel ) : data . append ( subel ) return tuple ( data ) | Using tree - as - tuple schema as guide return a tree - as - tuple data representing a PREMIS XML element where the values in dict elements and the values in dict attributes are located in the appropriate locations in the data tree structure . |
def el_is_empty(el):
    """Return True if tuple *el* represents an empty XML element.

    A 1-tuple whose only member is a non-tuple is empty; otherwise
    every member must itself be "empty" — falsy when it is text or an
    attribute value, or recursively empty when it is a child tuple.
    """
    if len(el) == 1 and not isinstance(el[0], (list, tuple)):
        return True
    return all(
        el_is_empty(member) if isinstance(member, (list, tuple)) else not member
        for member in el
    )
37,384 | def _premis_version_from_data ( data ) : for child in data : if isinstance ( child , dict ) : version = child . get ( "version" ) if version : return version return utils . PREMIS_VERSION | Given tuple data encoding a PREMIS element attempt to return the PREMIS version it is using . If none can be found return the default PREMIS version . |
37,385 | def compression_details ( self ) : event_type = self . findtext ( "event_type" ) if event_type != "compression" : raise AttributeError ( 'PREMIS events of type "{}" have no compression' " details" . format ( event_type ) ) parsed_compression_event_detail = self . parsed_event_detail compression_program = _get_event_detail_attr ( "program" , parsed_compression_event_detail ) compression_algorithm = _get_event_detail_attr ( "algorithm" , parsed_compression_event_detail ) compression_program_version = _get_event_detail_attr ( "version" , parsed_compression_event_detail ) archive_tool = { "7z" : "7-Zip" } . get ( compression_program , compression_program ) return compression_algorithm , compression_program_version , archive_tool | Return as a 3 - tuple this PREMIS compression event s program version and algorithm used to perform the compression . |
37,386 | def encryption_details ( self ) : event_type = self . findtext ( "event_type" ) if event_type != "encryption" : raise AttributeError ( 'PREMIS events of type "{}" have no encryption' " details" . format ( event_type ) ) parsed_encryption_event_detail = self . parsed_event_detail encryption_program = _get_event_detail_attr ( "program" , parsed_encryption_event_detail ) encryption_program_version = _get_event_detail_attr ( "version" , parsed_encryption_event_detail ) encryption_key = _get_event_detail_attr ( "key" , parsed_encryption_event_detail ) return encryption_program , encryption_program_version , encryption_key | Return as a 3 - tuple this PREMIS encryption event s program version and key used to perform the encryption . |
def has_class_methods(*class_method_names):
    """Build a predicate checking a class for the given classmethods.

    The returned test accepts either a class or an instance (its class
    is used), and returns True only when every name in
    *class_method_names* resolves to a classmethod bound to that class.
    """
    def test(cls):
        if not isinstance(cls, type):
            cls = type(cls)
        for name in class_method_names:
            try:
                candidate = getattr(cls, name)
            except AttributeError:
                return False
            # A classmethod's bound __self__ is the class itself;
            # plain functions/methods fail this check.
            if getattr(candidate, "__self__", None) is not cls:
                return False
        return True
    return test
37,388 | def _urlendecode ( url , func ) : parsed = urlparse ( url ) for attr in URL_ENCODABLE_PARTS : parsed = parsed . _replace ( ** { attr : func ( getattr ( parsed , attr ) ) } ) return urlunparse ( parsed ) | Encode or decode url by applying func to all of its URL - encodable parts . |
37,389 | def autodiscover ( ) : from django . conf import settings try : from django . utils . module_loading import import_module except ImportError : from django . utils . importlib import import_module from django . utils . module_loading import module_has_submodule from backbone . views import BackboneAPIView for app in settings . INSTALLED_APPS : mod = import_module ( app ) try : import_module ( '%s.backbone_api' % app ) except : if module_has_submodule ( mod , 'backbone_api' ) : raise | Auto - discover INSTALLED_APPS backbone_api . py modules . |
def register(self, backbone_view_class):
    """Register *backbone_view_class*, keeping the registry duplicate-free."""
    if backbone_view_class in self._registry:
        return  # already registered -- nothing to do
    self._registry.append(backbone_view_class)
37,391 | def get ( self , request , id = None , ** kwargs ) : if not self . has_get_permission ( request ) : return HttpResponseForbidden ( _ ( 'You do not have permission to perform this action.' ) ) if id : obj = get_object_or_404 ( self . queryset ( request , ** kwargs ) , id = id ) return self . get_object_detail ( request , obj ) else : return self . get_collection ( request , ** kwargs ) | Handles get requests for either the collection or an object detail . |
37,392 | def get_object_detail ( self , request , obj ) : if self . display_detail_fields : display_fields = self . display_detail_fields else : display_fields = self . display_fields data = self . serialize ( obj , [ 'id' ] + list ( display_fields ) ) return HttpResponse ( self . json_dumps ( data ) , content_type = 'application/json' ) | Handles get requests for the details of the given object . |
37,393 | def get_collection ( self , request , ** kwargs ) : qs = self . queryset ( request , ** kwargs ) if self . display_collection_fields : display_fields = self . display_collection_fields else : display_fields = self . display_fields if self . paginate_by is not None : page = request . GET . get ( 'page' , 1 ) paginator = Paginator ( qs , self . paginate_by ) try : qs = paginator . page ( page ) . object_list except PageNotAnInteger : data = _ ( 'Invalid `page` parameter: Not a valid integer.' ) return HttpResponseBadRequest ( data ) except EmptyPage : data = _ ( 'Invalid `page` parameter: Out of range.' ) return HttpResponseBadRequest ( data ) data = [ self . serialize ( obj , [ 'id' ] + list ( display_fields ) ) for obj in qs ] return HttpResponse ( self . json_dumps ( data ) , content_type = 'application/json' ) | Handles get requests for the list of objects . |
37,394 | def post ( self , request , id = None , ** kwargs ) : if id : return HttpResponseForbidden ( ) else : if not self . has_add_permission ( request ) : return HttpResponseForbidden ( _ ( 'You do not have permission to perform this action.' ) ) else : return self . add_object ( request ) | Handles post requests . |
37,395 | def add_object ( self , request ) : try : data = json . loads ( request . body if hasattr ( request , 'body' ) else request . raw_post_data ) except ValueError : return HttpResponseBadRequest ( _ ( 'Unable to parse JSON request body.' ) ) form = self . get_form_instance ( request , data = data ) if form . is_valid ( ) : if not self . has_add_permission_for_data ( request , form . cleaned_data ) : return HttpResponseForbidden ( _ ( 'You do not have permission to perform this action.' ) ) obj = form . save ( ) response = self . get_object_detail ( request , obj ) response . status_code = 201 opts = self . model . _meta url_slug = self . url_slug or ( opts . model_name if hasattr ( opts , 'model_name' ) else opts . module_name ) url_name = 'backbone:%s_%s_detail' % ( self . model . _meta . app_label , url_slug ) response [ 'Location' ] = reverse ( url_name , args = [ obj . id ] ) return response else : return HttpResponseBadRequest ( self . json_dumps ( form . errors ) , content_type = 'application/json' ) | Adds an object . |
37,396 | def put ( self , request , id = None , ** kwargs ) : if id : obj = get_object_or_404 ( self . queryset ( request ) , id = id ) if not self . has_update_permission ( request , obj ) : return HttpResponseForbidden ( _ ( 'You do not have permission to perform this action.' ) ) else : return self . update_object ( request , obj ) else : return HttpResponseForbidden ( ) | Handles put requests . |
37,397 | def update_object ( self , request , obj ) : try : data = json . loads ( request . body if hasattr ( request , 'body' ) else request . raw_post_data ) except ValueError : return HttpResponseBadRequest ( _ ( 'Unable to parse JSON request body.' ) ) form = self . get_form_instance ( request , data = data , instance = obj ) if form . is_valid ( ) : if not self . has_update_permission_for_data ( request , form . cleaned_data ) : return HttpResponseForbidden ( _ ( 'You do not have permission to perform this action.' ) ) form . save ( ) return self . get_object_detail ( request , obj ) else : return HttpResponseBadRequest ( self . json_dumps ( form . errors ) , content_type = 'application/json' ) | Updates an object . |
37,398 | def get_form_instance ( self , request , data = None , instance = None ) : defaults = { } if self . form : defaults [ 'form' ] = self . form if self . fields : defaults [ 'fields' ] = self . fields return modelform_factory ( self . model , ** defaults ) ( data = data , instance = instance ) | Returns an instantiated form to be used for adding or editing an object . |
37,399 | def delete ( self , request , id = None ) : if id : obj = get_object_or_404 ( self . queryset ( request ) , id = id ) if not self . has_delete_permission ( request , obj ) : return HttpResponseForbidden ( _ ( 'You do not have permission to perform this action.' ) ) else : return self . delete_object ( request , obj ) else : return HttpResponseForbidden ( ) | Handles delete requests . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.