query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Fetch account groups and extra data from resource if necessary.
def account_groups_and_extra_data(account, resource, refresh_timedelta=None):
    """Fetch account groups and extra data from resource if necessary.

    :param account: The remote account holding cached ``extra_data``.
    :param resource: Mapping of CERN resource data (must contain ``'Group'``).
    :param refresh_timedelta: Optional timedelta shifting the freshness cutoff.
    :returns: List of group names for the account.
    """
    now = datetime.utcnow()
    threshold = now if refresh_timedelta is None else now + refresh_timedelta
    threshold = threshold.isoformat()
    last_update = account.extra_data.get('updated', threshold)
    if last_update > threshold:
        # Cached data is still fresh enough -- reuse the stored groups.
        return account.extra_data.get('groups', [])
    groups = fetch_groups(resource['Group'])
    serializer = current_app.config.get(
        'OAUTHCLIENT_CERN_EXTRA_DATA_SERIALIZER', fetch_extra_data)
    account.extra_data.update(
        groups=groups, updated=now.isoformat(), **serializer(resource))
    return groups
1,100
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L232-L256
[ "async", "def", "_publish", "(", "self", ",", "model", ",", "path", ")", ":", "if", "model", "[", "'type'", "]", "!=", "'notebook'", ":", "raise", "web", ".", "HTTPError", "(", "400", ",", "\"bookstore only publishes notebooks\"", ")", "content", "=", "model", "[", "'content'", "]", "full_s3_path", "=", "s3_path", "(", "self", ".", "bookstore_settings", ".", "s3_bucket", ",", "self", ".", "bookstore_settings", ".", "published_prefix", ",", "path", ")", "file_key", "=", "s3_key", "(", "self", ".", "bookstore_settings", ".", "published_prefix", ",", "path", ")", "self", ".", "log", ".", "info", "(", "\"Publishing to %s\"", ",", "s3_display_path", "(", "self", ".", "bookstore_settings", ".", "s3_bucket", ",", "self", ".", "bookstore_settings", ".", "published_prefix", ",", "path", ")", ",", ")", "async", "with", "self", ".", "session", ".", "create_client", "(", "'s3'", ",", "aws_secret_access_key", "=", "self", ".", "bookstore_settings", ".", "s3_secret_access_key", ",", "aws_access_key_id", "=", "self", ".", "bookstore_settings", ".", "s3_access_key_id", ",", "endpoint_url", "=", "self", ".", "bookstore_settings", ".", "s3_endpoint_url", ",", "region_name", "=", "self", ".", "bookstore_settings", ".", "s3_region_name", ",", ")", "as", "client", ":", "self", ".", "log", ".", "info", "(", "\"Processing published write of %s\"", ",", "path", ")", "obj", "=", "await", "client", ".", "put_object", "(", "Bucket", "=", "self", ".", "bookstore_settings", ".", "s3_bucket", ",", "Key", "=", "file_key", ",", "Body", "=", "json", ".", "dumps", "(", "content", ")", ")", "self", ".", "log", ".", "info", "(", "\"Done with published write of %s\"", ",", "path", ")", "self", ".", "set_status", "(", "201", ")", "resp_content", "=", "{", "\"s3path\"", ":", "full_s3_path", "}", "if", "'VersionId'", "in", "obj", ":", "resp_content", "[", "\"versionID\"", "]", "=", "obj", "[", "'VersionId'", "]", "resp_str", "=", "json", ".", "dumps", "(", "resp_content", ")", "self", ".", "finish", 
"(", "resp_str", ")" ]
Extend identity with roles based on CERN groups .
def extend_identity(identity, groups):
    """Extend identity with roles based on CERN groups.

    Adds a ``UserNeed`` for the current user plus one ``RoleNeed`` per CERN
    group, and caches the resulting needs in the session.
    """
    needs = {UserNeed(current_user.email)}
    needs.update(RoleNeed('{0}@cern.ch'.format(group)) for group in groups)
    identity.provides |= needs
    session[OAUTHCLIENT_CERN_SESSION_KEY] = needs
1,101
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L259-L265
[ "def", "urlstate", "(", "self", ",", "encryption_key", ")", ":", "lzma", "=", "LZMACompressor", "(", ")", "urlstate_data", "=", "json", ".", "dumps", "(", "self", ".", "_state_dict", ")", "urlstate_data", "=", "lzma", ".", "compress", "(", "urlstate_data", ".", "encode", "(", "\"UTF-8\"", ")", ")", "urlstate_data", "+=", "lzma", ".", "flush", "(", ")", "urlstate_data", "=", "_AESCipher", "(", "encryption_key", ")", ".", "encrypt", "(", "urlstate_data", ")", "lzma", "=", "LZMACompressor", "(", ")", "urlstate_data", "=", "lzma", ".", "compress", "(", "urlstate_data", ")", "urlstate_data", "+=", "lzma", ".", "flush", "(", ")", "urlstate_data", "=", "base64", ".", "urlsafe_b64encode", "(", "urlstate_data", ")", "return", "urlstate_data", ".", "decode", "(", "\"utf-8\"", ")" ]
Prepare a new mapping with values grouped by type.
def get_dict_from_response(response):
    """Prepare a new mapping with ``Value`` entries grouped by ``Type``.

    :param response: OAuth remote-app response whose ``data`` is a list of
        ``{'Type': ..., 'Value': ...}`` items.
    :returns: Dict mapping each schema-stripped type to a list of values;
        empty when the underlying HTTP response is an error.
    """
    result = {}
    # Use >= 400 (not > 400) so a plain 400 Bad Request is treated as an
    # error too; also give getattr a default so a response without the
    # private ``_resp`` attribute does not raise AttributeError.
    resp = getattr(response, '_resp', None)
    if resp is not None and resp.code >= 400:
        return result
    for item in response.data:
        # strip the schema from the key
        key = item['Type'].replace(REMOTE_APP_RESOURCE_SCHEMA, '')
        result.setdefault(key, [])
        result[key].append(item['Value'])
    return result
1,102
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L274-L285
[ "def", "sync_blockchain", "(", "working_dir", ",", "bt_opts", ",", "last_block", ",", "server_state", ",", "expected_snapshots", "=", "{", "}", ",", "*", "*", "virtualchain_args", ")", ":", "subdomain_index", "=", "server_state", "[", "'subdomains'", "]", "atlas_state", "=", "server_state", "[", "'atlas'", "]", "# make this usable even if we haven't explicitly configured virtualchain ", "impl", "=", "sys", ".", "modules", "[", "__name__", "]", "log", ".", "info", "(", "\"Synchronizing database {} up to block {}\"", ".", "format", "(", "working_dir", ",", "last_block", ")", ")", "# NOTE: this is the only place where a read-write handle should be created,", "# since this is the only place where the db should be modified.", "new_db", "=", "BlockstackDB", ".", "borrow_readwrite_instance", "(", "working_dir", ",", "last_block", ",", "expected_snapshots", "=", "expected_snapshots", ")", "# propagate runtime state to virtualchain callbacks", "new_db", ".", "subdomain_index", "=", "subdomain_index", "new_db", ".", "atlas_state", "=", "atlas_state", "rc", "=", "virtualchain", ".", "sync_virtualchain", "(", "bt_opts", ",", "last_block", ",", "new_db", ",", "expected_snapshots", "=", "expected_snapshots", ",", "*", "*", "virtualchain_args", ")", "BlockstackDB", ".", "release_readwrite_instance", "(", "new_db", ",", "last_block", ")", "return", "rc" ]
Query CERN Resources to get user info and groups .
def get_resource(remote):
    """Query CERN Resources to get user info and groups.

    Returns the session-cached resource when present, otherwise fetches,
    parses and caches a fresh copy.
    """
    cached = session.pop('cern_resource', None)
    if cached:
        return cached
    response = remote.get(REMOTE_APP_RESOURCE_API_URL)
    parsed = get_dict_from_response(response)
    session['cern_resource'] = parsed
    return parsed
1,103
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L288-L297
[ "def", "_CompressHistogram", "(", "self", ",", "histo_ev", ")", ":", "return", "CompressedHistogramEvent", "(", "histo_ev", ".", "wall_time", ",", "histo_ev", ".", "step", ",", "compressor", ".", "compress_histogram_proto", "(", "histo_ev", ".", "histogram_value", ",", "self", ".", "_compression_bps", ")", ")" ]
Store groups in session whenever identity changes .
def on_identity_changed(sender, identity):
    """Store groups in session whenever identity changes.

    Anonymous identities are ignored; for authenticated users the CERN
    groups are resolved (with caching) and folded into the identity.
    """
    if isinstance(identity, AnonymousIdentity):
        # Nothing to resolve for anonymous users.
        return
    client_id = current_app.config['CERN_APP_CREDENTIALS']['consumer_key']
    account = RemoteAccount.get(
        user_id=current_user.get_id(),
        client_id=client_id,
    )
    groups = []
    if account:
        remote = find_remote_by_client_id(client_id)
        resource = get_resource(remote)
        refresh_delta = current_app.config.get(
            'OAUTHCLIENT_CERN_REFRESH_TIMEDELTA',
            OAUTHCLIENT_CERN_REFRESH_TIMEDELTA,
        )
        groups = list(account_groups_and_extra_data(
            account, resource, refresh_timedelta=refresh_delta))
    extend_identity(identity, groups)
1,104
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/cern.py#L373-L400
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
Get RemoteAccount object for user .
def get(cls, user_id, client_id):
    """Get RemoteAccount object for user.

    :param user_id: Identifier of the user.
    :param client_id: Identifier of the OAuth client application.
    :returns: The matching account or ``None``.
    """
    query = cls.query.filter_by(user_id=user_id, client_id=client_id)
    return query.first()
1,105
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/models.py#L63-L73
[ "def", "_repack_archive", "(", "archive1", ",", "archive2", ",", "verbosity", "=", "0", ",", "interactive", "=", "True", ")", ":", "format1", ",", "compression1", "=", "get_archive_format", "(", "archive1", ")", "format2", ",", "compression2", "=", "get_archive_format", "(", "archive2", ")", "if", "format1", "==", "format2", "and", "compression1", "==", "compression2", ":", "# same format and compression allows to copy the file", "util", ".", "link_or_copy", "(", "archive1", ",", "archive2", ",", "verbosity", "=", "verbosity", ")", "return", "tmpdir", "=", "util", ".", "tmpdir", "(", ")", "try", ":", "kwargs", "=", "dict", "(", "verbosity", "=", "verbosity", ",", "outdir", "=", "tmpdir", ")", "same_format", "=", "(", "format1", "==", "format2", "and", "compression1", "and", "compression2", ")", "if", "same_format", ":", "# only decompress since the format is the same", "kwargs", "[", "'format'", "]", "=", "compression1", "path", "=", "_extract_archive", "(", "archive1", ",", "*", "*", "kwargs", ")", "archive", "=", "os", ".", "path", ".", "abspath", "(", "archive2", ")", "files", "=", "tuple", "(", "os", ".", "listdir", "(", "path", ")", ")", "olddir", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "path", ")", "try", ":", "kwargs", "=", "dict", "(", "verbosity", "=", "verbosity", ",", "interactive", "=", "interactive", ")", "if", "same_format", ":", "# only compress since the format is the same", "kwargs", "[", "'format'", "]", "=", "compression2", "_create_archive", "(", "archive", ",", "files", ",", "*", "*", "kwargs", ")", "finally", ":", "os", ".", "chdir", "(", "olddir", ")", "finally", ":", "shutil", ".", "rmtree", "(", "tmpdir", ",", "onerror", "=", "rmtree_log_error", ")" ]
Create new remote account for user .
def create(cls, user_id, client_id, extra_data):
    """Create new remote account for user.

    :param user_id: Identifier of the user.
    :param client_id: Identifier of the OAuth client application.
    :param extra_data: Mapping of extra data (``None`` becomes ``{}``).
    :returns: The newly created account instance.
    """
    with db.session.begin_nested():
        account = cls(
            user_id=user_id,
            client_id=client_id,
            extra_data=extra_data or {},
        )
        db.session.add(account)
    return account
1,106
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/models.py#L76-L92
[ "def", "fromtif", "(", "path", ",", "ext", "=", "'tif'", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "recursive", "=", "False", ",", "nplanes", "=", "None", ",", "npartitions", "=", "None", ",", "labels", "=", "None", ",", "engine", "=", "None", ",", "credentials", "=", "None", ",", "discard_extra", "=", "False", ")", ":", "from", "tifffile", "import", "TiffFile", "if", "nplanes", "is", "not", "None", "and", "nplanes", "<=", "0", ":", "raise", "ValueError", "(", "'nplanes must be positive if passed, got %d'", "%", "nplanes", ")", "def", "getarray", "(", "idx_buffer_filename", ")", ":", "idx", ",", "buf", ",", "fname", "=", "idx_buffer_filename", "fbuf", "=", "BytesIO", "(", "buf", ")", "tfh", "=", "TiffFile", "(", "fbuf", ")", "ary", "=", "tfh", ".", "asarray", "(", ")", "pageCount", "=", "ary", ".", "shape", "[", "0", "]", "if", "nplanes", "is", "not", "None", ":", "extra", "=", "pageCount", "%", "nplanes", "if", "extra", ":", "if", "discard_extra", ":", "pageCount", "=", "pageCount", "-", "extra", "logging", ".", "getLogger", "(", "'thunder'", ")", ".", "warn", "(", "'Ignored %d pages in file %s'", "%", "(", "extra", ",", "fname", ")", ")", "else", ":", "raise", "ValueError", "(", "\"nplanes '%d' does not evenly divide '%d in file %s'\"", "%", "(", "nplanes", ",", "pageCount", ",", "fname", ")", ")", "values", "=", "[", "ary", "[", "i", ":", "(", "i", "+", "nplanes", ")", "]", "for", "i", "in", "range", "(", "0", ",", "pageCount", ",", "nplanes", ")", "]", "else", ":", "values", "=", "[", "ary", "]", "tfh", ".", "close", "(", ")", "if", "ary", ".", "ndim", "==", "3", ":", "values", "=", "[", "val", ".", "squeeze", "(", ")", "for", "val", "in", "values", "]", "nvals", "=", "len", "(", "values", ")", "keys", "=", "[", "(", "idx", "*", "nvals", "+", "timepoint", ",", ")", "for", "timepoint", "in", "range", "(", "nvals", ")", "]", "return", "zip", "(", "keys", ",", "values", ")", "recount", "=", "False", "if", "nplanes", "is", "None", "else", "True", 
"data", "=", "frompath", "(", "path", ",", "accessor", "=", "getarray", ",", "ext", "=", "ext", ",", "start", "=", "start", ",", "stop", "=", "stop", ",", "recursive", "=", "recursive", ",", "npartitions", "=", "npartitions", ",", "recount", "=", "recount", ",", "labels", "=", "labels", ",", "engine", "=", "engine", ",", "credentials", "=", "credentials", ")", "if", "engine", "is", "not", "None", "and", "npartitions", "is", "not", "None", "and", "data", ".", "npartitions", "(", ")", "<", "npartitions", ":", "data", "=", "data", ".", "repartition", "(", "npartitions", ")", "return", "data" ]
Update token with new values .
def update_token(self, token, secret):
    """Update token with new values, persisting only when something changed."""
    if (token, secret) == (self.access_token, self.secret):
        # No change -- avoid an unnecessary database write.
        return
    with db.session.begin_nested():
        self.access_token = token
        self.secret = secret
        db.session.add(self)
1,107
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/models.py#L153-L163
[ "def", "render_category", "(", "category", "=", "''", ",", "template", "=", "None", ")", ":", "# pylint:disable=too-many-return-statements", "# See if this is an aliased path", "redir", "=", "get_redirect", "(", ")", "if", "redir", ":", "return", "redir", "# Forbidden template types", "if", "template", "and", "template", ".", "startswith", "(", "'_'", ")", ":", "raise", "http_error", ".", "Forbidden", "(", "\"Template is private\"", ")", "if", "template", "in", "[", "'entry'", ",", "'error'", "]", ":", "raise", "http_error", ".", "BadRequest", "(", "\"Invalid view requested\"", ")", "if", "category", ":", "# See if there's any entries for the view...", "if", "not", "orm", ".", "select", "(", "e", "for", "e", "in", "model", ".", "Entry", "if", "e", ".", "category", "==", "category", "or", "e", ".", "category", ".", "startswith", "(", "category", "+", "'/'", ")", ")", ":", "raise", "http_error", ".", "NotFound", "(", "\"No such category\"", ")", "if", "not", "template", ":", "template", "=", "Category", "(", "category", ")", ".", "get", "(", "'Index-Template'", ")", "or", "'index'", "tmpl", "=", "map_template", "(", "category", ",", "template", ")", "if", "not", "tmpl", ":", "# this might actually be a malformed category URL", "test_path", "=", "'/'", ".", "join", "(", "(", "category", ",", "template", ")", ")", "if", "category", "else", "template", "logger", ".", "debug", "(", "\"Checking for malformed category %s\"", ",", "test_path", ")", "record", "=", "orm", ".", "select", "(", "e", "for", "e", "in", "model", ".", "Entry", "if", "e", ".", "category", "==", "test_path", ")", ".", "exists", "(", ")", "if", "record", ":", "return", "redirect", "(", "url_for", "(", "'category'", ",", "category", "=", "test_path", ",", "*", "*", "request", ".", "args", ")", ")", "# nope, we just don't know what this is", "raise", "http_error", ".", "NotFound", "(", "\"No such view\"", ")", "view_spec", "=", "view", ".", "parse_view_spec", "(", "request", ".", "args", ")", 
"view_spec", "[", "'category'", "]", "=", "category", "view_obj", "=", "view", ".", "View", "(", "view_spec", ")", "rendered", ",", "etag", "=", "render_publ_template", "(", "tmpl", ",", "_url_root", "=", "request", ".", "url_root", ",", "category", "=", "Category", "(", "category", ")", ",", "view", "=", "view_obj", ")", "if", "request", ".", "if_none_match", ".", "contains", "(", "etag", ")", ":", "return", "'Not modified'", ",", "304", "return", "rendered", ",", "{", "'Content-Type'", ":", "mime_type", "(", "tmpl", ")", ",", "'ETag'", ":", "etag", "}" ]
Get RemoteToken for user .
def get(cls, user_id, client_id, token_type='', access_token=None):
    """Get RemoteToken for user.

    :param user_id: Identifier of the user.
    :param client_id: Identifier of the OAuth client application.
    :param token_type: Type of token to match (default: ``''``).
    :param access_token: Optional exact access token to match.
    :returns: The matching token (with its account eagerly loaded) or ``None``.
    """
    filters = [
        RemoteAccount.id == RemoteToken.id_remote_account,
        RemoteAccount.user_id == user_id,
        RemoteAccount.client_id == client_id,
        RemoteToken.token_type == token_type,
    ]
    if access_token:
        filters.append(RemoteToken.access_token == access_token)
    query = cls.query.options(db.joinedload('remote_account'))
    return query.filter(*filters).first()
1,108
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/models.py#L166-L188
[ "def", "_detach_childs", "(", "self", ",", "idx_from", "=", "None", ",", "idx_to", "=", "None", ")", ":", "idx_from", "=", "idx_from", "or", "0", "idx_to", "=", "idx_to", "or", "len", "(", "self", ".", "childs", ")", "removed", "=", "self", ".", "childs", "[", "idx_from", ":", "idx_to", "]", "for", "child", "in", "removed", ":", "if", "issubclass", "(", "child", ".", "__class__", ",", "DOMElement", ")", ":", "child", ".", "parent", "=", "None", "self", ".", "childs", "[", "idx_from", ":", "idx_to", "]", "=", "[", "]", "return", "removed" ]
Get RemoteAccount object for token .
def get_by_token(cls, client_id, access_token, token_type=''):
    """Get RemoteAccount object for token.

    :param client_id: Identifier of the OAuth client application.
    :param access_token: Exact access token to match.
    :param token_type: Type of token to match (default: ``''``).
    :returns: The matching token (with its account eagerly loaded) or ``None``.
    """
    query = cls.query.options(db.joinedload('remote_account'))
    query = query.filter(
        RemoteAccount.id == RemoteToken.id_remote_account,
        RemoteAccount.client_id == client_id,
        RemoteToken.token_type == token_type,
        RemoteToken.access_token == access_token,
    )
    return query.first()
1,109
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/models.py#L191-L204
[ "def", "_check_data", "(", "data", ")", ":", "if", "\"vibfreqs\"", "in", "data", ".", "columns", ":", "for", "species", "in", "data", ".", "index", ":", "vibfreqs", "=", "data", ".", "loc", "[", "species", ",", "\"vibfreqs\"", "]", "nimagvibfreqs", "=", "_np", ".", "sum", "(", "_np", ".", "array", "(", "vibfreqs", ")", "<", "0", ")", "if", "species", "[", "-", "1", "]", "==", "'#'", "and", "nimagvibfreqs", "!=", "1", ":", "_warnings", ".", "warn", "(", "\"'{}' should have 1 imaginary vibfreqs but {} \"", "\"found\"", ".", "format", "(", "species", ",", "nimagvibfreqs", ")", ")", "elif", "species", "[", "-", "1", "]", "!=", "'#'", "and", "nimagvibfreqs", "!=", "0", ":", "_warnings", ".", "warn", "(", "\"'{}' should have no imaginary vibfreqs but {} \"", "\"found\"", ".", "format", "(", "species", ",", "nimagvibfreqs", ")", ")" ]
Bulk export a set of configs, devices, packages and results.
def bulk_export(self, config_ids=None, device_ids=None, package_ids=None,
                result_ids=None, exclude_captures=False):
    """Bulk export a set of configs, devices, packages and results.

    :param config_ids: (optional) Config IDs to export.
    :param device_ids: (optional) Device IDs to export.
    :param package_ids: (optional) Package IDs to export.
    :param result_ids: (optional) Result IDs to export.
    :param exclude_captures: If truthy, exclude capture files from export.
    :returns: Tuple of (:class:`io.BytesIO` with the archive, filename).
    """
    # Use list comprehensions, not map(): on Python 3 map() returns a lazy
    # iterator which requests' ``json=`` cannot serialize to JSON.
    json = {
        'configs': [int(i) for i in (config_ids or [])],
        'devices': [int(i) for i in (device_ids or [])],
        'packages': [int(i) for i in (package_ids or [])],
        'results': [int(i) for i in (result_ids or [])],
        'options': {'exclude_captures': exclude_captures},
    }
    resp = self.service.post(self.base, json=json, stream=True)
    b = io.BytesIO()
    stream.stream_response_to_file(resp, path=b)
    resp.close()
    b.seek(0)
    return (b, self.service.filename(resp))
1,110
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/exports.py#L22-L52
[ "def", "_make_variant_locus_id", "(", "self", ",", "gene_id", ",", "disease_id", ")", ":", "alt_locus_id", "=", "'_:'", "+", "re", ".", "sub", "(", "r':'", ",", "''", ",", "gene_id", ")", "+", "'-'", "+", "re", ".", "sub", "(", "r':'", ",", "''", ",", "disease_id", ")", "+", "'VL'", "alt_label", "=", "self", ".", "label_hash", ".", "get", "(", "gene_id", ")", "disease_label", "=", "self", ".", "label_hash", ".", "get", "(", "disease_id", ")", "if", "alt_label", "is", "not", "None", "and", "alt_label", "!=", "''", ":", "alt_label", "=", "'some variant of '", "+", "str", "(", "alt_label", ")", "if", "disease_label", "is", "not", "None", "and", "disease_label", "!=", "''", ":", "alt_label", "+=", "' that is associated with '", "+", "str", "(", "disease_label", ")", "else", ":", "alt_label", "=", "None", "self", ".", "label_hash", "[", "alt_locus_id", "]", "=", "alt_label", "return", "alt_locus_id" ]
Create the report directory and return the directory name.
def _init_report(self):
    """Create the report directory tree and copy in static resources.

    Side effects only: resets ``self.sections``/``self.section_names``,
    creates ``self.directory`` (plus the sub-directories listed in
    ``self._to_create``), and copies CSS/JS assets shipped with the
    ``reports`` package into it. Directory-creation failures are
    deliberately swallowed (best-effort); the copy step always runs.
    """
    self.sections = []
    self.section_names = []
    # if the directory already exists, print a warning
    try:
        if os.path.isdir(self.directory) is False:
            if self.verbose:
                print("Created directory {}".format(self.directory))
            os.mkdir(self.directory)
        # list of directories created in the constructor
        for this in self._to_create:
            try:
                os.mkdir(self.directory + os.sep + this)
            except:
                pass  # already created ?
    except Exception:
        # Best-effort: any creation failure is ignored so that the resource
        # copy below (in ``finally``) still runs.
        pass
    finally:
        # Once the main directory is created, copy files required
        temp_path = easydev.get_package_location("reports")
        temp_path += os.sep + "reports" + os.sep + "resources"
        # Copy the CSS from reports/resources/css
        filenames = glob.glob(os.sep.join([temp_path, "css", "*.css"]))
        # If there are CSS in the directory with JINJA templates, use them
        # as well
        filenames += glob.glob(os.sep.join([self.searchpath, '*.css']))
        # In addition, the user may also provide his own CSS as a list
        filenames += self.extra_css_list
        for filename in filenames:
            # NOTE(review): ``target`` is the css *directory*, so the
            # isfile() guard is always False and each file is copied every
            # time -- presumably intentional overwrite behaviour; confirm.
            target = os.sep.join([self.directory, 'css'])
            if os.path.isfile(target) is False:
                shutil.copy(filename, target)
        # We copy all javascript from reports resources
        for filename in ['sorttable.js', 'highlight.pack.js',
                         "jquery-1.12.3.min.js"]:
            target = os.sep.join([self.directory, 'js', filename])
            if os.path.isfile(target) is False:
                filename = os.sep.join([temp_path, "javascript", filename])
                shutil.copy(filename, target)
        for filename in self.extra_js_list:
            basename = os.path.basename(filename)
            target = os.sep.join([self.directory, 'js', basename])
            if os.path.isfile(target) is False:
                shutil.copy(filename, target)
1,111
https://github.com/cokelaer/reports/blob/7703b1e27d440c3193ee6cc90bfecd78cc98b737/reports/report.py#L159-L209
[ "def", "getTemplates", "(", "fnames", ",", "blend", "=", "True", ")", ":", "if", "not", "blend", ":", "newhdrs", "=", "blendheaders", ".", "getSingleTemplate", "(", "fnames", "[", "0", "]", ")", "newtab", "=", "None", "else", ":", "# apply rules to create final version of headers, plus table", "newhdrs", ",", "newtab", "=", "blendheaders", ".", "get_blended_headers", "(", "inputs", "=", "fnames", ")", "cleanTemplates", "(", "newhdrs", "[", "1", "]", ",", "newhdrs", "[", "2", "]", ",", "newhdrs", "[", "3", "]", ")", "return", "newhdrs", ",", "newtab" ]
Returns a time stamp
def get_time_now(self):
    """Return an HTML snippet with a creation timestamp and the user name."""
    import datetime
    import getpass
    # getpass is used because os.environ["USERNAME"] is unreliable on some
    # systems.
    username = getpass.getuser()
    # Drop the microseconds part of the timestamp.
    timenow = str(datetime.datetime.now()).split('.')[0]
    return ('<div class="date">Created on ' + timenow
            + " by " + username + '</div>')
1,112
https://github.com/cokelaer/reports/blob/7703b1e27d440c3193ee6cc90bfecd78cc98b737/reports/report.py#L230-L240
[ "def", "catalogFactory", "(", "name", ",", "*", "*", "kwargs", ")", ":", "fn", "=", "lambda", "member", ":", "inspect", ".", "isclass", "(", "member", ")", "and", "member", ".", "__module__", "==", "__name__", "catalogs", "=", "odict", "(", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "fn", ")", ")", "if", "name", "not", "in", "list", "(", "catalogs", ".", "keys", "(", ")", ")", ":", "msg", "=", "\"%s not found in catalogs:\\n %s\"", "%", "(", "name", ",", "list", "(", "kernels", ".", "keys", "(", ")", ")", ")", "logger", ".", "error", "(", "msg", ")", "msg", "=", "\"Unrecognized catalog: %s\"", "%", "name", "raise", "Exception", "(", "msg", ")", "return", "catalogs", "[", "name", "]", "(", "*", "*", "kwargs", ")" ]
Track a field on a related model
def _track_class_related_field(cls, field):
    """Track a field on a related model.

    ``field`` is a spanning lookup of the form ``"fk_field__related_field"``;
    the tracking metadata is registered on the *related* model class, and a
    ``m2m_changed`` handler is attached when the related field is
    many-to-many.
    """
    # field = field on current model
    # related_field = field on related model
    (field, related_field) = field.split('__', 1)
    field_obj = cls._meta.get_field(field)
    related_cls = field_obj.remote_field.model
    related_name = field_obj.remote_field.get_accessor_name()
    if not hasattr(related_cls, '_tracked_related_fields'):
        setattr(related_cls, '_tracked_related_fields', {})
    if related_field not in related_cls._tracked_related_fields.keys():
        related_cls._tracked_related_fields[related_field] = []
    # There can be several field from different or same model
    # related to a single model.
    # Thus _tracked_related_fields will be of the form:
    # {
    #     'field name on related model': [
    #         ('field name on current model', 'field name to current model'),
    #         ('field name on another model', 'field name to another model'),
    #         ...
    #     ],
    #     ...
    # }
    related_cls._tracked_related_fields[related_field].append(
        (field, related_name)
    )
    _add_signals_to_cls(related_cls)
    # Detect m2m fields changes
    if isinstance(related_cls._meta.get_field(related_field), ManyToManyField):
        m2m_changed.connect(
            tracking_m2m,
            sender=getattr(related_cls, related_field).through,
            dispatch_uid=repr(related_cls),
        )
1,113
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/decorators.py#L35-L71
[ "def", "ticks", "(", "self", ",", "security", ",", "start", ",", "end", ")", ":", "period", "=", "1", "# url = 'http://www.google.com/finance/getprices?q=%s&i=%s&p=%sd&f=d,o,h,l,c,v&ts=%s' % (security, interval, period, start)\r", "url", "=", "'http://www.google.com/finance/getprices?q=%s&i=61&p=%sd&f=d,o,h,l,c,v'", "%", "(", "security", ".", "symbol", ",", "period", ")", "LOG", ".", "debug", "(", "'fetching {0}'", ".", "format", "(", "url", ")", ")", "try", ":", "response", "=", "self", ".", "_request", "(", "url", ")", "except", "UfException", "as", "ufExcep", ":", "# if symol is not right, will get 400\r", "if", "Errors", ".", "NETWORK_400_ERROR", "==", "ufExcep", ".", "getCode", ":", "raise", "UfException", "(", "Errors", ".", "STOCK_SYMBOL_ERROR", ",", "\"Can find data for stock %s, security error?\"", "%", "security", ")", "raise", "ufExcep", "# use csv reader here\r", "days", "=", "response", ".", "text", ".", "split", "(", "'\\n'", ")", "[", "7", ":", "]", "# first 7 line is document\r", "# sample values:'a1316784600,31.41,31.5,31.4,31.43,150911'\r", "values", "=", "[", "day", ".", "split", "(", "','", ")", "for", "day", "in", "days", "if", "len", "(", "day", ".", "split", "(", "','", ")", ")", ">=", "6", "]", "for", "value", "in", "values", ":", "yield", "json", ".", "dumps", "(", "{", "'date'", ":", "value", "[", "0", "]", "[", "1", ":", "]", ".", "strip", "(", ")", ",", "'close'", ":", "value", "[", "1", "]", ".", "strip", "(", ")", ",", "'high'", ":", "value", "[", "2", "]", ".", "strip", "(", ")", ",", "'low'", ":", "value", "[", "3", "]", ".", "strip", "(", ")", ",", "'open'", ":", "value", "[", "4", "]", ".", "strip", "(", ")", ",", "'volume'", ":", "value", "[", "5", "]", ".", "strip", "(", ")", "}", ")" ]
Track a field on the current model
def _track_class_field(cls, field):
    """Track a field on the current model.

    Spanning lookups (``"a__b"``) are delegated to
    :func:`_track_class_related_field`; m2m fields get a ``m2m_changed``
    handler attached.
    """
    if '__' in field:
        _track_class_related_field(cls, field)
        return
    # Will raise FieldDoesNotExist if there is an error. Fetch the field
    # object once and reuse it rather than looking it up twice.
    field_obj = cls._meta.get_field(field)
    # Detect m2m fields changes
    if isinstance(field_obj, ManyToManyField):
        m2m_changed.connect(
            tracking_m2m,
            sender=getattr(cls, field).through,
            dispatch_uid=repr(cls),
        )
1,114
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/decorators.py#L74-L87
[ "def", "main", "(", ")", ":", "try", ":", "# Retrieve an AD2 device that has been exposed with ser2sock on localhost:10000.", "device", "=", "AlarmDecoder", "(", "SocketDevice", "(", "interface", "=", "(", "HOSTNAME", ",", "PORT", ")", ")", ")", "# Set up an event handler and open the device", "device", ".", "on_message", "+=", "handle_message", "with", "device", ".", "open", "(", ")", ":", "while", "True", ":", "time", ".", "sleep", "(", "1", ")", "except", "Exception", "as", "ex", ":", "print", "(", "'Exception:'", ",", "ex", ")" ]
Track fields on the specified model
def _track_class(cls, fields):
    """Track fields on the specified model."""
    # Small sanity check: a class must not be decorated twice.
    assert not getattr(cls, '_is_tracked', False)
    for name in fields:
        _track_class_field(cls, name)
    _add_signals_to_cls(cls)
    # Mark the class as tracked
    cls._is_tracked = True
    # Do not directly track related fields (tracked on related model)
    # or m2m fields (tracked by another signal)
    cls._tracked_fields = [name for name in fields if '__' not in name]
1,115
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/decorators.py#L90-L107
[ "def", "ask_question", "(", "self", ",", "question_text", ",", "question", "=", "None", ")", ":", "if", "question", "is", "not", "None", ":", "q", "=", "question", ".", "to_dict", "(", ")", "else", ":", "q", "=", "WatsonQuestion", "(", "question_text", ")", ".", "to_dict", "(", ")", "r", "=", "requests", ".", "post", "(", "self", ".", "url", "+", "'/question'", ",", "json", "=", "{", "'question'", ":", "q", "}", ",", "headers", "=", "{", "'Accept'", ":", "'application/json'", ",", "'X-SyncTimeout'", ":", "30", "}", ",", "auth", "=", "(", "self", ".", "username", ",", "self", ".", "password", ")", ")", "try", ":", "response_json", "=", "r", ".", "json", "(", ")", "except", "ValueError", ":", "raise", "Exception", "(", "'Failed to parse response JSON'", ")", "return", "WatsonAnswer", "(", "response_json", ")" ]
Add a method to get the tracking url of an object .
def _add_get_tracking_url(cls):
    """Add a method to get the tracking url of an object.

    The method is only attached when ``cls`` does not already define one.
    """
    def get_tracking_url(self):
        """ return url to tracking view in admin panel """
        url = reverse('admin:tracking_fields_trackingevent_changelist')
        ct_pk = ContentType.objects.get_for_model(self).pk
        object_id = '{0}%3A{1}'.format(ct_pk, self.pk)
        return '{0}?object={1}'.format(url, object_id)

    if not hasattr(cls, 'get_tracking_url'):
        cls.get_tracking_url = get_tracking_url
1,116
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/decorators.py#L110-L121
[ "def", "rebalance_replication_groups", "(", "self", ")", ":", "# Balance replicas over replication-groups for each partition", "if", "any", "(", "b", ".", "inactive", "for", "b", "in", "six", ".", "itervalues", "(", "self", ".", "cluster_topology", ".", "brokers", ")", ")", ":", "self", ".", "log", ".", "error", "(", "\"Impossible to rebalance replication groups because of inactive \"", "\"brokers.\"", ")", "raise", "RebalanceError", "(", "\"Impossible to rebalance replication groups because of inactive \"", "\"brokers\"", ")", "# Balance replica-count over replication-groups", "self", ".", "rebalance_replicas", "(", ")", "# Balance partition-count over replication-groups", "self", ".", "_rebalance_groups_partition_cnt", "(", ")" ]
Decorator used to track changes on a model's fields.
def track ( * fields ) : def inner ( cls ) : _track_class ( cls , fields ) _add_get_tracking_url ( cls ) return cls return inner
1,117
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/decorators.py#L124-L137
[ "def", "get_gpus", "(", "num_gpu", "=", "1", ",", "worker_index", "=", "-", "1", ")", ":", "# get list of gpus (index, uuid)", "list_gpus", "=", "subprocess", ".", "check_output", "(", "[", "\"nvidia-smi\"", ",", "\"--list-gpus\"", "]", ")", ".", "decode", "(", ")", "logging", ".", "debug", "(", "\"all GPUs:\\n{0}\"", ".", "format", "(", "list_gpus", ")", ")", "# parse index and guid", "gpus", "=", "[", "x", "for", "x", "in", "list_gpus", ".", "split", "(", "'\\n'", ")", "if", "len", "(", "x", ")", ">", "0", "]", "def", "parse_gpu", "(", "gpu_str", ")", ":", "cols", "=", "gpu_str", ".", "split", "(", "' '", ")", "return", "cols", "[", "5", "]", ".", "split", "(", "')'", ")", "[", "0", "]", ",", "cols", "[", "1", "]", ".", "split", "(", "':'", ")", "[", "0", "]", "gpu_list", "=", "[", "parse_gpu", "(", "gpu", ")", "for", "gpu", "in", "gpus", "]", "free_gpus", "=", "[", "]", "retries", "=", "0", "while", "len", "(", "free_gpus", ")", "<", "num_gpu", "and", "retries", "<", "MAX_RETRIES", ":", "smi_output", "=", "subprocess", ".", "check_output", "(", "[", "\"nvidia-smi\"", ",", "\"--format=csv,noheader,nounits\"", ",", "\"--query-compute-apps=gpu_uuid\"", "]", ")", ".", "decode", "(", ")", "logging", ".", "debug", "(", "\"busy GPUs:\\n{0}\"", ".", "format", "(", "smi_output", ")", ")", "busy_uuids", "=", "[", "x", "for", "x", "in", "smi_output", ".", "split", "(", "'\\n'", ")", "if", "len", "(", "x", ")", ">", "0", "]", "for", "uuid", ",", "index", "in", "gpu_list", ":", "if", "uuid", "not", "in", "busy_uuids", ":", "free_gpus", ".", "append", "(", "index", ")", "if", "len", "(", "free_gpus", ")", "<", "num_gpu", ":", "logging", ".", "warn", "(", "\"Unable to find available GPUs: requested={0}, available={1}\"", ".", "format", "(", "num_gpu", ",", "len", "(", "free_gpus", ")", ")", ")", "retries", "+=", "1", "time", ".", "sleep", "(", "30", "*", "retries", ")", "free_gpus", "=", "[", "]", "logging", ".", "info", "(", "\"Available GPUs: {}\"", ".", "format", "(", 
"free_gpus", ")", ")", "# if still can't find available GPUs, raise exception", "if", "len", "(", "free_gpus", ")", "<", "num_gpu", ":", "smi_output", "=", "subprocess", ".", "check_output", "(", "[", "\"nvidia-smi\"", ",", "\"--format=csv\"", ",", "\"--query-compute-apps=gpu_uuid,pid,process_name,used_gpu_memory\"", "]", ")", ".", "decode", "(", ")", "logging", ".", "info", "(", "\": {0}\"", ".", "format", "(", "smi_output", ")", ")", "raise", "Exception", "(", "\"Unable to find {} free GPU(s)\\n{}\"", ".", "format", "(", "num_gpu", ",", "smi_output", ")", ")", "# Get logical placement", "num_available", "=", "len", "(", "free_gpus", ")", "if", "worker_index", "==", "-", "1", ":", "# use original random placement", "random", ".", "shuffle", "(", "free_gpus", ")", "proposed_gpus", "=", "free_gpus", "[", ":", "num_gpu", "]", "else", ":", "# ordered by worker index", "if", "worker_index", "*", "num_gpu", "+", "num_gpu", ">", "num_available", ":", "worker_index", "=", "worker_index", "*", "num_gpu", "%", "num_available", "proposed_gpus", "=", "free_gpus", "[", "worker_index", "*", "num_gpu", ":", "(", "worker_index", "*", "num_gpu", "+", "num_gpu", ")", "]", "logging", ".", "info", "(", "\"Proposed GPUs: {}\"", ".", "format", "(", "proposed_gpus", ")", ")", "return", "','", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "proposed_gpus", ")" ]
Indent a value by n character s
def indent ( value , n = 2 , character = ' ' ) : prefix = n * character return '\n' . join ( prefix + line for line in value . splitlines ( ) )
1,118
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/docs/generate.py#L13-L23
[ "def", "i2repr", "(", "self", ",", "pkt", ",", "packed_seconds", ")", ":", "time_struct", "=", "time", ".", "gmtime", "(", "self", ".", "_convert_seconds", "(", "packed_seconds", ")", ")", "return", "time", ".", "strftime", "(", "\"%a %b %d %H:%M:%S %Y\"", ",", "time_struct", ")" ]
Wrapper that tests the type of _session .
def check_instance ( function ) : def wrapper ( self , * args , * * kwargs ) : func_trans = { "commit" : manager . Manager , "compare_config" : manager . Manager , "commit_check" : manager . Manager , "device_info" : manager . Manager , "diff_config" : manager . Manager , "health_check" : manager . Manager , "interface_errors" : manager . Manager , "op_cmd" : paramiko . client . SSHClient , "shell_cmd" : paramiko . client . SSHClient , "scp_pull" : paramiko . client . SSHClient , "scp_push" : paramiko . client . SSHClient } # when doing an operational command, logging in as root # brings you to shell, so we need to enter the device as a shell # connection, and move to cli to perform the command # this is a one-off because the isinstance() check will be bypassed if self . username == "root" and function . __name__ == "op_cmd" : if not self . _session : self . conn_type = "paramiko" self . connect ( ) if not self . _shell : self . conn_type = "root" self . connect ( ) self . shell_to_cli ( ) # check if we're in the cli # Have to call shell command separately, since we are using _shell # for comparison, not _session. elif function . __name__ == 'shell_cmd' : if not self . _shell : self . conn_type = "shell" self . connect ( ) self . cli_to_shell ( ) # check if we're in shell. if isinstance ( self . _session , func_trans [ function . __name__ ] ) : # If they're doing SCP, we have to check for both _session and # _scp if function . __name__ in [ 'scp_pull' , 'scp_push' ] : if not isinstance ( self . _scp , SCPClient ) : self . conn_type = "scp" self . connect ( ) else : self . disconnect ( ) if function . __name__ == "op_cmd" : self . conn_type = "paramiko" elif function . __name__ in [ "scp_pull" , "scp_push" ] : self . conn_type = "scp" else : self . conn_type = "ncclient" self . connect ( ) return function ( self , * args , * * kwargs ) return wrapper
1,119
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L132-L201
[ "def", "GetAllUsers", "(", "self", ",", "pagination_size", "=", "10", ")", ":", "next_page_token", ",", "accounts", "=", "self", ".", "rpc_helper", ".", "DownloadAccount", "(", "None", ",", "pagination_size", ")", "while", "accounts", ":", "for", "account", "in", "accounts", ":", "yield", "GitkitUser", ".", "FromApiResponse", "(", "account", ")", "next_page_token", ",", "accounts", "=", "self", ".", "rpc_helper", ".", "DownloadAccount", "(", "next_page_token", ",", "pagination_size", ")" ]
Perform a commit operation .
def commit ( self , commands = "" , confirmed = None , comment = None , at_time = None , synchronize = False , req_format = 'text' ) : # ncclient doesn't support a truly blank commit, so if nothing is # passed, use 'annotate system' to make a blank commit if not commands : commands = 'annotate system ""' clean_cmds = [ ] for cmd in clean_lines ( commands ) : clean_cmds . append ( cmd ) # try to lock the candidate config so we can make changes. self . lock ( ) self . _session . load_configuration ( action = 'set' , config = commands ) results = "" # confirmed and commit at are mutually exclusive. commit confirm # takes precedence. if confirmed : results = self . _session . commit ( confirmed = True , timeout = str ( confirmed ) , comment = comment , synchronize = synchronize ) else : results = self . _session . commit ( comment = comment , at_time = at_time , synchronize = synchronize ) self . unlock ( ) if results : if req_format == 'xml' : return results # commit() DOES NOT return a parse-able xml tree, so we # convert it to an ElementTree xml tree. results = ET . fromstring ( results . tostring ) out = '' for i in results . iter ( ) : # the success message is just a tag, so we need to get it # specifically. if i . tag == 'commit-check-success' : out += 'configuration check succeeds\n' elif i . tag == 'commit-success' : out += 'commit complete\n' elif i . tag == 'ok' : out += 'commit complete\n' # this is for normal output with a tag and inner text, it will # strip the inner text and add it to the output. elif i . text is not None : if i . text . strip ( ) + '\n' != '\n' : out += i . text . strip ( ) + '\n' # this is for elements that don't have inner text, # it will add the tag to the output. elif i . text is None : if i . tag + '\n' != '\n' : out += i . tag + '\n' return out return False
1,120
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L214-L298
[ "def", "CreateDynamicDisplayAdSettings", "(", "client", ",", "opener", ")", ":", "media_service", "=", "client", ".", "GetService", "(", "'MediaService'", ",", "'v201809'", ")", "logo", "=", "{", "'xsi_type'", ":", "'Image'", ",", "'mediaId'", ":", "_CreateImage", "(", "media_service", ",", "opener", ",", "'https://goo.gl/dEvQeF'", ")", "}", "dynamic_settings", "=", "{", "'landscapeLogoImage'", ":", "logo", ",", "'pricePrefix'", ":", "'as low as'", ",", "'promoText'", ":", "'Free shipping!'", "}", "return", "dynamic_settings" ]
Execute a commit check operation .
def commit_check ( self , commands = "" , req_format = "text" ) : if not commands : raise InvalidCommandError ( 'No commands specified' ) clean_cmds = [ ] for cmd in clean_lines ( commands ) : clean_cmds . append ( cmd ) self . lock ( ) self . _session . load_configuration ( action = 'set' , config = clean_cmds ) # conn.validate() DOES NOT return a parse-able xml tree, so we # convert it to an ElementTree xml tree. results = ET . fromstring ( self . _session . validate ( source = 'candidate' ) . tostring ) # release the candidate configuration self . unlock ( ) if req_format == "xml" : return ET . tostring ( results ) out = "" # we have to parse the elementTree object, and get the text # from the xml. for i in results . iter ( ) : # the success message is just a tag, so we need to get it # specifically. if i . tag == 'commit-check-success' : out += 'configuration check succeeds\n' # this is for normal output with a tag and inner text, it will # strip the inner text and add it to the output. elif i . text is not None : if i . text . strip ( ) + '\n' != '\n' : out += i . text . strip ( ) + '\n' # this is for elements that don't have inner text, it will add the # tag to the output. elif i . text is None : if i . tag + '\n' != '\n' : out += i . tag + '\n' return out
1,121
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L301-L352
[ "def", "register_dataframe_method", "(", "method", ")", ":", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "class", "AccessorMethod", "(", "object", ")", ":", "def", "__init__", "(", "self", ",", "pandas_obj", ")", ":", "self", ".", "_obj", "=", "pandas_obj", "@", "wraps", "(", "method", ")", "def", "__call__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "method", "(", "self", ".", "_obj", ",", "*", "args", ",", "*", "*", "kwargs", ")", "register_dataframe_accessor", "(", "method", ".", "__name__", ")", "(", "AccessorMethod", ")", "return", "method", "return", "inner", "(", ")" ]
Execute a show | compare against the specified commands .
def compare_config ( self , commands = "" , req_format = "text" ) : if not commands : raise InvalidCommandError ( 'No commands specified' ) clean_cmds = [ cmd for cmd in clean_lines ( commands ) ] self . lock ( ) self . _session . load_configuration ( action = 'set' , config = clean_cmds ) out = self . _session . compare_configuration ( ) self . unlock ( ) if req_format . lower ( ) == "xml" : return out return out . xpath ( 'configuration-information/configuration-output' ) [ 0 ] . text
1,122
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L355-L383
[ "def", "InitializeUpload", "(", "self", ",", "http_request", ",", "http", "=", "None", ",", "client", "=", "None", ")", ":", "if", "self", ".", "strategy", "is", "None", ":", "raise", "exceptions", ".", "UserError", "(", "'No upload strategy set; did you call ConfigureRequest?'", ")", "if", "http", "is", "None", "and", "client", "is", "None", ":", "raise", "exceptions", ".", "UserError", "(", "'Must provide client or http.'", ")", "if", "self", ".", "strategy", "!=", "RESUMABLE_UPLOAD", ":", "return", "http", "=", "http", "or", "client", ".", "http", "if", "client", "is", "not", "None", ":", "http_request", ".", "url", "=", "client", ".", "FinalizeTransferUrl", "(", "http_request", ".", "url", ")", "self", ".", "EnsureUninitialized", "(", ")", "http_response", "=", "http_wrapper", ".", "MakeRequest", "(", "http", ",", "http_request", ",", "retries", "=", "self", ".", "num_retries", ")", "if", "http_response", ".", "status_code", "!=", "http_client", ".", "OK", ":", "raise", "exceptions", ".", "HttpError", ".", "FromResponse", "(", "http_response", ")", "self", ".", "__server_chunk_granularity", "=", "http_response", ".", "info", ".", "get", "(", "'X-Goog-Upload-Chunk-Granularity'", ")", "url", "=", "http_response", ".", "info", "[", "'location'", "]", "if", "client", "is", "not", "None", ":", "url", "=", "client", ".", "FinalizeTransferUrl", "(", "url", ")", "self", ".", "_Initialize", "(", "http", ",", "url", ")", "# Unless the user has requested otherwise, we want to just", "# go ahead and pump the bytes now.", "if", "self", ".", "auto_transfer", ":", "return", "self", ".", "StreamInChunks", "(", ")", "return", "http_response" ]
Establish a connection to the device .
def connect ( self ) : if self . conn_type == 'paramiko' : self . _session = paramiko . SSHClient ( ) # These two lines set the paramiko logging to Critical to # remove extra messages from being sent to the user output. logger = logging . Logger . manager . getLogger ( 'paramiko.transport' ) logger . setLevel ( logging . CRITICAL ) self . _session . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) self . _session . connect ( hostname = self . host , username = self . username , password = self . password , port = self . port , timeout = self . connect_timeout ) if self . conn_type == 'scp' : self . _scp_session = paramiko . SSHClient ( ) logger = logging . Logger . manager . getLogger ( 'paramiko.transport' ) logger . setLevel ( logging . CRITICAL ) self . _scp_session . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) self . _scp_session . connect ( hostname = self . host , username = self . username , password = self . password , port = self . port , timeout = self . connect_timeout ) self . _scp = SCPClient ( self . _scp_session . get_transport ( ) ) elif self . conn_type == "ncclient" : self . _session = manager . connect ( host = self . host , port = self . port , username = self . username , password = self . password , timeout = self . connect_timeout , device_params = { 'name' : 'junos' } , hostkey_verify = False ) elif self . conn_type == 'shell' : if not self . _session : self . conn_type = 'paramiko' self . connect ( ) self . conn_type = 'shell' if not self . _shell : self . _shell = self . _session . invoke_shell ( ) time . sleep ( 2 ) if self . username != 'root' and not self . _in_cli : self . _in_cli = True if not self . cli_to_shell ( ) : self . _shell . recv ( 9999 ) elif self . conn_type == 'root' : # open the shell if necessary, and move into CLI if not self . _shell : self . _shell = self . _session . invoke_shell ( ) time . sleep ( 2 ) if not self . shell_to_cli ( ) : self . _shell . recv ( 9999 ) self . 
_update_timeout ( self . session_timeout )
1,123
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L385-L457
[ "def", "process_event", "(", "self", ",", "event_id", ")", ":", "with", "db", ".", "session", ".", "begin_nested", "(", ")", ":", "event", "=", "Event", ".", "query", ".", "get", "(", "event_id", ")", "event", ".", "_celery_task", "=", "self", "# internal binding to a Celery task", "event", ".", "receiver", ".", "run", "(", "event", ")", "# call run directly to avoid circular calls", "flag_modified", "(", "event", ",", "'response'", ")", "flag_modified", "(", "event", ",", "'response_headers'", ")", "db", ".", "session", ".", "add", "(", "event", ")", "db", ".", "session", ".", "commit", "(", ")" ]
Echo status of an SCP operation .
def _copy_status ( self , filename , size , sent ) : output = "Transferred %.0f%% of the file %s" % ( ( float ( sent ) / float ( size ) * 100 ) , path . normpath ( filename ) ) output += ( ' ' * ( 120 - len ( output ) ) ) if filename != self . _filename : if self . _filename is not None : print ( '' ) self . _filename = filename print ( output , end = '\r' )
1,124
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L459-L485
[ "def", "merge_objects", "(", "self", ",", "mujoco_objects", ")", ":", "self", ".", "mujoco_objects", "=", "mujoco_objects", "self", ".", "objects", "=", "{", "}", "# xml manifestation", "self", ".", "max_horizontal_radius", "=", "0", "for", "obj_name", ",", "obj_mjcf", "in", "mujoco_objects", ".", "items", "(", ")", ":", "self", ".", "merge_asset", "(", "obj_mjcf", ")", "# Load object", "obj", "=", "obj_mjcf", ".", "get_collision", "(", "name", "=", "obj_name", ",", "site", "=", "True", ")", "obj", ".", "append", "(", "new_joint", "(", "name", "=", "obj_name", ",", "type", "=", "\"free\"", ",", "damping", "=", "\"0.0005\"", ")", ")", "self", ".", "objects", "[", "obj_name", "]", "=", "obj", "self", ".", "worldbody", ".", "append", "(", "obj", ")", "self", ".", "max_horizontal_radius", "=", "max", "(", "self", ".", "max_horizontal_radius", ",", "obj_mjcf", ".", "get_horizontal_radius", "(", ")", ")" ]
Pull basic device information .
def device_info ( self ) : # get hostname, model, and version from 'show version' resp = self . _session . get_software_information ( format = 'xml' ) hostname = resp . xpath ( '//software-information/host-name' ) [ 0 ] . text model = resp . xpath ( '//software-information/product-model' ) [ 0 ] . text version = 'Unknown' if resp . xpath ( '//junos-version' ) : """ case: <junos-version>15.1</junos-version> """ try : version = resp . xpath ( '//junos-version' ) [ 0 ] . text except IndexError : pass elif resp . xpath ( "//package-information[name = 'junos-version']" ) : """ case: <package-information> <name>junos-version</name> <comment>Junos: 14.2R4</comment> </package-information> """ try : version = ( resp . xpath ( "//package-information[name = 'junos-version']/comment" ) [ 0 ] . text ) . split ( ) [ 1 ] except IndexError : pass else : """ case: <package-information> <name>junos</name> <comment>JUNOS Base OS boot [12.3R5]</comment> </package-information> """ try : version = ( ( resp . xpath ( '//software-information/package-information/comment' ) [ 0 ] . text . split ( '[' ) [ 1 ] . split ( ']' ) [ 0 ] ) ) except IndexError : pass # try looking for 'junos-version' for >= 14.2 # for element in resp.xpath('//software-information'): # version = element.findtext('junos-version') # if not version: # try: # version = ((resp.xpath( # '//software-information/package-information/comment') # [0].text.split('[')[1].split(']')[0])) # except IndexError: # version = 'Unknown' # get uptime from 'show system uptime' resp = self . _session . get_system_uptime_information ( format = 'xml' ) try : current_time = resp . xpath ( '//current-time/date-time' ) [ 0 ] . text except IndexError : current_time = 'Unknown' try : uptime = resp . xpath ( '//uptime-information/up-time' ) [ 0 ] . text except IndexError : uptime = 'Unknown' # get serial number from 'show chassis hardware' show_hardware = self . _session . 
get_chassis_inventory ( format = 'xml' ) # If we're hitting an EX, grab each Routing Engine Serial number # to get all RE SNs in a VC try : chassis_module = show_hardware . xpath ( '//chassis-inventory/chassis/chassis-module/description' ) [ 0 ] . text except IndexError : chassis_module = 'Unknown' if ( 'EX' or 'ex' or 'Ex' ) in chassis_module : serial_num = '' for eng in show_hardware . xpath ( '//chassis-inventory/chassis/chassis-module' ) : if 'Routing Engine' in eng . xpath ( 'name' ) [ 0 ] . text : serial_num += ( eng . xpath ( 'name' ) [ 0 ] . text + ' Serial #: ' + eng . xpath ( 'serial-number' ) [ 0 ] . text ) else : # Any other device type, just grab chassis SN try : serial_num = ( 'Chassis Serial Number: ' + show_hardware . xpath ( '//chassis-inventory/chassis/serial-number' ) [ 0 ] . text ) except IndexError : serial_num = 'Chassis Serial Number: ' + 'Unknown (virtual machine?)' return ( 'Hostname: %s\nModel: %s\nJunos Version: %s\n%s\nCurrent Time:' ' %s\nUptime: %s\n' % ( hostname , model , version , serial_num , current_time , uptime ) )
1,125
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L488-L588
[ "def", "cp_cropduster_image", "(", "self", ",", "the_image_path", ",", "del_after_upload", "=", "False", ",", "overwrite", "=", "False", ",", "invalidate", "=", "False", ")", ":", "local_file", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "the_image_path", ")", "# only try to upload things if the origin cropduster file exists (so it is not already uploaded to the CDN)", "if", "os", ".", "path", ".", "exists", "(", "local_file", ")", ":", "the_image_crops_path", "=", "os", ".", "path", ".", "splitext", "(", "the_image_path", ")", "[", "0", "]", "the_image_crops_path_full_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "the_image_crops_path", ")", "self", ".", "cp", "(", "local_path", "=", "local_file", ",", "target_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "S3_ROOT_BASE", ",", "the_image_path", ")", ",", "del_after_upload", "=", "del_after_upload", ",", "overwrite", "=", "overwrite", ",", "invalidate", "=", "invalidate", ",", ")", "self", ".", "cp", "(", "local_path", "=", "the_image_crops_path_full_path", "+", "\"/*\"", ",", "target_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "S3_ROOT_BASE", ",", "the_image_crops_path", ")", ",", "del_after_upload", "=", "del_after_upload", ",", "overwrite", "=", "overwrite", ",", "invalidate", "=", "invalidate", ",", ")" ]
Generate configuration differences with a second device .
def diff_config ( self , second_host , mode = 'stanza' ) : second_conn = manager . connect ( host = second_host , port = self . port , username = self . username , password = self . password , timeout = self . connect_timeout , device_params = { 'name' : 'junos' } , hostkey_verify = False ) command = 'show configuration' if mode == 'set' : command += ' | display set' # get the raw xml config config1 = self . _session . command ( command , format = 'text' ) # for each /configuration-output snippet, turn it to text and join them config1 = '' . join ( [ snippet . text . lstrip ( '\n' ) for snippet in config1 . xpath ( '//configuration-output' ) ] ) config2 = second_conn . command ( command , format = 'text' ) config2 = '' . join ( [ snippet . text . lstrip ( '\n' ) for snippet in config2 . xpath ( '//configuration-output' ) ] ) return difflib . unified_diff ( config1 . splitlines ( ) , config2 . splitlines ( ) , self . host , second_host )
1,126
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L592-L633
[ "def", "unregister_vm", "(", "vm_ref", ")", ":", "vm_name", "=", "get_managed_object_name", "(", "vm_ref", ")", "log", ".", "trace", "(", "'Destroying vm \\'%s\\''", ",", "vm_name", ")", "try", ":", "vm_ref", ".", "UnregisterVM", "(", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")" ]
Parse the extensive xml output of an interface and yield errors .
def _error_parse ( self , interface , face ) : try : error_list = interface . xpath ( face + '-error-list' ) [ 0 ] . getchildren ( ) except IndexError : # no error list on this interface pass else : for x in range ( len ( error_list ) ) : if error_list [ x ] . tag == "carrier-transitions" : if int ( error_list [ x ] . text . strip ( ) ) > 50 : yield " has greater than 50 flaps." elif int ( error_list [ x ] . text . strip ( ) ) > 0 : yield " has %s of %s." % ( error_list [ x ] . text . strip ( ) , error_list [ x ] . tag . strip ( ) )
1,127
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L657-L685
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
Pull health and alarm information from the device .
def health_check ( self ) : output = 'Chassis Alarms:\n\t' # Grab chassis alarms, system alarms, show chassis routing-engine, # 'show system processes extensive', and also xpath to the # relevant nodes on each. chassis_alarms = self . _session . command ( "show chassis alarms" ) chassis_alarms = chassis_alarms . xpath ( '//alarm-detail' ) system_alarms = self . _session . command ( "show system alarms" ) system_alarms = system_alarms . xpath ( '//alarm-detail' ) chass = self . _session . command ( command = "show chassis routing-engine" , format = 'text' ) . xpath ( '//output' ) [ 0 ] . text proc = self . _session . command ( "show system processes extensive" ) proc = proc . xpath ( 'output' ) [ 0 ] . text . split ( '\n' ) if chassis_alarms == [ ] : # Chassis Alarms output += 'No chassis alarms active.\n' else : for i in chassis_alarms : output += ( i . xpath ( 'alarm-class' ) [ 0 ] . text . strip ( ) + ' Alarm \t' '\t' + i . xpath ( 'alarm-time' ) [ 0 ] . text . strip ( ) + '\n\t' + i . xpath ( 'alarm-description' ) [ 0 ] . text . strip ( ) + '\n' ) output += '\nSystem Alarms: \n\t' if system_alarms == [ ] : # System Alarms output += 'No system alarms active.\n' else : for i in system_alarms : output += ( i . xpath ( 'alarm-class' ) [ 0 ] . text . strip ( ) + ' Alarm ' '\t\t' + i . xpath ( 'alarm-time' ) [ 0 ] . text . strip ( ) + '\n\t' + i . xpath ( 'alarm-description' ) [ 0 ] . text . strip ( ) + '\n' ) # add the output of the show chassis routing-engine to the command. output += '\n' + chass # Grabs the top 5 processes and the header line. output += ( '\n\nTop 5 busiest processes (high mgd values likely from ' 'script execution):\n' ) for line_number in range ( 8 , 14 ) : output += proc [ line_number ] + '\n' return output
1,128
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L688-L734
[ "def", "send_ftp", "(", "outdir", ")", ":", "print", "(", "\"Uploading the files in the \"", "+", "outdir", "+", "\"/ directory!\\n\"", ")", "# Make sure there is actually a configuration file", "config_file_dir", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "\"config.py\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "config_file_dir", ")", ":", "sys", ".", "exit", "(", "\"There dosen't seem to be a configuration file. Have you run the init command?\"", ")", "else", ":", "sys", ".", "path", ".", "insert", "(", "0", ",", "cwd", ")", "try", ":", "from", "config", "import", "ftp_server", ",", "ftp_username", ",", "ftp_password", ",", "ftp_port", ",", "ftp_upload_path", "except", ":", "sys", ".", "exit", "(", "\"The FTP settings could not be found. Maybe your config file is too old. Re-run 'blended init' to fix it.\"", ")", "server", "=", "ftp_server", "username", "=", "ftp_username", "password", "=", "ftp_password", "port", "=", "ftp_port", "ftp", "=", "FTP", "(", ")", "ftp", ".", "connect", "(", "server", ",", "port", ")", "ftp", ".", "login", "(", "username", ",", "password", ")", "filenameCV", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "outdir", ")", "try", ":", "ftp", ".", "cwd", "(", "ftp_upload_path", ")", "placeFiles", "(", "ftp", ",", "filenameCV", ")", "except", ":", "ftp", ".", "quit", "(", ")", "sys", ".", "exit", "(", "\"Files not able to be uploaded! Are you sure the directory exists?\"", ")", "ftp", ".", "quit", "(", ")", "print", "(", "\"\\nFTP Done!\"", ")" ]
Parse show interfaces extensive and return interfaces with errors .
def interface_errors ( self ) : output = [ ] # used to store the list of interfaces with errors. # get a string of each physical and logical interface element dev_response = self . _session . command ( 'sh interfaces extensive' ) ints = dev_response . xpath ( '//physical-interface' ) ints += dev_response . xpath ( '//logical-interface' ) for i in ints : # Grab the interface name for user output. int_name = i . xpath ( 'name' ) [ 0 ] . text . strip ( ) # Only check certain interface types. if ( ( 'ge' or 'fe' or 'ae' or 'xe' or 'so' or 'et' or 'vlan' or 'lo0' or 'irb' ) in int_name ) : try : status = ( i . xpath ( 'admin-status' ) [ 0 ] . text . strip ( ) + '/' + i . xpath ( 'oper-status' ) [ 0 ] . text . strip ( ) ) except IndexError : pass else : for error in self . _error_parse ( i , "input" ) : output . append ( "%s (%s)%s" % ( int_name , status , error ) ) for error in self . _error_parse ( i , "output" ) : output . append ( "%s (%s)%s" % ( int_name , status , error ) ) if output == [ ] : output . append ( 'No interface errors were detected on this device.' ) return '\n' . join ( output ) + '\n'
1,129
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L737-L772
[ "def", "libvlc_vlm_seek_media", "(", "p_instance", ",", "psz_name", ",", "f_percentage", ")", ":", "f", "=", "_Cfunctions", ".", "get", "(", "'libvlc_vlm_seek_media'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_vlm_seek_media'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "Instance", ",", "ctypes", ".", "c_char_p", ",", "ctypes", ".", "c_float", ")", "return", "f", "(", "p_instance", ",", "psz_name", ",", "f_percentage", ")" ]
Lock the candidate config . Requires ncclient . manager . Manager .
def lock ( self ) : if isinstance ( self . _session , manager . Manager ) : self . _session . lock ( )
1,130
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L774-L777
[ "def", "extract_zip", "(", "archive", ",", "compression", ",", "cmd", ",", "verbosity", ",", "interactive", ",", "outdir", ")", ":", "try", ":", "with", "zipfile", ".", "ZipFile", "(", "archive", ")", "as", "zfile", ":", "zfile", ".", "extractall", "(", "outdir", ")", "except", "Exception", "as", "err", ":", "msg", "=", "\"error extracting %s: %s\"", "%", "(", "archive", ",", "err", ")", "raise", "util", ".", "PatoolError", "(", "msg", ")", "return", "None" ]
Execute an operational mode command .
def op_cmd ( self , command , req_format = 'text' , xpath_expr = "" ) : if not command : raise InvalidCommandError ( "Parameter 'command' cannot be empty" ) if req_format . lower ( ) == 'xml' or xpath_expr : command = command . strip ( ) + ' | display xml' command = command . strip ( ) + ' | no-more\n' out = '' # when logging in as root, we use _shell to get the response. if self . username == 'root' : self . _shell . send ( command ) time . sleep ( 3 ) while self . _shell . recv_ready ( ) : out += self . _shell . recv ( 999999 ) time . sleep ( .75 ) # take off the command being sent and the prompt at the end. out = '\n' . join ( out . split ( '\n' ) [ 1 : - 2 ] ) # not logging in as root, and can grab the output as normal. else : stdin , stdout , stderr = self . _session . exec_command ( command = command , timeout = float ( self . session_timeout ) ) stdin . close ( ) # read normal output while not stdout . channel . exit_status_ready ( ) : out += stdout . read ( ) stdout . close ( ) # read errors while not stderr . channel . exit_status_ready ( ) : out += stderr . read ( ) stderr . close ( ) return out if not xpath_expr else xpath ( out , xpath_expr )
1,131
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L780-L832
[ "def", "unsubscribe", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "object", "=", "self", ".", "get_object", "(", ")", "self", ".", "object", ".", "subscribers", ".", "remove", "(", "request", ".", "user", ")", "messages", ".", "success", "(", "self", ".", "request", ",", "self", ".", "success_message", ")", "return", "HttpResponseRedirect", "(", "self", ".", "get_success_url", "(", ")", ")" ]
Unlock the candidate config .
def unlock ( self ) : if isinstance ( self . _session , manager . Manager ) : self . _session . unlock ( )
1,132
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/core.py#L942-L950
[ "def", "delete_container_service", "(", "access_token", ",", "subscription_id", ",", "resource_group", ",", "service_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourcegroups/'", ",", "resource_group", ",", "'/providers/Microsoft.ContainerService/ContainerServices/'", ",", "service_name", ",", "'?api-version='", ",", "ACS_API", "]", ")", "return", "do_delete", "(", "endpoint", ",", "access_token", ")" ]
Wraps an existing method on an object with the provided generator which will be sent the value when it yields control .
def intercept ( obj , methodname , wrapper ) : original = getattr ( obj , methodname ) def replacement ( * args , * * kwargs ) : wrapfn = wrapper ( * args , * * kwargs ) wrapfn . send ( None ) result = original ( * args , * * kwargs ) try : wrapfn . send ( result ) except StopIteration : return result else : raise AssertionError ( 'Generator did not stop' ) def unwrap ( ) : """ Restores the method to it's original (unwrapped) state. """ setattr ( obj , methodname , original ) replacement . unwrap = unwrap setattr ( obj , methodname , replacement )
1,133
https://github.com/Fluxx/exam/blob/27dc53a703349ec09433a6b989d6fc32ad523c0b/exam/helpers.py#L28-L72
[ "def", "add_metadata", "(", "file_name", ",", "title", ",", "artist", ",", "album", ")", ":", "tags", "=", "EasyMP3", "(", "file_name", ")", "if", "title", ":", "tags", "[", "\"title\"", "]", "=", "title", "if", "artist", ":", "tags", "[", "\"artist\"", "]", "=", "artist", "if", "album", ":", "tags", "[", "\"album\"", "]", "=", "album", "tags", ".", "save", "(", ")", "return", "file_name" ]
Returns the next element or raises StopIteration if stopped .
def next ( self ) : # need new iterable? if self . r == self . repeats : self . i = ( self . i + 1 ) % self . lenght self . r = 0 self . r += 1 if self . stopping and self . i == 0 and self . r == 1 : self . stopped = True if self . i == 0 and self . stopped : raise StopIteration else : iterator = self . iterators [ self . i ] return iterator . next ( )
1,134
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/numap/NuMap.py#L880-L896
[ "def", "authentication", "(", "self", ",", "event", ")", ":", "try", ":", "self", ".", "log", "(", "\"Authorization has been granted by DB check:\"", ",", "event", ".", "username", ",", "lvl", "=", "debug", ")", "account", ",", "profile", ",", "clientconfig", "=", "event", ".", "userdata", "useruuid", "=", "event", ".", "useruuid", "originatingclientuuid", "=", "event", ".", "clientuuid", "clientuuid", "=", "clientconfig", ".", "uuid", "if", "clientuuid", "!=", "originatingclientuuid", ":", "self", ".", "log", "(", "\"Mutating client uuid to request id:\"", ",", "clientuuid", ",", "lvl", "=", "network", ")", "# Assign client to user", "if", "useruuid", "in", "self", ".", "_users", ":", "signedinuser", "=", "self", ".", "_users", "[", "useruuid", "]", "else", ":", "signedinuser", "=", "User", "(", "account", ",", "profile", ",", "useruuid", ")", "self", ".", "_users", "[", "account", ".", "uuid", "]", "=", "signedinuser", "if", "clientuuid", "in", "signedinuser", ".", "clients", ":", "self", ".", "log", "(", "\"Client configuration already logged in.\"", ",", "lvl", "=", "critical", ")", "# TODO: What now??", "# Probably senseful would be to add the socket to the", "# client's other socket", "# The clients would be identical then - that could cause", "# problems", "# which could be remedied by duplicating the configuration", "else", ":", "signedinuser", ".", "clients", ".", "append", "(", "clientuuid", ")", "self", ".", "log", "(", "\"Active client (\"", ",", "clientuuid", ",", "\") registered to \"", "\"user\"", ",", "useruuid", ",", "lvl", "=", "debug", ")", "# Update socket..", "socket", "=", "self", ".", "_sockets", "[", "event", ".", "sock", "]", "socket", ".", "clientuuid", "=", "clientuuid", "self", ".", "_sockets", "[", "event", ".", "sock", "]", "=", "socket", "# ..and client lists", "try", ":", "language", "=", "clientconfig", ".", "language", "except", "AttributeError", ":", "language", "=", "\"en\"", "# TODO: Rewrite and simplify this:", "newclient", 
"=", "Client", "(", "sock", "=", "event", ".", "sock", ",", "ip", "=", "socket", ".", "ip", ",", "clientuuid", "=", "clientuuid", ",", "useruuid", "=", "useruuid", ",", "name", "=", "clientconfig", ".", "name", ",", "config", "=", "clientconfig", ",", "language", "=", "language", ")", "del", "(", "self", ".", "_clients", "[", "originatingclientuuid", "]", ")", "self", ".", "_clients", "[", "clientuuid", "]", "=", "newclient", "authpacket", "=", "{", "\"component\"", ":", "\"auth\"", ",", "\"action\"", ":", "\"login\"", ",", "\"data\"", ":", "account", ".", "serializablefields", "(", ")", "}", "self", ".", "log", "(", "\"Transmitting Authorization to client\"", ",", "authpacket", ",", "lvl", "=", "network", ")", "self", ".", "fireEvent", "(", "write", "(", "event", ".", "sock", ",", "json", ".", "dumps", "(", "authpacket", ")", ")", ",", "\"wsserver\"", ")", "profilepacket", "=", "{", "\"component\"", ":", "\"profile\"", ",", "\"action\"", ":", "\"get\"", ",", "\"data\"", ":", "profile", ".", "serializablefields", "(", ")", "}", "self", ".", "log", "(", "\"Transmitting Profile to client\"", ",", "profilepacket", ",", "lvl", "=", "network", ")", "self", ".", "fireEvent", "(", "write", "(", "event", ".", "sock", ",", "json", ".", "dumps", "(", "profilepacket", ")", ")", ",", "\"wsserver\"", ")", "clientconfigpacket", "=", "{", "\"component\"", ":", "\"clientconfig\"", ",", "\"action\"", ":", "\"get\"", ",", "\"data\"", ":", "clientconfig", ".", "serializablefields", "(", ")", "}", "self", ".", "log", "(", "\"Transmitting client configuration to client\"", ",", "clientconfigpacket", ",", "lvl", "=", "network", ")", "self", ".", "fireEvent", "(", "write", "(", "event", ".", "sock", ",", "json", ".", "dumps", "(", "clientconfigpacket", ")", ")", ",", "\"wsserver\"", ")", "self", ".", "fireEvent", "(", "userlogin", "(", "clientuuid", ",", "useruuid", ",", "clientconfig", ",", "signedinuser", ")", ")", "self", ".", "log", "(", "\"User configured: Name\"", ",", 
"signedinuser", ".", "account", ".", "name", ",", "\"Profile\"", ",", "signedinuser", ".", "profile", ".", "uuid", ",", "\"Clients\"", ",", "signedinuser", ".", "clients", ",", "lvl", "=", "debug", ")", "except", "Exception", "as", "e", ":", "self", ".", "log", "(", "\"Error (%s, %s) during auth grant: %s\"", "%", "(", "type", "(", "e", ")", ",", "e", ",", "event", ")", ",", "lvl", "=", "error", ")" ]
Returns a result if available within timeout else raises a TimeoutError exception . See documentation for NuMap . next .
def next ( self ) : return self . iterator . next ( task = self . task , timeout = self . timeout , block = self . block )
1,135
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/numap/NuMap.py#L919-L925
[ "def", "_CompressHistogram", "(", "self", ",", "histo_ev", ")", ":", "return", "CompressedHistogramEvent", "(", "histo_ev", ".", "wall_time", ",", "histo_ev", ".", "step", ",", "compressor", ".", "compress_histogram_proto", "(", "histo_ev", ".", "histogram_value", ",", "self", ".", "_compression_bps", ")", ")" ]
Write language - specific script template to file .
def write_template ( fn , lang = "python" ) : with open ( fn , "wb" ) as fh : if lang == "python" : fh . write ( PY_TEMPLATE ) elif lang == "bash" : fh . write ( SH_TEMPLATE )
1,136
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/script.py#L94-L108
[ "def", "communityvisibilitystate", "(", "self", ")", ":", "if", "self", ".", "_communityvisibilitystate", "==", "None", ":", "return", "None", "elif", "self", ".", "_communityvisibilitystate", "in", "self", ".", "VisibilityState", ":", "return", "self", ".", "VisibilityState", "[", "self", ".", "_communityvisibilitystate", "]", "else", ":", "#Invalid State", "return", "None" ]
Execute arbitrary scripts .
def script(inbox, cfg):
    """Execute arbitrary scripts.

    Connects the input ports declared in ``cfg["in"]`` to paths found in
    *inbox*, derives an output path for every ``(port, extension)`` pair
    in ``cfg["out"]``, then runs the configured script via
    ``_eval_script``.

    Args:
        inbox: sequence of dicts mapping port names to file paths
            (presumably produced by upstream pipeline nodes -- TODO
            confirm against callers).
        cfg: script configuration with keys ``id``, ``params``, ``in``,
            ``out``, ``dir``, ``evaluator``, ``preamble``, ``executable``
            and ``script``.

    Returns:
        dict: mapping of output port name to the produced file path.

    Raises:
        Exception: if an input port cannot be connected, or the script
            exits with a non-zero status.
    """
    script_name = cfg["id"]
    # Short, deterministic id for this invocation (used in log messages).
    script_id = str(abs(hash((cfg["id"],) + tuple(inbox[0].values()))))[0:8]
    # LOG.log(mp.DEFAULT, "@papy;script %s:%s started" % (script_name, script_id))
    # LOG.log(mp.SUBDEFAULT, "@papy;%s:%s received: %s" % (script_name, script_id, inbox))
    args = {}
    args["params"] = dict(cfg["params"])
    args["in"] = {}
    for in_port in cfg["in"]:
        for inin_ports in inbox:
            in_path = inin_ports.get(in_port, None)
            if (in_path is not None):
                # first matching input-output (including type) port is linked remaining ignored
                args["in"][in_port] = in_path
                break
    # check that all input ports are connected
    if len(args["in"]) < len(cfg["in"]):
        raise Exception("not all in_ports connected, got: %s" % (args["in"],))
    # create output file for out_ports
    args["out"] = {}
    out = {}
    for i, (out_port, out_ext) in enumerate(cfg["out"]):
        if cfg["in"] == tuple(out_port_ for out_port_, _ in cfg["out"]):
            # in/out port names match positionally: prefix taken from the
            # input at the same index, base is just the script id.
            pfx = args["in"][cfg["in"][i]].split("/")[-1].split(".")[0] + "_"
            base = cfg["id"]
        else:
            # otherwise the prefix comes from the first input port and the
            # out-port name is appended to the base to keep paths unique.
            pfx = args["in"][cfg["in"][0]].split("/")[-1].split(".")[0] + "_"
            base = cfg["id"] + "-" + out_port
        if out_ext:
            out_path = cfg["dir"] + "/" + pfx + base + "." + out_ext
        else:
            out_path = cfg["dir"] + "/" + pfx + base
        args["out"][out_port] = out_path
        out[out_port] = out_path
    # evaluate and check for errors
    ret = _eval_script(cfg["evaluator"], cfg["preamble"], cfg["dir"],
                      cfg["executable"], cfg["script"], args)
    if ret[0] != 0:
        # LOG.error("@papy;%s:%s %s:%s:%s" % (script_name, script_id, ret[0],
        #           ret[1].replace("\n", "<br>"),
        #           ret[2].replace("\n", "<br>")))
        raise Exception(ret[0], cfg["script"], ret[1], ret[2])
    #LOG.log(mp.SUBDEFAULT, "@papy;%s:%s produced:%s" % (script_name, script_id, out))
    #LOG.log(mp.DEFAULT, "@papy;script %s:%s finished" % (script_name, script_id))
    return out
1,137
https://github.com/mcieslik-mctp/papy/blob/708e50827b5db46bbea081982cb74b9b0e464064/src/papy/util/script.py#L152-L203
[ "def", "delete_group", "(", "group_id", ",", "purge_data", ",", "*", "*", "kwargs", ")", ":", "user_id", "=", "kwargs", ".", "get", "(", "'user_id'", ")", "try", ":", "group_i", "=", "db", ".", "DBSession", ".", "query", "(", "ResourceGroup", ")", ".", "filter", "(", "ResourceGroup", ".", "id", "==", "group_id", ")", ".", "one", "(", ")", "except", "NoResultFound", ":", "raise", "ResourceNotFoundError", "(", "\"Group %s not found\"", "%", "(", "group_id", ")", ")", "group_items", "=", "db", ".", "DBSession", ".", "query", "(", "ResourceGroupItem", ")", ".", "filter", "(", "ResourceGroupItem", ".", "group_id", "==", "group_id", ")", ".", "all", "(", ")", "for", "gi", "in", "group_items", ":", "db", ".", "DBSession", ".", "delete", "(", "gi", ")", "if", "purge_data", "==", "'Y'", ":", "_purge_datasets_unique_to_resource", "(", "'GROUP'", ",", "group_id", ")", "log", ".", "info", "(", "\"Deleting group %s, id=%s\"", ",", "group_i", ".", "name", ",", "group_id", ")", "group_i", ".", "network", ".", "check_write_permission", "(", "user_id", ")", "db", ".", "DBSession", ".", "delete", "(", "group_i", ")", "db", ".", "DBSession", ".", "flush", "(", ")" ]
Edit a job .
def edit ( self , resource ) : schema = JobSchema ( exclude = ( 'id' , 'status' , 'options' , 'package_name' , 'config_name' , 'device_name' , 'result_id' , 'user_id' , 'created' , 'updated' , 'automatic' , 'run_at' ) ) json = self . service . encode ( schema , resource ) schema = JobSchema ( ) resp = self . service . edit ( self . base , resource . name , json ) return self . service . decode ( schema , resp )
1,138
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/jobs.py#L153-L165
[ "def", "removefromreadergroup", "(", "self", ",", "groupname", ")", ":", "hresult", ",", "hcontext", "=", "SCardEstablishContext", "(", "SCARD_SCOPE_USER", ")", "if", "0", "!=", "hresult", ":", "raise", "EstablishContextException", "(", "hresult", ")", "try", ":", "hresult", "=", "SCardRemoveReaderFromGroup", "(", "hcontext", ",", "self", ".", "name", ",", "groupname", ")", "if", "0", "!=", "hresult", ":", "raise", "RemoveReaderFromGroupException", "(", "hresult", ",", "self", ".", "name", ",", "groupname", ")", "finally", ":", "hresult", "=", "SCardReleaseContext", "(", "hcontext", ")", "if", "0", "!=", "hresult", ":", "raise", "ReleaseContextException", "(", "hresult", ")" ]
Launch a new job .
def launch ( self , resource ) : schema = JobSchema ( exclude = ( 'id' , 'status' , 'package_name' , 'config_name' , 'device_name' , 'result_id' , 'user_id' , 'created' , 'updated' , 'automatic' ) ) json = self . service . encode ( schema , resource ) schema = JobSchema ( ) resp = self . service . create ( self . base , json ) return self . service . decode ( schema , resp )
1,139
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/jobs.py#L167-L179
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_closed", ":", "return", "False", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: is closing\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "self", ".", "_wrap_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_internal_console_port", ",", "self", ".", "_project", ")", "self", ".", "_internal_console_port", "=", "None", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "self", ".", "_closed", "=", "True", "return", "True" ]
Bulk launch a set of jobs .
def bulk_launch ( self , jobs = None , filter = None , all = False ) : # pylint: disable=redefined-builtin json = None if jobs is not None : schema = JobSchema ( exclude = ( 'id' , 'status' , 'package_name' , 'config_name' , 'device_name' , 'result_id' , 'user_id' , 'created' , 'updated' , 'automatic' ) ) jobs_json = self . service . encode ( schema , jobs , many = True ) json = { self . RESOURCE : jobs_json } schema = JobSchema ( ) resp = self . service . post ( self . base , params = { 'bulk' : 'launch' , 'filter' : filter , 'all' : all } , json = json ) return self . service . decode ( schema , resp , many = True )
1,140
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/jobs.py#L188-L204
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_closed", ":", "return", "False", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: is closing\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "self", ".", "_wrap_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_internal_console_port", ",", "self", ".", "_project", ")", "self", ".", "_internal_console_port", "=", "None", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "self", ".", "_closed", "=", "True", "return", "True" ]
Get a highlight .
def get ( self , id , seq , line ) : # pylint: disable=invalid-name,redefined-builtin schema = HighlightSchema ( ) resp = self . service . get_id ( self . _base ( id , seq ) , line ) return self . service . decode ( schema , resp )
1,141
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/highlights.py#L57-L67
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Create or edit a highlight .
def create_or_edit ( self , id , seq , resource ) : # pylint: disable=invalid-name,redefined-builtin schema = HighlightSchema ( exclude = ( 'id' , 'seq' ) ) json = self . service . encode ( schema , resource ) schema = HighlightSchema ( ) resp = self . service . edit ( self . _base ( id , seq ) , resource . line , json ) return self . service . decode ( schema , resp )
1,142
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/highlights.py#L69-L83
[ "def", "_read_console_output", "(", "self", ",", "ws", ",", "out", ")", ":", "while", "True", ":", "msg", "=", "yield", "from", "ws", ".", "receive", "(", ")", "if", "msg", ".", "tp", "==", "aiohttp", ".", "WSMsgType", ".", "text", ":", "out", ".", "feed_data", "(", "msg", ".", "data", ".", "encode", "(", ")", ")", "elif", "msg", ".", "tp", "==", "aiohttp", ".", "WSMsgType", ".", "BINARY", ":", "out", ".", "feed_data", "(", "msg", ".", "data", ")", "elif", "msg", ".", "tp", "==", "aiohttp", ".", "WSMsgType", ".", "ERROR", ":", "log", ".", "critical", "(", "\"Docker WebSocket Error: {}\"", ".", "format", "(", "msg", ".", "data", ")", ")", "else", ":", "out", ".", "feed_eof", "(", ")", "ws", ".", "close", "(", ")", "break", "yield", "from", "self", ".", "stop", "(", ")" ]
Create a highlight .
def create ( self , id , seq , resource ) : # pylint: disable=invalid-name,redefined-builtin return self . create_or_edit ( id , seq , resource )
1,143
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/highlights.py#L85-L94
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Edit a highlight .
def edit ( self , id , seq , resource ) : # pylint: disable=invalid-name,redefined-builtin return self . create_or_edit ( id , seq , resource )
1,144
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/highlights.py#L96-L105
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Delete a highlight .
def delete ( self , id , seq , line ) : # pylint: disable=invalid-name,redefined-builtin return self . service . delete_id ( self . _base ( id , seq ) , line )
1,145
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/highlights.py#L107-L114
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Setup blueprint .
def post_ext_init ( state ) : app = state . app app . config . setdefault ( 'OAUTHCLIENT_SITENAME' , app . config . get ( 'THEME_SITENAME' , 'Invenio' ) ) app . config . setdefault ( 'OAUTHCLIENT_BASE_TEMPLATE' , app . config . get ( 'BASE_TEMPLATE' , 'invenio_oauthclient/base.html' ) ) app . config . setdefault ( 'OAUTHCLIENT_COVER_TEMPLATE' , app . config . get ( 'COVER_TEMPLATE' , 'invenio_oauthclient/base_cover.html' ) ) app . config . setdefault ( 'OAUTHCLIENT_SETTINGS_TEMPLATE' , app . config . get ( 'SETTINGS_TEMPLATE' , 'invenio_oauthclient/settings/base.html' ) )
1,146
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/views/client.py#L43-L61
[ "def", "returnJobReqs", "(", "self", ",", "jobReqs", ")", ":", "# Since we are only reading this job's specific values from the state file, we don't", "# need a lock", "jobState", "=", "self", ".", "_JobState", "(", "self", ".", "_CacheState", ".", "_load", "(", "self", ".", "cacheStateFile", ")", ".", "jobState", "[", "self", ".", "jobID", "]", ")", "for", "x", "in", "list", "(", "jobState", ".", "jobSpecificFiles", ".", "keys", "(", ")", ")", ":", "self", ".", "deleteLocalFile", "(", "x", ")", "with", "self", ".", "_CacheState", ".", "open", "(", "self", ")", "as", "cacheInfo", ":", "cacheInfo", ".", "sigmaJob", "-=", "jobReqs" ]
Send user to remote application for authentication .
def login ( remote_app ) : oauth = current_app . extensions [ 'oauthlib.client' ] if remote_app not in oauth . remote_apps : return abort ( 404 ) # Get redirect target in safe manner. next_param = get_safe_redirect_target ( arg = 'next' ) # Redirect URI - must be registered in the remote service. callback_url = url_for ( '.authorized' , remote_app = remote_app , _external = True , ) # Create a JSON Web Token that expires after OAUTHCLIENT_STATE_EXPIRES # seconds. state_token = serializer . dumps ( { 'app' : remote_app , 'next' : next_param , 'sid' : _create_identifier ( ) , } ) return oauth . remote_apps [ remote_app ] . authorize ( callback = callback_url , state = state_token , )
1,147
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/views/client.py#L65-L93
[ "def", "split", "(", "self", ",", "k", ")", ":", "if", "not", "1", "<=", "k", "<=", "self", ".", "num_rows", "-", "1", ":", "raise", "ValueError", "(", "\"Invalid value of k. k must be between 1 and the\"", "\"number of rows - 1\"", ")", "rows", "=", "np", ".", "random", ".", "permutation", "(", "self", ".", "num_rows", ")", "first", "=", "self", ".", "take", "(", "rows", "[", ":", "k", "]", ")", "rest", "=", "self", ".", "take", "(", "rows", "[", "k", ":", "]", ")", "for", "column_label", "in", "self", ".", "_formats", ":", "first", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "rest", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "return", "first", ",", "rest" ]
Authorized handler callback .
def authorized ( remote_app = None ) : if remote_app not in current_oauthclient . handlers : return abort ( 404 ) state_token = request . args . get ( 'state' ) # Verify state parameter try : assert state_token # Checks authenticity and integrity of state and decodes the value. state = serializer . loads ( state_token ) # Verify that state is for this session, app and that next parameter # have not been modified. assert state [ 'sid' ] == _create_identifier ( ) assert state [ 'app' ] == remote_app # Store next URL set_session_next_url ( remote_app , state [ 'next' ] ) except ( AssertionError , BadData ) : if current_app . config . get ( 'OAUTHCLIENT_STATE_ENABLED' , True ) or ( not ( current_app . debug or current_app . testing ) ) : abort ( 403 ) try : handler = current_oauthclient . handlers [ remote_app ] ( ) except OAuthException as e : if e . type == 'invalid_response' : abort ( 500 ) else : raise return handler
1,148
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/views/client.py#L97-L128
[ "def", "save", "(", "self", ",", "create_multiple_renditions", "=", "True", ",", "preserve_source_rendition", "=", "True", ",", "encode_to", "=", "enums", ".", "EncodeToEnum", ".", "FLV", ")", ":", "if", "is_ftp_connection", "(", "self", ".", "connection", ")", "and", "len", "(", "self", ".", "assets", ")", ">", "0", ":", "self", ".", "connection", ".", "post", "(", "xml", "=", "self", ".", "to_xml", "(", ")", ",", "assets", "=", "self", ".", "assets", ")", "elif", "not", "self", ".", "id", "and", "self", ".", "_filename", ":", "self", ".", "id", "=", "self", ".", "connection", ".", "post", "(", "'create_video'", ",", "self", ".", "_filename", ",", "create_multiple_renditions", "=", "create_multiple_renditions", ",", "preserve_source_rendition", "=", "preserve_source_rendition", ",", "encode_to", "=", "encode_to", ",", "video", "=", "self", ".", "_to_dict", "(", ")", ")", "elif", "not", "self", ".", "id", "and", "len", "(", "self", ".", "renditions", ")", ">", "0", ":", "self", ".", "id", "=", "self", ".", "connection", ".", "post", "(", "'create_video'", ",", "video", "=", "self", ".", "_to_dict", "(", ")", ")", "elif", "self", ".", "id", ":", "data", "=", "self", ".", "connection", ".", "post", "(", "'update_video'", ",", "video", "=", "self", ".", "_to_dict", "(", ")", ")", "if", "data", ":", "self", ".", "_load", "(", "data", ")" ]
Extra signup step .
def signup ( remote_app ) : if remote_app not in current_oauthclient . signup_handlers : return abort ( 404 ) res = current_oauthclient . signup_handlers [ remote_app ] [ 'view' ] ( ) return abort ( 404 ) if res is None else res
1,149
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/views/client.py#L132-L137
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Disconnect user from remote application .
def disconnect ( remote_app ) : if remote_app not in current_oauthclient . disconnect_handlers : return abort ( 404 ) ret = current_oauthclient . disconnect_handlers [ remote_app ] ( ) db . session . commit ( ) return ret
1,150
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/views/client.py#L141-L151
[ "def", "update", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "augment_args", "(", "args", ",", "kwargs", ")", "kwargs", "[", "'log_action'", "]", "=", "kwargs", ".", "get", "(", "'log_action'", ",", "'update'", ")", "if", "not", "self", ".", "rec", ":", "return", "self", ".", "add", "(", "*", "*", "kwargs", ")", "else", ":", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "# Don't update object; use whatever was set in the original record", "if", "k", "not", "in", "(", "'source'", ",", "'s_vid'", ",", "'table'", ",", "'t_vid'", ",", "'partition'", ",", "'p_vid'", ")", ":", "setattr", "(", "self", ".", "rec", ",", "k", ",", "v", ")", "self", ".", "_session", ".", "merge", "(", "self", ".", "rec", ")", "if", "self", ".", "_logger", ":", "self", ".", "_logger", ".", "info", "(", "self", ".", "rec", ".", "log_str", ")", "self", ".", "_session", ".", "commit", "(", ")", "self", ".", "_ai_rec_id", "=", "None", "return", "self", ".", "rec", ".", "id" ]
Returns the checksum in bytes for an address in bytes
def address_checksum ( address ) : address_bytes = address h = blake2b ( digest_size = 5 ) h . update ( address_bytes ) checksum = bytearray ( h . digest ( ) ) checksum . reverse ( ) return checksum
1,151
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/crypto.py#L16-L25
[ "def", "next", "(", "self", ")", ":", "c", "=", "Conversation", "(", "self", ".", "data", ",", "requests", ".", "get", "(", "self", ".", "data", "[", "\"comments\"", "]", "[", "\"paging\"", "]", "[", "\"next\"", "]", ")", ".", "json", "(", ")", ")", "if", "\"error\"", "in", "c", ".", "data", "[", "\"comments\"", "]", "and", "c", ".", "data", "[", "\"comments\"", "]", "[", "\"error\"", "]", "[", "\"code\"", "]", "==", "613", ":", "raise", "LimitExceededException", "(", ")", "return", "c" ]
Generates a deterministic keypair from seed based on index
def keypair_from_seed ( seed , index = 0 ) : h = blake2b ( digest_size = 32 ) h . update ( seed + struct . pack ( ">L" , index ) ) priv_key = h . digest ( ) pub_key = private_to_public_key ( priv_key ) return { 'private' : priv_key , 'public' : pub_key }
1,152
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/crypto.py#L38-L58
[ "def", "vtmvg", "(", "v1", ",", "matrix", ",", "v2", ",", "nrow", ",", "ncol", ")", ":", "v1", "=", "stypes", ".", "toDoubleVector", "(", "v1", ")", "matrix", "=", "stypes", ".", "toDoubleMatrix", "(", "matrix", ")", "v2", "=", "stypes", ".", "toDoubleVector", "(", "v2", ")", "nrow", "=", "ctypes", ".", "c_int", "(", "nrow", ")", "ncol", "=", "ctypes", ".", "c_int", "(", "ncol", ")", "return", "libspice", ".", "vtmvg_c", "(", "v1", ",", "matrix", ",", "v2", ",", "nrow", ",", "ncol", ")" ]
Verifies signature is correct for a message signed with public_key
def verify_signature ( message , signature , public_key ) : try : ed25519_blake2 . checkvalid ( signature , message , public_key ) except ed25519_blake2 . SignatureMismatch : return False return True
1,153
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/crypto.py#L95-L116
[ "def", "data", "(", "self", ",", "where", ",", "start", ",", "end", ",", "archiver", "=", "\"\"", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "return", "self", ".", "query", "(", "\"select data in ({0}, {1}) where {2}\"", ".", "format", "(", "start", ",", "end", ",", "where", ")", ",", "archiver", ",", "timeout", ")", ".", "get", "(", "'timeseries'", ",", "{", "}", ")" ]
Signs a message using private_key and public_key
def sign_message ( message , private_key , public_key = None ) : if public_key is None : public_key = private_to_public_key ( private_key ) return ed25519_blake2 . signature_unsafe ( message , private_key , public_key )
1,154
https://github.com/dourvaris/nano-python/blob/f26b8bc895b997067780f925049a70e82c0c2479/src/nano/crypto.py#L119-L142
[ "def", "command_max_delay", "(", "self", ",", "event", "=", "None", ")", ":", "try", ":", "max_delay", "=", "self", ".", "max_delay_var", ".", "get", "(", ")", "except", "ValueError", ":", "max_delay", "=", "self", ".", "runtime_cfg", ".", "max_delay", "if", "max_delay", "<", "0", ":", "max_delay", "=", "self", ".", "runtime_cfg", ".", "max_delay", "if", "max_delay", ">", "0.1", ":", "max_delay", "=", "self", ".", "runtime_cfg", ".", "max_delay", "self", ".", "runtime_cfg", ".", "max_delay", "=", "max_delay", "self", ".", "max_delay_var", ".", "set", "(", "self", ".", "runtime_cfg", ".", "max_delay", ")" ]
Check the CDRouter Support Lounge for eligible upgrades using your Support Lounge email & password .
def check_for_lounge_upgrade ( self , email , password ) : schema = ReleaseSchema ( ) resp = self . service . post ( self . base + 'lounge/check/' , json = { 'email' : email , 'password' : password } ) return self . service . decode ( schema , resp )
1,155
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/system.py#L323-L335
[ "def", "wait", "(", "self", ")", ":", "self", ".", "_done_event", ".", "wait", "(", "MAXINT", ")", "return", "self", ".", "_status", ",", "self", ".", "_exception" ]
Download & install an upgrade from the CDRouter Support Lounge using your Support Lounge email & password . Please note that any running tests will be stopped .
def lounge_upgrade ( self , email , password , release_id ) : schema = UpgradeSchema ( ) resp = self . service . post ( self . base + 'lounge/upgrade/' , json = { 'email' : email , 'password' : password , 'release' : { 'id' : int ( release_id ) } } ) return self . service . decode ( schema , resp )
1,156
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/system.py#L337-L351
[ "def", "_record_offset", "(", "self", ")", ":", "offset", "=", "self", ".", "blob_file", ".", "tell", "(", ")", "self", ".", "event_offsets", ".", "append", "(", "offset", ")" ]
Download & install a license for your CDRouter system from the CDRouter Support Lounge .
def lounge_update_license ( self ) : schema = UpgradeSchema ( ) resp = self . service . post ( self . base + 'license/' ) return self . service . decode ( schema , resp )
1,157
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/system.py#L368-L377
[ "def", "face_index", "(", "vertices", ")", ":", "new_verts", "=", "[", "]", "face_indices", "=", "[", "]", "for", "wall", "in", "vertices", ":", "face_wall", "=", "[", "]", "for", "vert", "in", "wall", ":", "if", "new_verts", ":", "if", "not", "np", ".", "isclose", "(", "vert", ",", "new_verts", ")", ".", "all", "(", "axis", "=", "1", ")", ".", "any", "(", ")", ":", "new_verts", ".", "append", "(", "vert", ")", "else", ":", "new_verts", ".", "append", "(", "vert", ")", "face_index", "=", "np", ".", "where", "(", "np", ".", "isclose", "(", "vert", ",", "new_verts", ")", ".", "all", "(", "axis", "=", "1", ")", ")", "[", "0", "]", "[", "0", "]", "face_wall", ".", "append", "(", "face_index", ")", "face_indices", ".", "append", "(", "face_wall", ")", "return", "np", ".", "array", "(", "new_verts", ")", ",", "np", ".", "array", "(", "face_indices", ")" ]
Update the license on your CDRouter system manually by uploading a . lic license from the CDRouter Support Lounge .
def manual_update_license ( self , fd , filename = 'cdrouter.lic' ) : schema = UpgradeSchema ( ) resp = self . service . post ( self . base + 'license/' , files = { 'file' : ( filename , fd ) } ) return self . service . decode ( schema , resp )
1,158
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/system.py#L379-L391
[ "def", "remove", "(", "self", ",", "key", ",", "where", "=", "None", ",", "start", "=", "None", ",", "stop", "=", "None", ")", ":", "where", "=", "_ensure_term", "(", "where", ",", "scope_level", "=", "1", ")", "try", ":", "s", "=", "self", ".", "get_storer", "(", "key", ")", "except", "KeyError", ":", "# the key is not a valid store, re-raising KeyError", "raise", "except", "Exception", ":", "if", "where", "is", "not", "None", ":", "raise", "ValueError", "(", "\"trying to remove a node with a non-None where clause!\"", ")", "# we are actually trying to remove a node (with children)", "s", "=", "self", ".", "get_node", "(", "key", ")", "if", "s", "is", "not", "None", ":", "s", ".", "_f_remove", "(", "recursive", "=", "True", ")", "return", "None", "# remove the node", "if", "com", ".", "_all_none", "(", "where", ",", "start", ",", "stop", ")", ":", "s", ".", "group", ".", "_f_remove", "(", "recursive", "=", "True", ")", "# delete from the table", "else", ":", "if", "not", "s", ".", "is_table", ":", "raise", "ValueError", "(", "'can only remove with where on objects written as tables'", ")", "return", "s", ".", "delete", "(", "where", "=", "where", ",", "start", "=", "start", ",", "stop", "=", "stop", ")" ]
Get system disk space usage .
def space ( self ) : schema = SpaceSchema ( ) resp = self . service . get ( self . base + 'space/' ) return self . service . decode ( schema , resp )
1,159
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/system.py#L425-L433
[ "def", "put_lifecycle_configuration", "(", "Bucket", ",", "Rules", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "Rules", "is", "not", "None", "and", "isinstance", "(", "Rules", ",", "six", ".", "string_types", ")", ":", "Rules", "=", "salt", ".", "utils", ".", "json", ".", "loads", "(", "Rules", ")", "conn", ".", "put_bucket_lifecycle_configuration", "(", "Bucket", "=", "Bucket", ",", "LifecycleConfiguration", "=", "{", "'Rules'", ":", "Rules", "}", ")", "return", "{", "'updated'", ":", "True", ",", "'name'", ":", "Bucket", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'updated'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
Get system interfaces .
def interfaces ( self , addresses = False ) : schema = InterfaceSchema ( ) resp = self . service . get ( self . base + 'interfaces/' , params = { 'addresses' : addresses } ) return self . service . decode ( schema , resp , many = True )
1,160
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/system.py#L442-L450
[ "def", "create_or_update_secret", "(", "self", ",", "path", ",", "secret", ",", "cas", "=", "None", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'options'", ":", "{", "}", ",", "'data'", ":", "secret", ",", "}", "if", "cas", "is", "not", "None", ":", "params", "[", "'options'", "]", "[", "'cas'", "]", "=", "cas", "api_path", "=", "'/v1/{mount_point}/data/{path}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "path", "=", "path", ")", "response", "=", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
Save fields value only for non - m2m fields .
def _set_original_fields ( instance ) : original_fields = { } def _set_original_field ( instance , field ) : if instance . pk is None : original_fields [ field ] = None else : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : # Only get the PK, we don't want to get the object # (which would make an additional request) original_fields [ field ] = getattr ( instance , '{0}_id' . format ( field ) ) else : original_fields [ field ] = getattr ( instance , field ) for field in getattr ( instance , '_tracked_fields' , [ ] ) : _set_original_field ( instance , field ) for field in getattr ( instance , '_tracked_related_fields' , { } ) . keys ( ) : _set_original_field ( instance , field ) instance . _original_fields = original_fields # Include pk to detect the creation of an object instance . _original_fields [ 'pk' ] = instance . pk
1,161
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L29-L54
[ "def", "yum_update", "(", "self", ",", "allow_reboot", "=", "False", ")", ":", "self", ".", "run", "(", "'yum clean all'", ")", "self", ".", "run", "(", "'test -f /usr/bin/subscription-manager && subscription-manager repos --list-enabled'", ",", "ignore_error", "=", "True", ")", "self", ".", "run", "(", "'yum repolist'", ")", "self", ".", "run", "(", "'yum update -y --quiet'", ",", "retry", "=", "3", ")", "# reboot if a new initrd has been generated since the boot", "if", "allow_reboot", ":", "self", ".", "run", "(", "'grubby --set-default $(ls /boot/vmlinuz-*.x86_64|tail -1)'", ")", "default_kernel", "=", "self", ".", "run", "(", "'grubby --default-kernel'", ")", "[", "0", "]", ".", "rstrip", "(", ")", "cur_kernel", "=", "self", ".", "run", "(", "'uname -r'", ")", "[", "0", "]", ".", "rstrip", "(", ")", "if", "cur_kernel", "not", "in", "default_kernel", ":", "self", ".", "run", "(", "'reboot'", ",", "ignore_error", "=", "True", ")", "self", ".", "ssh_pool", ".", "stop_all", "(", ")" ]
Check if some tracked fields have changed
def _has_changed ( instance ) : for field , value in instance . _original_fields . items ( ) : if field != 'pk' and not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : try : if field in getattr ( instance , '_tracked_fields' , [ ] ) : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : if getattr ( instance , '{0}_id' . format ( field ) ) != value : return True else : if getattr ( instance , field ) != value : return True except TypeError : # Can't compare old and new value, should be different. return True return False
1,162
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L57-L75
[ "def", "pre_run", "(", "self", ",", "args", ",", "options", ")", ":", "if", "'generate'", "in", "args", "and", "args", "[", "'generate'", "]", ":", "self", ".", "metadata_provider", "=", "CsvMetadataProvider", "(", "args", "[", "'channeldir'", "]", ",", "channelinfo", "=", "args", "[", "'channelinfo'", "]", ",", "contentinfo", "=", "args", "[", "'contentinfo'", "]", ",", "exercisesinfo", "=", "args", "[", "'exercisesinfo'", "]", ",", "questionsinfo", "=", "args", "[", "'questionsinfo'", "]", ",", "validate_and_cache", "=", "False", ")", "self", ".", "metadata_provider", ".", "generate_templates", "(", "exercise_questions", "=", "True", ")", "self", ".", "metadata_provider", ".", "generate_contentinfo_from_channeldir", "(", "args", ",", "options", ")", "sys", ".", "exit", "(", "0", ")", "elif", "'importstudioid'", "in", "args", "and", "args", "[", "'importstudioid'", "]", ":", "studio_id", "=", "args", "[", "'importstudioid'", "]", "config", ".", "LOGGER", ".", "info", "(", "\"Calling with importstudioid... 
\"", "+", "studio_id", ")", "self", ".", "metadata_provider", "=", "CsvMetadataProvider", "(", "args", "[", "'channeldir'", "]", ",", "channelinfo", "=", "args", "[", "'channelinfo'", "]", ",", "contentinfo", "=", "args", "[", "'contentinfo'", "]", ",", "exercisesinfo", "=", "args", "[", "'exercisesinfo'", "]", ",", "questionsinfo", "=", "args", "[", "'questionsinfo'", "]", ",", "validate_and_cache", "=", "False", ")", "self", ".", "metadata_provider", ".", "generate_templates", "(", "exercise_questions", "=", "True", ")", "self", ".", "metadata_provider", ".", "generate_exercises_from_importstudioid", "(", "args", ",", "options", ")", "sys", ".", "exit", "(", "0", ")", "if", "self", ".", "metadata_provider", "is", "None", ":", "self", ".", "_init_metadata_provider", "(", "args", ",", "options", ")", "kwargs", "=", "{", "}", "# combined dictionary of argparse args and extra options", "kwargs", ".", "update", "(", "args", ")", "kwargs", ".", "update", "(", "options", ")", "json_tree_path", "=", "self", ".", "get_json_tree_path", "(", "*", "*", "kwargs", ")", "build_ricecooker_json_tree", "(", "args", ",", "options", ",", "self", ".", "metadata_provider", ",", "json_tree_path", ")" ]
Check if some related tracked fields have changed
def _has_changed_related ( instance ) : tracked_related_fields = getattr ( instance , '_tracked_related_fields' , { } ) . keys ( ) for field , value in instance . _original_fields . items ( ) : if field != 'pk' and not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : if field in tracked_related_fields : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : if getattr ( instance , '{0}_id' . format ( field ) ) != value : return True else : if getattr ( instance , field ) != value : return True return False
1,163
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L78-L97
[ "def", "pre_run", "(", "self", ",", "args", ",", "options", ")", ":", "if", "'generate'", "in", "args", "and", "args", "[", "'generate'", "]", ":", "self", ".", "metadata_provider", "=", "CsvMetadataProvider", "(", "args", "[", "'channeldir'", "]", ",", "channelinfo", "=", "args", "[", "'channelinfo'", "]", ",", "contentinfo", "=", "args", "[", "'contentinfo'", "]", ",", "exercisesinfo", "=", "args", "[", "'exercisesinfo'", "]", ",", "questionsinfo", "=", "args", "[", "'questionsinfo'", "]", ",", "validate_and_cache", "=", "False", ")", "self", ".", "metadata_provider", ".", "generate_templates", "(", "exercise_questions", "=", "True", ")", "self", ".", "metadata_provider", ".", "generate_contentinfo_from_channeldir", "(", "args", ",", "options", ")", "sys", ".", "exit", "(", "0", ")", "elif", "'importstudioid'", "in", "args", "and", "args", "[", "'importstudioid'", "]", ":", "studio_id", "=", "args", "[", "'importstudioid'", "]", "config", ".", "LOGGER", ".", "info", "(", "\"Calling with importstudioid... 
\"", "+", "studio_id", ")", "self", ".", "metadata_provider", "=", "CsvMetadataProvider", "(", "args", "[", "'channeldir'", "]", ",", "channelinfo", "=", "args", "[", "'channelinfo'", "]", ",", "contentinfo", "=", "args", "[", "'contentinfo'", "]", ",", "exercisesinfo", "=", "args", "[", "'exercisesinfo'", "]", ",", "questionsinfo", "=", "args", "[", "'questionsinfo'", "]", ",", "validate_and_cache", "=", "False", ")", "self", ".", "metadata_provider", ".", "generate_templates", "(", "exercise_questions", "=", "True", ")", "self", ".", "metadata_provider", ".", "generate_exercises_from_importstudioid", "(", "args", ",", "options", ")", "sys", ".", "exit", "(", "0", ")", "if", "self", ".", "metadata_provider", "is", "None", ":", "self", ".", "_init_metadata_provider", "(", "args", ",", "options", ")", "kwargs", "=", "{", "}", "# combined dictionary of argparse args and extra options", "kwargs", ".", "update", "(", "args", ")", "kwargs", ".", "update", "(", "options", ")", "json_tree_path", "=", "self", ".", "get_json_tree_path", "(", "*", "*", "kwargs", ")", "build_ricecooker_json_tree", "(", "args", ",", "options", ",", "self", ".", "metadata_provider", ",", "json_tree_path", ")" ]
Create a new event getting the use if django - cuser is available .
def _create_event ( instance , action ) : user = None user_repr = repr ( user ) if CUSER : user = CuserMiddleware . get_user ( ) user_repr = repr ( user ) if user is not None and user . is_anonymous : user = None return TrackingEvent . objects . create ( action = action , object = instance , object_repr = repr ( instance ) , user = user , user_repr = user_repr , )
1,164
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L100-L117
[ "def", "delete_attachments", "(", "self", ",", "volumeID", ",", "attachmentsID", ")", ":", "log", ".", "debug", "(", "\"deleting attachments from volume '{}': {}\"", ".", "format", "(", "volumeID", ",", "attachmentsID", ")", ")", "rawVolume", "=", "self", ".", "_req_raw_volume", "(", "volumeID", ")", "insID", "=", "[", "a", "[", "'id'", "]", "for", "a", "in", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", "]", "# check that all requested file are present", "for", "id", "in", "attachmentsID", ":", "if", "id", "not", "in", "insID", ":", "raise", "NotFoundException", "(", "\"could not found attachment '{}' of the volume '{}'\"", ".", "format", "(", "id", ",", "volumeID", ")", ")", "for", "index", ",", "id", "in", "enumerate", "(", "attachmentsID", ")", ":", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", ".", "pop", "(", "insID", ".", "index", "(", "id", ")", ")", "self", ".", "_db", ".", "modify_book", "(", "volumeID", ",", "rawVolume", "[", "'_source'", "]", ",", "version", "=", "rawVolume", "[", "'_version'", "]", ")" ]
Create a TrackedFieldModification for the instance .
def _create_tracked_field ( event , instance , field , fieldname = None ) : fieldname = fieldname or field if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : # We only have the pk, we need to get the actual object model = instance . _meta . get_field ( field ) . remote_field . model pk = instance . _original_fields [ field ] try : old_value = model . objects . get ( pk = pk ) except model . DoesNotExist : old_value = None else : old_value = instance . _original_fields [ field ] return TrackedFieldModification . objects . create ( event = event , field = fieldname , old_value = _serialize_field ( old_value ) , new_value = _serialize_field ( getattr ( instance , field ) ) )
1,165
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L145-L170
[ "def", "dir_df_boot", "(", "dir_df", ",", "nb", "=", "5000", ",", "par", "=", "False", ")", ":", "N", "=", "dir_df", ".", "dir_dec", ".", "values", ".", "shape", "[", "0", "]", "# number of data points", "BDIs", "=", "[", "]", "for", "k", "in", "range", "(", "nb", ")", ":", "pdir_df", "=", "dir_df", ".", "sample", "(", "n", "=", "N", ",", "replace", "=", "True", ")", "# bootstrap pseudosample", "pdir_df", ".", "reset_index", "(", "inplace", "=", "True", ")", "# reset the index", "if", "par", ":", "# do a parametric bootstrap", "for", "i", "in", "pdir_df", ".", "index", ":", "# set through the pseudosample", "n", "=", "pdir_df", ".", "loc", "[", "i", ",", "'dir_n'", "]", "# get number of samples/site", "# get ks for each sample", "ks", "=", "np", ".", "ones", "(", "shape", "=", "n", ")", "*", "pdir_df", ".", "loc", "[", "i", ",", "'dir_k'", "]", "# draw a fisher distributed set of directions", "decs", ",", "incs", "=", "fshdev", "(", "ks", ")", "di_block", "=", "np", ".", "column_stack", "(", "(", "decs", ",", "incs", ")", ")", "# rotate them to the mean", "di_block", "=", "dodirot_V", "(", "di_block", ",", "pdir_df", ".", "loc", "[", "i", ",", "'dir_dec'", "]", ",", "pdir_df", ".", "loc", "[", "i", ",", "'dir_inc'", "]", ")", "# get the new mean direction for the pseudosample", "fpars", "=", "fisher_mean", "(", "di_block", ")", "# replace the pseudo sample mean direction", "pdir_df", ".", "loc", "[", "i", ",", "'dir_dec'", "]", "=", "fpars", "[", "'dec'", "]", "pdir_df", ".", "loc", "[", "i", ",", "'dir_inc'", "]", "=", "fpars", "[", "'inc'", "]", "# get bootstrap mean bootstrap sample", "bfpars", "=", "dir_df_fisher_mean", "(", "pdir_df", ")", "BDIs", ".", "append", "(", "[", "bfpars", "[", "'dec'", "]", ",", "bfpars", "[", "'inc'", "]", "]", ")", "return", "BDIs" ]
Create a TrackingEvent and TrackedFieldModification for a CREATE event .
def _create_create_tracking_event ( instance ) : event = _create_event ( instance , CREATE ) for field in instance . _tracked_fields : if not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : _create_tracked_field ( event , instance , field )
1,166
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L173-L180
[ "def", "_decrypt_ciphertext", "(", "cipher", ")", ":", "try", ":", "cipher", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "cipher", ")", ".", "replace", "(", "r'\\n'", ",", "'\\n'", ")", "except", "UnicodeDecodeError", ":", "# ciphertext is binary", "pass", "cipher", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_bytes", "(", "cipher", ")", "cmd", "=", "[", "_get_gpg_exec", "(", ")", ",", "'--homedir'", ",", "_get_key_dir", "(", ")", ",", "'--status-fd'", ",", "'2'", ",", "'--no-tty'", ",", "'-d'", "]", "proc", "=", "Popen", "(", "cmd", ",", "stdin", "=", "PIPE", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "False", ")", "decrypted_data", ",", "decrypt_error", "=", "proc", ".", "communicate", "(", "input", "=", "cipher", ")", "if", "not", "decrypted_data", ":", "try", ":", "cipher", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "cipher", ")", "except", "UnicodeDecodeError", ":", "# decrypted data contains undecodable binary data", "pass", "log", ".", "warning", "(", "'Could not decrypt cipher %s, received: %s'", ",", "cipher", ",", "decrypt_error", ")", "return", "cipher", "else", ":", "try", ":", "decrypted_data", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "decrypted_data", ")", "except", "UnicodeDecodeError", ":", "# decrypted data contains undecodable binary data", "pass", "return", "decrypted_data" ]
Create a TrackingEvent and TrackedFieldModification for an UPDATE event .
def _create_update_tracking_event ( instance ) : event = _create_event ( instance , UPDATE ) for field in instance . _tracked_fields : if not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : try : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : # Compare pk value = getattr ( instance , '{0}_id' . format ( field ) ) else : value = getattr ( instance , field ) if instance . _original_fields [ field ] != value : _create_tracked_field ( event , instance , field ) except TypeError : # Can't compare old and new value, should be different. _create_tracked_field ( event , instance , field )
1,167
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L183-L200
[ "def", "_decrypt_ciphertext", "(", "cipher", ")", ":", "try", ":", "cipher", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "cipher", ")", ".", "replace", "(", "r'\\n'", ",", "'\\n'", ")", "except", "UnicodeDecodeError", ":", "# ciphertext is binary", "pass", "cipher", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_bytes", "(", "cipher", ")", "cmd", "=", "[", "_get_gpg_exec", "(", ")", ",", "'--homedir'", ",", "_get_key_dir", "(", ")", ",", "'--status-fd'", ",", "'2'", ",", "'--no-tty'", ",", "'-d'", "]", "proc", "=", "Popen", "(", "cmd", ",", "stdin", "=", "PIPE", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ",", "shell", "=", "False", ")", "decrypted_data", ",", "decrypt_error", "=", "proc", ".", "communicate", "(", "input", "=", "cipher", ")", "if", "not", "decrypted_data", ":", "try", ":", "cipher", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "cipher", ")", "except", "UnicodeDecodeError", ":", "# decrypted data contains undecodable binary data", "pass", "log", ".", "warning", "(", "'Could not decrypt cipher %s, received: %s'", ",", "cipher", ",", "decrypt_error", ")", "return", "cipher", "else", ":", "try", ":", "decrypted_data", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "decrypted_data", ")", "except", "UnicodeDecodeError", ":", "# decrypted data contains undecodable binary data", "pass", "return", "decrypted_data" ]
Create a TrackingEvent and TrackedFieldModification for an UPDATE event for each related model .
def _create_update_tracking_related_event ( instance ) : events = { } # Create a dict mapping related model field to modified fields for field , related_fields in instance . _tracked_related_fields . items ( ) : if not isinstance ( instance . _meta . get_field ( field ) , ManyToManyField ) : if isinstance ( instance . _meta . get_field ( field ) , ForeignKey ) : # Compare pk value = getattr ( instance , '{0}_id' . format ( field ) ) else : value = getattr ( instance , field ) if instance . _original_fields [ field ] != value : for related_field in related_fields : events . setdefault ( related_field , [ ] ) . append ( field ) # Create the events from the events dict for related_field , fields in events . items ( ) : try : related_instances = getattr ( instance , related_field [ 1 ] ) except ObjectDoesNotExist : continue # FIXME: isinstance(related_instances, RelatedManager ?) if hasattr ( related_instances , 'all' ) : related_instances = related_instances . all ( ) else : related_instances = [ related_instances ] for related_instance in related_instances : event = _create_event ( related_instance , UPDATE ) for field in fields : fieldname = '{0}__{1}' . format ( related_field [ 0 ] , field ) _create_tracked_field ( event , instance , field , fieldname = fieldname )
1,168
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L203-L239
[ "def", "time_auth", "(", "self", ",", "load", ")", ":", "start", "=", "time", ".", "time", "(", ")", "ret", "=", "self", ".", "__auth_call", "(", "load", ")", "if", "ret", ":", "return", "ret", "f_time", "=", "time", ".", "time", "(", ")", "-", "start", "if", "f_time", ">", "self", ".", "max_fail", ":", "self", ".", "max_fail", "=", "f_time", "deviation", "=", "self", ".", "max_fail", "/", "4", "r_time", "=", "random", ".", "SystemRandom", "(", ")", ".", "uniform", "(", "self", ".", "max_fail", "-", "deviation", ",", "self", ".", "max_fail", "+", "deviation", ")", "while", "start", "+", "r_time", ">", "time", ".", "time", "(", ")", ":", "time", ".", "sleep", "(", "0.001", ")", "return", "False" ]
Get the field name from a model and a sender from m2m_changed signal .
def _get_m2m_field ( model , sender ) : for field in getattr ( model , '_tracked_fields' , [ ] ) : if isinstance ( model . _meta . get_field ( field ) , ManyToManyField ) : if getattr ( model , field ) . through == sender : return field for field in getattr ( model , '_tracked_related_fields' , { } ) . keys ( ) : if isinstance ( model . _meta . get_field ( field ) , ManyToManyField ) : if getattr ( model , field ) . through == sender : return field
1,169
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L249-L260
[ "def", "tune_mount_configuration", "(", "self", ",", "path", ",", "default_lease_ttl", "=", "None", ",", "max_lease_ttl", "=", "None", ",", "description", "=", "None", ",", "audit_non_hmac_request_keys", "=", "None", ",", "audit_non_hmac_response_keys", "=", "None", ",", "listing_visibility", "=", "None", ",", "passthrough_request_headers", "=", "None", ",", "options", "=", "None", ")", ":", "# All parameters are optional for this method. Until/unless we include input validation, we simply loop over the", "# parameters and add which parameters are set.", "optional_parameters", "=", "[", "'default_lease_ttl'", ",", "'max_lease_ttl'", ",", "'description'", ",", "'audit_non_hmac_request_keys'", ",", "'audit_non_hmac_response_keys'", ",", "'listing_visibility'", ",", "'passthrough_request_headers'", ",", "'options'", ",", "]", "params", "=", "{", "}", "for", "optional_parameter", "in", "optional_parameters", ":", "if", "locals", "(", ")", ".", "get", "(", "optional_parameter", ")", "is", "not", "None", ":", "params", "[", "optional_parameter", "]", "=", "locals", "(", ")", ".", "get", "(", "optional_parameter", ")", "api_path", "=", "'/v1/sys/mounts/{path}/tune'", ".", "format", "(", "path", "=", "path", ")", "return", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")" ]
Post save detect creation or changes and log them . We need post_save to have the object for a create .
def tracking_save ( sender , instance , raw , using , update_fields , * * kwargs ) : if _has_changed ( instance ) : if instance . _original_fields [ 'pk' ] is None : # Create _create_create_tracking_event ( instance ) else : # Update _create_update_tracking_event ( instance ) if _has_changed_related ( instance ) : # Because an object need to be saved before being related, # it can only be an update _create_update_tracking_related_event ( instance ) if _has_changed ( instance ) or _has_changed_related ( instance ) : _set_original_fields ( instance )
1,170
https://github.com/makinacorpus/django-tracking-fields/blob/463313d0f9c0f8107a0413f4d418d1a8c2311981/tracking_fields/tracking.py#L332-L349
[ "def", "keyspaceConnection", "(", "self", ",", "keyspace", ",", "consistency", "=", "ConsistencyLevel", ".", "ONE", ")", ":", "conn", "=", "CassandraKeyspaceConnection", "(", "self", ",", "keyspace", ")", "return", "CassandraClient", "(", "conn", ",", "consistency", "=", "consistency", ")" ]
This is a constructor for the LogEntry class .
def from_entry_dict ( cls , entry_dict ) : # Debug helper # https://circleci.com/gh/andresriancho/w3af-api-docker/30 try : _type = entry_dict [ 'type' ] _id = entry_dict [ 'id' ] _time = entry_dict [ 'time' ] message = entry_dict [ 'message' ] severity = entry_dict [ 'severity' ] except KeyError : msg = ( 'Missing expected log entry attribute. Log entry' ' object is:\n\n%s' ) raise APIException ( msg % json . dumps ( entry_dict , indent = 4 ) ) return cls ( _type , message , _time , severity , _id )
1,171
https://github.com/andresriancho/w3af-api-client/blob/adeb79bad75264d754de69f0bb981b366da96f32/w3af_api_client/log.py#L22-L42
[ "def", "_get_and_verify_certificate_chain", "(", "server_info", ":", "ServerConnectivityInfo", ",", "trust_store", ":", "TrustStore", ")", "->", "Tuple", "[", "List", "[", "Certificate", "]", ",", "str", ",", "Optional", "[", "OcspResponse", "]", "]", ":", "ssl_connection", "=", "server_info", ".", "get_preconfigured_ssl_connection", "(", "ssl_verify_locations", "=", "trust_store", ".", "path", ")", "# Enable OCSP stapling", "ssl_connection", ".", "ssl_client", ".", "set_tlsext_status_ocsp", "(", ")", "try", ":", "# Perform the SSL handshake", "ssl_connection", ".", "connect", "(", ")", "ocsp_response", "=", "ssl_connection", ".", "ssl_client", ".", "get_tlsext_status_ocsp_resp", "(", ")", "x509_cert_chain", "=", "ssl_connection", ".", "ssl_client", ".", "get_peer_cert_chain", "(", ")", "(", "_", ",", "verify_str", ")", "=", "ssl_connection", ".", "ssl_client", ".", "get_certificate_chain_verify_result", "(", ")", "except", "ClientCertificateRequested", ":", "# The server asked for a client cert", "# We can get the server cert anyway", "ocsp_response", "=", "ssl_connection", ".", "ssl_client", ".", "get_tlsext_status_ocsp_resp", "(", ")", "x509_cert_chain", "=", "ssl_connection", ".", "ssl_client", ".", "get_peer_cert_chain", "(", ")", "(", "_", ",", "verify_str", ")", "=", "ssl_connection", ".", "ssl_client", ".", "get_certificate_chain_verify_result", "(", ")", "finally", ":", "ssl_connection", ".", "close", "(", ")", "# Parse the certificates using the cryptography module", "parsed_x509_chain", "=", "[", "load_pem_x509_certificate", "(", "x509_cert", ".", "as_pem", "(", ")", ".", "encode", "(", "'ascii'", ")", ",", "backend", "=", "default_backend", "(", ")", ")", "for", "x509_cert", "in", "x509_cert_chain", "]", "return", "parsed_x509_chain", ",", "verify_str", ",", "ocsp_response" ]
Get a list of captures .
def list ( self , id , seq ) : # pylint: disable=invalid-name,redefined-builtin schema = CaptureSchema ( exclude = ( 'id' , 'seq' ) ) resp = self . service . list ( self . _base ( id , seq ) ) return self . service . decode ( schema , resp , many = True )
1,172
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/captures.py#L308-L317
[ "def", "wrap", "(", "vtkdataset", ")", ":", "wrappers", "=", "{", "'vtkUnstructuredGrid'", ":", "vtki", ".", "UnstructuredGrid", ",", "'vtkRectilinearGrid'", ":", "vtki", ".", "RectilinearGrid", ",", "'vtkStructuredGrid'", ":", "vtki", ".", "StructuredGrid", ",", "'vtkPolyData'", ":", "vtki", ".", "PolyData", ",", "'vtkImageData'", ":", "vtki", ".", "UniformGrid", ",", "'vtkStructuredPoints'", ":", "vtki", ".", "UniformGrid", ",", "'vtkMultiBlockDataSet'", ":", "vtki", ".", "MultiBlock", ",", "}", "key", "=", "vtkdataset", ".", "GetClassName", "(", ")", "try", ":", "wrapped", "=", "wrappers", "[", "key", "]", "(", "vtkdataset", ")", "except", ":", "logging", ".", "warning", "(", "'VTK data type ({}) is not currently supported by vtki.'", ".", "format", "(", "key", ")", ")", "return", "vtkdataset", "# if not supported just passes the VTK data object", "return", "wrapped" ]
Get a capture .
def get ( self , id , seq , intf ) : # pylint: disable=invalid-name,redefined-builtin schema = CaptureSchema ( ) resp = self . service . get_id ( self . _base ( id , seq ) , intf ) return self . service . decode ( schema , resp )
1,173
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/captures.py#L319-L330
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Download a capture as a PCAP file .
def download ( self , id , seq , intf , inline = False ) : # pylint: disable=invalid-name,redefined-builtin resp = self . service . get_id ( self . _base ( id , seq ) , intf , params = { 'format' : 'cap' , 'inline' : inline } , stream = True ) b = io . BytesIO ( ) stream . stream_response_to_file ( resp , path = b ) resp . close ( ) b . seek ( 0 ) return ( b , self . service . filename ( resp ) )
1,174
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/captures.py#L332-L346
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Get a capture s summary .
def summary ( self , id , seq , intf , filter = None , inline = False ) : # pylint: disable=invalid-name,redefined-builtin schema = SummarySchema ( ) resp = self . service . get ( self . _base ( id , seq ) + str ( intf ) + '/summary/' , params = { 'filter' : filter , 'inline' : inline } ) return self . service . decode ( schema , resp )
1,175
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/captures.py#L348-L362
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Get a capture s decode .
def decode ( self , id , seq , intf , filter = None , frame = None , inline = False ) : # pylint: disable=invalid-name,redefined-builtin schema = DecodeSchema ( ) resp = self . service . get ( self . _base ( id , seq ) + str ( intf ) + '/decode/' , params = { 'filter' : filter , 'frame' : frame , 'inline' : inline } ) return self . service . decode ( schema , resp )
1,176
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/captures.py#L364-L379
[ "def", "get_rmsd", "(", "self", ",", "mol1", ",", "mol2", ")", ":", "label1", ",", "label2", "=", "self", ".", "_mapper", ".", "uniform_labels", "(", "mol1", ",", "mol2", ")", "if", "label1", "is", "None", "or", "label2", "is", "None", ":", "return", "float", "(", "\"Inf\"", ")", "return", "self", ".", "_calc_rms", "(", "mol1", ",", "mol2", ",", "label1", ",", "label2", ")" ]
Send a capture to a CloudShark Appliance . Both cloudshark_appliance_url and cloudshark_appliance_token must be properly configured via system preferences .
def send_to_cloudshark ( self , id , seq , intf , inline = False ) : # pylint: disable=invalid-name,redefined-builtin schema = CloudSharkSchema ( ) resp = self . service . post ( self . _base ( id , seq ) + str ( intf ) + '/cloudshark/' , params = { 'inline' : inline } ) return self . service . decode ( schema , resp )
1,177
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/captures.py#L398-L412
[ "def", "to_weld_vec", "(", "weld_type", ",", "ndim", ")", ":", "for", "i", "in", "range", "(", "ndim", ")", ":", "weld_type", "=", "WeldVec", "(", "weld_type", ")", "return", "weld_type" ]
Check for errors in the response and return the resulting JSON .
def get_dict_from_response ( response ) : if getattr ( response , '_resp' ) and response . _resp . code > 400 : raise OAuthResponseError ( 'Application mis-configuration in Globus' , None , response ) return response . data
1,178
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/globus.py#L88-L95
[ "def", "Modify", "(", "self", ",", "client_limit", "=", "None", ",", "client_rate", "=", "None", ",", "duration", "=", "None", ")", ":", "args", "=", "hunt_pb2", ".", "ApiModifyHuntArgs", "(", "hunt_id", "=", "self", ".", "hunt_id", ")", "if", "client_limit", "is", "not", "None", ":", "args", ".", "client_limit", "=", "client_limit", "if", "client_rate", "is", "not", "None", ":", "args", ".", "client_rate", "=", "client_rate", "if", "duration", "is", "not", "None", ":", "args", ".", "duration", "=", "duration", "data", "=", "self", ".", "_context", ".", "SendRequest", "(", "\"ModifyHunt\"", ",", "args", ")", "return", "Hunt", "(", "data", "=", "data", ",", "context", "=", "self", ".", "_context", ")" ]
Get user information from Globus .
def get_user_info ( remote ) : response = remote . get ( GLOBUS_USER_INFO_URL ) user_info = get_dict_from_response ( response ) response . data [ 'username' ] = response . data [ 'preferred_username' ] if '@' in response . data [ 'username' ] : user_info [ 'username' ] , _ = response . data [ 'username' ] . split ( '@' ) return user_info
1,179
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/globus.py#L98-L109
[ "def", "insert", "(", "self", ",", "storagemodel", ")", "->", "StorageTableModel", ":", "modeldefinition", "=", "self", ".", "getmodeldefinition", "(", "storagemodel", ",", "True", ")", "try", ":", "modeldefinition", "[", "'tableservice'", "]", ".", "insert_or_replace_entity", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "entity", "(", ")", ")", "storagemodel", ".", "_exists", "=", "True", "except", "AzureMissingResourceHttpError", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "log", ".", "debug", "(", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "getPartitionKey", "(", ")", ",", "storagemodel", ".", "getRowKey", "(", ")", ",", "e", ")", ")", "except", "Exception", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "msg", "=", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "PartitionKey", ",", "storagemodel", ".", "RowKey", ",", "e", ")", "raise", "AzureStorageWrapException", "(", "msg", "=", "msg", ")", "finally", ":", "return", "storagemodel" ]
Get the Globus identity for a users given email .
def get_user_id ( remote , email ) : try : url = '{}?usernames={}' . format ( GLOBUS_USER_ID_URL , email ) user_id = get_dict_from_response ( remote . get ( url ) ) return user_id [ 'identities' ] [ 0 ] [ 'id' ] except KeyError : # If we got here the response was successful but the data was invalid. # It's likely the URL is wrong but possible the API has changed. raise OAuthResponseError ( 'Failed to fetch user id, likely server ' 'mis-configuration' , None , remote )
1,180
https://github.com/inveniosoftware/invenio-oauthclient/blob/2500dc6935738107617aeade79e050d7608004bb/invenio_oauthclient/contrib/globus.py#L112-L127
[ "def", "comment", "(", "self", ",", "text", ",", "comment_prefix", "=", "'#'", ")", ":", "comment", "=", "Comment", "(", "self", ".", "_container", ")", "if", "not", "text", ".", "startswith", "(", "comment_prefix", ")", ":", "text", "=", "\"{} {}\"", ".", "format", "(", "comment_prefix", ",", "text", ")", "if", "not", "text", ".", "endswith", "(", "'\\n'", ")", ":", "text", "=", "\"{}{}\"", ".", "format", "(", "text", ",", "'\\n'", ")", "comment", ".", "add_line", "(", "text", ")", "self", ".", "_container", ".", "structure", ".", "insert", "(", "self", ".", "_idx", ",", "comment", ")", "self", ".", "_idx", "+=", "1", "return", "self" ]
Return the signature string of the specified function .
def get_function_signature ( func ) : if func is None : return 'Function is None' try : func_name = func . __name__ except AttributeError : func_name = 'None' if not inspect . isfunction ( func ) : raise TypeError ( 'The argument must be a function object: %s type is %s' % ( func_name , type ( func ) ) ) return func_name + str ( inspect . signature ( func ) )
1,181
https://github.com/SylvanasSun/python-common-cache/blob/f113eb3cd751eed5ab5373e8610a31a444220cf8/common_cache/utils.py#L10-L34
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Acquire a read lock several threads can hold this type of lock .
def acquire_reader ( self ) : with self . mutex : while self . rwlock < 0 or self . rwlock == self . max_reader_concurrency or self . writers_waiting : self . readers_ok . wait ( ) self . rwlock += 1
1,182
https://github.com/SylvanasSun/python-common-cache/blob/f113eb3cd751eed5ab5373e8610a31a444220cf8/common_cache/utils.py#L73-L80
[ "def", "deserialize", "(", "obj", ")", ":", "# Be careful of shallow copy here", "target", "=", "dict", "(", "obj", ")", "class_name", "=", "None", "if", "'__class__'", "in", "target", ":", "class_name", "=", "target", ".", "pop", "(", "'__class__'", ")", "if", "'__module__'", "in", "obj", ":", "obj", ".", "pop", "(", "'__module__'", ")", "# Use getattr(module, class_name) for custom types if needed", "if", "class_name", "==", "'datetime'", ":", "return", "datetime", ".", "datetime", "(", "tzinfo", "=", "utc", ",", "*", "*", "target", ")", "if", "class_name", "==", "'StreamingBody'", ":", "return", "StringIO", "(", "target", "[", "'body'", "]", ")", "# Return unrecognized structures as-is", "return", "obj" ]
Acquire a write lock only one thread can hold this lock and only when no read locks are also held .
def acquire_writer ( self ) : with self . mutex : while self . rwlock != 0 : self . _writer_wait ( ) self . rwlock = - 1
1,183
https://github.com/SylvanasSun/python-common-cache/blob/f113eb3cd751eed5ab5373e8610a31a444220cf8/common_cache/utils.py#L82-L90
[ "def", "deserialize", "(", "obj", ")", ":", "# Be careful of shallow copy here", "target", "=", "dict", "(", "obj", ")", "class_name", "=", "None", "if", "'__class__'", "in", "target", ":", "class_name", "=", "target", ".", "pop", "(", "'__class__'", ")", "if", "'__module__'", "in", "obj", ":", "obj", ".", "pop", "(", "'__module__'", ")", "# Use getattr(module, class_name) for custom types if needed", "if", "class_name", "==", "'datetime'", ":", "return", "datetime", ".", "datetime", "(", "tzinfo", "=", "utc", ",", "*", "*", "target", ")", "if", "class_name", "==", "'StreamingBody'", ":", "return", "StringIO", "(", "target", "[", "'body'", "]", ")", "# Return unrecognized structures as-is", "return", "obj" ]
Get a list of packages .
def list ( self , filter = None , type = None , sort = None , limit = None , page = None ) : # pylint: disable=redefined-builtin schema = PackageSchema ( exclude = ( 'testlist' , 'extra_cli_args' , 'agent_id' , 'options' , 'note' ) ) resp = self . service . list ( self . base , filter , type , sort , limit , page ) ps , l = self . service . decode ( schema , resp , many = True , links = True ) return Page ( ps , l )
1,184
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/packages.py#L190-L203
[ "def", "_encode_filename", "(", "filename", ")", ":", "# pragma: no cover", "# Don't replace unknown characters as '?' is forbidden in Windows filenames", "errors", "=", "'ignore'", "if", "os", ".", "name", "==", "'nt'", "else", "'replace'", "if", "not", "isinstance", "(", "filename", ",", "bytes", ")", ":", "if", "os", ".", "name", "==", "'nt'", "and", "cairo", ".", "cairo_version", "(", ")", ">=", "11510", ":", "# Since 1.15.10, cairo uses utf-8 filenames on Windows", "filename", "=", "filename", ".", "encode", "(", "'utf-8'", ",", "errors", "=", "errors", ")", "else", ":", "try", ":", "filename", "=", "filename", ".", "encode", "(", "sys", ".", "getfilesystemencoding", "(", ")", ")", "except", "UnicodeEncodeError", ":", "# Use plain ASCII filenames as fallback", "filename", "=", "filename", ".", "encode", "(", "'ascii'", ",", "errors", "=", "errors", ")", "# TODO: avoid characters forbidden in filenames?", "return", "ffi", ".", "new", "(", "'char[]'", ",", "filename", ")" ]
Get a package .
def get ( self , id ) : # pylint: disable=invalid-name,redefined-builtin schema = PackageSchema ( ) resp = self . service . get_id ( self . base , id ) return self . service . decode ( schema , resp )
1,185
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/packages.py#L218-L227
[ "def", "apply_binding", "(", "self", ",", "binding", ",", "msg_str", ",", "destination", "=", "\"\"", ",", "relay_state", "=", "\"\"", ",", "response", "=", "False", ",", "sign", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# unless if BINDING_HTTP_ARTIFACT", "if", "response", ":", "typ", "=", "\"SAMLResponse\"", "else", ":", "typ", "=", "\"SAMLRequest\"", "if", "binding", "==", "BINDING_HTTP_POST", ":", "logger", ".", "info", "(", "\"HTTP POST\"", ")", "# if self.entity_type == 'sp':", "# info = self.use_http_post(msg_str, destination, relay_state,", "# typ)", "# info[\"url\"] = destination", "# info[\"method\"] = \"POST\"", "# else:", "info", "=", "self", ".", "use_http_form_post", "(", "msg_str", ",", "destination", ",", "relay_state", ",", "typ", ")", "info", "[", "\"url\"", "]", "=", "destination", "info", "[", "\"method\"", "]", "=", "\"POST\"", "elif", "binding", "==", "BINDING_HTTP_REDIRECT", ":", "logger", ".", "info", "(", "\"HTTP REDIRECT\"", ")", "sigalg", "=", "kwargs", ".", "get", "(", "\"sigalg\"", ")", "if", "sign", "and", "sigalg", ":", "signer", "=", "self", ".", "sec", ".", "sec_backend", ".", "get_signer", "(", "sigalg", ")", "else", ":", "signer", "=", "None", "info", "=", "self", ".", "use_http_get", "(", "msg_str", ",", "destination", ",", "relay_state", ",", "typ", ",", "signer", "=", "signer", ",", "*", "*", "kwargs", ")", "info", "[", "\"url\"", "]", "=", "str", "(", "destination", ")", "info", "[", "\"method\"", "]", "=", "\"GET\"", "elif", "binding", "==", "BINDING_SOAP", "or", "binding", "==", "BINDING_PAOS", ":", "info", "=", "self", ".", "use_soap", "(", "msg_str", ",", "destination", ",", "sign", "=", "sign", ",", "*", "*", "kwargs", ")", "elif", "binding", "==", "BINDING_URI", ":", "info", "=", "self", ".", "use_http_uri", "(", "msg_str", ",", "typ", ",", "destination", ")", "elif", "binding", "==", "BINDING_HTTP_ARTIFACT", ":", "if", "response", ":", "info", "=", "self", ".", "use_http_artifact", "(", "msg_str", ",", 
"destination", ",", "relay_state", ")", "info", "[", "\"method\"", "]", "=", "\"GET\"", "info", "[", "\"status\"", "]", "=", "302", "else", ":", "info", "=", "self", ".", "use_http_artifact", "(", "msg_str", ",", "destination", ",", "relay_state", ")", "else", ":", "raise", "SAMLError", "(", "\"Unknown binding type: %s\"", "%", "binding", ")", "return", "info" ]
Create a new package .
def create ( self , resource ) : schema = PackageSchema ( exclude = ( 'id' , 'created' , 'updated' , 'test_count' , 'agent_id' , 'result_id' ) ) json = self . service . encode ( schema , resource ) schema = PackageSchema ( ) resp = self . service . create ( self . base , json ) return self . service . decode ( schema , resp )
1,186
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/packages.py#L241-L253
[ "def", "get_counts", "(", "self", ")", ":", "hbondsback", "=", "len", "(", "[", "hb", "for", "hb", "in", "self", ".", "hbonds", "if", "not", "hb", ".", "sidechain", "]", ")", "counts", "=", "{", "'hydrophobics'", ":", "len", "(", "self", ".", "hydrophobics", ")", ",", "'hbonds'", ":", "len", "(", "self", ".", "hbonds", ")", ",", "'wbridges'", ":", "len", "(", "self", ".", "wbridges", ")", ",", "'sbridges'", ":", "len", "(", "self", ".", "sbridges", ")", ",", "'pistacks'", ":", "len", "(", "self", ".", "pi_stacks", ")", ",", "'pications'", ":", "len", "(", "self", ".", "pi_cations", ")", ",", "'halogens'", ":", "len", "(", "self", ".", "halogens", ")", ",", "'metal'", ":", "len", "(", "self", ".", "metal_complexes", ")", ",", "'hbond_back'", ":", "hbondsback", ",", "'hbond_nonback'", ":", "(", "len", "(", "self", ".", "hbonds", ")", "-", "hbondsback", ")", "}", "counts", "[", "'total'", "]", "=", "counts", "[", "'hydrophobics'", "]", "+", "counts", "[", "'hbonds'", "]", "+", "counts", "[", "'wbridges'", "]", "+", "counts", "[", "'sbridges'", "]", "+", "counts", "[", "'pistacks'", "]", "+", "counts", "[", "'pications'", "]", "+", "counts", "[", "'halogens'", "]", "+", "counts", "[", "'metal'", "]", "return", "counts" ]
Get a list of tests that will be skipped for a package .
def analyze ( self , id ) : # pylint: disable=invalid-name,redefined-builtin schema = AnalysisSchema ( ) resp = self . service . post ( self . base + str ( id ) + '/' , params = { 'process' : 'analyze' } ) return self . service . decode ( schema , resp )
1,187
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/packages.py#L301-L310
[ "def", "next", "(", "self", ")", ":", "if", "self", ".", "__buffered", "is", "None", ":", "# Use floor division to force multiplier to an integer", "multiplier", "=", "self", ".", "__max_in_mem", "//", "self", ".", "__chunk_size", "self", ".", "__buffered", "=", "\"\"", "else", ":", "multiplier", "=", "1", "self", ".", "__buffered", "=", "self", ".", "__buffered", "[", "self", ".", "__chunk_size", ":", "]", "data", "=", "self", ".", "__file", ".", "read", "(", "self", ".", "__chunk_size", "*", "multiplier", ")", "# Data is a byte object in Python 3", "# Decode it in order to append to self.__buffered str later", "# Use the salt util in case it's already a string (Windows)", "data", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "data", ")", "if", "not", "data", ":", "self", ".", "__file", ".", "close", "(", ")", "raise", "StopIteration", "self", ".", "__buffered", "+=", "data", "return", "self", ".", "__buffered" ]
Bulk copy a set of packages .
def bulk_copy ( self , ids ) : schema = PackageSchema ( ) return self . service . bulk_copy ( self . base , self . RESOURCE , ids , schema )
1,188
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/packages.py#L320-L327
[ "def", "checkIfAvailable", "(", "self", ",", "dateTime", "=", "timezone", ".", "now", "(", ")", ")", ":", "return", "(", "self", ".", "startTime", ">=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__closeBookingDays'", ")", ")", "and", "self", ".", "startTime", "<=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__openBookingDays'", ")", ")", "and", "not", "self", ".", "eventRegistration", "and", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "available", "or", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "tentative", "and", "getattr", "(", "getattr", "(", "self", ".", "temporaryEventRegistration", ",", "'registration'", ",", "None", ")", ",", "'expirationDate'", ",", "timezone", ".", "now", "(", ")", ")", "<=", "timezone", ".", "now", "(", ")", ")", ")", ")" ]
Bulk edit a set of packages .
def bulk_edit ( self , _fields , ids = None , filter = None , type = None , all = False ) : # pylint: disable=redefined-builtin schema = PackageSchema ( exclude = ( 'id' , 'created' , 'updated' , 'test_count' , 'agent_id' , 'result_id' ) ) _fields = self . service . encode ( schema , _fields , skip_none = True ) return self . service . bulk_edit ( self . base , self . RESOURCE , _fields , ids = ids , filter = filter , type = type , all = all )
1,189
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/packages.py#L329-L341
[ "def", "checkIfAvailable", "(", "self", ",", "dateTime", "=", "timezone", ".", "now", "(", ")", ")", ":", "return", "(", "self", ".", "startTime", ">=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__closeBookingDays'", ")", ")", "and", "self", ".", "startTime", "<=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__openBookingDays'", ")", ")", "and", "not", "self", ".", "eventRegistration", "and", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "available", "or", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "tentative", "and", "getattr", "(", "getattr", "(", "self", ".", "temporaryEventRegistration", ",", "'registration'", ",", "None", ")", ",", "'expirationDate'", ",", "timezone", ".", "now", "(", ")", ")", "<=", "timezone", ".", "now", "(", ")", ")", ")", ")" ]
Generate strings that are not comments or lines with only whitespace .
def clean_lines ( commands ) : if isinstance ( commands , basestring ) : # if the command argument is a filename, we need to open it. if path . isfile ( commands ) : commands = open ( commands , 'rb' ) # if the command string is a comma separated list, break it up. elif len ( commands . split ( ',' ) ) > 1 : commands = commands . split ( ',' ) else : # if a single command, need to just be returned. try : if commands . strip ( ) [ 0 ] != "#" : yield commands . strip ( ) + '\n' return except IndexError : pass elif isinstance ( commands , list ) : pass else : raise TypeError ( 'clean_lines() accepts a \'str\' or \'list\'' ) for cmd in commands : # exclude commented lines, and skip blank lines (index error) try : if cmd . strip ( ) [ 0 ] != "#" : yield cmd . strip ( ) + '\n' except IndexError : pass
1,190
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/utils.py#L8-L54
[ "def", "__parseThunkData", "(", "self", ",", "thunk", ",", "importSection", ")", ":", "offset", "=", "to_offset", "(", "thunk", ".", "header", ".", "AddressOfData", ",", "importSection", ")", "if", "0xf0000000", "&", "thunk", ".", "header", ".", "AddressOfData", "==", "0x80000000", ":", "thunk", ".", "ordinal", "=", "thunk", ".", "header", ".", "AddressOfData", "&", "0x0fffffff", "else", ":", "ibn", "=", "IMAGE_IMPORT_BY_NAME", ".", "from_buffer", "(", "importSection", ".", "raw", ",", "offset", ")", "checkOffset", "(", "offset", "+", "2", ",", "importSection", ")", "name", "=", "get_str", "(", "importSection", ".", "raw", ",", "offset", "+", "2", ")", "thunk", ".", "importByName", "=", "ImportByNameData", "(", "header", "=", "ibn", ",", "hint", "=", "ibn", ".", "Hint", ",", "name", "=", "name", ")" ]
Filter xml based on an xpath expression .
def xpath ( source_xml , xpath_expr , req_format = 'string' ) : tree = source_xml if not isinstance ( source_xml , ET . Element ) : tree = objectify . fromstring ( source_xml ) # clean up the namespace in the tags, as namespaces appear to confuse # xpath method for elem in tree . getiterator ( ) : # beware of factory functions such as Comment if isinstance ( elem . tag , basestring ) : i = elem . tag . find ( '}' ) if i >= 0 : elem . tag = elem . tag [ i + 1 : ] # remove unused namespaces objectify . deannotate ( tree , cleanup_namespaces = True ) filtered_list = tree . xpath ( xpath_expr ) # Return string from the list of Elements or pure xml if req_format == 'xml' : return filtered_list matches = '' . join ( etree . tostring ( element , pretty_print = True ) for element in filtered_list ) return matches if matches else ""
1,191
https://github.com/NetworkAutomation/jaide/blob/8571b987a8c24c246dc09f1bcc11cb0f045ec33f/jaide/utils.py#L57-L97
[ "def", "send_headers", "(", "self", ")", ":", "self", ".", "events", ".", "sync_emit", "(", "'headers'", ")", "self", ".", "_set_default_headers", "(", ")", "header_str", "=", "self", ".", "status_line", "+", "self", ".", "EOL", "+", "str", "(", "self", ".", "headers", ")", "self", ".", "stream", ".", "write", "(", "header_str", ".", "encode", "(", ")", ")", "self", ".", "events", ".", "sync_emit", "(", "'after_headers'", ")" ]
Set the value for the key in the key - value store .
def set ( self , key , value , lease = None , return_previous = None , timeout = None ) : assembler = commons . PutRequestAssembler ( self . _url , key , value , lease , return_previous ) obj = yield self . _post ( assembler . url , assembler . data , timeout ) revision = Revision . _parse ( obj ) returnValue ( revision )
1,192
https://github.com/crossbario/txaio-etcd/blob/c9aebff7f288a0b219bffc9d2579d22cf543baa5/txaioetcd/_client_tx.py#L256-L291
[ "def", "get_correlation_table", "(", "self", ",", "chain", "=", "0", ",", "parameters", "=", "None", ",", "caption", "=", "\"Parameter Correlations\"", ",", "label", "=", "\"tab:parameter_correlations\"", ")", ":", "parameters", ",", "cor", "=", "self", ".", "get_correlations", "(", "chain", "=", "chain", ",", "parameters", "=", "parameters", ")", "return", "self", ".", "_get_2d_latex_table", "(", "parameters", ",", "cor", ",", "caption", ",", "label", ")" ]
Watch one or more keys or key sets and invoke a callback .
def watch ( self , keys , on_watch , filters = None , start_revision = None , return_previous = None ) : d = self . _start_watching ( keys , on_watch , filters , start_revision , return_previous ) # # ODD: Trying to use a parameter instead of *args errors out as soon as the # parameter is accessed. # def on_err ( * args ) : if args [ 0 ] . type not in [ CancelledError , ResponseFailed ] : self . log . warn ( 'etcd watch terminated with "{error}"' , error = args [ 0 ] . type ) return args [ 0 ] d . addErrback ( on_err ) return d
1,193
https://github.com/crossbario/txaio-etcd/blob/c9aebff7f288a0b219bffc9d2579d22cf543baa5/txaioetcd/_client_tx.py#L408-L446
[ "def", "_get_logger", "(", "self", ",", "handler", ")", ":", "log_file", "=", "self", ".", "_get_log_file", "(", "handler", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "dirname", "(", "log_file", ")", ")", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "log_file", ")", ")", "handler", "[", "'log_rot_time'", "]", "=", "time", ".", "gmtime", "(", ")", "return", "pcap", ".", "open", "(", "log_file", ",", "mode", "=", "'a'", ")" ]
Creates a lease which expires if the server does not receive a keep alive within a given time to live period .
def lease ( self , time_to_live , lease_id = None , timeout = None ) : assembler = commons . LeaseRequestAssembler ( self . _url , time_to_live , lease_id ) obj = yield self . _post ( assembler . url , assembler . data , timeout ) lease = Lease . _parse ( self , obj ) returnValue ( lease )
1,194
https://github.com/crossbario/txaio-etcd/blob/c9aebff7f288a0b219bffc9d2579d22cf543baa5/txaioetcd/_client_tx.py#L590-L620
[ "def", "loadMsbwt", "(", "self", ",", "dirName", ",", "logger", ")", ":", "#open the file with our BWT in it", "self", ".", "dirName", "=", "dirName", "self", ".", "bwt", "=", "np", ".", "load", "(", "self", ".", "dirName", "+", "'/comp_msbwt.npy'", ",", "'r'", ")", "#build auxiliary structures", "self", ".", "constructTotalCounts", "(", "logger", ")", "self", ".", "constructIndexing", "(", ")", "self", ".", "constructFMIndex", "(", "logger", ")" ]
Stage an import from a file upload .
def stage_import_from_file ( self , fd , filename = 'upload.gz' ) : schema = ImportSchema ( ) resp = self . service . post ( self . base , files = { 'file' : ( filename , fd ) } ) return self . service . decode ( schema , resp )
1,195
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/imports.py#L153-L163
[ "def", "main", "(", ")", ":", "try", ":", "# Retrieve the first USB device", "device", "=", "AlarmDecoder", "(", "SerialDevice", "(", "interface", "=", "SERIAL_DEVICE", ")", ")", "# Set up an event handler and open the device", "device", ".", "on_alarm", "+=", "handle_alarm", "with", "device", ".", "open", "(", "baudrate", "=", "BAUDRATE", ")", ":", "while", "True", ":", "time", ".", "sleep", "(", "1", ")", "except", "Exception", "as", "ex", ":", "print", "(", "'Exception:'", ",", "ex", ")" ]
Stage an import from a filesystem path .
def stage_import_from_filesystem ( self , filepath ) : schema = ImportSchema ( ) resp = self . service . post ( self . base , params = { 'path' : filepath } ) return self . service . decode ( schema , resp )
1,196
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/imports.py#L165-L174
[ "def", "get_lateration_parameters", "(", "all_points", ",", "indices", ",", "index", ",", "edm", ",", "W", "=", "None", ")", ":", "if", "W", "is", "None", ":", "W", "=", "np", ".", "ones", "(", "edm", ".", "shape", ")", "# delete points that are not considered anchors", "anchors", "=", "np", ".", "delete", "(", "all_points", ",", "indices", ",", "axis", "=", "0", ")", "r2", "=", "np", ".", "delete", "(", "edm", "[", "index", ",", ":", "]", ",", "indices", ")", "w", "=", "np", ".", "delete", "(", "W", "[", "index", ",", ":", "]", ",", "indices", ")", "# set w to zero where measurements are invalid", "if", "np", ".", "isnan", "(", "r2", ")", ".", "any", "(", ")", ":", "nan_measurements", "=", "np", ".", "where", "(", "np", ".", "isnan", "(", "r2", ")", ")", "[", "0", "]", "r2", "[", "nan_measurements", "]", "=", "0.0", "w", "[", "nan_measurements", "]", "=", "0.0", "if", "np", ".", "isnan", "(", "w", ")", ".", "any", "(", ")", ":", "nan_measurements", "=", "np", ".", "where", "(", "np", ".", "isnan", "(", "w", ")", ")", "[", "0", "]", "r2", "[", "nan_measurements", "]", "=", "0.0", "w", "[", "nan_measurements", "]", "=", "0.0", "# delete anchors where weight is zero to avoid ill-conditioning", "missing_anchors", "=", "np", ".", "where", "(", "w", "==", "0.0", ")", "[", "0", "]", "w", "=", "np", ".", "asarray", "(", "np", ".", "delete", "(", "w", ",", "missing_anchors", ")", ")", "r2", "=", "np", ".", "asarray", "(", "np", ".", "delete", "(", "r2", ",", "missing_anchors", ")", ")", "w", ".", "resize", "(", "edm", ".", "shape", "[", "0", "]", "-", "len", "(", "indices", ")", "-", "len", "(", "missing_anchors", ")", ",", "1", ")", "r2", ".", "resize", "(", "edm", ".", "shape", "[", "0", "]", "-", "len", "(", "indices", ")", "-", "len", "(", "missing_anchors", ")", ",", "1", ")", "anchors", "=", "np", ".", "delete", "(", "anchors", ",", "missing_anchors", ",", "axis", "=", "0", ")", "assert", "w", ".", "shape", "[", "0", "]", "==", "anchors", ".", "shape", "[", "0", 
"]", "assert", "np", ".", "isnan", "(", "w", ")", ".", "any", "(", ")", "==", "False", "assert", "np", ".", "isnan", "(", "r2", ")", ".", "any", "(", ")", "==", "False", "return", "anchors", ",", "w", ",", "r2" ]
Stage an import from a URL to another CDRouter system .
def stage_import_from_url ( self , url , token = None , username = None , password = None , insecure = False ) : schema = ImportSchema ( ) resp = self . service . post ( self . base , params = { 'url' : url , 'token' : token , 'username' : username , 'password' : password , 'insecure' : insecure } ) return self . service . decode ( schema , resp )
1,197
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/imports.py#L176-L189
[ "def", "sometimes", "(", "fn", ")", ":", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "wrapped", ".", "x", "+=", "1", "if", "wrapped", ".", "x", "%", "2", "==", "1", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "wrapped", ".", "x", "=", "0", "return", "wrapped" ]
Get a commit request for a staged import .
def get_commit_request ( self , id ) : # pylint: disable=invalid-name,redefined-builtin schema = RequestSchema ( ) resp = self . service . get ( self . base + str ( id ) + '/request/' ) return self . service . decode ( schema , resp )
1,198
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/imports.py#L202-L211
[ "def", "_CreateDynamicDisplayAdSettings", "(", "media_service", ",", "opener", ")", ":", "image", "=", "_CreateImage", "(", "media_service", ",", "opener", ",", "'https://goo.gl/dEvQeF'", ")", "logo", "=", "{", "'type'", ":", "'IMAGE'", ",", "'mediaId'", ":", "image", "[", "'mediaId'", "]", ",", "'xsi_type'", ":", "'Image'", "}", "dynamic_settings", "=", "{", "'landscapeLogoImage'", ":", "logo", ",", "'pricePrefix'", ":", "'as low as'", ",", "'promoText'", ":", "'Free shipping!'", ",", "'xsi_type'", ":", "'DynamicSettings'", ",", "}", "return", "dynamic_settings" ]
Commit a staged import .
def commit ( self , id , impreq ) : # pylint: disable=invalid-name,redefined-builtin schema = RequestSchema ( ) json = self . service . encode ( schema , impreq ) schema = RequestSchema ( ) resp = self . service . post ( self . base + str ( id ) + '/' , json = json ) return self . service . decode ( schema , resp )
1,199
https://github.com/qacafe/cdrouter.py/blob/aacf2c6ab0b987250f7b1892f4bba14bb2b7dbe5/cdrouter/imports.py#L213-L226
[ "def", "_verify_options", "(", "options", ")", ":", "# sanity check all vals used for bitwise operations later", "bitwise_args", "=", "[", "(", "'level'", ",", "options", "[", "'level'", "]", ")", ",", "(", "'facility'", ",", "options", "[", "'facility'", "]", ")", "]", "bitwise_args", ".", "extend", "(", "[", "(", "'option'", ",", "x", ")", "for", "x", "in", "options", "[", "'options'", "]", "]", ")", "for", "opt_name", ",", "opt", "in", "bitwise_args", ":", "if", "not", "hasattr", "(", "syslog", ",", "opt", ")", ":", "log", ".", "error", "(", "'syslog has no attribute %s'", ",", "opt", ")", "return", "False", "if", "not", "isinstance", "(", "getattr", "(", "syslog", ",", "opt", ")", ",", "int", ")", ":", "log", ".", "error", "(", "'%s is not a valid syslog %s'", ",", "opt", ",", "opt_name", ")", "return", "False", "# Sanity check tag", "if", "'tag'", "in", "options", ":", "if", "not", "isinstance", "(", "options", "[", "'tag'", "]", ",", "six", ".", "string_types", ")", ":", "log", ".", "error", "(", "'tag must be a string'", ")", "return", "False", "if", "len", "(", "options", "[", "'tag'", "]", ")", ">", "32", ":", "log", ".", "error", "(", "'tag size is limited to 32 characters'", ")", "return", "False", "return", "True" ]