query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Ask for a filename to open and return the opened file
def askopenfile(mode="r", **options):
    """Prompt for a filename to open and return the opened file object.

    Returns None when the dialog is cancelled (empty filename).
    """
    filename = askopenfilename(**options)
    if not filename:
        return None
    return open(filename, mode)
8,700
https://github.com/ponty/psidialogs/blob/e385ab6b48cb43af52b810a1bf76a8135f4585b8/psidialogs/api/tkfiledialog_api.py#L4-L10
[ "def", "dashes_cleanup", "(", "records", ",", "prune_chars", "=", "'.:?~'", ")", ":", "logging", ".", "info", "(", "\"Applying _dashes_cleanup: converting any of '{}' to '-'.\"", ".", "format", "(", "prune_chars", ")", ")", "translation_table", "=", "{", "ord", "(", "c", ")", ":", "'-'", "for", "c", "in", "prune_chars", "}", "for", "record", "in", "records", ":", "record", ".", "seq", "=", "Seq", "(", "str", "(", "record", ".", "seq", ")", ".", "translate", "(", "translation_table", ")", ",", "record", ".", "seq", ".", "alphabet", ")", "yield", "record" ]
Ask for multiple filenames and return the open file objects
def askopenfiles(mode="r", **options):
    """Prompt for multiple filenames and return a list of opened file objects.

    If the dialog is cancelled, the falsy value returned by
    askopenfilenames is passed through unchanged.
    """
    files = askopenfilenames(**options)
    if files:
        files = [open(name, mode) for name in files]
    return files
8,701
https://github.com/ponty/psidialogs/blob/e385ab6b48cb43af52b810a1bf76a8135f4585b8/psidialogs/api/tkfiledialog_api.py#L13-L27
[ "def", "_process_cluster_health_data", "(", "self", ",", "node_name", ",", "node_stats", ",", "tags", ")", ":", "# Tags for service check", "cluster_health_tags", "=", "list", "(", "tags", ")", "+", "[", "'node:{}'", ".", "format", "(", "node_name", ")", "]", "# Get the membership status of the node", "cluster_membership", "=", "node_stats", ".", "get", "(", "'clusterMembership'", ",", "None", ")", "membership_status", "=", "self", ".", "NODE_MEMBERSHIP_TRANSLATION", ".", "get", "(", "cluster_membership", ",", "AgentCheck", ".", "UNKNOWN", ")", "self", ".", "service_check", "(", "self", ".", "NODE_CLUSTER_SERVICE_CHECK_NAME", ",", "membership_status", ",", "tags", "=", "cluster_health_tags", ")", "# Get the health status of the node", "health", "=", "node_stats", ".", "get", "(", "'status'", ",", "None", ")", "health_status", "=", "self", ".", "NODE_HEALTH_TRANSLATION", ".", "get", "(", "health", ",", "AgentCheck", ".", "UNKNOWN", ")", "self", ".", "service_check", "(", "self", ".", "NODE_HEALTH_SERVICE_CHECK_NAME", ",", "health_status", ",", "tags", "=", "cluster_health_tags", ")" ]
Ask for a filename to save as and return the opened file
def asksaveasfile(mode="w", **options):
    """Prompt for a filename to save as and return the opened file object.

    Returns None when the dialog is cancelled (empty filename).
    """
    filename = asksaveasfilename(**options)
    if not filename:
        return None
    return open(filename, mode)
8,702
https://github.com/ponty/psidialogs/blob/e385ab6b48cb43af52b810a1bf76a8135f4585b8/psidialogs/api/tkfiledialog_api.py#L30-L36
[ "def", "_guess_type", "(", "mrz_lines", ")", ":", "try", ":", "if", "len", "(", "mrz_lines", ")", "==", "3", ":", "return", "'TD1'", "elif", "len", "(", "mrz_lines", ")", "==", "2", "and", "len", "(", "mrz_lines", "[", "0", "]", ")", "<", "40", "and", "len", "(", "mrz_lines", "[", "1", "]", ")", "<", "40", ":", "return", "'MRVB'", "if", "mrz_lines", "[", "0", "]", "[", "0", "]", ".", "upper", "(", ")", "==", "'V'", "else", "'TD2'", "elif", "len", "(", "mrz_lines", ")", "==", "2", ":", "return", "'MRVA'", "if", "mrz_lines", "[", "0", "]", "[", "0", "]", ".", "upper", "(", ")", "==", "'V'", "else", "'TD3'", "else", ":", "return", "None", "except", "Exception", ":", "#pylint: disable=broad-except", "return", "None" ]
Create a subclass of Coordinate whose instances must have exactly the given keys.
def spaced_coordinate(name, keys, ordered=True):
    """Build a Coordinate subclass whose instances must have exactly `keys`.

    :param name: name of the new class
    :param keys: the exact set of keys every instance must carry
    :param ordered: when True, `keys` also becomes the default ordering
    :return: the newly created subclass of Coordinate
    """
    def validate(self):
        """Raise a ValueError if the instance's keys are incorrect"""
        if set(keys) != set(self):
            raise ValueError('{} needs keys {} and got {}'.format(
                type(self).__name__, keys, tuple(self)))

    class_attrs = {
        'default_order': keys if ordered else None,
        '_validate': validate,
    }
    return type(name, (Coordinate,), class_attrs)
8,703
https://github.com/clbarnes/coordinates/blob/2f5b3ca855da069204407f4bb7e8eb5d4835dfe0/coordinates/classes.py#L289-L312
[ "def", "read_response", "(", "self", ",", "delegate", ":", "httputil", ".", "HTTPMessageDelegate", ")", "->", "Awaitable", "[", "bool", "]", ":", "if", "self", ".", "params", ".", "decompress", ":", "delegate", "=", "_GzipMessageDelegate", "(", "delegate", ",", "self", ".", "params", ".", "chunk_size", ")", "return", "self", ".", "_read_message", "(", "delegate", ")" ]
Find the vector norm with the given order of the values
def norm(self, order=2):
    """Return the vector norm of the values, using the given order (default 2)."""
    total = sum(value ** order for value in abs(self).values())
    return total ** (1 / order)
8,704
https://github.com/clbarnes/coordinates/blob/2f5b3ca855da069204407f4bb7e8eb5d4835dfe0/coordinates/classes.py#L175-L177
[ "def", "time_replacer", "(", "match", ",", "timestamp", ")", ":", "# match.group(0) = entire match", "# match.group(1) = match in braces #1", "return", "time", ".", "strftime", "(", "match", ".", "group", "(", "1", ")", ",", "time", ".", "gmtime", "(", "timestamp", ")", ")" ]
Walks through object o and attempts to get the property instead of the key if available . This means that for our VDev objects we can easily get a dict of all the parsed values .
def jsonify(o, max_depth=-1, parse_enums=PARSE_KEEP):
    """Walk through object `o`, preferring object properties over raw dict
    values where available, so VDev-like objects yield a dict of parsed values.

    :param o: the object to convert (dict, list, tuple, Enum, or scalar)
    :param max_depth: recursion budget; -1 means unlimited, 0 returns `o` as-is
    :param parse_enums: strategy passed through to _parse_enum
    :return: a JSON-friendly structure mirroring `o`
    """
    if max_depth == 0:
        return o
    max_depth -= 1
    if isinstance(o, dict):
        keyattrs = getattr(o.__class__, '_altnames', {})

        def _getter(key, value):
            # Map the raw key through the class's alternate-name table first.
            key = keyattrs.get(key, key)
            # Prefer a same-named attribute (property) over the dict value,
            # but fall back to the value if the attribute is callable.
            other = getattr(o, key, value)
            if callable(other):
                other = value
            if isinstance(key, Enum):
                # Make sure we use a name as the key... if we don't it might mess some things up.
                key = key.name
            return key, jsonify(other, max_depth=max_depth, parse_enums=parse_enums)

        return dict(_getter(key, value) for key, value in six.iteritems(o))
    elif isinstance(o, list):
        return [jsonify(x, max_depth=max_depth, parse_enums=parse_enums) for x in o]
    elif isinstance(o, tuple):
        # BUGFIX: the original returned a generator expression here, not a
        # tuple, so tuple inputs came back as non-JSON-serializable generators.
        return tuple(jsonify(x, max_depth=max_depth, parse_enums=parse_enums) for x in o)
    elif isinstance(o, Enum):
        o = _parse_enum(o, parse_enums=parse_enums)
    return o
8,705
https://github.com/Xaroth/libzfs-python/blob/146e5f28de5971bb6eb64fd82b098c5f302f0b33/libzfs/utils/jsonify.py#L25-L52
[ "def", "create_meta_main", "(", "create_path", ",", "config", ",", "role", ",", "categories", ")", ":", "meta_file", "=", "c", ".", "DEFAULT_META_FILE", ".", "replace", "(", "\"%author_name\"", ",", "config", "[", "\"author_name\"", "]", ")", "meta_file", "=", "meta_file", ".", "replace", "(", "\"%author_company\"", ",", "config", "[", "\"author_company\"", "]", ")", "meta_file", "=", "meta_file", ".", "replace", "(", "\"%license_type\"", ",", "config", "[", "\"license_type\"", "]", ")", "meta_file", "=", "meta_file", ".", "replace", "(", "\"%role_name\"", ",", "role", ")", "# Normalize the category so %categories always gets replaced.", "if", "not", "categories", ":", "categories", "=", "\"\"", "meta_file", "=", "meta_file", ".", "replace", "(", "\"%categories\"", ",", "categories", ")", "string_to_file", "(", "create_path", ",", "meta_file", ")" ]
Copy the LICENSE and CONTRIBUTING files to each folder repo Generate covers if needed . Dump the metadata .
def copy_files(self):
    """Copy the LICENSE and CONTRIBUTING files into the book's repo folder,
    copy the metadata RDF file when present, ensure the 'GITenberg' subject
    tag, and dump the metadata."""
    this_dir = dirname(abspath(__file__))
    destination = '{0}/'.format(self.book.local_path)
    for name in [u'LICENSE', u'CONTRIBUTING.rst']:
        sh.cp('{0}/templates/{1}'.format(this_dir, name), destination)
    # copy metadata rdf file
    if self.book.meta.rdf_path:  # if None, meta is from yaml file
        sh.cp(self.book.meta.rdf_path, destination)
    if 'GITenberg' not in self.book.meta.subjects:
        if not self.book.meta.subjects:
            self.book.meta.metadata['subjects'] = []
        self.book.meta.metadata['subjects'].append('GITenberg')
    self.save_meta()
8,706
https://github.com/gitenberg-dev/gitberg/blob/3f6db8b5a22ccdd2110d3199223c30db4e558b5c/gitenberg/make.py#L47-L70
[ "def", "operator_si", "(", "u", ")", ":", "global", "_aux", "if", "np", ".", "ndim", "(", "u", ")", "==", "2", ":", "P", "=", "_P2", "elif", "np", ".", "ndim", "(", "u", ")", "==", "3", ":", "P", "=", "_P3", "else", ":", "raise", "ValueError", "(", "\"u has an invalid number of dimensions \"", "\"(should be 2 or 3)\"", ")", "if", "u", ".", "shape", "!=", "_aux", ".", "shape", "[", "1", ":", "]", ":", "_aux", "=", "np", ".", "zeros", "(", "(", "len", "(", "P", ")", ",", ")", "+", "u", ".", "shape", ")", "for", "_aux_i", ",", "P_i", "in", "zip", "(", "_aux", ",", "P", ")", ":", "_aux_i", "[", ":", "]", "=", "binary_erosion", "(", "u", ",", "P_i", ")", "return", "_aux", ".", "max", "(", "0", ")" ]
Collects information about the number of edit actions belonging to keys in a supplied dictionary of object or changeset ids .
def _collate_data ( collation , first_axis , second_axis ) : if first_axis not in collation : collation [ first_axis ] = { } collation [ first_axis ] [ "create" ] = 0 collation [ first_axis ] [ "modify" ] = 0 collation [ first_axis ] [ "delete" ] = 0 first = collation [ first_axis ] first [ second_axis ] = first [ second_axis ] + 1 collation [ first_axis ] = first
8,707
https://github.com/ethan-nelson/osm_diff_tool/blob/d5b083100dedd9427ad23c4be5316f89a55ec8f0/osmdt/extract.py#L1-L27
[ "def", "not_storable", "(", "_type", ")", ":", "return", "Storable", "(", "_type", ",", "handlers", "=", "StorableHandler", "(", "poke", "=", "fake_poke", ",", "peek", "=", "fail_peek", "(", "_type", ")", ")", ")" ]
Provides information about each changeset present in an OpenStreetMap diff file .
def extract_changesets(objects):
    """Collect per-changeset information from an OpenStreetMap diff.

    For every node, way, and relation in `objects`, tallies the edit action
    via _collate_data and records id/username/uid/timestamp details.
    """
    def add_changeset_info(collation, axis, item):
        """Record identifying details for changeset `axis` from `item`."""
        entry = collation.setdefault(axis, {})
        entry["id"] = axis
        entry["username"] = item["username"]
        entry["uid"] = item["uid"]
        entry["timestamp"] = item["timestamp"]
        collation[axis] = entry

    changeset_collation = {}
    # Nodes, ways, then relations — same order as the original per-type loops.
    for collection in (objects.nodes, objects.ways, objects.relations):
        for item in collection.values():
            _collate_data(changeset_collation, item['changeset'], item['action'])
            add_changeset_info(changeset_collation, item['changeset'], item)
    return changeset_collation
8,708
https://github.com/ethan-nelson/osm_diff_tool/blob/d5b083100dedd9427ad23c4be5316f89a55ec8f0/osmdt/extract.py#L30-L74
[ "def", "delete_secret", "(", "self", ",", "path", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "api_path", "=", "'/v1/{mount_point}/{path}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "path", "=", "path", ")", "return", "self", ".", "_adapter", ".", "delete", "(", "url", "=", "api_path", ",", ")" ]
Convert an object to a string
def to_str(obj):
    """Convert an object to a (byte) string.

    str objects pass through; py2 unicode objects are UTF-8 encoded;
    everything else goes through str().

    BUGFIX: the original referenced the py2-only name `unicode`
    unconditionally, so on Python 3 any non-str input raised NameError
    instead of being converted.
    """
    if isinstance(obj, str):
        return obj
    try:
        if isinstance(obj, unicode):  # noqa: F821 — py2 only
            return obj.encode('utf-8')
    except NameError:
        # Python 3: no `unicode` type; fall through to str().
        pass
    return str(obj)
8,709
https://github.com/xlzd/xtls/blob/b3cc0ab24197ecaa39adcad7cd828cada9c04a4e/xtls/util.py#L33-L41
[ "def", "_ParseWtmp", "(", ")", ":", "users", "=", "{", "}", "wtmp_struct_size", "=", "UtmpStruct", ".", "GetSize", "(", ")", "filenames", "=", "glob", ".", "glob", "(", "\"/var/log/wtmp*\"", ")", "+", "[", "\"/var/run/utmp\"", "]", "for", "filename", "in", "filenames", ":", "try", ":", "wtmp", "=", "open", "(", "filename", ",", "\"rb\"", ")", ".", "read", "(", ")", "except", "IOError", ":", "continue", "for", "offset", "in", "range", "(", "0", ",", "len", "(", "wtmp", ")", ",", "wtmp_struct_size", ")", ":", "try", ":", "record", "=", "UtmpStruct", "(", "wtmp", "[", "offset", ":", "offset", "+", "wtmp_struct_size", "]", ")", "except", "utils", ".", "ParsingError", ":", "break", "# Users only appear for USER_PROCESS events, others are system.", "if", "record", ".", "ut_type", "!=", "7", ":", "continue", "try", ":", "if", "users", "[", "record", ".", "ut_user", "]", "<", "record", ".", "tv_sec", ":", "users", "[", "record", ".", "ut_user", "]", "=", "record", ".", "tv_sec", "except", "KeyError", ":", "users", "[", "record", ".", "ut_user", "]", "=", "record", ".", "tv_sec", "return", "users" ]
Get the GDNS managed zone name for a DNS zone .
def get_managed_zone(self, zone):
    """Return the GDNS managed zone name for a DNS zone.

    Reverse (in-addr.arpa) zones get `reverse_prefix` plus two labels;
    forward zones get `forward_prefix` plus all labels joined with '-'.
    """
    labels = zone.split('.')
    if zone.endswith('.in-addr.arpa.'):
        return self.reverse_prefix + '-'.join(labels[-5:-3])
    return self.forward_prefix + '-'.join(labels[:-1])
8,710
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/clients/gdns.py#L81-L106
[ "def", "cancelRealTimeBars", "(", "self", ",", "bars", ":", "RealTimeBarList", ")", ":", "self", ".", "client", ".", "cancelRealTimeBars", "(", "bars", ".", "reqId", ")", "self", ".", "wrapper", ".", "endSubscription", "(", "bars", ")" ]
Get all resource record sets for a managed zone using the DNS zone .
async def get_records_for_zone(self, dns_zone, params=None):
    """Fetch all resource record sets for the managed zone backing `dns_zone`.

    Follows nextPageToken pagination until exhausted and returns the
    accumulated list of rrsets.
    """
    managed_zone = self.get_managed_zone(dns_zone)
    url = f'{self._base_url}/managedZones/{managed_zone}/rrsets'
    if not params:
        params = {}
    if 'fields' not in params:
        # Get only the fields we care about
        params['fields'] = ('rrsets/name,rrsets/kind,rrsets/rrdatas,'
                            'rrsets/type,rrsets/ttl,nextPageToken')
    records = []
    next_page_token = None
    while True:
        if next_page_token:
            params['pageToken'] = next_page_token
        response = await self.get_json(url, params=params)
        records.extend(response['rrsets'])
        next_page_token = response.get('nextPageToken')
        if not next_page_token:
            break
    logging.info(f'Found {len(records)} rrsets for zone "{dns_zone}".')
    return records
8,711
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/clients/gdns.py#L108-L141
[ "def", "_read_console_output", "(", "self", ",", "ws", ",", "out", ")", ":", "while", "True", ":", "msg", "=", "yield", "from", "ws", ".", "receive", "(", ")", "if", "msg", ".", "tp", "==", "aiohttp", ".", "WSMsgType", ".", "text", ":", "out", ".", "feed_data", "(", "msg", ".", "data", ".", "encode", "(", ")", ")", "elif", "msg", ".", "tp", "==", "aiohttp", ".", "WSMsgType", ".", "BINARY", ":", "out", ".", "feed_data", "(", "msg", ".", "data", ")", "elif", "msg", ".", "tp", "==", "aiohttp", ".", "WSMsgType", ".", "ERROR", ":", "log", ".", "critical", "(", "\"Docker WebSocket Error: {}\"", ".", "format", "(", "msg", ".", "data", ")", ")", "else", ":", "out", ".", "feed_eof", "(", ")", "ws", ".", "close", "(", ")", "break", "yield", "from", "self", ".", "stop", "(", ")" ]
Check if a DNS change has completed .
async def is_change_done(self, zone, change_id):
    """Return True when the DNS change `change_id` in `zone` has completed."""
    zone_id = self.get_managed_zone(zone)
    url = f'{self._base_url}/managedZones/{zone_id}/changes/{change_id}'
    response = await self.get_json(url)
    return response['status'] == self.DNS_CHANGES_DONE
8,712
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/clients/gdns.py#L143-L155
[ "def", "wrap", "(", "vtkdataset", ")", ":", "wrappers", "=", "{", "'vtkUnstructuredGrid'", ":", "vtki", ".", "UnstructuredGrid", ",", "'vtkRectilinearGrid'", ":", "vtki", ".", "RectilinearGrid", ",", "'vtkStructuredGrid'", ":", "vtki", ".", "StructuredGrid", ",", "'vtkPolyData'", ":", "vtki", ".", "PolyData", ",", "'vtkImageData'", ":", "vtki", ".", "UniformGrid", ",", "'vtkStructuredPoints'", ":", "vtki", ".", "UniformGrid", ",", "'vtkMultiBlockDataSet'", ":", "vtki", ".", "MultiBlock", ",", "}", "key", "=", "vtkdataset", ".", "GetClassName", "(", ")", "try", ":", "wrapped", "=", "wrappers", "[", "key", "]", "(", "vtkdataset", ")", "except", ":", "logging", ".", "warning", "(", "'VTK data type ({}) is not currently supported by vtki.'", ".", "format", "(", "key", ")", ")", "return", "vtkdataset", "# if not supported just passes the VTK data object", "return", "wrapped" ]
Post changes to a zone .
async def publish_changes(self, zone, changes):
    """Post a change set to `zone`'s managed zone and return the change id."""
    zone_id = self.get_managed_zone(zone)
    url = f'{self._base_url}/managedZones/{zone_id}/changes'
    raw = await self.request('post', url, json=changes)
    return json.loads(raw)['id']
8,713
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/clients/gdns.py#L157-L170
[ "def", "remove_armor", "(", "armored_data", ")", ":", "stream", "=", "io", ".", "BytesIO", "(", "armored_data", ")", "lines", "=", "stream", ".", "readlines", "(", ")", "[", "3", ":", "-", "1", "]", "data", "=", "base64", ".", "b64decode", "(", "b''", ".", "join", "(", "lines", ")", ")", "payload", ",", "checksum", "=", "data", "[", ":", "-", "3", "]", ",", "data", "[", "-", "3", ":", "]", "assert", "util", ".", "crc24", "(", "payload", ")", "==", "checksum", "return", "payload" ]
Actively close this WAMP session .
def leave(self, reason=None, message=None):
    """Actively close this WAMP session.

    `message` is forwarded as the async session's `log_message`;
    see https://github.com/crossbario/autobahn-python/issues/605
    """
    session = self._async_session
    return session.leave(reason=reason, log_message=message)
8,714
https://github.com/Scille/autobahn-sync/blob/d75fceff0d1aee61fa6dd0168eb1cd40794ad827/autobahn_sync/session.py#L69-L75
[ "def", "generic_html", "(", "self", ",", "result", ",", "errors", ")", ":", "h1", "=", "htmlize", "(", "type", "(", "result", ")", ")", "out", "=", "[", "]", "result", "=", "pre_process_json", "(", "result", ")", "if", "not", "hasattr", "(", "result", ",", "'items'", ")", ":", "# result is a non-container", "header", "=", "\"<tr><th>Value</th></tr>\"", "if", "type", "(", "result", ")", "is", "list", ":", "result", "=", "htmlize_list", "(", "result", ")", "else", ":", "result", "=", "htmlize", "(", "result", ")", "out", "=", "[", "\"<tr><td>\"", "+", "result", "+", "\"</td></tr>\"", "]", "elif", "hasattr", "(", "result", ",", "'lower'", ")", ":", "out", "=", "[", "\"<tr><td>\"", "+", "result", "+", "\"</td></tr>\"", "]", "else", ":", "# object is a dict", "header", "=", "\"<tr><th>Key</th><th>Value</th></tr>\"", "for", "key", ",", "value", "in", "result", ".", "items", "(", ")", ":", "v", "=", "htmlize", "(", "value", ")", "row", "=", "\"<tr><td>{0}</td><td>{1}</td></tr>\"", ".", "format", "(", "key", ",", "v", ")", "out", ".", "append", "(", "row", ")", "env", "=", "Environment", "(", "loader", "=", "PackageLoader", "(", "'giotto'", ")", ")", "template", "=", "env", ".", "get_template", "(", "'generic.html'", ")", "rendered", "=", "template", ".", "render", "(", "{", "'header'", ":", "h1", ",", "'table_header'", ":", "header", ",", "'table_body'", ":", "out", "}", ")", "return", "{", "'body'", ":", "rendered", ",", "'mimetype'", ":", "'text/html'", "}" ]
Call a remote procedure .
def call(self, procedure, *args, **kwargs):
    """Call a remote procedure, delegating to the underlying async session."""
    session = self._async_session
    return session.call(procedure, *args, **kwargs)
8,715
https://github.com/Scille/autobahn-sync/blob/d75fceff0d1aee61fa6dd0168eb1cd40794ad827/autobahn_sync/session.py#L78-L83
[ "def", "_media", "(", "self", ")", ":", "css", "=", "[", "'markymark/css/markdown-editor.css'", "]", "iconlibrary_css", "=", "getattr", "(", "settings", ",", "'MARKYMARK_FONTAWESOME_CSS'", ",", "'markymark/fontawesome/fontawesome.min.css'", ")", "if", "iconlibrary_css", ":", "css", ".", "append", "(", "iconlibrary_css", ")", "media", "=", "forms", ".", "Media", "(", "css", "=", "{", "'all'", ":", "css", "}", ",", "js", "=", "(", "'markymark/js/markdown-editor.js'", ",", ")", ")", "# Use official extension loading to initialize all extensions", "# and hook in extension-defined media files.", "renderer", "=", "initialize_renderer", "(", ")", "for", "extension", "in", "renderer", ".", "registeredExtensions", ":", "if", "hasattr", "(", "extension", ",", "'media'", ")", ":", "media", "+=", "extension", ".", "media", "return", "media" ]
Register a procedure for remote calling .
def register(self, endpoint, procedure=None, options=None):
    """Register a procedure for remote calling.

    The user-supplied `endpoint` is wrapped so invocations are queued on the
    callbacks runner rather than executed on the async session's thread.
    """
    def proxy_endpoint(*args, **kwargs):
        # Defer the actual call to the callbacks-runner queue.
        return self._callbacks_runner.put(partial(endpoint, *args, **kwargs))

    return self._async_session.register(
        proxy_endpoint, procedure=procedure, options=options)
8,716
https://github.com/Scille/autobahn-sync/blob/d75fceff0d1aee61fa6dd0168eb1cd40794ad827/autobahn_sync/session.py#L86-L93
[ "def", "codemirror_field_js_bundle", "(", "field", ")", ":", "manifesto", "=", "CodemirrorAssetTagRender", "(", ")", "manifesto", ".", "register_from_fields", "(", "field", ")", "try", ":", "bundle_name", "=", "manifesto", ".", "js_bundle_names", "(", ")", "[", "0", "]", "except", "IndexError", ":", "msg", "=", "(", "\"Given field with configuration name '{}' does not have a \"", "\"Javascript bundle name\"", ")", "raise", "CodeMirrorFieldBundleError", "(", "msg", ".", "format", "(", "field", ".", "config_name", ")", ")", "return", "bundle_name" ]
Publish an event to a topic .
def publish(self, topic, *args, **kwargs):
    """Publish an event to a topic via the underlying async session."""
    session = self._async_session
    return session.publish(topic, *args, **kwargs)
8,717
https://github.com/Scille/autobahn-sync/blob/d75fceff0d1aee61fa6dd0168eb1cd40794ad827/autobahn_sync/session.py#L100-L105
[ "def", "close_data", "(", "self", ")", ":", "# save the total number of cells to make re-loading convient", "if", "self", ".", "datafile", "is", "not", "None", ":", "if", "self", ".", "datafile", ".", "filemode", "!=", "'r'", ":", "self", ".", "datafile", ".", "set_metadata", "(", "''", ",", "{", "'total cells'", ":", "self", ".", "current_cellid", "}", ")", "self", ".", "datafile", ".", "close", "(", ")", "self", ".", "datafile", "=", "None" ]
Subscribe to a topic for receiving events .
def subscribe(self, handler, topic=None, options=None):
    """Subscribe to a topic for receiving events.

    The user-supplied `handler` is wrapped so events are queued on the
    callbacks runner rather than handled on the async session's thread.
    """
    def proxy_handler(*args, **kwargs):
        # Defer the actual handler call to the callbacks-runner queue.
        return self._callbacks_runner.put(partial(handler, *args, **kwargs))

    return self._async_session.subscribe(
        proxy_handler, topic=topic, options=options)
8,718
https://github.com/Scille/autobahn-sync/blob/d75fceff0d1aee61fa6dd0168eb1cd40794ad827/autobahn_sync/session.py#L108-L115
[ "def", "close_data", "(", "self", ")", ":", "# save the total number of cells to make re-loading convient", "if", "self", ".", "datafile", "is", "not", "None", ":", "if", "self", ".", "datafile", ".", "filemode", "!=", "'r'", ":", "self", ".", "datafile", ".", "set_metadata", "(", "''", ",", "{", "'total cells'", ":", "self", ".", "current_cellid", "}", ")", "self", ".", "datafile", ".", "close", "(", ")", "self", ".", "datafile", "=", "None" ]
Encode input to base58check encoding .
def b58encode(val, charset=DEFAULT_CHARSET):
    """Encode input to base58check encoding.

    :param val: the bytes to encode
    :param charset: a 58-character alphabet, as bytes or an ASCII str
    :return: the base58check-encoded bytes
    :raises TypeError: if `val` is not a bytes-like object
    :raises ValueError: if `charset` is not of length 58
    """
    if not isinstance(val, bytes):
        raise TypeError(
            "a bytes-like object is required, not '%s', "
            "use .encode('ascii') to encode unicode strings" % type(val).__name__)

    # BUGFIX: normalize the charset to bytes *before* defining the helper
    # below. The original defined the helper first, so its default argument
    # `bytes([charset[0]])` was evaluated while `charset` could still be a
    # str, raising TypeError for any str charset.
    if isinstance(charset, str):
        charset = charset.encode('ascii')

    base = len(charset)
    if not base == 58:
        raise ValueError('charset base must be 58, not %s' % base)

    def _b58encode_int(int_, default=bytes([charset[0]])):
        # Convert a non-negative integer to base58 digit bytes.
        if not int_ and default:
            return default
        output = b''
        while int_:
            int_, idx = divmod(int_, base)
            output = charset[idx:idx + 1] + output
        return output

    # Leading zero bytes are preserved as leading zero-digits.
    pad_len = len(val)
    val = val.lstrip(b'\0')
    pad_len -= len(val)

    # Interpret the remaining bytes as one big-endian integer.
    p, acc = 1, 0
    for char in deque(reversed(val)):
        acc += p * char
        p = p << 8

    result = _b58encode_int(acc, default=False)
    prefix = bytes([charset[0]]) * pad_len
    return prefix + result
8,719
https://github.com/joeblackwaslike/base58check/blob/417282766e697b8affc926a5f52cb9fcc41978cc/base58check/__init__.py#L43-L93
[ "def", "readShocks", "(", "self", ")", ":", "IndShockConsumerType", ".", "readShocks", "(", "self", ")", "self", ".", "MrkvNow", "=", "self", ".", "MrkvNow", ".", "astype", "(", "int", ")" ]
Decode base58check encoded input to original raw bytes .
def b58decode(val, charset=DEFAULT_CHARSET):
    """Decode base58check encoded input to original raw bytes.

    :param val: the base58check-encoded input, as bytes or str
    :param charset: a 58-character alphabet, as bytes or str
    :return: the decoded raw bytes
    :raises ValueError: if `charset` is not of length 58
    """
    def _b58decode_int(data):
        # Fold base58 digit bytes into a single integer.
        output = 0
        for char in data:
            output = output * base + charset.index(char)
        return output

    if isinstance(val, str):
        val = val.encode()
    if isinstance(charset, str):
        charset = charset.encode()

    base = len(charset)
    if not base == 58:
        raise ValueError('charset base must be 58, not %s' % base)

    # Leading zero-digits decode to leading zero bytes.
    pad_len = len(val)
    val = val.lstrip(bytes([charset[0]]))
    pad_len -= len(val)

    acc = _b58decode_int(val)
    result = deque()
    while acc > 0:
        acc, mod = divmod(acc, 256)
        result.appendleft(mod)

    return b'\0' * pad_len + bytes(result)
8,720
https://github.com/joeblackwaslike/base58check/blob/417282766e697b8affc926a5f52cb9fcc41978cc/base58check/__init__.py#L96-L141
[ "def", "update_configuration", "(", "self", ",", "timeout", "=", "-", "1", ")", ":", "uri", "=", "\"{}/configuration\"", ".", "format", "(", "self", ".", "data", "[", "\"uri\"", "]", ")", "return", "self", ".", "_helper", ".", "update", "(", "None", ",", "uri", "=", "uri", ",", "timeout", "=", "timeout", ")" ]
This will remove any callbacks you might have specified
def wait_for_edge(self):
    """Block until the configured edge occurs on this pin.

    Note: this removes any event-detect callbacks previously registered
    on the pin before waiting.
    """
    pin, edge = self._pin, self._edge
    GPIO.remove_event_detect(pin)
    GPIO.wait_for_edge(pin, edge)
8,721
https://github.com/zourtney/gpiocrust/blob/4973d467754c50510647ddf855fdc7a73be8a5f6/gpiocrust/raspberry_pi.py#L148-L153
[ "def", "write_moc_ascii", "(", "moc", ",", "filename", "=", "None", ",", "file", "=", "None", ")", ":", "orders", "=", "[", "]", "for", "(", "order", ",", "cells", ")", "in", "moc", ":", "ranges", "=", "[", "]", "rmin", "=", "rmax", "=", "None", "for", "cell", "in", "sorted", "(", "cells", ")", ":", "if", "rmin", "is", "None", ":", "rmin", "=", "rmax", "=", "cell", "elif", "rmax", "==", "cell", "-", "1", ":", "rmax", "=", "cell", "else", ":", "ranges", ".", "append", "(", "_format_range", "(", "rmin", ",", "rmax", ")", ")", "rmin", "=", "rmax", "=", "cell", "ranges", ".", "append", "(", "_format_range", "(", "rmin", ",", "rmax", ")", ")", "orders", ".", "append", "(", "'{0}'", ".", "format", "(", "order", ")", "+", "'/'", "+", "','", ".", "join", "(", "ranges", ")", ")", "if", "file", "is", "not", "None", ":", "_write_ascii", "(", "orders", ",", "file", ")", "else", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "f", ":", "_write_ascii", "(", "orders", ",", "f", ")" ]
This function logs if the user accesses the page
# Signal handler: logs each finished request with the acting user, URI,
# HTTP method, and status code, plus a structured `extra` payload.
# Requests whose status code or (lowercased) method appears in
# settings.AUTOMATED_LOGGING['exclude']['request'] are skipped entirely.
# Unless AUTOMATED_LOGGING['request']['query'] is enabled, the query string
# is stripped by keeping only the parsed URI path.
# NOTE(review): the trailing `.replace(" ", " ")` replaces a space with a
# space and is a no-op as written — presumably it was meant to collapse
# doubled spaces (e.g. when `user` is empty); confirm intent before changing.
def request_finished_callback ( sender , * * kwargs ) : logger = logging . getLogger ( __name__ ) level = settings . AUTOMATED_LOGGING [ 'loglevel' ] [ 'request' ] user = get_current_user ( ) uri , application , method , status = get_current_environ ( ) excludes = settings . AUTOMATED_LOGGING [ 'exclude' ] [ 'request' ] if status and status in excludes : return if method and method . lower ( ) in excludes : return if not settings . AUTOMATED_LOGGING [ 'request' ] [ 'query' ] : uri = urllib . parse . urlparse ( uri ) . path logger . log ( level , ( '%s performed request at %s (%s %s)' % ( user , uri , method , status ) ) . replace ( " " , " " ) , extra = { 'action' : 'request' , 'data' : { 'user' : user , 'uri' : uri , 'method' : method , 'application' : application , 'status' : status } } )
8,722
https://github.com/indietyp/django-automated-logging/blob/095dfc6df62dca45f7db4516bc35e52085d0a01c/automated_logging/signals/request.py#L20-L48
[ "def", "_get_to_many_relationship_value", "(", "self", ",", "obj", ",", "column", ")", ":", "related_key", "=", "column", ".", "get", "(", "'related_key'", ",", "None", ")", "related", "=", "getattr", "(", "obj", ",", "column", "[", "'__col__'", "]", ".", "key", ")", "value", "=", "{", "}", "if", "related", ":", "total", "=", "len", "(", "related", ")", "for", "index", ",", "rel_obj", "in", "enumerate", "(", "related", ")", ":", "if", "related_key", ":", "compiled_res", "=", "self", ".", "_get_formatted_val", "(", "rel_obj", ",", "related_key", ",", "column", ")", "else", ":", "compiled_res", "=", "column", "[", "'__prop__'", "]", ".", "compile_obj", "(", "rel_obj", ")", "value", "[", "'item_%d'", "%", "index", "]", "=", "compiled_res", "value", "[", "str", "(", "index", ")", "]", "=", "compiled_res", "value", "[", "\"_\"", "+", "str", "(", "index", ")", "]", "=", "compiled_res", "if", "index", "==", "0", ":", "value", "[", "'first'", "]", "=", "compiled_res", "if", "index", "==", "total", "-", "1", ":", "value", "[", "'last'", "]", "=", "compiled_res", "return", "value" ]
Automated request exception logging .
def request_exception(sender, request, **kwargs):
    """Automated request exception logging.

    Despite the parameter name, `request` here is a response-like object
    (it carries status_code/reason_phrase); plain WSGIRequest instances
    have no status information and are logged generically.
    """
    logger = logging.getLogger(__name__)
    if not isinstance(request, WSGIRequest):
        # BUGFIX: the original used `<= 500`, which marked 2xx/4xx responses
        # CRITICAL and 5xx (>500) responses WARNING — inverted. Server
        # errors (>= 500) are the critical ones.
        level = CRITICAL if request.status_code >= 500 else WARNING
        # Also fixes the "occured" -> "occurred" typo in the log message.
        logger.log(level,
                   '%s exception occurred (%s)',
                   request.status_code,
                   request.reason_phrase)
    else:
        logger.log(WARNING, 'WSGIResponse exception occurred')
8,723
https://github.com/indietyp/django-automated-logging/blob/095dfc6df62dca45f7db4516bc35e52085d0a01c/automated_logging/signals/request.py#L52-L68
[ "def", "get_current_track_info", "(", "self", ")", ":", "response", "=", "self", ".", "avTransport", ".", "GetPositionInfo", "(", "[", "(", "'InstanceID'", ",", "0", ")", ",", "(", "'Channel'", ",", "'Master'", ")", "]", ")", "track", "=", "{", "'title'", ":", "''", ",", "'artist'", ":", "''", ",", "'album'", ":", "''", ",", "'album_art'", ":", "''", ",", "'position'", ":", "''", "}", "track", "[", "'playlist_position'", "]", "=", "response", "[", "'Track'", "]", "track", "[", "'duration'", "]", "=", "response", "[", "'TrackDuration'", "]", "track", "[", "'uri'", "]", "=", "response", "[", "'TrackURI'", "]", "track", "[", "'position'", "]", "=", "response", "[", "'RelTime'", "]", "metadata", "=", "response", "[", "'TrackMetaData'", "]", "# Store the entire Metadata entry in the track, this can then be", "# used if needed by the client to restart a given URI", "track", "[", "'metadata'", "]", "=", "metadata", "# Duration seems to be '0:00:00' when listening to radio", "if", "metadata", "!=", "''", "and", "track", "[", "'duration'", "]", "==", "'0:00:00'", ":", "metadata", "=", "XML", ".", "fromstring", "(", "really_utf8", "(", "metadata", ")", ")", "# Try parse trackinfo", "trackinfo", "=", "metadata", ".", "findtext", "(", "'.//{urn:schemas-rinconnetworks-com:'", "'metadata-1-0/}streamContent'", ")", "or", "''", "index", "=", "trackinfo", ".", "find", "(", "' - '", ")", "if", "index", ">", "-", "1", ":", "track", "[", "'artist'", "]", "=", "trackinfo", "[", ":", "index", "]", "track", "[", "'title'", "]", "=", "trackinfo", "[", "index", "+", "3", ":", "]", "else", ":", "# Might find some kind of title anyway in metadata", "track", "[", "'title'", "]", "=", "metadata", ".", "findtext", "(", "'.//{http://purl.org/dc/'", "'elements/1.1/}title'", ")", "if", "not", "track", "[", "'title'", "]", ":", "track", "[", "'title'", "]", "=", "trackinfo", "# If the speaker is playing from the line-in source, querying for track", "# metadata will return \"NOT_IMPLEMENTED\".", 
"elif", "metadata", "not", "in", "(", "''", ",", "'NOT_IMPLEMENTED'", ",", "None", ")", ":", "# Track metadata is returned in DIDL-Lite format", "metadata", "=", "XML", ".", "fromstring", "(", "really_utf8", "(", "metadata", ")", ")", "md_title", "=", "metadata", ".", "findtext", "(", "'.//{http://purl.org/dc/elements/1.1/}title'", ")", "md_artist", "=", "metadata", ".", "findtext", "(", "'.//{http://purl.org/dc/elements/1.1/}creator'", ")", "md_album", "=", "metadata", ".", "findtext", "(", "'.//{urn:schemas-upnp-org:metadata-1-0/upnp/}album'", ")", "track", "[", "'title'", "]", "=", "\"\"", "if", "md_title", ":", "track", "[", "'title'", "]", "=", "md_title", "track", "[", "'artist'", "]", "=", "\"\"", "if", "md_artist", ":", "track", "[", "'artist'", "]", "=", "md_artist", "track", "[", "'album'", "]", "=", "\"\"", "if", "md_album", ":", "track", "[", "'album'", "]", "=", "md_album", "album_art_url", "=", "metadata", ".", "findtext", "(", "'.//{urn:schemas-upnp-org:metadata-1-0/upnp/}albumArtURI'", ")", "if", "album_art_url", "is", "not", "None", ":", "track", "[", "'album_art'", "]", "=", "self", ".", "music_library", ".", "build_album_art_full_uri", "(", "album_art_url", ")", "return", "track" ]
chooses a starting source file in the base directory for id = book_id
def source_start ( base = '' , book_id = 'book' ) : repo_htm_path = "{book_id}-h/{book_id}-h.htm" . format ( book_id = book_id ) possible_paths = [ "book.asciidoc" , repo_htm_path , "{}-0.txt" . format ( book_id ) , "{}-8.txt" . format ( book_id ) , "{}.txt" . format ( book_id ) , "{}-pdf.pdf" . format ( book_id ) , ] # return the first match for path in possible_paths : fullpath = os . path . join ( base , path ) if os . path . exists ( fullpath ) : return path return None
8,724
https://github.com/gitenberg-dev/gitberg/blob/3f6db8b5a22ccdd2110d3199223c30db4e558b5c/gitenberg/util/pg.py#L3-L24
[ "def", "_send", "(", "self", ",", "metric", ")", ":", "metric_name", "=", "self", ".", "get_name_from_path", "(", "metric", ".", "path", ")", "tmax", "=", "\"60\"", "dmax", "=", "\"0\"", "slope", "=", "\"both\"", "# FIXME: Badness, shouldn't *assume* double type", "metric_type", "=", "\"double\"", "units", "=", "\"\"", "group", "=", "\"\"", "self", ".", "gmetric", ".", "send", "(", "metric_name", ",", "metric", ".", "value", ",", "metric_type", ",", "units", ",", "slope", ",", "tmax", ",", "dmax", ",", "group", ")" ]
Decorator used to output prettified JSON .
def pretty_dump ( fn ) : @ wraps ( fn ) def pretty_dump_wrapper ( * args , * * kwargs ) : response . content_type = "application/json; charset=utf-8" return json . dumps ( fn ( * args , * * kwargs ) , # sort_keys=True, indent = 4 , separators = ( ',' , ': ' ) ) return pretty_dump_wrapper
8,725
https://github.com/Bystroushaak/bottle-rest/blob/428ef68a632ac092cdd49e2f03a664dbaccb0b86/src/bottle_rest/__init__.py#L22-L46
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Decode bottle . request . body to JSON .
def decode_json_body ( ) : raw_data = request . body . read ( ) try : return json . loads ( raw_data ) except ValueError as e : raise HTTPError ( 400 , e . __str__ ( ) )
8,726
https://github.com/Bystroushaak/bottle-rest/blob/428ef68a632ac092cdd49e2f03a664dbaccb0b86/src/bottle_rest/__init__.py#L49-L64
[ "def", "unshare", "(", "flags", ")", ":", "res", "=", "lib", ".", "unshare", "(", "flags", ")", "if", "res", "!=", "0", ":", "_check_error", "(", "ffi", ".", "errno", ")" ]
Convert TypeError to bottle . HTTPError with 400 code and message about wrong parameters .
def handle_type_error ( fn ) : @ wraps ( fn ) def handle_type_error_wrapper ( * args , * * kwargs ) : def any_match ( string_list , obj ) : return filter ( lambda x : x in obj , string_list ) try : return fn ( * args , * * kwargs ) except TypeError as e : message = e . __str__ ( ) str_list = [ "takes exactly" , "got an unexpected" , "takes no argument" , ] if fn . __name__ in message and any_match ( str_list , message ) : raise HTTPError ( 400 , message ) raise # This will cause 500: Internal server error return handle_type_error_wrapper
8,727
https://github.com/Bystroushaak/bottle-rest/blob/428ef68a632ac092cdd49e2f03a664dbaccb0b86/src/bottle_rest/__init__.py#L91-L119
[ "def", "_increment", "(", "self", ",", "*", "args", ")", ":", "value", "=", "self", ".", "_var", ".", "get", "(", ")", "if", "self", ".", "_resolution", ":", "value", "=", "self", ".", "_start", "+", "int", "(", "round", "(", "(", "value", "-", "self", ".", "_start", ")", "/", "self", ".", "_resolution", ")", ")", "*", "self", ".", "_resolution", "self", ".", "_var", ".", "set", "(", "value", ")", "self", ".", "display_value", "(", "value", ")" ]
Convert JSON in the body of the request to the parameters for the wrapped function .
def json_to_params ( fn = None , return_json = True ) : def json_to_params_decorator ( fn ) : @ handle_type_error @ wraps ( fn ) def json_to_params_wrapper ( * args , * * kwargs ) : data = decode_json_body ( ) if type ( data ) in [ tuple , list ] : args = list ( args ) + data elif type ( data ) == dict : # transport only items that are not already in kwargs allowed_keys = set ( data . keys ( ) ) - set ( kwargs . keys ( ) ) for key in allowed_keys : kwargs [ key ] = data [ key ] elif type ( data ) in PRIMITIVE_TYPES : args = list ( args ) args . append ( data ) if not return_json : return fn ( * args , * * kwargs ) return encode_json_body ( fn ( * args , * * kwargs ) ) return json_to_params_wrapper if fn : # python decorator with optional parameters bukkake return json_to_params_decorator ( fn ) return json_to_params_decorator
8,728
https://github.com/Bystroushaak/bottle-rest/blob/428ef68a632ac092cdd49e2f03a664dbaccb0b86/src/bottle_rest/__init__.py#L122-L167
[ "def", "vacuum", "(", "verbose", "=", "False", ")", ":", "ret", "=", "{", "}", "cmd", "=", "'imgadm vacuum -f'", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "retcode", "=", "res", "[", "'retcode'", "]", "if", "retcode", "!=", "0", ":", "ret", "[", "'Error'", "]", "=", "_exit_status", "(", "retcode", ")", "return", "ret", "# output: Deleted image d5b3865c-0804-11e5-be21-dbc4ce844ddc (lx-centos-6@20150601)", "result", "=", "{", "}", "for", "image", "in", "res", "[", "'stdout'", "]", ".", "splitlines", "(", ")", ":", "image", "=", "[", "var", "for", "var", "in", "image", ".", "split", "(", "\" \"", ")", "if", "var", "]", "result", "[", "image", "[", "2", "]", "]", "=", "{", "'name'", ":", "image", "[", "3", "]", "[", "1", ":", "image", "[", "3", "]", ".", "index", "(", "'@'", ")", "]", ",", "'version'", ":", "image", "[", "3", "]", "[", "image", "[", "3", "]", ".", "index", "(", "'@'", ")", "+", "1", ":", "-", "1", "]", "}", "if", "verbose", ":", "return", "result", "else", ":", "return", "list", "(", "result", ".", "keys", "(", ")", ")" ]
Decode JSON from the request and add it as data parameter for wrapped function .
def json_to_data ( fn = None , return_json = True ) : def json_to_data_decorator ( fn ) : @ handle_type_error @ wraps ( fn ) def get_data_wrapper ( * args , * * kwargs ) : kwargs [ "data" ] = decode_json_body ( ) if not return_json : return fn ( * args , * * kwargs ) return encode_json_body ( fn ( * args , * * kwargs ) ) return get_data_wrapper if fn : # python decorator with optional parameters bukkake return json_to_data_decorator ( fn ) return json_to_data_decorator
8,729
https://github.com/Bystroushaak/bottle-rest/blob/428ef68a632ac092cdd49e2f03a664dbaccb0b86/src/bottle_rest/__init__.py#L170-L197
[ "def", "_PrintTSKPartitionIdentifiersOverview", "(", "self", ",", "volume_system", ",", "volume_identifiers", ")", ":", "header", "=", "'The following partitions were found:\\n'", "self", ".", "_output_writer", ".", "Write", "(", "header", ")", "column_names", "=", "[", "'Identifier'", ",", "'Offset (in bytes)'", ",", "'Size (in bytes)'", "]", "table_view", "=", "views", ".", "CLITabularTableView", "(", "column_names", "=", "column_names", ")", "for", "volume_identifier", "in", "sorted", "(", "volume_identifiers", ")", ":", "volume", "=", "volume_system", ".", "GetVolumeByIdentifier", "(", "volume_identifier", ")", "if", "not", "volume", ":", "raise", "errors", ".", "SourceScannerError", "(", "'Partition missing for identifier: {0:s}.'", ".", "format", "(", "volume_identifier", ")", ")", "volume_extent", "=", "volume", ".", "extents", "[", "0", "]", "volume_offset", "=", "'{0:d} (0x{0:08x})'", ".", "format", "(", "volume_extent", ".", "offset", ")", "volume_size", "=", "self", ".", "_FormatHumanReadableSize", "(", "volume_extent", ".", "size", ")", "table_view", ".", "AddRow", "(", "[", "volume", ".", "identifier", ",", "volume_offset", ",", "volume_size", "]", ")", "self", ".", "_output_writer", ".", "Write", "(", "'\\n'", ")", "table_view", ".", "Write", "(", "self", ".", "_output_writer", ")", "self", ".", "_output_writer", ".", "Write", "(", "'\\n'", ")" ]
Convert bottle forms request to parameters for the wrapped function .
def form_to_params ( fn = None , return_json = True ) : def forms_to_params_decorator ( fn ) : @ handle_type_error @ wraps ( fn ) def forms_to_params_wrapper ( * args , * * kwargs ) : kwargs . update ( dict ( request . forms ) ) if not return_json : return fn ( * args , * * kwargs ) return encode_json_body ( fn ( * args , * * kwargs ) ) return forms_to_params_wrapper if fn : # python decorator with optional parameters bukkake return forms_to_params_decorator ( fn ) return forms_to_params_decorator
8,730
https://github.com/Bystroushaak/bottle-rest/blob/428ef68a632ac092cdd49e2f03a664dbaccb0b86/src/bottle_rest/__init__.py#L200-L228
[ "def", "is_dental", "(", "c", ",", "lang", ")", ":", "o", "=", "get_offset", "(", "c", ",", "lang", ")", "return", "(", "o", ">=", "DENTAL_RANGE", "[", "0", "]", "and", "o", "<=", "DENTAL_RANGE", "[", "1", "]", ")" ]
Fetch an OpenStreetMap diff file .
def fetch ( sequence , time = 'hour' ) : import StringIO import gzip import requests if time not in [ 'minute' , 'hour' , 'day' ] : raise ValueError ( 'The supplied type of replication file does not exist.' ) sqn = str ( sequence ) . zfill ( 9 ) url = "https://planet.osm.org/replication/%s/%s/%s/%s.osc.gz" % ( time , sqn [ 0 : 3 ] , sqn [ 3 : 6 ] , sqn [ 6 : 9 ] ) content = requests . get ( url ) if content . status_code == 404 : raise EnvironmentError ( 'Diff file cannot be found.' ) content = StringIO . StringIO ( content . content ) data_stream = gzip . GzipFile ( fileobj = content ) return data_stream
8,731
https://github.com/ethan-nelson/osm_diff_tool/blob/d5b083100dedd9427ad23c4be5316f89a55ec8f0/osmdt/fetch.py#L1-L42
[ "def", "promote_owner", "(", "self", ",", "stream_id", ",", "user_id", ")", ":", "req_hook", "=", "'pod/v1/room/'", "+", "stream_id", "+", "'/membership/promoteOwner'", "req_args", "=", "'{ \"id\": %s }'", "%", "user_id", "status_code", ",", "response", "=", "self", ".", "__rest__", ".", "POST_query", "(", "req_hook", ",", "req_args", ")", "self", ".", "logger", ".", "debug", "(", "'%s: %s'", "%", "(", "status_code", ",", "response", ")", ")", "return", "status_code", ",", "response" ]
Many 2 Many relationship signall receivver .
def m2m_callback ( sender , instance , action , reverse , model , pk_set , using , * * kwargs ) : if validate_instance ( instance ) and settings . AUTOMATED_LOGGING [ 'to_database' ] : if action in [ "post_add" , 'post_remove' ] : modification = [ model . objects . get ( pk = x ) for x in pk_set ] if 'al_chl' in instance . __dict__ . keys ( ) and instance . al_chl : changelog = instance . al_chl else : changelog = ModelChangelog ( ) changelog . information = ModelObject ( ) changelog . information . value = repr ( instance ) changelog . information . type = ContentType . objects . get_for_model ( instance ) changelog . information . save ( ) changelog . save ( ) for f in modification : obj = ModelObject ( ) obj . value = repr ( f ) try : obj . type = ContentType . objects . get_for_model ( f ) except Exception : logger = logging . getLogger ( __name__ ) logger . debug ( 'Could not determin the type of the modification.' ) obj . save ( ) if action == 'post_add' : changelog . inserted . add ( obj ) else : changelog . removed . add ( obj ) changelog . save ( ) instance . al_chl = changelog if 'al_evt' in instance . __dict__ . keys ( ) : target = instance . al_evt else : target = Model ( ) target . user = get_current_user ( ) target . action = 2 if action == 'post_add' else 2 target . save ( ) ct = ContentType . objects . get_for_model ( instance ) . app_label target . application = Application . objects . get_or_create ( name = ct ) [ 0 ] target . information = ModelObject ( ) target . information . value = repr ( instance ) target . information . type = ContentType . objects . get_for_model ( instance ) target . information . save ( ) instance . al_evt = target target . modification = changelog target . save ( )
8,732
https://github.com/indietyp/django-automated-logging/blob/095dfc6df62dca45f7db4516bc35e52085d0a01c/automated_logging/signals/m2m.py#L13-L73
[ "def", "_create_auth", "(", "team", ",", "timeout", "=", "None", ")", ":", "url", "=", "get_registry_url", "(", "team", ")", "contents", "=", "_load_auth", "(", ")", "auth", "=", "contents", ".", "get", "(", "url", ")", "if", "auth", "is", "not", "None", ":", "# If the access token expires within a minute, update it.", "if", "auth", "[", "'expires_at'", "]", "<", "time", ".", "time", "(", ")", "+", "60", ":", "try", ":", "auth", "=", "_update_auth", "(", "team", ",", "auth", "[", "'refresh_token'", "]", ",", "timeout", ")", "except", "CommandException", "as", "ex", ":", "raise", "CommandException", "(", "\"Failed to update the access token (%s). Run `quilt login%s` again.\"", "%", "(", "ex", ",", "' '", "+", "team", "if", "team", "else", "''", ")", ")", "contents", "[", "url", "]", "=", "auth", "_save_auth", "(", "contents", ")", "return", "auth" ]
Return a tuple that contains font properties required for rendering .
def font ( self , name , properties ) : size , slant , weight = ( properties ) return ( name , ( self . ty ( size ) , slant , weight ) )
8,733
https://github.com/gitenberg-dev/gitberg/blob/3f6db8b5a22ccdd2110d3199223c30db4e558b5c/gitenberg/util/tenprintcover.py#L222-L227
[ "def", "listDatasetAccessTypes", "(", "self", ",", "dataset_access_type", "=", "''", ")", ":", "if", "dataset_access_type", ":", "dataset_access_type", "=", "dataset_access_type", ".", "replace", "(", "\"*\"", ",", "\"%\"", ")", "try", ":", "return", "self", ".", "dbsDatasetAccessType", ".", "listDatasetAccessTypes", "(", "dataset_access_type", ")", "except", "dbsException", "as", "de", ":", "dbsExceptionHandler", "(", "de", ".", "eCode", ",", "de", ".", "message", ",", "self", ".", "logger", ".", "exception", ",", "de", ".", "serverError", ")", "except", "Exception", "as", "ex", ":", "sError", "=", "\"DBSReaderModel/listDatasetAccessTypes. %s\\n. Exception trace: \\n %s\"", "%", "(", "ex", ",", "traceback", ".", "format_exc", "(", ")", ")", "dbsExceptionHandler", "(", "'dbsException-server-error'", ",", "dbsExceptionCode", "[", "'dbsException-server-error'", "]", ",", "self", ".", "logger", ".", "exception", ",", "sError", ")" ]
Fetch the latest state .
def async_update ( self ) : _LOGGER . debug ( 'Calling update on Alarm.com' ) response = None if not self . _login_info : yield from self . async_login ( ) try : with async_timeout . timeout ( 10 , loop = self . _loop ) : response = yield from self . _websession . get ( self . ALARMDOTCOM_URL + '{}/main.aspx' . format ( self . _login_info [ 'sessionkey' ] ) , headers = { 'User-Agent' : 'Mozilla/5.0 ' '(Windows NT 6.1; ' 'WOW64; rv:40.0) ' 'Gecko/20100101 ' 'Firefox/40.1' } ) _LOGGER . debug ( 'Response from Alarm.com: %s' , response . status ) text = yield from response . text ( ) _LOGGER . debug ( text ) tree = BeautifulSoup ( text , 'html.parser' ) try : self . state = tree . select ( self . ALARM_STATE ) [ 0 ] . get_text ( ) _LOGGER . debug ( 'Current alarm state: %s' , self . state ) self . sensor_status = tree . select ( self . SENSOR_STATUS ) [ 0 ] . get_text ( ) _LOGGER . debug ( 'Current sensor status: %s' , self . sensor_status ) except IndexError : # We may have timed out. Re-login again self . state = None self . sensor_status = None self . _login_info = None yield from self . async_update ( ) except ( asyncio . TimeoutError , aiohttp . ClientError ) : _LOGGER . error ( "Can not load login page from Alarm.com" ) return False finally : if response is not None : yield from response . release ( )
8,734
https://github.com/Xorso/pyalarmdotcom/blob/9d2cfe1968d52bb23533aeda80ca5efbfb692304/pyalarmdotcom/pyalarmdotcom.py#L209-L249
[ "def", "formula_dual", "(", "input_formula", ":", "str", ")", "->", "str", ":", "conversion_dictionary", "=", "{", "'and'", ":", "'or'", ",", "'or'", ":", "'and'", ",", "'True'", ":", "'False'", ",", "'False'", ":", "'True'", "}", "return", "re", ".", "sub", "(", "'|'", ".", "join", "(", "re", ".", "escape", "(", "key", ")", "for", "key", "in", "conversion_dictionary", ".", "keys", "(", ")", ")", ",", "lambda", "k", ":", "conversion_dictionary", "[", "k", ".", "group", "(", "0", ")", "]", ",", "input_formula", ")" ]
Creates an api handler and sets it on self
def create_api_handler ( self ) : try : self . github = github3 . login ( username = config . data [ 'gh_user' ] , password = config . data [ 'gh_password' ] ) except KeyError as e : raise config . NotConfigured ( e ) logger . info ( "ratelimit remaining: {}" . format ( self . github . ratelimit_remaining ) ) if hasattr ( self . github , 'set_user_agent' ) : self . github . set_user_agent ( '{}: {}' . format ( self . org_name , self . org_homepage ) ) try : self . org = self . github . organization ( self . org_name ) except github3 . GitHubError : logger . error ( "Possibly the github ratelimit has been exceeded" ) logger . info ( "ratelimit: " + str ( self . github . ratelimit_remaining ) )
8,735
https://github.com/gitenberg-dev/gitberg/blob/3f6db8b5a22ccdd2110d3199223c30db4e558b5c/gitenberg/push.py#L72-L86
[ "def", "external_metadata", "(", "self", ",", "datasource_type", "=", "None", ",", "datasource_id", "=", "None", ")", ":", "if", "datasource_type", "==", "'druid'", ":", "datasource", "=", "ConnectorRegistry", ".", "get_datasource", "(", "datasource_type", ",", "datasource_id", ",", "db", ".", "session", ")", "elif", "datasource_type", "==", "'table'", ":", "database", "=", "(", "db", ".", "session", ".", "query", "(", "Database", ")", ".", "filter_by", "(", "id", "=", "request", ".", "args", ".", "get", "(", "'db_id'", ")", ")", ".", "one", "(", ")", ")", "Table", "=", "ConnectorRegistry", ".", "sources", "[", "'table'", "]", "datasource", "=", "Table", "(", "database", "=", "database", ",", "table_name", "=", "request", ".", "args", ".", "get", "(", "'table_name'", ")", ",", "schema", "=", "request", ".", "args", ".", "get", "(", "'schema'", ")", "or", "None", ",", ")", "external_metadata", "=", "datasource", ".", "external_metadata", "(", ")", "return", "self", ".", "json_response", "(", "external_metadata", ")" ]
Validate decorator args when used to decorate a function .
def _validate_func_args ( func , kwargs ) : args , varargs , varkw , defaults = inspect . getargspec ( func ) if set ( kwargs . keys ( ) ) != set ( args [ 1 : ] ) : # chop off self raise TypeError ( "decorator kwargs do not match %s()'s kwargs" % func . __name__ )
8,736
https://github.com/dstanek/snake-guice/blob/d20b62de3ee31e84119c801756398c35ed803fb3/snakeguice/decorators.py#L52-L58
[ "def", "merge_entities", "(", "self", ",", "from_entity_ids", ",", "to_entity_id", ",", "force", "=", "False", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'from_entity_ids'", ":", "from_entity_ids", ",", "'to_entity_id'", ":", "to_entity_id", ",", "'force'", ":", "force", ",", "}", "api_path", "=", "'/v1/{mount_point}/entity/merge'", ".", "format", "(", "mount_point", "=", "mount_point", ")", "return", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")" ]
Get an enclosing frame that skips decorator code
def enclosing_frame ( frame = None , level = 2 ) : frame = frame or sys . _getframe ( level ) while frame . f_globals . get ( '__name__' ) == __name__ : frame = frame . f_back return frame
8,737
https://github.com/dstanek/snake-guice/blob/d20b62de3ee31e84119c801756398c35ed803fb3/snakeguice/decorators.py#L61-L65
[ "def", "parse_url", "(", "url", ")", ":", "parsed", "=", "url", "if", "not", "url", ".", "startswith", "(", "\"http://\"", ")", "and", "not", "url", ".", "startswith", "(", "\"https://\"", ")", ":", "# if url is like www.yahoo.com", "parsed", "=", "\"http://\"", "+", "parsed", "elif", "url", ".", "startswith", "(", "\"https://\"", ")", ":", "parsed", "=", "parsed", "[", "8", ":", "]", "parsed", "=", "\"http://\"", "+", "parsed", "index_hash", "=", "parsed", ".", "rfind", "(", "\"#\"", ")", "# remove trailing #", "index_slash", "=", "parsed", ".", "rfind", "(", "\"/\"", ")", "if", "index_hash", ">", "index_slash", ":", "parsed", "=", "parsed", "[", "0", ":", "index_hash", "]", "return", "parsed" ]
Get a GPSEventConsumer client .
def get_event_consumer ( config , success_channel , error_channel , metrics , * * kwargs ) : builder = event_consumer . GPSEventConsumerBuilder ( config , success_channel , error_channel , metrics , * * kwargs ) return builder . build_event_consumer ( )
8,738
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/plugins/service/__init__.py#L34-L60
[ "def", "to_xlsx", "(", "self", ",", "*", "*", "kwargs", ")", ":", "from", "xlsxwriter", ".", "workbook", "import", "Workbook", "as", "_Workbook", "self", ".", "workbook_obj", "=", "_Workbook", "(", "*", "*", "kwargs", ")", "self", ".", "workbook_obj", ".", "set_calc_mode", "(", "self", ".", "calc_mode", ")", "for", "worksheet", "in", "self", ".", "itersheets", "(", ")", ":", "worksheet", ".", "to_xlsx", "(", "workbook", "=", "self", ")", "self", ".", "workbook_obj", ".", "filename", "=", "self", ".", "filename", "if", "self", ".", "filename", ":", "self", ".", "workbook_obj", ".", "close", "(", ")", "return", "self", ".", "workbook_obj" ]
Get a GCEEnricher client .
def get_enricher ( config , metrics , * * kwargs ) : builder = enricher . GCEEnricherBuilder ( config , metrics , * * kwargs ) return builder . build_enricher ( )
8,739
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/plugins/service/__init__.py#L63-L80
[ "def", "rrmdir", "(", "directory", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "directory", ",", "topdown", "=", "False", ")", ":", "for", "name", "in", "files", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", ")", "for", "name", "in", "dirs", ":", "os", ".", "rmdir", "(", "os", ".", "path", ".", "join", "(", "root", ",", "name", ")", ")", "os", ".", "rmdir", "(", "directory", ")" ]
Get a GDNSPublisher client .
def get_gdns_publisher ( config , metrics , * * kwargs ) : builder = gdns_publisher . GDNSPublisherBuilder ( config , metrics , * * kwargs ) return builder . build_publisher ( )
8,740
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/plugins/service/__init__.py#L83-L100
[ "def", "cancel", "(", "self", ")", ":", "self", ".", "_transaction", "=", "False", "try", ":", "cancel", "=", "self", ".", "_con", ".", "cancel", "except", "AttributeError", ":", "pass", "else", ":", "cancel", "(", ")" ]
MHC alleles are named with a frustratingly loose system . It s not uncommon to see dozens of different forms for the same allele .
def normalize_allele_name ( raw_allele , omit_dra1 = False , infer_class2_pair = True ) : cache_key = ( raw_allele , omit_dra1 , infer_class2_pair ) if cache_key in _normalized_allele_cache : return _normalized_allele_cache [ cache_key ] parsed_alleles = parse_classi_or_classii_allele_name ( raw_allele , infer_pair = infer_class2_pair ) species = parsed_alleles [ 0 ] . species normalized_list = [ species ] # Optionally omit the alpha allele, e.g. for IEDB predictors. if omit_dra1 and len ( parsed_alleles ) == 2 : alpha , beta = parsed_alleles # by convention the alpha allelle is omitted since it's assumed # to be DRA1*01:01 if alpha == _DRA1_0101 : parsed_alleles = [ beta ] for parsed_allele in parsed_alleles : if len ( parsed_allele . allele_family ) > 0 : normalized_list . append ( "%s*%s:%s" % ( parsed_allele . gene , parsed_allele . allele_family , parsed_allele . allele_code ) ) else : # mice don't have allele families # e.g. H-2-Kd # species = H-2 # gene = K # allele = d normalized_list . append ( "%s%s" % ( parsed_allele . gene , parsed_allele . allele_code ) ) normalized = "-" . join ( normalized_list ) _normalized_allele_cache [ cache_key ] = normalized return normalized
8,741
https://github.com/openvax/mhcnames/blob/71694b9d620db68ceee44da1b8422ff436f15bd3/mhcnames/normalization.py#L28-L101
[ "def", "interpolation_change_cb", "(", "self", ",", "setting", ",", "value", ")", ":", "canvas_img", "=", "self", ".", "get_canvas_image", "(", ")", "canvas_img", ".", "interpolation", "=", "value", "canvas_img", ".", "reset_optimize", "(", ")", "self", ".", "redraw", "(", "whence", "=", "0", ")" ]
Parse version from changelog written in RST format .
def getVersion ( data ) : data = data . splitlines ( ) return next ( ( v for v , u in zip ( data , data [ 1 : ] ) # v = version, u = underline if len ( v ) == len ( u ) and allSame ( u ) and hasDigit ( v ) and "." in v ) )
8,742
https://github.com/Bystroushaak/bottle-rest/blob/428ef68a632ac092cdd49e2f03a664dbaccb0b86/docs/__init__.py#L16-L25
[ "def", "get_max_devices_per_port_for_storage_bus", "(", "self", ",", "bus", ")", ":", "if", "not", "isinstance", "(", "bus", ",", "StorageBus", ")", ":", "raise", "TypeError", "(", "\"bus can only be an instance of type StorageBus\"", ")", "max_devices_per_port", "=", "self", ".", "_call", "(", "\"getMaxDevicesPerPortForStorageBus\"", ",", "in_p", "=", "[", "bus", "]", ")", "return", "max_devices_per_port" ]
Splits off the species component of the allele name from the rest of it .
def split_species_prefix ( name , seps = "-:_ " ) : species = None name_upper = name . upper ( ) name_len = len ( name ) for curr_prefix in _all_prefixes : n = len ( curr_prefix ) if name_len <= n : continue if name_upper . startswith ( curr_prefix . upper ( ) ) : species = curr_prefix name = name [ n : ] . strip ( seps ) break return ( species , name )
8,743
https://github.com/openvax/mhcnames/blob/71694b9d620db68ceee44da1b8422ff436f15bd3/mhcnames/species.py#L93-L110
[ "def", "_init_request_logging", "(", "self", ",", "app", ")", ":", "enabled", "=", "not", "app", ".", "config", ".", "get", "(", "CONF_DISABLE_REQUEST_LOGGING", ",", "False", ")", "if", "not", "enabled", ":", "return", "self", ".", "_requests_middleware", "=", "WSGIApplication", "(", "self", ".", "_key", ",", "app", ".", "wsgi_app", ",", "telemetry_channel", "=", "self", ".", "_channel", ")", "app", ".", "wsgi_app", "=", "self", ".", "_requests_middleware" ]
Reformats the control flow output
def formatFlow ( s ) : result = "" shifts = [ ] # positions of opening '<' pos = 0 # symbol position in a line nextIsList = False def IsNextList ( index , maxIndex , buf ) : if index == maxIndex : return False if buf [ index + 1 ] == '<' : return True if index < maxIndex - 1 : if buf [ index + 1 ] == '\n' and buf [ index + 2 ] == '<' : return True return False maxIndex = len ( s ) - 1 for index in range ( len ( s ) ) : sym = s [ index ] if sym == "\n" : lastShift = shifts [ - 1 ] result += sym + lastShift * " " pos = lastShift if index < maxIndex : if s [ index + 1 ] not in "<>" : result += " " pos += 1 continue if sym == "<" : if nextIsList == False : shifts . append ( pos ) else : nextIsList = False pos += 1 result += sym continue if sym == ">" : shift = shifts [ - 1 ] result += '\n' result += shift * " " pos = shift result += sym pos += 1 if IsNextList ( index , maxIndex , s ) : nextIsList = True else : del shifts [ - 1 ] nextIsList = False continue result += sym pos += 1 return result
8,744
https://github.com/SergeySatskiy/cdm-flowparser/blob/0af20325eeafd964c684d66a31cd2efd51fd25a6/utils/run.py#L26-L78
[ "def", "require_session", "(", "handler", ")", ":", "@", "functools", ".", "wraps", "(", "handler", ")", "async", "def", "decorated", "(", "request", ":", "web", ".", "Request", ")", "->", "web", ".", "Response", ":", "request_session_token", "=", "request", ".", "match_info", "[", "'session'", "]", "session", "=", "session_from_request", "(", "request", ")", "if", "not", "session", "or", "request_session_token", "!=", "session", ".", "token", ":", "LOG", ".", "warning", "(", "f\"request for invalid session {request_session_token}\"", ")", "return", "web", ".", "json_response", "(", "data", "=", "{", "'error'", ":", "'bad-token'", ",", "'message'", ":", "f'No such session {request_session_token}'", "}", ",", "status", "=", "404", ")", "return", "await", "handler", "(", "request", ",", "session", ")", "return", "decorated" ]
Trains itself using the sequence data .
def train ( self , training_set , iterations = 500 ) : if len ( training_set ) > 2 : self . __X = np . matrix ( [ example [ 0 ] for example in training_set ] ) if self . __num_labels == 1 : self . __y = np . matrix ( [ example [ 1 ] for example in training_set ] ) . reshape ( ( - 1 , 1 ) ) else : eye = np . eye ( self . __num_labels ) self . __y = np . matrix ( [ eye [ example [ 1 ] ] for example in training_set ] ) else : self . __X = np . matrix ( training_set [ 0 ] ) if self . __num_labels == 1 : self . __y = np . matrix ( training_set [ 1 ] ) . reshape ( ( - 1 , 1 ) ) else : eye = np . eye ( self . __num_labels ) self . __y = np . matrix ( [ eye [ index ] for sublist in training_set [ 1 ] for index in sublist ] ) self . __m = self . __X . shape [ 0 ] self . __input_layer_size = self . __X . shape [ 1 ] self . __sizes = [ self . __input_layer_size ] self . __sizes . extend ( self . __hidden_layers ) self . __sizes . append ( self . __num_labels ) initial_theta = [ ] for count in range ( len ( self . __sizes ) - 1 ) : epsilon = np . sqrt ( 6 ) / np . sqrt ( self . __sizes [ count ] + self . __sizes [ count + 1 ] ) initial_theta . append ( np . random . rand ( self . __sizes [ count + 1 ] , self . __sizes [ count ] + 1 ) * 2 * epsilon - epsilon ) initial_theta = self . __unroll ( initial_theta ) self . __thetas = self . __roll ( fmin_bfgs ( self . __cost_function , initial_theta , fprime = self . __cost_grad_function , maxiter = iterations ) )
8,745
https://github.com/pqn/neural/blob/505d8fb1c58868a7292c40caab4a22b577615886/neural/neural.py#L20-L46
[ "def", "_put_bucket_cors", "(", "self", ")", ":", "if", "self", ".", "s3props", "[", "'cors'", "]", "[", "'enabled'", "]", "and", "self", ".", "s3props", "[", "'website'", "]", "[", "'enabled'", "]", ":", "cors_config", "=", "{", "}", "cors_rules", "=", "[", "]", "for", "each_rule", "in", "self", ".", "s3props", "[", "'cors'", "]", "[", "'cors_rules'", "]", ":", "cors_rules", ".", "append", "(", "{", "'AllowedHeaders'", ":", "each_rule", "[", "'cors_headers'", "]", ",", "'AllowedMethods'", ":", "each_rule", "[", "'cors_methods'", "]", ",", "'AllowedOrigins'", ":", "each_rule", "[", "'cors_origins'", "]", ",", "'ExposeHeaders'", ":", "each_rule", "[", "'cors_expose_headers'", "]", ",", "'MaxAgeSeconds'", ":", "each_rule", "[", "'cors_max_age'", "]", "}", ")", "cors_config", "=", "{", "'CORSRules'", ":", "cors_rules", "}", "LOG", ".", "debug", "(", "cors_config", ")", "_response", "=", "self", ".", "s3client", ".", "put_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ",", "CORSConfiguration", "=", "cors_config", ")", "else", ":", "_response", "=", "self", ".", "s3client", ".", "delete_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ")", "LOG", ".", "debug", "(", "'Response setting up S3 CORS: %s'", ",", "_response", ")", "LOG", ".", "info", "(", "'S3 CORS configuration updated'", ")" ]
Returns predictions of input test cases .
def predict ( self , X ) : return self . __cost ( self . __unroll ( self . __thetas ) , 0 , np . matrix ( X ) )
8,746
https://github.com/pqn/neural/blob/505d8fb1c58868a7292c40caab4a22b577615886/neural/neural.py#L48-L50
[ "def", "ls", "(", "args", ")", ":", "table", "=", "[", "]", "queues", "=", "list", "(", "resources", ".", "sqs", ".", "queues", ".", "filter", "(", "QueueNamePrefix", "=", "\"github\"", ")", ")", "max_age", "=", "datetime", ".", "now", "(", "tzutc", "(", ")", ")", "-", "timedelta", "(", "days", "=", "15", ")", "for", "topic", "in", "resources", ".", "sns", ".", "topics", ".", "all", "(", ")", ":", "account_id", "=", "ARN", "(", "topic", ".", "arn", ")", ".", "account_id", "try", ":", "bucket", "=", "resources", ".", "s3", ".", "Bucket", "(", "\"deploy-status-{}\"", ".", "format", "(", "account_id", ")", ")", "status_objects", "=", "bucket", ".", "objects", ".", "filter", "(", "Prefix", "=", "ARN", "(", "topic", ".", "arn", ")", ".", "resource", ")", "recent_status_objects", "=", "{", "o", ".", "key", ":", "o", "for", "o", "in", "status_objects", "if", "o", ".", "last_modified", ">", "max_age", "}", "except", "ClientError", ":", "continue", "if", "ARN", "(", "topic", ".", "arn", ")", ".", "resource", ".", "startswith", "(", "\"github\"", ")", ":", "for", "queue", "in", "queues", ":", "queue_name", "=", "os", ".", "path", ".", "basename", "(", "queue", ".", "url", ")", "if", "queue_name", ".", "startswith", "(", "ARN", "(", "topic", ".", "arn", ")", ".", "resource", ")", ":", "row", "=", "dict", "(", "Topic", "=", "topic", ",", "Queue", "=", "queue", ")", "status_object", "=", "bucket", ".", "Object", "(", "os", ".", "path", ".", "join", "(", "queue_name", ",", "\"status\"", ")", ")", "if", "status_object", ".", "key", "not", "in", "recent_status_objects", ":", "continue", "try", ":", "github", ",", "owner", ",", "repo", ",", "events", ",", "instance", "=", "os", ".", "path", ".", "dirname", "(", "status_object", ".", "key", ")", ".", "split", "(", "\"-\"", ",", "4", ")", "status", "=", "json", ".", "loads", "(", "status_object", ".", "get", "(", ")", "[", "\"Body\"", "]", ".", "read", "(", ")", ".", "decode", "(", "\"utf-8\"", ")", ")", "row", ".", "update", "(", 
"status", ",", "Owner", "=", "owner", ",", "Repo", "=", "repo", ",", "Instance", "=", "instance", ",", "Updated", "=", "status_object", ".", "last_modified", ")", "except", "Exception", ":", "pass", "table", ".", "append", "(", "row", ")", "args", ".", "columns", "=", "[", "\"Owner\"", ",", "\"Repo\"", ",", "\"Instance\"", ",", "\"Status\"", ",", "\"Ref\"", ",", "\"Commit\"", ",", "\"Updated\"", ",", "\"Topic\"", ",", "\"Queue\"", "]", "page_output", "(", "tabulate", "(", "table", ",", "args", ")", ")" ]
Computes activation cost function and derivative .
def __cost ( self , params , phase , X ) : params = self . __roll ( params ) a = np . concatenate ( ( np . ones ( ( X . shape [ 0 ] , 1 ) ) , X ) , axis = 1 ) # This is a1 calculated_a = [ a ] # a1 is at index 0, a_n is at index n-1 calculated_z = [ 0 ] # There is no z1, z_n is at index n-1 for i , theta in enumerate ( params ) : # calculated_a now contains a1, a2, a3 if there was only one hidden layer (two theta matrices) z = calculated_a [ - 1 ] * theta . transpose ( ) # z_n = a_n-1 * Theta_n-1' calculated_z . append ( z ) # Save the new z_n a = self . sigmoid ( z ) # a_n = sigmoid(z_n) if i != len ( params ) - 1 : # Don't append extra ones for the output layer a = np . concatenate ( ( np . ones ( ( a . shape [ 0 ] , 1 ) ) , a ) , axis = 1 ) # Append the extra column of ones for all other layers calculated_a . append ( a ) # Save the new a if phase == 0 : if self . __num_labels > 1 : return np . argmax ( calculated_a [ - 1 ] , axis = 1 ) return np . round ( calculated_a [ - 1 ] ) J = np . sum ( - np . multiply ( self . __y , np . log ( calculated_a [ - 1 ] ) ) - np . multiply ( 1 - self . __y , np . log ( 1 - calculated_a [ - 1 ] ) ) ) / self . __m # Calculate cost if self . __lambda != 0 : # If we're using regularization... J += np . sum ( [ np . sum ( np . power ( theta [ : , 1 : ] , 2 ) ) for theta in params ] ) * self . __lambda / ( 2.0 * self . __m ) # ...add it from all theta matrices if phase == 1 : return J reversed_d = [ ] reversed_theta_grad = [ ] for i in range ( len ( params ) ) : # For once per theta matrix... if i == 0 : # ...if it's the first one... d = calculated_a [ - 1 ] - self . __y # ...initialize the error... else : # ...otherwise d_n-1 = d_n * Theta_n-1[missing ones] .* sigmoid(z_n-1) d = np . multiply ( reversed_d [ - 1 ] * params [ - i ] [ : , 1 : ] , self . sigmoid_grad ( calculated_z [ - 1 - i ] ) ) # With i=1/1 hidden layer we're getting Theta2 at index -1, and z2 at index -2 reversed_d . append ( d ) theta_grad = reversed_d [ - 1 ] . 
transpose ( ) * calculated_a [ - i - 2 ] / self . __m if self . __lambda != 0 : theta_grad += np . concatenate ( ( np . zeros ( ( params [ - 1 - i ] . shape [ 0 ] , 1 ) ) , params [ - 1 - i ] [ : , 1 : ] ) , axis = 1 ) * self . __lambda / self . __m # regularization reversed_theta_grad . append ( theta_grad ) theta_grad = self . __unroll ( reversed ( reversed_theta_grad ) ) return theta_grad
8,747
https://github.com/pqn/neural/blob/505d8fb1c58868a7292c40caab4a22b577615886/neural/neural.py#L60-L99
[ "def", "unregister", "(", "self", ",", "entity_class", ",", "entity", ")", ":", "EntityState", ".", "release", "(", "entity", ",", "self", ")", "self", ".", "__entity_set_map", "[", "entity_class", "]", ".", "remove", "(", "entity", ")" ]
Converts parameter array back into matrices .
def __roll ( self , unrolled ) : rolled = [ ] index = 0 for count in range ( len ( self . __sizes ) - 1 ) : in_size = self . __sizes [ count ] out_size = self . __sizes [ count + 1 ] theta_unrolled = np . matrix ( unrolled [ index : index + ( in_size + 1 ) * out_size ] ) theta_rolled = theta_unrolled . reshape ( ( out_size , in_size + 1 ) ) rolled . append ( theta_rolled ) index += ( in_size + 1 ) * out_size return rolled
8,748
https://github.com/pqn/neural/blob/505d8fb1c58868a7292c40caab4a22b577615886/neural/neural.py#L101-L112
[ "def", "line_line_collide", "(", "line1", ",", "line2", ")", ":", "s", ",", "t", ",", "success", "=", "segment_intersection", "(", "line1", "[", ":", ",", "0", "]", ",", "line1", "[", ":", ",", "1", "]", ",", "line2", "[", ":", ",", "0", "]", ",", "line2", "[", ":", ",", "1", "]", ")", "if", "success", ":", "return", "_helpers", ".", "in_interval", "(", "s", ",", "0.0", ",", "1.0", ")", "and", "_helpers", ".", "in_interval", "(", "t", ",", "0.0", ",", "1.0", ")", "else", ":", "disjoint", ",", "_", "=", "parallel_lines_parameters", "(", "line1", "[", ":", ",", "0", "]", ",", "line1", "[", ":", ",", "1", "]", ",", "line2", "[", ":", ",", "0", "]", ",", "line2", "[", ":", ",", "1", "]", ")", "return", "not", "disjoint" ]
Converts parameter matrices into an array .
def __unroll ( self , rolled ) : return np . array ( np . concatenate ( [ matrix . flatten ( ) for matrix in rolled ] , axis = 1 ) ) . reshape ( - 1 )
8,749
https://github.com/pqn/neural/blob/505d8fb1c58868a7292c40caab4a22b577615886/neural/neural.py#L114-L116
[ "def", "get_cdn_metadata", "(", "self", ",", "container", ")", ":", "uri", "=", "\"%s/%s\"", "%", "(", "self", ".", "uri_base", ",", "utils", ".", "get_name", "(", "container", ")", ")", "resp", ",", "resp_body", "=", "self", ".", "api", ".", "cdn_request", "(", "uri", ",", "\"HEAD\"", ")", "ret", "=", "dict", "(", "resp", ".", "headers", ")", "# Remove non-CDN headers", "ret", ".", "pop", "(", "\"content-length\"", ",", "None", ")", "ret", ".", "pop", "(", "\"content-type\"", ",", "None", ")", "ret", ".", "pop", "(", "\"date\"", ",", "None", ")", "return", "ret" ]
Gradient of sigmoid function .
def sigmoid_grad ( self , z ) : return np . multiply ( self . sigmoid ( z ) , 1 - self . sigmoid ( z ) )
8,750
https://github.com/pqn/neural/blob/505d8fb1c58868a7292c40caab4a22b577615886/neural/neural.py#L122-L124
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Used to check gradient estimation through slope approximation .
def grad ( self , params , epsilon = 0.0001 ) : grad = [ ] for x in range ( len ( params ) ) : temp = np . copy ( params ) temp [ x ] += epsilon temp2 = np . copy ( params ) temp2 [ x ] -= epsilon grad . append ( ( self . __cost_function ( temp ) - self . __cost_function ( temp2 ) ) / ( 2 * epsilon ) ) return np . array ( grad )
8,751
https://github.com/pqn/neural/blob/505d8fb1c58868a7292c40caab4a22b577615886/neural/neural.py#L126-L135
[ "def", "_make_query_from_terms", "(", "self", ",", "terms", ")", ":", "expanded_terms", "=", "self", ".", "_expand_terms", "(", "terms", ")", "cterms", "=", "''", "if", "expanded_terms", "[", "'doc'", "]", ":", "cterms", "=", "self", ".", "backend", ".", "_or_join", "(", "expanded_terms", "[", "'doc'", "]", ")", "keywords", "=", "expanded_terms", "[", "'keywords'", "]", "frm_to", "=", "self", ".", "_from_to_as_term", "(", "expanded_terms", "[", "'from'", "]", ",", "expanded_terms", "[", "'to'", "]", ")", "if", "frm_to", ":", "keywords", ".", "append", "(", "frm_to", ")", "if", "keywords", ":", "if", "cterms", ":", "cterms", "=", "self", ".", "backend", ".", "_and_join", "(", "[", "cterms", ",", "self", ".", "backend", ".", "_field_term", "(", "'keywords'", ",", "expanded_terms", "[", "'keywords'", "]", ")", "]", ")", "else", ":", "cterms", "=", "self", ".", "backend", ".", "_field_term", "(", "'keywords'", ",", "expanded_terms", "[", "'keywords'", "]", ")", "logger", ".", "debug", "(", "'partition terms conversion: `{}` terms converted to `{}` query.'", ".", "format", "(", "terms", ",", "cterms", ")", ")", "return", "cterms" ]
Add a webhook to a device .
def postWebhook ( self , dev_id , external_id , url , event_types ) : path = 'notification/webhook' payload = { 'device' : { 'id' : dev_id } , 'externalId' : external_id , 'url' : url , 'eventTypes' : event_types } return self . rachio . post ( path , payload )
8,752
https://github.com/rfverbruggen/rachiopy/blob/c91abc9984f0f453e60fa905285c1b640c3390ae/rachiopy/notification.py#L24-L34
[ "def", "get_cached_image", "(", "self", ",", "width", ",", "height", ",", "zoom", ",", "parameters", "=", "None", ",", "clear", "=", "False", ")", ":", "global", "MAX_ALLOWED_AREA", "if", "not", "parameters", ":", "parameters", "=", "{", "}", "if", "self", ".", "__compare_parameters", "(", "width", ",", "height", ",", "zoom", ",", "parameters", ")", "and", "not", "clear", ":", "return", "True", ",", "self", ".", "__image", ",", "self", ".", "__zoom", "# Restrict image surface size to prevent excessive use of memory", "while", "True", ":", "try", ":", "self", ".", "__limiting_multiplicator", "=", "1", "area", "=", "width", "*", "zoom", "*", "self", ".", "__zoom_multiplicator", "*", "height", "*", "zoom", "*", "self", ".", "__zoom_multiplicator", "if", "area", ">", "MAX_ALLOWED_AREA", ":", "self", ".", "__limiting_multiplicator", "=", "sqrt", "(", "MAX_ALLOWED_AREA", "/", "area", ")", "image", "=", "ImageSurface", "(", "self", ".", "__format", ",", "int", "(", "ceil", "(", "width", "*", "zoom", "*", "self", ".", "multiplicator", ")", ")", ",", "int", "(", "ceil", "(", "height", "*", "zoom", "*", "self", ".", "multiplicator", ")", ")", ")", "break", "# If we reach this point, the area was successfully allocated and we can break the loop", "except", "Error", ":", "MAX_ALLOWED_AREA", "*=", "0.8", "self", ".", "__set_cached_image", "(", "image", ",", "width", ",", "height", ",", "zoom", ",", "parameters", ")", "return", "False", ",", "self", ".", "__image", ",", "zoom" ]
Update a webhook .
def putWebhook ( self , hook_id , external_id , url , event_types ) : path = 'notification/webhook' payload = { 'id' : hook_id , 'externalId' : external_id , 'url' : url , 'eventTypes' : event_types } return self . rachio . put ( path , payload )
8,753
https://github.com/rfverbruggen/rachiopy/blob/c91abc9984f0f453e60fa905285c1b640c3390ae/rachiopy/notification.py#L36-L41
[ "def", "clear_stalled_files", "(", "self", ")", ":", "# FIXME: put lock in directory?", "CLEAR_AFTER", "=", "self", ".", "config", "[", "\"DELETE_STALLED_AFTER\"", "]", "minimum_age", "=", "time", ".", "time", "(", ")", "-", "CLEAR_AFTER", "for", "user_dir", "in", "self", ".", "UPLOAD_DIR", ".", "iterdir", "(", ")", ":", "if", "not", "user_dir", ".", "is_dir", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-directory in upload dir: %r\"", ",", "bytes", "(", "user_dir", ")", ")", "continue", "for", "content", "in", "user_dir", ".", "iterdir", "(", ")", ":", "if", "not", "content", ".", "is_file", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-file in user upload dir: %r\"", ",", "bytes", "(", "content", ")", ")", "continue", "if", "content", ".", "stat", "(", ")", ".", "st_ctime", "<", "minimum_age", ":", "content", ".", "unlink", "(", ")" ]
Remove a webhook .
def deleteWebhook ( self , hook_id ) : path = '/' . join ( [ 'notification' , 'webhook' , hook_id ] ) return self . rachio . delete ( path )
8,754
https://github.com/rfverbruggen/rachiopy/blob/c91abc9984f0f453e60fa905285c1b640c3390ae/rachiopy/notification.py#L43-L46
[ "def", "clear_stalled_files", "(", "self", ")", ":", "# FIXME: put lock in directory?", "CLEAR_AFTER", "=", "self", ".", "config", "[", "\"DELETE_STALLED_AFTER\"", "]", "minimum_age", "=", "time", ".", "time", "(", ")", "-", "CLEAR_AFTER", "for", "user_dir", "in", "self", ".", "UPLOAD_DIR", ".", "iterdir", "(", ")", ":", "if", "not", "user_dir", ".", "is_dir", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-directory in upload dir: %r\"", ",", "bytes", "(", "user_dir", ")", ")", "continue", "for", "content", "in", "user_dir", ".", "iterdir", "(", ")", ":", "if", "not", "content", ".", "is_file", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-file in user upload dir: %r\"", ",", "bytes", "(", "content", ")", ")", "continue", "if", "content", ".", "stat", "(", ")", ".", "st_ctime", "<", "minimum_age", ":", "content", ".", "unlink", "(", ")" ]
Get a webhook .
def get ( self , hook_id ) : path = '/' . join ( [ 'notification' , 'webhook' , hook_id ] ) return self . rachio . get ( path )
8,755
https://github.com/rfverbruggen/rachiopy/blob/c91abc9984f0f453e60fa905285c1b640c3390ae/rachiopy/notification.py#L48-L51
[ "def", "clear_stalled_files", "(", "self", ")", ":", "# FIXME: put lock in directory?", "CLEAR_AFTER", "=", "self", ".", "config", "[", "\"DELETE_STALLED_AFTER\"", "]", "minimum_age", "=", "time", ".", "time", "(", ")", "-", "CLEAR_AFTER", "for", "user_dir", "in", "self", ".", "UPLOAD_DIR", ".", "iterdir", "(", ")", ":", "if", "not", "user_dir", ".", "is_dir", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-directory in upload dir: %r\"", ",", "bytes", "(", "user_dir", ")", ")", "continue", "for", "content", "in", "user_dir", ".", "iterdir", "(", ")", ":", "if", "not", "content", ".", "is_file", "(", ")", ":", "logger", ".", "error", "(", "\"Found non-file in user upload dir: %r\"", ",", "bytes", "(", "content", ")", ")", "continue", "if", "content", ".", "stat", "(", ")", ".", "st_ctime", "<", "minimum_age", ":", "content", ".", "unlink", "(", ")" ]
Connect sets up the connection with the Horizon box .
def connect ( self ) : self . con = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) self . con . connect ( ( self . ip , self . port ) ) log . debug ( 'Connected with set-top box at %s:%s.' , self . ip , self . port )
8,756
https://github.com/OrangeTux/einder/blob/deb2c5f79a69b684257fe939659c3bd751556fd5/einder/client.py#L23-L29
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Disconnect closes the connection to the Horizon box .
def disconnect ( self ) : if self . con is not None : self . con . close ( ) log . debug ( 'Closed connection with with set-top box at %s:%s.' , self . ip , self . port )
8,757
https://github.com/OrangeTux/einder/blob/deb2c5f79a69b684257fe939659c3bd751556fd5/einder/client.py#L31-L36
[ "def", "_generate_noise_temporal", "(", "stimfunction_tr", ",", "tr_duration", ",", "dimensions", ",", "template", ",", "mask", ",", "noise_dict", ")", ":", "# Set up common parameters", "# How many TRs are there", "trs", "=", "len", "(", "stimfunction_tr", ")", "# What time points are sampled by a TR?", "timepoints", "=", "list", "(", "np", ".", "linspace", "(", "0", ",", "(", "trs", "-", "1", ")", "*", "tr_duration", ",", "trs", ")", ")", "# Preset the volume", "noise_volume", "=", "np", ".", "zeros", "(", "(", "dimensions", "[", "0", "]", ",", "dimensions", "[", "1", "]", ",", "dimensions", "[", "2", "]", ",", "trs", ")", ")", "# Generate the drift noise", "if", "noise_dict", "[", "'drift_sigma'", "]", "!=", "0", ":", "# Calculate the drift time course", "noise", "=", "_generate_noise_temporal_drift", "(", "trs", ",", "tr_duration", ",", ")", "# Create a volume with the drift properties", "volume", "=", "np", ".", "ones", "(", "dimensions", ")", "# Combine the volume and noise", "noise_volume", "+=", "np", ".", "multiply", ".", "outer", "(", "volume", ",", "noise", ")", "*", "noise_dict", "[", "'drift_sigma'", "]", "# Generate the physiological noise", "if", "noise_dict", "[", "'physiological_sigma'", "]", "!=", "0", ":", "# Calculate the physiological time course", "noise", "=", "_generate_noise_temporal_phys", "(", "timepoints", ",", ")", "# Create a brain shaped volume with similar smoothing properties", "volume", "=", "_generate_noise_spatial", "(", "dimensions", "=", "dimensions", ",", "mask", "=", "mask", ",", "fwhm", "=", "noise_dict", "[", "'fwhm'", "]", ",", ")", "# Combine the volume and noise", "noise_volume", "+=", "np", ".", "multiply", ".", "outer", "(", "volume", ",", "noise", ")", "*", "noise_dict", "[", "'physiological_sigma'", "]", "# Generate the AR noise", "if", "noise_dict", "[", "'auto_reg_sigma'", "]", "!=", "0", ":", "# Calculate the AR time course volume", "noise", "=", "_generate_noise_temporal_autoregression", "(", "timepoints", ",", 
"noise_dict", ",", "dimensions", ",", "mask", ",", ")", "# Combine the volume and noise", "noise_volume", "+=", "noise", "*", "noise_dict", "[", "'auto_reg_sigma'", "]", "# Generate the task related noise", "if", "noise_dict", "[", "'task_sigma'", "]", "!=", "0", "and", "np", ".", "sum", "(", "stimfunction_tr", ")", ">", "0", ":", "# Calculate the task based noise time course", "noise", "=", "_generate_noise_temporal_task", "(", "stimfunction_tr", ",", ")", "# Create a brain shaped volume with similar smoothing properties", "volume", "=", "_generate_noise_spatial", "(", "dimensions", "=", "dimensions", ",", "mask", "=", "mask", ",", "fwhm", "=", "noise_dict", "[", "'fwhm'", "]", ",", ")", "# Combine the volume and noise", "noise_volume", "+=", "np", ".", "multiply", ".", "outer", "(", "volume", ",", "noise", ")", "*", "noise_dict", "[", "'task_sigma'", "]", "# Finally, z score each voxel so things mix nicely", "noise_volume", "=", "stats", ".", "zscore", "(", "noise_volume", ",", "3", ")", "# If it is a nan it is because you just divided by zero (since some", "# voxels are zeros in the template)", "noise_volume", "[", "np", ".", "isnan", "(", "noise_volume", ")", "]", "=", "0", "return", "noise_volume" ]
Use the magic of a unicorn and summon the set - top box to listen to us .
def authorize ( self ) : # Read the version of the set-top box and write it back. Why? I've no # idea. version = self . con . makefile ( ) . readline ( ) self . con . send ( version . encode ( ) ) # The set-top box returns with 2 bytes. I've no idea what they mean. self . con . recv ( 2 ) # The following reads and writes are used to authenticate. But I don't # fully understand what is going on. self . con . send ( struct . pack ( '>B' , 1 ) ) msg = self . con . recv ( 4 ) response = struct . unpack ( ">I" , msg ) if response [ 0 ] != 0 : log . debug ( "Failed to authorize with set-top at %s:%s." , self . ip , self . port ) raise AuthenticationError ( ) # Dunno where this is good for. But otherwise the client doesn't work. self . con . send ( b'0' ) log . debug ( 'Authorized succesfully with set-top box at %s:%s.' , self . ip , self . port )
8,758
https://github.com/OrangeTux/einder/blob/deb2c5f79a69b684257fe939659c3bd751556fd5/einder/client.py#L38-L78
[ "def", "purgeRelationship", "(", "self", ",", "pid", ",", "subject", ",", "predicate", ",", "object", ",", "isLiteral", "=", "False", ",", "datatype", "=", "None", ")", ":", "http_args", "=", "{", "'subject'", ":", "subject", ",", "'predicate'", ":", "predicate", ",", "'object'", ":", "object", ",", "'isLiteral'", ":", "isLiteral", "}", "if", "datatype", "is", "not", "None", ":", "http_args", "[", "'datatype'", "]", "=", "datatype", "url", "=", "'objects/%(pid)s/relationships'", "%", "{", "'pid'", ":", "pid", "}", "response", "=", "self", ".", "delete", "(", "url", ",", "params", "=", "http_args", ")", "# should have a status code of 200;", "# response body text indicates if a relationship was purged or not", "return", "response", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", "and", "response", ".", "content", "==", "b'true'" ]
Send a key to the Horizon box .
def send_key ( self , key ) : cmd = struct . pack ( ">BBBBBBH" , 4 , 1 , 0 , 0 , 0 , 0 , key ) self . con . send ( cmd ) cmd = struct . pack ( ">BBBBBBH" , 4 , 0 , 0 , 0 , 0 , 0 , key ) self . con . send ( cmd )
8,759
https://github.com/OrangeTux/einder/blob/deb2c5f79a69b684257fe939659c3bd751556fd5/einder/client.py#L80-L86
[ "def", "describe_dish", "(", "self", ")", ":", "resp", "=", "random", ".", "choice", "(", "foodpreparations", ")", "if", "random", ".", "random", "(", ")", "<", ".85", ":", "resp", "=", "self", ".", "describe_ingredient", "(", ")", "+", "' '", "+", "resp", "if", "random", ".", "random", "(", ")", "<", ".2", ":", "resp", "=", "self", ".", "describe_ingredient", "(", ")", "+", "' and '", "+", "resp", "if", "random", ".", "random", "(", ")", "<", ".2", ":", "resp", "=", "self", ".", "describe_ingredient", "(", ")", "+", "', '", "+", "resp", "if", "random", ".", "random", "(", ")", "<", ".5", ":", "resp", "+=", "\" with \"", "+", "self", ".", "describe_additive", "(", ")", "elif", "random", ".", "random", "(", ")", "<", ".5", ":", "resp", "+=", "\" with \"", "+", "self", ".", "describe_ingredient", "(", ")", "return", "self", ".", "articleize", "(", "resp", ")" ]
Get power status of device .
def is_powered_on ( self ) : host = '{0}:62137' . format ( self . ip ) try : HTTPConnection ( host , timeout = 2 ) . request ( 'GET' , '/DeviceDescription.xml' ) except ( ConnectionRefusedError , socket . timeout ) : log . debug ( 'Set-top box at %s:%s is powered off.' , self . ip , self . port ) return False log . debug ( 'Set-top box at %s:%s is powered on.' , self . ip , self . port ) return True
8,760
https://github.com/OrangeTux/einder/blob/deb2c5f79a69b684257fe939659c3bd751556fd5/einder/client.py#L88-L112
[ "def", "_put_bucket_cors", "(", "self", ")", ":", "if", "self", ".", "s3props", "[", "'cors'", "]", "[", "'enabled'", "]", "and", "self", ".", "s3props", "[", "'website'", "]", "[", "'enabled'", "]", ":", "cors_config", "=", "{", "}", "cors_rules", "=", "[", "]", "for", "each_rule", "in", "self", ".", "s3props", "[", "'cors'", "]", "[", "'cors_rules'", "]", ":", "cors_rules", ".", "append", "(", "{", "'AllowedHeaders'", ":", "each_rule", "[", "'cors_headers'", "]", ",", "'AllowedMethods'", ":", "each_rule", "[", "'cors_methods'", "]", ",", "'AllowedOrigins'", ":", "each_rule", "[", "'cors_origins'", "]", ",", "'ExposeHeaders'", ":", "each_rule", "[", "'cors_expose_headers'", "]", ",", "'MaxAgeSeconds'", ":", "each_rule", "[", "'cors_max_age'", "]", "}", ")", "cors_config", "=", "{", "'CORSRules'", ":", "cors_rules", "}", "LOG", ".", "debug", "(", "cors_config", ")", "_response", "=", "self", ".", "s3client", ".", "put_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ",", "CORSConfiguration", "=", "cors_config", ")", "else", ":", "_response", "=", "self", ".", "s3client", ".", "delete_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ")", "LOG", ".", "debug", "(", "'Response setting up S3 CORS: %s'", ",", "_response", ")", "LOG", ".", "info", "(", "'S3 CORS configuration updated'", ")" ]
Power on the set - top box .
def power_on ( self ) : if not self . is_powered_on ( ) : log . debug ( 'Powering on set-top box at %s:%s.' , self . ip , self . port ) self . send_key ( keys . POWER )
8,761
https://github.com/OrangeTux/einder/blob/deb2c5f79a69b684257fe939659c3bd751556fd5/einder/client.py#L114-L118
[ "def", "quandl_bundle", "(", "environ", ",", "asset_db_writer", ",", "minute_bar_writer", ",", "daily_bar_writer", ",", "adjustment_writer", ",", "calendar", ",", "start_session", ",", "end_session", ",", "cache", ",", "show_progress", ",", "output_dir", ")", ":", "api_key", "=", "environ", ".", "get", "(", "'QUANDL_API_KEY'", ")", "if", "api_key", "is", "None", ":", "raise", "ValueError", "(", "\"Please set your QUANDL_API_KEY environment variable and retry.\"", ")", "raw_data", "=", "fetch_data_table", "(", "api_key", ",", "show_progress", ",", "environ", ".", "get", "(", "'QUANDL_DOWNLOAD_ATTEMPTS'", ",", "5", ")", ")", "asset_metadata", "=", "gen_asset_metadata", "(", "raw_data", "[", "[", "'symbol'", ",", "'date'", "]", "]", ",", "show_progress", ")", "asset_db_writer", ".", "write", "(", "asset_metadata", ")", "symbol_map", "=", "asset_metadata", ".", "symbol", "sessions", "=", "calendar", ".", "sessions_in_range", "(", "start_session", ",", "end_session", ")", "raw_data", ".", "set_index", "(", "[", "'date'", ",", "'symbol'", "]", ",", "inplace", "=", "True", ")", "daily_bar_writer", ".", "write", "(", "parse_pricing_and_vol", "(", "raw_data", ",", "sessions", ",", "symbol_map", ")", ",", "show_progress", "=", "show_progress", ")", "raw_data", ".", "reset_index", "(", "inplace", "=", "True", ")", "raw_data", "[", "'symbol'", "]", "=", "raw_data", "[", "'symbol'", "]", ".", "astype", "(", "'category'", ")", "raw_data", "[", "'sid'", "]", "=", "raw_data", ".", "symbol", ".", "cat", ".", "codes", "adjustment_writer", ".", "write", "(", "splits", "=", "parse_splits", "(", "raw_data", "[", "[", "'sid'", ",", "'date'", ",", "'split_ratio'", ",", "]", "]", ".", "loc", "[", "raw_data", ".", "split_ratio", "!=", "1", "]", ",", "show_progress", "=", "show_progress", ")", ",", "dividends", "=", "parse_dividends", "(", "raw_data", "[", "[", "'sid'", ",", "'date'", ",", "'ex_dividend'", ",", "]", "]", ".", "loc", "[", "raw_data", ".", "ex_dividend", "!=", "0", "]", 
",", "show_progress", "=", "show_progress", ")", ")" ]
Select a channel .
def select_channel ( self , channel ) : for i in str ( channel ) : key = int ( i ) + 0xe300 self . send_key ( key )
8,762
https://github.com/OrangeTux/einder/blob/deb2c5f79a69b684257fe939659c3bd751556fd5/einder/client.py#L126-L133
[ "def", "write_backup_state_to_json_file", "(", "self", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "state_file_path", "=", "self", ".", "config", "[", "\"json_state_file_path\"", "]", "self", ".", "state", "[", "\"walreceivers\"", "]", "=", "{", "key", ":", "{", "\"latest_activity\"", ":", "value", ".", "latest_activity", ",", "\"running\"", ":", "value", ".", "running", ",", "\"last_flushed_lsn\"", ":", "value", ".", "last_flushed_lsn", "}", "for", "key", ",", "value", "in", "self", ".", "walreceivers", ".", "items", "(", ")", "}", "self", ".", "state", "[", "\"pg_receivexlogs\"", "]", "=", "{", "key", ":", "{", "\"latest_activity\"", ":", "value", ".", "latest_activity", ",", "\"running\"", ":", "value", ".", "running", "}", "for", "key", ",", "value", "in", "self", ".", "receivexlogs", ".", "items", "(", ")", "}", "self", ".", "state", "[", "\"pg_basebackups\"", "]", "=", "{", "key", ":", "{", "\"latest_activity\"", ":", "value", ".", "latest_activity", ",", "\"running\"", ":", "value", ".", "running", "}", "for", "key", ",", "value", "in", "self", ".", "basebackups", ".", "items", "(", ")", "}", "self", ".", "state", "[", "\"compressors\"", "]", "=", "[", "compressor", ".", "state", "for", "compressor", "in", "self", ".", "compressors", "]", "self", ".", "state", "[", "\"transfer_agents\"", "]", "=", "[", "ta", ".", "state", "for", "ta", "in", "self", ".", "transfer_agents", "]", "self", ".", "state", "[", "\"queues\"", "]", "=", "{", "\"compression_queue\"", ":", "self", ".", "compression_queue", ".", "qsize", "(", ")", ",", "\"transfer_queue\"", ":", "self", ".", "transfer_queue", ".", "qsize", "(", ")", ",", "}", "self", ".", "log", ".", "debug", "(", "\"Writing JSON state file to %r\"", ",", "state_file_path", ")", "write_json_file", "(", "state_file_path", ",", "self", ".", "state", ")", "self", ".", "log", ".", "debug", "(", "\"Wrote JSON state file to disk, took %.4fs\"", ",", "time", ".", "time", "(", ")", "-", "start_time", ")" ]
Calculate HMAC value of message using WEBHOOKS_SECRET_KEY .
def get_hmac ( message ) : key = current_app . config [ 'WEBHOOKS_SECRET_KEY' ] hmac_value = hmac . new ( key . encode ( 'utf-8' ) if hasattr ( key , 'encode' ) else key , message . encode ( 'utf-8' ) if hasattr ( message , 'encode' ) else message , sha1 ) . hexdigest ( ) return hmac_value
8,763
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/signatures.py#L33-L44
[ "def", "set_batch", "(", "self", ",", "data", ")", ":", "# fetch existing documents to get current revisions", "rows", "=", "self", ".", "bucket", ".", "view", "(", "\"_all_docs\"", ",", "keys", "=", "data", ".", "keys", "(", ")", ",", "include_docs", "=", "True", ")", "existing", "=", "{", "}", "for", "row", "in", "rows", ":", "key", "=", "row", ".", "id", "if", "key", "and", "not", "data", "[", "key", "]", ".", "has_key", "(", "\"_rev\"", ")", ":", "data", "[", "key", "]", "[", "\"_rev\"", "]", "=", "row", ".", "doc", "[", "\"_rev\"", "]", "for", "id", ",", "item", "in", "data", ".", "items", "(", ")", ":", "data", "[", "id", "]", "[", "\"_id\"", "]", "=", "id", "revs", "=", "{", "}", "for", "success", ",", "docid", ",", "rev_or_exc", "in", "self", ".", "bucket", ".", "update", "(", "data", ".", "values", "(", ")", ")", ":", "if", "not", "success", "and", "self", ".", "logger", ":", "self", ".", "logger", ".", "error", "(", "\"Document update conflict (batch) '%s', %s\"", "%", "(", "docid", ",", "rev_or_exc", ")", ")", "elif", "success", ":", "revs", "[", "docid", "]", "=", "rev_or_exc", "return", "revs" ]
Check X - Hub - Signature used by GitHub to sign requests .
def check_x_hub_signature ( signature , message ) : hmac_value = get_hmac ( message ) if hmac_value == signature or ( signature . find ( '=' ) > - 1 and hmac_value == signature [ signature . find ( '=' ) + 1 : ] ) : return True return False
8,764
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/signatures.py#L47-L58
[ "def", "size", "(", "filename", ",", "use_cache_only", "=", "False", ")", ":", "filename", "=", "unmap_file", "(", "filename", ")", "if", "filename", "not", "in", "file_cache", ":", "if", "not", "use_cache_only", ":", "cache_file", "(", "filename", ")", "if", "filename", "not", "in", "file_cache", ":", "return", "None", "pass", "return", "len", "(", "file_cache", "[", "filename", "]", ".", "lines", "[", "'plain'", "]", ")" ]
Get all active projects .
async def list_all_active_projects ( self , page_size = 1000 ) : url = f'{self.BASE_URL}/{self.api_version}/projects' params = { 'pageSize' : page_size } responses = await self . list_all ( url , params ) projects = self . _parse_rsps_for_projects ( responses ) return [ project for project in projects if project . get ( 'lifecycleState' , '' ) . lower ( ) == 'active' ]
8,765
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/clients/gcrm.py#L85-L105
[ "def", "dump", "(", "self", ")", ":", "assert", "self", ".", "database", "is", "not", "None", "cmd", "=", "\"SELECT count from {} WHERE rowid={}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "STATE_INFO_ROW", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "self", ".", "_from_sqlite", "(", "ret", "[", "0", "]", "[", "0", "]", ")", "+", "self", ".", "inserts", "if", "count", ">", "self", ".", "row_limit", ":", "msg", "=", "\"cleaning up state, this might take a while.\"", "logger", ".", "warning", "(", "msg", ")", "delete", "=", "count", "-", "self", ".", "row_limit", "delete", "+=", "int", "(", "self", ".", "row_limit", "*", "(", "self", ".", "row_cleanup_quota", "/", "100.0", ")", ")", "cmd", "=", "(", "\"DELETE FROM {} WHERE timestamp IN (\"", "\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"", ")", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ",", "self", ".", "STATE_TABLE", ",", "delete", ")", ")", "self", ".", "_vacuum", "(", ")", "cmd", "=", "\"SELECT COUNT(*) FROM {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "ret", "[", "0", "]", "[", "0", "]", "cmd", "=", "\"UPDATE {} SET count = {} WHERE rowid = {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "_to_sqlite", "(", "count", ")", ",", "self", ".", "STATE_INFO_ROW", ",", ")", ")", "self", ".", "_update_cache_directory_state", "(", ")", "self", ".", "database", ".", "commit", "(", ")", "self", ".", "cursor", ".", "close", "(", ")", "self", ".", "database", ".", "close", "(", ")", 
"self", ".", "database", "=", "None", "self", ".", "cursor", "=", "None", "self", ".", "inserts", "=", "0" ]
Add another module s bindings to a binder .
def install ( self , binder , module ) : ModuleAdapter ( module , self . _injector ) . configure ( binder )
8,766
https://github.com/dstanek/snake-guice/blob/d20b62de3ee31e84119c801756398c35ed803fb3/snakeguice/modules.py#L23-L25
[ "def", "_check_timeouts", "(", "self", ")", ":", "for", "conn_id", ",", "data", "in", "self", ".", "_connections", ".", "items", "(", ")", ":", "if", "'timeout'", "in", "data", "and", "data", "[", "'timeout'", "]", ".", "expired", ":", "if", "data", "[", "'state'", "]", "==", "self", ".", "Connecting", ":", "self", ".", "finish_connection", "(", "conn_id", ",", "False", ",", "'Connection attempt timed out'", ")", "elif", "data", "[", "'state'", "]", "==", "self", ".", "Disconnecting", ":", "self", ".", "finish_disconnection", "(", "conn_id", ",", "False", ",", "'Disconnection attempt timed out'", ")", "elif", "data", "[", "'state'", "]", "==", "self", ".", "InProgress", ":", "if", "data", "[", "'microstate'", "]", "==", "'rpc'", ":", "self", ".", "finish_operation", "(", "conn_id", ",", "False", ",", "'RPC timed out without response'", ",", "None", ",", "None", ")", "elif", "data", "[", "'microstate'", "]", "==", "'open_interface'", ":", "self", ".", "finish_operation", "(", "conn_id", ",", "False", ",", "'Open interface request timed out'", ")" ]
Expose the child injector to the parent inject for a binding .
def expose ( self , binder , interface , annotation = None ) : private_module = self class Provider ( object ) : def get ( self ) : return private_module . private_injector . get_instance ( interface , annotation ) self . original_binder . bind ( interface , annotated_with = annotation , to_provider = Provider )
8,767
https://github.com/dstanek/snake-guice/blob/d20b62de3ee31e84119c801756398c35ed803fb3/snakeguice/modules.py#L57-L66
[ "def", "CheckSupportedFormat", "(", "cls", ",", "path", ",", "check_readable_only", "=", "False", ")", ":", "try", ":", "connection", "=", "sqlite3", ".", "connect", "(", "path", ",", "detect_types", "=", "sqlite3", ".", "PARSE_DECLTYPES", "|", "sqlite3", ".", "PARSE_COLNAMES", ")", "cursor", "=", "connection", ".", "cursor", "(", ")", "query", "=", "'SELECT * FROM metadata'", "cursor", ".", "execute", "(", "query", ")", "metadata_values", "=", "{", "row", "[", "0", "]", ":", "row", "[", "1", "]", "for", "row", "in", "cursor", ".", "fetchall", "(", ")", "}", "cls", ".", "_CheckStorageMetadata", "(", "metadata_values", ",", "check_readable_only", "=", "check_readable_only", ")", "connection", ".", "close", "(", ")", "result", "=", "True", "except", "(", "IOError", ",", "sqlite3", ".", "DatabaseError", ")", ":", "result", "=", "False", "return", "result" ]
Actually run all the validations .
def _call_validators ( self ) : msg = [ ] msg . extend ( self . _validate_keyfile ( ) ) msg . extend ( self . _validate_dns_zone ( ) ) msg . extend ( self . _validate_retries ( ) ) msg . extend ( self . _validate_project ( ) ) return msg
8,768
https://github.com/spotify/gordon-gcp/blob/5ab19e3c2fe6ace72ee91e2ef1a1326f90b805da/src/gordon_gcp/plugins/service/enricher.py#L93-L104
[ "def", "SetBackingStore", "(", "cls", ",", "backing", ")", ":", "if", "backing", "not", "in", "[", "'json'", ",", "'sqlite'", ",", "'memory'", "]", ":", "raise", "ArgumentError", "(", "\"Unknown backing store type that is not json or sqlite\"", ",", "backing", "=", "backing", ")", "if", "backing", "==", "'json'", ":", "cls", ".", "BackingType", "=", "JSONKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.json'", "elif", "backing", "==", "'memory'", ":", "cls", ".", "BackingType", "=", "InMemoryKVStore", "cls", ".", "BackingFileName", "=", "None", "else", ":", "cls", ".", "BackingType", "=", "SQLiteKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.db'" ]
Parses the relevant PG rdf file
def parse_rdf ( self ) : try : self . metadata = pg_rdf_to_json ( self . rdf_path ) except IOError as e : raise NoRDFError ( e ) if not self . authnames ( ) : self . author = '' elif len ( self . authnames ( ) ) == 1 : self . author = self . authnames ( ) [ 0 ] else : self . author = "Various"
8,769
https://github.com/gitenberg-dev/gitberg/blob/3f6db8b5a22ccdd2110d3199223c30db4e558b5c/gitenberg/util/catalog.py#L93-L106
[ "def", "remove_from_space_size", "(", "self", ",", "removal_bytes", ")", ":", "# type: (int) -> None", "if", "not", "self", ".", "_initialized", ":", "raise", "pycdlibexception", ".", "PyCdlibInternalError", "(", "'This Volume Descriptor is not yet initialized'", ")", "# The 'removal' parameter is expected to be in bytes, but the space", "# size we track is in extents. Round up to the next extent.", "self", ".", "space_size", "-=", "utils", ".", "ceiling_div", "(", "removal_bytes", ",", "self", ".", "log_block_size", ")" ]
Ensures a fresh - enough RDF file is downloaded and extracted .
def download_rdf ( self , force = False ) : if self . downloading : return True if not force and ( os . path . exists ( RDF_PATH ) and ( time . time ( ) - os . path . getmtime ( RDF_PATH ) ) < RDF_MAX_AGE ) : return False self . downloading = True logging . info ( 'Re-downloading RDF library from %s' % RDF_URL ) try : shutil . rmtree ( os . path . join ( self . rdf_library_dir , 'cache' ) ) except OSError as e : # Ignore not finding the directory to remove. if e . errno != errno . ENOENT : raise try : with open ( RDF_PATH , 'w' ) as f : with requests . get ( RDF_URL , stream = True ) as r : shutil . copyfileobj ( r . raw , f ) except requests . exceptions . RequestException as e : logging . error ( e ) return True try : with tarfile . open ( RDF_PATH , 'r' ) as f : f . extractall ( self . rdf_library_dir ) except tarfile . TarError as e : logging . error ( e ) try : os . unlink ( RDF_PATH ) except : pass return True self . downloading = False return False
8,770
https://github.com/gitenberg-dev/gitberg/blob/3f6db8b5a22ccdd2110d3199223c30db4e558b5c/gitenberg/util/catalog.py#L134-L172
[ "async", "def", "async_get_bridgeid", "(", "session", ",", "host", ",", "port", ",", "api_key", ",", "*", "*", "kwargs", ")", ":", "url", "=", "'http://{}:{}/api/{}/config'", ".", "format", "(", "host", ",", "str", "(", "port", ")", ",", "api_key", ")", "response", "=", "await", "async_request", "(", "session", ".", "get", ",", "url", ")", "bridgeid", "=", "response", "[", "'bridgeid'", "]", "_LOGGER", ".", "info", "(", "\"Bridge id: %s\"", ",", "bridgeid", ")", "return", "bridgeid" ]
Start the background twisted thread and create the WAMP connection
def run ( self , url = DEFAULT_AUTOBAHN_ROUTER , realm = DEFAULT_AUTOBAHN_REALM , authmethods = None , authid = None , authrole = None , authextra = None , blocking = False , callback = None , * * kwargs ) : _init_crochet ( in_twisted = False ) self . _bootstrap ( blocking , url = url , realm = realm , authmethods = authmethods , authid = authid , authrole = authrole , authextra = authextra , * * kwargs ) if callback : callback ( ) self . _callbacks_runner . start ( )
8,771
https://github.com/Scille/autobahn-sync/blob/d75fceff0d1aee61fa6dd0168eb1cd40794ad827/autobahn_sync/core.py#L91-L110
[ "def", "parse_networking_file", "(", ")", ":", "pairs", "=", "dict", "(", ")", "allocated_subnets", "=", "[", "]", "try", ":", "with", "open", "(", "VMWARE_NETWORKING_FILE", ",", "\"r\"", ",", "encoding", "=", "\"utf-8\"", ")", "as", "f", ":", "version", "=", "f", ".", "readline", "(", ")", "for", "line", "in", "f", ".", "read", "(", ")", ".", "splitlines", "(", ")", ":", "try", ":", "_", ",", "key", ",", "value", "=", "line", ".", "split", "(", "' '", ",", "3", ")", "key", "=", "key", ".", "strip", "(", ")", "value", "=", "value", ".", "strip", "(", ")", "pairs", "[", "key", "]", "=", "value", "if", "key", ".", "endswith", "(", "\"HOSTONLY_SUBNET\"", ")", ":", "allocated_subnets", ".", "append", "(", "value", ")", "except", "ValueError", ":", "raise", "SystemExit", "(", "\"Error while parsing {}\"", ".", "format", "(", "VMWARE_NETWORKING_FILE", ")", ")", "except", "OSError", "as", "e", ":", "raise", "SystemExit", "(", "\"Cannot open {}: {}\"", ".", "format", "(", "VMWARE_NETWORKING_FILE", ",", "e", ")", ")", "return", "version", ",", "pairs", ",", "allocated_subnets" ]
Terminate the WAMP session
def stop ( self ) : if not self . _started : raise NotRunningError ( "This AutobahnSync instance is not started" ) self . _callbacks_runner . stop ( ) self . _started = False
8,772
https://github.com/Scille/autobahn-sync/blob/d75fceff0d1aee61fa6dd0168eb1cd40794ad827/autobahn_sync/core.py#L112-L123
[ "def", "count", "(", "self", ",", "searchString", ",", "category", "=", "\"\"", ",", "math", "=", "False", ",", "game", "=", "False", ",", "searchFiles", "=", "False", ",", "extension", "=", "\"\"", ")", ":", "fileData", "=", "{", "}", "nameData", "=", "{", "}", "#Search the index", "if", "searchFiles", ":", "fileData", "=", "self", ".", "searchNamesIndex", "(", "self", ".", "fileIndex", ",", "fileData", ",", "searchString", ",", "category", ",", "math", ",", "game", ",", "extension", ")", "else", ":", "nameData", "=", "self", ".", "searchNamesIndex", "(", "self", ".", "nameIndex", ",", "nameData", ",", "searchString", ")", "#Now search the other index", "if", "searchFiles", ":", "nameData", ",", "fileData", "=", "self", ".", "searchFilesIndex", "(", "fileData", ",", "nameData", ",", "self", ".", "nameIndex", ",", "searchString", ")", "else", ":", "fileData", ",", "nameData", "=", "self", ".", "searchFilesIndex", "(", "nameData", ",", "fileData", ",", "self", ".", "fileIndex", ",", "searchString", ",", "category", ",", "math", ",", "game", ",", "extension", ")", "# Bail out if we failed to do either of those things.", "if", "fileData", "is", "None", "or", "nameData", "is", "None", ":", "self", ".", "repo", ".", "printd", "(", "\"Error: failed to load one or more of the index files for this repo. Exiting.\"", ")", "self", ".", "repo", ".", "printd", "(", "\"Please run 'calcpkg update' and retry this command.\"", ")", "sys", ".", "exit", "(", "1", ")", "#Now obtain a count (exclude \"none\" elements)", "count", "=", "0", "for", "element", "in", "nameData", ":", "if", "not", "nameData", "[", "element", "]", "is", "None", ":", "count", "+=", "1", "self", ".", "repo", ".", "printd", "(", "\"Search for '\"", "+", "searchString", "+", "\"' returned \"", "+", "str", "(", "count", ")", "+", "\" result(s) in \"", "+", "self", ".", "repo", ".", "name", ")", "return", "count" ]
Process event in Celery .
def process_event ( self , event_id ) : with db . session . begin_nested ( ) : event = Event . query . get ( event_id ) event . _celery_task = self # internal binding to a Celery task event . receiver . run ( event ) # call run directly to avoid circular calls flag_modified ( event , 'response' ) flag_modified ( event , 'response_headers' ) db . session . add ( event ) db . session . commit ( )
8,773
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L143-L152
[ "def", "create_chapter_from_url", "(", "self", ",", "url", ",", "title", "=", "None", ")", ":", "try", ":", "request_object", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "self", ".", "request_headers", ",", "allow_redirects", "=", "False", ")", "except", "(", "requests", ".", "exceptions", ".", "MissingSchema", ",", "requests", ".", "exceptions", ".", "ConnectionError", ")", ":", "raise", "ValueError", "(", "\"%s is an invalid url or no network connection\"", "%", "url", ")", "except", "requests", ".", "exceptions", ".", "SSLError", ":", "raise", "ValueError", "(", "\"Url %s doesn't have valid SSL certificate\"", "%", "url", ")", "unicode_string", "=", "request_object", ".", "text", "return", "self", ".", "create_chapter_from_string", "(", "unicode_string", ",", "url", ",", "title", ")" ]
Return JSON column .
def _json_column ( * * kwargs ) : return db . Column ( JSONType ( ) . with_variant ( postgresql . JSON ( none_as_null = True ) , 'postgresql' , ) , nullable = True , * * kwargs )
8,774
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L197-L206
[ "def", "get_robot_variables", "(", ")", ":", "prefix", "=", "'ROBOT_'", "variables", "=", "[", "]", "def", "safe_str", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "six", ".", "text_type", ")", ":", "return", "s", "else", ":", "return", "six", ".", "text_type", "(", "s", ",", "'utf-8'", ",", "'ignore'", ")", "for", "key", "in", "os", ".", "environ", ":", "if", "key", ".", "startswith", "(", "prefix", ")", "and", "len", "(", "key", ")", ">", "len", "(", "prefix", ")", ":", "variables", ".", "append", "(", "safe_str", "(", "'%s:%s'", "%", "(", "key", "[", "len", "(", "prefix", ")", ":", "]", ",", "os", ".", "environ", "[", "key", "]", ")", ",", ")", ")", "return", "variables" ]
Mark event as deleted .
def delete ( self , event ) : assert self . receiver_id == event . receiver_id event . response = { 'status' : 410 , 'message' : 'Gone.' } event . response_code = 410
8,775
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L81-L85
[ "async", "def", "get_access_token", "(", "self", ",", "oauth_verifier", ",", "request_token", "=", "None", ",", "loop", "=", "None", ",", "*", "*", "params", ")", ":", "# Possibility to provide REQUEST DATA to the method", "if", "not", "isinstance", "(", "oauth_verifier", ",", "str", ")", "and", "self", ".", "shared_key", "in", "oauth_verifier", ":", "oauth_verifier", "=", "oauth_verifier", "[", "self", ".", "shared_key", "]", "if", "request_token", "and", "self", ".", "oauth_token", "!=", "request_token", ":", "raise", "web", ".", "HTTPBadRequest", "(", "reason", "=", "'Failed to obtain OAuth 1.0 access token. '", "'Request token is invalid'", ")", "data", "=", "await", "self", ".", "request", "(", "'POST'", ",", "self", ".", "access_token_url", ",", "params", "=", "{", "'oauth_verifier'", ":", "oauth_verifier", ",", "'oauth_token'", ":", "request_token", "}", ",", "loop", "=", "loop", ")", "self", ".", "oauth_token", "=", "data", ".", "get", "(", "'oauth_token'", ")", "self", ".", "oauth_token_secret", "=", "data", ".", "get", "(", "'oauth_token_secret'", ")", "return", "self", ".", "oauth_token", ",", "self", ".", "oauth_token_secret", ",", "data" ]
Get URL for webhook .
def get_hook_url ( self , access_token ) : # Allow overwriting hook URL in debug mode. if ( current_app . debug or current_app . testing ) and current_app . config . get ( 'WEBHOOKS_DEBUG_RECEIVER_URLS' , None ) : url_pattern = current_app . config [ 'WEBHOOKS_DEBUG_RECEIVER_URLS' ] . get ( self . receiver_id , None ) if url_pattern : return url_pattern % dict ( token = access_token ) return url_for ( 'invenio_webhooks.event_list' , receiver_id = self . receiver_id , access_token = access_token , _external = True )
8,776
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L87-L112
[ "def", "random_sample", "(", "self", ",", "num_samples", ",", "df", "=", "None", ",", "replace", "=", "False", ",", "weights", "=", "None", ",", "random_state", "=", "100", ",", "axis", "=", "'row'", ")", ":", "if", "df", "is", "None", ":", "df", "=", "self", ".", "dat_to_df", "(", ")", "if", "axis", "==", "'row'", ":", "axis", "=", "0", "if", "axis", "==", "'col'", ":", "axis", "=", "1", "df", "=", "self", ".", "export_df", "(", ")", "df", "=", "df", ".", "sample", "(", "n", "=", "num_samples", ",", "replace", "=", "replace", ",", "weights", "=", "weights", ",", "random_state", "=", "random_state", ",", "axis", "=", "axis", ")", "self", ".", "load_df", "(", "df", ")" ]
Check signature of signed request .
def check_signature ( self ) : if not self . signature : return True signature_value = request . headers . get ( self . signature , None ) if signature_value : validator = 'check_' + re . sub ( r'[-]' , '_' , self . signature ) . lower ( ) check_signature = getattr ( signatures , validator ) if check_signature ( signature_value , request . data ) : return True return False
8,777
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L117-L127
[ "def", "sampleCellsWithinColumns", "(", "numCellPairs", ",", "cellsPerColumn", ",", "numColumns", ",", "seed", "=", "42", ")", ":", "np", ".", "random", ".", "seed", "(", "seed", ")", "cellPairs", "=", "[", "]", "for", "i", "in", "range", "(", "numCellPairs", ")", ":", "randCol", "=", "np", ".", "random", ".", "randint", "(", "numColumns", ")", "randCells", "=", "np", ".", "random", ".", "choice", "(", "np", ".", "arange", "(", "cellsPerColumn", ")", ",", "(", "2", ",", ")", ",", "replace", "=", "False", ")", "cellsPair", "=", "randCol", "*", "cellsPerColumn", "+", "randCells", "cellPairs", ".", "append", "(", "cellsPair", ")", "return", "cellPairs" ]
Extract payload from request .
def extract_payload ( self ) : if not self . check_signature ( ) : raise InvalidSignature ( 'Invalid Signature' ) if request . is_json : # Request.get_json() could be first called with silent=True. delete_cached_json_for ( request ) return request . get_json ( silent = False , cache = False ) elif request . content_type == 'application/x-www-form-urlencoded' : return dict ( request . form ) raise InvalidPayload ( request . content_type )
8,778
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L129-L139
[ "def", "materialize", "(", "self", ",", "ref", ",", "table_name", "=", "None", ",", "index_columns", "=", "None", ",", "logger", "=", "None", ")", ":", "from", "ambry", ".", "library", "import", "Library", "assert", "isinstance", "(", "self", ".", "_library", ",", "Library", ")", "logger", ".", "debug", "(", "'Materializing warehouse partition.\\n partition: {}'", ".", "format", "(", "ref", ")", ")", "partition", "=", "self", ".", "_library", ".", "partition", "(", "ref", ")", "connection", "=", "self", ".", "_backend", ".", "_get_connection", "(", ")", "return", "self", ".", "_backend", ".", "install", "(", "connection", ",", "partition", ",", "table_name", "=", "table_name", ",", "index_columns", "=", "index_columns", ",", "materialize", "=", "True", ",", "logger", "=", "logger", ")" ]
Abort running task if it exists .
def delete ( self , event ) : super ( CeleryReceiver , self ) . delete ( event ) AsyncResult ( event . id ) . revoke ( terminate = True )
8,779
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L191-L194
[ "def", "_write_cors_configuration", "(", "self", ",", "config", ")", ":", "endpoint", "=", "'/'", ".", "join", "(", "(", "self", ".", "server_url", ",", "'_api'", ",", "'v2'", ",", "'user'", ",", "'config'", ",", "'cors'", ")", ")", "resp", "=", "self", ".", "r_session", ".", "put", "(", "endpoint", ",", "data", "=", "json", ".", "dumps", "(", "config", ",", "cls", "=", "self", ".", "encoder", ")", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", ")", "resp", ".", "raise_for_status", "(", ")", "return", "response_to_json_dict", "(", "resp", ")" ]
Validate receiver identifier .
def validate_receiver ( self , key , value ) : if value not in current_webhooks . receivers : raise ReceiverDoesNotExist ( self . receiver_id ) return value
8,780
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L251-L255
[ "def", "set_nvram", "(", "self", ",", "nvram", ")", ":", "if", "self", ".", "_nvram", "==", "nvram", ":", "return", "yield", "from", "self", ".", "_hypervisor", ".", "send", "(", "'vm set_nvram \"{name}\" {nvram}'", ".", "format", "(", "name", "=", "self", ".", "_name", ",", "nvram", "=", "nvram", ")", ")", "log", ".", "info", "(", "'Router \"{name}\" [{id}]: NVRAM updated from {old_nvram}KB to {new_nvram}KB'", ".", "format", "(", "name", "=", "self", ".", "_name", ",", "id", "=", "self", ".", "_id", ",", "old_nvram", "=", "self", ".", "_nvram", ",", "new_nvram", "=", "nvram", ")", ")", "self", ".", "_nvram", "=", "nvram" ]
Create an event instance .
def create ( cls , receiver_id , user_id = None ) : event = cls ( id = uuid . uuid4 ( ) , receiver_id = receiver_id , user_id = user_id ) event . payload = event . receiver . extract_payload ( ) return event
8,781
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L258-L262
[ "def", "finalize_response", "(", "self", ",", "request", ",", "response", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Make the error obvious if a proper response is not returned", "assert", "isinstance", "(", "response", ",", "HttpResponseBase", ")", ",", "(", "'Expected a `Response`, `HttpResponse` or `HttpStreamingResponse` '", "'to be returned from the view, but received a `%s`'", "%", "type", "(", "response", ")", ")", "if", "isinstance", "(", "response", ",", "Response", ")", ":", "if", "not", "getattr", "(", "request", ",", "'accepted_renderer'", ",", "None", ")", ":", "neg", "=", "self", ".", "perform_content_negotiation", "(", "request", ",", "force", "=", "True", ")", "request", ".", "accepted_renderer", ",", "request", ".", "accepted_media_type", "=", "neg", "response", ".", "accepted_renderer", "=", "request", ".", "accepted_renderer", "response", ".", "accepted_media_type", "=", "request", ".", "accepted_media_type", "response", ".", "renderer_context", "=", "self", ".", "get_renderer_context", "(", ")", "for", "key", ",", "value", "in", "self", ".", "headers", ".", "items", "(", ")", ":", "response", "[", "key", "]", "=", "value", "return", "response" ]
Return registered receiver .
def receiver ( self ) : try : return current_webhooks . receivers [ self . receiver_id ] except KeyError : raise ReceiverDoesNotExist ( self . receiver_id )
8,782
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L265-L270
[ "def", "create_or_update_secret", "(", "self", ",", "path", ",", "secret", ",", "cas", "=", "None", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'options'", ":", "{", "}", ",", "'data'", ":", "secret", ",", "}", "if", "cas", "is", "not", "None", ":", "params", "[", "'options'", "]", "[", "'cas'", "]", "=", "cas", "api_path", "=", "'/v1/{mount_point}/data/{path}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "path", "=", "path", ")", "response", "=", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
Set receiver instance .
def receiver ( self , value ) : assert isinstance ( value , Receiver ) self . receiver_id = value . receiver_id
8,783
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L273-L276
[ "def", "delete_link", "(", "link_id", ",", "purge_data", ",", "*", "*", "kwargs", ")", ":", "user_id", "=", "kwargs", ".", "get", "(", "'user_id'", ")", "try", ":", "link_i", "=", "db", ".", "DBSession", ".", "query", "(", "Link", ")", ".", "filter", "(", "Link", ".", "id", "==", "link_id", ")", ".", "one", "(", ")", "except", "NoResultFound", ":", "raise", "ResourceNotFoundError", "(", "\"Link %s not found\"", "%", "(", "link_id", ")", ")", "group_items", "=", "db", ".", "DBSession", ".", "query", "(", "ResourceGroupItem", ")", ".", "filter", "(", "ResourceGroupItem", ".", "link_id", "==", "link_id", ")", ".", "all", "(", ")", "for", "gi", "in", "group_items", ":", "db", ".", "DBSession", ".", "delete", "(", "gi", ")", "if", "purge_data", "==", "'Y'", ":", "_purge_datasets_unique_to_resource", "(", "'LINK'", ",", "link_id", ")", "log", ".", "info", "(", "\"Deleting link %s, id=%s\"", ",", "link_i", ".", "name", ",", "link_id", ")", "link_i", ".", "network", ".", "check_write_permission", "(", "user_id", ")", "db", ".", "DBSession", ".", "delete", "(", "link_i", ")", "db", ".", "DBSession", ".", "flush", "(", ")" ]
Process current event .
def process ( self ) : try : self . receiver ( self ) # TODO RESTException except Exception as e : current_app . logger . exception ( 'Could not process event.' ) self . response_code = 500 self . response = dict ( status = 500 , message = str ( e ) ) return self
8,784
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/models.py#L278-L287
[ "def", "perform_content_negotiation", "(", "self", ",", "request", ",", "force", "=", "False", ")", ":", "renderers", "=", "self", ".", "get_renderers", "(", ")", "conneg", "=", "self", ".", "get_content_negotiator", "(", ")", "try", ":", "return", "conneg", ".", "select_renderer", "(", "request", ",", "renderers", ",", "self", ".", "format_kwarg", ")", "except", "Exception", ":", "if", "force", ":", "return", "(", "renderers", "[", "0", "]", ",", "renderers", "[", "0", "]", ".", "media_type", ")", "raise" ]
Discover and register all post import hooks named in the AUTOWRAPT_BOOTSTRAP environment variable . The value of the environment variable must be a comma separated list .
def register_bootstrap_functions ( ) : # This can be called twice if '.pth' file bootstrapping works and # the 'autowrapt' wrapper script is still also used. We therefore # protect ourselves just in case it is called a second time as we # only want to force registration once. global _registered if _registered : return _registered = True # It should be safe to import wrapt at this point as this code will # be executed after all Python module search directories have been # added to the module search path. from wrapt import discover_post_import_hooks for name in os . environ . get ( 'AUTOWRAPT_BOOTSTRAP' , '' ) . split ( ',' ) : discover_post_import_hooks ( name )
8,785
https://github.com/GrahamDumpleton/autowrapt/blob/d4770e4f511c19012055deaab68ef0ec8aa54ba4/src/bootstrap.py#L13-L39
[ "def", "numeric", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "compare", "=", "Numeric", "(", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "add", "(", "compare", ")", "return", "self" ]
Patches the site module such that the bootstrap functions for registering the post import hook callback functions are called as the last thing done when initialising the Python interpreter . This function would normally be called from the special . pth file .
def bootstrap ( ) : global _patched if _patched : return _patched = True # We want to do our real work as the very last thing in the 'site' # module when it is being imported so that the module search path is # initialised properly. What is the last thing executed depends on # whether the 'usercustomize' module support is enabled. Support for # the 'usercustomize' module will not be enabled in Python virtual # enviromments. We therefore wrap the functions for the loading of # both the 'sitecustomize' and 'usercustomize' modules but detect # when 'usercustomize' support is disabled and in that case do what # we need to after the 'sitecustomize' module is loaded. # # In wrapping these functions though, we can't actually use wrapt to # do so. This is because depending on how wrapt was installed it may # technically be dependent on '.pth' evaluation for Python to know # where to import it from. The addition of the directory which # contains wrapt may not yet have been done. We thus use a simple # function wrapper instead. site . execsitecustomize = _execsitecustomize_wrapper ( site . execsitecustomize ) site . execusercustomize = _execusercustomize_wrapper ( site . execusercustomize )
8,786
https://github.com/GrahamDumpleton/autowrapt/blob/d4770e4f511c19012055deaab68ef0ec8aa54ba4/src/bootstrap.py#L67-L100
[ "def", "same_types", "(", "self", ",", "index1", ",", "index2", ")", ":", "try", ":", "same", "=", "self", ".", "table", "[", "index1", "]", ".", "type", "==", "self", ".", "table", "[", "index2", "]", ".", "type", "!=", "SharedData", ".", "TYPES", ".", "NO_TYPE", "except", "Exception", ":", "self", ".", "error", "(", ")", "return", "same" ]
Gets can cannot and must rules from github license API
def get_rules ( license ) : can = [ ] cannot = [ ] must = [ ] req = requests . get ( "{base_url}/licenses/{license}" . format ( base_url = BASE_URL , license = license ) , headers = _HEADERS ) if req . status_code == requests . codes . ok : data = req . json ( ) can = data [ "permitted" ] cannot = data [ "forbidden" ] must = data [ "required" ] return can , cannot , must
8,787
https://github.com/architv/harvey/blob/2b96d57b7a1e0dd706f1f00aba3d92a7ae702960/harvey/get_tldr.py#L26-L41
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Gets all the license information and stores it in json format
def main ( ) : all_summary = { } for license in RESOURCES : req = requests . get ( RESOURCES [ license ] ) if req . status_code == requests . codes . ok : summary = get_summary ( req . text ) can , cannot , must = get_rules ( license ) all_summary [ license ] = { "summary" : summary , "source" : RESOURCES [ license ] , "can" : can , "cannot" : cannot , "must" : must } with open ( 'summary.json' , 'w+' ) as f : f . write ( json . dumps ( all_summary , indent = 4 ) )
8,788
https://github.com/architv/harvey/blob/2b96d57b7a1e0dd706f1f00aba3d92a7ae702960/harvey/get_tldr.py#L44-L66
[ "def", "future_set_exception_unless_cancelled", "(", "future", ":", "\"Union[futures.Future[_T], Future[_T]]\"", ",", "exc", ":", "BaseException", ")", "->", "None", ":", "if", "not", "future", ".", "cancelled", "(", ")", ":", "future", ".", "set_exception", "(", "exc", ")", "else", ":", "app_log", ".", "error", "(", "\"Exception after Future was cancelled\"", ",", "exc_info", "=", "exc", ")" ]
This get us the cli arguments .
def get_arguments ( ) : # https://docs.python.org/3/library/argparse.html parser = argparse . ArgumentParser ( description = 'Handles bumping of the artifact version' ) parser . add_argument ( '--log-config' , '-l' , action = 'store' , dest = 'logger_config' , help = 'The location of the logging config json file' , default = '' ) parser . add_argument ( '--log-level' , '-L' , help = 'Provide the log level. Defaults to INFO.' , dest = 'log_level' , action = 'store' , default = 'INFO' , choices = [ 'DEBUG' , 'INFO' , 'WARNING' , 'ERROR' , 'CRITICAL' ] ) parser . add_argument ( '--major' , help = 'Bump the major version' , dest = 'bump_major' , action = 'store_true' , default = False ) parser . add_argument ( '--minor' , help = 'Bump the minor version' , dest = 'bump_minor' , action = 'store_true' , default = False ) parser . add_argument ( '--patch' , help = 'Bump the patch version' , dest = 'bump_patch' , action = 'store_true' , default = False ) parser . add_argument ( '--version' , help = 'Set the version' , dest = 'version' , action = 'store' , default = False ) args = parser . parse_args ( ) return args
8,789
https://github.com/costastf/toonlib/blob/2fa95430240d1a1c2a85a8827aecfcb1ca41c18c/_CI/bin/bump.py#L21-L68
[ "def", "fetch_result", "(", "self", ")", ":", "results", "=", "self", ".", "soup", ".", "find_all", "(", "'div'", ",", "{", "'class'", ":", "'container container-small'", "}", ")", "href", "=", "None", "is_match", "=", "False", "i", "=", "0", "while", "i", "<", "len", "(", "results", ")", "and", "not", "is_match", ":", "result", "=", "results", "[", "i", "]", "anchor", "=", "result", ".", "find", "(", "'a'", ",", "{", "'rel'", ":", "'bookmark'", "}", ")", "is_match", "=", "self", ".", "_filter_results", "(", "result", ",", "anchor", ")", "href", "=", "anchor", "[", "'href'", "]", "i", "+=", "1", "try", ":", "page", "=", "get_soup", "(", "href", ")", "except", "(", "Exception", ")", ":", "page", "=", "None", "# Return page if search is successful", "if", "href", "and", "page", ":", "return", "page", "else", ":", "raise", "PageNotFoundError", "(", "PAGE_ERROR", ")" ]
This sets up the logging .
def setup_logging ( args ) : handler = logging . StreamHandler ( ) handler . setLevel ( args . log_level ) formatter = logging . Formatter ( ( '%(asctime)s - ' '%(name)s - ' '%(levelname)s - ' '%(message)s' ) ) handler . setFormatter ( formatter ) LOGGER . addHandler ( handler )
8,790
https://github.com/costastf/toonlib/blob/2fa95430240d1a1c2a85a8827aecfcb1ca41c18c/_CI/bin/bump.py#L71-L85
[ "def", "rejection_sample", "(", "n_samples", ",", "pool_size", ",", "rng_state", ")", ":", "result", "=", "np", ".", "empty", "(", "n_samples", ",", "dtype", "=", "np", ".", "int64", ")", "for", "i", "in", "range", "(", "n_samples", ")", ":", "reject_sample", "=", "True", "while", "reject_sample", ":", "j", "=", "tau_rand_int", "(", "rng_state", ")", "%", "pool_size", "for", "k", "in", "range", "(", "i", ")", ":", "if", "j", "==", "result", "[", "k", "]", ":", "break", "else", ":", "reject_sample", "=", "False", "result", "[", "i", "]", "=", "j", "return", "result" ]
Make a response from webhook event .
def make_response ( event ) : code , message = event . status response = jsonify ( * * event . response ) response . headers [ 'X-Hub-Event' ] = event . receiver_id response . headers [ 'X-Hub-Delivery' ] = event . id if message : response . headers [ 'X-Hub-Info' ] = message add_link_header ( response , { 'self' : url_for ( '.event_item' , receiver_id = event . receiver_id , event_id = event . id , _external = True ) } ) return response , code
8,791
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/views.py#L69-L81
[ "def", "BuildChecks", "(", "self", ",", "request", ")", ":", "result", "=", "[", "]", "if", "request", ".", "HasField", "(", "\"start_time\"", ")", "or", "request", ".", "HasField", "(", "\"end_time\"", ")", ":", "def", "FilterTimestamp", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_mtime\"", ")", "and", "(", "file_stat", ".", "st_mtime", "<", "request", ".", "start_time", "or", "file_stat", ".", "st_mtime", ">", "request", ".", "end_time", ")", "result", ".", "append", "(", "FilterTimestamp", ")", "if", "request", ".", "HasField", "(", "\"min_file_size\"", ")", "or", "request", ".", "HasField", "(", "\"max_file_size\"", ")", ":", "def", "FilterSize", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_size\"", ")", "and", "(", "file_stat", ".", "st_size", "<", "request", ".", "min_file_size", "or", "file_stat", ".", "st_size", ">", "request", ".", "max_file_size", ")", "result", ".", "append", "(", "FilterSize", ")", "if", "request", ".", "HasField", "(", "\"perm_mode\"", ")", ":", "def", "FilterPerms", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "(", "file_stat", ".", "st_mode", "&", "request", ".", "perm_mask", ")", "!=", "request", ".", "perm_mode", "result", ".", "append", "(", "FilterPerms", ")", "if", "request", ".", "HasField", "(", "\"uid\"", ")", ":", "def", "FilterUID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_uid", "!=", "request", ".", "uid", "result", ".", "append", "(", "FilterUID", ")", "if", "request", ".", "HasField", "(", "\"gid\"", ")", ":", "def", "FilterGID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_gid", "!=", "request", ".", "gid", "result", ".", "append", "(", "FilterGID", ")", "if", "request", ".", "HasField", "(", "\"path_regex\"", ")", ":", "regex", "=", "request", ".", "path_regex", 
"def", "FilterPath", "(", "file_stat", ",", "regex", "=", "regex", ")", ":", "\"\"\"Suppress any filename not matching the regular expression.\"\"\"", "return", "not", "regex", ".", "Search", "(", "file_stat", ".", "pathspec", ".", "Basename", "(", ")", ")", "result", ".", "append", "(", "FilterPath", ")", "if", "request", ".", "HasField", "(", "\"data_regex\"", ")", ":", "def", "FilterData", "(", "file_stat", ",", "*", "*", "_", ")", ":", "\"\"\"Suppress files that do not match the content.\"\"\"", "return", "not", "self", ".", "TestFileContent", "(", "file_stat", ")", "result", ".", "append", "(", "FilterData", ")", "return", "result" ]
Return a json payload and appropriate status code on expection .
def error_handler ( f ) : @ wraps ( f ) def inner ( * args , * * kwargs ) : try : return f ( * args , * * kwargs ) except ReceiverDoesNotExist : return jsonify ( status = 404 , description = 'Receiver does not exists.' ) , 404 except InvalidPayload as e : return jsonify ( status = 415 , description = 'Receiver does not support the' ' content-type "%s".' % e . args [ 0 ] ) , 415 except WebhooksError : return jsonify ( status = 500 , description = 'Internal server error' ) , 500 return inner
8,792
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/views.py#L87-L109
[ "def", "_addProteins", "(", "self", ",", "proteinIds", ",", "containerNames", ")", ":", "proteinIds", "=", "AUX", ".", "toList", "(", "proteinIds", ")", "for", "containerName", "in", "containerNames", ":", "proteinContainer", "=", "getattr", "(", "self", ",", "containerName", ")", "proteinContainer", ".", "update", "(", "proteinIds", ")" ]
Handle POST request .
def post ( self , receiver_id = None ) : try : user_id = request . oauth . access_token . user_id except AttributeError : user_id = current_user . get_id ( ) event = Event . create ( receiver_id = receiver_id , user_id = user_id ) db . session . add ( event ) db . session . commit ( ) # db.session.begin(subtransactions=True) event . process ( ) db . session . commit ( ) return make_response ( event )
8,793
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/views.py#L121-L138
[ "def", "get_urls", "(", "self", ")", ":", "not_clone_url", "=", "[", "url", "(", "r'^(.+)/will_not_clone/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "will_not_clone", ")", ")", "]", "restore_url", "=", "[", "url", "(", "r'^(.+)/restore/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "restore", ")", ")", "]", "return", "not_clone_url", "+", "restore_url", "+", "super", "(", "VersionedAdmin", ",", "self", ")", ".", "get_urls", "(", ")" ]
Find event and check access rights .
def _get_event ( receiver_id , event_id ) : event = Event . query . filter_by ( receiver_id = receiver_id , id = event_id ) . first_or_404 ( ) try : user_id = request . oauth . access_token . user_id except AttributeError : user_id = current_user . get_id ( ) if event . user_id != int ( user_id ) : abort ( 401 ) return event
8,794
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/views.py#L149-L163
[ "async", "def", "setup_streamer", "(", "self", ")", ":", "self", ".", "streamer", ".", "volume", "=", "self", ".", "volume", "/", "100", "self", ".", "streamer", ".", "start", "(", ")", "self", ".", "pause_time", "=", "None", "self", ".", "vclient_starttime", "=", "self", ".", "vclient", ".", "loop", ".", "time", "(", ")", "# Cache next song", "self", ".", "logger", ".", "debug", "(", "\"Caching next song\"", ")", "dl_thread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "download_next_song_cache", ")", "dl_thread", ".", "start", "(", ")" ]
Handle GET request .
def get ( self , receiver_id = None , event_id = None ) : event = self . _get_event ( receiver_id , event_id ) return make_response ( event )
8,795
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/views.py#L168-L171
[ "def", "get_urls", "(", "self", ")", ":", "not_clone_url", "=", "[", "url", "(", "r'^(.+)/will_not_clone/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "will_not_clone", ")", ")", "]", "restore_url", "=", "[", "url", "(", "r'^(.+)/restore/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "restore", ")", ")", "]", "return", "not_clone_url", "+", "restore_url", "+", "super", "(", "VersionedAdmin", ",", "self", ")", ".", "get_urls", "(", ")" ]
Handle DELETE request .
def delete ( self , receiver_id = None , event_id = None ) : event = self . _get_event ( receiver_id , event_id ) event . delete ( ) db . session . commit ( ) return make_response ( event )
8,796
https://github.com/inveniosoftware/invenio-webhooks/blob/f407cb2245464543ee474a81189fb9d3978bdde5/invenio_webhooks/views.py#L176-L181
[ "def", "get_urls", "(", "self", ")", ":", "not_clone_url", "=", "[", "url", "(", "r'^(.+)/will_not_clone/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "will_not_clone", ")", ")", "]", "restore_url", "=", "[", "url", "(", "r'^(.+)/restore/$'", ",", "admin", ".", "site", ".", "admin_view", "(", "self", ".", "restore", ")", ")", "]", "return", "not_clone_url", "+", "restore_url", "+", "super", "(", "VersionedAdmin", ",", "self", ")", ".", "get_urls", "(", ")" ]
Removes trailing and leading backslashes from string
def _stripslashes ( s ) : r = re . sub ( r"\\(n|r)" , "\n" , s ) r = re . sub ( r"\\" , "" , r ) return r
8,797
https://github.com/architv/harvey/blob/2b96d57b7a1e0dd706f1f00aba3d92a7ae702960/harvey/harvey.py#L46-L50
[ "def", "_FindLargestIdPostfixNumber", "(", "self", ",", "schedule", ")", ":", "postfix_number_re", "=", "re", ".", "compile", "(", "'(\\d+)$'", ")", "def", "ExtractPostfixNumber", "(", "entity_id", ")", ":", "\"\"\"Try to extract an integer from the end of entity_id.\n\n If entity_id is None or if there is no integer ending the id, zero is\n returned.\n\n Args:\n entity_id: An id string or None.\n\n Returns:\n An integer ending the entity_id or zero.\n \"\"\"", "if", "entity_id", "is", "None", ":", "return", "0", "match", "=", "postfix_number_re", ".", "search", "(", "entity_id", ")", "if", "match", "is", "not", "None", ":", "return", "int", "(", "match", ".", "group", "(", "1", ")", ")", "else", ":", "return", "0", "id_data_sets", "=", "{", "'agency_id'", ":", "schedule", ".", "GetAgencyList", "(", ")", ",", "'stop_id'", ":", "schedule", ".", "GetStopList", "(", ")", ",", "'route_id'", ":", "schedule", ".", "GetRouteList", "(", ")", ",", "'trip_id'", ":", "schedule", ".", "GetTripList", "(", ")", ",", "'service_id'", ":", "schedule", ".", "GetServicePeriodList", "(", ")", ",", "'fare_id'", ":", "schedule", ".", "GetFareAttributeList", "(", ")", ",", "'shape_id'", ":", "schedule", ".", "GetShapeList", "(", ")", "}", "max_postfix_number", "=", "0", "for", "id_name", ",", "entity_list", "in", "id_data_sets", ".", "items", "(", ")", ":", "for", "entity", "in", "entity_list", ":", "entity_id", "=", "getattr", "(", "entity", ",", "id_name", ")", "postfix_number", "=", "ExtractPostfixNumber", "(", "entity_id", ")", "max_postfix_number", "=", "max", "(", "max_postfix_number", ",", "postfix_number", ")", "return", "max_postfix_number" ]
Get git config user name
def _get_config_name ( ) : p = subprocess . Popen ( 'git config --get user.name' , shell = True , stdout = subprocess . PIPE , stderr = subprocess . STDOUT ) output = p . stdout . readlines ( ) return _stripslashes ( output [ 0 ] )
8,798
https://github.com/architv/harvey/blob/2b96d57b7a1e0dd706f1f00aba3d92a7ae702960/harvey/harvey.py#L53-L58
[ "def", "remove_from_space_size", "(", "self", ",", "removal_bytes", ")", ":", "# type: (int) -> None", "if", "not", "self", ".", "_initialized", ":", "raise", "pycdlibexception", ".", "PyCdlibInternalError", "(", "'This Volume Descriptor is not yet initialized'", ")", "# The 'removal' parameter is expected to be in bytes, but the space", "# size we track is in extents. Round up to the next extent.", "self", ".", "space_size", "-=", "utils", ".", "ceiling_div", "(", "removal_bytes", ",", "self", ".", "log_block_size", ")" ]
Lists all the licenses on command line
def _get_licences ( ) : licenses = _LICENSES for license in licenses : print ( "{license_name} [{license_code}]" . format ( license_name = licenses [ license ] , license_code = license ) )
8,799
https://github.com/architv/harvey/blob/2b96d57b7a1e0dd706f1f00aba3d92a7ae702960/harvey/harvey.py#L61-L67
[ "def", "run", "(", "self", ",", "gta", ",", "mcube_map", ",", "*", "*", "kwargs", ")", ":", "prefix", "=", "kwargs", ".", "get", "(", "'prefix'", ",", "'test'", ")", "format", "=", "kwargs", ".", "get", "(", "'format'", ",", "self", ".", "config", "[", "'format'", "]", ")", "loge_bounds", "=", "[", "None", "]", "+", "self", ".", "config", "[", "'loge_bounds'", "]", "for", "x", "in", "loge_bounds", ":", "self", ".", "make_roi_plots", "(", "gta", ",", "mcube_map", ",", "loge_bounds", "=", "x", ",", "*", "*", "kwargs", ")", "imfile", "=", "utils", ".", "format_filename", "(", "self", ".", "config", "[", "'fileio'", "]", "[", "'workdir'", "]", ",", "'counts_spectrum'", ",", "prefix", "=", "[", "prefix", "]", ",", "extension", "=", "format", ")", "make_counts_spectrum_plot", "(", "gta", ".", "_roi_data", ",", "gta", ".", "roi", ",", "gta", ".", "log_energies", ",", "imfile", ",", "*", "*", "kwargs", ")" ]