idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
43,100 | def add_static_url ( self , url_path , directory , endpoint = None , roles = None ) : url_path = self . static_url_path + "/" + url_path + "/<path:filename>" self . add_url_rule ( url_path , endpoint = endpoint , view_func = partial ( send_file_from_directory , directory = directory ) , roles = roles , ) self . add_access_controller ( endpoint , allow_access_for_roles ( Anonymous ) , endpoint = True ) | Add a new url rule for static files . |
43,101 | def _message_send ( self , connection ) : sender = current_app . config [ "MAIL_SENDER" ] if not self . extra_headers : self . extra_headers = { } self . extra_headers [ "Sender" ] = sender connection . send ( self , sender ) | Send a single message instance . |
43,102 | def _filter_metadata_for_connection ( target , connection , ** kw ) : engine = connection . engine . name default_engines = ( engine , ) tables = target if isinstance ( target , sa . Table ) else kw . get ( "tables" , [ ] ) for table in tables : indexes = list ( table . indexes ) for idx in indexes : if engine not in idx . info . get ( "engines" , default_engines ) : table . indexes . remove ( idx ) | Listener to control what indexes get created . |
43,103 | def url_for ( obj , ** kw ) : if isinstance ( obj , str ) : return flask_url_for ( obj , ** kw ) try : return current_app . default_view . url_for ( obj , ** kw ) except KeyError : if hasattr ( obj , "_url" ) : return obj . _url elif hasattr ( obj , "url" ) : return obj . url raise BuildError ( repr ( obj ) , kw , "GET" ) | Polymorphic variant of Flask s url_for function . |
43,104 | def send_file_from_directory ( filename , directory , app = None ) : if app is None : app = current_app cache_timeout = app . get_send_file_max_age ( filename ) return send_from_directory ( directory , filename , cache_timeout = cache_timeout ) | Helper to add static rules like in abilian . app . app . |
43,105 | def get_model_changes ( entity_type , year = None , month = None , day = None , hour = None , since = None ) : query = AuditEntry . query if since : query = query . filter ( AuditEntry . happened_at >= since ) if year : query = query . filter ( extract ( "year" , AuditEntry . happened_at ) == year ) if month : query = query . filter ( extract ( "month" , AuditEntry . happened_at ) == month ) if day : query = query . filter ( extract ( "day" , AuditEntry . happened_at ) == day ) if hour : query = query . filter ( extract ( "hour" , AuditEntry . happened_at ) == hour ) query = query . filter ( AuditEntry . entity_type . like ( entity_type ) ) . order_by ( AuditEntry . happened_at ) return query | Get models modified at the given date with the Audit service . |
43,106 | def get_columns_diff ( changes ) : for change in changes : change . diff = [ ] elt_changes = change . get_changes ( ) if elt_changes : change . diff = elt_changes . columns return changes | Add the changed columns as a diff attribute . |
43,107 | def _has_argument ( func ) : if hasattr ( inspect , 'signature' ) : sig = inspect . signature ( func ) return bool ( sig . parameters ) else : return bool ( inspect . getargspec ( func ) . args ) | Test whether a function expects an argument . |
43,108 | def get_context ( self , template ) : context = { } for regex , context_generator in self . contexts : if re . match ( regex , template . name ) : if inspect . isfunction ( context_generator ) : if _has_argument ( context_generator ) : context . update ( context_generator ( template ) ) else : context . update ( context_generator ( ) ) else : context . update ( context_generator ) if not self . mergecontexts : break return context | Get the context for a template . |
43,109 | def get_rule ( self , template_name ) : for regex , render_func in self . rules : if re . match ( regex , template_name ) : return render_func raise ValueError ( "no matching rule" ) | Find a matching compilation rule for a function . |
43,110 | def is_partial ( self , filename ) : return any ( ( x . startswith ( "_" ) for x in filename . split ( os . path . sep ) ) ) | Check if a file is a partial . |
43,111 | def is_template ( self , filename ) : if self . is_partial ( filename ) : return False if self . is_ignored ( filename ) : return False if self . is_static ( filename ) : return False return True | Check if a file is a template . |
43,112 | def _ensure_dir ( self , template_name ) : head = os . path . dirname ( template_name ) if head : file_dirpath = os . path . join ( self . outpath , head ) if not os . path . exists ( file_dirpath ) : os . makedirs ( file_dirpath ) | Ensure the output directory for a template exists . |
43,113 | def render ( self , use_reloader = False ) : self . render_templates ( self . templates ) self . copy_static ( self . static_names ) if use_reloader : self . logger . info ( "Watching '%s' for changes..." % self . searchpath ) self . logger . info ( "Press Ctrl+C to stop." ) Reloader ( self ) . watch ( ) | Generate the site . |
43,114 | def register_jinja_loaders ( self , * loaders ) : if not hasattr ( self , "_jinja_loaders" ) : raise ValueError ( "Cannot register new jinja loaders after first template rendered" ) self . _jinja_loaders . extend ( loaders ) | Register one or many jinja2 . Loader instances for templates lookup . |
43,115 | def render ( args ) : srcpath = ( os . path . join ( os . getcwd ( ) , 'templates' ) if args [ '--srcpath' ] is None else args [ '--srcpath' ] if os . path . isabs ( args [ '--srcpath' ] ) else os . path . join ( os . getcwd ( ) , args [ '--srcpath' ] ) ) if not os . path . isdir ( srcpath ) : print ( "The templates directory '%s' is invalid." % srcpath ) sys . exit ( 1 ) if args [ '--outpath' ] is not None : outpath = args [ '--outpath' ] else : outpath = os . getcwd ( ) if not os . path . isdir ( outpath ) : print ( "The output directory '%s' is invalid." % outpath ) sys . exit ( 1 ) staticdirs = args [ '--static' ] staticpaths = None if staticdirs : staticpaths = staticdirs . split ( "," ) for path in staticpaths : path = os . path . join ( srcpath , path ) if not os . path . isdir ( path ) : print ( "The static files directory '%s' is invalid." % path ) sys . exit ( 1 ) site = staticjinja . make_site ( searchpath = srcpath , outpath = outpath , staticpaths = staticpaths ) use_reloader = args [ 'watch' ] site . render ( use_reloader = use_reloader ) | Render a site . |
43,116 | def indexable_role ( principal ) : principal = unwrap ( principal ) if hasattr ( principal , "is_anonymous" ) and principal . is_anonymous : principal = Anonymous if isinstance ( principal , Role ) : return f"role:{principal.name}" elif isinstance ( principal , User ) : fmt = "user:{:d}" elif isinstance ( principal , Group ) : fmt = "group:{:d}" else : raise ValueError ( repr ( principal ) ) return fmt . format ( principal . id ) | Return a string suitable for query against allowed_roles_and_users field . |
43,117 | def to_text ( self , digest , blob , mime_type ) : if mime_type . startswith ( "image/" ) : return "" cache_key = "txt:" + digest text = self . cache . get ( cache_key ) if text : return text for handler in self . handlers : if handler . accept ( mime_type , "text/plain" ) : text = handler . convert ( blob ) self . cache [ cache_key ] = text return text pdf = self . to_pdf ( digest , blob , mime_type ) for handler in self . handlers : if handler . accept ( "application/pdf" , "text/plain" ) : text = handler . convert ( pdf ) self . cache [ cache_key ] = text return text raise HandlerNotFound ( f"No handler found to convert from {mime_type} to text" ) | Convert a file to plain text . |
43,118 | def has_image ( self , digest , mime_type , index , size = 500 ) : cache_key = f"img:{index}:{size}:{digest}" return mime_type . startswith ( "image/" ) or cache_key in self . cache | Tell if there is a preview image . |
43,119 | def get_image ( self , digest , blob , mime_type , index , size = 500 ) : if mime_type . startswith ( "image/" ) : return "" cache_key = f"img:{index}:{size}:{digest}" return self . cache . get ( cache_key ) | Return an image for the given content only if it already exists in the image cache . |
43,120 | def to_image ( self , digest , blob , mime_type , index , size = 500 ) : if mime_type . startswith ( "image/" ) : return "" cache_key = f"img:{index}:{size}:{digest}" converted = self . cache . get ( cache_key ) if converted : return converted for handler in self . handlers : if handler . accept ( mime_type , "image/jpeg" ) : converted_images = handler . convert ( blob , size = size ) for i in range ( 0 , len ( converted_images ) ) : converted = converted_images [ i ] cache_key = f"img:{i}:{size}:{digest}" self . cache [ cache_key ] = converted return converted_images [ index ] pdf = self . to_pdf ( digest , blob , mime_type ) for handler in self . handlers : if handler . accept ( "application/pdf" , "image/jpeg" ) : converted_images = handler . convert ( pdf , size = size ) for i in range ( 0 , len ( converted_images ) ) : converted = converted_images [ i ] cache_key = f"img:{i}:{size}:{digest}" self . cache [ cache_key ] = converted return converted_images [ index ] raise HandlerNotFound ( f"No handler found to convert from {mime_type} to image" ) | Convert a file to a list of images . |
43,121 | def get_metadata ( self , digest , content , mime_type ) : if mime_type . startswith ( "image/" ) : img = Image . open ( BytesIO ( content ) ) ret = { } if not hasattr ( img , "_getexif" ) : return { } info = img . _getexif ( ) if not info : return { } for tag , value in info . items ( ) : decoded = TAGS . get ( tag , tag ) ret [ "EXIF:" + str ( decoded ) ] = value return ret else : if mime_type != "application/pdf" : content = self . to_pdf ( digest , content , mime_type ) with make_temp_file ( content ) as in_fn : try : output = subprocess . check_output ( [ "pdfinfo" , in_fn ] ) except OSError : logger . error ( "Conversion failed, probably pdfinfo is not installed" ) raise ret = { } for line in output . split ( b"\n" ) : if b":" in line : key , value = line . strip ( ) . split ( b":" , 1 ) key = str ( key ) ret [ "PDF:" + key ] = str ( value . strip ( ) , errors = "replace" ) return ret | Get a dictionary representing the metadata embedded in the given content . |
43,122 | def ToByteArray ( self ) : lc = self . InternalEncodeLc ( ) out = bytearray ( 4 ) out [ 0 ] = self . cla out [ 1 ] = self . ins out [ 2 ] = self . p1 out [ 3 ] = self . p2 if self . data : out . extend ( lc ) out . extend ( self . data ) out . extend ( [ 0x00 , 0x00 ] ) else : out . extend ( [ 0x00 , 0x00 , 0x00 ] ) return out | Serialize the command . |
43,123 | def _BuildPluginRequest ( self , app_id , challenge_data , origin ) : client_data_map = { } encoded_challenges = [ ] app_id_hash_encoded = self . _Base64Encode ( self . _SHA256 ( app_id ) ) for challenge_item in challenge_data : key = challenge_item [ 'key' ] key_handle_encoded = self . _Base64Encode ( key . key_handle ) raw_challenge = challenge_item [ 'challenge' ] client_data_json = model . ClientData ( model . ClientData . TYP_AUTHENTICATION , raw_challenge , origin ) . GetJson ( ) challenge_hash_encoded = self . _Base64Encode ( self . _SHA256 ( client_data_json ) ) encoded_challenges . append ( { 'appIdHash' : app_id_hash_encoded , 'challengeHash' : challenge_hash_encoded , 'keyHandle' : key_handle_encoded , 'version' : key . version , } ) key_challenge_pair = ( key_handle_encoded , challenge_hash_encoded ) client_data_map [ key_challenge_pair ] = client_data_json signing_request = { 'type' : 'sign_helper_request' , 'signData' : encoded_challenges , 'timeoutSeconds' : U2F_SIGNATURE_TIMEOUT_SECONDS , 'localAlways' : True } return client_data_map , json . dumps ( signing_request ) | Builds a JSON request in the form that the plugin expects . |
43,124 | def _BuildAuthenticatorResponse ( self , app_id , client_data , plugin_response ) : encoded_client_data = self . _Base64Encode ( client_data ) signature_data = str ( plugin_response [ 'signatureData' ] ) key_handle = str ( plugin_response [ 'keyHandle' ] ) response = { 'clientData' : encoded_client_data , 'signatureData' : signature_data , 'applicationId' : app_id , 'keyHandle' : key_handle , } return response | Builds the response to return to the caller . |
43,125 | def _CallPlugin ( self , cmd , input_json ) : input_length = len ( input_json ) length_bytes_le = struct . pack ( '<I' , input_length ) request = length_bytes_le + input_json . encode ( ) sign_process = subprocess . Popen ( cmd , stdin = subprocess . PIPE , stdout = subprocess . PIPE ) stdout = sign_process . communicate ( request ) [ 0 ] exit_status = sign_process . wait ( ) response_len_le = stdout [ : 4 ] response_len = struct . unpack ( '<I' , response_len_le ) [ 0 ] response = stdout [ 4 : ] if response_len != len ( response ) : raise errors . PluginError ( 'Plugin response length {} does not match data {} (exit_status={})' . format ( response_len , len ( response ) , exit_status ) ) try : json_response = json . loads ( response . decode ( ) ) except ValueError : raise errors . PluginError ( 'Plugin returned invalid output (exit_status={})' . format ( exit_status ) ) if json_response . get ( 'type' ) != 'sign_helper_reply' : raise errors . PluginError ( 'Plugin returned invalid response type ' '(exit_status={})' . format ( exit_status ) ) result_code = json_response . get ( 'code' ) if result_code is None : raise errors . PluginError ( 'Plugin missing result code (exit_status={})' . format ( exit_status ) ) if result_code == SK_SIGNING_PLUGIN_TOUCH_REQUIRED : raise errors . U2FError ( errors . U2FError . TIMEOUT ) elif result_code == SK_SIGNING_PLUGIN_WRONG_DATA : raise errors . U2FError ( errors . U2FError . DEVICE_INELIGIBLE ) elif result_code != SK_SIGNING_PLUGIN_NO_ERROR : raise errors . PluginError ( 'Plugin failed with error {} - {} (exit_status={})' . format ( result_code , json_response . get ( 'errorDetail' ) , exit_status ) ) response_data = json_response . get ( 'responseData' ) if response_data is None : raise errors . PluginErrors ( 'Plugin returned output with missing responseData (exit_status={})' . format ( exit_status ) ) return response_data | Calls the plugin and validates the response . |
43,126 | def InternalInit ( self ) : self . cid = UsbHidTransport . U2FHID_BROADCAST_CID nonce = bytearray ( os . urandom ( 8 ) ) r = self . InternalExchange ( UsbHidTransport . U2FHID_INIT , nonce ) if len ( r ) < 17 : raise errors . HidError ( 'unexpected init reply len' ) if r [ 0 : 8 ] != nonce : raise errors . HidError ( 'nonce mismatch' ) self . cid = bytearray ( r [ 8 : 12 ] ) self . u2fhid_version = r [ 12 ] | Initializes the device and obtains channel id . |
43,127 | def InternalExchange ( self , cmd , payload_in ) : self . logger . debug ( 'payload: ' + str ( list ( payload_in ) ) ) payload = bytearray ( ) payload [ : ] = payload_in for _ in range ( 2 ) : self . InternalSend ( cmd , payload ) ret_cmd , ret_payload = self . InternalRecv ( ) if ret_cmd == UsbHidTransport . U2FHID_ERROR : if ret_payload == UsbHidTransport . ERR_CHANNEL_BUSY : time . sleep ( 0.5 ) continue raise errors . HidError ( 'Device error: %d' % int ( ret_payload [ 0 ] ) ) elif ret_cmd != cmd : raise errors . HidError ( 'Command mismatch!' ) return ret_payload raise errors . HidError ( 'Device Busy. Please retry' ) | Sends and receives a message from the device . |
43,128 | def InternalSend ( self , cmd , payload ) : length_to_send = len ( payload ) max_payload = self . packet_size - 7 first_frame = payload [ 0 : max_payload ] first_packet = UsbHidTransport . InitPacket ( self . packet_size , self . cid , cmd , len ( payload ) , first_frame ) del payload [ 0 : max_payload ] length_to_send -= len ( first_frame ) self . InternalSendPacket ( first_packet ) seq = 0 while length_to_send > 0 : max_payload = self . packet_size - 5 next_frame = payload [ 0 : max_payload ] del payload [ 0 : max_payload ] length_to_send -= len ( next_frame ) next_packet = UsbHidTransport . ContPacket ( self . packet_size , self . cid , seq , next_frame ) self . InternalSendPacket ( next_packet ) seq += 1 | Sends a message to the device including fragmenting it . |
43,129 | def InternalRecv ( self ) : first_read = self . InternalReadFrame ( ) first_packet = UsbHidTransport . InitPacket . FromWireFormat ( self . packet_size , first_read ) data = first_packet . payload to_read = first_packet . size - len ( first_packet . payload ) seq = 0 while to_read > 0 : next_read = self . InternalReadFrame ( ) next_packet = UsbHidTransport . ContPacket . FromWireFormat ( self . packet_size , next_read ) if self . cid != next_packet . cid : continue if seq != next_packet . seq : raise errors . HardwareError ( 'Packets received out of order' ) to_read -= len ( next_packet . payload ) data . extend ( next_packet . payload ) seq += 1 data = data [ 0 : first_packet . size ] return ( first_packet . cmd , data ) | Receives a message from the device including defragmenting it . |
43,130 | def InternalPlatformSwitch ( funcname , * args , ** kwargs ) : clz = None if sys . platform . startswith ( 'linux' ) : from pyu2f . hid import linux clz = linux . LinuxHidDevice elif sys . platform . startswith ( 'win32' ) : from pyu2f . hid import windows clz = windows . WindowsHidDevice elif sys . platform . startswith ( 'darwin' ) : from pyu2f . hid import macos clz = macos . MacOsHidDevice if not clz : raise Exception ( 'Unsupported platform: ' + sys . platform ) if funcname == '__init__' : return clz ( * args , ** kwargs ) return getattr ( clz , funcname ) ( * args , ** kwargs ) | Determine on a platform - specific basis which module to use . |
43,131 | def GetJson ( self ) : server_challenge_b64 = base64 . urlsafe_b64encode ( self . raw_server_challenge ) . decode ( ) server_challenge_b64 = server_challenge_b64 . rstrip ( '=' ) return json . dumps ( { 'typ' : self . typ , 'challenge' : server_challenge_b64 , 'origin' : self . origin } , sort_keys = True ) | Returns JSON version of ClientData compatible with FIDO spec . |
43,132 | def GetValueLength ( rd , pos ) : rd = bytearray ( rd ) key = rd [ pos ] if key == LONG_ITEM_ENCODING : if pos + 1 < len ( rd ) : return ( 3 , rd [ pos + 1 ] ) else : raise errors . HidError ( 'Malformed report descriptor' ) else : code = key & 0x03 if code <= 0x02 : return ( 1 , code ) elif code == 0x03 : return ( 1 , 4 ) raise errors . HidError ( 'Cannot happen' ) | Get value length for a key in rd . |
43,133 | def ReadLsbBytes ( rd , offset , value_size ) : encoding = None if value_size == 1 : encoding = '<B' elif value_size == 2 : encoding = '<H' elif value_size == 4 : encoding = '<L' else : raise errors . HidError ( 'Invalid value size specified' ) ret , = struct . unpack ( encoding , rd [ offset : offset + value_size ] ) return ret | Reads value_size bytes from rd at offset least signifcant byte first . |
43,134 | def ParseReportDescriptor ( rd , desc ) : rd = bytearray ( rd ) pos = 0 report_count = None report_size = None usage_page = None usage = None while pos < len ( rd ) : key = rd [ pos ] key_size , value_length = GetValueLength ( rd , pos ) if key & REPORT_DESCRIPTOR_KEY_MASK == INPUT_ITEM : if report_count and report_size : byte_length = ( report_count * report_size ) // 8 desc . internal_max_in_report_len = max ( desc . internal_max_in_report_len , byte_length ) report_count = None report_size = None elif key & REPORT_DESCRIPTOR_KEY_MASK == OUTPUT_ITEM : if report_count and report_size : byte_length = ( report_count * report_size ) // 8 desc . internal_max_out_report_len = max ( desc . internal_max_out_report_len , byte_length ) report_count = None report_size = None elif key & REPORT_DESCRIPTOR_KEY_MASK == COLLECTION_ITEM : if usage_page : desc . usage_page = usage_page if usage : desc . usage = usage elif key & REPORT_DESCRIPTOR_KEY_MASK == REPORT_COUNT : if len ( rd ) >= pos + 1 + value_length : report_count = ReadLsbBytes ( rd , pos + 1 , value_length ) elif key & REPORT_DESCRIPTOR_KEY_MASK == REPORT_SIZE : if len ( rd ) >= pos + 1 + value_length : report_size = ReadLsbBytes ( rd , pos + 1 , value_length ) elif key & REPORT_DESCRIPTOR_KEY_MASK == USAGE_PAGE : if len ( rd ) >= pos + 1 + value_length : usage_page = ReadLsbBytes ( rd , pos + 1 , value_length ) elif key & REPORT_DESCRIPTOR_KEY_MASK == USAGE : if len ( rd ) >= pos + 1 + value_length : usage = ReadLsbBytes ( rd , pos + 1 , value_length ) pos += value_length + key_size return desc | Parse the binary report descriptor . |
43,135 | def FillDeviceAttributes ( device , descriptor ) : attributes = HidAttributes ( ) result = hid . HidD_GetAttributes ( device , ctypes . byref ( attributes ) ) if not result : raise ctypes . WinError ( ) buf = ctypes . create_string_buffer ( 1024 ) result = hid . HidD_GetProductString ( device , buf , 1024 ) if not result : raise ctypes . WinError ( ) descriptor . vendor_id = attributes . VendorID descriptor . product_id = attributes . ProductID descriptor . product_string = ctypes . wstring_at ( buf ) | Fill out the attributes of the device . |
43,136 | def FillDeviceCapabilities ( device , descriptor ) : preparsed_data = PHIDP_PREPARSED_DATA ( 0 ) ret = hid . HidD_GetPreparsedData ( device , ctypes . byref ( preparsed_data ) ) if not ret : raise ctypes . WinError ( ) try : caps = HidCapabilities ( ) ret = hid . HidP_GetCaps ( preparsed_data , ctypes . byref ( caps ) ) if ret != HIDP_STATUS_SUCCESS : raise ctypes . WinError ( ) descriptor . usage = caps . Usage descriptor . usage_page = caps . UsagePage descriptor . internal_max_in_report_len = caps . InputReportByteLength descriptor . internal_max_out_report_len = caps . OutputReportByteLength finally : hid . HidD_FreePreparsedData ( preparsed_data ) | Fill out device capabilities . |
43,137 | def OpenDevice ( path , enum = False ) : desired_access = GENERIC_WRITE | GENERIC_READ share_mode = FILE_SHARE_READ | FILE_SHARE_WRITE if enum : desired_access = 0 h = kernel32 . CreateFileA ( path , desired_access , share_mode , None , OPEN_EXISTING , 0 , None ) if h == INVALID_HANDLE_VALUE : raise ctypes . WinError ( ) return h | Open the device and return a handle to it . |
43,138 | def GetLocalU2FInterface ( origin = socket . gethostname ( ) ) : hid_transports = hidtransport . DiscoverLocalHIDU2FDevices ( ) for t in hid_transports : try : return U2FInterface ( security_key = hardware . SecurityKey ( transport = t ) , origin = origin ) except errors . UnsupportedVersionException : pass raise errors . NoDeviceFoundError ( ) | Obtains a U2FInterface for the first valid local U2FHID device found . |
43,139 | def Register ( self , app_id , challenge , registered_keys ) : client_data = model . ClientData ( model . ClientData . TYP_REGISTRATION , challenge , self . origin ) challenge_param = self . InternalSHA256 ( client_data . GetJson ( ) ) app_param = self . InternalSHA256 ( app_id ) for key in registered_keys : try : if key . version != u'U2F_V2' : continue resp = self . security_key . CmdAuthenticate ( challenge_param , app_param , key . key_handle , True ) raise errors . HardwareError ( 'Should Never Happen' ) except errors . TUPRequiredError : raise errors . U2FError ( errors . U2FError . DEVICE_INELIGIBLE ) except errors . InvalidKeyHandleError as e : pass except errors . HardwareError as e : raise errors . U2FError ( errors . U2FError . BAD_REQUEST , e ) for _ in range ( 30 ) : try : resp = self . security_key . CmdRegister ( challenge_param , app_param ) return model . RegisterResponse ( resp , client_data ) except errors . TUPRequiredError as e : self . security_key . CmdWink ( ) time . sleep ( 0.5 ) except errors . HardwareError as e : raise errors . U2FError ( errors . U2FError . BAD_REQUEST , e ) raise errors . U2FError ( errors . U2FError . TIMEOUT ) | Registers app_id with the security key . |
43,140 | def Authenticate ( self , app_id , challenge , registered_keys ) : client_data = model . ClientData ( model . ClientData . TYP_AUTHENTICATION , challenge , self . origin ) app_param = self . InternalSHA256 ( app_id ) challenge_param = self . InternalSHA256 ( client_data . GetJson ( ) ) num_invalid_keys = 0 for key in registered_keys : try : if key . version != u'U2F_V2' : continue for _ in range ( 30 ) : try : resp = self . security_key . CmdAuthenticate ( challenge_param , app_param , key . key_handle ) return model . SignResponse ( key . key_handle , resp , client_data ) except errors . TUPRequiredError : self . security_key . CmdWink ( ) time . sleep ( 0.5 ) except errors . InvalidKeyHandleError : num_invalid_keys += 1 continue except errors . HardwareError as e : raise errors . U2FError ( errors . U2FError . BAD_REQUEST , e ) if num_invalid_keys == len ( registered_keys ) : raise errors . U2FError ( errors . U2FError . DEVICE_INELIGIBLE ) raise errors . U2FError ( errors . U2FError . TIMEOUT ) | Authenticates app_id with the security key . |
43,141 | def CmdRegister ( self , challenge_param , app_param ) : self . logger . debug ( 'CmdRegister' ) if len ( challenge_param ) != 32 or len ( app_param ) != 32 : raise errors . InvalidRequestError ( ) body = bytearray ( challenge_param + app_param ) response = self . InternalSendApdu ( apdu . CommandApdu ( 0 , apdu . CMD_REGISTER , 0x03 , 0x00 , body ) ) response . CheckSuccessOrRaise ( ) return response . body | Register security key . |
43,142 | def CmdAuthenticate ( self , challenge_param , app_param , key_handle , check_only = False ) : self . logger . debug ( 'CmdAuthenticate' ) if len ( challenge_param ) != 32 or len ( app_param ) != 32 : raise errors . InvalidRequestError ( ) control = 0x07 if check_only else 0x03 body = bytearray ( challenge_param + app_param + bytearray ( [ len ( key_handle ) ] ) + key_handle ) response = self . InternalSendApdu ( apdu . CommandApdu ( 0 , apdu . CMD_AUTH , control , 0x00 , body ) ) response . CheckSuccessOrRaise ( ) return response . body | Attempt to obtain an authentication signature . |
43,143 | def CmdVersion ( self ) : self . logger . debug ( 'CmdVersion' ) response = self . InternalSendApdu ( apdu . CommandApdu ( 0 , apdu . CMD_VERSION , 0x00 , 0x00 ) ) if not response . IsSuccess ( ) : raise errors . ApduError ( response . sw1 , response . sw2 ) return response . body | Obtain the version of the device and test transport format . |
43,144 | def InternalSendApdu ( self , apdu_to_send ) : response = None if not self . use_legacy_format : response = apdu . ResponseApdu ( self . transport . SendMsgBytes ( apdu_to_send . ToByteArray ( ) ) ) if response . sw1 == 0x67 and response . sw2 == 0x00 : self . use_legacy_format = True return self . InternalSendApdu ( apdu_to_send ) else : response = apdu . ResponseApdu ( self . transport . SendMsgBytes ( apdu_to_send . ToLegacyU2FByteArray ( ) ) ) return response | Send an APDU to the device . |
43,145 | def GetDeviceIntProperty ( dev_ref , key ) : cf_key = CFStr ( key ) type_ref = iokit . IOHIDDeviceGetProperty ( dev_ref , cf_key ) cf . CFRelease ( cf_key ) if not type_ref : return None if cf . CFGetTypeID ( type_ref ) != cf . CFNumberGetTypeID ( ) : raise errors . OsHidError ( 'Expected number type, got {}' . format ( cf . CFGetTypeID ( type_ref ) ) ) out = ctypes . c_int32 ( ) ret = cf . CFNumberGetValue ( type_ref , K_CF_NUMBER_SINT32_TYPE , ctypes . byref ( out ) ) if not ret : return None return out . value | Reads int property from the HID device . |
43,146 | def GetDeviceStringProperty ( dev_ref , key ) : cf_key = CFStr ( key ) type_ref = iokit . IOHIDDeviceGetProperty ( dev_ref , cf_key ) cf . CFRelease ( cf_key ) if not type_ref : return None if cf . CFGetTypeID ( type_ref ) != cf . CFStringGetTypeID ( ) : raise errors . OsHidError ( 'Expected string type, got {}' . format ( cf . CFGetTypeID ( type_ref ) ) ) type_ref = ctypes . cast ( type_ref , CF_STRING_REF ) out = ctypes . create_string_buffer ( DEVICE_STRING_PROPERTY_BUFFER_SIZE ) ret = cf . CFStringGetCString ( type_ref , out , DEVICE_STRING_PROPERTY_BUFFER_SIZE , K_CF_STRING_ENCODING_UTF8 ) if not ret : return None return out . value . decode ( 'utf8' ) | Reads string property from the HID device . |
43,147 | def GetDevicePath ( device_handle ) : io_service_obj = iokit . IOHIDDeviceGetService ( device_handle ) str_buffer = ctypes . create_string_buffer ( DEVICE_PATH_BUFFER_SIZE ) iokit . IORegistryEntryGetPath ( io_service_obj , K_IO_SERVICE_PLANE , str_buffer ) return str_buffer . value | Obtains the unique path for the device . |
43,148 | def HidReadCallback ( read_queue , result , sender , report_type , report_id , report , report_length ) : del result , sender , report_type , report_id incoming_bytes = [ report [ i ] for i in range ( report_length ) ] read_queue . put ( incoming_bytes ) | Handles incoming IN report from HID device . |
43,149 | def DeviceReadThread ( hid_device ) : hid_device . run_loop_ref = cf . CFRunLoopGetCurrent ( ) if not hid_device . run_loop_ref : logger . error ( 'Failed to get current run loop' ) return iokit . IOHIDDeviceScheduleWithRunLoop ( hid_device . device_handle , hid_device . run_loop_ref , K_CF_RUNLOOP_DEFAULT_MODE ) run_loop_run_result = K_CF_RUN_LOOP_RUN_TIMED_OUT while ( run_loop_run_result == K_CF_RUN_LOOP_RUN_TIMED_OUT or run_loop_run_result == K_CF_RUN_LOOP_RUN_HANDLED_SOURCE ) : run_loop_run_result = cf . CFRunLoopRunInMode ( K_CF_RUNLOOP_DEFAULT_MODE , 1000 , False ) if run_loop_run_result != K_CF_RUN_LOOP_RUN_STOPPED : logger . error ( 'Unexpected run loop exit code: %d' , run_loop_run_result ) iokit . IOHIDDeviceUnscheduleFromRunLoop ( hid_device . device_handle , hid_device . run_loop_ref , K_CF_RUNLOOP_DEFAULT_MODE ) | Binds a device to the thread s run loop then starts the run loop . |
43,150 | def change_view ( self , * args , ** kwargs ) : Hierarchy . init_hierarchy ( self ) self . hierarchy . hook_change_view ( self , args , kwargs ) return super ( HierarchicalModelAdmin , self ) . change_view ( * args , ** kwargs ) | Renders detailed model edit page . |
43,151 | def action_checkbox ( self , obj ) : if getattr ( obj , Hierarchy . UPPER_LEVEL_MODEL_ATTR , False ) : return '' return super ( HierarchicalModelAdmin , self ) . action_checkbox ( obj ) | Renders checkboxes . |
43,152 | def get_queryset ( self , request ) : self . _hierarchy . hook_get_queryset ( self , request ) return super ( HierarchicalChangeList , self ) . get_queryset ( request ) | Constructs a query set . |
43,153 | def get_results ( self , request ) : super ( HierarchicalChangeList , self ) . get_results ( request ) self . _hierarchy . hook_get_results ( self ) | Gets query set results . |
43,154 | def check_field_exists ( self , field_name ) : if not settings . DEBUG : return try : self . lookup_opts . get_field ( field_name ) except FieldDoesNotExist as e : raise AdmirarchyConfigurationError ( e ) | Implements field exists check for debugging purposes . |
43,155 | def init_hierarchy ( cls , model_admin ) : hierarchy = getattr ( model_admin , 'hierarchy' ) if hierarchy : if not isinstance ( hierarchy , Hierarchy ) : hierarchy = AdjacencyList ( ) else : hierarchy = NoHierarchy ( ) model_admin . hierarchy = hierarchy | Initializes model admin with hierarchy data . |
43,156 | def get_pid_from_request ( cls , changelist , request ) : val = request . GET . get ( cls . PARENT_ID_QS_PARAM , False ) pid = val or None try : del changelist . params [ cls . PARENT_ID_QS_PARAM ] except KeyError : pass return pid | Gets parent ID from query string . |
43,157 | def read_file ( fpath ) : with io . open ( os . path . join ( PATH_BASE , fpath ) ) as f : return f . read ( ) | Reads a file within package directories . |
43,158 | def get_version ( ) : contents = read_file ( os . path . join ( 'admirarchy' , '__init__.py' ) ) version = re . search ( 'VERSION = \(([^)]+)\)' , contents ) version = version . group ( 1 ) . replace ( ', ' , '.' ) . strip ( ) return version | Returns version number without module import ( which can lead to ImportError if some dependencies are unavailable before install . |
43,159 | def process_update ( self , update ) : data = json . loads ( update ) NetworkTables . getEntry ( data [ "k" ] ) . setValue ( data [ "v" ] ) | Process an incoming update from a remote NetworkTables |
43,160 | def _send_update ( self , data ) : if isinstance ( data , dict ) : data = json . dumps ( data ) self . update_callback ( data ) | Send a NetworkTables update via the stored send_update callback |
43,161 | def _nt_on_change ( self , key , value , isNew ) : self . _send_update ( { "k" : key , "v" : value , "n" : isNew } ) | NetworkTables global listener callback |
43,162 | def close ( self ) : NetworkTables . removeGlobalListener ( self . _nt_on_change ) NetworkTables . removeConnectionListener ( self . _nt_connected ) | Clean up NetworkTables listeners |
43,163 | def select_station ( candidates , coverage_range = None , min_fraction_coverage = 0.9 , distance_warnings = ( 50000 , 200000 ) , rank = 1 , ) : def _test_station ( station ) : if coverage_range is None : return True , [ ] else : start_date , end_date = coverage_range try : tempC , warnings = eeweather . mockable . load_isd_hourly_temp_data ( station , start_date , end_date ) except ISDDataNotAvailableError : return False , [ ] if len ( tempC ) > 0 : fraction_coverage = tempC . notnull ( ) . sum ( ) / float ( len ( tempC ) ) return ( fraction_coverage > min_fraction_coverage ) , warnings else : return False , [ ] def _station_warnings ( station , distance_meters ) : return [ EEWeatherWarning ( qualified_name = "eeweather.exceeds_maximum_distance" , description = ( "Distance from target to weather station is greater" "than the specified km." ) , data = { "distance_meters" : distance_meters , "max_distance_meters" : d , "rank" : rank , } , ) for d in distance_warnings if distance_meters > d ] n_stations_passed = 0 for usaf_id , row in candidates . iterrows ( ) : station = ISDStation ( usaf_id ) test_result , warnings = _test_station ( station ) if test_result : n_stations_passed += 1 if n_stations_passed == rank : if not warnings : warnings = [ ] warnings . extend ( _station_warnings ( station , row . distance_meters ) ) return station , warnings no_station_warning = EEWeatherWarning ( qualified_name = "eeweather.no_weather_station_selected" , description = ( "No weather station found with the specified rank and" " minimum fracitional coverage." ) , data = { "rank" : rank , "min_fraction_coverage" : min_fraction_coverage } , ) return None , [ no_station_warning ] | Select a station from a list of candidates that meets given data quality criteria . |
43,164 | def _load_isd_station_metadata ( download_path ) : from shapely . geometry import Point isd_history = pd . read_csv ( os . path . join ( download_path , "isd-history.csv" ) , dtype = str , parse_dates = [ "BEGIN" , "END" ] , ) hasGEO = ( isd_history . LAT . notnull ( ) & isd_history . LON . notnull ( ) & ( isd_history . LAT != 0 ) ) isUS = ( ( ( isd_history . CTRY == "US" ) & ( isd_history . STATE . notnull ( ) ) ) | ( isd_history . CTRY . str [ 1 ] == "Q" ) ) hasUSAF = isd_history . USAF != "999999" metadata = { } for usaf_station , group in isd_history [ hasGEO & isUS & hasUSAF ] . groupby ( "USAF" ) : recent = group . loc [ group . END . idxmax ( ) ] wban_stations = list ( group . WBAN ) metadata [ usaf_station ] = { "usaf_id" : usaf_station , "wban_ids" : wban_stations , "recent_wban_id" : recent . WBAN , "name" : recent [ "STATION NAME" ] , "icao_code" : recent . ICAO , "latitude" : recent . LAT if recent . LAT not in ( "+00.000" , ) else None , "longitude" : recent . LON if recent . LON not in ( "+000.000" , ) else None , "point" : Point ( float ( recent . LON ) , float ( recent . LAT ) ) , "elevation" : recent [ "ELEV(M)" ] if not str ( float ( recent [ "ELEV(M)" ] ) ) . startswith ( "-999" ) else None , "state" : recent . STATE , } return metadata | Collect metadata for US isd stations . |
43,165 | def _load_isd_file_metadata ( download_path , isd_station_metadata ) : isd_inventory = pd . read_csv ( os . path . join ( download_path , "isd-inventory.csv" ) , dtype = str ) station_keep = [ usaf in isd_station_metadata for usaf in isd_inventory . USAF ] isd_inventory = isd_inventory [ station_keep ] year_keep = isd_inventory . YEAR > "2005" isd_inventory = isd_inventory [ year_keep ] metadata = { } for ( usaf_station , year ) , group in isd_inventory . groupby ( [ "USAF" , "YEAR" ] ) : if usaf_station not in metadata : metadata [ usaf_station ] = { "usaf_id" : usaf_station , "years" : { } } metadata [ usaf_station ] [ "years" ] [ year ] = [ { "wban_id" : row . WBAN , "counts" : [ row . JAN , row . FEB , row . MAR , row . APR , row . MAY , row . JUN , row . JUL , row . AUG , row . SEP , row . OCT , row . NOV , row . DEC , ] , } for i , row in group . iterrows ( ) ] return metadata | Collect data counts for isd files . |
43,166 | def build_metadata_db ( zcta_geometry = False , iecc_climate_zone_geometry = True , iecc_moisture_regime_geometry = True , ba_climate_zone_geometry = True , ca_climate_zone_geometry = True , ) : try : import shapely except ImportError : raise ImportError ( "Loading polygons requires shapely." ) try : from bs4 import BeautifulSoup except ImportError : raise ImportError ( "Scraping TMY3 station data requires beautifulsoup4." ) try : import pyproj except ImportError : raise ImportError ( "Computing distances requires pyproj." ) try : import simplejson except ImportError : raise ImportError ( "Writing geojson requires simplejson." ) download_path = _download_primary_sources ( ) conn = metadata_db_connection_proxy . reset_database ( ) print ( "Loading ZCTAs" ) zcta_metadata = _load_zcta_metadata ( download_path ) print ( "Loading counties" ) county_metadata = _load_county_metadata ( download_path ) print ( "Merging county climate zones" ) ( iecc_climate_zone_metadata , iecc_moisture_regime_metadata , ba_climate_zone_metadata , ) = _create_merged_climate_zones_metadata ( county_metadata ) print ( "Loading CA climate zones" ) ca_climate_zone_metadata = _load_CA_climate_zone_metadata ( download_path ) print ( "Loading ISD station metadata" ) isd_station_metadata = _load_isd_station_metadata ( download_path ) print ( "Loading ISD station file metadata" ) isd_file_metadata = _load_isd_file_metadata ( download_path , isd_station_metadata ) print ( "Loading TMY3 station metadata" ) tmy3_station_metadata = _load_tmy3_station_metadata ( download_path ) print ( "Loading CZ2010 station metadata" ) cz2010_station_metadata = _load_cz2010_station_metadata ( ) print ( "Computing ISD station quality" ) _compute_isd_station_quality ( isd_station_metadata , isd_file_metadata ) print ( "Mapping ZCTAs to climate zones" ) _map_zcta_to_climate_zones ( zcta_metadata , iecc_climate_zone_metadata , iecc_moisture_regime_metadata , ba_climate_zone_metadata , ca_climate_zone_metadata , ) 
print ( "Mapping ISD stations to climate zones" ) _map_isd_station_to_climate_zones ( isd_station_metadata , iecc_climate_zone_metadata , iecc_moisture_regime_metadata , ba_climate_zone_metadata , ca_climate_zone_metadata , ) print ( "Creating table structures" ) _create_table_structures ( conn ) print ( "Writing ZCTA data" ) _write_zcta_metadata_table ( conn , zcta_metadata , geometry = zcta_geometry ) print ( "Writing IECC climate zone data" ) _write_iecc_climate_zone_metadata_table ( conn , iecc_climate_zone_metadata , geometry = iecc_climate_zone_geometry ) print ( "Writing IECC moisture regime data" ) _write_iecc_moisture_regime_metadata_table ( conn , iecc_moisture_regime_metadata , geometry = iecc_moisture_regime_geometry ) print ( "Writing BA climate zone data" ) _write_ba_climate_zone_metadata_table ( conn , ba_climate_zone_metadata , geometry = ba_climate_zone_geometry ) print ( "Writing CA climate zone data" ) _write_ca_climate_zone_metadata_table ( conn , ca_climate_zone_metadata , geometry = ca_climate_zone_geometry ) print ( "Writing ISD station metadata" ) _write_isd_station_metadata_table ( conn , isd_station_metadata ) print ( "Writing ISD file metadata" ) _write_isd_file_metadata_table ( conn , isd_file_metadata ) print ( "Writing TMY3 station metadata" ) _write_tmy3_station_metadata_table ( conn , tmy3_station_metadata ) print ( "Writing CZ2010 station metadata" ) _write_cz2010_station_metadata_table ( conn , cz2010_station_metadata ) print ( "Cleaning up..." ) shutil . rmtree ( download_path ) print ( "\u2728 Completed! \u2728" ) | Build database of metadata from primary sources . |
43,167 | def json ( self ) : return { "elevation" : self . elevation , "latitude" : self . latitude , "longitude" : self . longitude , "icao_code" : self . icao_code , "name" : self . name , "quality" : self . quality , "wban_ids" : self . wban_ids , "recent_wban_id" : self . recent_wban_id , "climate_zones" : { "iecc_climate_zone" : self . iecc_climate_zone , "iecc_moisture_regime" : self . iecc_moisture_regime , "ba_climate_zone" : self . ba_climate_zone , "ca_climate_zone" : self . ca_climate_zone , } , } | Return a JSON - serializable object containing station metadata .
43,168 | def get_isd_filenames ( self , year = None , with_host = False ) : return get_isd_filenames ( self . usaf_id , year , with_host = with_host ) | Get filenames of raw ISD station data . |
43,169 | def get_gsod_filenames ( self , year = None , with_host = False ) : return get_gsod_filenames ( self . usaf_id , year , with_host = with_host ) | Get filenames of raw GSOD station data . |
43,170 | def execute ( self , input_data ) : if ( input_data [ 'meta' ] [ 'type_tag' ] != 'pdf' ) : return { 'error' : self . __class__ . __name__ + ': called on ' + input_data [ 'meta' ] [ 'type_tag' ] } view = { } view [ 'strings' ] = input_data [ 'strings' ] [ 'string_list' ] [ : 5 ] view . update ( input_data [ 'meta' ] ) return view | Execute the ViewPDF worker |
43,171 | def add_node ( self , node_id , name , labels ) : if node_id not in self . node_cache : self . workbench . add_node ( node_id , name , labels ) self . node_cache . add ( node_id ) | Cache aware add_node |
43,172 | def add_rel ( self , source_id , target_id , rel ) : if ( source_id , target_id ) not in self . rel_cache : self . workbench . add_rel ( source_id , target_id , rel ) self . rel_cache . add ( ( source_id , target_id ) ) | Cache aware add_rel |
43,173 | def run ( ) : args = client_helper . grab_server_args ( ) workbench = zerorpc . Client ( timeout = 300 , heartbeat = 60 ) workbench . connect ( 'tcp://' + args [ 'server' ] + ':' + args [ 'port' ] ) print workbench . help ( ) print workbench . help ( 'basic' ) print workbench . help ( 'commands' ) print workbench . help ( 'store_sample' ) print workbench . help ( 'workers' ) print workbench . help ( 'meta' ) print workbench . test_worker ( 'meta' ) | This client calls a bunch of help commands from workbench |
43,174 | def execute ( self , input_data ) : string_output = input_data [ 'strings' ] [ 'string_list' ] flatten = ' ' . join ( string_output ) urls = self . url_match . findall ( flatten ) return { 'url_list' : urls } | Execute the URL worker |
43,175 | def r_to_s ( func ) : @ functools . wraps ( func ) def wrapper ( * args , ** kwargs ) : class ReprToStr ( str ) : def __repr__ ( self ) : return str ( self ) return ReprToStr ( func ( * args , ** kwargs ) ) return wrapper | Decorator method for Workbench methods returning a str |
43,176 | def all_files_in_directory ( path ) : file_list = [ ] for dirname , dirnames , filenames in os . walk ( path ) : for filename in filenames : file_list . append ( os . path . join ( dirname , filename ) ) | Recursively list all files under a directory
43,177 | def run ( ) : args = client_helper . grab_server_args ( ) workbench = zerorpc . Client ( timeout = 300 , heartbeat = 60 ) workbench . connect ( 'tcp://' + args [ 'server' ] + ':' + args [ 'port' ] ) data_dir = os . path . join ( os . path . dirname ( os . path . realpath ( __file__ ) ) , '../data' ) file_list = all_files_in_directory ( data_dir ) md5_list = [ ] for path in file_list : if '.DS_Store' in path : continue with open ( path , 'rb' ) as f : filename = os . path . basename ( path ) raw_bytes = f . read ( ) md5 = hashlib . md5 ( raw_bytes ) . hexdigest ( ) md5_list . append ( md5 ) if workbench . has_sample ( md5 ) : print 'Workbench already has this sample %s' % md5 else : md5 = workbench . store_sample ( raw_bytes , filename , 'unknown' ) print 'Filename %s uploaded: type_tag %s, md5 %s' % ( filename , 'unknown' , md5 ) zip_files = workbench . generate_sample_set ( 'zip' ) _foo = workbench . set_work_request ( 'unzip' , zip_files ) list ( _foo ) pcap_files = workbench . generate_sample_set ( 'pcap' ) _foo = workbench . set_work_request ( 'pcap_bro' , pcap_files ) list ( _foo ) mem_files = workbench . generate_sample_set ( 'mem' ) _foo = workbench . set_work_request ( 'mem_procdump' , mem_files ) list ( _foo ) print 'Info: Ensuring File Identifications...' type_tag_set = set ( ) all_files = workbench . generate_sample_set ( ) meta_all = workbench . set_work_request ( 'meta' , all_files ) for meta in meta_all : type_tag_set . add ( meta [ 'type_tag' ] ) if meta [ 'type_tag' ] in [ 'unknown' , 'own' ] : print meta pprint . pprint ( type_tag_set ) | This client pushes a big directory of different files into Workbench . |
43,178 | def load_all_plugins ( self ) : self . plugin_path = os . path . realpath ( self . plugin_dir ) sys . path . append ( self . plugin_dir ) print '<<< Plugin Manager >>>' for f in [ os . path . join ( self . plugin_dir , child ) for child in os . listdir ( self . plugin_dir ) ] : if '.DS_Store' in f or '__init__.py' in f : continue self . add_plugin ( f ) | Load all the plugins in the plugin directory |
43,179 | def remove_plugin ( self , f ) : if f . endswith ( '.py' ) : plugin_name = os . path . splitext ( os . path . basename ( f ) ) [ 0 ] print '- %s %sREMOVED' % ( plugin_name , color . Red ) print '\t%sNote: still in memory, restart Workbench to remove...%s' % ( color . Yellow , color . Normal ) | Removing a deleted plugin .
43,180 | def add_plugin ( self , f ) : if f . endswith ( '.py' ) : plugin_name = os . path . splitext ( os . path . basename ( f ) ) [ 0 ] if plugin_name in sys . modules : try : handler = reload ( sys . modules [ plugin_name ] ) print '\t- %s %sRELOAD%s' % ( plugin_name , color . Yellow , color . Normal ) except ImportError , error : print 'Failed to import plugin: %s (%s)' % ( plugin_name , error ) return else : try : handler = __import__ ( plugin_name , globals ( ) , locals ( ) , [ ] , - 1 ) except ImportError , error : print 'Failed to import plugin: %s (%s)' % ( plugin_name , error ) return plugin = self . validate ( handler ) print '\t- %s %sOK%s' % ( plugin_name , color . Green , color . Normal ) if plugin : plugin [ 'name' ] = plugin_name plugin [ 'dependencies' ] = plugin [ 'class' ] . dependencies plugin [ 'docstring' ] = plugin [ 'class' ] . __doc__ plugin [ 'mod_time' ] = datetime . utcfromtimestamp ( os . path . getmtime ( f ) ) try : plugin [ 'sample_set_input' ] = getattr ( plugin [ 'class' ] , 'sample_set_input' ) except AttributeError : plugin [ 'sample_set_input' ] = False self . plugin_callback ( plugin ) | Adding and verifying plugin . |
43,181 | def store_sample ( self , sample_bytes , filename , type_tag ) : if len ( filename ) > 1000 : print 'switched bytes/filename... %s %s' % ( sample_bytes [ : 100 ] , filename [ : 100 ] ) exit ( 1 ) sample_info = { } sample_info [ 'md5' ] = hashlib . md5 ( sample_bytes ) . hexdigest ( ) if self . has_sample ( sample_info [ 'md5' ] ) : return sample_info [ 'md5' ] self . periodic_ops ( ) self . expire_data ( ) sample_info [ 'filename' ] = filename sample_info [ 'length' ] = len ( sample_bytes ) sample_info [ 'import_time' ] = datetime . datetime . utcnow ( ) sample_info [ 'type_tag' ] = type_tag import random sample_info [ 'customer' ] = random . choice ( [ 'Mega Corp' , 'Huge Inc' , 'BearTron' , 'Dorseys Mom' ] ) sample_info [ '__grid_fs' ] = self . gridfs_handle . put ( sample_bytes ) self . database [ self . sample_collection ] . insert ( sample_info ) print 'Sample Storage: %.2f out of %.2f MB' % ( self . sample_storage_size ( ) , self . samples_cap ) return sample_info [ 'md5' ] | Store a sample into the datastore . |
43,182 | def sample_storage_size ( self ) : try : coll_stats = self . database . command ( 'collStats' , 'fs.chunks' ) sample_storage_size = coll_stats [ 'size' ] / 1024.0 / 1024.0 return sample_storage_size except pymongo . errors . OperationFailure : return 0 | Get the storage size of the samples storage collection . |
43,183 | def expire_data ( self ) : while self . sample_storage_size ( ) > self . samples_cap : record = self . database [ self . sample_collection ] . find ( ) . sort ( 'import_time' , pymongo . ASCENDING ) . limit ( 1 ) [ 0 ] self . remove_sample ( record [ 'md5' ] ) | Expire data within the samples collection . |
43,184 | def remove_sample ( self , md5 ) : record = self . database [ self . sample_collection ] . find_one ( { 'md5' : md5 } ) if not record : return print 'Deleting sample: %s (%.2f MB)...' % ( record [ 'md5' ] , record [ 'length' ] / 1024.0 / 1024.0 ) self . database [ self . sample_collection ] . remove ( { 'md5' : record [ 'md5' ] } ) self . gridfs_handle . delete ( record [ '__grid_fs' ] ) print 'Sample Storage: %.2f out of %.2f MB' % ( self . sample_storage_size ( ) , self . samples_cap ) | Delete a specific sample |
43,185 | def clean_for_serialization ( self , data ) : if isinstance ( data , dict ) : for k in data . keys ( ) : if ( k . startswith ( '__' ) ) : del data [ k ] elif isinstance ( data [ k ] , bson . objectid . ObjectId ) : del data [ k ] elif isinstance ( data [ k ] , datetime . datetime ) : data [ k ] = data [ k ] . isoformat ( ) + 'Z' elif isinstance ( data [ k ] , dict ) : data [ k ] = self . clean_for_serialization ( data [ k ] ) elif isinstance ( data [ k ] , list ) : data [ k ] = [ self . clean_for_serialization ( item ) for item in data [ k ] ] return data | Clean data in preparation for serialization . |
43,186 | def clean_for_storage ( self , data ) : data = self . data_to_unicode ( data ) if isinstance ( data , dict ) : for k in dict ( data ) . keys ( ) : if k == '_id' : del data [ k ] continue if '.' in k : new_k = k . replace ( '.' , '_' ) data [ new_k ] = data [ k ] del data [ k ] k = new_k if isinstance ( data [ k ] , dict ) : data [ k ] = self . clean_for_storage ( data [ k ] ) elif isinstance ( data [ k ] , list ) : data [ k ] = [ self . clean_for_storage ( item ) for item in data [ k ] ] return data | Clean data in preparation for storage . |
43,187 | def get_sample ( self , md5 ) : if len ( md5 ) < 32 : md5 = self . get_full_md5 ( md5 , self . sample_collection ) sample_info = self . database [ self . sample_collection ] . find_one ( { 'md5' : md5 } ) if not sample_info : return None try : grid_fs_id = sample_info [ '__grid_fs' ] sample_info = self . clean_for_serialization ( sample_info ) sample_info . update ( { 'raw_bytes' : self . gridfs_handle . get ( grid_fs_id ) . read ( ) } ) return sample_info except gridfs . errors . CorruptGridFile : self . database [ self . sample_collection ] . update ( { 'md5' : md5 } , { 'md5' : None } ) return None | Get the sample from the data store . |
43,188 | def has_sample ( self , md5 ) : sample = self . get_sample ( md5 ) return True if sample else False | Checks if data store has this sample . |
43,189 | def _list_samples ( self , predicate = None ) : cursor = self . database [ self . sample_collection ] . find ( predicate , { '_id' : 0 , 'md5' : 1 } ) return [ item [ 'md5' ] for item in cursor ] | List all samples that meet the predicate or all if predicate is not specified . |
43,190 | def tag_match ( self , tags = None ) : if 'tags' not in self . database . collection_names ( ) : print 'Warning: Searching on non-existance tags collection' return None if not tags : cursor = self . database [ 'tags' ] . find ( { } , { '_id' : 0 , 'md5' : 1 } ) else : cursor = self . database [ 'tags' ] . find ( { 'tags' : { '$in' : tags } } , { '_id' : 0 , 'md5' : 1 } ) tag_md5s = set ( [ item [ 'md5' ] for item in cursor ] ) sample_md5s = set ( item [ 'md5' ] for item in self . database [ 'samples' ] . find ( { } , { '_id' : 0 , 'md5' : 1 } ) ) return list ( tag_md5s . intersection ( sample_md5s ) ) | List all samples that match the tags or all if tags are not specified . |
43,191 | def store_work_results ( self , results , collection , md5 ) : results [ 'md5' ] = md5 results [ '__time_stamp' ] = datetime . datetime . utcnow ( ) if 'mod_time' not in results : results [ 'mod_time' ] = results [ '__time_stamp' ] try : self . database [ collection ] . update ( { 'md5' : md5 } , self . clean_for_storage ( results ) , True ) except pymongo . errors . OperationFailure : print 'Could not update exising object in capped collection, punting...' print 'collection: %s md5:%s' % ( collection , md5 ) | Store the output results of the worker . |
43,192 | def clear_worker_output ( self ) : print 'Dropping all of the worker output collections... Whee!' all_c = self . database . collection_names ( ) try : all_c . remove ( 'system.indexes' ) all_c . remove ( 'fs.chunks' ) all_c . remove ( 'fs.files' ) all_c . remove ( 'sample_set' ) all_c . remove ( 'tags' ) all_c . remove ( self . sample_collection ) except ValueError : print 'Catching a benign exception thats expected...' for collection in all_c : self . database . drop_collection ( collection ) | Drops all of the worker output collections |
43,193 | def periodic_ops ( self ) : if ( time . time ( ) - self . last_ops_run ) < 30 : return try : self . last_ops_run = time . time ( ) print 'Running Periodic Ops' all_c = self . database . collection_names ( ) try : all_c . remove ( 'system.indexes' ) all_c . remove ( 'fs.chunks' ) all_c . remove ( 'fs.files' ) all_c . remove ( 'info' ) all_c . remove ( 'tags' ) all_c . remove ( self . sample_collection ) except ValueError : print 'Catching a benign exception thats expected...' if self . worker_cap : size = self . worker_cap * pow ( 1024 , 2 ) for collection in all_c : self . database . command ( 'convertToCapped' , collection , size = size ) for collection in all_c : self . database [ collection ] . ensure_index ( 'md5' ) self . database [ self . sample_collection ] . create_index ( 'import_time' ) self . database [ 'tags' ] . create_index ( 'tags' ) except pymongo . errors . AutoReconnect as e : print 'Warning: MongoDB raised an AutoReconnect...' % e return except Exception as e : print 'Critical: MongoDB raised an exception' % e return | Run periodic operations on the data store . Operations like making sure collections are capped and indexes are set up .
43,194 | def to_unicode ( self , s ) : if isinstance ( s , unicode ) : return s if isinstance ( s , str ) : return unicode ( s , errors = 'ignore' ) return s | Convert an elementary datatype to unicode . |
43,195 | def data_to_unicode ( self , data ) : if isinstance ( data , dict ) : return { self . to_unicode ( k ) : self . to_unicode ( v ) for k , v in data . iteritems ( ) } if isinstance ( data , list ) : return [ self . to_unicode ( l ) for l in data ] else : return self . to_unicode ( data ) | Recursively convert a list or dictionary to unicode . |
43,196 | def grab_server_args ( ) : workbench_conf = ConfigParser . ConfigParser ( ) config_path = os . path . join ( os . path . dirname ( os . path . realpath ( __file__ ) ) , 'config.ini' ) workbench_conf . read ( config_path ) server = workbench_conf . get ( 'workbench' , 'server_uri' ) port = workbench_conf . get ( 'workbench' , 'server_port' ) parser = argparse . ArgumentParser ( ) parser . add_argument ( '-s' , '--server' , type = str , default = server , help = 'location of workbench server' ) parser . add_argument ( '-p' , '--port' , type = int , default = port , help = 'port used by workbench server' ) args , commands = parser . parse_known_args ( ) server = str ( args . server ) port = str ( args . port ) return { 'server' : server , 'port' : port , 'commands' : commands } | Grab server info from configuration file |
43,197 | def _make_attachment ( self , attachment , str_encoding = None ) : is_inline_image = False if isinstance ( attachment , MIMEBase ) : name = attachment . get_filename ( ) content = attachment . get_payload ( decode = True ) mimetype = attachment . get_content_type ( ) if attachment . get_content_maintype ( ) == 'image' and attachment [ 'Content-ID' ] is not None : is_inline_image = True name = attachment [ 'Content-ID' ] else : ( name , content , mimetype ) = attachment if mimetype is None and name is not None : mimetype , _ = mimetypes . guess_type ( name ) if mimetype is None : mimetype = DEFAULT_ATTACHMENT_MIME_TYPE try : if isinstance ( content , unicode ) : content = content . encode ( str_encoding ) except NameError : if isinstance ( content , str ) : content = content . encode ( str_encoding ) content_b64 = b64encode ( content ) mj_attachment = { 'Content-type' : mimetype , 'Filename' : name or '' , 'content' : content_b64 . decode ( 'ascii' ) , } return mj_attachment , is_inline_image | Returns EmailMessage . attachments item formatted for sending with Mailjet |
43,198 | def index_data ( self , data , index_name , doc_type ) : if not isinstance ( data , dict ) : raise RuntimeError ( 'Index failed, data needs to be a dict!' ) try : self . els_search . index ( index = index_name , doc_type = doc_type , body = data ) except Exception , error : print 'Index failed: %s' % str ( error ) raise RuntimeError ( 'Index failed: %s' % str ( error ) ) | Take an arbitrary dictionary of data and index it with ELS . |
43,199 | def search ( self , index_name , query ) : try : results = self . els_search . search ( index = index_name , body = query ) return results except Exception , error : error_str = 'Query failed: %s\n' % str ( error ) error_str += '\nIs there a dynamic script in the query?, see www.elasticsearch.org' print error_str raise RuntimeError ( error_str ) | Search the given index_name with the given ELS query . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.