signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def make_grover_circuit(input_qubits, output_qubit, oracle):
    """Find the value recognized by the oracle in sqrt(N) attempts."""
    # For 2 input qubits, that means using the Grover operator only once.
    circuit = cirq.Circuit()
    # Initialize qubits: output in |->, inputs in uniform superposition.
    circuit.append([
        cirq.X(output_qubit),
        cirq.H(output_qubit),
        cirq.H.on_each(*input_qubits),
    ])
    # Query oracle.
    circuit.append(oracle)
    # Grover diffusion operator, appended one op-group at a time so the
    # moment layout matches the original sequential construction.
    for operation in (
        cirq.H.on_each(*input_qubits),
        cirq.X.on_each(*input_qubits),
        cirq.H.on(input_qubits[1]),
        cirq.CNOT(input_qubits[0], input_qubits[1]),
        cirq.H.on(input_qubits[1]),
        cirq.X.on_each(*input_qubits),
        cirq.H.on_each(*input_qubits),
    ):
        circuit.append(operation)
    # Measure the result.
    circuit.append(cirq.measure(*input_qubits, key='result'))
    return circuit
def tickPrice(self, tickerId, field, price, canAutoExecute):
    """tickPrice(EWrapper self, TickerId tickerId, TickType field, double price, int canAutoExecute)"""
    # Delegate straight to the SWIG-generated C++ binding.
    result = _swigibpy.EWrapper_tickPrice(self, tickerId, field, price, canAutoExecute)
    return result
def export_serving(self, filename, tags=None, signature_name='prediction_pipeline'):
    """Converts a checkpoint and graph to a servable for TensorFlow Serving.

    Use TF's `SavedModelBuilder` to export a trained model without tensorpack
    dependency.

    Args:
        filename (str): path for export directory
        tags (list): list of user specified tags; defaults to the standard
            SERVING tag for the active TF version.
        signature_name (str): name of signature for prediction

    Note:
        This produces

        .. code-block:: none

            variables/       # output from the vanilla Saver
                variables.data-?????-of-?????
                variables.index
            saved_model.pb   # a `SavedModel` protobuf

        Currently, we only support a single signature, which is the general
        PredictSignatureDef:
        https://github.com/tensorflow/serving/blob/master/tensorflow_serving/g3doc/signature_defs.md
    """
    # Fix: the old default `tags=[...]` was a mutable default argument and
    # evaluated the TF-version probe at import time; resolve it lazily here.
    if tags is None:
        tags = [tf.saved_model.SERVING if is_tfv2() else tf.saved_model.tag_constants.SERVING]

    self.graph = self.config._maybe_create_graph()
    with self.graph.as_default():
        input_ = PlaceholderInput()  # renamed from `input` to avoid shadowing the builtin
        input_.setup(self.config.input_signature)
        with PredictTowerContext(''):
            self.config.tower_func(*input_.get_input_tensors())

        input_tensors = get_tensors_by_names(self.config.input_names)
        saved_model = tfv1.saved_model.utils
        inputs_signatures = {t.name: saved_model.build_tensor_info(t) for t in input_tensors}
        output_tensors = get_tensors_by_names(self.config.output_names)
        outputs_signatures = {t.name: saved_model.build_tensor_info(t) for t in output_tensors}

        self.config.session_init._setup_graph()
        # we cannot use "self.config.session_creator.create_session()" here
        # since it finalizes the graph
        sess = tfv1.Session(config=tfv1.ConfigProto(allow_soft_placement=True))
        self.config.session_init._run_init(sess)

        builder = tfv1.saved_model.builder.SavedModelBuilder(filename)
        prediction_signature = tfv1.saved_model.signature_def_utils.build_signature_def(
            inputs=inputs_signatures,
            outputs=outputs_signatures,
            method_name=tfv1.saved_model.signature_constants.PREDICT_METHOD_NAME)
        builder.add_meta_graph_and_variables(
            sess, tags, signature_def_map={signature_name: prediction_signature})
        builder.save()
        logger.info("SavedModel created at {}.".format(filename))
def get_classes_in_module(module, superclass=object):
    """Return all classes in *module* that descend from *superclass*.

    The *superclass* itself is excluded from the result.

    Args:
        module: a module object to scan (builtins.module).
        superclass: the class that results must be subclasses of.

    Returns:
        list of class objects found in *module*.
    """
    found = []
    for attr_name in dir(module):
        # Fix: use getattr() instead of calling module.__getattribute__ directly.
        attr = getattr(module, attr_name)
        try:
            # Fix: identity comparison for "is not the superclass itself".
            if issubclass(attr, superclass) and attr is not superclass:
                found.append(attr)
        except TypeError:
            # "issubclass() arg 1 must be a class" -- attr is not a class.
            pass
        except RuntimeError:
            # Some attributes raise on introspection; skip them silently,
            # matching the original best-effort behavior.
            pass
    return found
def points(points, T_points_world=None, color=np.array([0, 1, 0]), scale=0.01,
           n_cuts=20, subsample=None, random=False, name=None):
    """Scatter a point cloud in pose T_points_world.

    Parameters
    ----------
    points : autolab_core.BagOfPoints or (n,3) float
        The point set to visualize.
    T_points_world : autolab_core.RigidTransform
        Pose of points, specified as a transformation from point frame to
        world frame.
    color : (3,) or (n,3) float
        Color of whole cloud or per-point colors.
    scale : float
        Radius of each point.
    n_cuts : int
        Number of longitude/latitude lines on sphere points.
    subsample : int
        Parameter of subsampling to display fewer points.
    name : str
        A name for the object to be added.
    """
    if isinstance(points, BagOfPoints):
        if points.dim != 3:
            raise ValueError('BagOfPoints must have dimension 3xN!')
    else:
        # Raw numpy input: normalize to an (N,3) array, then wrap as PointCloud.
        if type(points) is not np.ndarray:
            raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
        if len(points.shape) == 1:
            # A single (3,) point becomes a (1,3) array.
            points = points[:, np.newaxis].T
        if len(points.shape) != 2 or points.shape[1] != 3:
            raise ValueError('Numpy array of points must have dimension (N,3)')
        frame = 'points'
        if T_points_world:
            frame = T_points_world.from_frame
        # PointCloud stores data as (3,N), hence the transpose.
        points = PointCloud(points.T, frame=frame)

    color = np.array(color)
    if subsample is not None:
        num_points = points.num_points
        points, inds = points.subsample(subsample, random=random)
        # Keep per-point colors aligned with the subsampled indices.
        if color.shape[0] == num_points and color.shape[1] == 3:
            color = color[inds, :]

    # transform into world frame
    if points.frame != 'world':
        if T_points_world is None:
            # Identity transform when no pose was supplied.
            T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
        points_world = T_points_world * points
    else:
        points_world = points

    point_data = points_world.data
    if len(point_data.shape) == 1:
        point_data = point_data[:, np.newaxis]
    point_data = point_data.T

    # Material color: a single (3,) color, or the first row of per-point colors.
    mpcolor = color
    if len(color.shape) > 1:
        mpcolor = color[0]
    mp = MaterialProperties(color=np.array(mpcolor), k_a=0.5, k_d=0.3,
                            k_s=0.0, alpha=10.0, smooth=True)

    # For each point, create a sphere of the specified color and size.
    sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
    # One stacked 4x4 identity per point; every 4th row starting at row 3 is
    # the translation row of a pose matrix -- assumes InstancedSceneObject
    # consumes row-major stacked transforms (TODO confirm layout).
    raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
    raw_pose_data[3::4, :3] = point_data
    instcolor = None
    if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
        instcolor = color
    obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data,
                               colors=instcolor, material=mp)
    if name is None:
        # Random unique name so repeated calls don't overwrite each other.
        name = str(uuid.uuid4())
    Visualizer3D._scene.add_object(name, obj)
def euclidean(a, b):
    """Return the Euclidean (L2) distance between points ``a`` and ``b``."""
    difference = np.subtract(a, b)
    return np.linalg.norm(difference)
def stop(self):
    """Stop a running SocketIO web server.

    This method must be called from a HTTP or SocketIO handler function.
    """
    async_mode = self.server.eio.async_mode
    if async_mode == 'threading':
        shutdown = flask.request.environ.get('werkzeug.server.shutdown')
        if not shutdown:
            raise RuntimeError('Cannot stop unknown web server')
        shutdown()
    elif async_mode == 'eventlet':
        raise SystemExit
    elif async_mode == 'gevent':
        self.wsgi_server.stop()
def dump_memdb(self, with_source_contents=True, with_names=True):
    """Dumps a sourcemap in MemDB format into bytes."""
    out_len = _ffi.new('unsigned int *')
    buf = rustcall(
        _lib.lsm_view_dump_memdb,
        self._get_ptr(),
        out_len,
        with_source_contents,
        with_names,
    )
    # Copy out of the Rust-owned buffer, then always free it.
    try:
        return _ffi.unpack(buf, out_len[0])
    finally:
        _lib.lsm_buffer_free(buf)
def summary(self):
    '''Create a dictionary with a summary of the updates in the collection.

    Returns:
        dict: Counts of Total / Available / Downloaded / Installed updates,
        plus per-category and per-severity tallies. Returns the string
        'Nothing to return' when the collection is empty.

    Code Example:

    .. code-block:: python

        import salt.utils.win_update
        updates = salt.utils.win_update.Updates()
        updates.summary()
    '''
    # https://msdn.microsoft.com/en-us/library/windows/desktop/aa386099(v=vs.85).aspx
    if self.count() == 0:
        return 'Nothing to return'

    is_true = salt.utils.data.is_true
    results = {'Total': 0, 'Available': 0, 'Downloaded': 0,
               'Installed': 0, 'Categories': {}, 'Severity': {}}

    for update in self.updates:
        results['Total'] += 1
        downloaded = is_true(update.IsDownloaded)
        installed = is_true(update.IsInstalled)

        # Updates available for download
        if not downloaded and not installed:
            results['Available'] += 1
        # Updates downloaded awaiting install
        if downloaded and not installed:
            results['Downloaded'] += 1
        # Updates installed
        if installed:
            results['Installed'] += 1

        # Each update can belong to several categories, so the category sum
        # can exceed the total count.
        for category in update.Categories:
            cat_name = category.Name
            results['Categories'][cat_name] = results['Categories'].get(cat_name, 0) + 1

        # Severity summary (skipped when the update has no MSRC severity).
        if update.MsrcSeverity:
            severity = update.MsrcSeverity
            results['Severity'][severity] = results['Severity'].get(severity, 0) + 1

    return results
def get_details(self):
    """:rtype list[VmDataField]"""
    model = self.model
    details = []
    # Each deployment model contributes its own identifying property; the
    # checks are independent (not elif) to mirror the original behavior.
    if isinstance(model, vCenterCloneVMFromVMResourceModel):
        details.append(VmDetailsProperty(key='Cloned VM Name', value=model.vcenter_vm))
    if isinstance(model, VCenterDeployVMFromLinkedCloneResourceModel):
        details.append(VmDetailsProperty(
            key='Cloned VM Name',
            value='{0} (snapshot: {1})'.format(model.vcenter_vm, model.vcenter_vm_snapshot)))
    if isinstance(model, vCenterVMFromImageResourceModel):
        details.append(VmDetailsProperty(key='Base Image Name',
                                         value=model.vcenter_image.split('/')[-1]))
    if isinstance(model, vCenterVMFromTemplateResourceModel):
        details.append(VmDetailsProperty(key='Template Name', value=model.vcenter_template))
    return details
def get_contents(diff_part):
    """Returns a tuple of old content and new content."""
    old_contents = get_old_contents(get_old_sha(diff_part), get_old_filename(diff_part))
    new_contents = get_new_contents(get_new_filename(diff_part))
    return old_contents, new_contents
def create_process(self, command, shell=True, stdout=None, stderr=None, env=None):
    """Execute a process using subprocess.Popen, setting the backend's DISPLAY."""
    if env is None:
        env = dict(os.environ)
    # Point the child at this backend's X display. Note: a caller-supplied
    # env dict is mutated in place, matching the original behavior.
    env['DISPLAY'] = self.display
    return subprocess.Popen(command, shell=shell, stdout=stdout, stderr=stderr, env=env)
def _trivialgraph_default_namer(thing, is_edge=True):
    """Returns a "good" string for thing in printed graphs."""
    if is_edge:
        # Anonymous/temporary wires print as empty labels.
        if thing.name is None or thing.name.startswith('tmp'):
            return ''
        return '/'.join([thing.name, str(len(thing))])
    if isinstance(thing, Const):
        return str(thing.val)
    if isinstance(thing, WireVector):
        return thing.name or '??'
    try:
        return thing.op + str(thing.op_param or '')
    except AttributeError:
        raise PyrtlError('no naming rule for "%s"' % str(thing))
def dependents(self, on_predicate=None, from_predicate=None):
    """Returns a map from targets that satisfy the from_predicate to targets
    they depend on that satisfy the on_predicate.

    :API: public
    """
    core = set(self.targets(on_predicate))
    dependees = defaultdict(set)
    for target in self.targets(from_predicate):
        # Only record dependencies that are in the core set; targets with no
        # matching dependencies get no entry at all.
        matching = core.intersection(target.dependencies)
        if matching:
            dependees[target].update(matching)
    return dependees
def _setup(self):
    """For each name in ``self.weights``, look up the corresponding parameter
    on the wrapped module, delete it, and re-register its tensor under a
    ``_raw``-suffixed name.

    Args:
        None
    Returns:
        None
    """
    if isinstance(self.module, torch.nn.RNNBase):
        # cuDNN RNNs flatten their weights into one buffer; disable that so
        # the re-registered parameters are used directly.
        self.module.flatten_parameters = noop
    for weight_name in self.weights:
        original = getattr(self.module, weight_name)
        del self.module._parameters[weight_name]
        self.module.register_parameter(weight_name + '_raw', nn.Parameter(original.data))
def append_table(src_con, dst_con, table_name):
    """Append a table from source database to destination database.

    :param SimpleSQLite src_con: Connection to the source database.
    :param SimpleSQLite dst_con: Connection to the destination database.
    :param str table_name: Table name to append.
    :return: |True| if the append operation succeed.
    :rtype: bool
    :raises simplesqlite.TableNotFoundError: |raises_verify_table_existence|
    :raises ValueError: If attributes of the table are different from each other.
    """
    logger.debug(
        "append table: src={src_db}.{src_tbl}, dst={dst_db}.{dst_tbl}".format(
            src_db=src_con.database_path,
            src_tbl=table_name,
            dst_db=dst_con.database_path,
            dst_tbl=table_name,
        ))

    src_con.verify_table_existence(table_name)
    dst_con.validate_access_permission(["w", "a"])

    if dst_con.has_table(table_name):
        # An existing destination table must match the source schema exactly.
        src_attrs = src_con.fetch_attr_names(table_name)
        dst_attrs = dst_con.fetch_attr_names(table_name)
        if src_attrs != dst_attrs:
            raise ValueError(dedent("""
                source and destination attribute is different from each other
                src: {}
                dst: {}
                """.format(src_attrs, dst_attrs)))

    primary_key, index_attrs, type_hints = extract_table_metadata(src_con, table_name)
    dst_con.create_table_from_tabledata(
        src_con.select_as_tabledata(table_name, type_hints=type_hints),
        primary_key=primary_key,
        index_attrs=index_attrs,
    )
    return True
def datediff(end, start):
    """Returns the number of days from `start` to `end`.

    >>> df = spark.createDataFrame([('2015-04-08', '2015-05-10')], ['d1', 'd2'])
    >>> df.select(datediff(df.d2, df.d1).alias('diff')).collect()
    [Row(diff=32)]
    """
    jvm_functions = SparkContext._active_spark_context._jvm.functions
    jc = jvm_functions.datediff(_to_java_column(end), _to_java_column(start))
    return Column(jc)
def getFormattedHTML(self, indent=' '):
    '''getFormattedHTML - Get formatted xhtml of this document, replacing the
    original whitespace with a pretty-printed version.

    @param indent - space/tab/newline of each level of indent, or integer for
        how many spaces per level
    @return - <str> Formatted html

    @see getHTML - Get HTML with original whitespace
    @see getMiniHTML - Get HTML with only functional whitespace remaining
    '''
    from .Formatter import AdvancedHTMLFormatter
    pretty = AdvancedHTMLFormatter(indent, None)  # Do not double-encode
    pretty.feed(self.getHTML())
    return pretty.getHTML()
def bind(self, field_name, parent):
    """Initializes the field name and parent for the field instance.

    Called when a field is added to the parent serializer instance.
    Taken from DRF and modified to support drf_haystack multiple index
    functionality.
    """
    # In order to enforce a consistent style, we error if a redundant
    # 'source' argument has been used. For example:
    # my_field = serializer.CharField(source='my_field')
    assert self.source != field_name, (
        "It is redundant to specify `source='%s'` on field '%s' in "
        "serializer '%s', because it is the same as the field name. "
        "Remove the `source` keyword argument." %
        (field_name, self.__class__.__name__, parent.__class__.__name__)
    )

    self.field_name = field_name
    self.parent = parent

    # `self.label` should default to being based on the field name.
    if self.label is None:
        self.label = field_name.replace('_', ' ').capitalize()

    # self.source should default to being the same as the field name.
    if self.source is None:
        self.source = self.convert_field_name(field_name)

    # '*' means "pass the whole instance"; otherwise pre-split the dotted
    # attribute path used when serializing / populating validated data.
    self.source_attrs = [] if self.source == '*' else self.source.split('.')
def get_type_data(name):
    """Return dictionary representation of type.

    Can be used to initialize primordium.type.primitives.Type.
    """
    name = name.upper()
    # Probe the lookup tables in priority order: modern, ancient, alternate.
    lookup = (
        ('Calendar Types', CALENDAR_TYPES),
        ('Ancient Calendar Types', ANCIENT_CALENDAR_TYPES),
        ('Alternative Calendar Types', ALTERNATE_CALENDAR_TYPES),
    )
    for domain, table in lookup:
        if name in table:
            calendar_name = table[name]
            break
    else:
        raise NotFound('Calendar Type: ' + name)
    return {
        'authority': 'okapia.net',
        'namespace': 'calendar',
        'identifier': name,
        'domain': domain,
        'display_name': calendar_name + ' Calendar Type',
        'display_label': calendar_name,
        'description': ('The time type for the ' + calendar_name + ' calendar.'),
    }
def parse_pdb_file(self):
    """Runs the PDB parser over ``self.pdb_lines``.

    Builds ``self.pdb_parse_tree``: record types with a dedicated handler in
    ``self.proc_functions`` are dispatched to it; any other record lines are
    collected verbatim under the 'info' branch.
    """
    self.pdb_parse_tree = {'info': {}, 'data': {self.state: {}}}
    try:
        for line in self.pdb_lines:
            self.current_line = line
            # Columns 1-6 of a PDB line hold the record name (e.g. ATOM).
            record_name = line[:6].strip()
            if record_name in self.proc_functions:
                self.proc_functions[record_name]()
            else:
                # No dedicated handler: stash the raw line under 'info'.
                if record_name not in self.pdb_parse_tree['info']:
                    self.pdb_parse_tree['info'][record_name] = []
                self.pdb_parse_tree['info'][record_name].append(line)
    except EOFError:
        # Raised by END record -- deliberate early termination of parsing.
        pass
    if self.new_labels:
        # Persist newly created labels -- presumably a module-level DB
        # session; confirm commit semantics with the ampal data layer.
        ampal_data_session.commit()
    return
def segment_meander_angles(neurites, neurite_type=NeuriteType.all):
    '''Inter-segment opening angles in a section'''
    per_section = map_sections(sectionfunc.section_meander_angles, neurites, neurite_type)
    return list(chain.from_iterable(per_section))
def backward_transfer_pair(
        backward_channel: NettingChannelState,
        payer_transfer: LockedTransferSignedState,
        pseudo_random_generator: random.Random,
        block_number: BlockNumber,
) -> Tuple[Optional[MediationPairState], List[Event]]:
    """Sends a transfer backwards, allowing the previous hop to try a new route.

    When all the routes available for this node failed, send a transfer
    backwards with the same amount and secrethash, allowing the previous hop
    to do a retry.

    Args:
        backward_channel: The original channel which sent the mediated
            transfer to this node.
        payer_transfer: The *latest* payer transfer which is backing the
            mediation.
        block_number: The current block number.

    Returns:
        The mediator pair and the corresponding refund event; the pair is
        None (and the event list empty) when the refund lock would expire
        too soon to be safe.
    """
    transfer_pair = None
    events: List[Event] = list()

    lock = payer_transfer.lock
    lock_timeout = BlockTimeout(lock.expiration - block_number)

    # Ensure the refund transfer's lock has a safe expiration, otherwise don't
    # do anything and wait for the received lock to expire.
    if is_channel_usable(backward_channel, lock.amount, lock_timeout):
        message_identifier = message_identifier_from_prng(pseudo_random_generator)
        refund_transfer = channel.send_refundtransfer(
            channel_state=backward_channel,
            initiator=payer_transfer.initiator,
            target=payer_transfer.target,
            # The refunded amount may be reduced by mediation fees.
            amount=get_lock_amount_after_fees(lock, backward_channel),
            message_identifier=message_identifier,
            payment_identifier=payer_transfer.payment_identifier,
            expiration=lock.expiration,
            secrethash=lock.secrethash,
        )
        transfer_pair = MediationPairState(
            payer_transfer,
            backward_channel.partner_state.address,
            refund_transfer.transfer,
        )
        events.append(refund_transfer)

    return (transfer_pair, events)
def points ( self , points ) : """set points without copying"""
if not isinstance ( points , np . ndarray ) : raise TypeError ( 'Points must be a numpy array' ) # get the unique coordinates along each axial direction x = np . unique ( points [ : , 0 ] ) y = np . unique ( points [ : , 1 ] ) z = np . unique ( points [ : , 2 ] ) nx , ny , nz = len ( x ) , len ( y ) , len ( z ) # TODO : this needs to be tested ( unique might return a tuple ) dx , dy , dz = np . unique ( np . diff ( x ) ) , np . unique ( np . diff ( y ) ) , np . unique ( np . diff ( z ) ) ox , oy , oz = np . min ( x ) , np . min ( y ) , np . min ( z ) # Build the vtk object self . _from_specs ( ( nx , ny , nz ) , ( dx , dy , dz ) , ( ox , oy , oz ) ) # self . _ point _ ref = points self . Modified ( )
def createL2456Columns(network, networkConfig):
    """Create a network consisting of multiple L2456 columns as described in
    the file comments above.

    Args:
        network: the Network instance that regions and links are added to.
        networkConfig (dict): must contain "numCorticalColumns",
            "randomSeedBase", and per-layer parameter dicts
            ("L2Params", "L4Params", "L5Params", "L6Params").

    Returns:
        The network with all columns created and the L2/L5 layers of every
        column laterally linked to every other column.
    """
    # Create each column.
    numCorticalColumns = networkConfig["numCorticalColumns"]
    # Fix: was `xrange`, inconsistent with the `range` loops below and a
    # NameError on Python 3; `range` behaves identically here.
    for i in range(numCorticalColumns):
        networkConfigCopy = copy.deepcopy(networkConfig)
        randomSeedBase = networkConfigCopy["randomSeedBase"]
        # Give each column a distinct but reproducible seed per layer.
        for layerParams in ("L2Params", "L4Params", "L5Params", "L6Params"):
            networkConfigCopy[layerParams]["seed"] = randomSeedBase + i
        # L2 and L5 receive lateral input from every other column.
        networkConfigCopy["L2Params"]["numOtherCorticalColumns"] = numCorticalColumns - 1
        networkConfigCopy["L5Params"]["numOtherCorticalColumns"] = numCorticalColumns - 1

        suffix = "_" + str(i)
        network = _createL2456Column(network, networkConfigCopy, suffix)

    # Now connect the L2 columns laterally to every other L2 column, and
    # the same for L5 columns.
    for i in range(numCorticalColumns):
        suffixSrc = "_" + str(i)
        for j in range(numCorticalColumns):
            if i != j:
                suffixDest = "_" + str(j)
                for columnName in ("L2Column", "L5Column"):
                    network.link(columnName + suffixSrc, columnName + suffixDest,
                                 "UniformLink", "",
                                 srcOutput="feedForwardOutput",
                                 destInput="lateralInput")

    enableProfiling(network)
    return network
def load_extra_vi_page_navigation_bindings():
    """Key bindings for scrolling up and down through pages.

    These are separate bindings, because GNU readline doesn't have them.
    """
    registry = ConditionalRegistry(Registry(), ViMode())
    key_to_handler = (
        (Keys.ControlF, scroll_forward),
        (Keys.ControlB, scroll_backward),
        (Keys.ControlD, scroll_half_page_down),
        (Keys.ControlU, scroll_half_page_up),
        (Keys.ControlE, scroll_one_line_down),
        (Keys.ControlY, scroll_one_line_up),
        (Keys.PageDown, scroll_page_down),
        (Keys.PageUp, scroll_page_up),
    )
    for key, handler in key_to_handler:
        registry.add_binding(key)(handler)
    return registry
def from_client_config(cls, client_config, scopes, **kwargs):
    """Creates a :class:`requests_oauthlib.OAuth2Session` from client
    configuration loaded from a Google-format client secrets file.

    Args:
        client_config (Mapping[str, Any]): The client configuration in the
            Google `client secrets`_ format.
        scopes (Sequence[str]): The list of scopes to request during the flow.
        kwargs: Any additional parameters passed to
            :class:`requests_oauthlib.OAuth2Session`

    Returns:
        Flow: The constructed Flow instance.

    Raises:
        ValueError: If the client configuration is not in the correct format.

    .. _client secrets:
        https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
    """
    # Determine the app type; 'web' takes precedence over 'installed'.
    for client_type in ('web', 'installed'):
        if client_type in client_config:
            break
    else:
        raise ValueError('Client secrets must be for a web or installed app.')

    session, client_config = (
        google_auth_oauthlib.helpers.session_from_client_config(
            client_config, scopes, **kwargs))

    redirect_uri = kwargs.get('redirect_uri', None)
    return cls(session, client_type, client_config, redirect_uri)
def default_get_arg_names_from_class_name(class_name):
    """Converts normal class names into normal arg names.

    Normal class names are assumed to be CamelCase with an optional leading
    underscore. Normal arg names are assumed to be lower_with_underscores.

    Args:
        class_name: a class name, e.g., "FooBar" or "_FooBar"
    Returns:
        all likely corresponding arg names, e.g., ["foo_bar"]
    """
    remainder = class_name
    # Strip at most one leading underscore (private-class convention).
    if remainder.startswith('_'):
        remainder = remainder[1:]
    words = []
    match = re.match(r'([A-Z][a-z]+)(.*)', remainder)
    while match:
        words.append(match.group(1))
        remainder = match.group(2)
        match = re.match(r'([A-Z][a-z]+)(.*)', remainder)
    if not words:
        return []
    return ['_'.join(word.lower() for word in words)]
def _init_catalog(self, proxy=None, runtime=None):
    """Initialize this session as an OsidCatalog based session."""
    self._init_proxy_and_runtime(proxy, runtime)
    osid_name = self._session_namespace.split('.')[0]
    try:
        config = self._runtime.get_configuration()
        parameter_id = Id('parameter:' + osid_name + 'CatalogingProviderImpl@mongo')
        provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
        # need to add version argument
        self._cataloging_manager = self._runtime.get_manager('CATALOGING', provider_impl)
    except (AttributeError, KeyError, errors.NotFound):
        # Cataloging support is optional; silently proceed without a manager.
        pass
def check_extensions(module_name, module_path):
    """Check for extensions to boto modules.

    Should be called in the __init__.py file of all boto modules.
    See: http://code.google.com/p/boto/wiki/ExtendModules for details.
    """
    version = config.get('Boto', '%s_extend' % module_name, None)
    if not version:
        return
    extension_dir = os.path.join(module_path[0], version)
    if os.path.isdir(extension_dir):
        log.info('extending module %s with: %s' % (module_name, extension_dir))
        # Prepend so the extension shadows the stock module contents.
        module_path.insert(0, extension_dir)
def expand_in_basis ( self , basis_states = None , hermitian = False ) : """Write the operator as an expansion into all : class : ` KetBras < . KetBra > ` spanned by ` basis _ states ` . Args : basis _ states ( list or None ) : List of basis states ( : class : ` . State ` instances ) into which to expand the operator . If None , use the operator ' s ` space . basis _ states ` hermitian ( bool ) : If True , assume that the operator is Hermitian and represent all elements in the lower triangle of the expansion via : class : ` OperatorPlusMinusCC ` . This is meant to enhance readability Raises : . BasisNotSetError : If ` basis _ states ` is None and the operator ' s Hilbert space has no well - defined basis Example : > > > hs = LocalSpace ( 1 , basis = ( ' g ' , ' e ' ) ) > > > op = LocalSigma ( ' g ' , ' e ' , hs = hs ) + LocalSigma ( ' e ' , ' g ' , hs = hs ) > > > print ( ascii ( op , sig _ as _ ketbra = False ) ) sigma _ e , g ^ ( 1 ) + sigma _ g , e ^ ( 1) > > > print ( ascii ( op . expand _ in _ basis ( ) ) ) | e > < g | ^ ( 1 ) + | g > < e | ^ ( 1) > > > print ( ascii ( op . expand _ in _ basis ( hermitian = True ) ) ) | g > < e | ^ ( 1 ) + c . c ."""
from qnet . algebra . core . state_algebra import KetBra # KetBra is imported locally to avoid circular imports if basis_states is None : basis_states = list ( self . space . basis_states ) else : basis_states = list ( basis_states ) diag_terms = [ ] terms = [ ] for i , ket_i in enumerate ( basis_states ) : for j , ket_j in enumerate ( basis_states ) : if i > j and hermitian : continue op_ij = ( ket_i . dag ( ) * self * ket_j ) . expand ( ) ketbra = KetBra ( ket_i , ket_j ) term = op_ij * ketbra if term is not ZeroOperator : if i == j : diag_terms . append ( op_ij * ketbra ) else : terms . append ( op_ij * ketbra ) if hermitian : res = OperatorPlus . create ( * diag_terms ) if len ( terms ) > 0 : res = res + OperatorPlusMinusCC ( OperatorPlus . create ( * terms ) ) return res else : return ( OperatorPlus . create ( * diag_terms ) + OperatorPlus . create ( * terms ) )
def save_site(self, create=True):
    """Save environment settings in the directory that need to be saved even
    when creating only a new sub-site env.

    Args:
        create: when True, also register ``self.site_name`` in the loaded
            site list before persisting.
    """
    self._load_sites()
    if create:
        self.sites.append(self.site_name)
    # NOTE(review): the original formatting was ambiguous about whether this
    # call is inside the `if create:` branch; it is assumed unconditional
    # (the docstring implies saving happens even for sub-site envs) --
    # confirm against upstream history.
    task.save_new_site(self.site_name, self.sitedir, self.target, self.port,
                       self.address, self.site_url, self.passwords)
def _load_stats(self):
    """Load and parse the webpack-stats file, retrying briefly on bad JSON."""
    last_attempt = 2
    for attempt in range(last_attempt + 1):
        try:
            with self.stats_file.open() as stats:
                return json.load(stats)
        except ValueError:
            # If we failed to parse the JSON, it's possible that the webpack
            # process is writing to it concurrently and it's in a bad state.
            # Sleep and retry; give up after the final attempt.
            if attempt == last_attempt:
                raise
            time.sleep(attempt * 0.2)
        except IOError:
            raise IOError(
                "Could not read stats file {0}. Make sure you are using the "
                "webpack-bundle-tracker plugin".format(self.stats_file))
def image_import(infile, force):
    """Import image anchore data from a JSON file."""
    ecode = 0
    try:
        with open(infile, 'r') as handle:
            savelist = json.loads(handle.read())
    except Exception as err:
        anchore_print_err("could not load input file: " + str(err))
        ecode = 1

    if ecode == 0:
        for record in savelist:
            try:
                imageId = record['image']['imageId']
                if contexts['anchore_db'].is_image_present(imageId) and not force:
                    anchore_print("image (" + str(imageId) + ") already exists in DB, skipping import.")
                    continue
                imagedata = record['image']['imagedata']
                try:
                    # On a failed save, roll back the partial image before
                    # surfacing the error.
                    if not contexts['anchore_db'].save_image_new(imageId, report=imagedata):
                        contexts['anchore_db'].delete_image(imageId)
                        raise Exception("save to anchore DB failed")
                except Exception as err:
                    contexts['anchore_db'].delete_image(imageId)
                    raise err
            except Exception as err:
                anchore_print_err("could not store image (" + str(imageId) + ") from import file: " + str(err))
                ecode = 1

    sys.exit(ecode)
def apply_T4(word):
    '''An agglutination diphthong that ends in /u, y/ usually contains a
    syllable boundary when -C# or -CCV follow, e.g., [lau.ka.us],
    [va.ka.ut.taa].'''
    syllables = word.split('.')
    for index, syllable in enumerate(syllables):
        # index % 2 != 0 prevents this rule from applying to first, third,
        # etc. syllables, which receive stress (WSP)
        if not (is_consonant(syllable[-1]) and index % 2 != 0):
            continue
        is_last = index + 1 == len(syllables)
        if is_last or is_consonant(syllables[index + 1][0]):
            vv = u_or_y_final_diphthongs(syllable)
            if vv and not is_long(vv.group(1)):
                # Insert the boundary after the first vowel of the diphthong.
                split_at = vv.start(1) + 1
                syllables[index] = syllable[:split_at] + '.' + syllable[split_at:]
    WORD = '.'.join(syllables)
    RULE = ' T4' if word != WORD else ''
    return WORD, RULE
def _create_main_config(cls, overrides=None):
    """See comment block at top of 'rezconfig' describing how the main config
    is assembled."""
    # Order matters: package defaults, then $REZ_CONFIG_FILE entries, then
    # the user's ~/.rezconfig (later files override earlier ones).
    filepaths = [get_module_root_config()]
    env_filepath = os.getenv("REZ_CONFIG_FILE")
    if env_filepath:
        filepaths.extend(env_filepath.split(os.pathsep))
    filepaths.append(os.path.expanduser("~/.rezconfig"))
    return Config(filepaths, overrides)
def enable_precompute(panel):
    """Schedule a precompute task for `panel`.

    Builds the task code from the panel's data source (either a Metis
    querybuilder query or raw user code wrapped in a unicode triple-quoted
    literal), derives the run interval from the precompute bucket width,
    schedules it via the scheduler client, and returns the task id.

    Raises:
        ValueError: if the panel's timeframe mode is not 'recent' or 'range'.
        RuntimeError: if the scheduler reports a failure.
    """
    use_metis = panel['data_source']['source_type'] == 'querybuilder'
    if use_metis:
        query = panel['data_source']['query']
    else:
        # Embed the user's code as a unicode triple-quoted literal.
        query = "u'''%s'''" % panel['data_source']['code']
    precompute = panel['data_source']['precompute']
    timeframe = panel['data_source']['timeframe']
    bucket_width = precompute['bucket_width']['value']
    time_scale = precompute['bucket_width']['scale']['name']
    bucket_width_seconds = get_seconds(bucket_width, time_scale)
    mode = timeframe['mode']['value']
    if mode == 'recent':
        untrusted_time = precompute['untrusted_time']['value']
        untrusted_time_scale = precompute['untrusted_time']['scale']['name']
        untrusted_time_seconds = get_seconds(untrusted_time,
                                             untrusted_time_scale)
        # Schedule the task with an interval equal to the bucket_width.
        interval = bucket_width_seconds
    elif mode == 'range':
        untrusted_time_seconds = 0
        # Schedule the task with an interval of 0 so it only runs once.
        interval = 0
    else:
        # BUG FIX: previously an unknown mode fell through and raised a
        # confusing NameError on `interval`; fail explicitly instead.
        raise ValueError('Unknown timeframe mode: %r' % mode)
    task_code = PRECOMPUTE_INITIALIZATION_CODE % (
        query, timeframe, bucket_width_seconds, untrusted_time_seconds,
        use_metis)
    result = scheduler_client.schedule(task_code, interval)
    if result['status'] != 'success':
        raise RuntimeError(result.get('reason'))
    return result['id']
def get_objectives(self):
    """Gets the objective list resulting from the search.

    return: (osid.learning.ObjectiveList) - the objective list
    raise: IllegalState - list already retrieved
    *compliance: mandatory -- This method must be implemented.*
    """
    if self.retrieved:
        raise errors.IllegalState('List has already been retrieved.')
    # One-shot accessor: mark the results consumed before handing them out.
    self.retrieved = True
    return objects.ObjectiveList(self._results, runtime=self._runtime)
def capture_update_records(records):
    """Writes all updated configuration info to DynamoDB.

    For each event record: fetch the historical base info, describe the
    security group, promote identifying fields to the top level of the
    item, and persist it as a CurrentSecurityGroupModel revision.
    """
    for rec in records:
        data = cloudwatch.get_historical_base_info(rec)
        group = describe_group(rec, cloudwatch.get_region(rec))
        if len(group) > 1:
            raise Exception(f'[X] Multiple groups found. Record: {rec}')
        if not group:
            # The group may have been deleted between the event and lookup.
            LOG.warning(f'[?] No group information found. Record: {rec}')
            continue
        group = group[0]
        # Determine event data for group - and pop off items that are going
        # to the top-level:
        LOG.debug(f'Processing group. Group: {group}')
        # NOTE: dict literals evaluate in order, so 'GroupId' is read via
        # group['GroupId'] before the later pop('GroupId') removes it.
        data.update({
            'GroupId': group['GroupId'],
            'GroupName': group.pop('GroupName'),
            'VpcId': group.pop('VpcId', None),
            'arn': get_arn(group.pop('GroupId'), cloudwatch.get_region(rec), group.pop('OwnerId')),
            'Region': cloudwatch.get_region(rec)
        })
        data['Tags'] = pull_tag_dict(group)
        # Set the remaining items to the configuration:
        data['configuration'] = group
        # Set the version:
        data['version'] = VERSION
        LOG.debug(f'[+] Writing Dynamodb Record. Records: {data}')
        current_revision = CurrentSecurityGroupModel(**data)
        current_revision.save()
def read_chip_sn(self):
    '''Reading Chip S/N

    Note
    ----
    Bits [MSB-LSB] | [15]     | [14-6]       | [5-0]
    Content        | reserved | wafer number | chip number
    '''
    # Put the chip into configuration mode before touching registers.
    commands = []
    commands.extend(self.register.get_commands("ConfMode"))
    self.register_utils.send_commands(commands)
    with self.readout(fill_buffer=True, callback=None, errback=None):
        if self.register.fei4b:
            # FE-I4B only: pulse Efuse_Sense so the fuse value is latched
            # before reading (toggle 1 -> GlobalPulse -> 0).
            commands = []
            self.register.set_global_register_value('Efuse_Sense', 1)
            commands.extend(self.register.get_commands("WrRegister", name=['Efuse_Sense']))
            commands.extend(self.register.get_commands("GlobalPulse", Width=0))
            self.register.set_global_register_value('Efuse_Sense', 0)
            commands.extend(self.register.get_commands("WrRegister", name=['Efuse_Sense']))
            self.register_utils.send_commands(commands)
        # Enable address output, then issue a read of the Chip_SN register.
        commands = []
        self.register.set_global_register_value('Conf_AddrEnable', 1)
        commands.extend(self.register.get_commands("WrRegister", name=['Conf_AddrEnable']))
        chip_sn_address = self.register.get_global_register_attributes("addresses", name="Chip_SN")
        commands.extend(self.register.get_commands("RdRegister", addresses=chip_sn_address))
        self.register_utils.send_commands(commands)
    data = self.read_data()
    if data.shape[0] == 0:
        logging.error('Chip S/N: No data')
        return
    # Scan the raw words for Address Record followed by Value Record pairs;
    # the VR payload carries the serial number.
    read_values = []
    for index, word in enumerate(np.nditer(data)):
        fei4_data_word = FEI4Record(word, self.register.chip_flavor)
        if fei4_data_word == 'AR':
            fei4_next_data_word = FEI4Record(data[index + 1], self.register.chip_flavor)
            if fei4_next_data_word == 'VR':
                read_value = fei4_next_data_word['value']
                read_values.append(read_value)
    # Intentionally disabled: returning to RunMode here is left to callers.
    # commands = []
    # commands.extend(self.register.get_commands("RunMode"))
    # self.register_utils.send_commands(commands)
    if len(read_values) == 0:
        logging.error('No Chip S/N was found')
    elif len(read_values) == 1:
        logging.info('Chip S/N: %d', read_values[0])
    else:
        logging.warning('Ambiguous Chip S/N: %s', read_values)
def parse_content(self, text):
    """Find the Usage section in `text` and store it: `raw_content`
    keeps the match verbatim, `formal_content` holds the title-less,
    leading-empty-line-stripped version."""
    flags = re.DOTALL
    if not self.case_sensitive:
        flags |= re.IGNORECASE
    pattern = self.usage_re_str.format(self.usage_name)
    match = re.search(pattern, text, flags=flags)
    if match is None:
        return
    groups = match.groupdict()
    logger.debug(groups)
    self.raw_content = groups['raw']
    if groups['sep'] in ('\n', '\r\n'):
        # Title ends the line; the captured section is already headless.
        self.formal_content = groups['section']
        return
    # Blank out the title in place so column alignment is preserved.
    padding = ''.ljust(len(groups['name']))
    headless = match.expand('%s\\g<sep>\\g<section>' % padding)
    self.formal_content = self.drop_started_empty_lines(headless).rstrip()
def fasta_verifier(entries, ambiguous=False):
    """Raise FormatError if invalid FASTA format is detected.

    Args:
        entries (list): FastaEntry instances to validate.
        ambiguous (bool): permit IUPAC ambiguity codes, i.e. permit
            non-ACGTU bases.

    Raises:
        FormatError: with a descriptive message when an entry's header or
            sequence does not match the expected format.
    """
    bases = 'ACGTURYKMSWBDHVNX' if ambiguous else 'ACGTU'
    regex = r'^>.+{0}[{1}]+{0}$'.format(os.linesep, bases)
    delimiter = r'{0}'.format(os.linesep)
    for entry in entries:
        try:
            entry_verifier([entry.write()], regex, delimiter)
        except FormatError as error:
            if error.part == 0:
                raise FormatError(
                    message='Unknown Header Error with {0}'.format(entry.id))
            if error.part == 1:
                raise FormatError(
                    message='{0} contains a base not in '
                            '[{1}]'.format(entry.id, bases))
            raise FormatError(
                message='{0}: Unknown Error: Likely a Bug'.format(entry.id))
def _extract_next_page_link(self):
    """Try to locate the next-page link in the parsed tree.

    HEADS UP: we do not abort if next_page_link is already set: we try
    to find the next one (eg. find 3 if already at page 2).
    """
    for xpath_expr in self.config.next_page_link:
        matches = self.parsed_tree.xpath(xpath_expr)
        if not matches:
            continue
        if len(matches) != 1:
            LOGGER.warning(u'%s items for next-page link %s',
                           matches, xpath_expr,
                           extra={'siteconfig': self.config.host})
            continue
        element = matches[0]
        if 'href' in element.keys():
            self.next_page_link = element.get('href')
        else:
            self.next_page_link = element.text.strip()
        LOGGER.info(u'Found next page link: %s.', self.next_page_link)
        # First found link is the good one.
        break
def _generate_report_all(self):
    '''Generate a report for every subfolder contained by self.folder_id.'''
    assert self.workbook is not None
    processed = 0
    for subfolder_id in self.folders.subfolders(self.folder_id, self.user):
        processed += 1
        self._generate_for_subfolder(subfolder_id)
    if processed == 0:
        print("I: empty workbook created: no subfolders found")
def replace_vobject(self, uid, ical, filename=None):
    """Update the Remind command with the given uid in the file with the
    new iCalendar.

    Returns the new uid of the rewritten line, or None when the uid is
    not found (or `filename` is not a known reminder file).
    """
    if not filename:
        filename = self._filename
    elif filename not in self._reminders:
        return
    # Uids are '<md5>@<host>'; only the hash part identifies the line.
    uid = uid.split('@')[0]
    with self._lock:
        # BUG FIX: the file handles were previously opened without being
        # closed; use context managers so they are released even if
        # to_reminders() raises.
        with open(filename) as infile:
            rem = infile.readlines()
        for index, line in enumerate(rem):
            # Match against the md5 of the line minus its trailing newline.
            if uid == md5(line[:-1].encode('utf-8')).hexdigest():
                rem[index] = self.to_reminders(ical)
                new_uid = self._get_uid(rem[index])
                with open(filename, 'w') as outfile:
                    outfile.writelines(rem)
                return new_uid
def allow(self, channel, message):
    """Allow plugins to filter content.

    Returns True only when every loaded filter accepts the message.
    """
    return all(flt(channel, message) for flt in _load_filters())
def _use(cls, ec):
    """Underlying implementation of `use`: build a throwaway subclass
    bound to the given engine name or Session."""
    # Equivalent to `class ConnectModel(cls): pass`, but preserves the
    # original class name.
    bound = type(cls.__name__, (cls,), {})
    bound.tablename = cls.tablename
    bound._base_class = cls
    if isinstance(ec, (str, unicode)):
        # NOTE(review): `unicode` implies Python 2 — confirm before porting.
        bound._engine_name = ec
    elif isinstance(ec, Session):
        bound._engine_name = ec.engine_name
        bound._connection = ec
    return bound
def nfc(ctx, enable, disable, enable_all, disable_all, list, lock_code, force):
    """Enable or disable applications over NFC.

    Validates the requested changes against the device's NFC
    capabilities, builds the new enabled-application bitmask, confirms
    with the user (unless forced), and writes the configuration
    (rebooting the key).
    """
    # At least one action must be requested.
    if not (list or enable_all or enable or disable_all or disable):
        ctx.fail('No configuration options chosen.')
    # The *-all flags expand to every known application name.
    if enable_all:
        enable = APPLICATION.__members__.keys()
    if disable_all:
        disable = APPLICATION.__members__.keys()
    _ensure_not_invalid_options(ctx, enable, disable)
    dev = ctx.obj['dev']
    nfc_supported = dev.config.nfc_supported
    nfc_enabled = dev.config.nfc_enabled
    if not nfc_supported:
        ctx.fail('NFC interface not available.')
    if list:
        _list_apps(ctx, nfc_enabled)
    # Flip bits in the enabled mask, refusing apps the device cannot do.
    for app in enable:
        if APPLICATION[app] & nfc_supported:
            nfc_enabled |= APPLICATION[app]
        else:
            ctx.fail('{} not supported over NFC.'.format(app))
    for app in disable:
        if APPLICATION[app] & nfc_supported:
            nfc_enabled &= ~APPLICATION[app]
        else:
            ctx.fail('{} not supported over NFC.'.format(app))
    # Human-readable summary used in the confirmation prompt.
    f_confirm = '{}{}Configure NFC interface?'.format(
        'Enable {}.\n'.format(', '.join([str(APPLICATION[app]) for app in enable])) if enable else '',
        'Disable {}.\n'.format(', '.join([str(APPLICATION[app]) for app in disable])) if disable else '')
    is_locked = dev.config.configuration_locked
    # Lock-code sanity checks before prompting the user.
    if force and is_locked and not lock_code:
        ctx.fail('Configuration is locked - please supply the --lock-code '
                 'option.')
    if lock_code and not is_locked:
        ctx.fail('Configuration is not locked - please remove the '
                 '--lock-code option.')
    # Short-circuit: skip the interactive prompt when forcing.
    force or click.confirm(f_confirm, abort=True, err=True)
    if is_locked and not lock_code:
        lock_code = prompt_lock_code()
    if lock_code:
        lock_code = _parse_lock_code(ctx, lock_code)
    try:
        dev.write_config(device_config(nfc_enabled=nfc_enabled),
                         reboot=True, lock_key=lock_code)
    except Exception as e:
        logger.error('Failed to write config', exc_info=e)
        ctx.fail('Failed to configure NFC applications.')
def reset(self, ms=0, halt=True):
    """Resets the target.

    This method resets the target, and by default toggles the RESET and
    TRST pins.

    Args:
      self (JLink): the ``JLink`` instance
      ms (int): Amount of milliseconds to delay after reset (default: 0)
      halt (bool): if the CPU should halt after reset (default: True)

    Returns:
      The non-negative result of ``JLINKARM_Reset()``.
      NOTE(review): the previous docstring said "Number of bytes read",
      which does not match a reset operation — confirm against the
      J-Link SDK manual.

    Raises:
      JLinkException: if the DLL returns a negative error code.
    """
    self._dll.JLINKARM_SetResetDelay(ms)
    res = self._dll.JLINKARM_Reset()
    if res < 0:
        raise errors.JLinkException(res)
    elif not halt:
        # Resume the CPU when the caller did not ask for a post-reset halt.
        self._dll.JLINKARM_Go()
    return res
def category_changed_cb(self, selection, model):
    """Enables and disables action buttons depending on selected item.

    Signal handler for the category tree selection; always returns True.
    """
    # The `model` parameter is part of the signal signature, but the
    # selection's own model is what we actually use.
    model, tree_iter = selection.get_selected()
    if tree_iter is None:
        self.activity_store.clear()
    else:
        self.prev_selected_activity = None
        self.activity_store.load(model[tree_iter][0])
    # CLEANUP: removed the `id` local — it was assigned but never read,
    # and shadowed the builtin (as did `iter`, renamed to `tree_iter`).
    # Start with nothing: disable edit/remove until an activity is picked.
    self.get_widget('activity_edit').set_sensitive(False)
    self.get_widget('activity_remove').set_sensitive(False)
    return True
def offset(self):
    """Return offset to series data in file, if any.

    The series is addressable as one block only when every page stores
    its data contiguously and each page starts exactly where the
    previous one ended; otherwise None is returned.
    """
    if not self._pages:
        return None
    pos = 0
    for page in self._pages:
        if page is None:
            return None
        if not page.is_final:
            return None
        if not pos:
            # First page: seed `pos` with its end position.
            # is_contiguous appears to be an (offset, byte_count) pair —
            # TODO confirm against the page class.
            pos = page.is_contiguous[0] + page.is_contiguous[1]
            continue
        if pos != page.is_contiguous[0]:
            # Gap or overlap between consecutive pages.
            return None
        pos += page.is_contiguous[1]
    page = self._pages[0]
    offset = page.is_contiguous[0]
    if (page.is_imagej or page.is_shaped) and len(self._pages) == 1:
        # truncated files
        return offset
    # Accept only if the contiguous span covers the full series size.
    if pos == offset + product(self.shape) * self.dtype.itemsize:
        return offset
    return None
def _build_tags(self, tag_names: List[str]) -> dict:
    """Map each tag name to its tag object, creating tags that do not
    exist yet."""
    tags = {}
    for tag_name in tag_names:
        existing = self.tag(tag_name)
        if existing is None:
            LOG.debug(f"create new tag: {tag_name}")
            existing = self.new_tag(tag_name)
        tags[tag_name] = existing
    return tags
def hard_path(path, prefix_dir):
    """Returns an absolute path to either the relative or absolute file.

    Prefers `path` resolved relative to `prefix_dir` when that file
    exists; otherwise falls back to the absolute form of `path` itself.
    """
    candidate = abspath("%s/%s" % (prefix_dir, path))
    if os.path.exists(candidate):
        LOG.debug("using relative path %s (%s)", candidate, path)
        return candidate
    fallback = abspath(path)
    LOG.debug("using absolute path %s", fallback)
    return fallback
def _process_rules(self, rules: dict, system: System):
    """Process a set of rules for a target system and its modules."""
    # Reset the template source before evaluating a fresh rule set.
    self._source = None
    if not self._shall_proceed(rules):
        return
    self.context.update(rules.get('context', {}))
    self.path = rules.get('path', '')
    self.source = rules.get('source', None)
    self._process_rule(rules.get('system', None), {'system': system})
    for module in system.modules:
        self._process_rule(rules.get('module', None), {'module': module})
        # Same treatment for each element family within the module.
        per_module = (
            ('interface', module.interfaces),
            ('struct', module.structs),
            ('enum', module.enums),
        )
        for kind, elements in per_module:
            for element in elements:
                self._process_rule(rules.get(kind, None), {kind: element})
def set_webhook(url, certificate=None, max_connections=None, allowed_updates=None, **kwargs):
    """Specify a url and receive incoming updates via an outgoing webhook.

    Whenever there is an update for the bot, Telegram sends an HTTPS POST
    request to the specified url containing a JSON-serialized Update.
    getUpdates cannot be used while a webhook is set. Supported ports:
    443, 80, 88, 8443.

    :param url: HTTPS url to send updates to. Use an empty string to
        remove webhook integration.
    :param certificate: Public key certificate upload (InputFile) so the
        root certificate in use can be checked; required for self-signed
        certificates.
    :param max_connections: Maximum allowed number of simultaneous HTTPS
        connections for update delivery, 1-100 (default 40).
    :param allowed_updates: List of update types to receive; an empty
        list means all types. If omitted, the previous setting is kept.
    :param kwargs: Args that get passed down to
        :class:`TelegramBotRPCRequest`.
    :type url: str
    :type certificate: InputFile
    :returns: Returns True on success.
    :rtype: TelegramBotRPCRequest
    """
    # Drop unset optional args before building the RPC request.
    request_params = _clean_params(
        url=url,
        certificate=certificate,
        max_connections=max_connections,
        allowed_updates=allowed_updates,
    )
    return TelegramBotRPCRequest('setWebhook', params=request_params,
                                 on_result=lambda result: result, **kwargs)
def offset(self, offset):
    """Offset the date range by the given amount of periods.

    Unlike the plain date range offset, this expects an integer number
    of periods (day, week, american week, month, quarter or year,
    depending on this range's period type) rather than a ``timedelta``.
    The value may be negative.

    :param offset: Number of periods to offset this range by.
    :return: New offset :class:`~spans.types.PeriodRange`
    """
    span = self
    # Walk one period at a time in the requested direction.
    for _ in iter_range(abs(offset)):
        span = span.next_period() if offset > 0 else span.prev_period()
    return span
def reverse_delete_ipv6(self, subid, ipaddr, params=None):
    '''/v1/server/reverse_delete_ipv6
    POST - account
    Remove a reverse DNS entry for an IPv6 address of a virtual machine.
    Upon success, DNS changes may take 6-12 hours to become active.

    Link: https://www.vultr.com/api/#server_reverse_delete_ipv6
    '''
    merged = update_params(params, {
        'SUBID': subid,
        'ip': ipaddr,
    })
    return self.request('/v1/server/reverse_delete_ipv6', merged, 'POST')
def get_data(self, compact=True):
    '''Returns data representing current state of the form.

    While Form.raw_data may contain alien fields and invalid data, this
    method returns only valid fields that belong to this form. It's
    designed to pass the current state of the form somewhere (as a query
    string or by other means).
    '''
    data = MultiDict()
    for field in self.fields:
        raw = field.from_python(self.python_data[field.name])
        field.set_raw_value(data, raw)
    if compact:
        # Drop empty values to keep the serialized form short.
        data = MultiDict([(key, value) for key, value in data.items() if value])
    return data
def key(self, id_num):
    """Get the specified deploy key.

    :param int id_num: (required), id of the key
    :returns: :class:`Key <github3.users.Key>` if successful, else None
    """
    if int(id_num) <= 0:
        return None
    url = self._build_url('keys', str(id_num), base_url=self._api)
    json = self._json(self._get(url), 200)
    return Key(json, self) if json else None
def MatchBuildContext(self, target_os, target_arch, target_package, context=None):
    """Return true if target_platforms matches the supplied parameters.

    Used by buildanddeploy to determine what clients need to be built.

    Args:
      target_os: which os we are building for (linux, windows, darwin)
      target_arch: which arch we are building for (i386, amd64)
      target_package: which package type we are building (exe, dmg, deb, rpm)
      context: config_lib context

    Returns:
      bool: True if a target_platforms spec matches the parameters.
    """
    wanted = (target_os, target_arch, target_package)
    for spec in self.Get("ClientBuilder.target_platforms", context=context):
        # Specs are formatted as "<os>_<arch>_<package>".
        spec_os, spec_arch, spec_package = spec.split("_")
        if (spec_os, spec_arch, spec_package) == wanted:
            return True
    return False
def get_vec_tb(self):
    """Returns the vector from top to bottom as an (x, y) pair."""
    length = self.height
    return length * self.sin_a(), length * self.cos_a()
def AppendIndexDictionaryToFile(uniqueWords, ndxFile, ipFile, useShortFileName='Y'):
    """Append the words of `uniqueWords` to the master index file.

    Each non-empty word is written (sorted) as one line:
        <file>, <word>, <line numbers separated by spaces>

    Args:
        uniqueWords (dict): maps word -> iterable of line numbers.
        ndxFile (str): path of the index file to append to.
        ipFile (str): source file the words came from.
        useShortFileName (str): 'Y' to record only the basename of ipFile.
    """
    if useShortFileName == 'Y':
        src = os.path.basename(ipFile)
    else:
        src = ipFile
    with open(ndxFile, "a", encoding='utf-8', errors='replace') as ndx:
        for word in sorted(uniqueWords.keys()):
            if word != '':
                # BUG FIX: line numbers used to be written back-to-back
                # with no separator (1, 2 became "12"), making the index
                # ambiguous; separate them with spaces.
                line_nums = ' '.join(str(num) for num in uniqueWords[word])
                ndx.write(src + ', ' + word + ', ' + line_nums + '\n')
def run_(self):
    """DEPRECATED."""
    all_records = []
    for k in range(self.num_classes):
        simulated_records = self.alf_params[k + 1].run()
        # Zero-pad the per-class counter to the width of the class size.
        width = len(str(self.class_list[k]))
        names = ['class{0}_{1:0>{2}}'.format(k + 1, i, width)
                 for i in range(1, len(simulated_records) + 1)]
        for rec, name in zip(simulated_records, names):
            rec.name = name
        all_records.extend(simulated_records)
    self.result = all_records
    self.clean()
    return all_records
def isdicom(fn):
    '''True if the fn points to a DICOM image.'''
    fn = str(fn)
    # Trust the conventional extension without opening the file.
    if fn.endswith('.dcm'):
        return True
    # Otherwise look for the "DICM" magic number that the DICOM spec
    # places after the 128-byte preamble.
    with open(fn, 'rb') as stream:
        stream.seek(0x80)
        return stream.read(4) == b'DICM'
def intf_up(self, interface):
    '''Can be called when an interface is put in service.

    FIXME: not currently used; more needs to be done to correctly put a
    new intf into service.
    '''
    if interface.name in self._devinfo:
        raise ValueError("Interface already registered")
    self._devinfo[interface.name] = interface
    if self._devupdown_callback:
        self._devupdown_callback(interface, 'up')
def stats(self, request):
    '''Live stats for the server. Try sending lots of requests.'''
    page = HtmlDocument(title='Live server stats', media_path='/assets/')
    return page.http_response(request)
def get_previous_request(rid):
    """Return the last ceph broker request sent on a given relation.

    @param rid: Relation id to query for request
    """
    broker_req = relation_get(attribute='broker_req', rid=rid,
                              unit=local_unit())
    if not broker_req:
        return None
    request_data = json.loads(broker_req)
    request = CephBrokerRq(api_version=request_data['api-version'],
                           request_id=request_data['request-id'])
    request.set_ops(request_data['ops'])
    return request
def parse_methodcall(self, tup_tree):
    """Parse a METHODCALL element::

        <!ELEMENT METHODCALL ((LOCALCLASSPATH | LOCALINSTANCEPATH),
                              PARAMVALUE*)>
        <!ATTLIST METHODCALL
            %CIMName;>

    Returns a 4-tuple of (element name, attributes dict, the single local
    path child, list of PARAMVALUE children).

    Raises:
        CIMXMLParseError: if the required local path child is missing or
            occurs more than once.
    """
    self.check_node(tup_tree, 'METHODCALL', ('NAME',), (),
                    ('LOCALCLASSPATH', 'LOCALINSTANCEPATH', 'PARAMVALUE'))
    # The DTD requires exactly one local path child.
    path = self.list_of_matching(tup_tree,
                                 ('LOCALCLASSPATH', 'LOCALINSTANCEPATH'))
    if not path:
        raise CIMXMLParseError(
            _format("Element {0!A} missing a required child element "
                    "'LOCALCLASSPATH' or 'LOCALINSTANCEPATH'",
                    name(tup_tree)),
            conn_id=self.conn_id)
    if len(path) > 1:
        raise CIMXMLParseError(
            _format("Element {0!A} has too many child elements {1!A} "
                    "(allowed is one of 'LOCALCLASSPATH' or "
                    "'LOCALINSTANCEPATH')",
                    name(tup_tree), path),
            conn_id=self.conn_id)
    path = path[0]
    params = self.list_of_matching(tup_tree, ('PARAMVALUE',))
    return (name(tup_tree), attrs(tup_tree), path, params)
def ListHunts(context=None):
    """List all GRR hunts.

    Args:
      context: API context used to issue the request.
        NOTE(review): despite the None default, `context` must be
        supplied — it is dereferenced unconditionally below.

    Returns:
      An iterator of `Hunt` objects wrapping the returned items.
    """
    items = context.SendIteratorRequest("ListHunts",
                                        hunt_pb2.ApiListHuntsArgs())
    # Wrap each raw item in a Hunt bound to the same context.
    return utils.MapItemsIterator(
        lambda data: Hunt(data=data, context=context), items)
def check_assumptions(self, training_df, advice=True, show_plots=False, p_value_threshold=0.01, plot_n_bootstraps=10):
    """Use this function to test the proportional hazards assumption.

    See usage example at
    https://lifelines.readthedocs.io/en/latest/jupyter_notebooks/Proportional%20hazard%20assumption.html

    Parameters
    ----------
    training_df : DataFrame
        the original DataFrame used in the call to ``fit(...)`` or a
        sub-sampled version.
    advice : boolean, optional
        display advice as output to the user's screen.
    show_plots : boolean, optional
        display plots of the scaled schoenfeld residuals and loess
        curves. This is an eyeball test for violations and will slow the
        function down significantly.
    p_value_threshold : float, optional
        the threshold to use to alert the user of violations. See note
        below.
    plot_n_bootstraps :
        in the plots displayed, also display plot_n_bootstraps
        bootstrapped loess curves. This will slow down the function
        significantly.

    Examples
    --------
    >>> from lifelines.datasets import load_rossi
    >>> from lifelines import CoxPHFitter
    >>> rossi = load_rossi()
    >>> cph = CoxPHFitter().fit(rossi, 'week', 'arrest')
    >>> cph.check_assumptions(rossi)

    Notes
    -----
    The ``p_value_threshold`` is arbitrarily set at 0.01. Under the null,
    some covariates will be below the threshold (i.e. by chance), which
    is compounded when there are many covariates. Similarly, with many
    observations even minor deviances will be flagged. Best to combine
    statistical tests with eyeball tests.

    References
    ----------
    section 5 in https://socialsciences.mcmaster.ca/jfox/Books/Companion/appendices/Appendix-Cox-Regression.pdf,
    http://www.mwsug.org/proceedings/2006/stats/MWSUG-2006-SD08.pdf,
    http://eprints.lse.ac.uk/84988/1/06_ParkHendry2015-ReassessingSchoenfeldTests_Final.pdf
    """
    # Residuals are joined back on the index, so it must be unique.
    if not training_df.index.is_unique:
        raise IndexError(
            "`training_df` index should be unique for this exercise. Please make it unique or use `.reset_index(drop=True)` to force a unique index"
        )

    residuals = self.compute_residuals(training_df, kind="scaled_schoenfeld")
    test_results = proportional_hazard_test(
        self, training_df, time_transform=["rank", "km"], precomputed_residuals=residuals
    )

    residuals_and_duration = residuals.join(training_df[self.duration_col])

    counter = 0
    n = residuals_and_duration.shape[0]

    for variable in self.hazards_.index:
        # Smallest p-value over the time transforms tried above.
        minumum_observed_p_value = test_results.summary.loc[variable, "p"].min()
        if np.round(minumum_observed_p_value, 2) > p_value_threshold:
            continue

        counter += 1

        if counter == 1:
            # Print the preamble and the full test summary once, before
            # the first flagged variable.
            if advice:
                print(
                    fill(
                        """The ``p_value_threshold`` is set at %g. Even under the null hypothesis of no violations, some covariates will be below the threshold by chance. This is compounded when there are many covariates. Similarly, when there are lots of observations, even minor deviances from the proportional hazard assumption will be flagged.""" % p_value_threshold,
                        width=100,
                    )
                )
                print()
                print(
                    fill(
                        """With that in mind, it's best to use a combination of statistical tests and visual tests to determine the most serious violations. Produce visual plots using ``check_assumptions(..., show_plots=True)`` and looking for non-constant lines. See link [A] below for a full example.""",
                        width=100,
                    )
                )
                print()
            test_results.print_summary()
            print()

        print()
        print(
            "%d. Variable '%s' failed the non-proportional test: p-value is %s."
            % (counter, variable, format_p_value(4)(minumum_observed_p_value)),
            end="\n\n",
        )

        if advice:
            values = training_df[variable]
            value_counts = values.value_counts()
            n_uniques = value_counts.shape[0]

            # Arbitrary chosen 10 and 4 to check for ability to use strata col.
            # This should capture dichotomous / low cardinality values.
            if n_uniques <= 10 and value_counts.min() >= 5:
                print(
                    fill(
                        " Advice: with so few unique values (only {0}), you can include `strata=['{1}', ...]` in the call in `.fit`. See documentation in link [E] below.".format(
                            n_uniques, variable
                        ),
                        width=100,
                    )
                )
            else:
                print(
                    fill(
                        """ Advice 1: the functional form of the variable '{var}' might be incorrect. That is, there may be non-linear terms missing. The proportional hazard test used is very sensitive to incorrect functional forms. See documentation in link [D] below on how to specify a functional form.""".format(
                            var=variable
                        ),
                        width=100,
                    ),
                    end="\n\n",
                )
                print(
                    fill(
                        """ Advice 2: try binning the variable '{var}' using pd.cut, and then specify it in `strata=['{var}', ...]` in the call in `.fit`. See documentation in link [B] below.""".format(
                            var=variable
                        ),
                        width=100,
                    ),
                    end="\n\n",
                )
                print(
                    fill(
                        """ Advice 3: try adding an interaction term with your time variable. See documentation in link [C] below.""".format(
                            var=variable
                        ),
                        width=100,
                    ),
                    end="\n\n",
                )

        if show_plots:
            from matplotlib import pyplot as plt

            fig = plt.figure()

            # plot variable against all time transformations.
            for i, (transform_name, transformer) in enumerate(
                TimeTransformers().iter(["rank", "km"]), start=1
            ):
                p_value = test_results.summary.loc[(variable, transform_name), "p"]

                ax = fig.add_subplot(1, 2, i)

                y = residuals_and_duration[variable]
                tt = transformer(self.durations, self.event_observed, self.weights)[
                    self.event_observed.values
                ]

                ax.scatter(tt, y, alpha=0.75)

                y_lowess = lowess(tt.values, y.values)
                ax.plot(tt, y_lowess, color="k", alpha=1.0, linewidth=2)

                # bootstrap some possible other lowess lines. This is an approximation of the 100% confidence intervals
                for _ in range(plot_n_bootstraps):
                    ix = sorted(np.random.choice(n, n))
                    tt_ = tt.values[ix]
                    y_lowess = lowess(tt_, y.values[ix])
                    ax.plot(tt_, y_lowess, color="k", alpha=0.30)

                best_xlim = ax.get_xlim()
                ax.hlines(0, 0, tt.max(), linestyles="dashed", linewidths=1)
                ax.set_xlim(best_xlim)

                ax.set_xlabel(
                    "%s-transformed time\n(p=%.4f)" % (transform_name, p_value), fontsize=10
                )

            fig.suptitle("Scaled Schoenfeld residuals of '%s'" % variable, fontsize=14)
            plt.tight_layout()
            plt.subplots_adjust(top=0.90)

    if advice and counter > 0:
        print(
            dedent(
                r"""
            ---
            [A] https://lifelines.readthedocs.io/en/latest/jupyter_notebooks/Proportional%20hazard%20assumption.html
            [B] https://lifelines.readthedocs.io/en/latest/jupyter_notebooks/Proportional%20hazard%20assumption.html#Bin-variable-and-stratify-on-it
            [C] https://lifelines.readthedocs.io/en/latest/jupyter_notebooks/Proportional%20hazard%20assumption.html#Introduce-time-varying-covariates
            [D] https://lifelines.readthedocs.io/en/latest/jupyter_notebooks/Proportional%20hazard%20assumption.html#Modify-the-functional-form
            [E] https://lifelines.readthedocs.io/en/latest/jupyter_notebooks/Proportional%20hazard%20assumption.html#Stratification
            """
            )
        )

    if counter == 0:
        print("Proportional hazard assumption looks okay.")
def list_vms ( search = None , verbose = False ) :
    '''
    List all vms

    search : string
        filter vms, see the execution module
    verbose : boolean
        print additional information about the vm

    CLI Example:

    .. code-block:: bash

        salt-run vmadm.list
        salt-run vmadm.list search='type=KVM'
        salt-run vmadm.list verbose=True
    '''
    # verbose mode returns an OrderedDict of vm -> details; plain mode a flat list of ids
    ret = OrderedDict ( ) if verbose else [ ]
    client = salt . client . get_local_client ( __opts__ [ 'conf_file' ] )
    try :
        vmadm_args = { }
        vmadm_args [ 'order' ] = 'uuid,alias,hostname,state,type,cpu_cap,vcpus,ram'
        if search :
            vmadm_args [ 'search' ] = search
        # query every SmartOS compute node (physical smartos minions) for its vm list
        for cn in client . cmd_iter ( 'G@virtual:physical and G@os:smartos' , 'vmadm.list' , kwarg = vmadm_args , tgt_type = 'compound' ) :
            if not cn :
                continue
            node = next ( six . iterkeys ( cn ) )
            # skip minions that errored out or returned malformed data
            if not isinstance ( cn [ node ] , dict ) or \
                    'ret' not in cn [ node ] or \
                    not isinstance ( cn [ node ] [ 'ret' ] , dict ) :
                continue
            for vm in cn [ node ] [ 'ret' ] :
                vmcfg = cn [ node ] [ 'ret' ] [ vm ]
                if verbose :
                    ret [ vm ] = OrderedDict ( )
                    ret [ vm ] [ 'hostname' ] = vmcfg [ 'hostname' ]
                    ret [ vm ] [ 'alias' ] = vmcfg [ 'alias' ]
                    ret [ vm ] [ 'computenode' ] = node
                    ret [ vm ] [ 'state' ] = vmcfg [ 'state' ]
                    ret [ vm ] [ 'resources' ] = OrderedDict ( )
                    ret [ vm ] [ 'resources' ] [ 'memory' ] = vmcfg [ 'ram' ]
                    # KVM guests report whole vcpus; zones report a cpu_cap percentage
                    if vmcfg [ 'type' ] == 'KVM' :
                        ret [ vm ] [ 'resources' ] [ 'cpu' ] = "{0:.2f}" . format ( int ( vmcfg [ 'vcpus' ] ) )
                    else :
                        if vmcfg [ 'cpu_cap' ] != '' :
                            ret [ vm ] [ 'resources' ] [ 'cpu' ] = "{0:.2f}" . format ( int ( vmcfg [ 'cpu_cap' ] ) / 100 )
                else :
                    ret . append ( vm )
    except SaltClientError as client_error :
        # on client failure, return the error text instead of raising
        return "{0}" . format ( client_error )
    if not verbose :
        # plain id listing is returned sorted
        ret = sorted ( ret )
    return ret
def get_permissions ( self ) :
    """Collect the permission codes attached to this abstract role.

    Returns:
        list: ``code`` values of the related Permission objects
        (empty/falsy codes are skipped).
    """
    codes = []
    for rel in self . Permissions :
        code = rel . permission . code
        if code :
            codes . append ( code )
    return codes
def _set_retcode ( ret , highstate = None ) :
    '''
    Set the return code based on the data back from the state system
    '''
    exitcodes = salt . defaults . exitcodes
    # Assume success until proven otherwise.
    __context__ [ 'retcode' ] = exitcodes . EX_OK
    if isinstance ( ret , list ) :
        # A bare list means the state compiler produced errors instead of results.
        __context__ [ 'retcode' ] = exitcodes . EX_STATE_COMPILER_ERROR
    elif not __utils__ [ 'state.check_result' ] ( ret , highstate = highstate ) :
        # Compiled fine, but one or more states failed.
        __context__ [ 'retcode' ] = exitcodes . EX_STATE_FAILURE
def show ( self , ax : plt . Axes = None , figsize : tuple = ( 3 , 3 ) , title : Optional [ str ] = None , hide_axis : bool = True ,
        cmap : str = None , y : Any = None , ** kwargs ) :
    "Show image on `ax` with `title`, using `cmap` if single-channel, overlaid with optional `y`"
    # Fall back to the library-wide default colormap when none is given.
    chosen_cmap = ifnone ( cmap , defaults . cmap )
    ax = show_image ( self , ax = ax , hide_axis = hide_axis , cmap = chosen_cmap , figsize = figsize )
    if y is not None :
        y . show ( ax = ax , ** kwargs )
    if title is not None :
        ax . set_title ( title )
def match ( self , item ) :
    """Return True if filter matches item."""
    value = getattr ( item , self . _name )
    if value is not None :
        # Normal case: defer to the base duration comparison.
        return super ( DurationFilter , self ) . match ( item )
    # "N/A" items never match, except when "-0" was specified
    # (i.e. self._value is falsy, so compare -1 against 0).
    if self . _value :
        return False
    return self . _cmp ( - 1 , 0 )
def convert ( kml_path , output_dir , separate_folders = False , style_type = None ,
            style_filename = 'style.json' ) :
    """Convert a KML file into GeoJSON FeatureCollection file(s) in ``output_dir``.

    With ``separate_folders`` False (default) a single GeoJSON file is written;
    otherwise one file per KML folder that contains geodata (directly or in a
    descendant), which can duplicate geodata for nested folders. When
    ``style_type`` is given, also write a JSON style file named
    ``style_filename`` to the output directory.
    """
    # Resolve both paths up front, creating the output directory if needed.
    kml_path = Path ( kml_path ) . resolve ( )
    output_dir = Path ( output_dir )
    if not output_dir . exists ( ) :
        output_dir . mkdir ( )
    output_dir = output_dir . resolve ( )

    # Read and parse the KML document.
    with kml_path . open ( encoding = 'utf-8' , errors = 'ignore' ) as src :
        root = md . parseString ( src . read ( ) )

    # Build the GeoJSON layer(s).
    if separate_folders :
        layers = build_layers ( root )
    else :
        layers = [ build_feature_collection ( root , name = kml_path . stem ) ]

    # Derive unique output filenames and dump each layer.
    names = disambiguate ( [ to_filename ( layer [ 'name' ] ) for layer in layers ] )
    for name , layer in zip ( names , layers ) :
        with ( output_dir / ( name + '.geojson' ) ) . open ( 'w' ) as tgt :
            json . dump ( layer , tgt )

    # Optionally emit a style file.
    if style_type is not None :
        if style_type not in STYLE_TYPES :
            raise ValueError ( 'style type must be one of {!s}' . format ( STYLE_TYPES ) )
        style_dict = globals ( ) [ 'build_{!s}_style' . format ( style_type ) ] ( root )
        with ( output_dir / style_filename ) . open ( 'w' ) as tgt :
            json . dump ( style_dict , tgt )
def listdir ( self , match = None ) :
    """D.listdir() -> List of items in this directory.

    Use :meth:`files` or :meth:`dirs` instead if you want a listing
    of just files or just subdirectories.

    The elements of the list are Path objects.

    With the optional `match` argument, a callable, only return items
    whose names match the given pattern.

    .. seealso:: :meth:`files`, :meth:`dirs`
    """
    predicate = matchers . load ( match )
    children = ( self / name for name in os . listdir ( self ) )
    return [ child for child in children if predicate ( child ) ]
def from_mat_file ( cls , matfilename ) :
    """Load gyro data from a MATLAB .mat file.

    The MAT file should contain the following two arrays

    gyro : (3, N) float ndarray
            The angular velocity measurements.
    timestamps : (N, ) float ndarray
            Timestamps of the measurements.

    Parameters
    --------------------
    matfilename : string
        Name of the .mat file

    Returns
    ----------------------
    A new IMU class instance
    """
    contents = scipy . io . loadmat ( matfilename )
    imu = cls ( )
    imu . gyro_data = contents [ 'gyro' ]
    imu . timestamps = contents [ 'timestamps' ]
    return imu
def _to_rule ( self , lark_rule ) :
    """Convert a lark rule (lhs, rhs, callback, options) into a Rule."""
    origin = lark_rule . origin
    expansion = lark_rule . expansion
    # Sanity-check the lark structures before wrapping them.
    assert isinstance ( origin , NT )
    assert all ( isinstance ( sym , Symbol ) for sym in expansion )
    # A rule only carries weight when its options declare a priority.
    if lark_rule . options and lark_rule . options . priority :
        weight = lark_rule . options . priority
    else :
        weight = 0
    return Rule ( origin , expansion , weight = weight , alias = lark_rule )
def handle ( self , targetdir , app = None , ** options ) :
    """command execution"""
    # Message extraction must run under the project's default language.
    translation . activate ( settings . LANGUAGE_CODE )
    if app :
        unpack = app . split ( '.' )
        if len ( unpack ) == 2 :
            # "app.Model" -> a single model
            models = [ get_model ( unpack [ 0 ] , unpack [ 1 ] ) ]
        elif len ( unpack ) == 1 :
            # "app" -> every model of that app
            models = get_models ( get_app ( unpack [ 0 ] ) )
        # NOTE(review): a label with more than one dot leaves `models`
        # unbound and raises NameError in the loop below -- confirm intended.
    else :
        # No app given: scan every installed model.
        models = get_models ( )
    messagemaker = MakeModelMessages ( targetdir )
    # Only models declaring `localized_fields` contribute messages,
    # one call per instance in the database.
    for model in models :
        if hasattr ( model , 'localized_fields' ) :
            for instance in model . objects . all ( ) :
                messagemaker ( instance )
def sortino_ratio ( rets , rfr_ann = 0 , mar = 0 , full = 0 , expanding = 0 ) :
    """Compute the Sortino ratio:
    (annualized returns - risk free rate) / annualized downside deviation.

    :param rets: period return series
    :param rfr_ann: annualized risk free rate
    :param mar: minimum acceptable rate of return (MAR)
    :param full: If True, use the length of the full series. If False, use only
                 values below MAR
    :param expanding:
    :return:
    """
    excess = returns_annualized ( rets , expanding = expanding ) - rfr_ann
    downside = downside_deviation ( rets , mar = mar , expanding = expanding , full = full , ann = 1 )
    return excess / downside
def locked ( self , target ) :
    """Returns a context manager for a lock on a datastore, where
    *target* is the name of the configuration datastore to lock, e.g.::

        with m.locked("running"):
            # do your stuff

    ... instead of::

        m.lock("running")
        try:
            # do your stuff
        finally:
            m.unlock("running")
    """
    ctx = operations . LockContext ( self . _session , self . _device_handler , target )
    return ctx
def _m ( self , word , j ) : """m ( ) measures the number of consonant sequences between k0 and j . if c is a consonant sequence and v a vowel sequence , and < . . > indicates arbitrary presence , < c > < v > gives 0 < c > vc < v > gives 1 < c > vcvc < v > gives 2 < c > vcvcvc < v > gives 3"""
n = 0 i = 0 while True : if i > j : return n if not self . _cons ( word , i ) : break i = i + 1 i = i + 1 while True : while True : if i > j : return n if self . _cons ( word , i ) : break i = i + 1 i = i + 1 n = n + 1 while True : if i > j : return n if not self . _cons ( word , i ) : break i = i + 1 i = i + 1
def async_call ( self , * args , ** kwargs ) :
    """Calls a redis command, waits for the reply and call a callback.

    Following options are available (not part of the redis command itself):

    - callback
        Function called (with the result as argument) when the result
        is available. If not set, the reply is silently discarded. In
        case of errors, the callback is called with a
        TornadisException object as argument.

    Args:
        *args: full redis command as variable length argument list or
            a Pipeline object (as a single argument).
        **kwargs: options as keyword parameters.

    Examples:
        >>> def cb(result):
                pass
        >>> client.async_call("HSET", "key", "field", "val", callback=cb)
    """
    def after_autoconnect_callback ( future ) :
        # Runs once the implicit connect attempt resolves; only issue the
        # command if the connection actually came up.
        if self . is_connected ( ) :
            self . _call ( * args , ** kwargs )
        else :
            # FIXME: connect failed and the caller's callback is never
            # invoked -- the command is silently dropped here.
            pass

    # Without an explicit callback, replies are silently dropped.
    if 'callback' not in kwargs :
        kwargs [ 'callback' ] = discard_reply_cb
    if not self . is_connected ( ) :
        if self . autoconnect :
            # Connect first, then replay the command from the future callback.
            connect_future = self . connect ( )
            cb = after_autoconnect_callback
            self . __connection . _ioloop . add_future ( connect_future , cb )
        else :
            # Not connected and autoconnect disabled: report the error
            # through the callback rather than raising.
            error = ConnectionError ( "you are not connected and "
                                     "autoconnect=False" )
            kwargs [ 'callback' ] ( error )
    else :
        self . _call ( * args , ** kwargs )
def originate ( self , data = '' , syn = False , ack = False , fin = False , rst = False ) :
    """Create a packet, enqueue it to be sent, and return it."""
    # An outgoing packet carries the ACK itself, so cancel any pending
    # delayed-ACK timer.
    if self . _ackTimer is not None :
        self . _ackTimer . cancel ( )
        self . _ackTimer = None
    if syn :
        # We really should be randomizing the ISN but until we finish the
        # implementations of the various bits of wraparound logic that were
        # started with relativeSequence
        assert self . nextSendSeqNum == 0 , ( "NSSN = " + repr ( self . nextSendSeqNum ) )
        assert self . hostSendISN == 0
    p = PTCPPacket . create ( self . hostPseudoPort ,
                           self . peerPseudoPort ,
                           seqNum = ( self . nextSendSeqNum + self . hostSendISN ) % ( 2 ** 32 ) ,
                           ackNum = self . currentAckNum ( ) ,
                           data = data ,
                           window = self . recvWindow ,
                           syn = syn , ack = ack , fin = fin , rst = rst ,
                           destination = self . peerAddressTuple )
    # do we want to enqueue this packet for retransmission?
    sl = p . segmentLength ( )
    self . nextSendSeqNum += sl
    if p . mustRetransmit ( ) :
        if self . retransmissionQueue :
            # Nothing may be sent after a FIN.
            if self . retransmissionQueue [ - 1 ] . fin :
                raise AssertionError ( "Sending %r after FIN??!" % ( p , ) )
        self . retransmissionQueue . append ( p )
        self . _retransmitLater ( )
        if not self . sendWindowRemaining : # len(self.retransmissionQueue) > 5:
            # This is a random number (5) because I ought to be summing the
            # packet lengths or something.
            self . _writeBufferFull ( )
        else :
            pass
    self . ptcp . sendPacket ( p )
    return p
def run ( self , quil_program , classical_addresses : List [ int ] = None , trials = 1 ) :
    """Run a Quil program multiple times, accumulating the values deposited
    in a list of classical addresses.

    :param Program quil_program: A Quil program.
    :param classical_addresses: The classical memory to retrieve. Specified as
        a list of integers that index into a readout register named ``ro``.
        This function -- and particularly this argument -- are included for
        backwards compatibility and will be removed in the future.
    :param int trials: Number of shots to collect.
    :return: A list of dictionaries of bits. Each dictionary corresponds to
        the values in `classical_addresses`.
    :rtype: list
    """
    # Derive the readout addresses from the program itself unless given.
    if classical_addresses is None :
        caddresses = get_classical_addresses_from_program ( quil_program )
    else :
        caddresses = { 'ro' : classical_addresses }

    buffers = self . _connection . _qvm_run ( quil_program , caddresses , trials ,
                                          self . measurement_noise , self . gate_noise ,
                                          self . random_seed )

    if not buffers :
        return [ ]
    if 'ro' in buffers :
        return buffers [ 'ro' ] . tolist ( )
    raise ValueError ( "You are using QVMConnection.run with multiple readout registers not "
                      "named `ro`. Please use the new `QuantumComputer` abstraction." )
def gpg_unstash_key ( appname , key_id , config_dir = None , gpghome = None ) :
    """
    Remove a public key locally from our local app keyring
    Return True on success
    Return False on error
    """
    assert is_valid_appname ( appname )
    # Resolve the keyring directory: explicit gpghome wins, otherwise derive
    # it from the app's config directory.
    if gpghome is None :
        config_dir = get_config_dir ( config_dir )
        keydir = get_gpg_home ( appname , config_dir = config_dir )
    else :
        keydir = gpghome

    gpg = gnupg . GPG ( homedir = keydir )
    res = gpg . delete_keys ( [ key_id ] )
    if res . status == 'Must delete secret key first' :
        # this is a private key; gnupg requires deleting the secret part first
        res = gpg . delete_keys ( [ key_id ] , secret = True )

    # Bug fix: the original wrapped an `assert res.status == 'ok'` in a
    # try/except written with Python-2-only syntax (`except AssertionError, e`),
    # which is a SyntaxError on Python 3 and -- being an assert -- is stripped
    # under `python -O`, silently returning True on failure. Check explicitly.
    if res . status != 'ok' :
        log . error ( "Failed to delete key (%s)" % res )
        log . error ( "Failed to delete key '%s'" % key_id )
        log . debug ( "res: %s" % res . __dict__ )
        return False

    return True
def ekopn ( fname , ifname , ncomch ) :
    """
    Open a new E-kernel file and prepare the file for writing.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekopn_c.html

    :param fname: Name of EK file.
    :type fname: str
    :param ifname: Internal file name.
    :type ifname: str
    :param ncomch: The number of characters to reserve for comments.
    :type ncomch: int
    :return: Handle attached to new EK file.
    :rtype: int
    """
    # CSPICE returns the handle through an output pointer.
    handle = ctypes . c_int ( )
    libspice . ekopn_c ( stypes . stringToCharP ( fname ) ,
                       stypes . stringToCharP ( ifname ) ,
                       ctypes . c_int ( ncomch ) ,
                       ctypes . byref ( handle ) )
    return handle . value
def to_datetime ( value ) :
    """Converts a string to a datetime."""
    # None passes through unchanged.
    if value is None :
        return None
    if isinstance ( value , six . integer_types ) :
        # NOTE(review): dateutil's parser.parse() expects a string; passing an
        # int here most likely raises TypeError -- confirm whether an epoch
        # timestamp conversion was intended instead.
        return parser . parse ( value )
    # Strings are expected to be ISO-8601 formatted.
    return parser . isoparse ( value )
def detailed_log_handler ( self , handler ) :
    """Setter for the detailed log handler function.

    The handler is only installed while the connection is not yet opened;
    once ``self.opened()`` is true the call is silently ignored.

    Args:
      self (JLink): the ``JLink`` instance
      handler: callable to receive detailed log output; ``None`` installs
        a no-op handler

    Returns:
      ``None``
    """
    # Only allow (re)binding the handler before the DLL connection is opened.
    if not self . opened ( ) :
        # Fall back to a no-op so the native callback always has a target.
        handler = handler or util . noop
        # Stored on self, which (presumably) also keeps the ctypes callback
        # object alive for the DLL -- confirm lifetime requirements.
        self . _detailed_log_handler = enums . JLinkFunctions . LOG_PROTOTYPE ( handler )
        self . _dll . JLINKARM_EnableLogCom ( self . _detailed_log_handler )
def _kw ( keywords ) : """Turn list of keywords into dictionary ."""
r = { } for k , v in keywords : r [ k ] = v return r
def decode_json_body ( ) :
    """
    Decode ``bottle.request.body`` to JSON.

    Returns:
        obj: Structure decoded by ``json.loads()``.

    Raises:
        HTTPError: 400 in case the data was malformed.
    """
    payload = request . body . read ( )
    try :
        return json . loads ( payload )
    except ValueError as e :
        # Malformed JSON becomes a client error with the parser's message.
        raise HTTPError ( 400 , str ( e ) )
def workers ( cmd ) :
    """start/stop/restart the workers, or return their status"""
    # Worker management is restricted to the openquake account in
    # multi-user deployments.
    if config . dbserver . multi_user and getpass . getuser ( ) != 'openquake' :
        sys . exit ( 'oq workers only works in single user mode' )
    master = workerpool . WorkerMaster ( config . dbserver . host , ** config . zworkers )
    action = getattr ( master , cmd )
    print ( action ( ) )
def items ( self ) :
    """An iterable of all (anchor-id, Anchor) mappings in the repository.

    Yields:
        tuple: ``(anchor_id, anchor)`` pairs, one per id in the repository.

    Raises:
        AssertionError: if an id produced by iteration can no longer be
            loaded (e.g. the backing file disappeared mid-iteration).
    """
    for anchor_id in self :
        try :
            anchor = self [ anchor_id ]
        except KeyError :
            # Bug fix: the original used `assert False, ...`, which is
            # stripped under `python -O` and would then yield an unbound or
            # stale `anchor`. Raise the same exception type explicitly.
            raise AssertionError ( 'Trying to load from missing file or something' )
        yield ( anchor_id , anchor )
def set_max_threads ( self , max_threads ) :
    """Set the maximum number of concurrent threads.

    :type  max_threads: int
    :param max_threads: The number of threads.
    """
    # Reject a missing value explicitly rather than letting it propagate.
    if max_threads is None :
        raise TypeError ( 'max_threads must not be None.' )
    # Make sure the queue is initialized before resizing the worker pool.
    self . _check_if_ready ( )
    pool = self . collection
    pool . set_max_working ( max_threads )
def restart_instance ( self , instance ) :
    """Restarts a single instance.

    :param str instance: A Yamcs instance name.
    """
    # The REST API models a restart as a state patch on the instance resource.
    self . patch_proto ( '/instances/{}' . format ( instance ) ,
                       params = { 'state' : 'restarted' } )
def fetch_user ( query ) :
    """Get user by ``pk`` or ``username``. Raise error if it doesn't exist."""
    # All-digit queries are treated as primary keys, anything else as a username.
    if query . isdigit ( ) :
        lookup = { 'pk' : query }
    else :
        lookup = { 'username' : query }
    user_model = get_user_model ( )
    try :
        return user_model . objects . get ( ** lookup )
    except user_model . DoesNotExist :
        raise exceptions . ParseError ( "Unknown user: {}" . format ( query ) )
def release ( self ) :
    """Get rid of the lock by deleting the lockfile.
    When working in a `with` statement, this gets automatically called at the end.
    """
    # No-op when the lock is not held.
    if not self . is_locked :
        return
    os . close ( self . fd )
    os . unlink ( self . lockfile )
    self . is_locked = False
def push ( self , * items ) :
    """Prepends the list with @items

    -> #int length of list after operation
    """
    payload = items
    if self . serialized :
        # Serialize each item before handing it to redis.
        payload = [ self . _dumps ( entry ) for entry in items ]
    return self . _client . lpush ( self . key_prefix , * payload )
def ssl_server_options ( ) :
    """
    ssl options for tornado https server

    these options are defined in each application's default.conf file.
    if left empty, use the self generated keys and certificates included
    in this package.

    this function is backward compatible with python versions lower than
    2.7.9, where ssl.SSLContext is not available.
    """
    cafile = options . ssl_ca_cert
    keyfile = options . ssl_key
    certfile = options . ssl_cert
    verify_mode = options . ssl_cert_reqs
    try :
        context = ssl . create_default_context ( purpose = ssl . Purpose . CLIENT_AUTH ,
                                               cafile = cafile )
        context . load_cert_chain ( certfile = certfile , keyfile = keyfile )
        context . verify_mode = verify_mode
        return context
    except AttributeError :
        # ssl.SSLContext unavailable (< 2.7.9): hand tornado a plain dict.
        return { 'ca_certs' : cafile ,
                 'keyfile' : keyfile ,
                 'certfile' : certfile ,
                 'cert_reqs' : verify_mode }