idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
42,800
def start(self):
    """Start this UdpTelemetryServer, logging the endpoint it listens on."""
    endpoint = (self._defn.name, self.server_host, self.server_port)
    log.info('Listening for %s telemetry on %s:%d (UDP)' % endpoint)
    super(UdpTelemetryServer, self).start()
Starts this UdpTelemetryServer .
42,801
def msgBox(self, promptType, _timeout=-1, **options):
    """Send a user prompt request to the GUI.

    Only the 'confirm' prompt type is supported; any other value raises
    ValueError.  Remaining keyword arguments are forwarded as prompt options.
    """
    if promptType != 'confirm':
        raise ValueError('Unknown prompt type: {}'.format(promptType))
    return self._sendConfirmPrompt(_timeout, options)
Send a user prompt request to the GUI
42,802
def _subscribe_all(self):
    """Subscribe all streams to their inputs.

    Also subscribes every plugin to all of its inputs, and subscribes each
    plugin output to the outbound stream of the same name (warning when no
    such stream exists).
    """
    # Only named inputs are subscribed; ints (port numbers? — confirm) and
    # None are skipped for streams.
    for stream in (self.inbound_streams + self.outbound_streams):
        for input_ in stream.inputs:
            if not type(input_) is int and input_ is not None:
                self._subscribe(stream, input_)
    for plugin in self.plugins:
        for input_ in plugin.inputs:
            self._subscribe(plugin, input_)
        # Route each plugin output to the outbound stream bearing its name.
        for output in plugin.outputs:
            subscriber = next((x for x in self.outbound_streams if x.name == output), None)
            if subscriber is None:
                log.warn('The outbound stream {} does not '
                         'exist so will not receive messages '
                         'from {}'.format(output, plugin))
            else:
                self._subscribe(subscriber, plugin.name)
Subscribes all streams to their input . Subscribes all plugins to all their inputs . Subscribes all plugin outputs to the plugin .
42,803
def format(self, record):
    """Return *record* as formatted text, after attaching this formatter's hostname."""
    setattr(record, 'hostname', self.hostname)
    return logging.Formatter.format(self, record)
Returns the given LogRecord as formatted text .
42,804
def formatTime(self, record, datefmt=None):
    """Return the creation time of *record* as formatted text.

    BSD-style output uses local time and pads a single-digit day-of-month
    with a space; otherwise a UTC timestamp is produced.
    """
    if not self.bsd:
        utc = datetime.datetime.utcfromtimestamp(record.created)
        return utc.strftime(self.SYS_DATEFMT)
    ts = datetime.datetime.fromtimestamp(record.created).strftime(self.BSD_DATEFMT)
    if ts[4] == '0':
        # BSD syslog convention: " 1" rather than "01" for the day.
        ts = ts[0:4] + ' ' + ts[5:]
    return ts
Returns the creation time of the given LogRecord as formatted text .
42,805
def query(starttime, endtime, output=None, *filenames):
    """Write packets within [starttime, endtime] from *filenames* to a new pcap.

    If *output* is not given, the name is derived from the first input
    filename with the time range appended.  (Fixes a pointless
    `else: output = output` branch in the original.)
    """
    if not output:
        output = (filenames[0].replace('.pcap', '') + starttime.isoformat() +
                  '-' + endtime.isoformat() + '.pcap')
    # NOTE: `open` here is this pcap module's own open() — streams yield
    # (header, packet) pairs and write() accepts header= — not the builtin.
    with open(output, 'w') as outfile:
        for filename in filenames:
            log.info("pcap.query: processing %s..." % filename)
            with open(filename, 'r') as stream:
                for header, packet in stream:
                    if packet is not None and starttime <= header.timestamp <= endtime:
                        outfile.write(packet, header=header)
Given a time range and input file query creates a new file with only that subset of data . If no outfile name is given the new file name is the old file name with the time range appended .
42,806
def read(self, stream):
    """Read PCapGlobalHeader data from the given stream.

    Detects byte order from the pcap magic number, then re-unpacks the
    header with the detected endianness before populating the fields.
    """
    self._data = stream.read(self._size)
    if len(self._data) >= self._size:
        values = struct.unpack(self._format, self._data)
    else:
        # Short read (truncated file): leave all seven fields as None.
        values = None, None, None, None, None, None, None
    # 0xA1B2C3D4 is the standard pcap magic; 0xA1B23C4D appears to be the
    # nanosecond-resolution variant — both mean native byte order here.
    if values[0] == 0xA1B2C3D4 or values[0] == 0xA1B23C4D:
        self._swap = '@'
    # Byte-swapped magics: file written in the opposite byte order.
    elif values[0] == 0xD4C3B2A1 or values[0] == 0x4D3CB2A1:
        self._swap = EndianSwap
    if values[0] is not None:
        values = struct.unpack(self._swap + self._format, self._data)
    self.magic_number = values[0]
    self.version_major = values[1]
    self.version_minor = values[2]
    self.thiszone = values[3]
    self.sigfigs = values[4]
    self.snaplen = values[5]
    self.network = values[6]
Reads PCapGlobalHeader data from the given stream .
42,807
def read(self, stream):
    """Read PCapPacketHeader data from *stream*, populating this header's fields."""
    self._data = stream.read(self._size)
    if len(self._data) < self._size:
        # Short read (e.g. EOF): every field becomes None.
        fields = (None,) * 4
    else:
        fields = struct.unpack(self._swap + self._format, self._data)
    self.ts_sec, self.ts_usec, self.incl_len, self.orig_len = fields
Reads PCapPacketHeader data from the given stream .
42,808
def rollover(self):
    """Indicate whether it is time to roll over to a new file.

    True once any configured threshold — bytes, packets, or (rounded-up)
    seconds — has been met or exceeded.
    """
    limit = self._threshold
    total = self._total
    if limit.nbytes is not None and total.nbytes >= limit.nbytes:
        return True
    if limit.npackets is not None and total.npackets >= limit.npackets:
        return True
    if limit.nseconds is not None and math.ceil(total.nseconds) >= limit.nseconds:
        return True
    return False
Indicates whether or not its time to rollover to a new file .
42,809
def write(self, bytes, header=None):
    """Write packet *bytes* and the optional pcap packet header.

    Opens a new output file (named from the packet timestamp) on first
    write, tracks byte/packet/second totals, and closes the file once the
    rollover threshold is reached.  Returns the included length recorded
    in the packet header.
    """
    if header is None:
        header = PCapPacketHeader(orig_len=len(bytes))
    if self._stream is None:
        # First write: derive the filename from the packet timestamp,
        # aligned down to the start of the rollover period if configured.
        if self._threshold.nseconds is not None:
            nseconds = self._threshold.nseconds
            remainder = int(math.floor(header.ts % nseconds))
            delta = datetime.timedelta(seconds=remainder)
            timestamp = header.timestamp - delta
        else:
            timestamp = header.timestamp
        self._filename = timestamp.strftime(self._format)
        self._startTime = calendar.timegm(timestamp.replace(microsecond=0).timetuple())
        if self._dryrun:
            # Dry runs only account for sizes; True stands in for a stream.
            self._stream = True
            self._total.nbytes += len(PCapGlobalHeader())
        else:
            self._stream = open(self._filename, 'w')
            self._total.nbytes += len(self._stream.header)
    if not self._dryrun:
        self._stream.write(bytes, header)
    self._total.nbytes += len(bytes) + len(header)
    self._total.npackets += 1
    self._total.nseconds = header.ts - self._startTime
    # NOTE(review): `self.rollover` is read as an attribute — assumes it is
    # a property; if it were a plain method this branch would always be
    # truthy.  Confirm against the class definition.
    if self.rollover:
        self.close()
    return header.incl_len
Writes packet bytes and the optional pcap packet header .
42,810
def hash_file(filename):
    """Return the SHA-1 hex digest of the file at *filename*.

    The file is read in fixed-size chunks so arbitrarily large files can be
    hashed without loading them fully into memory.
    """
    h = hashlib.sha1()
    with open(filename, 'rb') as infile:
        # Two-argument iter() yields chunks until the b'' sentinel (EOF).
        # Fixes the original's int-vs-bytes priming comparison and the
        # builtin-shadowing `file` name; 64 KiB chunks amortize syscalls.
        for chunk in iter(lambda: infile.read(65536), b''):
            h.update(chunk)
    return h.hexdigest()
This function returns the SHA - 1 hash of the file passed into it
42,811
def create(self, name, *args):
    """Create a new FSWTab from the definition registered under *name*.

    Returns None when no (truthy) definition is found.
    """
    defn = self.get(name, None)
    return FSWTab(defn, *args) if defn else None
Creates a new command with the given arguments .
42,812
def load(self, filename):
    """Load Command Definitions from the given YAML file into this dictionary.

    The first filename loaded is remembered on the instance; subsequent
    calls reuse it.
    """
    if self.filename is None:
        self.filename = filename
    # with-statement guarantees the handle is closed even if parsing
    # raises (the original leaked the file object on a YAML error).
    with open(self.filename, 'rb') as stream:
        # SECURITY NOTE(review): yaml.load_all with the default Loader can
        # construct arbitrary Python objects; switch to safe_load_all if
        # the definition files need no custom tags — confirm first.
        for doc in yaml.load_all(stream):
            for table in doc:
                self.add(table)
Loads Command Definitions from the given YAML file into this Command Dictionary .
42,813
def publish(self, msg):
    """Publish *msg* on this client's pub socket, using the client name as topic."""
    payload = "{} {}".format(self.name, msg)
    self.pub.send(payload)
    log.debug('Published message from {}'.format(self))
Publishes input message with client name as topic .
42,814
def create(database, tlmdict=None):
    """Create a database for the given Telemetry Dictionary; return a connection.

    Falls back to the default telemetry dictionary when *tlmdict* is None.
    """
    if tlmdict is None:
        tlmdict = tlm.getDefaultDict()
    dbconn = connect(database)
    for defn in tlmdict.values():
        createTable(dbconn, defn)
    return dbconn
Creates a new database for the given Telemetry Dictionary and returns a connection to it .
42,815
def createTable(dbconn, pd):
    """Create a database table for the given PacketDefinition."""
    columns = ', '.join('%s %s' % (defn.name, getTypename(defn)) for defn in pd.fields)
    dbconn.execute('CREATE TABLE IF NOT EXISTS %s (%s)' % (pd.name, columns))
    dbconn.commit()
Creates a database table for the given PacketDefinition .
42,816
def insert(dbconn, packet):
    """Insert the given packet into the connected database.

    Enumerated fields are stored by their raw (unmapped) value; fields that
    are None fall back to the packet's history when one is recorded.
    """
    pd = packet._defn
    values = []
    for defn in pd.fields:
        val = getattr(packet.raw, defn.name) if defn.enum else getattr(packet, defn.name)
        if val is None and defn.name in pd.history:
            val = getattr(packet.history, defn.name)
        values.append(val)
    placeholders = ', '.join('?' * len(values))
    dbconn.execute('INSERT INTO %s VALUES (%s)' % (pd.name, placeholders), values)
Inserts the given packet into the connected database .
42,817
def connect(self, **kwargs):
    """Connect to an InfluxDB instance, creating the target database if needed.

    AIT config values take precedence over the corresponding keyword
    arguments, which in turn fall back to localhost defaults.
    """
    get = ait.config.get
    host = get('database.host', kwargs.get('host', 'localhost'))
    port = get('database.port', kwargs.get('port', 8086))
    un = get('database.un', kwargs.get('un', 'root'))
    pw = get('database.pw', kwargs.get('pw', 'root'))
    dbname = get('database.dbname', kwargs.get('database', 'ait'))
    self._conn = self._backend.InfluxDBClient(host, port, un, pw)
    existing = [v['name'] for v in self._conn.get_list_database()]
    if dbname not in existing:
        self.create(database=dbname)
    self._conn.switch_database(dbname)
Connect to an InfluxDB instance
42,818
def create(self, **kwargs):
    """Create a database in a connected InfluxDB instance and switch to it.

    Raises AttributeError when no connection has been established yet.
    """
    dbname = ait.config.get('database.dbname', kwargs.get('database', 'ait'))
    if self._conn is None:
        raise AttributeError('Unable to create database. No connection to database exists.')
    self._conn.create_database(dbname)
    self._conn.switch_database(dbname)
Create a database in a connected InfluxDB instance
42,819
def create_packets_from_results(self, packet_name, result_set):
    """Generate AIT Packets from an InfluxDB query ResultSet.

    Returns a list of tlm.Packet for *packet_name*, or None when the name
    is not in the default telemetry dictionary.  Time-typed fields are
    rebuilt from their stored numeric representations.
    """
    try:
        pkt_defn = tlm.getDefaultDict()[packet_name]
    except KeyError:
        log.error('Unknown packet name {} Unable to unpack ResultSet'.format(packet_name))
        return None

    pkts = []
    for r in result_set.get_points():
        new_pkt = tlm.Packet(pkt_defn)
        for f, f_defn in pkt_defn.fieldmap.items():  # py3: iteritems -> items
            field_type_name = f_defn.type.name
            if field_type_name == 'CMD16':
                # BUGFIX: bind the opcode definition before using it — the
                # original referenced an undefined `cmd_def`.
                cmd_def = cmd.getDefaultDict().opcodes.get(r[f], None)
                if cmd_def:
                    setattr(new_pkt, f, cmd_def.name)
            elif field_type_name == 'EVR16':
                if evr.getDefaultDict().codes.get(r[f], None):
                    setattr(new_pkt, f, r[f])
            elif field_type_name == 'TIME8':
                # TIME8 stores time scaled by 256.
                setattr(new_pkt, f, r[f] / 256.0)
            elif field_type_name == 'TIME32':
                setattr(new_pkt, f, dmc.GPS_Epoch + dt.timedelta(seconds=r[f]))
            elif field_type_name in ('TIME40', 'TIME64'):
                # BUGFIX: TIME40 previously used the *whole* value (not the
                # fractional part) as microseconds, double-counting the
                # seconds; both formats now split like TIME64 did.
                sec = int(r[f])
                microsec = r[f] % 1 * 1e6
                setattr(new_pkt, f,
                        dmc.GPS_Epoch + dt.timedelta(seconds=sec, microseconds=microsec))
            else:
                try:
                    setattr(new_pkt, f, r[f])
                except KeyError:
                    log.info('Field not found in query results {} Skipping ...'.format(f))
        pkts.append(new_pkt)
    return pkts
Generate AIT Packets from a InfluxDB query ResultSet
42,820
def create(self, **kwargs):
    """Create a table per packet in the current telemetry dictionary.

    A 'tlmdict' keyword overrides the default telemetry dictionary; all
    keywords are also forwarded to connect().
    """
    tlmdict = kwargs.get('tlmdict', tlm.getDefaultDict())
    self.connect(**kwargs)
    for defn in tlmdict.values():
        self._create_table(defn)
Create a database for the current telemetry dictionary
42,821
def _create_table ( self , packet_defn ) : cols = ( '%s %s' % ( defn . name , self . _getTypename ( defn ) ) for defn in packet_defn . fields ) sql = 'CREATE TABLE IF NOT EXISTS %s (%s)' % ( packet_defn . name , ', ' . join ( cols ) ) self . _conn . execute ( sql ) self . _conn . commit ( )
Creates a database table for the given PacketDefinition
42,822
def _getTypename ( self , defn ) : return 'REAL' if defn . type . float or 'TIME' in defn . type . name or defn . dntoeu else 'INTEGER'
Returns the SQL typename required to store the given FieldDefinition
42,823
def genCubeVector(x, y, z, x_mult=1, y_mult=1, z_mult=1):
    """Generate a 3-D map of truncated vector lengths from the cube's center.

    x, y, z give the matrix width/height/depth; each *_mult scales its
    axis's contribution (as an exponent multiplier).
    """
    center_x, center_y, center_z = (x - 1) / 2.0, (y - 1) / 2.0, (z - 1) / 2.0

    def length(i, j, k):
        # Euclidean-style distance with per-axis exponent scaling, truncated.
        return int(math.sqrt(math.pow(i - center_x, 2 * x_mult) +
                             math.pow(j - center_y, 2 * y_mult) +
                             math.pow(k - center_z, 2 * z_mult)))

    return [[[length(i, j, k) for k in range(z)]
             for j in range(y)]
            for i in range(x)]
Generates a map of vector lengths from the center point to each coordinate x - width of matrix to generate y - height of matrix to generate z - depth of matrix to generate x_mult - value to scale x - axis by y_mult - value to scale y - axis by z_mult - value to scale z - axis by
42,824
def adjust_datetime_to_timezone(value, from_tz, to_tz=None):
    """Convert *value* from the from_tz timezone into to_tz.

    *to_tz* defaults to settings.TIME_ZONE.  Naive datetimes are first
    localized to *from_tz*.
    """
    if to_tz is None:
        to_tz = settings.TIME_ZONE
    if value.tzinfo is None:
        # Accept either a tz name string or an already-built pytz timezone.
        if not hasattr(from_tz, "localize"):
            from_tz = pytz.timezone(smart_str(from_tz))
        value = from_tz.localize(value)
    return value.astimezone(pytz.timezone(smart_str(to_tz)))
Given a datetime object adjust it according to the from_tz timezone string into the to_tz timezone string .
42,825
def get_db_prep_lookup(self, lookup_type, value, connection=None, prepared=None):
    """Return the field's value prepared for a database lookup.

    The value is normalized to the default timezone first: naive datetimes
    are localized, aware ones are converted.
    """
    value = (default_tz.localize(value) if value.tzinfo is None
             else value.astimezone(default_tz))
    return super(LocalizedDateTimeField, self).get_db_prep_lookup(
        lookup_type, value, connection=connection, prepared=prepared)
Returns the field's value prepared for database lookup.
42,826
def make_vel(self):
    """Make a set of velocities to be randomly chosen for emitted particles.

    Draws 16 normally-distributed velocities, then clamps any magnitude
    below 0.125/_size up to that minimum (keeping the sign) so particles
    always move a perceptible amount.
    """
    self.vel = random.normal(self.vel_mu, self.vel_sigma, 16)
    floor = 0.125 / self._size
    for i, vel in enumerate(self.vel):
        if abs(vel) < floor:
            self.vel[i] = -floor if vel < 0 else floor
Make a set of velocities to be randomly chosen for emitted particles
42,827
def move_emitters(self):
    """Move each emitter by its velocity.

    Emitters that run off either end of the strip wrap around when
    self.wrap is set, and are dropped otherwise.
    """
    survivors = []
    for e_pos, e_dir, e_vel, e_range, e_color, e_pal in self.emitters:
        e_pos += e_vel
        if e_vel > 0 and e_pos >= (self._end + 1):
            if not self.wrap:
                continue
            e_pos = e_pos - (self._end + 1) + self._start
        elif e_vel <= 0 and e_pos < self._start:
            if not self.wrap:
                continue
            e_pos = e_pos + self._end + 1 + self._start
        survivors.append((e_pos, e_dir, e_vel, e_range, e_color, e_pal))
    self.emitters = survivors
Move each emitter by its velocity. Emitters that move off the ends and are not wrapped get sacked.
42,828
def start_new_particles(self):
    """Start some new particles from the emitters.

    We roll the dice starts_at_once times per emitter; each roll that beats
    starts_prob launches a particle with a random velocity from self.vel
    (sign driven by the emitter direction) and a random palette color.
    """
    for e_pos, e_dir, e_vel, e_range, e_color, e_pal in self.emitters:
        for _ in range(self.starts_at_once):
            if random.random() >= self.starts_prob:
                continue
            p_vel = self.vel[random.choice(len(self.vel))]
            # Negative emitters always fire backwards; bidirectional
            # (e_dir == 0) emitters flip a coin.
            if e_dir < 0 or e_dir == 0 and random.random() > 0.5:
                p_vel = -p_vel
            self.particles.append((p_vel,
                                   e_pos,
                                   int(e_range // abs(p_vel)),
                                   e_pal[random.choice(len(e_pal))],
                                   255))
Start some new particles from the emitters. We roll the dice starts_at_once times, seeing if we can start each particle based on starts_prob. If it starts, the particle gets a color from the palette and a velocity from the vel list.
42,829
def visibility(self, strip_pos, particle_pos):
    """Return particle visibility in [0, 1] for the given strip position.

    Distance is measured the short way around the circular strip; anything
    one aperture or more away is invisible, with visibility ramping
    linearly to 1.0 as the distance approaches zero.
    """
    dist = abs(particle_pos - strip_pos)
    if dist > self.half_size:
        # Wrap: measure the short way around the loop.
        dist = self._size - dist
    if dist >= self.aperture:
        return 0
    return (self.aperture - dist) / self.aperture
Compute particle visibility based on the distance between the current strip position being rendered and the particle position. A value of 0.0 is returned if they are >= one aperture apart; values between 0.0 and 1.0 are returned if they are less than one aperture apart.
42,830
def render_particles(self):
    """Render visible particles into self.color_list, one strip position at a time.

    Emitter glow (for emitters that carry a color) and particle colors are
    blended together; positions left black fall back to the background color.
    """
    for strip_pos in range(self._start, self._end + 1):
        blended = COLORS.black
        if self.has_e_colors:
            for e_pos, e_dir, e_vel, e_range, e_color, e_pal in self.emitters:
                if e_color is None:
                    continue
                vis = self.visibility(strip_pos, e_pos)
                if vis > 0:
                    blended = color_blend(blended, color_scale(e_color, int(vis * 255)))
        for vel, pos, stl, color, bright in self.particles:
            vis = self.visibility(strip_pos, pos)
            if vis > 0 and bright > 0:
                blended = color_blend(blended, color_scale(color, int(vis * bright)))
        if blended == COLORS.black:
            blended = self.bgcolor
        self.color_list[strip_pos] = blended
Render visible particles at each strip position by modifying the strip's color list.
42,831
def step(self, amt=1):
    """Make a frame of the animation.

    Advances particles (and moving emitters), spawns new particles, and
    renders.  The animation is marked complete once no emitters or
    particles remain.
    """
    self.move_particles()
    if self.has_moving_emitters:
        self.move_emitters()
    self.start_new_particles()
    self.render_particles()
    if self.emitters == [] and self.particles == []:
        self.completed = True
Make a frame of the animation
42,832
def complete(self):
    """Return True when the game is over: every square filled, or a winner exists."""
    # `None` marks an unclaimed square.  (The original copied self.squares
    # through a pointless list comprehension before the membership test.)
    if None not in self.squares:
        return True
    return self.winner() is not None
is the game over?
42,833
def get_squares(self, player=None):
    """Return the indices of squares belonging to *player*.

    With no (truthy) player, the raw squares list itself is returned.
    """
    if not player:
        return self.squares
    return [idx for idx, owner in enumerate(self.squares) if owner == player]
squares that belong to a player
42,834
def makeGly(segID, N, CA, C, O, geo):
    """Create a Glycine residue: backbone only (no side chain)."""
    res = Residue((' ', segID, ' '), "GLY", ' ')
    for atom in (N, CA, C, O):
        res.add(atom)
    return res
Creates a Glycine residue
42,835
def makeAla(segID, N, CA, C, O, geo):
    """Create an Alanine residue: backbone plus a single CB side-chain carbon."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    res = Residue((' ', segID, ' '), "ALA", ' ')
    for atom in (N, CA, C, O, CB):
        res.add(atom)
    return res
Creates an Alanine residue
42,836
def makeSer(segID, N, CA, C, O, geo):
    """Create a Serine residue: backbone plus CB and the OG hydroxyl oxygen."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    og_coord = calculateCoordinates(N, CA, CB, geo.CB_OG_length, geo.CA_CB_OG_angle,
                                    geo.N_CA_CB_OG_diangle)
    OG = Atom("OG", og_coord, 0.0, 1.0, " ", " OG", 0, "O")
    res = Residue((' ', segID, ' '), "SER", ' ')
    for atom in (N, CA, C, O, CB, OG):
        res.add(atom)
    return res
Creates a Serine residue
42,837
def makeCys(segID, N, CA, C, O, geo):
    """Create a Cysteine residue: backbone plus CB and the SG thiol sulfur."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    sg_coord = calculateCoordinates(N, CA, CB, geo.CB_SG_length, geo.CA_CB_SG_angle,
                                    geo.N_CA_CB_SG_diangle)
    SG = Atom("SG", sg_coord, 0.0, 1.0, " ", " SG", 0, "S")
    res = Residue((' ', segID, ' '), "CYS", ' ')
    for atom in (N, CA, C, O, CB, SG):
        res.add(atom)
    return res
Creates a Cysteine residue
42,838
def makeVal(segID, N, CA, C, O, geo):
    """Create a Valine residue: backbone plus CB and the CG1/CG2 methyl carbons."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    cg1_coord = calculateCoordinates(N, CA, CB, geo.CB_CG1_length, geo.CA_CB_CG1_angle,
                                     geo.N_CA_CB_CG1_diangle)
    CG1 = Atom("CG1", cg1_coord, 0.0, 1.0, " ", " CG1", 0, "C")
    cg2_coord = calculateCoordinates(N, CA, CB, geo.CB_CG2_length, geo.CA_CB_CG2_angle,
                                     geo.N_CA_CB_CG2_diangle)
    CG2 = Atom("CG2", cg2_coord, 0.0, 1.0, " ", " CG2", 0, "C")
    res = Residue((' ', segID, ' '), "VAL", ' ')
    for atom in (N, CA, C, O, CB, CG1, CG2):
        res.add(atom)
    return res
Creates a Valine residue
42,839
def makeIle(segID, N, CA, C, O, geo):
    """Create an Isoleucine residue: backbone plus CB, CG1, CG2, and CD1."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    cg1_coord = calculateCoordinates(N, CA, CB, geo.CB_CG1_length, geo.CA_CB_CG1_angle,
                                     geo.N_CA_CB_CG1_diangle)
    CG1 = Atom("CG1", cg1_coord, 0.0, 1.0, " ", " CG1", 0, "C")
    cg2_coord = calculateCoordinates(N, CA, CB, geo.CB_CG2_length, geo.CA_CB_CG2_angle,
                                     geo.N_CA_CB_CG2_diangle)
    CG2 = Atom("CG2", cg2_coord, 0.0, 1.0, " ", " CG2", 0, "C")
    cd1_coord = calculateCoordinates(CA, CB, CG1, geo.CG1_CD1_length, geo.CB_CG1_CD1_angle,
                                     geo.CA_CB_CG1_CD1_diangle)
    CD1 = Atom("CD1", cd1_coord, 0.0, 1.0, " ", " CD1", 0, "C")
    res = Residue((' ', segID, ' '), "ILE", ' ')
    for atom in (N, CA, C, O, CB, CG1, CG2, CD1):
        res.add(atom)
    return res
Creates an Isoleucine residue
42,840
def makeLeu(segID, N, CA, C, O, geo):
    """Create a Leucine residue: backbone plus CB, CG, CD1, and CD2."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    cg_coord = calculateCoordinates(N, CA, CB, geo.CB_CG_length, geo.CA_CB_CG_angle,
                                    geo.N_CA_CB_CG_diangle)
    CG = Atom("CG", cg_coord, 0.0, 1.0, " ", " CG", 0, "C")
    cd1_coord = calculateCoordinates(CA, CB, CG, geo.CG_CD1_length, geo.CB_CG_CD1_angle,
                                     geo.CA_CB_CG_CD1_diangle)
    CD1 = Atom("CD1", cd1_coord, 0.0, 1.0, " ", " CD1", 0, "C")
    cd2_coord = calculateCoordinates(CA, CB, CG, geo.CG_CD2_length, geo.CB_CG_CD2_angle,
                                     geo.CA_CB_CG_CD2_diangle)
    CD2 = Atom("CD2", cd2_coord, 0.0, 1.0, " ", " CD2", 0, "C")
    res = Residue((' ', segID, ' '), "LEU", ' ')
    for atom in (N, CA, C, O, CB, CG, CD1, CD2):
        res.add(atom)
    return res
Creates a Leucine residue
42,841
def makeThr(segID, N, CA, C, O, geo):
    """Create a Threonine residue: backbone plus CB, OG1 hydroxyl, and CG2 methyl."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    og1_coord = calculateCoordinates(N, CA, CB, geo.CB_OG1_length, geo.CA_CB_OG1_angle,
                                     geo.N_CA_CB_OG1_diangle)
    OG1 = Atom("OG1", og1_coord, 0.0, 1.0, " ", " OG1", 0, "O")
    cg2_coord = calculateCoordinates(N, CA, CB, geo.CB_CG2_length, geo.CA_CB_CG2_angle,
                                     geo.N_CA_CB_CG2_diangle)
    CG2 = Atom("CG2", cg2_coord, 0.0, 1.0, " ", " CG2", 0, "C")
    res = Residue((' ', segID, ' '), "THR", ' ')
    for atom in (N, CA, C, O, CB, OG1, CG2):
        res.add(atom)
    return res
Creates a Threonine residue
42,842
def makeArg(segID, N, CA, C, O, geo):
    """Create an Arginine residue: backbone plus the CB-CG-CD-NE-CZ-NH1/NH2 side chain."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    cg_coord = calculateCoordinates(N, CA, CB, geo.CB_CG_length, geo.CA_CB_CG_angle,
                                    geo.N_CA_CB_CG_diangle)
    CG = Atom("CG", cg_coord, 0.0, 1.0, " ", " CG", 0, "C")
    cd_coord = calculateCoordinates(CA, CB, CG, geo.CG_CD_length, geo.CB_CG_CD_angle,
                                    geo.CA_CB_CG_CD_diangle)
    CD = Atom("CD", cd_coord, 0.0, 1.0, " ", " CD", 0, "C")
    ne_coord = calculateCoordinates(CB, CG, CD, geo.CD_NE_length, geo.CG_CD_NE_angle,
                                    geo.CB_CG_CD_NE_diangle)
    NE = Atom("NE", ne_coord, 0.0, 1.0, " ", " NE", 0, "N")
    cz_coord = calculateCoordinates(CG, CD, NE, geo.NE_CZ_length, geo.CD_NE_CZ_angle,
                                    geo.CG_CD_NE_CZ_diangle)
    CZ = Atom("CZ", cz_coord, 0.0, 1.0, " ", " CZ", 0, "C")
    nh1_coord = calculateCoordinates(CD, NE, CZ, geo.CZ_NH1_length, geo.NE_CZ_NH1_angle,
                                     geo.CD_NE_CZ_NH1_diangle)
    NH1 = Atom("NH1", nh1_coord, 0.0, 1.0, " ", " NH1", 0, "N")
    nh2_coord = calculateCoordinates(CD, NE, CZ, geo.CZ_NH2_length, geo.NE_CZ_NH2_angle,
                                     geo.CD_NE_CZ_NH2_diangle)
    NH2 = Atom("NH2", nh2_coord, 0.0, 1.0, " ", " NH2", 0, "N")
    res = Residue((' ', segID, ' '), "ARG", ' ')
    for atom in (N, CA, C, O, CB, CG, CD, NE, CZ, NH1, NH2):
        res.add(atom)
    return res
Creates an Arginine residue
42,843
def makeLys(segID, N, CA, C, O, geo):
    """Create a Lysine residue: backbone plus the CB-CG-CD-CE-NZ side chain."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    cg_coord = calculateCoordinates(N, CA, CB, geo.CB_CG_length, geo.CA_CB_CG_angle,
                                    geo.N_CA_CB_CG_diangle)
    CG = Atom("CG", cg_coord, 0.0, 1.0, " ", " CG", 0, "C")
    cd_coord = calculateCoordinates(CA, CB, CG, geo.CG_CD_length, geo.CB_CG_CD_angle,
                                    geo.CA_CB_CG_CD_diangle)
    CD = Atom("CD", cd_coord, 0.0, 1.0, " ", " CD", 0, "C")
    ce_coord = calculateCoordinates(CB, CG, CD, geo.CD_CE_length, geo.CG_CD_CE_angle,
                                    geo.CB_CG_CD_CE_diangle)
    CE = Atom("CE", ce_coord, 0.0, 1.0, " ", " CE", 0, "C")
    nz_coord = calculateCoordinates(CG, CD, CE, geo.CE_NZ_length, geo.CD_CE_NZ_angle,
                                    geo.CG_CD_CE_NZ_diangle)
    NZ = Atom("NZ", nz_coord, 0.0, 1.0, " ", " NZ", 0, "N")
    res = Residue((' ', segID, ' '), "LYS", ' ')
    for atom in (N, CA, C, O, CB, CG, CD, CE, NZ):
        res.add(atom)
    return res
Creates a Lysine residue
42,844
def makeAsp(segID, N, CA, C, O, geo):
    """Create an Aspartic Acid residue: backbone plus CB, CG, and the OD1/OD2 carboxylate oxygens."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    cg_coord = calculateCoordinates(N, CA, CB, geo.CB_CG_length, geo.CA_CB_CG_angle,
                                    geo.N_CA_CB_CG_diangle)
    CG = Atom("CG", cg_coord, 0.0, 1.0, " ", " CG", 0, "C")
    od1_coord = calculateCoordinates(CA, CB, CG, geo.CG_OD1_length, geo.CB_CG_OD1_angle,
                                     geo.CA_CB_CG_OD1_diangle)
    OD1 = Atom("OD1", od1_coord, 0.0, 1.0, " ", " OD1", 0, "O")
    od2_coord = calculateCoordinates(CA, CB, CG, geo.CG_OD2_length, geo.CB_CG_OD2_angle,
                                     geo.CA_CB_CG_OD2_diangle)
    OD2 = Atom("OD2", od2_coord, 0.0, 1.0, " ", " OD2", 0, "O")
    res = Residue((' ', segID, ' '), "ASP", ' ')
    for atom in (N, CA, C, O, CB, CG, OD1, OD2):
        res.add(atom)
    return res
Creates an Aspartic Acid residue
42,845
def makeAsn(segID, N, CA, C, O, geo):
    """Create an Asparagine residue: backbone plus CB, CG, OD1, and the ND2 amide nitrogen."""
    cb_coord = calculateCoordinates(N, C, CA, geo.CA_CB_length, geo.C_CA_CB_angle,
                                    geo.N_C_CA_CB_diangle)
    CB = Atom("CB", cb_coord, 0.0, 1.0, " ", " CB", 0, "C")
    cg_coord = calculateCoordinates(N, CA, CB, geo.CB_CG_length, geo.CA_CB_CG_angle,
                                    geo.N_CA_CB_CG_diangle)
    CG = Atom("CG", cg_coord, 0.0, 1.0, " ", " CG", 0, "C")
    od1_coord = calculateCoordinates(CA, CB, CG, geo.CG_OD1_length, geo.CB_CG_OD1_angle,
                                     geo.CA_CB_CG_OD1_diangle)
    OD1 = Atom("OD1", od1_coord, 0.0, 1.0, " ", " OD1", 0, "O")
    nd2_coord = calculateCoordinates(CA, CB, CG, geo.CG_ND2_length, geo.CB_CG_ND2_angle,
                                     geo.CA_CB_CG_ND2_diangle)
    ND2 = Atom("ND2", nd2_coord, 0.0, 1.0, " ", " ND2", 0, "N")
    res = Residue((' ', segID, ' '), "ASN", ' ')
    for atom in (N, CA, C, O, CB, CG, OD1, ND2):
        res.add(atom)
    return res
Creates an Asparagine residue
42,846
def makeGlu(segID, N, CA, C, O, geo):
    """Creates a Glutamic Acid residue from backbone atoms and geometry."""
    # Side chain: CB, CG, CD, then the two carboxylate oxygens.
    CB = Atom("CB",
              calculateCoordinates(N, C, CA, geo.CA_CB_length,
                                   geo.C_CA_CB_angle, geo.N_C_CA_CB_diangle),
              0.0, 1.0, " ", " CB", 0, "C")
    CG = Atom("CG",
              calculateCoordinates(N, CA, CB, geo.CB_CG_length,
                                   geo.CA_CB_CG_angle, geo.N_CA_CB_CG_diangle),
              0.0, 1.0, " ", " CG", 0, "C")
    CD = Atom("CD",
              calculateCoordinates(CA, CB, CG, geo.CG_CD_length,
                                   geo.CB_CG_CD_angle, geo.CA_CB_CG_CD_diangle),
              0.0, 1.0, " ", " CD", 0, "C")
    OE1 = Atom("OE1",
               calculateCoordinates(CB, CG, CD, geo.CD_OE1_length,
                                    geo.CG_CD_OE1_angle,
                                    geo.CB_CG_CD_OE1_diangle),
               0.0, 1.0, " ", " OE1", 0, "O")
    OE2 = Atom("OE2",
               calculateCoordinates(CB, CG, CD, geo.CD_OE2_length,
                                    geo.CG_CD_OE2_angle,
                                    geo.CB_CG_CD_OE2_diangle),
               0.0, 1.0, " ", " OE2", 0, "O")
    res = Residue((' ', segID, ' '), "GLU", ' ')
    for atom in (N, CA, C, O, CB, CG, CD, OE1, OE2):
        res.add(atom)
    return res
Creates a Glutamic Acid residue
42,847
def makeGln(segID, N, CA, C, O, geo):
    """Creates a Glutamine residue from backbone atoms and geometry."""
    # Side chain: CB, CG, CD, then the amide oxygen and nitrogen.
    CB = Atom("CB",
              calculateCoordinates(N, C, CA, geo.CA_CB_length,
                                   geo.C_CA_CB_angle, geo.N_C_CA_CB_diangle),
              0.0, 1.0, " ", " CB", 0, "C")
    CG = Atom("CG",
              calculateCoordinates(N, CA, CB, geo.CB_CG_length,
                                   geo.CA_CB_CG_angle, geo.N_CA_CB_CG_diangle),
              0.0, 1.0, " ", " CG", 0, "C")
    CD = Atom("CD",
              calculateCoordinates(CA, CB, CG, geo.CG_CD_length,
                                   geo.CB_CG_CD_angle, geo.CA_CB_CG_CD_diangle),
              0.0, 1.0, " ", " CD", 0, "C")
    OE1 = Atom("OE1",
               calculateCoordinates(CB, CG, CD, geo.CD_OE1_length,
                                    geo.CG_CD_OE1_angle,
                                    geo.CB_CG_CD_OE1_diangle),
               0.0, 1.0, " ", " OE1", 0, "O")
    NE2 = Atom("NE2",
               calculateCoordinates(CB, CG, CD, geo.CD_NE2_length,
                                    geo.CG_CD_NE2_angle,
                                    geo.CB_CG_CD_NE2_diangle),
               0.0, 1.0, " ", " NE2", 0, "N")
    res = Residue((' ', segID, ' '), "GLN", ' ')
    for atom in (N, CA, C, O, CB, CG, CD, OE1, NE2):
        res.add(atom)
    return res
Creates a Glutamine residue
42,848
def makeMet(segID, N, CA, C, O, geo):
    """Creates a Methionine residue from backbone atoms and geometry."""
    # Side chain: CB, CG, the sulfur SD, and the terminal CE.
    CB = Atom("CB",
              calculateCoordinates(N, C, CA, geo.CA_CB_length,
                                   geo.C_CA_CB_angle, geo.N_C_CA_CB_diangle),
              0.0, 1.0, " ", " CB", 0, "C")
    CG = Atom("CG",
              calculateCoordinates(N, CA, CB, geo.CB_CG_length,
                                   geo.CA_CB_CG_angle, geo.N_CA_CB_CG_diangle),
              0.0, 1.0, " ", " CG", 0, "C")
    SD = Atom("SD",
              calculateCoordinates(CA, CB, CG, geo.CG_SD_length,
                                   geo.CB_CG_SD_angle, geo.CA_CB_CG_SD_diangle),
              0.0, 1.0, " ", " SD", 0, "S")
    CE = Atom("CE",
              calculateCoordinates(CB, CG, SD, geo.SD_CE_length,
                                   geo.CG_SD_CE_angle, geo.CB_CG_SD_CE_diangle),
              0.0, 1.0, " ", " CE", 0, "C")
    res = Residue((' ', segID, ' '), "MET", ' ')
    for atom in (N, CA, C, O, CB, CG, SD, CE):
        res.add(atom)
    return res
Creates a Methionine residue
42,849
def makeHis(segID, N, CA, C, O, geo):
    """Creates a Histidine residue from backbone atoms and geometry."""
    # Side chain: CB, CG, then the imidazole ring atoms ND1/CD2/CE1/NE2.
    CB = Atom("CB",
              calculateCoordinates(N, C, CA, geo.CA_CB_length,
                                   geo.C_CA_CB_angle, geo.N_C_CA_CB_diangle),
              0.0, 1.0, " ", " CB", 0, "C")
    CG = Atom("CG",
              calculateCoordinates(N, CA, CB, geo.CB_CG_length,
                                   geo.CA_CB_CG_angle, geo.N_CA_CB_CG_diangle),
              0.0, 1.0, " ", " CG", 0, "C")
    ND1 = Atom("ND1",
               calculateCoordinates(CA, CB, CG, geo.CG_ND1_length,
                                    geo.CB_CG_ND1_angle,
                                    geo.CA_CB_CG_ND1_diangle),
               0.0, 1.0, " ", " ND1", 0, "N")
    CD2 = Atom("CD2",
               calculateCoordinates(CA, CB, CG, geo.CG_CD2_length,
                                    geo.CB_CG_CD2_angle,
                                    geo.CA_CB_CG_CD2_diangle),
               0.0, 1.0, " ", " CD2", 0, "C")
    CE1 = Atom("CE1",
               calculateCoordinates(CB, CG, ND1, geo.ND1_CE1_length,
                                    geo.CG_ND1_CE1_angle,
                                    geo.CB_CG_ND1_CE1_diangle),
               0.0, 1.0, " ", " CE1", 0, "C")
    NE2 = Atom("NE2",
               calculateCoordinates(CB, CG, CD2, geo.CD2_NE2_length,
                                    geo.CG_CD2_NE2_angle,
                                    geo.CB_CG_CD2_NE2_diangle),
               0.0, 1.0, " ", " NE2", 0, "N")
    res = Residue((' ', segID, ' '), "HIS", ' ')
    for atom in (N, CA, C, O, CB, CG, ND1, CD2, CE1, NE2):
        res.add(atom)
    return res
Creates a Histidine residue
42,850
def makePro(segID, N, CA, C, O, geo):
    """Creates a Proline residue from backbone atoms and geometry."""
    # Side chain: only CB, CG and CD (the ring closure back to N is implicit).
    CB = Atom("CB",
              calculateCoordinates(N, C, CA, geo.CA_CB_length,
                                   geo.C_CA_CB_angle, geo.N_C_CA_CB_diangle),
              0.0, 1.0, " ", " CB", 0, "C")
    CG = Atom("CG",
              calculateCoordinates(N, CA, CB, geo.CB_CG_length,
                                   geo.CA_CB_CG_angle, geo.N_CA_CB_CG_diangle),
              0.0, 1.0, " ", " CG", 0, "C")
    CD = Atom("CD",
              calculateCoordinates(CA, CB, CG, geo.CG_CD_length,
                                   geo.CB_CG_CD_angle, geo.CA_CB_CG_CD_diangle),
              0.0, 1.0, " ", " CD", 0, "C")
    res = Residue((' ', segID, ' '), "PRO", ' ')
    for atom in (N, CA, C, O, CB, CG, CD):
        res.add(atom)
    return res
Creates a Proline residue
42,851
def makePhe(segID, N, CA, C, O, geo):
    """Creates a Phenylalanine residue from backbone atoms and geometry."""
    # Side chain: CB, CG, then the benzene ring CD1/CD2/CE1/CE2/CZ.
    CB = Atom("CB",
              calculateCoordinates(N, C, CA, geo.CA_CB_length,
                                   geo.C_CA_CB_angle, geo.N_C_CA_CB_diangle),
              0.0, 1.0, " ", " CB", 0, "C")
    CG = Atom("CG",
              calculateCoordinates(N, CA, CB, geo.CB_CG_length,
                                   geo.CA_CB_CG_angle, geo.N_CA_CB_CG_diangle),
              0.0, 1.0, " ", " CG", 0, "C")
    CD1 = Atom("CD1",
               calculateCoordinates(CA, CB, CG, geo.CG_CD1_length,
                                    geo.CB_CG_CD1_angle,
                                    geo.CA_CB_CG_CD1_diangle),
               0.0, 1.0, " ", " CD1", 0, "C")
    CD2 = Atom("CD2",
               calculateCoordinates(CA, CB, CG, geo.CG_CD2_length,
                                    geo.CB_CG_CD2_angle,
                                    geo.CA_CB_CG_CD2_diangle),
               0.0, 1.0, " ", " CD2", 0, "C")
    CE1 = Atom("CE1",
               calculateCoordinates(CB, CG, CD1, geo.CD1_CE1_length,
                                    geo.CG_CD1_CE1_angle,
                                    geo.CB_CG_CD1_CE1_diangle),
               0.0, 1.0, " ", " CE1", 0, "C")
    CE2 = Atom("CE2",
               calculateCoordinates(CB, CG, CD2, geo.CD2_CE2_length,
                                    geo.CG_CD2_CE2_angle,
                                    geo.CB_CG_CD2_CE2_diangle),
               0.0, 1.0, " ", " CE2", 0, "C")
    CZ = Atom("CZ",
              calculateCoordinates(CG, CD1, CE1, geo.CE1_CZ_length,
                                   geo.CD1_CE1_CZ_angle,
                                   geo.CG_CD1_CE1_CZ_diangle),
              0.0, 1.0, " ", " CZ", 0, "C")
    res = Residue((' ', segID, ' '), "PHE", ' ')
    for atom in (N, CA, C, O, CB, CG, CD1, CE1, CD2, CE2, CZ):
        res.add(atom)
    return res
Creates a Phenylalanine residue
42,852
def make_extended_structure(AA_chain):
    """Place a sequence of amino acids into a peptide in the extended
    conformation.

    The argument AA_chain holds the one-letter codes of the amino acids
    to be used.
    """
    structure = initialize_res(geometry(AA_chain[0]))
    for AA in AA_chain[1:]:
        add_residue(structure, geometry(AA))
    return structure
Place a sequence of amino acids into a peptide in the extended conformation . The argument AA_chain holds the sequence of amino acids to be used .
42,853
def make_structure_from_geos(geos):
    """Creates a structure out of a list of geometry objects."""
    model_structure = initialize_res(geos[0])
    for geo in geos[1:]:
        model_structure = add_residue(model_structure, geo)
    return model_structure
Creates a structure out of a list of geometry objects .
42,854
def geometry(AA):
    """Generates the geometry of the requested amino acid.

    The amino acid needs to be specified by its single-letter code.  If an
    invalid code is specified, the function returns the geometry of Glycine
    (matching the original fallback behaviour).
    """
    # Table dispatch replaces the original 20-branch elif chain.
    geo_classes = {
        'G': GlyGeo, 'A': AlaGeo, 'S': SerGeo, 'C': CysGeo, 'V': ValGeo,
        'I': IleGeo, 'L': LeuGeo, 'T': ThrGeo, 'R': ArgGeo, 'K': LysGeo,
        'D': AspGeo, 'E': GluGeo, 'N': AsnGeo, 'Q': GlnGeo, 'M': MetGeo,
        'H': HisGeo, 'P': ProGeo, 'F': PheGeo, 'Y': TyrGeo, 'W': TrpGeo,
    }
    return geo_classes.get(AA, GlyGeo)()
Generates the geometry of the requested amino acid . The amino acid needs to be specified by its single - letter code . If an invalid code is specified the function returns the geometry of Glycine .
42,855
def enregister(svc, newAddress, password):
    """Register a new account; return a Deferred that fires if it worked."""
    d = svc.connectQ2Q(
        q2q.Q2QAddress("", ""),
        q2q.Q2QAddress(newAddress.domain, "accounts"),
        'identity-admin',
        protocol.ClientFactory.forProtocol(AMP))
    d.addCallback(AMP.callRemote, AddUser,
                  name=newAddress.resource, password=password)
    # A dropped connection after registration is expected; swallow it.
    d.addErrback(Failure.trap, error.ConnectionDone)
    return d
Register a new account and return a Deferred that fires if it worked .
42,856
def connectCached(self, endpoint, protocolFactory, extraWork=lambda x: x,
                  extraHash=None):
    """See module docstring."""
    key = (endpoint, extraHash)
    result = Deferred()
    if key in self.cachedConnections:
        # Connection already established: fire immediately.
        result.callback(self.cachedConnections[key])
    elif key in self.inProgress:
        # A connection attempt is underway; queue up behind it.
        self.inProgress[key].append(result)
    else:
        self.inProgress[key] = [result]
        endpoint.connect(
            _CachingClientFactory(self, key, protocolFactory, extraWork))
    return result
See module docstring
42,857
def connectionLostForKey(self, key):
    """Remove a lost connection from the cache."""
    self.cachedConnections.pop(key, None)
    # If a shutdown is waiting on this connection, notify it.
    if self._shuttingDown and self._shuttingDown.get(key):
        pending = self._shuttingDown[key]
        self._shuttingDown[key] = None
        pending.callback(None)
Remove lost connection from cache .
42,858
def shutdown(self):
    """Disconnect all cached connections."""
    # One Deferred per cached connection, fired by connectionLostForKey.
    self._shuttingDown = {key: Deferred() for key in self.cachedConnections}
    closers = [maybeDeferred(proto.transport.loseConnection)
               for proto in self.cachedConnections.values()]
    return DeferredList(closers + list(self._shuttingDown.values()))
Disconnect all cached connections .
42,859
def parse_state(state):
    """Convert a bool or string into a bool.

    Raises TypeError for other types and ValueError for unknown strings.
    """
    if isinstance(state, bool):
        return state
    if isinstance(state, basestring):
        key = state.lower()
        if key not in _state_strings:
            raise ValueError('unknown ACL state string')
        return _state_strings[key]
    raise TypeError('ACL state must be bool or string')
Convert a bool or string into a bool .
42,860
def _getattr_path ( obj , path ) : if not path : return None for attr in path . split ( '.' ) : obj = getattr ( obj , attr , None ) return obj
getattr for a dot separated path
42,861
def _get_settings_from_request(request):
    """Extracts Zipkin attributes and configuration from request settings.

    See the zipkin_span context in py-zipkin for more detailed information
    on all the settings.
    """
    settings = request.registry.settings

    if 'zipkin.create_zipkin_attr' in settings:
        zipkin_attrs = settings['zipkin.create_zipkin_attr'](request)
    else:
        zipkin_attrs = create_zipkin_attr(request)

    if 'zipkin.transport_handler' not in settings:
        raise ZipkinError(
            "`zipkin.transport_handler` is a required config property, which"
            " is missing. Have a look at py_zipkin's docs for how to implement"
            " it: https://github.com/Yelp/py_zipkin#transport")
    transport_handler = settings['zipkin.transport_handler']
    if not isinstance(transport_handler, BaseTransportHandler):
        # Legacy callable handlers get the stream name bound for them.
        warnings.warn(
            'Using a function as transport_handler is deprecated. '
            'Please extend py_zipkin.transport.BaseTransportHandler',
            DeprecationWarning,
        )
        stream_name = settings.get('zipkin.stream_name', 'zipkin')
        transport_handler = functools.partial(transport_handler, stream_name)

    context_stack = _getattr_path(
        request, settings.get('zipkin.request_context'))
    service_name = settings.get('service_name', 'unknown')
    span_name = '{0} {1}'.format(request.method, request.path)
    add_logging_annotation = settings.get(
        'zipkin.add_logging_annotation', False)
    if 'zipkin.report_root_timestamp' in settings:
        report_root_timestamp = settings['zipkin.report_root_timestamp']
    else:
        # No incoming trace id means we are the root span.
        report_root_timestamp = 'X-B3-TraceId' not in request.headers

    return _ZipkinSettings(
        zipkin_attrs,
        transport_handler,
        service_name,
        span_name,
        add_logging_annotation,
        report_root_timestamp,
        settings.get('zipkin.host'),
        settings.get('zipkin.port', request.server_port),
        context_stack,
        settings.get('zipkin.firehose_handler'),
        settings.get('zipkin.post_handler_hook'),
        settings.get('zipkin.max_span_batch_size'),
        bool(settings.get('zipkin.use_pattern_as_span_name', False)),
        encoding=settings.get('zipkin.encoding', Encoding.V1_THRIFT),
    )
Extracts Zipkin attributes and configuration from request attributes. See the zipkin_span context in py-zipkin for more detailed information on all the settings.
42,862
def zipkin_tween(handler, registry):
    """Factory for a pyramid tween that handles zipkin server logging.

    Note that even if the request isn't sampled, Zipkin attributes are
    generated and pushed into threadlocal storage, so
    create_http_headers_for_new_span and zipkin_span will have access to
    the proper Zipkin state.
    """
    def tween(request):
        zipkin_settings = _get_settings_from_request(request)
        tracer = get_default_tracer()
        span_kwargs = dict(
            service_name=zipkin_settings.service_name,
            span_name=zipkin_settings.span_name,
            zipkin_attrs=zipkin_settings.zipkin_attrs,
            transport_handler=zipkin_settings.transport_handler,
            host=zipkin_settings.host,
            port=zipkin_settings.port,
            add_logging_annotation=zipkin_settings.add_logging_annotation,
            report_root_timestamp=zipkin_settings.report_root_timestamp,
            context_stack=zipkin_settings.context_stack,
            max_span_batch_size=zipkin_settings.max_span_batch_size,
            encoding=zipkin_settings.encoding,
            kind=Kind.SERVER,
        )
        # Only forwarded when configured.
        if zipkin_settings.firehose_handler is not None:
            span_kwargs['firehose_handler'] = zipkin_settings.firehose_handler
        with tracer.zipkin_span(**span_kwargs) as zipkin_context:
            response = handler(request)
            if (zipkin_settings.use_pattern_as_span_name
                    and request.matched_route):
                zipkin_context.override_span_name('{} {}'.format(
                    request.method,
                    request.matched_route.pattern,
                ))
            zipkin_context.update_binary_annotations(
                get_binary_annotations(request, response),
            )
            if zipkin_settings.post_handler_hook:
                zipkin_settings.post_handler_hook(request, response)
            return response
    return tween
Factory for pyramid tween to handle zipkin server logging . Note that even if the request isn t sampled Zipkin attributes are generated and pushed into threadlocal storage so create_http_headers_for_new_span and zipkin_span will have access to the proper Zipkin state .
42,863
def get_skill_data(self):
    """Generates (name, path, url, sha) tuples for each git submodule."""
    path_to_sha = dict(self.get_shas())
    modules = self.read_file('.gitmodules').split('[submodule "')
    for i, module in enumerate(modules):
        if not module:
            continue
        try:
            name = module.split('"]')[0].strip()
            path = module.split('path = ')[1].split('\n')[0].strip()
            url = module.split('url = ')[1].strip()
            yield name, path, url, path_to_sha.get(path, '')
        except (ValueError, IndexError) as e:
            # 'name' may not have been bound yet when parsing failed.
            LOG.warning('Failed to parse submodule "{}" #{}:{}'.format(
                locals().get('name', ''), i, e))
generates tuples of name path url sha
42,864
def expectAck(self):
    """When the most recent packet produced as an output of this state
    machine is acknowledged by our peer, generate a single 'ack' input.
    """
    sent = self.lastTransmitted

    def _acked(ackPacket):
        return ackPacket.relativeAck() >= sent.relativeSeq()

    self.ackPredicate = _acked
When the most recent packet produced as an output of this state machine is acknowledged by our peer generate a single ack input .
42,865
def _set_options():
    """Set the options for CleanCommand.

    Extends the base command's user and boolean options with extra
    clean targets (dist, eggs, virtualenv, __pycache__).
    """
    extra_options = [
        ('dist', 'd', 'remove distribution directory'),
        ('eggs', None, 'remove egg and egg-info directories'),
        ('environment', 'E', 'remove virtual environment directory'),
        ('pycache', 'p', 'remove __pycache__ directories'),
        ('egg-base=', 'e',
         'directory containing .egg-info directories '
         '(default: top of the source tree)'),
        ('virtualenv-dir=', None,
         'root directory for the virtual directory '
         '(default: value of VIRTUAL_ENV environment variable)'),
    ]
    CleanCommand.user_options = _CleanCommand.user_options[:] + extra_options
    CleanCommand.boolean_options = (
        _CleanCommand.boolean_options[:]
        + ['dist', 'eggs', 'environment', 'pycache'])
Set the options for CleanCommand .
42,866
def openMaskFile(filename):
    """Open the bitmask file sitting next to a file in the filesystem."""
    dirname, basename = os.path.split(filename)
    maskPath = os.path.join(dirname, '_%s_.sbm' % (basename,))
    return openReadWrite(maskPath)
Open the bitmask file sitting next to a file in the filesystem .
42,867
def data(self, name, chunk, body):
    """Issue a DATA command to the remote peer."""
    self.callRemote(Data, name=name, chunk=chunk, body=body)
Issue a DATA command
42,868
def get(self, name, mask=None):
    """Issue a GET command; returns a Deferred firing with the size."""
    requester = self.transport.getQ2QPeer()
    transload = self.nexus.transloads[name]
    knowledge = transload.peers.get(requester)
    if knowledge is None:
        # First contact from this peer: assume it has everything until told
        # otherwise.
        knowledge = PeerKnowledge(
            bits.BitArray(size=len(transload.mask), default=1))
        transload.peers[requester] = knowledge
    knowledge.sentGet = True
    d = self.callRemote(Get, name=name, mask=mask)
    return d.addCallback(lambda response: response['size'])
Issue a GET command
42,869
def selectPeerToIntroduce(self, otherPeers):
    """Choose a peer to introduce.

    Returns a q2q address, or None if there are no suitable peers to
    introduce at this time.  The chosen peer is remembered so it is not
    introduced twice.
    """
    known = self.otherPeers
    for candidate in otherPeers:
        if candidate not in known:
            known.append(candidate)
            return candidate
    return None
Choose a peer to introduce . Return a q2q address or None if there are no suitable peers to introduce at this time .
42,870
def chunkReceived(self, who, chunkNumber, chunkData):
    """A chunk was received from the peer; verify it before accepting."""
    def verifyError(error):
        error.trap(VerifyError)
        # Bad chunk: penalize the sender's score.
        self.nexus.decreaseScore(who, self.authorities)

    d = self.nexus.verifyChunk(
        self.name, who, chunkNumber,
        sha.new(chunkData).digest(), self.authorities)
    return d.addCallbacks(
        lambda whatever: self.chunkVerified(who, chunkNumber, chunkData),
        verifyError)
A chunk was received from the peer .
42,871
def selectOptimalChunk(self, peer):
    """Select an optimal chunk to send to a peer (rarest-first).

    Returns (chunkNumber, chunkData), or (None, None) when there is
    nothing we have that the peer wants.
    """
    have = sets.Set(self.mask.positions(1))
    want = sets.Set(self.peers[peer].mask.positions(0))
    exchangeable = have.intersection(want)
    # Count how many peers are missing each sendable chunk.
    rarity = dict.fromkeys(exchangeable, 0)
    for chunkNumber in exchangeable:
        for otherPeer in self.peers.itervalues():
            rarity[chunkNumber] += not otherPeer.mask[chunkNumber]
    # Random middle element breaks ties between equally rare chunks.
    ranked = [(count, random.random(), chunkNumber)
              for (chunkNumber, count) in rarity.iteritems()]
    if not ranked:
        return None, None
    ranked.sort()
    chunkNumber = ranked[-1][-1]
    assert self.mask[chunkNumber], "I wanted to send a chunk I didn't have"
    self.file.seek(chunkNumber * CHUNK_SIZE)
    chunkData = self.file.read(CHUNK_SIZE)
    self.sha1sums[chunkNumber] = sha.new(chunkData).digest()
    return chunkNumber, chunkData
select an optimal chunk to send to a peer .
42,872
def allocateFile(self, sharename, peer):
    """Return a 2-tuple of (incompletePath, fullPath) for this share."""
    peerDir = self.basepath.child(str(peer))
    if not peerDir.isdir():
        peerDir.makedirs()
    return (peerDir.child(sharename + '.incomplete'),
            peerDir.child(sharename))
return a 2 - tuple of incompletePath fullPath
42,873
def transloadsForPeer(self, peer):
    """Returns an iterator of transloads that apply to a particular peer."""
    for transload in self.transloads.itervalues():
        if peer in transload.peers:
            yield transload
Returns an iterator of transloads that apply to a particular peer .
42,874
def seed(self, path, name):
    """Create a transload from an existing file that is complete."""
    transload = Transload(self.addr, self, name, None, path,
                          self.ui.startTransload(name, self.addr),
                          seed=True)
    self.transloads[name] = transload
    return transload
Create a transload from an existing file that is complete .
42,875
def connectPeer(self, peer):
    """Establish a SIGMA connection to the given peer (via the cache)."""
    target = endpoint.Q2QEndpoint(self.svc, self.addr, peer, PROTOCOL_NAME)
    return self.conns.connectCached(target, self.clientFactory)
Establish a SIGMA connection to the given peer .
42,876
def increaseScore(self, participant):
    """Record that the participant successfully transferred a chunk to me.

    Increments the participant's score, creating the entry on first use.
    """
    # dict.get collapses the original "insert zero, then increment" dance.
    self.scores[participant] = self.scores.get(participant, 0) + 1
The participant successfully transferred a chunk to me .
42,877
def start(self) -> None:
    """Start a new thread that handles the input.

    If a thread is already running, the thread will be restarted.
    """
    self.stop()  # tear down any existing receiver first
    thread = receiverThread(socket=self.sock, callbacks=self._callbacks)
    self._thread = thread
    thread.start()
Starts a new thread that handles the input . If a thread is already running the thread will be restarted .
42,878
def route_acl(self, *acl, **options):
    """Decorator to attach an ACL to a route."""
    def _route_acl(func):
        func.__acl__ = acl

        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            # Permission is derived from the HTTP method of the request.
            permission = 'http.' + request.method.lower()
            local_opts = options.copy()
            local_opts.setdefault(
                'default', current_app.config['ACL_ROUTE_DEFAULT_STATE'])
            self.assert_can(permission, func, **local_opts)
            return func(*args, **kwargs)

        return wrapped
    return _route_acl
Decorator to attach an ACL to a route .
42,879
def can(self, permission, obj, **kwargs):
    """Check if we can do something with an object."""
    # Later context sources override earlier ones.
    context = {'user': current_user}
    for processor in self.context_processors:
        context.update(processor())
    context.update(get_object_context(obj))
    context.update(kwargs)
    return check(permission, iter_object_acl(obj), **context)
Check if we can do something with an object .
42,880
def assert_can(self, permission, obj, **kwargs):
    """Make sure we have a permission, or abort the request."""
    flash_message = kwargs.pop('flash', None)
    stealth = kwargs.pop('stealth', False)
    default = kwargs.pop('default', None)
    res = self.can(permission, obj, **kwargs)
    if res is None:
        res = default
    if res:
        return
    # --- permission denied from here on ---
    if flash_message and not stealth:
        flask.flash(flash_message, 'danger')
    if current_user.is_authenticated():
        if flash_message is not False:
            flask.flash(flash_message or
                        'You are not permitted to "%s" this resource'
                        % permission)
        flask.abort(403)
    elif not stealth and self.login_view:
        if flash_message is not False:
            flask.flash(flash_message or 'Please login for access.')
        raise _Redirect(
            flask.url_for(self.login_view) + '?' + urlencode(dict(
                next=flask.request.script_root + flask.request.path)))
    else:
        # Stealth denial: pretend the resource does not exist.
        flask.abort(404)
Make sure we have a permission or abort the request .
42,881
def can_route(self, endpoint, method=None, **kwargs):
    """Make sure we can route to the given endpoint or url."""
    view = flask.current_app.view_functions.get(endpoint)
    if not view:
        # Not a known endpoint name; try matching it as a URL.
        endpoint, args = flask._request_ctx.top.match(endpoint)
        view = flask.current_app.view_functions.get(endpoint)
    if not view:
        return False
    return self.can('http.' + (method or 'GET').lower(), view, **kwargs)
Make sure we can route to the given endpoint or url .
42,882
def should_not_sample_path(request):
    """Decides whether the current request path should be sampled or not.

    This is checked prior to should_not_sample_route and takes precedence.
    """
    blacklisted_paths = request.registry.settings.get(
        'zipkin.blacklisted_paths', [])
    # Settings may mix raw strings and pre-compiled patterns.
    compiled = [re.compile(p) if isinstance(p, six.string_types) else p
                for p in blacklisted_paths]
    return any(p.match(request.path) for p in compiled)
Decides whether the current request path should be sampled or not. This is checked prior to should_not_sample_route and takes precedence.
42,883
def should_not_sample_route(request):
    """Decides whether the current request route should be sampled or not."""
    blacklisted_routes = request.registry.settings.get(
        'zipkin.blacklisted_routes', [])
    if not blacklisted_routes:
        return False
    route_mapper = request.registry.queryUtility(IRoutesMapper)
    route_info = route_mapper(request).get('route')
    return route_info and route_info.name in blacklisted_routes
Decides whether the current request route should be sampled or not.
42,884
def create_zipkin_attr(request):
    """Create a ZipkinAttrs object from a request.

    Attaches the lazy attribute zipkin_trace_id to the request, which is
    then used throughout the tween.
    """
    settings = request.registry.settings
    if 'zipkin.is_tracing' in settings:
        is_sampled = settings['zipkin.is_tracing'](request)
    else:
        is_sampled = is_tracing(request)
    # Cache the trace id on the request for later use in the tween.
    request.zipkin_trace_id = get_trace_id(request)
    headers = request.headers
    return ZipkinAttrs(
        trace_id=request.zipkin_trace_id,
        span_id=headers.get('X-B3-SpanId', generate_random_64bit_string()),
        parent_span_id=headers.get('X-B3-ParentSpanId', None),
        flags=headers.get('X-B3-Flags', '0'),
        is_sampled=is_sampled,
    )
Create a ZipkinAttrs object from a request, with the sampled flag set as appropriate. Attaches the lazy attribute zipkin_trace_id to the request, which is then used throughout the tween.
42,885
def get_binary_annotations(request, response):
    """Helper method for getting all binary annotations from the request."""
    annotations = {
        'http.uri': request.path,
        'http.uri.qs': request.path_qs,
        'http.route': (request.matched_route.pattern
                       if request.matched_route else ''),
        'response_status_code': str(response.status_code),
    }
    settings = request.registry.settings
    if 'zipkin.set_extra_binary_annotations' in settings:
        annotations.update(
            settings['zipkin.set_extra_binary_annotations'](
                request, response))
    return annotations
Helper method for getting all binary annotations from the request .
42,886
def dmxData(self, data: tuple):
    """Store DMX data, normalized to exactly 512 slots.

    For legacy devices, and to prevent errors, the data is truncated or
    zero-padded to 512 values before being stored.
    """
    # Slice + pad replaces the original element-by-element copy loop.
    padded = list(data[:512]) + [0] * max(0, 512 - len(data))
    self._dmxData = tuple(padded)
    # 126-byte header plus the (always 512) data slots.
    self.length = 126 + len(self._dmxData)
For legacy devices and to prevent errors the length of the DMX data is normalized to 512
42,887
def getBytes(self) -> list:
    """Returns the Root layer as a list of bytes."""
    packet = list(_FIRST_INDEX)
    payload_length = self.length - 16
    # 12-bit length field with flags 0x7 in the top nibble.
    packet.append((0x7 << 4) + (payload_length >> 8))
    packet.append(payload_length & 0xFF)
    packet.extend(self._vector)
    packet.extend(self._cid)
    return packet
Returns the Root layer as list with bytes
42,888
def curate_skills_data(self, skills_data):
    """Sync skills_data with the actual skills on disk."""
    local_skills = [s for s in self.list() if s.is_local]
    default_names = [s.name for s in self.list_defaults()]
    local_names = [s.name for s in local_skills]
    known_names = [s['name'] for s in skills_data['skills']]

    # Add entries for skills found on disk that aren't tracked yet.
    for skill in local_skills:
        if skill.name in known_names:
            continue
        if skill.name in default_names:
            origin = 'default'
        elif skill.url:
            origin = 'cli'
        else:
            origin = 'non-msm'
        skills_data['skills'].append(
            build_skill_entry(skill.name, origin, False))

    # Drop entries marked installed whose folder no longer exists.
    removals = [s for s in skills_data.get('skills', [])
                if s['name'] not in local_names
                and s['installation'] == 'installed']
    for entry in removals:
        skills_data['skills'].remove(entry)
    return skills_data
Sync skills_data with actual skills on disk .
42,889
def sync_skills_data(self):
    """Update the internal skills_data structure from disk."""
    self.skills_data = self.load_skills_data()
    if 'upgraded' in self.skills_data:
        # Freshly-upgraded data: drop the marker; the hash is deliberately
        # left stale (presumably so the data gets written back out —
        # see write_skills_data).
        self.skills_data.pop('upgraded')
    else:
        self.skills_data_hash = skills_data_hash(self.skills_data)
Update internal skill_data_structure from disk .
42,890
def write_skills_data(self, data=None):
    """Write skills data to disk if it has been modified."""
    data = data or self.skills_data
    new_hash = skills_data_hash(data)
    if new_hash != self.skills_data_hash:
        write_skills_data(data)
        self.skills_data_hash = new_hash
Write skills data hash if it has been modified .
42,891
def install(self, param, author=None, constraints=None, origin=''):
    """Install a skill by url or name."""
    if isinstance(param, SkillEntry):
        skill = param
    else:
        skill = self.find_skill(param, author)
    entry = build_skill_entry(skill.name, origin, skill.is_beta)
    try:
        skill.install(constraints)
        entry['installed'] = time.time()
        entry['installation'] = 'installed'
        entry['status'] = 'active'
        entry['beta'] = skill.is_beta
    except AlreadyInstalled:
        # Nothing new to record, but still propagate.
        entry = None
        raise
    except MsmException as e:
        entry['installation'] = 'failed'
        entry['status'] = 'error'
        entry['failure_message'] = repr(e)
        raise
    finally:
        # Record the outcome (success or failure) in skills_data.
        if entry:
            self.skills_data['skills'].append(entry)
Install by url or name
42,892
def remove(self, param, author=None):
    """Remove a skill by url or name."""
    if isinstance(param, SkillEntry):
        skill = param
    else:
        skill = self.find_skill(param, author)
    skill.remove()
    # Drop the skill's entry from the tracked skills data.
    self.skills_data['skills'] = [
        s for s in self.skills_data['skills'] if s['name'] != skill.name
    ]
Remove by url or name
42,893
def update(self, skill=None, author=None):
    """Update all downloaded skills, or just the one specified.

    Args:
        skill (SkillEntry or str): skill to update; None updates all
        author (str): optional author used to disambiguate a name match
    """
    if skill is None:
        return self.update_all()
    if isinstance(skill, str):
        skill = self.find_skill(skill, author)
    entry = get_skill_entry(skill.name, self.skills_data)
    if entry:
        # Record beta status up front, even if the update itself fails.
        entry['beta'] = skill.is_beta
    if skill.update() and entry:
        # Timestamp only when the update actually changed something.
        entry['updated'] = time.time()
Update all downloaded skills or one specified skill .
42,894
def apply(self, func, skills):
    """Run a function over all skills in parallel.

    Args:
        func: callable invoked once per skill
        skills: iterable of skills to process

    Returns:
        list of bool: True where func completed, False where it raised.
    """
    def run_item(skill):
        try:
            func(skill)
            return True
        except MsmException as e:
            LOG.error('Error running {} on {}: {}'.format(
                func.__name__, skill.name, repr(e)))
            return False
        except Exception:
            # Was a bare ``except:`` that fell through and yielded None;
            # catch Exception (not BaseException, so KeyboardInterrupt /
            # SystemExit still propagate) and report failure explicitly.
            LOG.exception('Error running {} on {}:'.format(
                func.__name__, skill.name))
            return False

    with ThreadPool(20) as tp:
        return tp.map(run_item, skills)
Run a function on all skills in parallel
42,895
def install_defaults(self):
    """Install missing default skills and update the ones already present."""
    def ensure_current(skill):
        # Local copies get updated; anything else gets a fresh install
        # tagged with the 'default' origin.
        if skill.is_local:
            self.update(skill)
        else:
            self.install(skill, origin='default')

    return self.apply(ensure_current, self.list_defaults())
Installs the default skills and updates all others.
42,896
def list(self):
    """Load a list of SkillEntry objects from both local and remote skills.

    Local skills found on disk are attached to their matching remote
    entry; remote-only skills are appended afterwards, so local skills
    come first in the returned list.
    """
    try:
        self.repo.update()
    except GitException as e:
        # Only fatal when we have no local clone to fall back on.
        if not isdir(self.repo.path):
            raise
        LOG.warning('Failed to update repo: {}'.format(repr(e)))
    # Lazily build remote entries; sha is only kept in versioned mode.
    remote_skill_list = (
        SkillEntry(
            name, SkillEntry.create_path(self.skills_dir, url, name),
            url, sha if self.versioned else '', msm=self
        )
        # NOTE(review): `path` from get_skill_data is unused here — confirm
        # it is intentionally ignored.
        for name, path, url, sha in self.repo.get_skill_data()
    )
    remote_skills = {
        skill.id: skill for skill in remote_skill_list
    }
    all_skills = []
    for skill_file in glob(join(self.skills_dir, '*', '__init__.py')):
        skill = SkillEntry.from_folder(dirname(skill_file), msm=self)
        if skill.id in remote_skills:
            # Pop so the leftover remote_skills are the remote-only ones.
            skill.attach(remote_skills.pop(skill.id))
        all_skills.append(skill)
    # Remaining remote entries have no local copy.
    all_skills += list(remote_skills.values())
    return all_skills
Load a list of SkillEntry objects from both local and remote skills
42,897
def find_skill(self, param, author=None, skills=None):
    """Find a skill by name or url.

    Args:
        param (str): skill name to fuzzy-match, or an http(s) repo url
        author (str): optional author used to disambiguate a name match
        skills (list): optional SkillEntry candidates to search instead
            of calling self.list()

    Returns:
        SkillEntry: the best matching skill

    Raises:
        SkillNotFound: if the best match scores below 0.3
        MultipleSkillMatches: if other candidates score close to the best
    """
    if param.startswith('https://') or param.startswith('http://'):
        # Url lookup: prefer an already-known skill with the same repo id.
        repo_id = SkillEntry.extract_repo_id(param)
        for skill in self.list():
            if skill.id == repo_id:
                return skill
        # Unknown repository: build a fresh entry for it.
        name = SkillEntry.extract_repo_name(param)
        path = SkillEntry.create_path(self.skills_dir, param)
        return SkillEntry(name, path, param, msm=self)
    else:
        # Name lookup: score every candidate and keep the best.
        skill_confs = {
            skill: skill.match(param, author)
            for skill in skills or self.list()
        }
        best_skill, score = max(skill_confs.items(), key=lambda x: x[1])
        LOG.info('Best match ({}): {} by {}'.format(
            round(score, 2), best_skill.name, best_skill.author))
        if score < 0.3:
            raise SkillNotFound(param)
        # Any other candidate within 70% of the winning score makes the
        # match ambiguous — unless the winner scored a perfect 1.0.
        low_bound = (score * 0.7) if score != 1.0 else 1.0
        close_skills = [
            skill for skill, conf in skill_confs.items()
            if conf >= low_bound and skill != best_skill
        ]
        if close_skills:
            raise MultipleSkillMatches([best_skill] + close_skills)
        return best_skill
Find skill by name or url
42,898
def iterchunks(data, chunksize):
    """Yield successive ``chunksize``-sized slices of ``data``.

    The final chunk may be shorter if len(data) is not a multiple of
    chunksize. Works on any sliceable sequence (bytes, str, list, ...).
    """
    for start in range(0, len(data), chunksize):
        yield data[start:start + chunksize]
iterate chunks of data
42,899
def mustRetransmit(self):
    """Check whether this packet must be retransmitted until received.

    SYN and FIN segments, and any segment carrying payload (dlen),
    require retransmission; anything else does not.
    """
    return bool(self.syn or self.fin or self.dlen)
Check whether this packet must be retransmitted until it is received.