idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
45,400
def extract_subject_from_dn(cert_obj):
    """Serialize a DN to a DataONE subject string.

    RDNs are rendered in reverse order as ``<short-name>=<escaped-value>``
    pairs joined by commas. OIDs without a known short name fall back to
    their dotted string form.
    """
    rdn_str_list = []
    for attr in reversed(list(cert_obj.subject)):
        oid_str = attr.oid.dotted_string
        short_name = OID_TO_SHORT_NAME_DICT.get(oid_str, oid_str)
        rdn_str_list.append("{}={}".format(short_name, rdn_escape(attr.value)))
    return ",".join(rdn_str_list)
Serialize a DN to a DataONE subject string .
45,401
def create_d1_dn_subject(common_name_str):
    """Create the DN Subject for a certificate that will be used in a DataONE
    environment.

    Args:
        common_name_str: Value to use for the CommonName (CN) RDN.

    Returns:
        cryptography.x509.Name: Subject DN.

    Fix: the original ignored ``common_name_str`` and hardcoded the CN as
    "ca.ca.com"; the parameter is now honored.
    """
    name_oid = cryptography.x509.oid.NameOID
    return cryptography.x509.Name(
        [
            cryptography.x509.NameAttribute(name_oid.COUNTRY_NAME, "US"),
            cryptography.x509.NameAttribute(
                name_oid.STATE_OR_PROVINCE_NAME, "California"
            ),
            cryptography.x509.NameAttribute(name_oid.LOCALITY_NAME, "San Francisco"),
            cryptography.x509.NameAttribute(name_oid.ORGANIZATION_NAME, "Root CA"),
            cryptography.x509.NameAttribute(name_oid.COMMON_NAME, common_name_str),
        ]
    )
Create the DN Subject for certificate that will be used in a DataONE environment .
45,402
def serialize_cert_to_pem(cert_obj):
    """Serialize certificate to PEM.

    Returns the PEM encoded bytes of ``cert_obj``.
    """
    pem_encoding = cryptography.hazmat.primitives.serialization.Encoding.PEM
    return cert_obj.public_bytes(encoding=pem_encoding)
Serialize certificate to PEM .
45,403
def extract_subject_info_extension(cert_obj):
    """Extract DataONE SubjectInfo XML doc from certificate.

    Returns the decoded SubjectInfo as a str, or None when the extension is
    missing or cannot be decoded (the reason is logged at debug level).
    """
    try:
        ext = cert_obj.extensions.get_extension_for_oid(
            cryptography.x509.oid.ObjectIdentifier(DATAONE_SUBJECT_INFO_OID)
        )
        subject_info_der = ext.value.value
        decoded_tup = pyasn1.codec.der.decoder.decode(subject_info_der)
        return str(decoded_tup[0])
    except Exception as e:
        logging.debug('SubjectInfo not extracted. reason="{}"'.format(e))
Extract DataONE SubjectInfo XML doc from certificate .
45,404
def decode_der(cert_der):
    """Decode cert DER string to Certificate object.

    Args:
        cert_der: DER encoded certificate bytes.
    """
    backend = cryptography.hazmat.backends.default_backend()
    return cryptography.x509.load_der_x509_certificate(data=cert_der, backend=backend)
Decode cert DER string to Certificate object .
45,405
@contextlib.contextmanager
def disable_cert_validation():
    """Context manager to temporarily disable certificate validation in the
    standard SSL library.

    Fix: the original was a bare generator function; without the
    ``contextlib.contextmanager`` decorator it cannot be used in a ``with``
    statement as documented.

    The previous default HTTPS context is always restored, even if the body
    raises.
    """
    current_context = ssl._create_default_https_context
    ssl._create_default_https_context = ssl._create_unverified_context
    try:
        yield
    finally:
        ssl._create_default_https_context = current_context
Context manager to temporarily disable certificate validation in the standard SSL library .
45,406
def extract_issuer_ca_cert_url(cert_obj):
    """Extract issuer CA certificate URL from certificate.

    Scans the Authority Information Access extension for a caIssuers access
    description and returns its location value, or None if not present.
    """
    for ext in cert_obj.extensions:
        if ext.oid.dotted_string != AUTHORITY_INFO_ACCESS_OID:
            continue
        for access_desc in ext.value:
            if access_desc.access_method.dotted_string == CA_ISSUERS_OID:
                return access_desc.access_location.value
Extract issuer CA certificate URL from certificate .
45,407
def serialize_private_key_to_pem(private_key, passphrase_bytes=None):
    """Serialize private key to PEM.

    If ``passphrase_bytes`` is given, the key is encrypted with the best
    available algorithm; otherwise it is stored unencrypted.
    """
    ser = cryptography.hazmat.primitives.serialization
    if passphrase_bytes is None:
        enc_alg = ser.NoEncryption()
    else:
        enc_alg = ser.BestAvailableEncryption(passphrase_bytes)
    return private_key.private_bytes(
        encoding=ser.Encoding.PEM,
        format=ser.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=enc_alg,
    )
Serialize private key to PEM .
45,408
def generate_private_key(key_size=2048):
    """Generate an RSA private key.

    Args:
        key_size: Modulus size in bits (default 2048).
    """
    rsa_mod = cryptography.hazmat.primitives.asymmetric.rsa
    return rsa_mod.generate_private_key(
        public_exponent=65537,
        key_size=key_size,
        backend=cryptography.hazmat.backends.default_backend(),
    )
Generate an RSA private key .
45,409
def load_csr(pem_path):
    """Load CSR from PEM encoded file.

    Args:
        pem_path: Path to file holding a PEM encoded CSR.
    """
    with open(pem_path, "rb") as f:
        pem_bytes = f.read()
    return cryptography.x509.load_pem_x509_csr(
        data=pem_bytes, backend=cryptography.hazmat.backends.default_backend()
    )
Load CSR from PEM encoded file
45,410
def load_private_key(pem_path, passphrase_bytes=None):
    """Load private key from PEM encoded file.

    Args:
        pem_path: Path to file holding the PEM encoded private key.
        passphrase_bytes: Passphrase the key is encrypted with, if any.
    """
    with open(pem_path, "rb") as f:
        pem_bytes = f.read()
    return cryptography.hazmat.primitives.serialization.load_pem_private_key(
        data=pem_bytes,
        password=passphrase_bytes,
        backend=cryptography.hazmat.backends.default_backend(),
    )
Load private key from PEM encoded file
45,411
def serialize_cert_to_der(cert_obj):
    """Serialize certificate to DER.

    Returns the DER encoded bytes of ``cert_obj``.
    """
    der_encoding = cryptography.hazmat.primitives.serialization.Encoding.DER
    return cert_obj.public_bytes(der_encoding)
Serialize certificate to DER .
45,412
def log_cert_info(logger, msg_str, cert_obj):
    """Dump basic certificate values to the log.

    Args:
        logger: A logging method, e.g., ``logging.debug``.
        msg_str: Heading written before the indented certificate values.
        cert_obj: cryptography Certificate object to describe.
    """
    # map() applies ``logger`` to the heading plus one indented line per
    # value; list() forces evaluation since map() is lazy.
    list(
        map(
            logger,
            ["{}:".format(msg_str)]
            + [
                " {}".format(v)
                for v in [
                    "Subject: {}".format(
                        _get_val_str(cert_obj, ["subject", "value"], reverse=True)
                    ),
                    "Issuer: {}".format(
                        _get_val_str(cert_obj, ["issuer", "value"], reverse=True)
                    ),
                    "Not Valid Before: {}".format(
                        cert_obj.not_valid_before.isoformat()
                    ),
                    "Not Valid After: {}".format(cert_obj.not_valid_after.isoformat()),
                    "Subject Alt Names: {}".format(
                        _get_ext_val_str(
                            cert_obj, "SUBJECT_ALTERNATIVE_NAME", ["value", "value"]
                        )
                    ),
                    "CRL Distribution Points: {}".format(
                        _get_ext_val_str(
                            cert_obj,
                            "CRL_DISTRIBUTION_POINTS",
                            ["value", "full_name", "value", "value"],
                        )
                    ),
                    "Authority Access Location: {}".format(
                        extract_issuer_ca_cert_url(cert_obj) or "<not found>"
                    ),
                ]
            ],
        )
    )
Dump basic certificate values to the log .
45,413
def get_extension_by_name(cert_obj, extension_name):
    """Get a standard certificate extension by attribute name.

    Args:
        extension_name: Attribute name on ``cryptography.x509.oid.ExtensionOID``,
            e.g. "SUBJECT_ALTERNATIVE_NAME".

    Returns None when the certificate does not carry the extension.
    """
    oid = getattr(cryptography.x509.oid.ExtensionOID, extension_name)
    try:
        return cert_obj.extensions.get_extension_for_oid(oid)
    except cryptography.x509.ExtensionNotFound:
        return None
Get a standard certificate extension by attribute name .
45,414
def _get_val_list ( obj , path_list , reverse = False ) : try : y = getattr ( obj , path_list [ 0 ] ) except AttributeError : return [ ] if len ( path_list ) == 1 : return [ y ] else : val_list = [ x for a in y for x in _get_val_list ( a , path_list [ 1 : ] , reverse ) ] if reverse : val_list . reverse ( ) return val_list
Extract values from nested objects by attribute names .
45,415
def _get_val_str(obj, path_list=None, reverse=False):
    """Extract values from nested objects by attribute names and concatenate
    their string representations.

    Fix: the original returned an empty string (not "<not found>") when the
    path did not resolve on a non-None object, and raised IndexError when
    ``path_list`` was None/empty; "<not found>" is now returned in all
    no-value cases, matching how callers display missing values.
    """
    if obj is None or not path_list:
        return "<not found>"
    val_list = _get_val_list(obj, path_list, reverse)
    if not val_list:
        return "<not found>"
    return " / ".join(map(str, val_list))
Extract values from nested objects by attribute names and concatenate their string representations .
45,416
def _JMS_to_Bern_II(C, udlnu):
    """From JMS to BernII basis for charged current process semileptonic
    operators.

    Args:
        C: dict of JMS Wilson coefficient arrays, indexed [lp, l, d, u].
        udlnu: flavour string of the form 'udl_enu_tau', 'cbl_munu_e', etc.

    Returns:
        dict: BernII coefficients keyed '1'/'5'/'1p'/'5p'/'7p' plus a flavour
        suffix.
    """
    # 0-based flavour indices from the module-level flavour maps.
    u = uflav[udlnu[0]]
    d = dflav[udlnu[1]]
    # Charged lepton name sits between position 4 and the 'n' of 'nu'.
    l = lflav[udlnu[4:udlnu.find('n')]]
    # Neutrino flavour follows the second underscore.
    lp = lflav[udlnu[udlnu.find('_', 5) + 1:len(udlnu)]]
    # Bern-side key suffix, e.g. 'udetau' for 'udl_enu_tau'.
    ind = udlnu[0] + udlnu[1] + udlnu[4:udlnu.find('n')] + udlnu[udlnu.find('_', 5) + 1:len(udlnu)]
    return {
        '1' + ind: C["VnueduLL"][lp, l, d, u].conj(),
        '5' + ind: C["SnueduRL"][lp, l, d, u].conj(),
        '1p' + ind: C["VnueduLR"][lp, l, d, u].conj(),
        '5p' + ind: C["SnueduRR"][lp, l, d, u].conj(),
        '7p' + ind: C["TnueduRR"][lp, l, d, u].conj()
    }
From JMS to BernII basis for charged current process semileptonic operators . udlnu should be of the form udl_enu_tau cbl_munu_e etc .
45,417
def _Bern_to_JMS_II(C, udlnu):
    """From BernII to JMS basis for charged current process semileptonic
    operators.

    Inverse of _JMS_to_Bern_II.

    Args:
        C: dict of BernII coefficients keyed '1'/'5'/'1p'/'5p'/'7p' + suffix.
        udlnu: flavour string of the form 'udl_enu_tau', 'cbl_munu_e', etc.

    Returns:
        dict: JMS coefficients keyed by operator name with 1-based flavour
        indices appended.
    """
    # 0-based flavour indices from the module-level flavour maps.
    u = uflav[udlnu[0]]
    d = dflav[udlnu[1]]
    l = lflav[udlnu[4:udlnu.find('n')]]
    lp = lflav[udlnu[udlnu.find('_', 5) + 1:len(udlnu)]]
    # Bern-side key suffix, e.g. 'udetau' for 'udl_enu_tau'.
    ind = udlnu[0] + udlnu[1] + udlnu[4:udlnu.find('n')] + udlnu[udlnu.find('_', 5) + 1:len(udlnu)]
    return {
        "VnueduLL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1' + ind].conjugate(),
        "SnueduRL_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5' + ind].conjugate(),
        "VnueduLR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['1p' + ind].conjugate(),
        "SnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['5p' + ind].conjugate(),
        "TnueduRR_{}{}{}{}".format(lp + 1, l + 1, d + 1, u + 1): C['7p' + ind].conjugate()
    }
From BernII to JMS basis for charged current process semileptonic operators . udlnu should be of the form udl_enu_tau cbl_munu_e etc .
45,418
def _BernII_to_Flavio_II(C, udlnu, parameters):
    """From BernII to FlavioII basis for charged current process semileptonic
    operators.

    Args:
        C: dict of BernII coefficients.
        udlnu: flavour string of the form 'udl_enu_tau', 'cbl_munu_e', etc.
        parameters: dict with CKM inputs ("Vus", "Vub", "Vcb", "delta") and
            'GF'.

    Returns:
        dict: flavio coefficients CVL/CVR/CSR/CSL/CT, rescaled by the
        normalization prefactor -sqrt(2) / (4 GF V_ud).
    """
    p = parameters
    u = uflav[udlnu[0]]
    d = dflav[udlnu[1]]
    l = lflav[udlnu[4:udlnu.find('n')]]
    lp = lflav[udlnu[udlnu.find('_', 5) + 1:len(udlnu)]]
    ind = udlnu[0] + udlnu[1] + udlnu[4:udlnu.find('n')] + udlnu[udlnu.find('_', 5) + 1:len(udlnu)]
    # Flavio labels reverse the quark order and insert 'nu' before the
    # neutrino flavour, e.g. 'duenutau'.
    ind2 = udlnu[1] + udlnu[0] + udlnu[4:udlnu.find('n')] + 'nu' + udlnu[udlnu.find('_', 5) + 1:len(udlnu)]
    dic = {
        'CVL_' + ind2: C['1' + ind],
        'CVR_' + ind2: C['1p' + ind],
        'CSR_' + ind2: C['5' + ind],
        'CSL_' + ind2: C['5p' + ind],
        'CT_' + ind2: C['7p' + ind]
    }
    # Tree-level CKM matrix built from the Wolfenstein-like inputs.
    V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
    prefactor = -sqrt(2) / p['GF'] / V[u, d] / 4
    return {k: prefactor * v for k, v in dic.items()}
From BernII to FlavioII basis for charged current process semileptonic operators . udlnu should be of the form udl_enu_tau cbl_munu_e etc .
45,419
def Flavio_to_Fierz_nunu(C, ddll, parameters, norm_gf=True):
    """From Flavio to Fierz basis for dineutrino (Class V) semileptonic
    operators.

    Args:
        C: dict of flavio coefficients ("CL_..." / "CR_..." keys).
        ddll: flavour string of the form 'sbl_enu_tau', 'dbl_munu_e', etc.;
            the first two characters are the down-type quark pair.
        parameters: dict with CKM inputs, 'GF' and 'alpha_e'.
        norm_gf: if True, divide out the full GF/CKM normalization
            prefactor; otherwise only the CKM factor xi.

    Returns:
        dict: Fierz coefficients 'F<ind>nu' and 'F<ind>nup'.

    Raises:
        ValueError: if the quark pair is not 'sb', 'db' or 'ds'.
    """
    p = parameters
    V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
    # xi is the CKM combination V_t<q1> V_t<q2>* for the quark transition.
    if ddll[:2] == 'sb':
        xi = V[2, 2] * V[2, 1].conj()
    elif ddll[:2] == 'db':
        xi = V[2, 2] * V[2, 0].conj()
    elif ddll[:2] == 'ds':
        xi = V[2, 1] * V[2, 0].conj()
    else:
        raise ValueError("Unexpected flavours: {}".format(ddll[:2]))
    q1, q2 = ddll[:2]
    # Lepton labels parsed from the 'l_<l1>nu_<l2>' tail of the string.
    l1 = ddll[4:ddll.find('n')]
    l2 = ddll[ddll.find('_', 5) + 1:]
    ind = q1 + q2 + l1 + l2
    # Flavio-side label with reversed quarks and explicit 'nu' markers.
    indnu = q2 + q1 + 'nu' + l2 + 'nu' + l1
    e = sqrt(4 * pi * parameters['alpha_e'])
    dic = {
        'F' + ind + 'nu': C["CL_" + indnu] / ((8 * pi ** 2) / e ** 2),
        'F' + ind + 'nup': C["CR_" + indnu] / ((8 * pi ** 2) / e ** 2),
    }
    if norm_gf:
        prefactor = sqrt(2) / p['GF'] / xi / 4
    else:
        prefactor = 1 / xi
    return {k: v / prefactor for k, v in dic.items()}
From Flavio semileptonic basis to semileptonic Fierz basis for Class V . C should be the corresponding leptonic Fierz basis and ddll should be of the form sbl_enu_tau dbl_munu_e etc .
45,420
def Fierz_to_EOS_lep(C, ddll, parameters):
    """From semileptonic Fierz basis to EOS semileptonic basis for Class V.

    Args:
        C: dict of semileptonic Fierz coefficients ('F<ind>9', 'F<ind>9p', ...).
        ddll: flavour string of the form 'sbl_enu_tau', 'dbl_munu_e', etc.
        parameters: dict with CKM inputs, 'GF' and 'alpha_e'.

    Returns:
        dict: EOS coefficients keyed 'b->s<ll>::c9', "...::c9'", etc.

    Improvement over original: the loop-invariant factor (16 pi^2)/e^2 was
    recomputed for all ten entries and each mapping was spelled out by hand;
    both are now factored into a single table-driven loop. Values and key
    order are unchanged.
    """
    p = parameters
    V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
    Vtb = V[2, 2]
    Vts = V[2, 1]
    ind = ddll.replace('l_', '').replace('nu_', '')
    # EOS labels drop the quark pair (first two characters).
    ind2 = ind[2::]
    e = sqrt(4 * pi * parameters['alpha_e'])
    # Hoisted loop-invariant conversion factor.
    pre = (16 * pi ** 2) / e ** 2
    # (EOS coefficient name, Fierz key suffix) in the original key order.
    name_pair_list = [
        ('c9', '9'), ("c9'", '9p'), ('c10', '10'), ("c10'", '10p'),
        ('cS', 'S'), ("cS'", 'Sp'), ('cP', 'P'), ("cP'", 'Pp'),
        ('cT', 'T'), ('cT5', 'T5'),
    ]
    dic = {
        'b->s' + ind2 + '::' + eos_name: pre * C['F' + ind + fierz_suffix]
        for eos_name, fierz_suffix in name_pair_list
    }
    prefactor = sqrt(2) / p['GF'] / Vtb / Vts.conj() / 4
    return {k: prefactor * v for k, v in dic.items()}
From semileptonic Fierz basis to EOS semileptonic basis for Class V . C should be the corresponding leptonic Fierz basis and ddll should be of the form sbl_enu_tau dbl_munu_e etc .
45,421
def JMS_to_FormFlavor_lep(C, dd):
    """From JMS to FormFlavor basis for leptonic Class V operators.

    Args:
        C: dict of JMS Wilson coefficient arrays; lepton index 1 (the muon)
            is hardcoded for the dimuon entries.
        dd: down-type quark pair string of the form 'sb', 'ds', etc.

    Returns:
        dict: FormFlavor coefficients for the dimuon operators plus the
        'sdnn' dineutrino entries averaged over the three neutrino flavours.
    """
    # 0-based down-type flavour indices from the module-level map.
    b = dflav[dd[0]]
    s = dflav[dd[1]]
    return {
        'CVLL_' + dd + 'mm': C["VedLL"][1, 1, s, b],
        'CVRR_' + dd + 'mm': C["VedRR"][1, 1, s, b],
        'CVLR_' + dd + 'mm': C["VdeLR"][s, b, 1, 1],
        'CVRL_' + dd + 'mm': C["VedLR"][1, 1, s, b],
        # Entries with swapped quark indices pick up a complex conjugation.
        'CSLL_' + dd + 'mm': C["SedRR"][1, 1, b, s].conj(),
        'CSRR_' + dd + 'mm': C["SedRR"][1, 1, s, b],
        'CSLR_' + dd + 'mm': C["SedRL"][1, 1, s, b],
        'CSRL_' + dd + 'mm': C["SedRL"][1, 1, b, s].conj(),
        'CTLL_' + dd + 'mm': C["TedRR"][1, 1, b, s].conj(),
        'CTRR_' + dd + 'mm': C["TedRR"][1, 1, s, b],
        # NOTE(review): the dineutrino entries hardcode quark indices
        # [s - 1, s]; presumably only meaningful for dd == 'sd' -- confirm
        # intended usage against callers.
        'CVLL_sdnn': 1 / 3 * C["VnudLL"][0, 0, s - 1, s] + 1 / 3 * C["VnudLL"][1, 1, s - 1, s] + 1 / 3 * C["VnudLL"][2, 2, s - 1, s],
        'CVRL_sdnn': 1 / 3 * C["VnudLR"][0, 0, s - 1, s] + 1 / 3 * C["VnudLR"][1, 1, s - 1, s] + 1 / 3 * C["VnudLR"][2, 2, s - 1, s]
    }
From JMS to FormFlavor basis for Class V . C should be the JMS basis and dd should be of the form sb ds etc .
45,422
def JMS_to_Fierz_chrom(C, qq):
    """From JMS to chromomagnetic Fierz basis for Class V.

    qq should be of the form 'sb', 'ds', etc. Down-type pairs read the
    'dgamma'/'dG' matrices; up-type pairs read 'ugamma'/'uG'.
    """
    if qq[0] in dflav:
        i, j = dflav[qq[0]], dflav[qq[1]]
        gamma_key, gluon_key = 'dgamma', 'dG'
    else:
        i, j = uflav[qq[0]], uflav[qq[1]]
        gamma_key, gluon_key = 'ugamma', 'uG'
    return {
        'F7gamma' + qq: C[gamma_key][i, j],
        'F8g' + qq: C[gluon_key][i, j],
        'F7pgamma' + qq: C[gamma_key][j, i].conj(),
        'F8pg' + qq: C[gluon_key][j, i].conj()
    }
From JMS to chromomagnetic Fierz basis for Class V . qq should be of the form sb ds etc .
45,423
def Fierz_to_JMS_chrom(C, qq):
    """From chromomagnetic Fierz to JMS basis for Class V.

    qq should be of the form 'sb', 'ds', etc. Inverse of JMS_to_Fierz_chrom;
    JMS keys carry 1-based flavour indices.
    """
    if qq[0] in dflav:
        i = dflav[qq[0]] + 1
        j = dflav[qq[1]] + 1
        gamma_key, gluon_key = 'dgamma', 'dG'
    else:
        i = uflav[qq[0]] + 1
        j = uflav[qq[1]] + 1
        gamma_key, gluon_key = 'ugamma', 'uG'
    return {
        '{}_{}{}'.format(gamma_key, i, j): C['F7gamma' + qq],
        '{}_{}{}'.format(gluon_key, i, j): C['F8g' + qq],
        '{}_{}{}'.format(gamma_key, j, i): C['F7pgamma' + qq].conjugate(),
        '{}_{}{}'.format(gluon_key, j, i): C['F8pg' + qq].conjugate(),
    }
From chromomagnetic Fierz to JMS basis for Class V . qq should be of the form sb ds etc .
45,424
def Fierz_to_Bern_chrom(C, dd, parameters):
    """From Fierz to chromomagnetic Bern basis for Class V.

    Args:
        C: dict of chromomagnetic Fierz coefficients.
        dd: quark pair string of the form 'sb', 'ds', etc.
        parameters: dict with 'alpha_e', 'alpha_s' and the quark masses.

    Returns:
        dict: Bern coefficients '7gamma', '8g', '7pgamma', '8pg' + dd.

    Raises:
        KeyError: if no quark mass is known for ``dd``.
    """
    e = sqrt(4 * pi * parameters['alpha_e'])
    gs = sqrt(4 * pi * parameters['alpha_s'])
    if dd == 'sb' or dd == 'db':
        mq = parameters['m_b']
    elif dd == 'ds':
        mq = parameters['m_s']
    else:
        # Fix: the original instantiated this exception without raising it,
        # which then crashed with a NameError on ``mq`` below.
        raise KeyError("Not sure what to do with quark mass for flavour {}".format(dd))
    return {
        '7gamma' + dd: gs ** 2 / e / mq * C['F7gamma' + dd],
        '8g' + dd: gs / mq * C['F8g' + dd],
        '7pgamma' + dd: gs ** 2 / e / mq * C['F7pgamma' + dd],
        '8pg' + dd: gs / mq * C['F8pg' + dd]
    }
From Fierz to chromomagnetic Bern basis for Class V . dd should be of the form sb ds etc .
45,425
def Flavio_to_Fierz_chrom(C, qq, parameters):
    """From Flavio to chromomagnetic Fierz basis for Class V.

    Args:
        C: dict of flavio coefficients ("C7_...", "C8_...", primed variants).
        qq: quark pair string of the form 'sb', 'db', 'ds' or 'uc'.
        parameters: dict with CKM inputs, 'GF', couplings and quark masses.

    Returns:
        dict: chromomagnetic Fierz coefficients.

    Raises:
        ValueError: if the quark pair is not supported.
        KeyError: if no quark mass is known for the pair (defensive; the
            ValueError check above already rejects such pairs).
    """
    p = parameters
    V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
    # xi is the CKM combination for the given transition.
    if qq == 'sb':
        xi = V[2, 2] * V[2, 1].conj()
    elif qq == 'db':
        xi = V[2, 2] * V[2, 0].conj()
    elif qq == 'ds':
        xi = V[2, 1] * V[2, 0].conj()
    elif qq == 'uc':
        xi = V[1, 2].conj() * V[0, 2]
    else:
        raise ValueError("Unexpected flavours: {}".format(qq))
    # Flavio labels use the reversed quark order.
    qqfl = qq[::-1]
    e = sqrt(4 * pi * parameters['alpha_e'])
    gs = sqrt(4 * pi * parameters['alpha_s'])
    if qq == 'sb' or qq == 'db':
        mq = parameters['m_b']
    elif qq == 'ds':
        mq = parameters['m_s']
    elif qq == 'uc':
        mq = parameters['m_c']
    else:
        # Fix: the original instantiated this exception without raising it,
        # which would have crashed with a NameError on ``mq`` below.
        raise KeyError("Not sure what to do with quark mass for flavour {}".format(qq))
    dic = {
        'F7gamma' + qq: C["C7_" + qqfl] / ((16 * pi ** 2) / e / mq),
        'F8g' + qq: C["C8_" + qqfl] / ((16 * pi ** 2) / gs / mq),
        'F7pgamma' + qq: C["C7p_" + qqfl] / ((16 * pi ** 2) / e / mq),
        'F8pg' + qq: C["C8p_" + qqfl] / ((16 * pi ** 2) / gs / mq)
    }
    prefactor = sqrt(2) / p['GF'] / xi / 4
    return {k: v / prefactor for k, v in dic.items()}
From Flavio to chromomagnetic Fierz basis for Class V . qq should be of the form sb db etc .
45,426
def Fierz_to_EOS_chrom(C, dd, parameters):
    """From Fierz to chromomagnetic EOS basis for Class V.

    dd should be of the form 'sb', 'ds', etc.
    """
    p = parameters
    V = ckmutil.ckm.ckm_tree(p["Vus"], p["Vub"], p["Vcb"], p["delta"])
    Vtb = V[2, 2]
    Vts = V[2, 1]
    e = sqrt(4 * pi * parameters['alpha_e'])
    gs = sqrt(4 * pi * parameters['alpha_s'])
    mb = parameters['m_b']
    photon_factor = 16 * pi ** 2 / mb / e
    gluon_factor = 16 * pi ** 2 / mb / gs
    dic = {
        "b->s::c7": photon_factor * C["F7gamma" + dd],
        "b->s::c7'": photon_factor * C["F7pgamma" + dd],
        "b->s::c8": gluon_factor * C["F8g" + dd],
        "b->s::c8'": gluon_factor * C["F8pg" + dd]
    }
    prefactor = sqrt(2) / p['GF'] / Vtb / Vts.conj() / 4
    return {k: prefactor * v for k, v in dic.items()}
From Fierz to chromomagnetic EOS basis for Class V . dd should be of the form sb ds etc .
45,427
def _JMS_to_Flavio_VII(C, parameters):
    """From JMS to flavio basis for class VII, i.e. flavour blind operators.

    Translates via the bundled wilson JSON mapping, then adds the diagonal
    dipole coefficients, normalized by the quark/lepton masses and the GF
    prefactor.
    """
    d = {}
    dtrans = json.loads(
        pkgutil.get_data('wilson', 'data/flavio_jms_vii.json').decode('utf8')
    )
    for cj, cf in dtrans.items():
        d[cf] = C.get(cj, 0)
    gs = sqrt(4 * pi * parameters['alpha_s'])
    e = sqrt(4 * pi * parameters['alpha_e'])
    preC7 = 16 * pi ** 2 / e
    preC8 = 16 * pi ** 2 / gs
    # (flavio label, mass parameter, JMS dipole key) for every diagonal
    # dipole entry, in the original insertion order.
    dipole_spec = [
        ('C8_uu', 'm_u', 'uG_11'), ('C8_cc', 'm_c', 'uG_22'),
        ('C8_dd', 'm_d', 'dG_11'), ('C8_ss', 'm_s', 'dG_22'),
        ('C8_bb', 'm_b', 'dG_33'),
        ('C7_uu', 'm_u', 'ugamma_11'), ('C7_cc', 'm_c', 'ugamma_22'),
        ('C7_dd', 'm_d', 'dgamma_11'), ('C7_ss', 'm_s', 'dgamma_22'),
        ('C7_bb', 'm_b', 'dgamma_33'),
        ('C7_ee', 'm_e', 'egamma_11'), ('C7_mumu', 'm_mu', 'egamma_22'),
        ('C7_tautau', 'm_tau', 'egamma_33'),
    ]
    for flavio_key, mass_key, jms_key in dipole_spec:
        pre = preC8 if flavio_key.startswith('C8') else preC7
        d[flavio_key] = pre / parameters[mass_key] * C.get(jms_key, 0)
    preGF = sqrt(2) / parameters['GF'] / 4
    return {k: preGF * v for k, v in d.items()}
From JMS to flavio basis for class VII i . e . flavour blind operators .
45,428
def cut_from_chain(sciobj_model):
    """Remove an object from a revision chain.

    After the cut, the chain is repaired around the removed object and the
    chain's SID-to-head mapping is refreshed via a neighboring PID that
    still exists.

    Args:
        sciobj_model: ScienceObject model instance that is a member of a
            revision chain (assumed -- the code dereferences obsoletes /
            obsoleted_by without None checks; TODO confirm callers verify
            membership first).
    """
    if _is_head(sciobj_model):
        # Head of chain: the surviving neighbor is the older object that
        # this one obsoletes.
        old_pid = sciobj_model.obsoletes.did
        _cut_head_from_chain(sciobj_model)
    elif _is_tail(sciobj_model):
        # Tail of chain: the surviving neighbor is the newer object that
        # obsoletes this one.
        old_pid = sciobj_model.obsoleted_by.did
        _cut_tail_from_chain(sciobj_model)
    else:
        # Embedded in the middle: use the newer neighbor.
        old_pid = sciobj_model.obsoleted_by.did
        _cut_embedded_from_chain(sciobj_model)
    # Re-point the chain head (and thus any SID) via a PID that still exists.
    _update_sid_to_last_existing_pid_map(old_pid)
Remove an object from a revision chain .
45,429
def resolve_sid(sid):
    """Get the PID to which the ``sid`` currently maps.

    Looks up the chain registered for the SID and returns its head PID.
    """
    chain_model = d1_gmn.app.models.Chain.objects.get(sid__did=sid)
    return chain_model.head_pid.did
Get the PID to which the sid currently maps .
45,430
def is_obsoletes_pid(pid):
    """Return True if ``pid`` is referenced in the obsoletes field of any
    object."""
    obsoleting_qs = d1_gmn.app.models.ScienceObject.objects.filter(
        obsoletes__did=pid
    )
    return obsoleting_qs.exists()
Return True if pid is referenced in the obsoletes field of any object .
45,431
def is_obsoleted_by_pid(pid):
    """Return True if ``pid`` is referenced in the obsoletedBy field of any
    object."""
    obsoleted_qs = d1_gmn.app.models.ScienceObject.objects.filter(
        obsoleted_by__did=pid
    )
    return obsoleted_qs.exists()
Return True if pid is referenced in the obsoletedBy field of any object .
45,432
def _merge_chains(chain_model_a, chain_model_b):
    """Merge two chains.

    Carries chain B's SID over to chain A, moves all of B's members onto A,
    then deletes the now-empty chain B.
    """
    # Transfer the SID; _set_chain_sid() is a no-op for a missing SID and
    # raises ServiceFailure if A already has a different one.
    _set_chain_sid(
        chain_model_a, d1_gmn.app.did.get_did_by_foreign_key(chain_model_b.sid)
    )
    # Re-home each member of B onto A.
    for member_model in _get_all_chain_member_queryset_by_chain(chain_model_b):
        member_model.chain = chain_model_a
        member_model.save()
    # B no longer has members; remove it.
    chain_model_b.delete()
Merge two chains .
45,433
def _set_chain_sid(chain_model, sid):
    """Set or update SID for chain.

    Args:
        chain_model: Chain model to update.
        sid: New SID string; a falsy value makes this a no-op.

    Raises:
        d1_common.types.exceptions.ServiceFailure: if the chain already has
            a different SID (an existing SID must not be modified).
    """
    if not sid:
        return
    if chain_model.sid and chain_model.sid.did != sid:
        raise d1_common.types.exceptions.ServiceFailure(
            0,
            'Attempted to modify existing SID. '
            'existing_sid="{}", new_sid="{}"'.format(chain_model.sid.did, sid),
        )
    chain_model.sid = d1_gmn.app.did.get_or_create_did(sid)
    chain_model.save()
Set or update SID for chain .
45,434
def _get_chain_by_pid(pid):
    """Find chain by pid. Return None if not found."""
    try:
        member_model = d1_gmn.app.models.ChainMember.objects.get(pid__did=pid)
    except d1_gmn.app.models.ChainMember.DoesNotExist:
        return None
    return member_model.chain
Find chain by pid .
45,435
def _get_chain_by_sid(sid):
    """Find chain by sid. Return None if not found."""
    try:
        return d1_gmn.app.models.Chain.objects.get(sid__did=sid)
    except d1_gmn.app.models.Chain.DoesNotExist:
        return None
Find chain by sid . Return None if not found .
45,436
def _update_sid_to_last_existing_pid_map(pid):
    """Set chain head PID to the last existing object in the chain to which
    ``pid`` belongs.

    If a SID has been set for the chain, the SID then resolves to the chain
    head PID.

    Args:
        pid: PID of any member of the chain; used as starting point for the
            head search.
    """
    # Presumably walks the obsoleted_by links to the newest still-existing
    # object -- TODO confirm against _find_head_or_latest_connected().
    last_pid = _find_head_or_latest_connected(pid)
    chain_model = _get_chain_by_pid(last_pid)
    if not chain_model:
        # PID is not a member of any chain; nothing to update.
        return
    chain_model.head_pid = d1_gmn.app.did.get_or_create_did(last_pid)
    chain_model.save()
Set chain head PID to the last existing object in the chain to which pid belongs . If SID has been set for chain it resolves to chain head PID .
45,437
def populate_entity_type(apps, schema_editor):
    """Populate entity type from attached descriptor schema.

    Data-migration helper: for every Entity that has a descriptor schema,
    copy the schema's slug into the entity's ``type`` field. Entities
    without a schema are left untouched.
    """
    Entity = apps.get_model('flow', 'Entity')
    for entity in Entity.objects.all():
        if entity.descriptor_schema is None:
            continue
        entity.type = entity.descriptor_schema.slug
        entity.save()
Populate entity type from attached descriptor schema .
45,438
def deserialize(doc_xml, pyxb_binding=None):
    """Deserialize DataONE XML types to PyXB.

    Args:
        doc_xml: XML document to deserialize.
        pyxb_binding: PyXB binding module; defaults to
            d1_common.types.dataoneTypes.

    Returns:
        PyXB object tree.

    Raises:
        ValueError: if the XML cannot be deserialized.

    Improvements over original: the ``(PyXBException, SAXParseException,
    Exception)`` tuple was redundant (listing ``Exception`` subsumes the
    others) and the raised ValueError did not chain the original exception.
    """
    pyxb_binding = pyxb_binding or d1_common.types.dataoneTypes
    try:
        return pyxb_binding.CreateFromDocument(doc_xml)
    except pyxb.ValidationError as e:
        # ValidationError carries structured details; include them verbatim.
        raise ValueError(
            'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format(
                e.details(), doc_xml
            )
        ) from e
    except Exception as e:
        raise ValueError(
            'Unable to deserialize XML to PyXB. error="{}" xml="{}"'.format(
                str(e), doc_xml
            )
        ) from e
Deserialize DataONE XML types to PyXB .
45,439
def serialize_gen(obj_pyxb, encoding='utf-8', pretty=False, strip_prolog=False, xslt_url=None):
    """Serialize PyXB object to XML.

    Args:
        obj_pyxb: PyXB object to serialize.
        encoding: 'utf-8' for a bytes result, None for a str result.
        pretty: Indent the output and remove blank lines.
        strip_prolog: Remove the leading "<?xml ...?>" declaration.
        xslt_url: If given, insert an xml-stylesheet processing instruction
            referencing this URL.

    Returns:
        bytes or str, depending on ``encoding``.

    Raises:
        ValueError: if the PyXB object cannot be serialized.

    Fix: the bytes regex patterns are now raw literals; b'^\\s*$\\n' relied
    on an invalid escape sequence (DeprecationWarning today, SyntaxError in
    future Python versions). Exceptions are now chained.
    """
    assert d1_common.type_conversions.is_pyxb(obj_pyxb)
    assert encoding in (None, 'utf-8', 'UTF-8')
    try:
        obj_dom = obj_pyxb.toDOM()
    except pyxb.ValidationError as e:
        raise ValueError(
            'Unable to serialize PyXB to XML. error="{}"'.format(e.details())
        ) from e
    except pyxb.PyXBException as e:
        raise ValueError(
            'Unable to serialize PyXB to XML. error="{}"'.format(str(e))
        ) from e
    if xslt_url:
        xslt_processing_instruction = obj_dom.createProcessingInstruction(
            'xml-stylesheet', 'type="text/xsl" href="{}"'.format(xslt_url)
        )
        root = obj_dom.firstChild
        obj_dom.insertBefore(xslt_processing_instruction, root)
    if pretty:
        xml_str = obj_dom.toprettyxml(indent=' ', encoding=encoding)
        # Remove the whitespace-only lines that toprettyxml() emits.
        if encoding is None:
            xml_str = re.sub(r'^\s*$\n', r'', xml_str, flags=re.MULTILINE)
        else:
            xml_str = re.sub(rb'^\s*$\n', b'', xml_str, flags=re.MULTILINE)
    else:
        xml_str = obj_dom.toxml(encoding)
    if strip_prolog:
        if encoding is None:
            xml_str = re.sub(r'^<\?(.*)\?>', r'', xml_str)
        else:
            xml_str = re.sub(rb'^<\?(.*)\?>', b'', xml_str)
    return xml_str.strip()
Serialize PyXB object to XML .
45,440
def serialize_for_transport(obj_pyxb, pretty=False, strip_prolog=False, xslt_url=None):
    """Serialize PyXB object to UTF-8 encoded XML bytes for transport over
    the network, filesystem storage and other machine usage."""
    return serialize_gen(
        obj_pyxb,
        encoding='utf-8',
        pretty=pretty,
        strip_prolog=strip_prolog,
        xslt_url=xslt_url,
    )
Serialize PyXB object to XML bytes with UTF - 8 encoding for transport over the network filesystem storage and other machine usage .
45,441
def serialize_to_xml_str(obj_pyxb, pretty=True, strip_prolog=False, xslt_url=None):
    """Serialize PyXB object to pretty printed XML str for display."""
    return serialize_gen(
        obj_pyxb,
        encoding=None,
        pretty=pretty,
        strip_prolog=strip_prolog,
        xslt_url=xslt_url,
    )
Serialize PyXB object to pretty printed XML str for display .
45,442
def reformat_to_pretty_xml(doc_xml):
    """Pretty print XML doc.

    Args:
        doc_xml: Well formed XML document as a str.

    Returns:
        str: Indented XML with the whitespace-only lines that minidom emits
        removed.
    """
    assert isinstance(doc_xml, str)
    pretty_xml = xml.dom.minidom.parseString(doc_xml).toprettyxml(indent=' ')
    return re.sub(r'^\s*$\n', r'', pretty_xml, flags=re.MULTILINE)
Pretty print XML doc .
45,443
def are_equivalent(a_xml, b_xml, encoding=None):
    """Return True if two XML docs are semantically equivalent, else False.

    Docs are equivalent when each is an equal-or-superset of the other.
    """
    assert isinstance(a_xml, str)
    assert isinstance(b_xml, str)
    a_tree = str_to_etree(a_xml, encoding)
    b_tree = str_to_etree(b_xml, encoding)
    if not are_equal_or_superset(a_tree, b_tree):
        return False
    return are_equal_or_superset(b_tree, a_tree)
Return True if two XML docs are semantically equivalent else False .
45,444
def are_equal_or_superset(superset_tree, base_tree):
    """Return True if superset_tree is equal to or a superset of base_tree.

    The comparison helpers raise CompareError on the first mismatch; the
    mismatch is logged at debug level and reported as False.
    """
    try:
        _compare_attr(superset_tree, base_tree)
        _compare_text(superset_tree, base_tree)
        return True
    except CompareError as e:
        logger.debug(str(e))
        return False
Return True if superset_tree is equal to or a superset of base_tree
45,445
def are_equal_xml(a_xml, b_xml):
    """Normalize and compare XML documents for equality.

    The documents may or may not be DataONE types.
    """
    a_root = xml.dom.minidom.parseString(a_xml).documentElement
    b_root = xml.dom.minidom.parseString(b_xml).documentElement
    return are_equal_elements(a_root, b_root)
Normalize and compare XML documents for equality . The document may or may not be a DataONE type .
45,446
def are_equal_elements(a_el, b_el):
    """Normalize and compare two DOM elements for equality.

    Elements are equal when tag name, attributes (order-insensitive) and the
    full child tree, including text node contents, match. Child order is
    significant.
    """
    if a_el.tagName != b_el.tagName:
        return False
    if sorted(a_el.attributes.items()) != sorted(b_el.attributes.items()):
        return False
    a_children = a_el.childNodes
    b_children = b_el.childNodes
    if len(a_children) != len(b_children):
        return False
    for a_child, b_child in zip(a_children, b_children):
        if a_child.nodeType != b_child.nodeType:
            return False
        if a_child.nodeType == a_child.TEXT_NODE and a_child.data != b_child.data:
            return False
        if a_child.nodeType == a_child.ELEMENT_NODE:
            if not are_equal_elements(a_child, b_child):
                return False
    return True
Normalize and compare ElementTrees for equality .
45,447
def sort_elements_by_child_values(obj_pyxb, child_name_list):
    """In-place sort simple or complex elements in a PyXB object by the
    values they contain in the child elements named in ``child_name_list``."""
    def sort_key(el):
        return [get_auto(getattr(el, child_name)) for child_name in child_name_list]

    obj_pyxb.sort(key=sort_key)
In - place sort simple or complex elements in a PyXB object by values they contain in child elements .
45,448
def format_diff_pyxb(a_pyxb, b_pyxb):
    """Create a diff between two PyXB objects.

    Returns an ndiff-style str of the pretty printed XML of each object.
    """
    a_lines = serialize_to_xml_str(a_pyxb).splitlines()
    b_lines = serialize_to_xml_str(b_pyxb).splitlines()
    return '\n'.join(difflib.ndiff(a_lines, b_lines))
Create a diff between two PyXB objects .
45,449
def format_diff_xml(a_xml, b_xml):
    """Create a diff between two XML documents.

    Returns an ndiff-style str of the pretty printed form of each document.
    """
    a_lines = reformat_to_pretty_xml(a_xml).splitlines()
    b_lines = reformat_to_pretty_xml(b_xml).splitlines()
    return '\n'.join(difflib.ndiff(a_lines, b_lines))
Create a diff between two XML documents .
45,450
def get_opt_attr(obj_pyxb, attr_str, default_val=None):
    """Get an optional attribute value from a PyXB element.

    Returns ``default_val`` when the attribute is missing or is None.
    """
    attr_val = getattr(obj_pyxb, attr_str, default_val)
    if attr_val is None:
        return default_val
    return attr_val
Get an optional attribute value from a PyXB element .
45,451
def get_opt_val(obj_pyxb, attr_str, default_val=None):
    """Get an optional Simple Content value from a PyXB element.

    Returns ``default_val`` when the attribute is missing or holds no value.
    """
    try:
        attr_val = getattr(obj_pyxb, attr_str)
        return get_req_val(attr_val)
    except (ValueError, AttributeError):
        return default_val
Get an optional Simple Content value from a PyXB element .
45,452
def resolwe_exception_handler(exc, context):
    """Handle exceptions raised in API and make them nicer.

    Delegates to DRF's default handler first, then forces ValidationError to
    be reported as HTTP 400 with the message under an ``error`` key.

    Args:
        exc: Exception raised in a view.
        context: DRF handler context dict.

    Returns:
        rest_framework Response, or None to let DRF re-raise.
    """
    response = exception_handler(exc, context)
    if isinstance(exc, ValidationError):
        if response is None:
            # The default handler did not recognize the exception;
            # synthesize an empty response to attach the error to.
            response = Response({})
        response.status_code = 400
        # NOTE(review): assumes this ValidationError exposes ``message``
        # (Django style); DRF's own ValidationError does not -- confirm
        # which ValidationError is imported here.
        response.data['error'] = exc.message
    return response
Handle exceptions raised in API and make them nicer .
45,453
def validate_and_decode(jwt_bu64, cert_obj):
    """Example for validating the signature of a JWT using only the
    cryptography library.

    Args:
        jwt_bu64: Base64 encoded JWT.
        cert_obj: cryptography Certificate whose public key signed the JWT.

    Returns:
        dict: Decoded JWT claims.

    Raises:
        Exception: if the signature is invalid.
    """
    public_key = cert_obj.public_key()
    # The signed message is "<header>.<payload>" (first two JWT segments).
    # NOTE(review): get_bu64_tup vs get_jwt_tup -- the message is joined from
    # one helper while the signature comes from the other; confirm both
    # yield the byte representations that PKCS1v15/SHA256 verify() expects.
    message = '.'.join(d1_common.cert.jwt.get_bu64_tup(jwt_bu64)[:2])
    signature = d1_common.cert.jwt.get_jwt_tup(jwt_bu64)[2]
    try:
        public_key.verify(
            signature,
            message,
            cryptography.hazmat.primitives.asymmetric.padding.PKCS1v15(),
            cryptography.hazmat.primitives.hashes.SHA256(),
        )
    except cryptography.exceptions.InvalidSignature as e:
        raise Exception('Signature is invalid. error="{}"'.format(str(e)))
    return d1_common.cert.jwt.get_jwt_dict(jwt_bu64)
Example for validating the signature of a JWT using only the cryptography library .
45,454
def find_valid_combinations(cert_file_name_list, jwt_file_name_list):
    """Given a list of cert and JWT file names, print a list showing each
    combination, along with indicators for combinations where the JWT
    signature was successfully validated with the cert.

    Args:
        cert_file_name_list: Paths to PEM encoded certificate files.
        jwt_file_name_list: Paths to files holding base64 encoded JWTs.

    Fix: the original assigned empty strings instead of reading the files,
    so every combination was checked against empty input.
    """
    for cert_file_name in cert_file_name_list:
        with open(cert_file_name) as f:
            cert_pem = f.read()
        cert_obj = d1_common.cert.x509.deserialize_pem(cert_pem)
        for jwt_file_name in jwt_file_name_list:
            with open(jwt_file_name) as f:
                jwt_bu64 = f.read()
            is_ok = False
            try:
                d1_common.cert.jwt.validate_and_decode(jwt_bu64, cert_obj)
            except d1_common.cert.jwt.JwtException as e:
                logging.info('Invalid. msg="{}"'.format(str(e)))
            else:
                is_ok = True
            logging.info(
                '{} {} {}'.format(
                    '***' if is_ok else ' ', cert_file_name, jwt_file_name
                )
            )
Given a list of cert and JWT file names print a list showing each combination along with indicators for combinations where the JWT signature was successfully validated with the cert .
45,455
def parseUrl(url):
    """Return a dict containing scheme, netloc, url, params, query and
    fragment keys.

    Query parameters with multiple values become sorted lists; single values
    are plain strings.
    """
    scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(url)
    query_dict = {}
    for key, val_list in urllib.parse.parse_qs(query).items():
        query_dict[key] = sorted(val_list) if len(val_list) > 1 else val_list[0]
    return {
        'scheme': scheme,
        'netloc': netloc,
        'url': path,
        'params': params,
        'query': query_dict,
        'fragment': fragment,
    }
Return a dict containing scheme netloc url params query fragment keys .
45,456
def encodePathElement(element):
    """Encode a URL path element according to RFC3986.

    str values are UTF-8 encoded before quoting; ints are stringified; other
    values are passed through to quote() unchanged.
    """
    if isinstance(element, str):
        element = element.encode('utf-8')
    elif isinstance(element, int):
        element = str(element)
    return urllib.parse.quote(
        element, safe=d1_common.const.URL_PATHELEMENT_SAFE_CHARS
    )
Encode a URL path element according to RFC3986 .
45,457
def encodeQueryElement(element):
    """Encode a URL query element according to RFC3986.

    str values are UTF-8 encoded before quoting; ints are stringified; other
    values are passed through to quote() unchanged.
    """
    if isinstance(element, str):
        element = element.encode('utf-8')
    elif isinstance(element, int):
        element = str(element)
    return urllib.parse.quote(
        element, safe=d1_common.const.URL_QUERYELEMENT_SAFE_CHARS
    )
Encode a URL query element according to RFC3986 .
45,458
def urlencode(query, doseq=0):
    """Modified version of the standard urllib.urlencode that conforms to
    RFC3986.

    The urllib version encodes spaces as "+", which can lead to
    inconsistency; this version always encodes spaces as "%20". Pairs whose
    value is None are dropped, and the resulting pairs are sorted.

    Args:
        query: Mapping or sequence of (key, value) tuples.
        doseq: If true, sequence values are expanded to one key=value pair
            per element instead of being stringified whole.

    Returns:
        str: Encoded query string.

    Raises:
        TypeError: if ``query`` is not a valid mapping or sequence of pairs.

    Fix: removed an unreachable second ``isinstance(v, str)`` branch -- a
    leftover Python 2 ``unicode`` check shadowed by the identical test above
    it. Behavior is unchanged.
    """
    if hasattr(query, "items"):
        # Drop None values, then flatten the mapping to (key, value) pairs.
        for k in list(query.keys()):
            if query[k] is None:
                del query[k]
        query = list(query.items())
    else:
        query = [kv for kv in query if kv[1] is not None]
    try:
        # Probe that the sequence holds tuples; a non-sequence raises
        # TypeError here as well.
        if len(query) and not isinstance(query[0], tuple):
            raise TypeError
    except TypeError:
        ty, va, tb = sys.exc_info()
        raise TypeError(
            "not a valid non-string sequence or mapping object"
        ).with_traceback(tb)
    pair_list = []
    if not doseq:
        for k, v in query:
            pair_list.append(
                encodeQueryElement(str(k)) + '=' + encodeQueryElement(str(v))
            )
    else:
        for k, v in query:
            k = encodeQueryElement(str(k))
            if isinstance(v, str):
                pair_list.append(k + '=' + encodeQueryElement(v))
            else:
                try:
                    len(v)
                except TypeError:
                    # Non-sequence: stringify the single value.
                    pair_list.append(k + '=' + encodeQueryElement(str(v)))
                else:
                    for elt in v:
                        pair_list.append(k + '=' + encodeQueryElement(str(elt)))
    return '&'.join(sorted(pair_list))
Modified version of the standard urllib . urlencode that is conforms to RFC3986 . The urllib version encodes spaces as + which can lead to inconsistency . This version will always encode spaces as %20 .
45,459
def makeCNBaseURL(url):
    """Attempt to create a valid CN BaseURL from a partial URL.

    Missing scheme, host, or path sections are filled in with the DataONE
    CN defaults.
    """
    parts = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_CN_PROTOCOL)
    if parts.netloc:
        netloc = parts.netloc
        path = parts.path or d1_common.const.DEFAULT_CN_PATH
    elif parts.path:
        # No netloc parsed; treat the first path segment as the host.
        host, sep, rest = parts.path.partition('/')
        netloc = host
        path = rest if sep else d1_common.const.DEFAULT_CN_PATH
    else:
        netloc = d1_common.const.DEFAULT_CN_HOST
        path = d1_common.const.DEFAULT_CN_PATH
    return urllib.parse.urlunparse(
        (parts.scheme, netloc, path, parts.params, parts.query, parts.fragment)
    )
Attempt to create a valid CN BaseURL when one or more sections of the URL are missing .
45,460
def makeMNBaseURL(url):
    """Attempt to create a valid MN BaseURL from a partial URL.

    Missing scheme, host, or path sections are filled in with the DataONE
    MN defaults.
    """
    parts = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_MN_PROTOCOL)
    if parts.netloc:
        netloc = parts.netloc
        path = parts.path or d1_common.const.DEFAULT_MN_PATH
    elif parts.path:
        # No netloc parsed; treat the first path segment as the host.
        host, sep, rest = parts.path.partition('/')
        netloc = host
        path = rest if sep else d1_common.const.DEFAULT_MN_PATH
    else:
        netloc = d1_common.const.DEFAULT_MN_HOST
        path = d1_common.const.DEFAULT_MN_PATH
    return urllib.parse.urlunparse(
        (parts.scheme, netloc, path, parts.params, parts.query, parts.fragment)
    )
Attempt to create a valid MN BaseURL when one or more sections of the URL are missing .
45,461
def find_url_mismatches(a_url, b_url):
    """Compare two URLs and return a list of human-readable mismatch messages.

    Scheme and network location are compared case-insensitively; path,
    parameters, query, and fragment are compared exactly (query values are
    compared order-insensitively).

    Returns:
      list of str: One message per mismatch; empty if the URLs are
      equivalent.
    """
    diff_list = []
    a_parts = urllib.parse.urlparse(a_url)
    b_parts = urllib.parse.urlparse(b_url)
    if a_parts.scheme.lower() != b_parts.scheme.lower():
        diff_list.append(
            'Schemes differ. a="{}" b="{}" differ'.format(
                a_parts.scheme.lower(), b_parts.scheme.lower()
            )
        )
    if a_parts.netloc.lower() != b_parts.netloc.lower():
        # Fixed: the original formatted the bound method object because the
        # call parentheses on b_parts.netloc.lower were missing.
        diff_list.append(
            'Network locations differ. a="{}" b="{}"'.format(
                a_parts.netloc.lower(), b_parts.netloc.lower()
            )
        )
    if a_parts.path != b_parts.path:
        diff_list.append(
            'Paths differ: a="{}" b="{}"'.format(a_parts.path, b_parts.path)
        )
    if a_parts.fragment != b_parts.fragment:
        diff_list.append(
            'Fragments differ. a="{}" b="{}"'.format(
                a_parts.fragment, b_parts.fragment
            )
        )
    a_param_list = sorted(a_parts.params.split(";"))
    b_param_list = sorted(b_parts.params.split(";"))
    if a_param_list != b_param_list:
        diff_list.append(
            'Parameters differ. a="{}" b="{}"'.format(
                ', '.join(a_param_list), ', '.join(b_param_list)
            )
        )
    a_query_dict = urllib.parse.parse_qs(a_parts.query)
    b_query_dict = urllib.parse.parse_qs(b_parts.query)
    if len(list(a_query_dict.keys())) != len(list(b_query_dict.keys())):
        diff_list.append(
            'Number of query keys differs. a={} b={}'.format(
                len(list(a_query_dict.keys())), len(list(b_query_dict.keys()))
            )
        )
    # Fixed: the original iterated b_query_dict here and tested membership
    # against b_query_dict itself, so keys present only in the first URL
    # were never reported.
    for a_key in a_query_dict:
        if a_key not in b_query_dict:
            diff_list.append(
                'Query key in first missing in second. a_key="{}"'.format(a_key)
            )
        elif sorted(a_query_dict[a_key]) != sorted(b_query_dict[a_key]):
            diff_list.append(
                'Query values differ. key="{}" a_value="{}" b_value="{}"'.format(
                    a_key, sorted(a_query_dict[a_key]), sorted(b_query_dict[a_key])
                )
            )
    for b_key in b_query_dict:
        if b_key not in a_query_dict:
            diff_list.append(
                'Query key in second missing in first. b_key="{}"'.format(b_key)
            )
    return diff_list
Given two URLs return a list of any mismatches .
45,462
def search(self, line):
    """CN search.

    Dispatches to the Solr handler; any other configured query engine is
    rejected.

    Raises:
      InvalidArguments: If the session's query engine is not "solr".
    """
    engine = self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME)
    if engine == "solr":
        return self._search_solr(line)
    raise d1_cli.impl.exceptions.InvalidArguments(
        "Unsupported query engine: {}".format(engine)
    )
CN search .
45,463
def resolve(self, pid):
    """Print the resolved object locations for *pid*.

    Queries the CN for the ObjectLocationList and prints each location URL.
    """
    client = d1_cli.impl.client.CLICNClient(
        **self._cn_client_connect_params_from_session()
    )
    for location in client.resolve(pid).objectLocation:
        d1_cli.impl.util.print_info(location.url)
Get Object Locations for Object .
45,464
def science_object_get(self, pid, path):
    """Download a Science Object, trying the session MN first.

    If the session Member Node does not hold the object, resolve it on the
    CN and try each replica location in turn.

    Raises:
      CLIError: If the object could not be retrieved from any node.
    """
    session_client = d1_cli.impl.client.CLIMNClient(
        **self._mn_client_connect_params_from_session()
    )
    try:
        response = session_client.get(pid)
    except d1_common.types.exceptions.DataONEException:
        pass
    else:
        self._output(response, path)
        return
    cn_client = d1_cli.impl.client.CLICNClient(
        **self._cn_client_connect_params_from_session()
    )
    for location in cn_client.resolve(pid).objectLocation:
        try:
            replica_params = self._mn_client_connect_params_from_session()
            replica_params["base_url"] = location.baseURL
            replica_client = d1_cli.impl.client.CLIMNClient(**replica_params)
            response = replica_client.get(pid)
        except d1_common.types.exceptions.DataONEException:
            # This replica failed; try the next location.
            pass
        else:
            self._output(response, path)
            return
    raise d1_cli.impl.exceptions.CLIError("Could not find object: {}".format(pid))
Get a Science Object, first trying the MN set in the session, then each replica location resolved on the CN.
45,465
def science_object_create(self, pid, path, format_id=None):
    """Queue the creation of a new Science Object on a Member Node."""
    self._queue_science_object_create(pid, path, format_id)
Create a new Science Object on a Member Node .
45,466
def science_object_update(self, pid_old, path, pid_new, format_id=None):
    """Queue the replacement (obsoletion) of a Science Object on a Member Node."""
    self._queue_science_object_update(pid_old, path, pid_new, format_id)
Obsolete a Science Object on a Member Node with a different one .
45,467
def _search_solr(self, line):
    """Perform a Solr search on the CN and print the result.

    On ServiceFailure, attempt to parse a Solr syntax error out of the
    HTML error page and print it as a warning; other failures are printed
    as unexpected errors.
    """
    try:
        query_str = self._create_solr_query(line)
        client = d1_cli.impl.client.CLICNClient(
            **self._cn_client_connect_params_from_session()
        )
        object_list_pyxb = client.search(
            queryType=d1_common.const.DEFAULT_SEARCH_ENGINE,
            query=query_str,
            start=self._session.get(d1_cli.impl.session.START_NAME),
            rows=self._session.get(d1_cli.impl.session.COUNT_NAME),
        )
        d1_cli.impl.util.print_info(self._pretty(object_list_pyxb.toxml("utf-8")))
    except d1_common.types.exceptions.ServiceFailure as e:
        # Flatten the multi-line exception text so a single regex can scan it;
        # "%" serves as the line separator below.
        e = "%".join(str(e).splitlines())
        regexp = re.compile(
            r"errorCode: (?P<error_code>\d+)%.*%Status code: (?P<status_code>\d+)"
        )
        result = regexp.search(e)
        # errorCode 500 with HTTP status 400 indicates a Solr query syntax
        # error whose description is embedded in the returned HTML page.
        if (
            (result is not None)
            and (result.group("error_code") == "500")
            and (result.group("status_code") == "400")
        ):
            result = re.search(
                r"<b>description</b> <u>(?P<description>[^<]+)</u>", e
            )
            # Decode HTML entities (e.g. &quot;) in the description text.
            msg = re.sub(
                "&([^;]+);",
                lambda m: chr(html.entities.name2codepoint[m.group(1)]),
                result.group("description"),
            )
            d1_cli.impl.util.print_info("Warning: %s" % msg)
        else:
            d1_cli.impl.util.print_error("Unexpected error:\n%s" % str(e))
Perform a SOLR search .
45,468
def _create_solr_query ( self , line ) : p0 = "" if line : p0 = line . strip ( ) p1 = self . _query_string_to_solr_filter ( line ) p2 = self . _object_format_to_solr_filter ( line ) p3 = self . _time_span_to_solr_filter ( ) result = p0 + p1 + p2 + p3 return result . strip ( )
Actual search - easier to test .
45,469
def apply_filter_list(func, obj):
    """Apply *func* element-wise if *obj* is a list or tuple, else directly.

    Returns a list when given a list or tuple (tuple inputs are not
    preserved as tuples), otherwise the single transformed value.
    """
    if not isinstance(obj, (list, tuple)):
        return func(obj)
    return [func(element) for element in obj]
Apply func to list or tuple obj element - wise and directly otherwise .
45,470
def _get_data_attr(data, attr):
    """Return attribute *attr* of the Data object referenced by *data*.

    *data* may be a primary key or a hydrated dict carrying ``__id``.
    """
    data_id = data['__id'] if isinstance(data, dict) else data
    return getattr(Data.objects.get(id=data_id), attr)
Get data object field .
45,471
def input_(data, field_path):
    """Return the hydrated value of an input field of a Data object.

    *data* is a hydrated dict carrying ``__id``; *field_path* is a dotted
    path into the process input schema. The stored inputs are deep-copied
    before hydration so the Data object is not modified.
    """
    data_obj = Data.objects.get(id=data['__id'])
    hydrated = copy.deepcopy(data_obj.input)
    schema = data_obj.process.input_schema
    hydrate_input_references(hydrated, schema)
    hydrate_input_uploads(hydrated, schema)
    return dict_dot(hydrated, field_path)
Return a hydrated value of the input field .
45,472
def _get_hydrated_path ( field ) : if isinstance ( field , str ) and hasattr ( field , 'file_name' ) : return field if isinstance ( field , dict ) and 'file' in field : hydrated_path = field [ 'file' ] if not hasattr ( hydrated_path , 'file_name' ) : raise TypeError ( "Filter argument must be a valid file-type field." ) return hydrated_path
Return HydratedPath object for file - type field .
45,473
def get_url(field):
    """Return the file's URL based on RESOLWE_HOST_URL in settings.

    Falls back to 'localhost' if the setting is absent.
    """
    hydrated = _get_hydrated_path(field)
    host = getattr(settings, 'RESOLWE_HOST_URL', 'localhost')
    return "{}/data/{}/{}".format(host, hydrated.data_id, hydrated.file_name)
Return the file's URL based on the base URL set in settings.
45,474
def descriptor(obj, path=''):
    """Return the descriptor (or a sub-path of it) of the given object.

    Dicts are expected to carry the descriptor under ``__descriptor``;
    other objects expose a ``descriptor`` attribute. Container values
    (list/dict) are returned JSON-serialized.
    """
    source = obj['__descriptor'] if isinstance(obj, dict) else obj.descriptor
    value = dict_dot(source, path)
    if isinstance(value, (list, dict)):
        return json.dumps(value)
    return value
Return descriptor of given object .
45,475
def _close_open_date_ranges ( self , record ) : date_ranges = ( ( 'beginDate' , 'endDate' ) , ) for begin , end in date_ranges : if begin in record and end in record : return elif begin in record : record [ end ] = record [ begin ] elif end in record : record [ begin ] = record [ end ]
If a date range is missing the start or end date close it by copying the date from the existing value .
45,476
def getSolrType(self, field):
    """Return the Solr type name of *field*, defaulting to 'string'.

    The exact field name is looked up first; failing that, the suffix after
    the last underscore is tried (dynamic-field convention) and the result
    is cached under the full field name.
    """
    try:
        return self.fieldtypes[field]
    except Exception:
        pass
    solr_type = 'string'
    if '_' in field:
        suffix = field.rsplit('_', 1)[-1]
        try:
            solr_type = self.fieldtypes[suffix]
            # Cache so the next lookup for this field hits directly.
            self.fieldtypes[field] = solr_type
        except Exception:
            pass
    return solr_type
Returns the SOLR type of the specified field name .
45,477
def getftype(self, name):
    """Return the Python type corresponding to the Solr field *name*.

    Unknown fields default to str; Solr types with no Python mapping are
    returned as their raw type-name string. Field metadata comes from
    getFields(), which is cached, so repeated calls are cheap.
    """
    try:
        field_info = self.getFields()['fields'][name]
    except Exception:
        return str
    solr_type = field_info['type']
    if solr_type in ('string', 'text', 'stext', 'text_ws'):
        return str
    if solr_type in ('sint', 'integer', 'long', 'slong'):
        return int
    if solr_type in ('sdouble', 'double', 'sfloat', 'float'):
        return float
    if solr_type == 'boolean':
        return bool
    return solr_type
Returns the python type for the specified field name . The field list is cached so multiple calls do not invoke a getFields request each time .
45,478
def clean(self, value):
    """Validate *value* and return its cleaned form.

    None is replaced by the field default before conversion.

    Raises:
      ValidationError: With the field name prepended to the original
        validation message.
    """
    if value is None:
        value = self.default
    try:
        cleaned = self.to_python(value)
        self.validate(cleaned)
    except ValidationError as error:
        raise ValidationError(
            "invalid value for {}: {}".format(self.name, error.args[0])
        )
    return cleaned
Run validators and return the clean value .
45,479
def import_file(self, imported_format=None, progress_from=0.0, progress_to=None):
    """Import this field's source file into the working directory.

    Delegates to resolwe_runtime_utils.import_file; when no format is
    given, both the compressed and extracted forms are imported.

    Raises:
      RuntimeError: If resolwe-runtime-utils is older than 2.0.0 (i.e. it
        lacks the import_file helper).
    """
    if not hasattr(resolwe_runtime_utils, 'import_file'):
        raise RuntimeError('Requires resolwe-runtime-utils >= 2.0.0')
    fmt = (
        resolwe_runtime_utils.ImportedFormat.BOTH
        if imported_format is None
        else imported_format
    )
    return resolwe_runtime_utils.import_file(
        src=self.file_temp,
        file_name=self.path,
        imported_format=fmt,
        progress_from=progress_from,
        progress_to=progress_to,
    )
Import field source file to working directory .
45,480
def _get ( self , key ) : self . _populate_cache ( ) if key not in self . _cache : raise AttributeError ( "DataField has no member {}" . format ( key ) ) return self . _cache [ key ]
Return given key from cache .
45,481
def update_constants():
    """Recreate the Redis channel name constants from current settings.

    Reads FLOW_MANAGER['REDIS_PREFIX'] from Django settings (empty string
    when unset) and rebuilds the module-level channel/prefix constants.
    Call this after the relevant settings change.
    """
    # Rebind the module-level names rather than creating locals.
    global MANAGER_CONTROL_CHANNEL, MANAGER_EXECUTOR_CHANNELS
    global MANAGER_LISTENER_STATS, MANAGER_STATE_PREFIX
    redis_prefix = getattr(settings, 'FLOW_MANAGER', {}).get('REDIS_PREFIX', '')
    MANAGER_CONTROL_CHANNEL = '{}.control'.format(redis_prefix)
    # Paired queue/response channel names for executor results.
    MANAGER_EXECUTOR_CHANNELS = ManagerChannelPair(
        '{}.result_queue'.format(redis_prefix),
        '{}.result_queue_response'.format(redis_prefix),
    )
    MANAGER_STATE_PREFIX = '{}.state'.format(redis_prefix)
    MANAGER_LISTENER_STATS = '{}.listener_stats'.format(redis_prefix)
Recreate channel name constants with changed settings .
45,482
def destroy_channels(self):
    """Delete every Redis channel managed by this state instance.

    Scans the instance's attributes for RedisAtomicBase objects and removes
    their backing keys from Redis.
    """
    for attr_name in dir(self):
        candidate = getattr(self, attr_name)
        if isinstance(candidate, self.RedisAtomicBase):
            self.redis.delete(candidate.item_name)
Destroy Redis channels managed by this state instance .
45,483
def render_to_image_file(self, image_out_path, width_pixels=None, height_pixels=None, dpi=90):
    """Render the SubjectInfo tree to an image file.

    The output format is determined by the file extension; dimensions are
    given in pixels.
    """
    self._render_type = "file"
    style = self._get_tree_style()
    self._tree.render(
        file_name=image_out_path,
        w=width_pixels,
        h=height_pixels,
        dpi=dpi,
        units="px",
        tree_style=style,
    )
Render the SubjectInfo to an image file .
45,484
def browse_in_qt5_ui(self):
    """Open the SubjectInfo tree in ETE's interactive Qt5 browser."""
    self._render_type = "browse"
    style = self._get_tree_style()
    self._tree.show(tree_style=style)
Browse and edit the SubjectInfo in a simple Qt5 based UI .
45,485
def _gen_etetoolkit_tree(self, node, subject_info_tree):
    """Recursively copy a SubjectInfoTree into the ETE tree rooted at *node*.

    Raises:
      AssertionError: On an unrecognized node type.
    """
    for si_child in subject_info_tree.child_list:
        if si_child.type_str == TYPE_NODE_TAG:
            ete_child = self._add_type_node(node, si_child.label_str)
        elif si_child.type_str == SUBJECT_NODE_TAG:
            ete_child = self._add_subject_node(node, si_child.label_str)
        else:
            raise AssertionError(
                'Unknown node type. type_str="{}"'.format(si_child.type_str)
            )
        self._gen_etetoolkit_tree(ete_child, si_child)
Copy SubjectInfoTree to a ETE Tree .
45,486
def _add_type_node(self, node, label):
    """Attach and return a child node flagged as a SubjectInfo type node."""
    type_node = node.add_child(name=label)
    type_node.add_feature(TYPE_NODE_TAG, True)
    return type_node
Add a node representing a SubjectInfo type .
45,487
def _add_subject_node(self, node, subj_str):
    """Attach and return a child node flagged as holding a subject string."""
    subject_node = node.add_child(name=subj_str)
    subject_node.add_feature(SUBJECT_NODE_TAG, True)
    return subject_node
Add a node containing a subject string .
45,488
def _get_node_path ( self , node ) : path = [ ] while node . up : path . append ( node . name ) node = node . up return list ( reversed ( path ) )
Return the path from the root to node as a list of node names .
45,489
def _layout(self, node):
    """Style callback invoked by ETE for each node before rendering.

    Applies the shared edge style, then renders the node either as a text
    face (subject nodes) or a labeled colored circle (type nodes), based on
    which feature tag the node carries.
    """

    def set_edge_style():
        # Uniform edge color/width; size 0 hides ETE's default node dot.
        node_style = ete3.NodeStyle()
        node_style["vt_line_color"] = EDGE_COLOR
        node_style["hz_line_color"] = EDGE_COLOR
        node_style["vt_line_width"] = EDGE_WIDTH
        node_style["hz_line_width"] = EDGE_WIDTH
        node_style["size"] = 0
        node.set_style(node_style)

    def style_subject_node(color="Black"):
        # Subject nodes render as plain text to the right of the branch.
        face = ete3.TextFace(node.name, fsize=SUBJECT_NODE_FONT_SIZE, fgcolor=color)
        set_face_margin(face)
        node.add_face(face, column=0, position="branch-right")

    def style_type_node(color="Black"):
        # Type nodes render as labeled colored circles; the label font size
        # depends on whether we are rendering to file or browsing.
        face = ete3.CircleFace(
            radius=TYPE_NODE_RADIUS,
            color=TYPE_NODE_COLOR_DICT.get(node.name, "White"),
            style="circle",
            label={
                "text": node.name,
                "color": color,
                "fontsize": (
                    TYPE_NODE_FONT_SIZE_FILE
                    if self._render_type == "file"
                    else TYPE_NODE_FONT_SIZE_BROWSE
                ),
            },
        )
        set_face_margin(face)
        node.add_face(face, column=0, position="branch-right")

    def set_face_margin(face):
        # Horizontal padding around each face.
        face.margin_left = 5
        face.margin_right = 5

    set_edge_style()
    if hasattr(node, SUBJECT_NODE_TAG):
        style_subject_node()
    elif hasattr(node, TYPE_NODE_TAG):
        style_type_node()
    else:
        raise AssertionError("Unknown node type")
ETE calls this function to style each node before rendering .
45,490
def extend_settings(self, data_id, files, secrets):
    """Extend executor settings, refusing processes that require secrets.

    Raises:
      PermissionDenied: If the process declares the 'secrets' resource
        requirement, which the local executor does not support.
    """
    process = Data.objects.get(pk=data_id).process
    requires_secrets = process.requirements.get('resources', {}).get('secrets', False)
    if requires_secrets:
        raise PermissionDenied(
            "Process which requires access to secrets cannot be run using the local executor"
        )
    return super().extend_settings(data_id, files, secrets)
Prevent processes requiring access to secrets from being run .
45,491
def get_finder(import_path):
    """Import and instantiate a process finder class.

    Raises:
      ImproperlyConfigured: If the imported class is not a
        BaseProcessesFinder subclass.
    """
    finder_class = import_string(import_path)
    if issubclass(finder_class, BaseProcessesFinder):
        return finder_class()
    raise ImproperlyConfigured(
        'Finder "{}" is not a subclass of "{}"'.format(
            finder_class, BaseProcessesFinder
        )
    )
Get a process finder .
45,492
def _find_folders(self, folder_name):
    """Return the *folder_name* sub-directory of each installed app that has one."""
    found = []
    for app_config in apps.get_app_configs():
        candidate = os.path.join(app_config.path, folder_name)
        if os.path.isdir(candidate):
            found.append(candidate)
    return found
Return a list of sub - directories .
45,493
def sanity(request, sysmeta_pyxb):
    """Check that *sysmeta_pyxb* is suitable for creating a new object.

    Verifies that no replica section is present and that the object is
    neither archived nor obsoleted. Unless this is a remote/proxy create,
    also checks that size and checksum match the uploaded sciobj bytes.
    """
    _does_not_contain_replica_sections(sysmeta_pyxb)
    _is_not_archived(sysmeta_pyxb)
    _obsoleted_by_not_specified(sysmeta_pyxb)
    # Proxy objects (vendor-specific remote URL header) skip the size and
    # checksum checks since the bytes are not stored locally.
    if 'HTTP_VENDOR_GMN_REMOTE_URL' in request.META:
        return
    _has_correct_file_size(request, sysmeta_pyxb)
    _is_supported_checksum_algorithm(sysmeta_pyxb)
    _is_correct_checksum(request, sysmeta_pyxb)
Check that sysmeta_pyxb is suitable for creating a new object and matches the uploaded sciobj bytes .
45,494
def is_valid_sid_for_new_standalone(sysmeta_pyxb):
    """Assert that any SID in *sysmeta_pyxb* can be assigned to a new standalone object.

    Raises:
      IdentifierNotUnique: If the SID is already in use.
    """
    sid = d1_common.xml.get_opt_val(sysmeta_pyxb, 'seriesId')
    if d1_gmn.app.did.is_valid_sid_for_new_standalone(sid):
        return
    raise d1_common.types.exceptions.IdentifierNotUnique(
        0,
        'Identifier is already in use as {}. did="{}"'.format(
            d1_gmn.app.did.classify_identifier(sid), sid
        ),
        identifier=sid,
    )
Assert that any SID in sysmeta_pyxb can be assigned to a new standalone object .
45,495
def is_valid_sid_for_chain(pid, sid):
    """Assert that *sid* can be assigned to *pid* or to its revision chain.

    Raises:
      IdentifierNotUnique: If a different SID is already bound to the chain.
    """
    if d1_gmn.app.did.is_valid_sid_for_chain(pid, sid):
        return
    existing_sid = d1_gmn.app.revision.get_sid_by_pid(pid)
    raise d1_common.types.exceptions.IdentifierNotUnique(
        0,
        'A different SID is already assigned to the revision chain to which '
        'the object being created or updated belongs. A SID cannot be changed '
        'once it has been assigned to a chain. '
        'existing_sid="{}", new_sid="{}", pid="{}"'.format(existing_sid, sid, pid),
    )
Assert that sid can be assigned to the single object pid or to the chain to which pid belongs .
45,496
def _does_not_contain_replica_sections(sysmeta_pyxb):
    """Assert that *sysmeta_pyxb* contains no replica sections.

    Raises:
      InvalidSystemMetadata: If one or more replica elements are present.
    """
    if len(getattr(sysmeta_pyxb, 'replica', [])):
        pid = d1_common.xml.get_req_val(sysmeta_pyxb.identifier)
        raise d1_common.types.exceptions.InvalidSystemMetadata(
            0,
            # Fixed duplicated word ("object object") in the original message.
            'A replica section was included. A new object created via '
            'create() or update() cannot already have replicas. pid="{}"'.format(pid),
            identifier=pid,
        )
Assert that sysmeta_pyxb does not contain any replica information .
45,497
def _is_not_archived(sysmeta_pyxb):
    """Assert that *sysmeta_pyxb* does not have the archived flag set.

    Raises:
      InvalidSystemMetadata: If the archived flag is set.
    """
    if not _is_archived(sysmeta_pyxb):
        return
    pid = d1_common.xml.get_req_val(sysmeta_pyxb.identifier)
    raise d1_common.types.exceptions.InvalidSystemMetadata(
        0,
        'Archived flag was set. A new object created via create() or update() '
        'cannot already be archived. pid="{}"'.format(pid),
        identifier=pid,
    )
Assert that sysmeta_pyxb does not have have the archived flag set .
45,498
def get_d1_env_by_base_url(cn_base_url):
    """Return the DataONE environment dict for the CN with the given BaseURL.

    Returns None if no known environment matches.
    """
    # Fixed: the original iterated the dict directly ("for k, v in
    # D1_ENV_DICT"), which attempts to unpack each key string into two
    # values and fails at runtime; .items() yields the intended
    # (key, value) pairs.
    for env_key, env_dict in D1_ENV_DICT.items():
        if env_dict['base_url'].startswith(cn_base_url):
            return env_dict
Given the BaseURL for a CN, return the DataONE environment dict for the CN's environment.
45,499
def match_all(d_SMEFT, parameters=None):
    """Match the SMEFT Warsaw basis onto the WET JMS basis.

    Args:
      d_SMEFT: dict of SMEFT Warsaw-basis Wilson coefficients in wcxf
        naming.
      parameters: optional dict of parameters, merged over the module-level
        defaults.

    Returns:
      dict: WET JMS-basis Wilson coefficients, restricted to the
      coefficients defined in the wcxf 'WET'/'JMS' basis.
    """
    p = default_parameters.copy()
    if parameters is not None:
        # Caller-supplied parameters override the defaults.
        p.update(parameters)
    C = wilson.util.smeftutil.wcxf2arrays_symmetrized(d_SMEFT)
    # NOTE(review): 246.22 is presumably the electroweak vev in GeV,
    # required by the matching formulas — confirm against wilson docs.
    C['vT'] = 246.22
    C_WET = match_all_array(C, p)
    C_WET = wilson.translate.wet.rotate_down(C_WET, p)
    C_WET = wetutil.unscale_dict_wet(C_WET)
    d_WET = wilson.util.smeftutil.arrays2wcxf(C_WET)
    basis = wcxf.Basis['WET', 'JMS']
    # Drop any coefficients not defined in the target basis.
    keys = set(d_WET.keys()) & set(basis.all_wcs)
    d_WET = {k: d_WET[k] for k in keys}
    return d_WET
Match the SMEFT Warsaw basis onto the WET JMS basis .