idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
246,000
def set_sum_w2(self, w, ix, iy=0, iz=0):
    """Set the Sumw2 (sum of squared weights) entry for bin (ix, iy, iz) to w.

    Raises RuntimeError if the histogram never stored weights and
    IndexError if the flattened bin index is out of range.
    """
    if self.GetSumw2N() == 0:
        raise RuntimeError(
            "Attempting to access Sumw2 in histogram "
            "where weights were not stored")
    nx = self.nbins(axis=0, overflow=True)
    ny = self.nbins(axis=1, overflow=True)
    # flatten (ix, iy, iz) into the 1D Sumw2 array index
    flat_idx = (iz * ny + iy) * nx + ix
    if not 0 <= flat_idx < self.GetSumw2N():
        raise IndexError("bin index out of range")
    self.GetSumw2().SetAt(w, flat_idx)
Sets the true number of entries in the bin weighted by w^2
163
15
246,001
def rebinned(self, bins, axis=0):
    """Return a new rebinned histogram.

    ``bins`` may be an int (merge that many bins along ``axis``), a tuple
    of per-axis group sizes, or an iterable of new bin edges (in which
    case the contents are refilled bin by bin into an empty clone).
    """
    ndim = self.GetDimension()
    if axis >= ndim:
        raise ValueError(
            "axis must be less than the dimensionality of the histogram")
    if isinstance(bins, int):
        # normalize a single int into a per-axis tuple of group sizes
        _bins = [1] * ndim
        try:
            _bins[axis] = bins
        except IndexError:
            raise ValueError("axis must be 0, 1, or 2")
        bins = tuple(_bins)
    if isinstance(bins, tuple):
        if len(bins) != ndim:
            raise ValueError(
                "bins must be a tuple with the same "
                "number of elements as histogram axes")
        # unique name so ROOT's Rebin does not clash with an existing object
        newname = '{0}_{1}'.format(self.__class__.__name__, uuid())
        if ndim == 1:
            hist = self.Rebin(bins[0], newname)
        elif ndim == 2:
            hist = self.Rebin2D(bins[0], bins[1], newname)
        else:
            hist = self.Rebin3D(bins[0], bins[1], bins[2], newname)
        hist = asrootpy(hist)
    elif hasattr(bins, '__iter__'):
        # arbitrary new edges: fill an empty clone bin by bin
        hist = self.empty_clone(bins, axis=axis)
        nbinsx = self.nbins(0)
        nbinsy = self.nbins(1)
        nbinsz = self.nbins(2)
        xaxis = self.xaxis
        yaxis = self.yaxis
        zaxis = self.zaxis
        # hoist bound-method lookups out of the triple loop for speed
        sum_w2 = self.GetSumw2()
        _sum_w2_at = sum_w2.At
        new_sum_w2 = hist.GetSumw2()
        _new_sum_w2_at = new_sum_w2.At
        _new_sum_w2_setat = new_sum_w2.SetAt
        _x_center = xaxis.GetBinCenter
        _y_center = yaxis.GetBinCenter
        _z_center = zaxis.GetBinCenter
        _find = hist.FindBin
        _set = hist.SetBinContent
        _get = hist.GetBinContent
        _this_get = self.GetBinContent
        _get_bin = super(_HistBase, self).GetBin
        for z in range(1, nbinsz + 1):
            for y in range(1, nbinsy + 1):
                for x in range(1, nbinsx + 1):
                    # destination bin in the rebinned histogram
                    newbin = _find(_x_center(x), _y_center(y), _z_center(z))
                    idx = _get_bin(x, y, z)
                    _set(newbin, _get(newbin) + _this_get(idx))
                    # accumulate the sum of squared weights as well
                    _new_sum_w2_setat(
                        _new_sum_w2_at(newbin) + _sum_w2_at(idx), newbin)
        hist.SetEntries(self.GetEntries())
    else:
        raise TypeError(
            "bins must either be an integer, a tuple, or an iterable")
    return hist
Return a new rebinned histogram
714
8
246,002
def smoothed(self, iterations=1):
    """Return a smoothed shallow copy of this histogram.

    ``iterations`` is passed straight through to TH1::Smooth.
    """
    clone = self.Clone(shallow=True)
    clone.Smooth(iterations)
    return clone
Return a smoothed copy of this histogram
29
9
246,003
def empty_clone(self, binning=None, axis=0, type=None, **kwargs):
    """Return a new empty histogram with the same binning.

    The binning may be modified along one axis via ``binning``/``axis``.
    If ``binning`` is False, that axis is dropped from the result.
    """
    ndim = self.GetDimension()
    if binning is False and ndim == 1:
        raise ValueError(
            "cannot remove the x-axis of a 1D histogram")
    args = []
    for iaxis in range(ndim):
        if iaxis == axis:
            if binning is False:
                # skip this axis
                continue
            elif binning is not None:
                # NOTE(review): an iterable of edges is wrapped in a 1-tuple
                # so extend() passes it through as a single argument; a
                # non-iterable binning would make extend() raise — confirm
                # expected binning forms against callers.
                if hasattr(binning, '__iter__'):
                    binning = (binning,)
                args.extend(binning)
                continue
        args.append(list(self._edges(axis=iaxis)))
    if type is None:
        type = self.TYPE
    if binning is False:
        # one axis was dropped above
        ndim -= 1
    cls = [Hist, Hist2D, Hist3D][ndim - 1]
    return cls(*args, type=type, **kwargs)
Return a new empty histogram . The binning may be modified along one axis by specifying the binning and axis arguments . If binning is False then the corresponding axis is dropped from the returned histogram .
219
42
246,004
def poisson_errors(self):
    """Return a TGraphAsymmErrors representation of this histogram where
    the point y errors are Poisson."""
    graph = Graph(self.nbins(axis=0), type='asymm')
    graph.SetLineWidth(self.GetLineWidth())
    graph.SetMarkerSize(self.GetMarkerSize())
    quantile = ROOT.TMath.ChisquareQuantile
    npoints = 0
    for hbin in self.bins(overflow=False):
        entries = hbin.effective_entries
        if entries <= 0:
            continue
        # central ~68% interval derived from the chi-square quantile
        err_low = entries - 0.5 * quantile(0.1586555, 2. * entries)
        err_high = 0.5 * quantile(1. - 0.1586555, 2. * (entries + 1)) - entries
        half_width = hbin.x.width / 2.
        graph.SetPoint(npoints, hbin.x.center, hbin.value)
        graph.SetPointEXlow(npoints, half_width)
        graph.SetPointEXhigh(npoints, half_width)
        graph.SetPointEYlow(npoints, err_low)
        graph.SetPointEYhigh(npoints, err_high)
        npoints += 1
    # shrink the graph to the number of points actually filled
    graph.Set(npoints)
    return graph
Return a TGraphAsymmErrors representation of this histogram where the point y errors are Poisson .
260
22
246,005
def attach_event_handler(canvas, handler=close_on_esc_or_middlemouse):
    """Attach ``handler`` to the canvas' ProcessedEvent slot.

    The default handler closes the canvas on middle-mouse click or
    escape.  Attach a handler only once per canvas, and keep the
    dispatcher alive on the canvas object.
    """
    if getattr(canvas, "_py_event_dispatcher_attached", None):
        return
    dispatcher = C.TPyDispatcherProcessedEvent(handler)
    canvas.Connect("ProcessedEvent(int,int,int,TObject*)",
                   "TPyDispatcherProcessedEvent", dispatcher,
                   "Dispatch(int,int,int,TObject*)")
    canvas._py_event_dispatcher_attached = dispatcher
Attach a handler function to the ProcessedEvent slot defaulting to closing when middle mouse is clicked or escape is pressed
151
23
246,006
def _num_to_string ( self , number , pad_to_length = None ) : output = "" while number : number , digit = divmod ( number , self . _alpha_len ) output += self . _alphabet [ digit ] if pad_to_length : remainder = max ( pad_to_length - len ( output ) , 0 ) output = output + self . _alphabet [ 0 ] * remainder return output
Convert a number to a string using the given alphabet .
93
12
246,007
def _string_to_int ( self , string ) : number = 0 for char in string [ : : - 1 ] : number = number * self . _alpha_len + self . _alphabet . index ( char ) return number
Convert a string to a number using the given alphabet.
50
12
246,008
def uuid(self, name=None, pad_length=22):
    """Generate and return an encoded UUID.

    Random (uuid4) when ``name`` is None; otherwise a uuid5 in the URL
    namespace if the name contains 'http', else the DNS namespace.
    """
    if name is None:
        # If no name is given, generate a random UUID.
        raw = _uu.uuid4()
    elif "http" not in name.lower():
        raw = _uu.uuid5(_uu.NAMESPACE_DNS, name)
    else:
        raw = _uu.uuid5(_uu.NAMESPACE_URL, name)
    return self.encode(raw, pad_length)
Generate and return a UUID .
118
8
246,009
def fit(self,
        data='obsData',
        model_config='ModelConfig',
        param_const=None,
        param_values=None,
        param_ranges=None,
        poi_const=False,
        poi_value=None,
        poi_range=None,
        extended=False,
        num_cpu=1,
        process_strategy=0,
        offset=False,
        print_level=None,
        return_nll=False,
        **kwargs):
    """Fit a pdf to data in a workspace.

    ``data`` and ``model_config`` may be names (looked up in the
    workspace) or objects.  Parameters can be fixed (``param_const``),
    set (``param_values``) or bounded (``param_ranges``) by name.
    A negative ``print_level`` silences RooFit while building the NLL.
    Returns the minimization result, plus the NLL if ``return_nll``.
    """
    if isinstance(model_config, string_types):
        model_config = self.obj(model_config, cls=ROOT.RooStats.ModelConfig)
    if isinstance(data, string_types):
        data = self.data(data)
    pdf = model_config.GetPdf()
    pois = model_config.GetParametersOfInterest()
    if pois.getSize() > 0:
        poi = pois.first()
        poi.setConstant(poi_const)
        if poi_value is not None:
            poi.setVal(poi_value)
        if poi_range is not None:
            poi.setRange(*poi_range)
    if param_const is not None:
        for param_name, const in param_const.items():
            var = self.var(param_name)
            var.setConstant(const)
    if param_values is not None:
        for param_name, param_value in param_values.items():
            var = self.var(param_name)
            var.setVal(param_value)
    if param_ranges is not None:
        for param_name, param_range in param_ranges.items():
            var = self.var(param_name)
            var.setRange(*param_range)
    # BUGFIX: print_level defaults to None and ``None < 0`` raises
    # TypeError on Python 3, so check for None explicitly.
    silence = print_level is not None and print_level < 0
    if silence:
        msg_service = ROOT.RooMsgService.instance()
        msg_level = msg_service.globalKillBelow()
        msg_service.setGlobalKillBelow(ROOT.RooFit.FATAL)
    args = [
        ROOT.RooFit.Constrain(model_config.GetNuisanceParameters()),
        ROOT.RooFit.GlobalObservables(model_config.GetGlobalObservables())]
    if extended:
        args.append(ROOT.RooFit.Extended(True))
    if offset:
        args.append(ROOT.RooFit.Offset(True))
    if num_cpu != 1:
        if num_cpu == 0:
            raise ValueError("num_cpu must be non-zero")
        if num_cpu < 0:
            num_cpu = NCPU
        args.append(ROOT.RooFit.NumCPU(num_cpu, process_strategy))
    func = pdf.createNLL(data, *args)
    if silence:
        # restore the previous RooFit message threshold
        msg_service.setGlobalKillBelow(msg_level)
    result = minimize(func, print_level=print_level, **kwargs)
    if return_nll:
        return result, func
    return result
Fit a pdf to data in a workspace
664
8
246,010
def ensure_trafaret(trafaret):
    """Helper for complex trafarets: accept a Trafaret instance, a
    Trafaret subclass, or any callable, and return a Trafaret instance."""
    if isinstance(trafaret, Trafaret):
        return trafaret
    if isinstance(trafaret, type):
        if issubclass(trafaret, Trafaret):
            return trafaret()
        # str, int, float are classes, but its appropriate to use them
        # as trafaret functions
        return Call(lambda val: trafaret(val))
    if callable(trafaret):
        return Call(trafaret)
    raise RuntimeError(
        "%r should be instance or subclass"
        " of Trafaret" % trafaret)
Helper for complex trafarets takes trafaret instance or class and returns trafaret instance
137
19
246,011
def DictKeys(keys):
    """Check that a dict has all of the given keys; values are unchecked."""
    return Dict({Key(name): Any for name in keys})
Checks if dict has all given keys
33
8
246,012
def guard(trafaret=None, **kwargs):
    """Decorator protecting a function with a trafaret.

    Pass either a Dict/Forward ``trafaret`` or keyword trafarets (which
    are wrapped in a Dict) — not both.  Validation errors surface as
    GuardError.
    """
    if (trafaret and not isinstance(trafaret, Dict) and
            not isinstance(trafaret, Forward)):
        raise RuntimeError("trafaret should be instance of Dict or Forward")
    elif trafaret and kwargs:
        raise RuntimeError(
            "choose one way of initialization,"
            " trafaret or kwargs")
    if not trafaret:
        trafaret = Dict(**kwargs)

    def wrapper(fn):
        argspec = getargspec(fn)

        @functools.wraps(fn)
        def decor(*args, **kwargs):
            fnargs = argspec.args
            if fnargs and fnargs[0] in ['self', 'cls']:
                # bound method: validate everything except self/cls
                obj = args[0]
                fnargs = fnargs[1:]
                checkargs = args[1:]
            else:
                obj = None
                checkargs = args
            try:
                # map positional args onto parameter names, then merge kwargs
                call_args = dict(
                    itertools.chain(zip(fnargs, checkargs), kwargs.items()))
                # fill in declared defaults for parameters not supplied
                for name, default in zip(
                        reversed(fnargs), reversed(argspec.defaults or ())):
                    if name not in call_args:
                        call_args[name] = default
                converted = trafaret(call_args)
            except DataError as err:
                raise GuardError(error=err.error)
            return fn(obj, **converted) if obj else fn(**converted)
        decor.__doc__ = "guarded with %r\n\n" % trafaret + (decor.__doc__ or "")
        return decor
    return wrapper
Decorator for protecting function with trafarets
364
10
246,013
def _clone_args ( self ) : keys = list ( self . keys ) kw = { } if self . allow_any or self . extras : kw [ 'allow_extra' ] = list ( self . extras ) if self . allow_any : kw [ 'allow_extra' ] . append ( '*' ) kw [ 'allow_extra_trafaret' ] = self . extras_trafaret if self . ignore_any or self . ignore : kw [ 'ignore_extra' ] = list ( self . ignore ) if self . ignore_any : kw [ 'ignore_any' ] . append ( '*' ) return keys , kw
return args to create new Dict clone
148
8
246,014
def merge(self, other):
    """Extend this Dict with another Dict, a list/tuple of Keys, or a
    plain dict intended for Dict construction.  Returns a new instance."""
    ignored = self.ignore
    extras_list = self.extras
    if isinstance(other, Dict):
        merged_keys = other.keys
        ignored += other.ignore
        extras_list += other.extras
    elif isinstance(other, (list, tuple)):
        merged_keys = list(other)
    elif isinstance(other, dict):
        # a plain dict is handed to the constructor alongside our keys
        return self.__class__(other, *self.keys)
    else:
        raise TypeError(
            'You must merge Dict only with Dict'
            ' or list of Keys')
    return self.__class__(*(self.keys + merged_keys),
                          ignore_extra=ignored,
                          allow_extra=extras_list)
Extends one Dict with another Dict, a list of Keys, or a dict instance intended for Dict construction.
145
22
246,015
def get_deep_attr(obj, keys):
    """Helper for DeepKey: walk ``keys`` through mappings and attributes.

    Raises DataError when a key is neither a mapping entry nor an
    attribute of the current object.
    """
    current = obj
    for key in keys:
        if isinstance(current, Mapping) and key in current:
            current = current[key]
        else:
            try:
                current = getattr(current, key)
            except AttributeError:
                raise DataError(error='Unexistent key')
    return current
Helper for DeepKey
76
4
246,016
def construct(arg):
    """Shortcut syntax for defining trafarets from plain Python values."""
    if isinstance(arg, t.Trafaret):
        return arg
    if isinstance(arg, tuple) or (isinstance(arg, list) and len(arg) > 1):
        return t.Tuple(*(construct(item) for item in arg))
    if isinstance(arg, list):
        # single-element list means "list of construct(element)"
        return t.List(construct(arg[0]))
    if isinstance(arg, dict):
        return t.Dict({construct_key(key): construct(value)
                       for key, value in arg.items()})
    if isinstance(arg, str):
        return t.Atom(arg)
    if isinstance(arg, type):
        # builtin scalar classes map to their trafaret counterparts
        builtin_map = {int: t.Int, float: t.Float, str: t.String, bool: t.Bool}
        if arg in builtin_map:
            return builtin_map[arg]()
        return t.Type(arg)
    if callable(arg):
        return t.Call(arg)
    return arg
Shortcut syntax to define trafarets .
240
9
246,017
def subdict(name, *keys, **kw):
    """Subdict key: group several keys' validated values under ``name``.

    The keyword-only ``trafaret`` (popped from kw for py2 compat) is
    applied to the collected sub-dict once every key validates.
    """
    trafaret = kw.pop('trafaret')  # coz py2k

    def inner(data, context=None):
        errors = False
        preserve_output = []
        touched = set()
        collect = {}
        for key in keys:
            for k, v, names in key(data, context=context):
                touched.update(names)
                preserve_output.append((k, v, names))
                if isinstance(v, t.DataError):
                    errors = True
                else:
                    collect[k] = v
        if errors:
            # propagate each key's own output, errors included
            for out in preserve_output:
                yield out
        elif collect:
            # all keys valid: yield the combined dict under ``name``
            yield name, t.catch(trafaret, collect), touched
    return inner
Subdict key .
161
4
246,018
def xor_key(first, second, trafaret):
    """xor_key — takes first and second key names and a trafaret.

    Exactly one of the two keys must be present: yields the validated
    value under ``first`` when so, DataErrors on both keys otherwise.
    """
    trafaret = t.Trafaret._trafaret(trafaret)

    def check_(value):
        if (first in value) ^ (second in value):
            key = first if first in value else second
            yield first, t.catch_error(trafaret, value[key]), (key,)
        elif first in value and second in value:
            yield first, t.DataError(
                error='correct only if {} is not defined'.format(second)), (first,)
            yield second, t.DataError(
                error='correct only if {} is not defined'.format(first)), (second,)
        else:
            # BUGFIX: the original formatted the literal strings 'second'
            # and 'first' into the messages instead of the key names.
            yield first, t.DataError(
                error='is required if {} is not defined'.format(second)), (first,)
            yield second, t.DataError(
                error='is required if {} is not defined'.format(first)), (second,)
    return check_
xor_key - takes first and second key names and trafaret .
225
16
246,019
def confirm_key(name, confirm_name, trafaret):
    """confirm_key — takes ``name``, ``confirm_name`` and a trafaret.

    Requires both keys, validates each with ``trafaret`` and yields an
    error when the confirmation value differs from the original.
    """
    def check_(value):
        original, confirmation = None, None
        if name in value:
            original = value[name]
        else:
            yield name, t.DataError('is required'), (name,)
        if confirm_name in value:
            confirmation = value[confirm_name]
        else:
            yield confirm_name, t.DataError('is required'), (confirm_name,)
        # NOTE(review): present-but-falsy values (e.g. '') also stop here,
        # skipping validation — confirm that is the intended behavior.
        if not (original and confirmation):
            return
        yield name, t.catch_error(trafaret, original), (name,)
        yield confirm_name, t.catch_error(trafaret, confirmation), (confirm_name,)
        if original != confirmation:
            yield confirm_name, t.DataError(
                'must be equal to {}'.format(name)), (confirm_name,)
    return check_
confirm_key - takes name confirm_name and trafaret .
191
15
246,020
def get_capacity(self, legacy=None):
    """Get capacity of all facilities via the '/capacity' API endpoint."""
    params = {'legacy': legacy} if legacy else None
    return self.call_api('/capacity', params=params)['capacity']
Get capacity of all facilities .
48
6
246,021
def altcore_data(self):
    """Return crypto_data for all currencies defined in moneywagon that
    meet the minimum support for altcore, keyed per the bitcore spec."""
    ret = []
    for symbol in self.supported_currencies(project='altcore', level="address"):
        data = crypto_data[symbol]
        priv = data.get('private_key_prefix')
        pub = data.get('address_version_byte')
        hha = data.get('header_hash_algo')
        shb = data.get('script_hash_byte')
        supported = collections.OrderedDict()
        supported['name'] = data['name']
        supported['alias'] = symbol
        if pub is not None:
            supported['pubkeyhash'] = int(pub)
        if priv:
            supported['privatekey'] = priv
        # NOTE(review): source line was whitespace-flattened; scripthash is
        # assumed unconditional (bitcore networks require it) — confirm.
        supported['scripthash'] = shb if shb else 5
        if 'transaction_form' in data:
            supported['transactionForm'] = data['transaction_form']
        if 'private_key_form' in data:
            supported['privateKeyForm'] = data['private_key_form']
        #if 'message_magic' in data and data['message_magic']:
        #    supported['networkMagic'] = '0x%s' % binascii.hexlify(data['message_magic'])
        supported['port'] = data.get('port') or None
        # only record hash algos that differ from the default double-sha256
        if hha not in (None, 'double-sha256'):
            supported['headerHashAlgo'] = hha
        if data.get('script_hash_algo', 'double-sha256') not in (None, 'double-sha256'):
            supported['scriptHashAlgo'] = data['script_hash_algo']
        if data.get('transaction_hash_algo', 'double-sha256') not in (None, 'double-sha256'):
            supported['transactionHashAlgo'] = data['transaction_hash_algo']
        if data.get('seed_nodes'):
            supported['dnsSeeds'] = data['seed_nodes']
        ret.append(supported)
    return ret
Returns the crypto_data for all currencies defined in moneywagon that also meet the minimum support for altcore . Data is keyed according to the bitcore specification .
486
33
246,022
def from_unit_to_satoshi(self, value, unit='satoshi'):
    """Convert ``value`` expressed in ``unit`` to satoshis.

    ``unit`` may be 'satoshi' (no-op), 'bitcoin'/'btc', or any fiat
    currency code, which is converted at the current exchange rate.
    """
    if not unit or unit == 'satoshi':
        return value
    if unit == 'bitcoin' or unit == 'btc':
        # int() for consistency with the fiat branch below; the original
        # returned a float here (value * 1e8).
        return int(value * 1e8)
    # assume fiat currency that we can convert
    convert = get_current_price(self.crypto, unit)
    return int(value / convert * 1e8)
Convert a value to satoshis . units can be any fiat currency . By default the unit is satoshi .
88
24
246,023
def _get_utxos(self, address, services, **modes):
    """Fetch unspent outputs for ``address`` using the service fallback
    engine."""
    return get_unspent_outputs(
        self.crypto, address, services=services, **modes)
Using the service fallback engine get utxos from remote service .
43
14
246,024
def total_input_satoshis(self):
    """Add up the satoshi amounts of every input transaction."""
    return sum(tx_in['input']['amount'] for tx_in in self.ins)
Add up all the satoshis coming from all input txs.
50
14
246,025
def select_inputs(self, amount):
    """Maximize transaction priority.

    Select the oldest inputs that are sufficient to cover the spent
    amount, then remove any unneeded inputs starting with the smallest
    in value.  Returns the sum of amounts of the inputs selected.
    """
    # oldest first: most confirmations sorts to the front
    sorted_txin = sorted(self.ins, key=lambda x: -x['input']['confirmations'])
    total_amount = 0
    for (idx, tx_in) in enumerate(sorted_txin):
        total_amount += tx_in['input']['amount']
        if (total_amount >= amount):
            break
    # keep only the prefix that covers ``amount``, smallest value first
    sorted_txin = sorted(sorted_txin[:idx + 1], key=lambda x: x['input']['amount'])
    for (idx, tx_in) in enumerate(sorted_txin):
        value = tx_in['input']['amount']
        if (total_amount - value < amount):
            # dropping this one would undershoot; keep it and the rest
            break
        else:
            total_amount -= value
    # ``idx`` from the second loop marks the first input we must keep
    self.ins = sorted_txin[idx:]
    return total_amount
Maximize transaction priority . Select the oldest inputs that are sufficient to cover the spent amount . Then remove any unneeded inputs starting with the smallest in value . Returns sum of amounts of inputs selected
199
38
246,026
def onchain_exchange(self, withdraw_crypto, withdraw_address, value, unit='satoshi'):
    """Like add_output, but sends through an on-chain exchange: funds go
    to the exchange's deposit address and are paid out in
    ``withdraw_crypto`` to ``withdraw_address``."""
    # pick the best available rate for this pair
    self.onchain_rate = get_onchain_exchange_rates(
        self.crypto, withdraw_crypto, best=True, verbose=self.verbose)
    exchange_rate = float(self.onchain_rate['rate'])
    result = self.onchain_rate['service'].get_onchain_exchange_address(
        self.crypto, withdraw_crypto, withdraw_address)
    address = result['deposit']
    value_satoshi = self.from_unit_to_satoshi(value, unit)
    if self.verbose:
        print("Adding output of: %s satoshi (%.8f) via onchain exchange, converting to %s %s" % (
            value_satoshi, (value_satoshi / 1e8),
            exchange_rate * value_satoshi / 1e8, withdraw_crypto.upper()))
    self.outs.append({'address': address, 'value': value_satoshi})
This method is like add_output but it sends to another
248
12
246,027
def fee(self, value=None, unit='satoshi'):
    """Set the miner fee.

    With no ``value`` a default of $0.02 worth of crypto is used;
    ``value='optimal'`` asks the fee estimator (add all outputs first);
    otherwise ``value`` is converted from ``unit`` (default satoshi).
    """
    convert = None
    if not value:
        # no fee was specified, use $0.02 as default.
        convert = get_current_price(self.crypto, "usd")
        self.fee_satoshi = int(0.02 / convert * 1e8)
        verbose = "Using default fee of:"
    elif value == 'optimal':
        self.fee_satoshi = get_optimal_fee(
            self.crypto, self.estimate_size(), verbose=self.verbose)
        verbose = "Using optimal fee of:"
    else:
        self.fee_satoshi = self.from_unit_to_satoshi(value, unit)
        verbose = "Using manually set fee of:"
    if self.verbose:
        # fetch the USD price only if we don't already have it
        if not convert:
            convert = get_current_price(self.crypto, "usd")
        fee_dollar = convert * self.fee_satoshi / 1e8
        print(verbose + " %s satoshis ($%.2f)" % (self.fee_satoshi, fee_dollar))
Set the miner fee if unit is not set assumes value is satoshi . If using optimal make sure you have already added all outputs .
242
27
246,028
def get_hex(self, signed=True):
    """Build the transaction hex from the accumulated inputs and outputs
    using pybitcointools, optionally signing each input.

    Adds a change output to ``self.change_address`` when inputs exceed
    outputs plus fees; raises ValueError when they fall short.
    """
    total_ins_satoshi = self.total_input_satoshis()
    if total_ins_satoshi == 0:
        raise ValueError("Can't make transaction, there are zero inputs")
    # Note: there can be zero outs (sweep or coalesc transactions)
    total_outs_satoshi = sum([x['value'] for x in self.outs])
    if not self.fee_satoshi:
        self.fee()  # use default of $0.02
    change_satoshi = total_ins_satoshi - (total_outs_satoshi + self.fee_satoshi)
    if change_satoshi < 0:
        raise ValueError(
            "Input amount (%s) must be more than all output amounts (%s) plus fees (%s). You need more %s." % (
                total_ins_satoshi, total_outs_satoshi, self.fee_satoshi,
                self.crypto.upper()))
    ins = [x['input'] for x in self.ins]
    if change_satoshi > 0:
        if self.verbose:
            print("Adding change address of %s satoshis to %s" % (
                change_satoshi, self.change_address))
        change = [{'value': change_satoshi, 'address': self.change_address}]
    else:
        change = []  # no change ?!
        if self.verbose:
            print("Inputs == Outputs, no change address needed.")
    tx = mktx(ins, self.outs + change)
    if signed:
        for i, input_data in enumerate(self.ins):
            if not input_data['private_key']:
                raise Exception(
                    "Can't sign transaction, missing private key for input %s" % i)
            tx = sign(tx, i, input_data['private_key'])
    return tx
Given all the data the user has given so far make the hex using pybitcointools
417
19
246,029
def get_current_price(crypto, fiat, services=None, convert_to=None,
                      helper_prices=None, **modes):
    """High level function for getting the current exchange rate of a
    cryptocurrency.

    Tries, in order: services with explicit fiat support, the wildcard
    ('*') service list, then a composite conversion through an
    intermediate crypto (btc/ltc/doge/uno).
    """
    fiat = fiat.lower()
    args = {'crypto': crypto, 'fiat': fiat, 'convert_to': convert_to}
    if not services:
        services = get_optimal_services(crypto, 'current_price')
    if fiat in services:
        # first, try service with explicit fiat support
        try_services = services[fiat]
        result = _try_price_fetch(try_services, args, modes)
        if not isinstance(result, Exception):
            return result
    if '*' in services:
        # then try wildcard service
        try_services = services['*']
        result = _try_price_fetch(try_services, args, modes)
        if not isinstance(result, Exception):
            return result

    def _do_composite_price_fetch(crypto, convert_crypto, fiat, helpers, modes):
        # price(crypto->fiat) = price(crypto->convert) * price(convert->fiat)
        before = modes.get('report_services', False)
        modes['report_services'] = True
        services1, converted_price = get_current_price(crypto, convert_crypto, **modes)
        if not helpers or convert_crypto not in helpers[fiat]:
            services2, fiat_price = get_current_price(convert_crypto, fiat, **modes)
        else:
            # use the pre-fetched helper price when available
            services2, fiat_price = helpers[fiat][convert_crypto]
        modes['report_services'] = before
        if modes.get('report_services', False):
            #print("composit service:", crypto, fiat, services1, services2)
            serv = CompositeService(services1, services2, convert_crypto)
            return [serv], converted_price * fiat_price
        else:
            return converted_price * fiat_price

    all_composite_cryptos = ['btc', 'ltc', 'doge', 'uno']
    if crypto in all_composite_cryptos:
        all_composite_cryptos.remove(crypto)
    for composite_attempt in all_composite_cryptos:
        if composite_attempt in services and services[composite_attempt]:
            result = _do_composite_price_fetch(
                crypto, composite_attempt, fiat, helper_prices, modes)
            if not isinstance(result, Exception):
                return result
    # NOTE(review): if no fiat/wildcard/composite attempt ran, ``result``
    # is unbound here and this raises NameError instead — confirm intent.
    raise result
High level function for getting current exchange rate for a cryptocurrency . If the fiat value is not explicitly defined it will try the wildcard service . if that does not work it tries converting to an intermediate cryptocurrency if available .
537
43
246,030
def get_onchain_exchange_rates(deposit_crypto=None, withdraw_crypto=None, **modes):
    """Gather exchange rates from every defined on-chain exchange
    service, optionally filtered by deposit/withdraw currency.

    With ``best=True`` only the single highest rate is returned.
    """
    from moneywagon.onchain_exchange import ALL_SERVICES
    rates = []
    for Service in ALL_SERVICES:
        service = Service(verbose=modes.get('verbose', False))
        rates.extend(service.onchain_exchange_rates())
    if deposit_crypto:
        wanted = deposit_crypto.upper()
        rates = [r for r in rates if r['deposit_currency']['code'] == wanted]
    if withdraw_crypto:
        wanted = withdraw_crypto.upper()
        rates = [r for r in rates if r['withdraw_currency']['code'] == wanted]
    if modes.get('best', False):
        return max(rates, key=lambda r: float(r['rate']))
    return rates
Gets exchange rates for all defined on - chain exchange services .
207
13
246,031
def generate_keypair(crypto, seed, password=None):
    """Generate a private/public keypair for ``crypto`` from ``seed``.

    The seed can be random bytes or a brainwallet phrase.  When
    ``password`` is given the WIF key is BIP38-encrypted and the
    plain-hex private forms are omitted from the result.
    """
    if crypto in ['eth', 'etc']:
        raise CurrencyNotSupported("Ethereums not yet supported")
    pub_byte, priv_byte = get_magic_bytes(crypto)
    priv = sha256(seed)
    pub = privtopub(priv)
    priv_wif = encode_privkey(priv, 'wif_compressed', vbyte=priv_byte)
    if password:
        # pycrypto etc. must be installed or this will raise ImportError, hence inline import.
        from .bip38 import Bip38EncryptedPrivateKey
        priv_wif = str(Bip38EncryptedPrivateKey.encrypt(crypto, priv_wif, password))
    compressed_pub = encode_pubkey(pub, 'hex_compressed')
    ret = {
        'public': {
            'hex_uncompressed': pub,
            'hex': compressed_pub,
            'address': pubtoaddr(compressed_pub, pub_byte)
        },
        'private': {
            'wif': priv_wif
        }
    }
    if not password:
        # only these are valid when no bip38 password is supplied
        ret['private']['hex'] = encode_privkey(priv, 'hex_compressed', vbyte=priv_byte)
        ret['private']['hex_uncompressed'] = encode_privkey(priv, 'hex', vbyte=priv_byte)
        ret['private']['wif_uncompressed'] = encode_privkey(priv, 'wif', vbyte=priv_byte)
    return ret
Generate a private key and publickey for any currency given a seed . That seed can be random or a brainwallet phrase .
363
26
246,032
def sweep(crypto, private_key, to_address, fee=None, password=None, **modes):
    """Move all funds controlled by ``private_key`` to ``to_address``
    and broadcast the transaction."""
    from moneywagon.tx import Transaction
    transaction = Transaction(crypto, verbose=modes.get('verbose', False))
    transaction.add_inputs(private_key=private_key, password=password, **modes)
    transaction.change_address = to_address
    transaction.fee(fee)
    return transaction.push()
Move all funds by private key to another address .
98
10
246,033
def guess_currency_from_address(address):
    """Given a crypto address, find which currencies it likely belongs to.

    Returns a list of ``[symbol, name]`` hits.  Raises ValueError when no
    known currency matches; b58decode_check raises if the address is
    invalid.
    """
    if is_py2:
        fixer = lambda x: int(x.encode('hex'), 16)
    else:
        fixer = lambda x: x  # does nothing
    # decode once; the original called b58decode_check(address) twice
    decoded = b58decode_check(address)
    first_byte = fixer(decoded[0])
    double_first_byte = fixer(decoded[:2])
    hits = []
    for currency, data in crypto_data.items():
        if hasattr(data, 'get'):  # skip incomplete data listings
            version = data.get('address_version_byte', None)
            if version is not None and version in [double_first_byte, first_byte]:
                hits.append([currency, data['name']])
    if hits:
        return hits
    raise ValueError("Unknown Currency with first byte: %s" % first_byte)
Given a crypto address, find which currency it likely belongs to. Raises an exception if it can't find a match. Raises an exception if the address is invalid.
200
32
246,034
def service_table(format='simple', authenticated=False):
    """Return a string table describing all currently installed services.

    ``format`` is passed to tabulate; 'html' also turns URLs into links.
    """
    if authenticated:
        all_services = ExchangeUniverse.get_authenticated_services()
    else:
        all_services = ALL_SERVICES
    if format == 'html':
        linkify = lambda x: "<a href='{0}' target='_blank'>{0}</a>".format(x)
    else:
        linkify = lambda x: x
    rows = []
    for service in sorted(all_services, key=lambda s: s.service_id):
        homepage = service.api_homepage.format(
            domain=service.domain, protocol=service.protocol)
        rows.append([
            service.service_id,
            service.__name__,
            linkify(homepage),
            ", ".join(service.supported_cryptos or []),
        ])
    return tabulate(rows, headers=['ID', 'Name', 'URL', 'Supported Currencies'],
                    tablefmt=format)
Returns a string depicting all services currently installed .
211
9
246,035
def find_pair(self, crypto="", fiat="", verbose=False):
    """Find exchanges that support a given pair.

    Either ``crypto`` or ``fiat`` (or both) must be given; returns a
    mapping of service to its matching 'crypto-fiat' pair strings.
    """
    self.fetch_pairs()
    if not crypto and not fiat:
        raise Exception("Fiat or Crypto required")

    def matches(pair):
        if crypto and fiat:
            return pair == "%s-%s" % (crypto, fiat)
        if crypto:
            return pair.startswith("%s-" % crypto)
        return pair.endswith("-%s" % fiat)

    matched_pairs = {}
    for Service, pairs in self._all_pairs.items():
        hits = [p for p in pairs if matches(p)]
        if hits:
            matched_pairs[Service] = hits
    return matched_pairs
This utility is used to find an exchange that supports a given exchange pair .
177
15
246,036
def all_balances(currency, services=None, verbose=False, timeout=None):
    """Get ``currency`` balances from every authenticated exchange.

    Exchanges that fail or don't implement balances are skipped
    (logged when verbose).
    """
    if not services:
        services = [Service(verbose=verbose, timeout=timeout)
                    for Service in ExchangeUniverse.get_authenticated_services()]
    balances = {}
    for exchange in services:
        try:
            balances[exchange] = exchange.get_exchange_balance(currency)
        except NotImplementedError:
            if verbose:
                print(exchange.name, "balance not implemented")
        except Exception as exc:
            if verbose:
                print(exchange.name, "failed:", exc.__class__.__name__, str(exc))
    return balances
Get balances for passed in currency for all exchanges .
143
10
246,037
def total_exchange_balances(services=None, verbose=None, timeout=None,
                            by_service=False):
    """Return balances for all currencies across all authenticated
    exchanges, summed per currency (or keyed per service class when
    ``by_service`` is set)."""
    balances = defaultdict(lambda: 0)
    if not services:
        services = [Service(verbose=verbose, timeout=timeout)
                    for Service in ExchangeUniverse.get_authenticated_services()]
    for exchange in services:
        try:
            more = exchange.get_total_exchange_balances()
            if by_service:
                balances[exchange.__class__] = more
            else:
                for code, amount in more.items():
                    balances[code] += amount
        except NotImplementedError:
            if verbose:
                print(exchange.name, "total balance not implemented")
        except Exception as exc:
            if verbose:
                print(exchange.name, "failed:", exc.__class__.__name__, str(exc))
    return balances
Returns all balances for all currencies for all exchanges
198
9
246,038
def compress(x, y):
    """Encode an (x, y) point in 33-byte compressed form: one parity
    prefix byte followed by the 32-byte x coordinate."""
    prefix = "02" if y % 2 == 0 else "03"
    wrap = lambda s: s
    if not is_py2:
        wrap = lambda s: bytes(s, 'ascii')
    return unhexlify(wrap("%s%0.64x" % (prefix, x)))
Given a x y coordinate encode in compressed format Returned is always 33 bytes .
77
16
246,039
def decrypt(self, passphrase, wif=False):
    """BIP0038 non-ec-multiply decryption.  Returns hex (or wif) privkey.

    Raises if the passphrase's derived address hash does not match the
    one stored with the encrypted key.
    """
    passphrase = normalize('NFC', unicode(passphrase))
    if is_py2:
        passphrase = passphrase.encode('utf8')
    if self.ec_multiply:
        raise Exception("Not supported yet")
    # scrypt-derive 64 bytes: first half xors the key, second is the AES key
    key = scrypt.hash(passphrase, self.addresshash, 16384, 8, 8)
    derivedhalf1 = key[0:32]
    derivedhalf2 = key[32:64]
    aes = AES.new(derivedhalf2)
    decryptedhalf2 = aes.decrypt(self.encryptedhalf2)
    decryptedhalf1 = aes.decrypt(self.encryptedhalf1)
    priv = decryptedhalf1 + decryptedhalf2
    # xor the decrypted halves with derivedhalf1 to recover the raw key
    priv = unhexlify('%064x' % (long(hexlify(priv), 16) ^ long(hexlify(derivedhalf1), 16)))
    pub = privtopub(priv)
    if self.compressed:
        pub = encode_pubkey(pub, 'hex_compressed')
    addr = pubtoaddr(pub, self.pub_byte)
    if is_py2:
        ascii_key = addr
    else:
        ascii_key = bytes(addr, 'ascii')
    # verify: first 4 bytes of double-sha256 of the address must match
    if sha256(sha256(ascii_key).digest()).digest()[0:4] != self.addresshash:
        raise Exception('Bip38 password decrypt failed: Wrong password?')
    else:
        formatt = 'wif' if wif else 'hex'
        if self.compressed:
            return encode_privkey(priv, formatt + '_compressed', self.priv_byte)
        else:
            return encode_privkey(priv, formatt, self.priv_byte)
BIP0038 non - ec - multiply decryption . Returns hex privkey .
395
16
246,040
def encrypt(cls, crypto, privkey, passphrase):
    """BIP0038 non-ec-multiply encryption. Returns a BIP0038 encrypted
    privkey wrapped in ``cls``.
    """
    pub_byte, priv_byte = get_magic_bytes(crypto)
    privformat = get_privkey_format(privkey)
    # Normalize the private key to plain hex, remembering compression.
    if privformat in ['wif_compressed', 'hex_compressed']:
        compressed = True
        flagbyte = b'\xe0'
        if privformat == 'wif_compressed':
            privkey = encode_privkey(privkey, 'hex_compressed')
            privformat = get_privkey_format(privkey)
    if privformat in ['wif', 'hex']:
        compressed = False
        flagbyte = b'\xc0'
        if privformat == 'wif':
            privkey = encode_privkey(privkey, 'hex')
            privformat = get_privkey_format(privkey)
    pubkey = privtopub(privkey)
    addr = pubtoaddr(pubkey, pub_byte)
    passphrase = normalize('NFC', unicode(passphrase))
    if is_py2:
        ascii_key = addr
        passphrase = passphrase.encode('utf8')
    else:
        ascii_key = bytes(addr, 'ascii')
    # Salt is the first four bytes of the double-SHA256 of the address.
    salt = sha256(sha256(ascii_key).digest()).digest()[0:4]
    key = scrypt.hash(passphrase, salt, 16384, 8, 8)
    derivedhalf1, derivedhalf2 = key[:32], key[32:]
    aes = AES.new(derivedhalf2)
    # XOR each 16-byte half of the hex key with derivedhalf1, then AES-encrypt.
    encryptedhalf1 = aes.encrypt(unhexlify('%0.32x' % (long(privkey[0:32], 16) ^ long(hexlify(derivedhalf1[0:16]), 16))))
    encryptedhalf2 = aes.encrypt(unhexlify('%0.32x' % (long(privkey[32:64], 16) ^ long(hexlify(derivedhalf1[16:32]), 16))))
    # 39 bytes    2 (6P)      1(R/Y)     4          16               16
    payload = b'\x01\x42' + flagbyte + salt + encryptedhalf1 + encryptedhalf2
    return cls(crypto, b58encode_check(payload))
BIP0038 non - ec - multiply encryption . Returns BIP0038 encrypted privkey .
520
19
246,041
def create_from_intermediate(cls, crypto, intermediate_point, seed, compressed=True, include_cfrm=True):
    """Given an intermediate point supplied by the owner, generate an
    address and an encrypted private key that can later be decrypted by
    the passphrase used to generate the intermediate point.

    Returns ``(address, encrypted_key)`` when ``include_cfrm`` is False,
    otherwise ``(address, cls(...), confirmation_code)``.
    """
    flagbyte = b'\x20' if compressed else b'\x00'
    payload = b58decode_check(str(intermediate_point))
    # Intermediate point layout: 8-byte magic, 8-byte ownerentropy,
    # then the compressed passpoint.
    ownerentropy = payload[8:16]
    passpoint = payload[16:-4]
    x, y = uncompress(passpoint)
    if not is_py2:
        seed = bytes(seed, 'ascii')
    seedb = hexlify(sha256(seed).digest())[:24]
    factorb = int(hexlify(sha256(sha256(seedb).digest()).digest()), 16)
    # The generated address comes from the passpoint multiplied by factorb.
    generatedaddress = pubtoaddr(fast_multiply((x, y), factorb))
    wrap = lambda x: x
    if not is_py2:
        wrap = lambda x: bytes(x, 'ascii')
    addresshash = sha256(sha256(wrap(generatedaddress)).digest()).digest()[:4]
    encrypted_seedb = scrypt.hash(passpoint, addresshash + ownerentropy, 1024, 1, 1, 64)
    derivedhalf1, derivedhalf2 = encrypted_seedb[:32], encrypted_seedb[32:]
    aes = AES.new(derivedhalf2)
    block1 = long(seedb[0:16], 16) ^ long(hexlify(derivedhalf1[0:16]), 16)
    encryptedpart1 = aes.encrypt(unhexlify('%0.32x' % block1))
    block2 = long(hexlify(encryptedpart1[8:16]) + seedb[16:24], 16) ^ long(hexlify(derivedhalf1[16:32]), 16)
    encryptedpart2 = aes.encrypt(unhexlify('%0.32x' % block2))
    # 39 bytes 2 1 4 8 8 16
    payload = b"\x01\x43" + flagbyte + addresshash + ownerentropy + encryptedpart1[:8] + encryptedpart2
    encrypted_pk = b58encode_check(payload)
    if not include_cfrm:
        return generatedaddress, encrypted_pk
    confirmation_code = Bip38ConfirmationCode.create(
        flagbyte, ownerentropy, factorb, derivedhalf1, derivedhalf2, addresshash
    )
    return generatedaddress, cls(crypto, encrypted_pk), confirmation_code
Given an intermediate point given to us by owner generate an address and encrypted private key that can be decoded by the passphrase used to generate the intermediate point .
574
32
246,042
def generate_address(self, passphrase):
    """Regenerate the address from the passphrase (confirmation-code check).

    NOTE(review): unfinished — this always raises "Not done yet" before
    producing a result, and the statements after the raise are unreachable
    (``pointb2``, ``pointb`` and ``passfactor`` are never defined).
    """
    inter = Bip38IntermediatePoint.create(passphrase, ownersalt=self.ownersalt)
    public_key = privtopub(inter.passpoint)
    # from Bip38EncryptedPrivateKey.create_from_intermediate
    derived = scrypt.hash(inter.passpoint, self.addresshash + inter.ownerentropy, 1024, 1, 1, 64)
    derivedhalf1, derivedhalf2 = derived[:32], derived[32:]
    unencrypted_prefix = bytes_to_int(self.pointbprefix) ^ (bytes_to_int(derived[63]) & 0x01)
    aes = AES.new(derivedhalf2)
    block1 = aes.decrypt(self.pointbx1)
    block2 = aes.decrypt(self.pointbx2)
    raise Exception("Not done yet")
    return
    # Unreachable work-in-progress code below.
    block2 = long(hexlify(pointb2), 16) ^ long(hexlify(derivedhalf1[16:]), 16)
    return pubtoaddr(*fast_multiply(pointb, passfactor))
Make sure the confirm code is valid for the given password and address .
252
14
246,043
def push_tx(self, crypto, tx_hex):
    """Broadcast a raw transaction via the service's pushtx endpoint.

    This method is untested.
    """
    endpoint = "%s/pushtx" % self.base_url
    response = self.post_url(endpoint, {'hex': tx_hex})
    return response.content
This method is untested .
49
6
246,044
def replay_block(self, block_to_replay, limit=5):
    """Replay all transactions from a parent-currency block onto the
    destination currency's chain.

    ``block_to_replay`` may be an integer block number, an already-fetched
    block dict, or the string 'latest'.  ``limit`` caps how many of the
    block's transactions are replayed (a falsy limit means all of them).

    Returns the list of results from ``_replay_tx`` for each transaction
    (previously the results were discarded).
    """
    if block_to_replay == 'latest':
        if self.verbose:
            # BUG FIX: was `source.upper()` — a NameError, the attribute
            # lives on self.
            print("Getting latest %s block header" % self.source.upper())
        block = get_block(self.source, latest=True, verbose=self.verbose)
        if self.verbose:
            print("Latest %s block is #%s" % (self.source.upper(), block['block_number']))
    else:
        blocknum = block_to_replay if type(block_to_replay) == int else block_to_replay['block_number']
        if blocknum < self.parent_fork_block or blocknum < self.child_fork_block:
            raise Exception("Can't replay blocks mined before the fork")
        if type(block_to_replay) is not dict:
            if self.verbose:
                print("Getting %s block header #%s" % (self.source.upper(), block_to_replay))
            block = get_block(self.source, block_number=int(block_to_replay), verbose=self.verbose)
        else:
            block = block_to_replay
    if self.verbose:
        print("Using %s for pushing to %s" % (self.pusher.name, self.destination.upper()))
        print("Using %s for getting %s transactions" % (self.tx_fetcher.name, self.source.upper()))
        print("Finished getting block header,", len(block['txids']),
              "transactions in block, will replay", (limit or "all of them"))
    results = []
    enforced_limit = (limit or len(block['txids']))
    for i, txid in enumerate(block['txids'][:enforced_limit]):
        # Collect each replay result; a stray debug print was removed here.
        results.append(self._replay_tx(txid, i))
    return results
Replay all transactions in parent currency to passed in source currency . Block_to_replay can either be an integer or a block object .
457
29
246,045
def get_block_adjustments(crypto, points=None, intervals=None, **modes):
    """Determine the actual historical block rate of a currency.

    The output can be copied directly into the ``blocktime_adjustments``
    setting: a list of ``[block_height, minutes_per_block]`` pairs.

    Sample heights come from ``points`` (explicit list), from
    ``intervals`` (evenly spaced heights up to the latest block), or both.
    """
    from moneywagon import get_block
    all_points = []
    if intervals:
        latest_block_height = get_block(crypto, latest=True, **modes)['block_number']
        interval = int(latest_block_height / float(intervals))
        all_points = [x * interval for x in range(1, intervals - 1)]
    if points:
        all_points.extend(points)
    all_points.sort()
    adjustments = []
    previous_point = 0
    # Start timing from the genesis date, falling back to block 0's time.
    previous_time = (
        crypto_data[crypto.lower()].get('genesis_date').replace(tzinfo=pytz.UTC)
        or get_block(crypto, block_number=0, **modes)['time']
    )
    for point in all_points:
        if point == 0:
            continue
        point_time = get_block(crypto, block_number=point, **modes)['time']
        length = point - previous_point
        minutes = (point_time - previous_time).total_seconds() / 60
        rate = minutes / length
        adjustments.append([previous_point, rate])
        previous_time = point_time
        previous_point = point
    return adjustments
This utility is used to determine the actual block rate . The output can be directly copied to the blocktime_adjustments setting .
283
26
246,046
def _per_era_supply(self, block_height):
    """Compute total coin supply at ``block_height`` from per-era rewards.

    Each era in ``self.supply_data['eras']`` defines a start block, an end
    block (falsy for the open-ended final era) and a per-block reward.
    Completed eras contribute reward * era_length; the era containing
    ``block_height`` contributes only the blocks mined so far.  Needed
    because some currencies have no simple algorithmic halving schedule.
    """
    total = 0
    for era in self.supply_data['eras']:
        first = era['start']
        last = era['end']
        per_block = era['reward']
        if not last or block_height <= last:
            # This era is still in progress at block_height.
            total += (block_height - first) * per_block
            break
        total += per_block * (last - first)
    return total
Calculate the coin supply based on eras defined in crypto_data . Some currencies don t have a simple algorithmically defined halfing schedule so coins supply has to be defined explicitly per era .
124
39
246,047
def _prepare_consensus(FetcherClass, results):
    """Simplify raw fetch results so consensus can be determined.

    ``results`` is a list of two-item [fetcher, value] pairs; only the
    values participate in consensus, optionally run through
    ``FetcherClass.strip_for_consensus`` when the class defines it.

    Returns a two-item tuple: the simplified value list, and the list of
    services that produced each result.
    """
    if hasattr(FetcherClass, "strip_for_consensus"):
        simplified = [FetcherClass.strip_for_consensus(value) for fetcher, value in results]
    else:
        simplified = [value for fetcher, value in results]
    used_services = [fetcher._successful_service for fetcher, value in results]
    return simplified, used_services
Given a list of results return a list that is simplified to make consensus determination possible . Returns two item tuple first arg is simplified list the second argument is a list of all services used in making these results .
140
41
246,048
def _get_results(FetcherClass, services, kwargs, num_results=None, fast=0, verbose=False, timeout=None):
    """Perform the fetches in multiple threads if needed. Used by paranoid
    and fast modes.

    Each of the first ``num_results`` services gets its own FetcherClass
    instance with that service first in a shuffled fallback order; fetches
    run concurrently and [fetcher, result] pairs are returned.
    """
    results = []
    if not num_results or fast:
        num_results = len(services)
    with futures.ThreadPoolExecutor(max_workers=len(services)) as executor:
        fetches = {}
        for service in services[:num_results]:
            # Put this service first, with the rest shuffled as fallbacks.
            tail = [x for x in services if x is not service]
            random.shuffle(tail)
            srv = FetcherClass(services=[service] + tail, verbose=verbose, timeout=timeout)
            fetches[executor.submit(srv.action, **kwargs)] = srv
        if fast == 1:
            raise NotImplementedError
            # ths code is a work in progress. futures.FIRST_COMPLETED works differently than I thought...
            to_iterate, still_going = futures.wait(fetches, return_when=futures.FIRST_COMPLETED)
            for x in still_going:
                try:
                    x.result(timeout=1.001)
                except futures._base.TimeoutError:
                    pass
        elif fast > 1:
            raise Exception("fast level greater than 1 not yet implemented")
        else:
            to_iterate = futures.as_completed(fetches)
        for future in to_iterate:
            service = fetches[future]
            results.append([service, future.result()])
    return results
Does the fetching in multiple threads of needed . Used by paranoid and fast mode .
320
17
246,049
def _do_private_mode(FetcherClass, services, kwargs, random_wait_seconds, timeout, verbose):
    """Fetch each address through its own randomly ordered service list.

    Private mode only applies to address_balance, unspent_outputs and
    historical_transactions; ``kwargs['addresses']`` is always a list.
    Each address goes to a random service, and a random delay is performed
    before the external fetch for improved privacy.  Returns a dict keyed
    by address.
    """
    addresses = kwargs.pop('addresses')
    results = {}
    with futures.ThreadPoolExecutor(max_workers=len(addresses)) as executor:
        fetches = {}
        for address in addresses:
            k = kwargs
            k['address'] = address
            random.shuffle(services)
            srv = FetcherClass(
                services=services, verbose=verbose, timeout=timeout or 5.0,
                random_wait_seconds=random_wait_seconds
            )
            # address is returned because balance needs to be returned
            # attached to the address. Other methods (get_transaction, unspent_outputs, etc)
            # do not need to be indexed by address. (upstream they are stripped out)
            fetches[executor.submit(srv.action, **k)] = (srv, address)
        to_iterate = futures.as_completed(fetches)
        for future in to_iterate:
            service, address = fetches[future]
            results[address] = future.result()
    return results
Private mode is only applicable to address_balance unspent_outputs and historical_transactions . There will always be a list for the addresses argument . Each address goes to a random service . Also a random delay is performed before the external fetch for improved privacy .
254
54
246,050
def currency_to_protocol(amount):
    """Convert an amount of currency units to integer protocol units.

    For instance, 19.1 bitcoin becomes 1910000000 satoshis.

    Accepts floats, ints, or strings.  BUG FIX: string inputs no longer
    need all eight decimal places — previously "19.1" was mis-converted
    to 191 because the decimal point was simply stripped; the fractional
    part is now zero-padded to eight digits first.

    Raises ValueError if a string carries more than eight decimal places.
    """
    if type(amount) in [float, int]:
        amount = "%.8f" % amount
    whole, _, frac = amount.partition(".")
    if len(frac) > 8:
        raise ValueError("too many decimal places: %s" % amount)
    return int(whole + frac.ljust(8, "0"))
Convert a string of currency units to protocol units . For instance converts 19 . 1 bitcoin to 1910000000 satoshis .
45
25
246,051
def to_rawtx(tx):
    """Convert a tx object in the moneywagon format to raw hex.

    If the tx already carries its raw 'hex', that is returned directly;
    otherwise the dict is reshaped into the structure pybitcointools'
    ``serialize`` function expects and serialized.
    """
    if tx.get('hex'):
        return tx['hex']
    new_tx = {}
    locktime = tx.get('locktime', 0)
    new_tx['locktime'] = locktime
    new_tx['version'] = tx.get('version', 1)
    # Inputs: default sequence is 0xFFFFFFFF only when locktime is unused.
    new_tx['ins'] = [{
        'outpoint': {'hash': str(x['txid']), 'index': x['n']},
        'script': str(x['scriptSig'].replace(' ', '')),
        'sequence': x.get('sequence', 0xFFFFFFFF if locktime == 0 else None)
    } for x in tx['inputs']]
    new_tx['outs'] = [{
        'script': str(x['scriptPubKey']),
        'value': x['amount']
    } for x in tx['outputs']]
    return serialize(new_tx)
Take a tx object in the moneywagon format and convert it to the format that pybitcointools s serialize funcion takes then return in raw hex format .
238
34
246,052
def check_error(self, response):
    """Raise ServiceError when the HTTP response indicates a failure.

    Subclasses may override this to raise other exceptions, such as
    SkipThisService.
    """
    code = response.status_code
    if code == 500:
        raise ServiceError("500 - " + response.content)
    if code == 503:
        # NOTE(review): response.content is bytes on Python 3, so this
        # substring test presumably expects a text body — verify.
        if "DDoS protection by Cloudflare" in response.content:
            raise ServiceError("Foiled by Cloudfare's DDoS protection")
        raise ServiceError("503 - Temporarily out of service.")
    simple_messages = {
        429: "429 - Too many requests",
        404: "404 - Not Found",
        400: "400 - Bad Request",
    }
    if code in simple_messages:
        raise ServiceError(simple_messages[code])
If the service is returning an error, this function should raise an exception, such as SkipThisService .
149
19
246,053
def convert_currency(self, base_fiat, base_amount, target_fiat):
    """Convert an amount in one fiat currency to another via fixer.io."""
    rates_doc = self.get_url("http://api.fixer.io/latest?base=%s" % base_fiat).json()
    try:
        rate = rates_doc['rates'][target_fiat.upper()]
    except KeyError:
        raise Exception("Can not convert %s to %s" % (base_fiat, target_fiat))
    return rate * base_amount
Convert one fiat amount to another fiat . Uses the fixer . io service .
113
17
246,054
def fix_symbol(self, symbol, reverse=False):
    """Translate between moneywagon-format and service-native symbols.

    With reverse=False a moneywagon symbol is mapped to the service's own
    symbol; with reverse=True the translation goes the other way.  Symbols
    with no mapping entry pass through unchanged.
    """
    if not self.symbol_mapping:
        return symbol
    for ours, theirs in self.symbol_mapping:
        source, target = (theirs, ours) if reverse else (ours, theirs)
        if symbol == source:
            return target
    return symbol
In comes a moneywagon format symbol and returned in the symbol converted to one the service can understand .
59
20
246,055
def parse_market(self, market, split_char='_'):
    """Split a service-native market string into moneywagon identifiers.

    Returns a ``(crypto, fiat)`` tuple, each converted back to moneywagon
    format via ``fix_symbol(..., reverse=True)``.
    """
    crypto_part, fiat_part = market.lower().split(split_char)
    return (
        self.fix_symbol(crypto_part, reverse=True),
        self.fix_symbol(fiat_part, reverse=True),
    )
In comes the market identifier directly from the service . Returned is the crypto and fiat identifier in moneywagon format .
64
23
246,056
def make_market(self, crypto, fiat, seperator="_"):
    """Build this service's market identifier from crypto and fiat symbols.

    Exchanges each use their own market format; subclasses can override
    this default implementation.
    """
    pair = "%s%s%s" % (self.fix_symbol(crypto), seperator, self.fix_symbol(fiat))
    return pair.lower()
Convert a crypto and fiat to a market string . All exchanges use their own format for specifying markets . Subclasses can define their own implementation .
59
29
246,057
def _external_request(self, method, url, *args, **kwargs):
    """Wrapper for requests.<method> with the useragent set automatically.

    GET responses are cached on the instance and served from the cache on
    repeat requests for the same URL.  Raises via ``check_error`` when the
    response indicates a failure.
    """
    self.last_url = url
    if url in self.responses.keys() and method == 'get':
        return self.responses[url]  # return from cache if its there
    headers = kwargs.pop('headers', None)
    custom = {'User-Agent': useragent}
    if headers:
        headers.update(custom)
        kwargs['headers'] = headers
    else:
        kwargs['headers'] = custom
    if self.timeout:
        # add timeout parameter to requests.get if one was passed in on construction...
        kwargs['timeout'] = self.timeout
    start = datetime.datetime.now()
    response = getattr(requests, method)(url, verify=self.ssl_verify, *args, **kwargs)
    self.total_external_fetch_duration += datetime.datetime.now() - start
    if self.verbose:
        print("Got Response: %s (took %s)" % (url, (datetime.datetime.now() - start)))
    self.last_raw_response = response
    self.check_error(response)
    if method == 'get':
        self.responses[url] = response  # cache for later
    return response
Wrapper for requests . get with useragent automatically set . And also all requests are reponses are cached .
291
23
246,058
def get_block(self, crypto, block_hash='', block_number='', latest=False):
    """Fetch a block by hash, by height, or the latest one.

    Exactly one of ``block_hash``, ``block_number`` or ``latest`` should
    be supplied.  Base implementation: always raises NotImplementedError;
    services that can fetch blocks override this.
    """
    message = (
        self.name + " does not support getting getting block data. "
        "Or rather it has no defined 'get_block' method."
    )
    raise NotImplementedError(message)
Get block based on either block height block number or get the latest block . Only one of the previous arguments must be passed on .
62
26
246,059
def make_order(self, crypto, fiat, amount, price, type="limit"):
    """Buy or sell crypto on an exchange using fiat balance.

    ``type`` may be e.g. fill-or-kill, post-only, market or limit; consult
    ``make_order.supported_types`` on subclasses when defined.  Base
    implementation: always raises NotImplementedError.
    """
    message = (
        self.name + " does not support making orders. "
        "Or rather it has no defined 'make_order' method."
    )
    raise NotImplementedError(message)
This method buys or sells crypto on an exchange using fiat balance . Type can either be fill - or - kill post - only market or limit . To get what modes are supported consult make_order . supported_types if one is defined .
56
48
246,060
def _try_services(self, method_name, *args, **kwargs):
    """Try each service until one returns a response.

    Only the bare minimum of exceptions from the service classes are
    caught here — we want errors to surface so broken service classes can
    be debugged and fixed quickly.  Raises CurrencyNotSupported,
    NotImplementedError, RevertToPrivateMode or NoService depending on
    why every service failed.
    """
    crypto = ((args and args[0]) or kwargs['crypto']).lower()
    address = kwargs.get('address', '').lower()
    fiat = kwargs.get('fiat', '').lower()
    if not self.services:
        raise CurrencyNotSupported("No services defined for %s for %s" % (method_name, crypto))
    if self.random_wait_seconds > 0:
        # for privacy... To avoid correlating addresses to same origin
        # only gets called before the first service call. Does not pause
        # before each and every call.
        pause_time = random.random() * self.random_wait_seconds
        if self.verbose:
            print("Pausing for: %.2f seconds" % pause_time)
        time.sleep(pause_time)
    for service in self.services:
        if service.supported_cryptos and (crypto not in service.supported_cryptos):
            if self.verbose:
                print("SKIP:", "%s not supported for %s" % (crypto, service.__class__.__name__))
            continue
        try:
            if self.verbose:
                print("* Trying:", service, crypto, "%s%s" % (address, fiat))
            ret = getattr(service, method_name)(*args, **kwargs)
            self._successful_service = service
            return ret
        except (KeyError, IndexError, TypeError, ValueError,
                requests.exceptions.Timeout, requests.exceptions.ConnectionError) as exc:
            # API has probably changed, therefore service class broken
            if self.verbose:
                print("FAIL:", service, exc.__class__.__name__, exc)
            self._failed_services.append({
                'service': service,
                'error': "%s %s" % (exc.__class__.__name__, exc)
            })
        except NoService as exc:
            # service classes can raise this exception if for whatever reason
            # that service can't return a response, but maybe another one can.
            if self.verbose:
                print("SKIP:", exc.__class__.__name__, exc)
            self._failed_services.append({
                'service': service,
                'error': "Skipped: %s" % str(exc)
            })
        except NotImplementedError as exc:
            if self.verbose:
                print("SKIP:", exc.__class__.__name__, exc)
            self._failed_services.append({
                'service': service,
                'error': "Not Implemented"
            })
    if not self._failed_services:
        raise NotImplementedError("No Services defined for %s and %s" % (crypto, method_name))
    if set(x['error'] for x in self._failed_services) == set(['Not Implemented']) and method_name.endswith("multi"):
        # some currencies may not have any multi functions defined, so retry
        # with private mode (which tries multiple services).
        raise RevertToPrivateMode("All services do not implement %s service" % method_name)
    failed_msg = ', '.join(["{service.name} -> {error}".format(**x) for x in self._failed_services])
    raise NoService(self.no_service_msg(*args, **kwargs) + "! Tried: " + failed_msg)
Try each service until one returns a response . This function only catches the bare minimum of exceptions from the service class . We want exceptions to be raised so the service classes can be debugged and fixed quickly .
793
41
246,061
def uconcatenate(arrs, axis=0):
    """Concatenate a sequence of arrays, validating that units agree."""
    joined = np.concatenate(arrs, axis=axis)
    return _validate_numpy_wrapper_units(joined, arrs)
Concatenate a sequence of arrays .
50
9
246,062
def ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None):
    """Cross product of two unyt arrays.

    The result units are the product of the two inputs' units.
    """
    raw = np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis)
    product_units = arr1.units * arr2.units
    return unyt_array(raw, product_units, registry=registry)
Applies the cross product to two YT arrays .
99
11
246,063
def uintersect1d(arr1, arr2, assume_unique=False):
    """Sorted unique elements common to both input arrays, with units."""
    common = np.intersect1d(arr1, arr2, assume_unique=assume_unique)
    return _validate_numpy_wrapper_units(common, [arr1, arr2])
Find the sorted unique elements of the two input arrays .
67
11
246,064
def uunion1d(arr1, arr2):
    """Union of two arrays, preserving units."""
    combined = np.union1d(arr1, arr2)
    return _validate_numpy_wrapper_units(combined, [arr1, arr2])
Find the union of two arrays .
52
7
246,065
def unorm(data, ord=None, axis=None, keepdims=False):
    """Matrix or vector norm that preserves units."""
    result = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims)
    if result.shape == ():
        # Scalar result: wrap as a quantity rather than an array.
        return unyt_quantity(result, data.units)
    return unyt_array(result, data.units)
Matrix or vector norm that preserves units
82
7
246,066
def udot(op1, op2):
    """Matrix or vector dot product that preserves units.

    The result units are the product of the operands' units.
    """
    raw = np.dot(op1.d, op2.d)
    product_units = op1.units * op2.units
    if raw.shape == ():
        return unyt_quantity(raw, product_units)
    return unyt_array(raw, product_units)
Matrix or vector dot product that preserves units
67
8
246,067
def uhstack(arrs):
    """Stack arrays in sequence horizontally while preserving units."""
    stacked = np.hstack(arrs)
    return _validate_numpy_wrapper_units(stacked, arrs)
Stack arrays in sequence horizontally while preserving units
38
8
246,068
def ustack(arrs, axis=0):
    """Join a sequence of arrays along a new axis while preserving units."""
    stacked = np.stack(arrs, axis=axis)
    return _validate_numpy_wrapper_units(stacked, arrs)
Join a sequence of arrays along a new axis while preserving units
45
12
246,069
def loadtxt(fname, dtype="float", delimiter="\t", usecols=None, comments="#"):
    r"""Load unyt_arrays with unit information from a text file.

    Each row in the text file must have the same number of values.  Unit
    names are taken from a comment line of the form ``# Units`` followed
    by a comment line listing one unit per column; columns fall back to
    dimensionless when no such header is found.
    """
    f = open(fname, "r")
    next_one = False
    units = []
    num_cols = -1
    for line in f.readlines():
        words = line.strip().split()
        if len(words) == 0:
            continue
        if line[0] == comments:
            # The line after a "# Units" marker holds the unit names.
            if next_one:
                units = words[1:]
            if len(words) == 2 and words[1] == "Units":
                next_one = True
        else:
            # Here we catch the first line of numbers
            col_words = line.strip().split(delimiter)
            for word in col_words:
                float(word)
            num_cols = len(col_words)
            break
    f.close()
    if len(units) != num_cols:
        units = ["dimensionless"] * num_cols
    arrays = np.loadtxt(
        fname,
        dtype=dtype,
        comments=comments,
        delimiter=delimiter,
        converters=None,
        unpack=True,
        usecols=usecols,
        ndmin=0,
    )
    if len(arrays.shape) < 2:
        arrays = [arrays]
    if usecols is not None:
        units = [units[col] for col in usecols]
    ret = tuple([unyt_array(arr, unit) for arr, unit in zip(arrays, units)])
    if len(ret) == 1:
        return ret[0]
    return ret
r Load unyt_arrays with unit information from a text file . Each row in the text file must have the same number of values .
341
29
246,070
def savetxt(fname, arrays, fmt="%.18e", delimiter="\t", header="", footer="", comments="#"):
    r"""Write unyt_arrays with unit information to a text file.

    A ``Units`` comment line listing each column's units is appended to
    the header so the file can be read back with ``loadtxt``.
    """
    if not isinstance(arrays, list):
        arrays = [arrays]
    units = []
    for array in arrays:
        if hasattr(array, "units"):
            units.append(str(array.units))
        else:
            units.append("dimensionless")
    if header != "" and not header.endswith("\n"):
        header += "\n"
    header += " Units\n " + "\t".join(units)
    np.savetxt(
        fname,
        np.transpose(arrays),
        header=header,
        fmt=fmt,
        delimiter=delimiter,
        footer=footer,
        newline="\n",
        comments=comments,
    )
r Write unyt_arrays with unit information to a text file .
182
15
246,071
def convert_to_units(self, units, equivalence=None, **kwargs):
    """Convert the array to the given units in-place.

    Electromagnetic conversions between cgs and mks are handled specially;
    otherwise a scale factor (and possibly an offset) is applied directly
    to the underlying buffer.  With an equivalence the work is delegated
    to ``convert_to_equivalent``.
    """
    units = _sanitize_units_convert(units, self.units.registry)
    if equivalence is None:
        conv_data = _check_em_conversion(self.units, units, registry=self.units.registry)
        if any(conv_data):
            new_units, (conv_factor, offset) = _em_conversion(self.units, conv_data, units)
        else:
            new_units = units
            (conv_factor, offset) = self.units.get_conversion_factor(new_units, self.dtype)
        self.units = new_units
        values = self.d
        # if our dtype is an integer do the following somewhat awkward
        # dance to change the dtype in-place. We can't use astype
        # directly because that will create a copy and not update self
        if self.dtype.kind in ("u", "i"):
            # create a copy of the original data in floating point
            # form, it's possible this may lose precision for very
            # large integers
            dsize = values.dtype.itemsize
            new_dtype = "f" + str(dsize)
            large = LARGE_INPUT.get(dsize, 0)
            if large and np.any(np.abs(values) > large):
                warnings.warn(
                    "Overflow encountered while converting to units '%s'" % new_units,
                    RuntimeWarning,
                    stacklevel=2,
                )
            float_values = values.astype(new_dtype)
            # change the dtypes in-place, this does not change the
            # underlying memory buffer
            values.dtype = new_dtype
            self.dtype = new_dtype
            # actually fill in the new float values now that our
            # dtype is correct
            np.copyto(values, float_values)
        values *= conv_factor
        if offset:
            np.subtract(values, offset, values)
    else:
        self.convert_to_equivalent(units, equivalence, **kwargs)
Convert the array to the given units in - place .
448
12
246,072
def convert_to_base(self, unit_system=None, equivalence=None, **kwargs):
    """Convert the array in-place to the base units of ``unit_system``."""
    base_units = self.units.get_base_equivalent(unit_system)
    self.convert_to_units(base_units, equivalence=equivalence, **kwargs)
Convert the array in - place to the equivalent base units in the specified unit system .
63
18
246,073
def convert_to_cgs(self, equivalence=None, **kwargs):
    """Convert the array in-place to the equivalent cgs units."""
    cgs_units = self.units.get_cgs_equivalent()
    self.convert_to_units(cgs_units, equivalence=equivalence, **kwargs)
Convert the array in - place to the equivalent cgs units .
56
15
246,074
def convert_to_mks(self, equivalence=None, **kwargs):
    """Convert the array in-place to the equivalent mks units."""
    mks_units = self.units.get_mks_equivalent()
    self.convert_to_units(mks_units, equivalence, **kwargs)
Convert the array and units to the equivalent mks units .
53
13
246,075
def to_value(self, units=None, equivalence=None, **kwargs):
    """Return the data in the supplied units, stripped of unit metadata.

    Without ``units``, the array's current units are used.  Quantities
    come back as plain floats; arrays come back as bare NumPy arrays.
    """
    if units is None:
        bare = self.value
    else:
        bare = self.in_units(units, equivalence=equivalence, **kwargs).value
    return float(bare) if isinstance(self, unyt_quantity) else bare
Creates a copy of this array with the data in the supplied units and returns it without units . Output is therefore a bare NumPy array .
81
29
246,076
def in_base(self, unit_system=None):
    """Return a copy of this array converted to the base units of the
    specified unit system (the current system when None).
    """
    us = _sanitize_unit_system(unit_system, self)
    try:
        conv_data = _check_em_conversion(self.units, unit_system=us, registry=self.units.registry)
    except MKSCGSConversionError:
        raise UnitsNotReducible(self.units, us)
    if any(conv_data):
        # Electromagnetic units need the special cgs<->mks conversion path.
        to_units, (conv, offset) = _em_conversion(self.units, conv_data, unit_system=us)
    else:
        to_units = self.units.get_base_equivalent(unit_system)
        conv, offset = self.units.get_conversion_factor(to_units, self.dtype)
    # Promote the conversion factor to a float of matching width.
    new_dtype = np.dtype("f" + str(self.dtype.itemsize))
    conv = new_dtype.type(conv)
    ret = self.v * conv
    if offset:
        ret = ret - offset
    return type(self)(ret, to_units)
Creates a copy of this array with the data in the specified unit system and returns it in that system s base units .
230
25
246,077
def argsort(self, axis=-1, kind="quicksort", order=None):
    """Return the indices that would sort the array.

    Units are irrelevant to ordering, so the work is delegated to the
    plain ndarray view.
    """
    plain = self.view(np.ndarray)
    return plain.argsort(axis, kind, order)
Returns the indices that would sort the array .
45
9
246,078
def from_astropy(cls, arr, unit_registry=None):
    """Convert an AstroPy Quantity (or bare unit) to a unyt_array or
    unyt_quantity.
    """
    # Converting from AstroPy Quantity
    try:
        u = arr.unit
        _arr = arr
    except AttributeError:
        # `arr` was a bare unit rather than a quantity.
        u = arr
        _arr = 1.0 * u
    ap_units = []
    for base, exponent in zip(u.bases, u.powers):
        unit_str = base.to_string()
        # we have to do this because AstroPy is silly and defines
        # hour as "h"
        if unit_str == "h":
            unit_str = "hr"
        ap_units.append("%s**(%s)" % (unit_str, Rational(exponent)))
    ap_units = "*".join(ap_units)
    if isinstance(_arr.value, np.ndarray) and _arr.shape != ():
        return unyt_array(_arr.value, ap_units, registry=unit_registry)
    else:
        return unyt_quantity(_arr.value, ap_units, registry=unit_registry)
Convert an AstroPy Quantity to a unyt_array or unyt_quantity .
231
19
246,079
def to_astropy(self, **kwargs):
    """Return an equivalent AstroPy quantity with the same unit information."""
    unit = _astropy.units.Unit(str(self.units), **kwargs)
    return self.value * unit
Creates a new AstroPy quantity with the same unit information .
41
13
246,080
def from_pint(cls, arr, unit_registry=None):
    """Convert a Pint Quantity to a unyt_array or unyt_quantity."""
    p_units = []
    # Rebuild the unit expression from Pint's internal (base, exponent) map.
    for base, exponent in arr._units.items():
        bs = convert_pint_units(base)
        p_units.append("%s**(%s)" % (bs, Rational(exponent)))
    p_units = "*".join(p_units)
    if isinstance(arr.magnitude, np.ndarray):
        return unyt_array(arr.magnitude, p_units, registry=unit_registry)
    else:
        return unyt_quantity(arr.magnitude, p_units, registry=unit_registry)
Convert a Pint Quantity to a unyt_array or unyt_quantity .
149
19
246,081
def to_pint(self, unit_registry=None):
    """Convert this unyt_array or unyt_quantity to a Pint Quantity.

    A fresh Pint UnitRegistry is created when none is supplied.
    """
    if unit_registry is None:
        unit_registry = _pint.UnitRegistry()
    powers_dict = self.units.expr.as_powers_dict()
    units = []
    for unit, pow in powers_dict.items():
        # we have to do this because Pint doesn't recognize
        # "yr" as "year"
        if str(unit).endswith("yr") and len(str(unit)) in [2, 3]:
            unit = str(unit).replace("yr", "year")
        units.append("%s**(%s)" % (unit, Rational(pow)))
    units = "*".join(units)
    return unit_registry.Quantity(self.value, units)
Convert a unyt_array or unyt_quantity to a Pint Quantity .
183
19
246,082
def write_hdf5(self, filename, dataset_name=None, info=None, group_name=None):
    r"""Write this unyt_array to an hdf5 file.

    The units and a pickled copy of the unit registry lookup table are
    stored as dataset attributes alongside any entries in ``info``.
    The dataset defaults to "array_data"; an optional group may be used.
    """
    from unyt._on_demand_imports import _h5py as h5py
    import pickle
    if info is None:
        info = {}
    info["units"] = str(self.units)
    info["unit_registry"] = np.void(pickle.dumps(self.units.registry.lut))
    if dataset_name is None:
        dataset_name = "array_data"
    f = h5py.File(filename)
    if group_name is not None:
        if group_name in f:
            g = f[group_name]
        else:
            g = f.create_group(group_name)
    else:
        g = f
    if dataset_name in g.keys():
        d = g[dataset_name]
        # Overwrite without deleting if we can get away with it.
        if d.shape == self.shape and d.dtype == self.dtype:
            d[...] = self
            for k in d.attrs.keys():
                del d.attrs[k]
        else:
            del f[dataset_name]
            d = g.create_dataset(dataset_name, data=self)
    else:
        d = g.create_dataset(dataset_name, data=self)
    for k, v in info.items():
        d.attrs[k] = v
    f.close()
r Writes a unyt_array to hdf5 file .
319
14
246,083
def from_hdf5(cls, filename, dataset_name=None, group_name=None):
    r"""Read a dataset from an hdf5 file back into a unyt_array.

    Units and the pickled unit registry written by ``write_hdf5`` are
    restored from the dataset attributes.
    """
    from unyt._on_demand_imports import _h5py as h5py
    import pickle
    if dataset_name is None:
        dataset_name = "array_data"
    f = h5py.File(filename)
    if group_name is not None:
        g = f[group_name]
    else:
        g = f
    dataset = g[dataset_name]
    data = dataset[:]
    units = dataset.attrs.get("units", "")
    unit_lut = pickle.loads(dataset.attrs["unit_registry"].tostring())
    f.close()
    registry = UnitRegistry(lut=unit_lut, add_default_symbols=False)
    return cls(data, units, registry=registry)
r Attempts to read in and convert a dataset in an hdf5 file into a unyt_array .
193
22
246,084
def copy(self, order="C"):
    """Return a copy of the array with the same units.

    NOTE(review): the ``order`` argument is accepted but not forwarded
    to np.copy — confirm whether that is intentional.
    """
    data = np.copy(np.asarray(self))
    return type(self)(data, self.units)
Return a copy of the array .
35
7
246,085
def dot(self, b, out=None):
    """Dot product of two arrays, with units multiplied together.

    When ``out`` is supplied, its units are updated in place to the
    result units.
    """
    result_units = self.units * getattr(b, "units", NULL_UNIT)
    ret = self.view(np.ndarray).dot(np.asarray(b), out=out) * result_units
    if out is not None:
        out.units = result_units
    return ret
dot product of two arrays .
80
6
246,086
def import_units(module, namespace):
    """Copy every Unit and unyt_quantity object from ``module`` into
    ``namespace``.
    """
    for name, obj in module.__dict__.items():
        if isinstance(obj, (unyt_quantity, Unit)):
            namespace[name] = obj
Import Unit objects from a module into a namespace
48
9
246,087
def _lookup_unit_symbol(symbol_str, unit_symbol_lut):
    """Search the LUT for the unit data tuple matching *symbol_str*.

    Unknown symbols are resolved by stripping a recognized SI prefix and
    scaling the base unit's entry; the derived entry is cached back into
    the LUT. Raises UnitParseError when no match is found either way.
    """
    try:
        # Fast path: the symbol is registered verbatim.
        return unit_symbol_lut[symbol_str]
    except KeyError:
        pass
    prefix, symbol_wo_prefix = _split_prefix(symbol_str, unit_symbol_lut)
    if not prefix:
        # no dice
        raise UnitParseError(
            "Could not find unit symbol '%s' in the provided "
            "symbols." % symbol_str
        )
    unit_data = unit_symbol_lut[symbol_wo_prefix]
    prefix_value = unit_prefixes[prefix][0]
    # Need to add some special handling for comoving units: swap the inner
    # symbol rather than the trailing "cm" suffix in the LaTeX form. This
    # is fine for now, but it wouldn't work for a general unit that has an
    # arbitrary LaTeX representation.
    if symbol_wo_prefix != "cm" and symbol_wo_prefix.endswith("cm"):
        sub_symbol_wo_prefix = symbol_wo_prefix[:-2]
        sub_symbol_str = symbol_str[:-2]
    else:
        sub_symbol_wo_prefix = symbol_wo_prefix
        sub_symbol_str = symbol_str
    latex_repr = unit_data[3].replace(
        "{" + sub_symbol_wo_prefix + "}", "{" + sub_symbol_str + "}"
    )
    # Leave offset and dimensions the same, but adjust scale factor and
    # LaTeX representation.
    derived = (
        unit_data[0] * prefix_value,
        unit_data[1],
        unit_data[2],
        latex_repr,
        False,
    )
    # Cache so the next lookup for this symbol takes the fast path.
    unit_symbol_lut[symbol_str] = derived
    return derived
Searches for the unit data tuple corresponding to the given symbol .
403
14
246,088
def unit_system_id(self):
    """Unique identifier for this registry, derived from an MD5 digest of
    the sorted LUT contents. It is needed to register a dataset's code
    unit system in the unit system registry. Computed lazily and cached
    in ``self._unit_system_id``.
    """
    if self._unit_system_id is None:
        digest = md5()
        # Feed symbols and their data tuples in sorted order so the id is
        # independent of insertion order.
        for symbol, data in sorted(self.lut.items()):
            digest.update(symbol.encode("utf8"))
            digest.update(repr(data).encode("utf8"))
        self._unit_system_id = str(digest.hexdigest())
    return self._unit_system_id
This is a unique identifier for the unit registry created from an MD5 hash of its contents . It is needed to register a dataset's code unit system in the unit system registry .
126
33
246,089
def add(self, symbol, base_value, dimensions, tex_repr=None, offset=None, prefixable=False):
    """Add a symbol to this registry.

    Parameters
    ----------
    symbol : str
        Name of the unit.
    base_value : float
        Scale relative to the base unit of the same dimensions.
    dimensions : sympy expression
        Dimensionality; validated via ``_validate_dimensions``.
    tex_repr : str, optional
        LaTeX form; a simple ``\\rm{...}`` guess is used when omitted.
    offset : float, optional
        Additive offset (e.g. for temperature scales); defaults to 0.0.
    prefixable : bool, optional
        Whether SI prefixes may be applied to this symbol.
    """
    from unyt.unit_object import _validate_dimensions

    # Any mutation invalidates the cached registry identifier.
    self._unit_system_id = None

    if not isinstance(base_value, float):
        raise UnitParseError(
            "base_value (%s) must be a float, got a %s."
            % (base_value, type(base_value))
        )
    if offset is None:
        offset = 0.0
    elif not isinstance(offset, float):
        raise UnitParseError(
            "offset value (%s) must be a float, got a %s."
            % (offset, type(offset))
        )
    _validate_dimensions(dimensions)
    if tex_repr is None:
        # make educated guess that will look nice in most cases
        tex_repr = r"\rm{" + symbol.replace("_", r"\ ") + "}"
    self.lut[symbol] = (base_value, dimensions, offset, tex_repr, prefixable)
Add a symbol to this registry .
246
7
246,090
def remove(self, symbol):
    """Remove the entry for the unit matching *symbol*.

    Raises SymbolNotFoundError when the symbol is not registered.
    """
    # Any mutation invalidates the cached registry identifier.
    self._unit_system_id = None
    if symbol in self.lut:
        del self.lut[symbol]
    else:
        raise SymbolNotFoundError(
            "Tried to remove the symbol '%s', but it does not exist "
            "in this registry." % symbol
        )
Remove the entry for the unit matching symbol .
66
9
246,091
def modify(self, symbol, base_value):
    """Change the base value of a unit symbol, keeping the rest of its
    entry intact. Useful for adjusting code units after parsing
    parameters.

    Raises SymbolNotFoundError when the symbol is not registered.
    """
    # Any mutation invalidates the cached registry identifier.
    self._unit_system_id = None
    if symbol not in self.lut:
        raise SymbolNotFoundError(
            "Tried to modify the symbol '%s', but it does not exist "
            "in this registry." % symbol
        )
    if hasattr(base_value, "in_base"):
        # A quantity was passed: take its dimensions and MKS magnitude.
        new_dimensions = base_value.units.dimensions
        base_value = base_value.in_base("mks").value
    else:
        new_dimensions = self.lut[symbol][1]
    entry = self.lut[symbol]
    self.lut[symbol] = (float(base_value), new_dimensions) + entry[2:]
Change the base value of a unit symbol . Useful for adjusting code units after parsing parameters .
165
18
246,092
def to_json(self):
    """Return a JSON-serialized version of the unit registry.

    The sympy dimension expression (index 1 of each entry) is stringified
    so the LUT becomes JSON-serializable; ``from_json`` reverses this.
    """
    serializable = {
        symbol: (entry[0], str(entry[1])) + tuple(entry[2:])
        for symbol, entry in self.lut.items()
    }
    return json.dumps(serializable)
Returns a json - serialized version of the unit registry
87
11
246,093
def from_json(cls, json_text):
    """Return a UnitRegistry object from a JSON-serialized unit registry
    (the inverse of ``to_json``)."""
    raw = json.loads(json_text)
    lut = {}
    for symbol, entry in raw.items():
        fields = list(entry)
        # Re-parse the stringified dimension expression back into sympy,
        # resolving names against the unyt dimensions module.
        fields[1] = sympify(entry[1], locals=vars(unyt_dims))
        lut[symbol] = tuple(fields)
    return cls(lut=lut, add_default_symbols=False)
Returns a UnitRegistry object from a json - serialized unit registry
109
14
246,094
def _em_conversion(orig_units, conv_data, to_units=None, unit_system=None):
    """Convert between E&M and MKS base units.

    Returns a ``(to_units, conversion_factor)`` pair derived from the
    ``(conv_unit, canonical_unit, scale)`` triple in *conv_data*.
    """
    conv_unit, canonical_unit, scale = conv_data
    if conv_unit is None:
        conv_unit = canonical_unit
    scaled_expr = scale * canonical_unit.expr
    if unit_system is not None:
        # we don't know the to_units, so we get it directly from the
        # conv_data
        to_units = Unit(conv_unit.expr, registry=orig_units.registry)
    new_units = Unit(scaled_expr, registry=orig_units.registry)
    factor = new_units.get_conversion_factor(to_units)
    return to_units, factor
Convert between E&M & MKS base units .
156
12
246,095
def _check_em_conversion(unit, to_unit=None, unit_system=None, registry=None):
    """Check whether *unit* contains E&M units needing special conversion.

    Returns an ``em_map`` tuple ``(to_unit, em_unit, scale)`` when a
    direct E&M conversion applies, or an empty tuple otherwise. Raises
    MKSCGSConversionError when a compound expression contains an atom
    that has no representation in the target unit system.
    """
    em_map = ()
    # Nothing to do for identity conversions or non-E&M dimensions.
    if unit == to_unit or unit.dimensions not in em_conversion_dims:
        return em_map
    if unit.is_atomic:
        prefix, unit_wo_prefix = _split_prefix(str(unit), unit.registry.lut)
    else:
        prefix, unit_wo_prefix = "", str(unit)
    if (unit_wo_prefix, unit.dimensions) in em_conversions:
        em_info = em_conversions[unit_wo_prefix, unit.dimensions]
        em_unit = Unit(prefix + em_info[1], registry=registry)
        if to_unit is None:
            cmks_in_unit = current_mks in unit.dimensions.atoms()
            cmks_in_unit_system = unit_system.units_map[current_mks] is not None
            if cmks_in_unit and cmks_in_unit_system:
                em_map = (unit_system[unit.dimensions], unit, 1.0)
            else:
                em_map = (None, em_unit, em_info[2])
        elif to_unit.dimensions == em_unit.dimensions:
            em_map = (to_unit, em_unit, em_info[2])
    if em_map:
        return em_map
    if unit_system is None:
        from unyt.unit_systems import unit_system_registry

        unit_system = unit_system_registry["mks"]
    # Walk the expression atoms to detect symbols the target unit system
    # cannot express (MissingMKSCurrent surfaces as a conversion error).
    for atom in unit.expr.atoms():
        if atom.is_Number:
            continue
        atom_name = str(atom)
        atom_dims = Unit(atom_name, registry=registry).dimensions
        try:
            if str(unit_system[atom_dims]) == atom_name:
                continue
        except MissingMKSCurrent:
            raise MKSCGSConversionError(unit)
    return em_map
Check to see if the units contain E&M units
449
11
246,096
def _get_conversion_factor ( old_units , new_units , dtype ) : if old_units . dimensions != new_units . dimensions : raise UnitConversionError ( old_units , old_units . dimensions , new_units , new_units . dimensions ) ratio = old_units . base_value / new_units . base_value if old_units . base_offset == 0 and new_units . base_offset == 0 : return ( ratio , None ) else : # the dimensions are the same, so both are temperatures, where # it's legal to convert units so no need to do error checking return ratio , ratio * old_units . base_offset - new_units . base_offset
Get the conversion factor between two units of equivalent dimensions . This is the number you multiply data by to convert from values in old_units to values in new_units .
153
34
246,097
def _get_unit_data_from_expr(unit_expr, unit_symbol_lut):
    """Recursively reduce a valid sympy unit expression to its total
    ``(base_value, dimensions)`` pair.

    Raises UnitParseError for symbolic exponents or node types other
    than Number, Symbol, Pow, and Mul.
    """
    # Now for the sympy possibilities
    if isinstance(unit_expr, Number):
        if unit_expr is sympy_one:
            return (1.0, sympy_one)
        return (float(unit_expr), sympy_one)
    if isinstance(unit_expr, Symbol):
        return _lookup_unit_symbol(unit_expr.name, unit_symbol_lut)
    if isinstance(unit_expr, Pow):
        base_data = _get_unit_data_from_expr(unit_expr.args[0], unit_symbol_lut)
        exponent = unit_expr.args[1]
        if isinstance(exponent, Symbol):
            # Symbolic exponents (e.g. "m**x") are not meaningful units.
            raise UnitParseError("Invalid unit expression '%s'." % unit_expr)
        return (float(base_data[0] ** exponent), base_data[1] ** exponent)
    if isinstance(unit_expr, Mul):
        total_value = 1.0
        total_dims = 1
        for factor in unit_expr.args:
            factor_data = _get_unit_data_from_expr(factor, unit_symbol_lut)
            total_value *= factor_data[0]
            total_dims *= factor_data[1]
        return (float(total_value), total_dims)
    raise UnitParseError(
        "Cannot parse for unit data from '%s'. Please supply"
        " an expression of only Unit, Symbol, Pow, and Mul"
        "objects." % str(unit_expr)
    )
Grabs the total base_value and dimensions from a valid unit expression .
355
15
246,098
def define_unit(symbol, value, tex_repr=None, offset=None, prefixable=False, registry=None):
    """Define a new unit and add it to the specified unit registry
    (the default registry when *registry* is omitted).

    Parameters
    ----------
    symbol : str
        Name of the new unit; must not already exist in the registry.
    value : unyt_quantity or (value, unit) tuple
        Definition of the new unit in terms of existing units.
    tex_repr, offset, prefixable :
        Forwarded to ``registry.add``.
    """
    from unyt.array import unyt_quantity, _iterable
    import unyt

    if registry is None:
        registry = default_unit_registry
    if symbol in registry:
        raise RuntimeError(
            "Unit symbol '%s' already exists in the provided "
            "registry" % symbol
        )
    if not isinstance(value, unyt_quantity):
        # Accept a (value, unit) pair as a convenience spelling.
        if _iterable(value) and len(value) == 2:
            value = unyt_quantity(value[0], value[1], registry=registry)
        else:
            raise RuntimeError(
                '"value" needs to be a quantity or ' "(value, unit) tuple!"
            )
    base_value = float(value.in_base(unit_system="mks"))
    dimensions = value.units.dimensions
    registry.add(
        symbol,
        base_value,
        dimensions,
        prefixable=prefixable,
        tex_repr=tex_repr,
        offset=offset,
    )
    # Expose the new unit as a top-level attribute of the unyt namespace
    # when it was added to the default registry.
    if registry is default_unit_registry:
        setattr(unyt, symbol, Unit(symbol, registry=registry))
Define a new unit and add it to the specified unit registry .
258
14
246,099
def latex_repr(self):
    """A LaTeX representation for the unit, computed on first access and
    cached in ``self._latex_repr``."""
    if self._latex_repr is None:
        # Copy non-atomic expressions so the helper cannot mutate ours.
        expr = self.expr if self.expr.is_Atom else self.expr.copy()
        self._latex_repr = _get_latex_representation(expr, self.registry)
    return self._latex_repr
A LaTeX representation for the unit
90
7