idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
228,300
def true_range(close_data, period):
    """True Range.

    For each index from period-1 onward, take the largest of:
    window high - window low, |window high - prior close|, and
    |window low - prior close| over the trailing ``period`` values.
    """
    catch_errors.check_for_period_error(close_data, period)
    tr = []
    for idx in range(period - 1, len(close_data)):
        window = close_data[idx + 1 - period:idx + 1]
        window_high = np.max(window)
        window_low = np.min(window)
        prior_close = close_data[idx - 1]
        tr.append(np.max([window_high - window_low,
                          abs(window_high - prior_close),
                          abs(window_low - prior_close)]))
    tr = fill_for_noncomputable_vals(close_data, tr)
    return tr
True Range .
191
3
228,301
def double_smoothed_stochastic(data, period):
    """Double Smoothed Stochastic."""
    catch_errors.check_for_period_error(data, period)
    lows, highs = [], []
    for idx in range(period - 1, len(data)):
        window = data[idx + 1 - period:idx + 1]
        lows.append(data[idx] - np.min(window))
        highs.append(np.max(window) - np.min(window))
    sm_lows = ema(ema(lows, period), period)
    sm_highs = ema(ema(highs, period), period)
    dss = 100 * (sm_lows / sm_highs)
    dss = fill_for_noncomputable_vals(data, dss)
    return dss
Double Smoothed Stochastic .
207
8
228,302
def volume_adjusted_moving_average(close_data, volume, period):
    """Volume Adjusted Moving Average."""
    catch_errors.check_for_input_len_diff(close_data, volume)
    catch_errors.check_for_period_error(close_data, period)
    # Volume increment: 67% of the average volume over the series.
    vol_incr = np.mean(volume) * 0.67
    vol_ratio = [vol / vol_incr for vol in volume]
    close_vol = np.array(close_data) * vol_ratio
    vama = [sum(close_vol[idx + 1 - period:idx + 1]) / period
            for idx in range(period - 1, len(close_data))]
    vama = fill_for_noncomputable_vals(close_data, vama)
    return vama
Volume Adjusted Moving Average .
178
5
228,303
def double_exponential_moving_average(data, period):
    """Double Exponential Moving Average: 2*EMA - EMA(EMA)."""
    catch_errors.check_for_period_error(data, period)
    single_ema = ema(data, period)
    return 2 * single_ema - ema(single_ema, period)
Double Exponential Moving Average .
62
6
228,304
def triangular_moving_average(data, period):
    """Triangular Moving Average: a simple moving average applied twice."""
    catch_errors.check_for_period_error(data, period)
    return sma(sma(data, period), period)
Triangular Moving Average .
47
5
228,305
def weighted_moving_average(data, period):
    """Weighted Moving Average.

    Each window value is weighted by its 1-based position in the
    window, normalized by the sum of weights 1..period.
    """
    catch_errors.check_for_period_error(data, period)
    weight_total = (period * (period + 1)) / 2.0
    wmas = [
        sum(data[start + offset] * (offset + 1)
            for offset in range(period)) / weight_total
        for start in range(len(data) - period + 1)
    ]
    wmas = fill_for_noncomputable_vals(data, wmas)
    return wmas
Weighted Moving Average .
139
5
228,306
def conversion_base_line_helper(data, period):
    """(Highest high + lowest low) / 2 over each trailing window.

    The only real difference between TenkanSen and KijunSen is the
    period value.
    """
    catch_errors.check_for_period_error(data, period)
    cblh = []
    for idx in range(period - 1, len(data)):
        window = data[idx + 1 - period:idx + 1]
        cblh.append((np.max(window) + np.min(window)) / 2)
    cblh = fill_for_noncomputable_vals(data, cblh)
    return cblh
The only real difference between TenkanSen and KijunSen is the period value
119
17
228,307
def chande_momentum_oscillator(close_data, period):
    """Chande Momentum Oscillator.

    NOTE(review): the window loop starts at idx 0, so early windows are
    shorter (or empty) rather than skipped — preserved as-is.
    """
    catch_errors.check_for_period_error(close_data, period)
    close_data = np.array(close_data)
    sum_up = []
    sum_down = []
    for idx in range(len(close_data)):
        window = close_data[idx + 1 - period:idx + 1]
        diffs = [window[i] - window[i - 1] for i in range(1, len(window))]
        sum_up.append(sum(d if d > 0 else 0 for d in diffs))
        sum_down.append(sum(abs(d) if d < 0 else 0 for d in diffs))
    sum_up = np.array(sum_up)
    sum_down = np.array(sum_down)
    # numpy is able to handle dividing by zero and makes those calculations
    # nans which is what we want, so we safely suppress the RuntimeWarning
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=RuntimeWarning)
        cmo = 100 * ((sum_up - sum_down) / (sum_up + sum_down))
    return cmo
Chande Momentum Oscillator .
338
8
228,308
def price_oscillator(data, short_period, long_period):
    """Price Oscillator: percent difference between short and long EMAs."""
    catch_errors.check_for_period_error(data, short_period)
    catch_errors.check_for_period_error(data, long_period)
    ema_short = ema(data, short_period)
    ema_long = ema(data, long_period)
    return 100 * ((ema_short - ema_long) / ema_long)
Price Oscillator .
106
5
228,309
def check_for_period_error(data, period):
    """Check for Period Error.

    Raise when ``data`` has fewer points than ``period`` requires.
    """
    if len(data) < int(period):
        raise Exception("Error: data_len < period")
Check for Period Error .
48
5
228,310
def check_for_input_len_diff(*args):
    """Check for Input Length Difference.

    Raise when the given sequences are not all the same length.
    """
    distinct_lengths = {len(arr) for arr in args}
    if len(distinct_lengths) > 1:
        err_msg = ("Error: mismatched data lengths, check to ensure that all "
                   "input data is the same length and valid")
        raise Exception(err_msg)
Check for Input Length Difference .
86
6
228,311
def upper_bollinger_band(data, period, std_mult=2.0):
    """Upper Bollinger Band: SMA plus std_mult standard deviations."""
    catch_errors.check_for_period_error(data, period)
    period = int(period)
    simple_ma = sma(data, period)[period - 1:]
    upper_bb = [simple_ma[idx] + std_mult * np.std(data[idx:idx + period])
                for idx in range(len(data) - period + 1)]
    upper_bb = fill_for_noncomputable_vals(data, upper_bb)
    return np.array(upper_bb)
Upper Bollinger Band .
153
6
228,312
def middle_bollinger_band(data, period, std=2.0):
    """Middle Bollinger Band: the simple moving average of the data."""
    catch_errors.check_for_period_error(data, period)
    return sma(data, int(period))
Middle Bollinger Band .
57
5
228,313
def lower_bollinger_band(data, period, std=2.0):
    """Lower Bollinger Band: SMA minus std standard deviations."""
    catch_errors.check_for_period_error(data, period)
    period = int(period)
    simple_ma = sma(data, period)[period - 1:]
    lower_bb = [simple_ma[idx] - std * np.std(data[idx:idx + period])
                for idx in range(len(data) - period + 1)]
    lower_bb = fill_for_noncomputable_vals(data, lower_bb)
    return np.array(lower_bb)
Lower Bollinger Band .
149
5
228,314
def percent_bandwidth(data, period, std=2.0):
    """Percent Bandwidth: position of price within the Bollinger range."""
    catch_errors.check_for_period_error(data, period)
    period = int(period)
    above_lower = np.array(data) - lower_bollinger_band(data, period, std)
    return above_lower / bb_range(data, period, std)
Percent Bandwidth .
86
4
228,315
def standard_deviation(data, period):
    """Standard Deviation (sample, ddof=1) over each trailing window."""
    catch_errors.check_for_period_error(data, period)
    stds = []
    for idx in range(period - 1, len(data)):
        stds.append(np.std(data[idx + 1 - period:idx + 1], ddof=1))
    stds = fill_for_noncomputable_vals(data, stds)
    return stds
Standard Deviation .
92
4
228,316
def detrended_price_oscillator(data, period):
    """Detrended Price Oscillator.

    Price minus the mean of the trailing (period/2 + 1) values.
    """
    catch_errors.check_for_period_error(data, period)
    period = int(period)
    lookback = int(period / 2) + 1
    dop = [data[idx] - np.mean(data[idx + 1 - lookback:idx + 1])
           for idx in range(period - 1, len(data))]
    dop = fill_for_noncomputable_vals(data, dop)
    return dop
Detrended Price Oscillator .
109
8
228,317
def smoothed_moving_average(data, period):
    """Smoothed Moving Average (exponential weighting, alpha = 1/period)."""
    catch_errors.check_for_period_error(data, period)
    smoothed = pd.Series(data).ewm(alpha=1.0 / period).mean()
    return smoothed.values.flatten()
Smoothed Moving Average .
63
6
228,318
def chaikin_money_flow(close_data, high_data, low_data, volume, period):
    """Chaikin Money Flow.

    Volume-weighted money-flow multiplier averaged over each trailing
    ``period`` window.
    """
    catch_errors.check_for_input_len_diff(close_data, high_data, low_data,
                                          volume)
    catch_errors.check_for_period_error(close_data, period)
    close_data = np.array(close_data)
    high_data = np.array(high_data)
    low_data = np.array(low_data)
    volume = np.array(volume)
    cmf = []
    for idx in range(period - 1, len(close_data)):
        window = slice(idx + 1 - period, idx + 1)
        close_w = close_data[window]
        high_w = high_data[window]
        low_w = low_data[window]
        vol_w = volume[window]
        multiplier = (((close_w - low_w) - (high_w - close_w)) /
                      (high_w - low_w))
        cmf.append(sum(multiplier * vol_w) / sum(vol_w))
    cmf = fill_for_noncomputable_vals(close_data, cmf)
    return cmf
Chaikin Money Flow .
308
7
228,319
def hull_moving_average(data, period):
    """Hull Moving Average: WMA(2*WMA(n/2) - WMA(n), sqrt(n))."""
    catch_errors.check_for_period_error(data, period)
    half_period_wma = wma(data, int(period / 2))
    full_period_wma = wma(data, period)
    return wma(2 * half_period_wma - full_period_wma, int(np.sqrt(period)))
Hull Moving Average .
71
5
228,320
def standard_variance(data, period):
    """Standard Variance (sample, ddof=1) over each trailing window."""
    catch_errors.check_for_period_error(data, period)
    sv = []
    for idx in range(period - 1, len(data)):
        sv.append(np.var(data[idx + 1 - period:idx + 1], ddof=1))
    sv = fill_for_noncomputable_vals(data, sv)
    return sv
Standard Variance .
88
4
228,321
def calculate_up_moves(high_data):
    """Up Move: each high minus the previous high, NaN for the first slot."""
    moves = [curr - prev for prev, curr in zip(high_data, high_data[1:])]
    return [np.nan] + moves
Up Move .
63
3
228,322
def calculate_down_moves(low_data):
    """Down Move: previous low minus each low, NaN for the first slot."""
    moves = [prev - curr for prev, curr in zip(low_data, low_data[1:])]
    return [np.nan] + moves
Down Move .
63
3
228,323
def average_directional_index(close_data, high_data, low_data, period):
    """Average Directional Index.

    Fix: the original evaluated ``positive_directional_index`` and
    ``negative_directional_index`` twice each (four full indicator
    passes). Each series is now computed once; the result is identical.
    """
    pdi = positive_directional_index(close_data, high_data, low_data, period)
    ndi = negative_directional_index(close_data, high_data, low_data, period)
    avg_di = abs((pdi - ndi) / (pdi + ndi))
    adx = 100 * smma(avg_di, period)
    return adx
Average Directional Index .
140
5
228,324
def linear_weighted_moving_average(data, period):
    """Linear Weighted Moving Average.

    Fix: the original looked up each window value's weight with
    ``list.index``, which returns the position of the FIRST equal
    element, so any repeated price inside a window received the wrong
    weight (and required ``data`` to be a list). Weights are now
    assigned by position via ``enumerate``; unique-valued inputs give
    the same result as before.
    """
    catch_errors.check_for_period_error(data, period)
    lwma = []
    for idx in range(period - 1, len(data)):
        window = data[idx + 1 - period:idx + 1]
        # Weight each value by its 1-based position within the window.
        weighted_sum = sum((pos + 1) * val
                           for pos, val in enumerate(window))
        lwma.append(weighted_sum / sum(range(1, len(window) + 1)))
    lwma = fill_for_noncomputable_vals(data, lwma)
    return lwma
Linear Weighted Moving Average .
175
7
228,325
def volume_oscillator(volume, short_period, long_period):
    """Volume Oscillator: percent spread of short vs long volume SMA."""
    catch_errors.check_for_period_error(volume, short_period)
    catch_errors.check_for_period_error(volume, long_period)
    short_sma = sma(volume, short_period)
    long_sma = sma(volume, long_period)
    return 100 * ((short_sma - long_sma) / long_sma)
Volume Oscillator .
95
5
228,326
def triple_exponential_moving_average(data, period):
    """Triple Exponential Moving Average: 3*EMA - 3*EMA(EMA) + EMA(EMA(EMA))."""
    catch_errors.check_for_period_error(data, period)
    ema1 = ema(data, period)
    ema2 = ema(ema1, period)
    ema3 = ema(ema2, period)
    return (3 * ema1 - 3 * ema2) + ema3
Triple Exponential Moving Average .
88
7
228,327
def money_flow(close_data, high_data, low_data, volume):
    """Money Flow: volume scaled by the typical price."""
    catch_errors.check_for_input_len_diff(close_data, high_data, low_data,
                                          volume)
    return volume * tp(close_data, high_data, low_data)
Money Flow .
71
3
228,328
def request_and_check(self, url, method='get', expected_content_type=None,
                      **kwargs):
    """Performs a request and checks that the status is OK and that the
    content-type matches expectations.
    """
    assert method in ['get', 'post']
    result = self.driver.request(method, url, **kwargs)
    if result.status_code != requests.codes.ok:
        raise RuntimeError('Error requesting %r, status = %d' %
                           (url, result.status_code))
    if expected_content_type is not None:
        content_type = result.headers.get('content-type', '')
        if not re.match(expected_content_type, content_type):
            raise RuntimeError(
                'Error requesting %r, content type %r does not match %r' %
                (url, content_type, expected_content_type))
    return result
Performs a request and checks that the status is OK and that the content-type matches expectations.
183
20
228,329
def get_transactions_json(self, include_investment=False,
                          skip_duplicates=False, start_date=None, id=0):
    """Returns the raw JSON transaction data as downloaded from Mint.

    The JSON data includes extra fields missing from the CSV export
    (e.g. pending/completed status) but omits the year on current-year
    transactions.
    """
    # Warning: This is a global property for the user that we are changing.
    self.set_user_property('hide_duplicates', 'T' if skip_duplicates else 'F')
    # Converts the start date into datetime format - must be mm/dd/yy
    try:
        start_date = datetime.strptime(start_date, '%m/%d/%y')
    except (TypeError, ValueError):
        start_date = None
    all_txns = []
    offset = 0
    # Mint only returns some of the transactions at once. To get all of
    # them, we have to keep asking for more until we reach the end.
    while 1:
        url = MINT_ROOT_URL + '/getJsonData.xevent'
        params = {
            'queryNew': '',
            'offset': offset,
            'comparableType': '8',
            'rnd': Mint.get_rnd(),
        }
        # Specifying accountId=0 causes Mint to return investment
        # transactions as well. Otherwise they are skipped by default.
        if id > 0 or include_investment:
            params['id'] = id
        if include_investment:
            params['task'] = 'transactions'
        else:
            params['task'] = 'transactions,txnfilters'
            params['filterType'] = 'cash'
        result = self.request_and_check(
            url, headers=JSON_HEADER, params=params,
            expected_content_type='text/json|application/json')
        data = json.loads(result.text)
        txns = data['set'][0].get('data', [])
        if not txns:
            break
        if start_date:
            last_dt = json_date_to_datetime(txns[-1]['odate'])
            if last_dt < start_date:
                keep_txns = [
                    t for t in txns
                    if json_date_to_datetime(t['odate']) >= start_date]
                all_txns.extend(keep_txns)
                break
        all_txns.extend(txns)
        offset += len(txns)
    return all_txns
Returns the raw JSON transaction data as downloaded from Mint . The JSON transaction data includes some additional information missing from the CSV data such as whether the transaction is pending or completed but leaves off the year for current year transactions .
522
43
228,330
def get_detailed_transactions(self, include_investment=False,
                              skip_duplicates=False, remove_pending=True,
                              start_date=None):
    """Returns the JSON transaction data as a DataFrame.

    Converts dates to a consistent datetime format and reverses credit
    activity; pending transactions are dropped by default.
    """
    assert_pd()
    result = self.get_transactions_json(include_investment,
                                        skip_duplicates, start_date)
    df = pd.DataFrame(result)
    df['odate'] = df['odate'].apply(json_date_to_datetime)
    if remove_pending:
        df = df[~df.isPending]
        df.reset_index(drop=True, inplace=True)
    df.amount = df.apply(reverse_credit_amount, axis=1)
    return df
Returns the JSON transaction data as a DataFrame and converts current year dates and prior year dates into consistent datetime format and reverses credit activity .
160
29
228,331
def get_transactions_csv(self, include_investment=False, acct=0):
    """Returns the raw CSV transaction data as downloaded from Mint."""
    # Specifying accountId=0 causes Mint to return investment
    # transactions as well. Otherwise they are skipped by default.
    if include_investment or acct > 0:
        params = {'accountId': acct}
    else:
        params = None
    result = self.request_and_check(
        '{}/transactionDownload.event'.format(MINT_ROOT_URL),
        params=params, expected_content_type='text/csv')
    return result.content
Returns the raw CSV transaction data as downloaded from Mint .
125
11
228,332
def get_transactions(self, include_investment=False):
    """Returns the transaction data as a Pandas DataFrame.

    Fix: the original used ``pd.np.nan``; the ``pandas.np`` alias was
    deprecated in pandas 0.25 and removed in pandas 1.0, so this method
    crashed on modern pandas. numpy's ``nan`` is used directly instead.
    """
    # Local import keeps the fix self-contained; numpy is a hard
    # dependency of pandas, so it is always available here.
    import numpy as np
    assert_pd()
    s = StringIO(self.get_transactions_csv(
        include_investment=include_investment))
    s.seek(0)
    df = pd.read_csv(s, parse_dates=['Date'])
    df.columns = [c.lower().replace(' ', '_') for c in df.columns]
    df.category = (df.category.str.lower()
                   .replace('uncategorized', np.nan))
    return df
Returns the transaction data as a Pandas DataFrame .
133
11
228,333
def payments(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the payments JSON from this instance's Horizon server."""
    return self.horizon.account_payments(
        address=self.address, cursor=cursor, order=order, limit=limit,
        sse=sse)
Retrieve the payments JSON from this instance's Horizon server.
59
12
228,334
def offers(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the offers JSON from this instance's Horizon server."""
    return self.horizon.account_offers(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the offers JSON from this instance's Horizon server.
57
12
228,335
def transactions(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the transactions JSON from this instance's Horizon server."""
    return self.horizon.account_transactions(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the transactions JSON from this instance's Horizon server.
57
12
228,336
def operations(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the operations JSON from this instance's Horizon server."""
    return self.horizon.account_operations(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the operations JSON from this instance's Horizon server.
57
12
228,337
def trades(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the trades JSON from this instance's Horizon server."""
    return self.horizon.account_trades(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the trades JSON from this instance's Horizon server.
57
12
228,338
def effects(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the effects JSON from this instance's Horizon server."""
    return self.horizon.account_effects(
        self.address, cursor=cursor, order=order, limit=limit, sse=sse)
Retrieve the effects JSON from this instance's Horizon server.
56
12
228,339
def submit(self, te):
    """Submit the transaction using a pooled connection, retrying on
    failure.
    """
    params = {'tx': te}
    url = urljoin(self.horizon_uri, 'transactions/')
    # POST is not included in Retry's method_whitelist for a good reason.
    # our custom retry mechanism follows
    reply = None
    retry_count = self.num_retries
    while True:
        try:
            reply = self._session.post(
                url, data=params, timeout=self.request_timeout)
            return check_horizon_reply(reply.json())
        except (RequestException, NewConnectionError, ValueError) as e:
            if reply is not None:
                msg = 'Horizon submit exception: {}, reply: [{}] {}'.format(
                    str(e), reply.status_code, reply.text)
            else:
                msg = 'Horizon submit exception: {}'.format(str(e))
            logging.warning(msg)
            # Stop retrying on a non-retryable status, or once the
            # retry budget is exhausted.
            if ((reply is not None and
                 reply.status_code not in self.status_forcelist)
                    or retry_count <= 0):
                if reply is None:
                    raise HorizonRequestError(e)
                raise HorizonError(
                    'Invalid horizon reply: [{}] {}'.format(
                        reply.status_code, reply.text),
                    reply.status_code)
            retry_count -= 1
            logging.warning('Submit retry attempt {}'.format(retry_count))
            sleep(self.backoff_factor)
Submit the transaction using a pooled connection and retry on failure .
310
13
228,340
def account(self, address):
    """Returns information and links relating to a single account."""
    endpoint = '/accounts/{account_id}'.format(account_id=address)
    return self.query(endpoint)
Returns information and links relating to a single account .
36
10
228,341
def account_data(self, address, key):
    """This endpoint represents a single data item associated with a
    given account."""
    endpoint = '/accounts/{account_id}/data/{data_key}'.format(
        account_id=address, data_key=key)
    return self.query(endpoint)
This endpoint represents a single data associated with a given account .
54
12
228,342
def account_effects(self, address, cursor=None, order='asc', limit=10,
                    sse=False):
    """This endpoint represents all effects that changed a given account."""
    endpoint = '/accounts/{account_id}/effects'.format(account_id=address)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params, sse)
This endpoint represents all effects that changed a given account .
85
11
228,343
def assets(self, asset_code=None, asset_issuer=None, cursor=None,
           order='asc', limit=10):
    """This endpoint represents all assets, with statistics for each."""
    endpoint = '/assets'
    params = self.__query_params(
        asset_code=asset_code, asset_issuer=asset_issuer, cursor=cursor,
        order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all assets . It will give you all the assets in the system along with various statistics about each .
86
23
228,344
def transaction(self, tx_hash):
    """The transaction details endpoint provides information on a single
    transaction."""
    endpoint = '/transactions/{tx_hash}'.format(tx_hash=tx_hash)
    return self.query(endpoint)
The transaction details endpoint provides information on a single transaction .
40
11
228,345
def transaction_operations(self, tx_hash, cursor=None, order='asc',
                           include_failed=False, limit=10):
    """This endpoint represents all operations that are part of a given
    transaction."""
    endpoint = '/transactions/{tx_hash}/operations'.format(tx_hash=tx_hash)
    params = self.__query_params(cursor=cursor, order=order, limit=limit,
                                 include_failed=include_failed)
    return self.query(endpoint, params)
This endpoint represents all operations that are part of a given transaction .
97
13
228,346
def transaction_effects(self, tx_hash, cursor=None, order='asc', limit=10):
    """This endpoint represents all effects that occurred as a result of
    a given transaction."""
    endpoint = '/transactions/{tx_hash}/effects'.format(tx_hash=tx_hash)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all effects that occurred as a result of a given transaction .
81
15
228,347
def order_book(self, selling_asset_code, buying_asset_code,
               selling_asset_issuer=None, buying_asset_issuer=None,
               limit=10):
    """Return a summary of the orderbook together with its bids and asks."""
    selling_asset = Asset(selling_asset_code, selling_asset_issuer)
    buying_asset = Asset(buying_asset_code, buying_asset_issuer)
    asset_params = {
        'selling_asset_type': selling_asset.type,
        'selling_asset_code':
            None if selling_asset.is_native() else selling_asset.code,
        'selling_asset_issuer': selling_asset.issuer,
        'buying_asset_type': buying_asset.type,
        'buying_asset_code':
            None if buying_asset.is_native() else buying_asset.code,
        'buying_asset_issuer': buying_asset.issuer,
    }
    endpoint = '/order_book'
    params = self.__query_params(limit=limit, **asset_params)
    return self.query(endpoint, params)
Return for each orderbook a summary of the orderbook and the bids and asks associated with that orderbook .
258
22
228,348
def ledger(self, ledger_id):
    """The ledger details endpoint provides information on a single
    ledger."""
    endpoint = '/ledgers/{ledger_id}'.format(ledger_id=ledger_id)
    return self.query(endpoint)
The ledger details endpoint provides information on a single ledger .
41
11
228,349
def ledger_effects(self, ledger_id, cursor=None, order='asc', limit=10):
    """This endpoint represents all effects that occurred in the given
    ledger."""
    endpoint = '/ledgers/{ledger_id}/effects'.format(ledger_id=ledger_id)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all effects that occurred in the given ledger .
82
12
228,350
def ledger_transactions(self, ledger_id, cursor=None, order='asc',
                        include_failed=False, limit=10):
    """This endpoint represents all transactions in a given ledger."""
    endpoint = '/ledgers/{ledger_id}/transactions'.format(
        ledger_id=ledger_id)
    params = self.__query_params(cursor=cursor, order=order, limit=limit,
                                 include_failed=include_failed)
    return self.query(endpoint, params)
This endpoint represents all transactions in a given ledger .
98
10
228,351
def effects(self, cursor=None, order='asc', limit=10, sse=False):
    """This endpoint represents all effects."""
    endpoint = '/effects'
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params, sse)
This endpoint represents all effects .
63
6
228,352
def operations(self, cursor=None, order='asc', limit=10,
               include_failed=False, sse=False):
    """This endpoint represents all operations that are part of
    validated transactions."""
    endpoint = '/operations'
    params = self.__query_params(cursor=cursor, order=order, limit=limit,
                                 include_failed=include_failed)
    return self.query(endpoint, params, sse)
This endpoint represents all operations that are part of validated transactions .
78
12
228,353
def operation(self, op_id):
    """The operation details endpoint provides information on a single
    operation."""
    endpoint = '/operations/{op_id}'.format(op_id=op_id)
    return self.query(endpoint)
The operation details endpoint provides information on a single operation .
40
11
228,354
def operation_effects(self, op_id, cursor=None, order='asc', limit=10):
    """This endpoint represents all effects that occurred as a result of
    a given operation."""
    endpoint = '/operations/{op_id}/effects'.format(op_id=op_id)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all effects that occurred as a result of a given operation .
81
15
228,355
def paths(self, destination_account, destination_amount, source_account,
          destination_asset_code, destination_asset_issuer=None):
    """Find payment paths from the source account's assets to the
    desired destination asset."""
    destination_asset = Asset(destination_asset_code,
                              destination_asset_issuer)
    destination_asset_params = {
        'destination_asset_type': destination_asset.type,
        'destination_asset_code':
            None if destination_asset.is_native() else destination_asset.code,
        'destination_asset_issuer': destination_asset.issuer,
    }
    endpoint = '/paths'
    params = self.__query_params(
        destination_account=destination_account,
        source_account=source_account,
        destination_amount=destination_amount,
        **destination_asset_params)
    return self.query(endpoint, params)
Load a list of assets available to the source account id and find any payment paths from those source assets to the desired destination asset .
187
26
228,356
def trades(self, base_asset_code=None, counter_asset_code=None,
           base_asset_issuer=None, counter_asset_issuer=None,
           offer_id=None, cursor=None, order='asc', limit=10):
    """Load a list of trades, optionally filtered by an orderbook."""
    base_asset = Asset(base_asset_code, base_asset_issuer)
    counter_asset = Asset(counter_asset_code, counter_asset_issuer)
    asset_params = {
        'base_asset_type': base_asset.type,
        'base_asset_code':
            None if base_asset.is_native() else base_asset.code,
        'base_asset_issuer': base_asset.issuer,
        'counter_asset_type': counter_asset.type,
        'counter_asset_code':
            None if counter_asset.is_native() else counter_asset.code,
        'counter_asset_issuer': counter_asset.issuer,
    }
    endpoint = '/trades'
    params = self.__query_params(offer_id=offer_id, cursor=cursor,
                                 order=order, limit=limit, **asset_params)
    return self.query(endpoint, params)
Load a list of trades optionally filtered by an orderbook .
287
12
228,357
def trade_aggregations(self, resolution, base_asset_code,
                       counter_asset_code, base_asset_issuer=None,
                       counter_asset_issuer=None, start_time=None,
                       end_time=None, order='asc', limit=10, offset=0):
    """Load a list of aggregated historical trade data, optionally
    filtered by an orderbook."""
    allowed_resolutions = (60000, 300000, 900000, 3600000, 86400000,
                           604800000)
    if resolution not in allowed_resolutions:
        raise NotValidParamError("resolution is invalid")
    # Offset must be whole hours, no larger than the resolution, and
    # smaller than 24 hours.
    if (offset > resolution or offset >= 24 * 3600000
            or offset % 3600000 != 0):
        raise NotValidParamError("offset is invalid")
    base_asset = Asset(base_asset_code, base_asset_issuer)
    counter_asset = Asset(counter_asset_code, counter_asset_issuer)
    asset_params = {
        'base_asset_type': base_asset.type,
        'base_asset_code':
            None if base_asset.is_native() else base_asset.code,
        'base_asset_issuer': base_asset.issuer,
        'counter_asset_type': counter_asset.type,
        'counter_asset_code':
            None if counter_asset.is_native() else counter_asset.code,
        'counter_asset_issuer': counter_asset.issuer,
    }
    endpoint = '/trade_aggregations'
    params = self.__query_params(start_time=start_time, end_time=end_time,
                                 resolution=resolution, order=order,
                                 limit=limit, offset=offset, **asset_params)
    return self.query(endpoint, params)
Load a list of aggregated historical trade data optionally filtered by an orderbook .
388
16
228,358
def offer_trades(self, offer_id, cursor=None, order='asc', limit=10):
    """This endpoint represents all trades for a given offer."""
    endpoint = '/offers/{offer_id}/trades'.format(offer_id=offer_id)
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all trades for a given offer .
83
10
228,359
def sign(self, keypair):
    """Sign this transaction envelope with a given keypair.

    Raises SignatureExistError if the keypair has already signed.
    """
    assert isinstance(keypair, Keypair)
    tx_hash = self.hash_meta()
    sig = keypair.sign_decorated(tx_hash)
    existing = [signature.__dict__ for signature in self.signatures]
    if sig.__dict__ in existing:
        raise SignatureExistError('The keypair has already signed')
    self.signatures.append(sig)
Sign this transaction envelope with a given keypair .
98
10
228,360
def signature_base(self):
    """Get the signature base of this transaction envelope.

    Concatenates the network id, the packed envelope type, and the
    packed transaction.
    """
    packer = Xdr.StellarXDRPacker()
    packer.pack_EnvelopeType(Xdr.const.ENVELOPE_TYPE_TX)
    tx_type = packer.get_buffer()
    packer = Xdr.StellarXDRPacker()
    packer.pack_Transaction(self.tx.to_xdr_object())
    tx = packer.get_buffer()
    return self.network_id + tx_type + tx
Get the signature base of this transaction envelope .
118
9
228,361
def get_federation_service(domain, allow_http=False):
    """Retrieve the FEDERATION_SERVER config from a domain's stellar.toml."""
    st = get_stellar_toml(domain, allow_http)
    if st:
        return st.get('FEDERATION_SERVER')
    return None
Retrieve the FEDERATION_SERVER config from a domain's stellar.toml.
53
20
228,362
def get_auth_server(domain, allow_http=False):
    """Retrieve the AUTH_SERVER config from a domain's stellar.toml."""
    st = get_stellar_toml(domain, allow_http)
    if st:
        return st.get('AUTH_SERVER')
    return None
Retrieve the AUTH_SERVER config from a domain's stellar.toml.
50
17
228,363
def get_stellar_toml(domain, allow_http=False):
    """Retrieve the stellar.toml file from a given domain.

    Tries the bare domain first, then the ``www.`` and ``stellar.``
    subdomains, returning the parsed TOML of the first URL that answers
    with HTTP 200, or None if none do.

    Fix: the original let a connection error on any candidate URL
    propagate, aborting the lookup instead of trying the remaining
    hosts; such errors are now treated as a miss for that URL.
    """
    toml_link = '/.well-known/stellar.toml'
    if allow_http:
        protocol = 'http://'
    else:
        protocol = 'https://'
    url_list = ['', 'www.', 'stellar.']
    url_list = [protocol + url + domain + toml_link for url in url_list]
    for url in url_list:
        try:
            r = requests.get(url)
        except requests.RequestException:
            # Unreachable host: fall through to the next candidate.
            continue
        if r.status_code == 200:
            return toml.loads(r.text)
    return None
Retrieve the stellar.toml file from a given domain.
125
13
228,364
def account_xdr_object(self):
    """Create PublicKey XDR object via public key bytes."""
    key_bytes = self.verifying_key.to_bytes()
    return Xdr.types.PublicKey(Xdr.const.KEY_TYPE_ED25519, key_bytes)
Create PublicKey XDR object via public key bytes .
45
11
228,365
def xdr(self):
    """Generate base64 encoded XDR PublicKey object."""
    packer = Xdr.StellarXDRPacker()
    packer.pack_PublicKey(self.account_xdr_object())
    return base64.b64encode(packer.get_buffer())
Generate base64 encoded XDR PublicKey object .
57
11
228,366
def verify(self, data, signature):
    """Verify the signature of a sequence of bytes.

    Translates the ed25519 library's exception into this package's
    BadSignatureError.
    """
    try:
        return self.verifying_key.verify(signature, data)
    except ed25519.BadSignatureError:
        raise BadSignatureError("Signature verification failed.")
Verify the signature of a sequence of bytes .
48
10
228,367
def sign_decorated(self, data):
    """Sign a bytes-like object and return the decorated signature."""
    signature = self.sign(data)
    return Xdr.types.DecoratedSignature(self.signature_hint(), signature)
Sign a bytes - like object and return the decorated signature .
46
12
228,368
def bytes_from_decode_data(s):
    """copy from base64._bytes_from_decode_data

    Coerce *s* to bytes: ASCII-encode strings, pass bytes through, and
    fall back to the buffer protocol for anything else.
    """
    if isinstance(s, (str, unicode)):
        try:
            return s.encode('ascii')
        except UnicodeEncodeError:
            raise NotValidParamError(
                'String argument should contain only ASCII characters')
    if isinstance(s, bytes_types):
        return s
    try:
        return memoryview(s).tobytes()
    except TypeError:
        raise suppress_context(TypeError(
            'Argument should be a bytes-like object or ASCII string, not '
            '{!r}'.format(s.__class__.__name__)))
copy from base64 . _bytes_from_decode_data
137
14
228,369
def to_xdr_amount(value):
    """Convert a string amount to the int64 value sent over the network.

    :param str value: the amount, with at most 7 decimal places.
    :return: the amount scaled by ``ONE`` as an int.
    :raises NotValidParamError: if *value* is not a str, has more than
        7 decimal places, or is not a valid number.
    """
    if not isinstance(value, str):
        # BUG FIX: the message previously read "Value of type '{}' must be
        # of type String" while interpolating the *value* there, producing
        # garbled text; the value and the type now match the wording.
        raise NotValidParamError(
            "Value '{}' must be of type String, but got {}".format(
                value, type(value)))
    # Trap Inexact so value * ONE with leftover decimal places (not
    # representable as int64) raises instead of silently rounding.
    try:
        amount = int(
            (Decimal(value) * ONE).to_integral_exact(
                context=Context(traps=[Inexact])))
    except decimal.Inexact:
        raise NotValidParamError(
            "Value of '{}' must have at most 7 digits after the decimal."
            .format(value))
    except decimal.InvalidOperation:
        raise NotValidParamError(
            "Value of '{}' must represent a positive number."
            .format(value))
    return amount
Converts an amount to the appropriate value to send over the network as a part of an XDR object .
184
22
228,370
def to_xdr_object(self):
    """Build the MEMO_TEXT XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_TEXT, text=self.text)
Creates an XDR Memo object for a transaction with MEMO_TEXT .
38
17
228,371
def to_xdr_object(self):
    """Build the MEMO_ID XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_ID, id=self.memo_id)
Creates an XDR Memo object for a transaction with MEMO_ID .
40
17
228,372
def to_xdr_object(self):
    """Build the MEMO_HASH XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_HASH, hash=self.memo_hash)
Creates an XDR Memo object for a transaction with MEMO_HASH .
41
18
228,373
def to_xdr_object(self):
    """Build the MEMO_RETURN XDR Memo object for a transaction."""
    return Xdr.types.Memo(type=Xdr.const.MEMO_RETURN, retHash=self.memo_return)
Creates an XDR Memo object for a transaction with MEMO_RETURN .
42
18
228,374
def append_hashx_signer(self, hashx, signer_weight, source=None):
    """Add a HashX signer to an account via a SetOptions operation.

    :param hashx: the hash to add as a signer.
    :param int signer_weight: the weight of the new signer.
    :param source: optional operation source account.
    """
    return self.append_set_options_op(
        signer_address=hashx,
        signer_type='hashX',
        signer_weight=signer_weight,
        source=source)
Add a HashX signer to an account .
67
10
228,375
def append_pre_auth_tx_signer(self, pre_auth_tx, signer_weight, source=None):
    """Add a PreAuthTx signer to an account via a SetOptions operation.

    :param pre_auth_tx: the pre-authorized transaction hash to add.
    :param int signer_weight: the weight of the new signer.
    :param source: optional operation source account.
    """
    return self.append_set_options_op(
        signer_address=pre_auth_tx,
        signer_type='preAuthTx',
        signer_weight=signer_weight,
        source=source)
Add a PreAuthTx signer to an account .
77
11
228,376
def next_builder(self):
    """Create a copy of this builder with the sequence number incremented.

    The keypair is carried over so the new builder can sign immediately.
    """
    successor = Builder(
        horizon_uri=self.horizon.horizon_uri,
        address=self.address,
        network=self.network,
        sequence=self.sequence + 1,
        fee=self.fee)
    successor.keypair = self.keypair
    return successor
Create a new builder based off of this one with its sequence number incremented .
70
16
228,377
def get_sequence(self):
    """Fetch this account's current sequence number from Horizon.

    :raises StellarAddressInvalidError: if no address was configured.
    """
    if not self.address:
        raise StellarAddressInvalidError('No address provided.')
    account_info = self.horizon.account(self.address)
    return int(account_info.get('sequence'))
Get the sequence number for a given account via Horizon .
50
11
228,378
def to_dict(self):
    """Serialize this asset's attributes into a plain dict."""
    result = {'code': self.code}
    if self.is_native():
        result['type'] = 'native'
    else:
        result['issuer'] = self.issuer
        result['type'] = self.type
    return result
Generate a dict for this object's attributes.
69
10
228,379
def id_unique(dict_id, name, lineno):
    """Return True if *dict_id* is not already defined in ``name_dict``.

    On a duplicate, sets the module-level ``error_occurred`` flag, prints
    a conflict report, and returns False.
    """
    if dict_id not in name_dict:
        return True
    global error_occurred
    error_occurred = True
    print("ERROR - {0:s} definition {1:s} at line {2:d} conflicts with {3:s}"
          .format(name, dict_id, lineno, name_dict[dict_id]))
    return False
Returns True if dict_id not already used . Otherwise invokes error
93
14
228,380
def main():
    """Base58 encode or decode FILE or standard input to standard output."""
    import sys
    import argparse
    stdout = buffer(sys.stdout)
    parser = argparse.ArgumentParser(description=main.__doc__)
    parser.add_argument(
        'file', metavar='FILE', nargs='?',
        type=argparse.FileType('r'), default='-')
    parser.add_argument(
        '-d', '--decode', action='store_true', help='decode data')
    parser.add_argument(
        '-c', '--check', action='store_true',
        help='append a checksum before encoding')
    args = parser.parse_args()
    # Dispatch on the (decode, check) flag pair.
    dispatch = {
        (False, False): b58encode,
        (False, True): b58encode_check,
        (True, False): b58decode,
        (True, True): b58decode_check,
    }
    fun = dispatch[(args.decode, args.check)]
    data = buffer(args.file).read().rstrip(b'\n')
    try:
        result = fun(data)
    except Exception as e:
        sys.exit(e)
    if not isinstance(result, bytes):
        result = result.encode('ascii')
    stdout.write(result)
Base58 encode or decode FILE or standard input to standard output .
287
13
228,381
def _Dhcpcd ( self , interfaces , logger ) : for interface in interfaces : dhcpcd = [ '/sbin/dhcpcd' ] try : subprocess . check_call ( dhcpcd + [ '-x' , interface ] ) except subprocess . CalledProcessError : # Dhcpcd not yet running for this device. logger . info ( 'Dhcpcd not yet running for interface %s.' , interface ) try : subprocess . check_call ( dhcpcd + [ interface ] ) except subprocess . CalledProcessError : # The interface is already active. logger . warning ( 'Could not activate interface %s.' , interface )
Use dhcpcd to activate the interfaces .
141
9
228,382
def _CreateTempDir ( prefix , run_dir = None ) : temp_dir = tempfile . mkdtemp ( prefix = prefix + '-' , dir = run_dir ) try : yield temp_dir finally : shutil . rmtree ( temp_dir )
Context manager for creating a temporary directory .
58
8
228,383
def _RunScripts(self, run_dir=None):
    """Retrieve metadata scripts into a temp dir and execute them.

    The finish message is logged even if retrieval or execution raises.
    """
    with _CreateTempDir(self.script_type, run_dir=run_dir) as dest_dir:
        try:
            self.logger.info('Starting %s scripts.', self.script_type)
            script_dict = self.retriever.GetScripts(dest_dir)
            self.executor.RunScripts(script_dict)
        finally:
            self.logger.info('Finished running %s scripts.', self.script_type)
Retrieve metadata scripts and execute them .
114
8
228,384
def _GetInstanceConfig ( self ) : try : instance_data = self . metadata_dict [ 'instance' ] [ 'attributes' ] except KeyError : instance_data = { } self . logger . warning ( 'Instance attributes were not found.' ) try : project_data = self . metadata_dict [ 'project' ] [ 'attributes' ] except KeyError : project_data = { } self . logger . warning ( 'Project attributes were not found.' ) return ( instance_data . get ( 'google-instance-configs' ) or project_data . get ( 'google-instance-configs' ) )
Get the instance configuration specified in metadata .
135
8
228,385
def _GenerateSshKey(self, key_type, key_dest):
    """Generate a new SSH key pair and install it at *key_dest*.

    :param str key_type: the ssh-keygen key type (e.g. ``rsa``).
    :param str key_dest: destination path for the private key; the public
        key is written alongside with a ``.pub`` suffix.
    """
    # Reserve a unique temp path for ssh-keygen to write into; the file
    # itself is deleted on close, only the name is kept.
    with tempfile.NamedTemporaryFile(prefix=key_type, delete=True) as temp:
        temp_key = temp.name
    command = ['ssh-keygen', '-t', key_type, '-f', temp_key, '-N', '', '-q']
    try:
        self.logger.info('Generating SSH key %s.', key_dest)
        subprocess.check_call(command)
    except subprocess.CalledProcessError:
        self.logger.warning('Could not create SSH key %s.', key_dest)
        return
    shutil.move(temp_key, key_dest)
    shutil.move('%s.pub' % temp_key, '%s.pub' % key_dest)
    file_utils.SetPermissions(key_dest, mode=0o600)
    file_utils.SetPermissions('%s.pub' % key_dest, mode=0o644)
Generate a new SSH key .
242
7
228,386
def _StartSshd(self):
    """Start and reload the SSH daemon unless systemd manages it.

    On systemd hosts this is a no-op: instance setup systemd scripts
    block sshd from starting, so exit as early as possible.
    """
    if os.path.exists(constants.LOCALBASE + '/bin/systemctl'):
        return
    if os.path.exists('/etc/init.d/ssh') or os.path.exists('/etc/init/ssh.conf'):
        subprocess.call(['service', 'ssh', 'start'])
        subprocess.call(['service', 'ssh', 'reload'])
    elif (os.path.exists('/etc/init.d/sshd')
          or os.path.exists('/etc/init/sshd.conf')):
        subprocess.call(['service', 'sshd', 'start'])
        subprocess.call(['service', 'sshd', 'reload'])
Initialize the SSH daemon .
209
6
228,387
def _SetSshHostKeys(self, host_key_types=None):
    """Regenerate SSH host keys when the instance ID has changed.

    :param host_key_types: optional comma-separated key types that must
        exist even if no matching key file is present yet.
    """
    section = 'Instance'
    instance_id = self._GetInstanceId()
    stored_id = self.instance_config.GetOptionString(section, 'instance_id')
    if instance_id != stored_id:
        self.logger.info(
            'Generating SSH host keys for instance %s.', instance_id)
        file_regex = re.compile(r'ssh_host_(?P<type>[a-z0-9]*)_key\Z')
        key_dir = '/etc/ssh'
        existing = [f for f in os.listdir(key_dir) if file_regex.match(f)]
        requested_types = host_key_types.split(',') if host_key_types else []
        requested = ['ssh_host_%s_key' % t for t in requested_types]
        # Regenerate every existing key file plus any requested type.
        for key_file in set(existing) | set(requested):
            key_type = file_regex.match(key_file).group('type')
            self._GenerateSshKey(key_type, os.path.join(key_dir, key_file))
        self._StartSshd()
        self.instance_config.SetOption(
            section, 'instance_id', str(instance_id))
Regenerates SSH host keys when the VM is restarted with a new IP address .
319
18
228,388
def _SetupBotoConfig(self):
    """Set the boto config so GSUtil works with provisioned service accounts.

    Filesystem errors while writing the config are logged, not raised.
    """
    project_id = self._GetNumericProjectId()
    try:
        boto_config.BotoConfig(project_id, debug=self.debug)
    except (IOError, OSError) as e:
        self.logger.warning(str(e))
Set the boto config so GSUtil works with provisioned service accounts .
69
16
228,389
def _DownloadAuthUrl(self, url, dest_dir):
    """Download a Google Storage URL using an authentication token.

    Falls back to an unauthenticated download when no token can be
    obtained from the metadata server.

    :param str url: the URL to fetch.
    :param str dest_dir: directory to write the downloaded file into.
    :return: the path of the downloaded file, or None on failure.
    """
    dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
    dest_file.close()
    dest = dest_file.name
    self.logger.info(
        'Downloading url from %s to %s using authentication token.',
        url, dest)
    if not self.token:
        response = self.watcher.GetMetadata(
            self.token_metadata_key, recursive=False, retry=False)
        if not response:
            self.logger.info(
                'Authentication token not found. Attempting unauthenticated '
                'download.')
            return self._DownloadUrl(url, dest_dir)
        self.token = '%s %s' % (
            response.get('token_type', ''),
            response.get('access_token', ''))
    try:
        request = urlrequest.Request(url)
        request.add_unredirected_header('Metadata-Flavor', 'Google')
        request.add_unredirected_header('Authorization', self.token)
        content = urlrequest.urlopen(request).read().decode('utf-8')
    except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
        self.logger.warning('Could not download %s. %s.', url, str(e))
        return None
    with open(dest, 'wb') as f:
        # BUG FIX: content is a str after decode(); writing a str to a
        # file opened 'wb' raises TypeError on Python 3. Encode it back
        # to bytes for the binary write.
        f.write(content.encode('utf-8'))
    return dest
Download a Google Storage URL using an authentication token .
335
10
228,390
def _DownloadUrl(self, url, dest_dir):
    """Download *url* into *dest_dir*.

    :return: the path of the downloaded file, or None on any failure
        (all errors are logged, never raised).
    """
    dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)
    dest_file.close()
    dest = dest_file.name
    self.logger.info('Downloading url from %s to %s.', url, dest)
    try:
        urlretrieve.urlretrieve(url, dest)
        return dest
    except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:
        self.logger.warning('Could not download %s. %s.', url, str(e))
    except Exception as e:
        self.logger.warning('Exception downloading %s. %s.', url, str(e))
    return None
Download a script from a given URL .
166
8
228,391
def _DownloadScript ( self , url , dest_dir ) : # Check for the preferred Google Storage URL format: # gs://<bucket>/<object> if url . startswith ( r'gs://' ) : # Convert the string into a standard URL. url = re . sub ( '^gs://' , 'https://storage.googleapis.com/' , url ) return self . _DownloadAuthUrl ( url , dest_dir ) header = r'http[s]?://' domain = r'storage\.googleapis\.com' # Many of the Google Storage URLs are supported below. # It is prefered that customers specify their object using # its gs://<bucket>/<object> url. bucket = r'(?P<bucket>[a-z0-9][-_.a-z0-9]*[a-z0-9])' # Accept any non-empty string that doesn't contain a wildcard character obj = r'(?P<obj>[^\*\?]+)' # Check for the Google Storage URLs: # http://<bucket>.storage.googleapis.com/<object> # https://<bucket>.storage.googleapis.com/<object> gs_regex = re . compile ( r'\A%s%s\.%s/%s\Z' % ( header , bucket , domain , obj ) ) match = gs_regex . match ( url ) if match : return self . _DownloadAuthUrl ( url , dest_dir ) # Check for the other possible Google Storage URLs: # http://storage.googleapis.com/<bucket>/<object> # https://storage.googleapis.com/<bucket>/<object> # # The following are deprecated but checked: # http://commondatastorage.googleapis.com/<bucket>/<object> # https://commondatastorage.googleapis.com/<bucket>/<object> gs_regex = re . compile ( r'\A%s(commondata)?%s/%s/%s\Z' % ( header , domain , bucket , obj ) ) match = gs_regex . match ( url ) if match : return self . _DownloadAuthUrl ( url , dest_dir ) # Unauthenticated download of the object. return self . _DownloadUrl ( url , dest_dir )
Download the contents of the URL to the destination .
538
10
228,392
def _GetAttributeScripts ( self , attribute_data , dest_dir ) : script_dict = { } attribute_data = attribute_data or { } metadata_key = '%s-script' % self . script_type metadata_value = attribute_data . get ( metadata_key ) if metadata_value : self . logger . info ( 'Found %s in metadata.' , metadata_key ) with tempfile . NamedTemporaryFile ( mode = 'w' , dir = dest_dir , delete = False ) as dest : dest . write ( metadata_value . lstrip ( ) ) script_dict [ metadata_key ] = dest . name metadata_key = '%s-script-url' % self . script_type metadata_value = attribute_data . get ( metadata_key ) if metadata_value : self . logger . info ( 'Found %s in metadata.' , metadata_key ) script_dict [ metadata_key ] = self . _DownloadScript ( metadata_value , dest_dir ) return script_dict
Retrieve the scripts from attribute metadata .
222
8
228,393
def GetScripts(self, dest_dir):
    """Retrieve the scripts to execute.

    Instance-level scripts take precedence; project-level scripts are
    used only when no instance scripts are found.
    """
    metadata_dict = self.watcher.GetMetadata() or {}
    try:
        instance_data = metadata_dict['instance']['attributes']
    except KeyError:
        instance_data = None
        self.logger.warning('Instance attributes were not found.')
    try:
        project_data = metadata_dict['project']['attributes']
    except KeyError:
        project_data = None
        self.logger.warning('Project attributes were not found.')
    return (self._GetAttributeScripts(instance_data, dest_dir)
            or self._GetAttributeScripts(project_data, dest_dir))
Retrieve the scripts to execute .
151
7
228,394
def _MakeExecutable ( self , metadata_script ) : mode = os . stat ( metadata_script ) . st_mode os . chmod ( metadata_script , mode | stat . S_IEXEC )
Add executable permissions to a file .
46
7
228,395
def RunScripts(self, script_dict):
    """Run the metadata scripts; a URL script executes before an inline one."""
    metadata_types = ['%s-script-url', '%s-script']
    candidate_keys = [pattern % self.script_type for pattern in metadata_types]
    present_keys = [key for key in candidate_keys if script_dict.get(key)]
    if not present_keys:
        self.logger.info('No %s scripts found in metadata.', self.script_type)
    for key in present_keys:
        script = script_dict.get(key)
        self._MakeExecutable(script)
        self._RunScript(key, script)
Run the metadata scripts ; execute a URL script first if one is provided .
150
15
228,396
def _AddHeader ( self , fp ) : text = textwrap . wrap ( textwrap . dedent ( self . config_header ) , break_on_hyphens = False ) fp . write ( '\n' . join ( [ '# ' + line for line in text ] ) ) fp . write ( '\n\n' )
Create a file header in the config .
77
8
228,397
def SetOption(self, section, option, value, overwrite=True):
    """Set *option* in *section* of the config.

    :param overwrite: when False, an existing value is left untouched.
    """
    if not overwrite and self.config.has_option(section, option):
        return
    if not self.config.has_section(section):
        self.config.add_section(section)
    self.config.set(section, option, str(value))
Set the value of an option in the config file .
73
11
228,398
def WriteConfig(self, config_file=None):
    """Write the config values to *config_file* while holding a lock file.

    :param config_file: destination path; defaults to ``self.config_file``.
    """
    config_file = config_file or self.config_file
    config_name = os.path.splitext(os.path.basename(config_file))[0]
    config_lock = '%s/lock/google_%s.lock' % (
        constants.LOCALSTATEDIR, config_name)
    with file_utils.LockFile(config_lock):
        with open(config_file, 'w') as config_fp:
            if self.config_header:
                self._AddHeader(config_fp)
            self.config.write(config_fp)
Write the config values to a given file .
142
9
228,399
def Logger(name, debug=False, facility=None):
    """Build a logger with optional console and SysLog handlers.

    :param str name: logger name, also used as the message prefix.
    :param bool debug: add a DEBUG-level console handler.
    :param facility: SysLog facility; when set, an INFO-level SysLog
        handler is attached.
    """
    logger = logging.getLogger(name)
    logger.handlers = []
    logger.addHandler(logging.NullHandler())
    logger.propagate = False
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(name + ': %(levelname)s %(message)s')
    if debug:
        # Console handler for interactive debugging.
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        console.setFormatter(formatter)
        logger.addHandler(console)
    if facility:
        # SysLog handler for production log shipping.
        syslog = logging.handlers.SysLogHandler(
            address=constants.SYSLOG_SOCKET, facility=facility)
        syslog.setLevel(logging.INFO)
        syslog.setFormatter(formatter)
        logger.addHandler(syslog)
    return logger
Get a logging object with handlers for sending logs to SysLog .
215
14