idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
5,900
def check_orthogonal ( angle ) : flow_dir_taudem = - 1 flow_dir = - 1 if MathClass . floatequal ( angle , FlowModelConst . e ) : flow_dir_taudem = FlowModelConst . e flow_dir = 1 elif MathClass . floatequal ( angle , FlowModelConst . ne ) : flow_dir_taudem = FlowModelConst . ne flow_dir = 128 elif MathClass . floatequa...
Check the given Dinf angle based on D8 flow direction encoding code by ArcGIS
5,901
def start(self, *args, **kwargs):
    """Start the instrument thread, clearing the stop flag first."""
    # Reset the flag so a previously-stopped plant can run again.
    self._stop = False
    super(Plant, self).start(*args, **kwargs)
start the instrument thread
5,902
def quit(self, *args, **kwargs):
    """Quit the instrument thread after asking the worker to stop."""
    self.stop()
    self._stop = True
    # Sleep roughly two update periods so the loop can observe the flag.
    grace_ms = 2 * int(1e3 / self.settings['update frequency'])
    self.msleep(grace_ms)
    super(Plant, self).quit(*args, **kwargs)
quit the instrument thread
5,903
def controler_output ( self , current_value ) : set_point = self . settings [ 'set_point' ] Kp = self . settings [ 'gains' ] [ 'proportional' ] Ki = self . settings [ 'gains' ] [ 'integral' ] output_range = self . settings [ 'output_range' ] time_step = self . settings [ 'time_step' ] error_new = set_point - current_va...
Calculate PI output value for given reference input and feedback
5,904
def get_opts ( opts ) : defaults = { 'board' : None , 'terrain' : Opt . random , 'numbers' : Opt . preset , 'ports' : Opt . preset , 'pieces' : Opt . preset , 'players' : Opt . preset , } _opts = defaults . copy ( ) if opts is None : opts = dict ( ) try : for key , val in opts . copy ( ) . items ( ) : if key == 'board'...
Validate options and apply defaults for options not supplied .
5,905
def _get_tiles(board=None, terrain=None, numbers=None):
    """Generate a list of tiles using the given terrain and numbers options.

    When a ``board`` string is supplied it takes precedence and is parsed
    directly; otherwise tiles are generated from the option values.
    """
    if board is None:
        return _generate_tiles(terrain, numbers)
    return _read_tiles_from_string(board)
Generate a list of tiles using the given terrain and numbers options .
5,906
def _get_ports ( port_opts ) : if port_opts in [ Opt . preset , Opt . debug ] : _preset_ports = [ ( 1 , 'NW' , catan . board . PortType . any3 ) , ( 2 , 'W' , catan . board . PortType . wood ) , ( 4 , 'W' , catan . board . PortType . brick ) , ( 5 , 'SW' , catan . board . PortType . any3 ) , ( 6 , 'SE' , catan . board ...
Generate a list of ports using the given options .
5,907
def _get_pieces ( tiles , ports , players_opts , pieces_opts ) : if pieces_opts == Opt . empty : return dict ( ) elif pieces_opts == Opt . debug : players = catan . game . Game . get_debug_players ( ) return { ( hexgrid . NODE , 0x23 ) : catan . pieces . Piece ( catan . pieces . PieceType . settlement , players [ 0 ] )...
Generate a dictionary of pieces using the given options .
5,908
def create_feature(self, **kwargs):
    """Create an enumerated sequence feature.

    Runs asynchronously when a 'callback' kwarg is supplied (the HTTP-info
    variant invokes it); otherwise blocks and returns the response data
    only (``_return_http_data_only`` is forced on).
    """
    kwargs['_return_http_data_only'] = True
    # Both branches of the original returned the same call's result.
    return self.create_feature_with_http_info(**kwargs)
Create an enumerated sequence feature
5,909
def list_features(self, locus, **kwargs):
    """List the enumerated sequence features at a locus.

    Asynchronous when a 'callback' kwarg is given; otherwise returns the
    response data directly (``_return_http_data_only`` is forced on).
    """
    kwargs['_return_http_data_only'] = True
    return self.list_features_with_http_info(locus, **kwargs)
List the enumerated sequence features at a locus
5,910
def list_features_0(self, locus, term, **kwargs):
    """List the enumerated sequence features matching a term at a locus.

    Asynchronous when a 'callback' kwarg is given; otherwise returns the
    response data directly (``_return_http_data_only`` is forced on).
    """
    kwargs['_return_http_data_only'] = True
    return self.list_features_0_with_http_info(locus, term, **kwargs)
List the enumerated sequence features matching a term at a locus
5,911
def list_features_1 ( self , locus , term , rank , ** kwargs ) : kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'callback' ) : return self . list_features_1_with_http_info ( locus , term , rank , ** kwargs ) else : ( data ) = self . list_features_1_with_http_info ( locus , term , rank , ** kwargs ) return...
List the enumerated sequence features matching a term and rank at a locus
5,912
def request ( self , method , uri , headers = None , bodyProducer = None ) : if self . _parent_trace is None : trace = Trace ( method ) else : trace = self . _parent_trace . child ( method ) if self . _endpoint is not None : trace . set_endpoint ( self . _endpoint ) if headers is None : headers = Headers ( { } ) header...
Send a client request following HTTP redirects .
5,913
def stringify(data):
    """Stringify numeric values in a list of JSON-style dicts.

    Every numeric value becomes a string, except 'candidates' (coerced to
    int) and 'zipcode' (zero-padded to five digits). Non-numeric values
    pass through untouched.
    """
    def serialize(key, value):
        if key == "candidates":
            return int(value)
        if not isinstance(value, numbers.Number):
            return value
        if key == "zipcode":
            return str(value).zfill(5)
        return str(value)

    return [{key: serialize(key, value) for key, value in record.items()}
            for record in data]
Ensure all values in the dictionary are strings except for the value for candidate which should just be an integer .
5,914
def post ( self , endpoint , data ) : headers = { "Content-Type" : "application/json" , "Accept" : "application/json" , "x-standardize-only" : "true" if self . standardize else "false" , "x-include-invalid" : "true" if self . invalid else "false" , "x-accept-keypair" : "true" if self . accept_keypair else "false" , } i...
Executes the HTTP POST request
5,915
def street_addresses(self, addresses):
    """API method for verifying street addresses and geolocating.

    ``addresses`` may be a list of request dicts or a list of plain
    address strings; strings are wrapped into one dict each.
    """
    if type(addresses[0]) != dict:
        # BUG FIX: the original used a dict comprehension
        # [{"street": arg for arg in addresses}], which built a SINGLE
        # dict whose "street" key was overwritten by every address,
        # silently dropping all but the last one. Build one dict per
        # address instead, as the street-address endpoint expects.
        addresses = [{"street": arg} for arg in addresses]
    return AddressCollection(self.post("street-address", data=addresses))
API method for verifying street address and geolocating
5,916
def street_address(self, address):
    """Geocode a single address; return one Address object or None."""
    matches = self.street_addresses([address])
    if len(matches):
        return Address(matches[0])
    # No candidate returned by the API.
    return None
Geocode one and only one address; get a single Address object back .
5,917
def load(schema, uri=None, spec=None, provider=None):
    """Scaffold a validator against a schema.

    Defaults the URI to the document root ('#') when none is given.
    """
    factory = Factory(provider, spec)
    target_uri = uri or '#'
    return factory(schema, target_uri)
Scaffold a validator against a schema .
5,918
def sizeHint(self):
    """Give Qt a starting size for the widget during window resizing."""
    width, height = self.get_width_height()
    return QtCore.QSize(width, height)
gives qt a starting point for widget size during window resizing
5,919
def stage ( obj , parent = None , member = None ) : obj = Staged ( obj , parent , member ) if isinstance ( obj , Mapping ) : for key , value in obj . items ( ) : stage ( value , obj , key ) elif isinstance ( obj , Sequence ) and not isinstance ( obj , string_types ) : for index , value in enumerate ( obj ) : stage ( va...
Prepare obj to be staged .
5,920
def rotate ( key_prefix , key_ext , bucket_name , daily_backups = 7 , weekly_backups = 4 , aws_key = None , aws_secret = None ) : session = boto3 . Session ( aws_access_key_id = aws_key , aws_secret_access_key = aws_secret ) s3 = session . resource ( 's3' ) bucket = s3 . Bucket ( bucket_name ) keys = bucket . objects ....
Delete old files we have uploaded to S3 according to the grandfather - father - son strategy
5,921
def splitext(filename):
    """Split at the FIRST dot (ignoring a leading one) into (root, ext).

    Unlike os.path.splitext this keeps compound extensions such as
    '.tar.bz2' or '.ext.gz' together, which helps date-stamping files.
    Returns (filename, '') when there is no extension.

    NOTE: removed an unreachable ``return os.path.splitext(filename)``
    that followed the returns below (dead code in the original).
    """
    index = filename.find('.')
    if index == 0:
        # Leading dot (hidden file): split at the next dot instead.
        index = 1 + filename[1:].find('.')
    if index == -1:
        return filename, ''
    return filename[:index], filename[index:]
Return the filename and extension according to the first dot in the filename . This helps date stamping . tar . bz2 or . ext . gz files properly .
5,922
def start_fitting ( self ) : self . queue = queue . Queue ( ) self . peak_vals = [ ] self . fit_thread = QThread ( ) self . fitobj = self . do_fit ( str ( self . data_filepath . text ( ) ) , self . matplotlibwidget , self . queue , self . peak_vals , self . peak_locs ) self . fitobj . moveToThread ( self . fit_thread )...
Launches the fitting routine on another thread
5,923
def _get_bandgap_from_bands ( energies , nelec ) : nelec = int ( nelec ) valence = [ x [ nelec - 1 ] for x in energies ] conduction = [ x [ nelec ] for x in energies ] return max ( min ( conduction ) - max ( valence ) , 0.0 )
Compute difference in conduction band min and valence band max
5,924
def _get_bandgap_eigenval ( eigenval_fname , outcar_fname ) : with open ( outcar_fname , "r" ) as f : parser = OutcarParser ( ) nelec = next ( iter ( filter ( lambda x : "number of electrons" in x , parser . parse ( f . readlines ( ) ) ) ) ) [ "number of electrons" ] with open ( eigenval_fname , "r" ) as f : eigenval_i...
Get the bandgap from the EIGENVAL file
5,925
def _get_bandgap_doscar ( filename ) : with open ( filename ) as fp : for i in range ( 6 ) : l = fp . readline ( ) efermi = float ( l . split ( ) [ 3 ] ) step1 = fp . readline ( ) . split ( ) [ 0 ] step2 = fp . readline ( ) . split ( ) [ 0 ] step_size = float ( step2 ) - float ( step1 ) not_found = True while not_found...
Get the bandgap from the DOSCAR file
5,926
def get_band_gap ( self ) : if self . outcar is not None and self . eignval is not None : bandgap = VaspParser . _get_bandgap_eigenval ( self . eignval , self . outcar ) elif self . doscar is not None : bandgap = VaspParser . _get_bandgap_doscar ( self . doscar ) else : return None return Property ( scalars = [ Scalar ...
Get the bandgap either from the EIGENVAL or DOSCAR files
5,927
def get_value_by_xy ( self , x , y ) : if x < self . xMin or x > self . xMax or y < self . yMin or y > self . yMax : return None else : row = self . nRows - int ( numpy . ceil ( ( y - self . yMin ) / self . dx ) ) col = int ( numpy . floor ( ( x - self . xMin ) / self . dx ) ) value = self . data [ row ] [ col ] if val...
Get raster value by xy coordinates .
5,928
def get_central_coors ( self , row , col ) : if row < 0 or row >= self . nRows or col < 0 or col >= self . nCols : raise ValueError ( "The row (%d) or col (%d) must be >=0 and less than " "nRows (%d) or nCols (%d)!" % ( row , col , self . nRows , self . nCols ) ) else : tmpx = self . xMin + ( col + 0.5 ) * self . dx tm...
Get the coordinates of central grid .
5,929
def read_raster ( raster_file ) : ds = gdal_Open ( raster_file ) band = ds . GetRasterBand ( 1 ) data = band . ReadAsArray ( ) xsize = band . XSize ysize = band . YSize nodata_value = band . GetNoDataValue ( ) geotrans = ds . GetGeoTransform ( ) dttype = band . DataType srs = osr_SpatialReference ( ) srs . ImportFromWk...
Read raster by GDAL .
5,930
def get_mask_from_raster ( rasterfile , outmaskfile , keep_nodata = False ) : raster_r = RasterUtilClass . read_raster ( rasterfile ) xsize = raster_r . nCols ysize = raster_r . nRows nodata_value = raster_r . noDataValue srs = raster_r . srs x_min = raster_r . xMin y_max = raster_r . yMax dx = raster_r . dx data = ras...
Generate mask data from a given raster data .
5,931
def raster_reclassify ( srcfile , v_dict , dstfile , gdaltype = GDT_Float32 ) : src_r = RasterUtilClass . read_raster ( srcfile ) src_data = src_r . data dst_data = numpy . copy ( src_data ) if gdaltype == GDT_Float32 and src_r . dataType != GDT_Float32 : gdaltype = src_r . dataType no_data = src_r . noDataValue new_no...
Reclassify raster by given classifier dict .
5,932
def write_gtiff_file ( f_name , n_rows , n_cols , data , geotransform , srs , nodata_value , gdal_type = GDT_Float32 ) : UtilClass . mkdir ( os . path . dirname ( FileClass . get_file_fullpath ( f_name ) ) ) driver = gdal_GetDriverByName ( str ( 'GTiff' ) ) try : ds = driver . Create ( f_name , n_cols , n_rows , 1 , gd...
Output Raster to GeoTiff format file .
5,933
def write_asc_file ( filename , data , xsize , ysize , geotransform , nodata_value ) : UtilClass . mkdir ( os . path . dirname ( FileClass . get_file_fullpath ( filename ) ) ) header = 'NCOLS %d\n' 'NROWS %d\n' 'XLLCENTER %f\n' 'YLLCENTER %f\n' 'CELLSIZE %f\n' 'NODATA_VALUE %f' % ( xsize , ysize , geotransform [ 0 ] + ...
Output Raster to ASCII file .
5,934
def raster_to_gtiff ( tif , geotif , change_nodata = False , change_gdal_type = False ) : rst_file = RasterUtilClass . read_raster ( tif ) nodata = rst_file . noDataValue if change_nodata : if not MathClass . floatequal ( rst_file . noDataValue , DEFAULT_NODATA ) : nodata = DEFAULT_NODATA rst_file . data [ rst_file . d...
Converting Raster format to GeoTIFF .
5,935
def raster_to_asc(raster_f, asc_f):
    """Convert a raster file to ASCII raster format."""
    src = RasterUtilClass.read_raster(raster_f)
    RasterUtilClass.write_asc_file(asc_f, src.data, src.nCols, src.nRows,
                                   src.geotrans, src.noDataValue)
Converting Raster format to ASCII raster .
5,936
def raster_statistics(raster_file):
    """Get (min, max, mean, std) statistics of band 1 of a raster."""
    dataset = gdal_Open(raster_file)
    # Keep the dataset referenced so the band object stays valid.
    band = dataset.GetRasterBand(1)
    # False => exact statistics, not approximated from overviews.
    minv, maxv, meanv, std = band.ComputeStatistics(False)
    return minv, maxv, meanv, std
Get basic statistics of raster data .
5,937
def split_raster ( rs , split_shp , field_name , temp_dir ) : UtilClass . rmmkdir ( temp_dir ) ds = ogr_Open ( split_shp ) lyr = ds . GetLayer ( 0 ) lyr . ResetReading ( ) ft = lyr . GetNextFeature ( ) while ft : cur_field_name = ft . GetFieldAsString ( field_name ) for r in rs : cur_file_name = r . split ( os . sep ) ...
Split raster by given shapefile and field name .
5,938
def get_negative_dem ( raw_dem , neg_dem ) : origin = RasterUtilClass . read_raster ( raw_dem ) max_v = numpy . max ( origin . data ) temp = origin . data < 0 neg = numpy . where ( temp , origin . noDataValue , max_v - origin . data ) RasterUtilClass . write_gtiff_file ( neg_dem , origin . nRows , origin . nCols , neg ...
Get negative DEM data .
5,939
def raster_binarization(given_value, rasterfilename):
    """Binarize a raster: 1 where cells equal given_value, else 0."""
    origin = RasterUtilClass.read_raster(rasterfilename)
    return numpy.where(origin.data == given_value, 1, 0)
Make the raster into binarization .
5,940
def raster_erosion ( rasterfile ) : if is_string ( rasterfile ) : origin_raster = RasterUtilClass . read_raster ( str ( rasterfile ) ) elif isinstance ( rasterfile , Raster ) : origin_raster = rasterfile . data elif isinstance ( rasterfile , numpy . ndarray ) : origin_raster = rasterfile else : return "Your rasterfile ...
Erode the raster image .
5,941
def raster_dilation ( rasterfile ) : if is_string ( rasterfile ) : origin_raster = RasterUtilClass . read_raster ( str ( rasterfile ) ) elif isinstance ( rasterfile , Raster ) : origin_raster = rasterfile . data elif isinstance ( rasterfile , numpy . ndarray ) : origin_raster = rasterfile else : return 'Your rasterfile...
Dilate the raster image .
5,942
def openning ( input_rasterfilename , times ) : input_raster = RasterUtilClass . read_raster ( input_rasterfilename ) openning_raster = input_raster for i in range ( times ) : openning_raster = RasterUtilClass . raster_erosion ( openning_raster ) for i in range ( times ) : openning_raster = RasterUtilClass . raster_dil...
Do openning .
5,943
def closing ( input_rasterfilename , times ) : input_raster = RasterUtilClass . read_raster ( input_rasterfilename ) closing_raster = input_raster for i in range ( times ) : closing_raster = RasterUtilClass . raster_dilation ( closing_raster ) for i in range ( times ) : closing_raster = RasterUtilClass . raster_erosion...
Do closing .
5,944
def calculate_tx_fee(tx_size: int) -> Decimal:
    """Return the tx fee for a size in bytes: 0.01/kB, floored at 0.001.

    FIX: all arithmetic now stays in Decimal built from strings. The
    original computed ``Decimal((tx_size / 1000) * per_kb_cost)`` and
    ``Decimal(0.001)``, baking binary-float noise (e.g.
    0.001000000000000000020816...) into a monetary value.
    """
    per_kb_cost = Decimal("0.01")
    min_fee = Decimal("0.001")
    fee = Decimal(tx_size) / 1000 * per_kb_cost
    return max(fee, min_fee)
return tx fee from tx size in bytes
5,945
def p2sh_p2pkh_script(network: str, address: str) -> P2shScript:
    """Build a P2SH script embedding a P2PKH script for the address."""
    net = net_query(network)
    inner = P2pkhScript(Address.from_string(network=net, string=address))
    return P2shScript(inner)
p2sh embedding p2pkh
5,946
def tx_output(network: str, value: Decimal, n: int, script: ScriptSig) -> TxOut:
    """Create a TxOut object, converting the coin value to base units."""
    net = net_query(network)
    # Amounts arrive in whole coins; TxOut wants integer base units.
    base_units = int(value * net.to_unit)
    return TxOut(network=net, value=base_units, n=n, script_pubkey=script)
create TxOut object
5,947
def make_raw_transaction ( network : str , inputs : list , outputs : list , locktime : Locktime , timestamp : int = int ( time ( ) ) , version : int = 1 , ) -> MutableTransaction : network_params = net_query ( network ) if network_params . name . startswith ( "peercoin" ) : return MutableTransaction ( version = version...
create raw transaction
5,948
def find_parent_outputs(provider: Provider, utxo: TxIn) -> TxOut:
    """Convert a TxIn into its parent TxOut (required by btcpy signing)."""
    net = net_query(provider.network)
    vout_json = provider.getrawtransaction(utxo.txid, 1)['vout'][utxo.txout]
    return TxOut.from_json(vout_json, network=net)
due to design of the btcpy library TxIn object must be converted to TxOut object before signing
5,949
def sign_transaction(provider: Provider, unsigned: MutableTransaction,
                     key: Kutil) -> Transaction:
    """Sign a transaction with a Kutil key.

    btcpy needs each input's parent output to build the signature hash.
    """
    parents = [find_parent_outputs(provider, txin) for txin in unsigned.ins]
    return key.sign_transaction(parents, unsigned)
sign transaction with Kutil
5,950
def set_style(style='basic', **kwargs):
    """Apply Matplotlib styles for high-quality graphs.

    Call this at the beginning of your script; 'basic' is always applied
    first. Pair with fix_style at the end for per-axe tweaks.
    """
    styles = _read_style(style)
    if styles[0] != 'basic':
        styles = ['basic'] + styles
    for name in styles:
        _set_style(name, **kwargs)
Changes Matplotlib basic style to produce high quality graphs . Call this function at the beginning of your script . You can even further improve graphs with a call to fix_style at the end of your script .
5,951
def fix_style ( style = 'basic' , ax = None , ** kwargs ) : style = _read_style ( style ) for s in style : if not s in style_params . keys ( ) : avail = [ f . replace ( '.mplstyle' , '' ) for f in os . listdir ( _get_lib ( ) ) if f . endswith ( '.mplstyle' ) ] raise ValueError ( '{0} is not a valid style. ' . format ( ...
Add an extra formatting layer to an axe that couldn t be changed directly in matplotlib . rcParams or with styles . Apply this function to every axe you created .
5,952
def _get_label ( self ) : if self . _label is None : foundfiles = False for f in self . _files : if ".files" in f : foundfiles = True self . _label = f . split ( "." ) [ 0 ] with open ( self . _label + '.files' , 'r' ) as fp : line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + ".in" : fp . close ( ) ...
Find the label for the output files for this calculation
5,953
def next_player(self):
    """Return the player whose turn it will be next."""
    game = self.game
    logging.warning('turn={}, players={}'.format(game._cur_turn, game.players))
    # Wrap around to the first player after the last.
    return game.players[(game._cur_turn + 1) % len(game.players)]
Returns the player whose turn it will be next .
5,954
def _estimate_progress ( self ) : estimate = True current_subscript = self . _current_subscript_stage [ 'current_subscript' ] num_subscripts = len ( self . scripts ) if self . iterator_type == 'loop' : num_iterations = self . settings [ 'num_loops' ] elif self . iterator_type == 'sweep' : sweep_range = self . settings ...
estimates the current progress that is then used in _receive_signal
5,955
def plot ( self , figure_list ) : if self . _current_subscript_stage is not None : if self . _current_subscript_stage [ 'current_subscript' ] is not None : self . _current_subscript_stage [ 'current_subscript' ] . plot ( figure_list ) if ( self . is_running is False ) and not ( self . data == { } or self . data is None...
When each subscript is called uses its standard plotting
5,956
def get_default_settings ( sub_scripts , script_order , script_execution_freq , iterator_type ) : def populate_sweep_param ( scripts , parameter_list , trace = '' ) : def get_parameter_from_dict ( trace , dic , parameter_list , valid_values = None ) : if valid_values is None and isinstance ( dic , Parameter ) : valid_v...
assigning the actual script settings depending on the iterator type
5,957
def raster2shp ( rasterfile , vectorshp , layername = None , fieldname = None , band_num = 1 , mask = 'default' ) : FileClass . remove_files ( vectorshp ) FileClass . check_file_exists ( rasterfile ) gdal . UseExceptions ( ) src_ds = gdal . Open ( rasterfile ) if src_ds is None : print ( 'Unable to open %s' % rasterfil...
Convert raster to ESRI shapefile
5,958
def convert2geojson ( jsonfile , src_srs , dst_srs , src_file ) : if os . path . exists ( jsonfile ) : os . remove ( jsonfile ) if sysstr == 'Windows' : exepath = '"%s/Lib/site-packages/osgeo/ogr2ogr"' % sys . exec_prefix else : exepath = FileClass . get_executable_fullpath ( 'ogr2ogr' ) s = '%s -f GeoJSON -s_srs "%s" ...
convert shapefile to geojson file
5,959
def consensus ( aln , weights = None , gap_threshold = 0.5 , simple = False , trim_ends = True ) : if simple : col_consensus = make_simple_col_consensus ( alnutils . aa_frequencies ( aln ) ) def is_majority_gap ( col ) : return ( float ( col . count ( '-' ) ) / len ( col ) >= gap_threshold ) else : if weights is None :...
Get the consensus of an alignment as a string .
5,960
def make_simple_col_consensus ( bg_freqs ) : def col_consensus ( col , prev_col = [ ] , prev_char = [ ] ) : aa_counts = sequtils . aa_frequencies ( col ) assert aa_counts , "Column is all gaps! That's not allowed." best_char , best_score = max ( aa_counts . iteritems ( ) , key = lambda kv : kv [ 1 ] ) ties = [ aa for a...
Consensus by simple plurality unweighted .
5,961
def supported ( aln ) : def col_consensus ( columns ) : for col in columns : if ( ( col . count ( '-' ) >= len ( col ) / 2 ) or all ( c . islower ( ) for c in col if c not in '.-' ) ) : yield '-' continue if any ( c . islower ( ) for c in col ) : logging . warn ( 'Mixed lowercase and uppercase letters in a ' 'column: '...
Get only the supported consensus residues in each column .
5,962
def detect_clause ( parser , clause_name , tokens , as_filter_expr = True ) : if clause_name in tokens : t_index = tokens . index ( clause_name ) clause_value = tokens [ t_index + 1 ] if as_filter_expr : clause_value = parser . compile_filter ( clause_value ) del tokens [ t_index : t_index + 2 ] else : clause_value = N...
Helper function detects a certain clause in tag tokens list . Returns its value .
5,963
def install_kernel_spec ( self , app , dir_name , display_name , settings_module , ipython_arguments ) : ksm = app . kernel_spec_manager try_spec_names = [ 'python3' if six . PY3 else 'python2' , 'python' ] if isinstance ( try_spec_names , six . string_types ) : try_spec_names = [ try_spec_names ] ks = None for spec_na...
install an IPython > = 3 . 0 kernelspec that loads corral env
5,964
def _cache_init ( self ) : cache_ = cache . get ( self . CACHE_ENTRY_NAME ) if cache_ is None : categories = get_category_model ( ) . objects . order_by ( 'sort_order' ) ids = { category . id : category for category in categories } aliases = { category . alias : category for category in categories if category . alias }...
Initializes local cache from Django cache if required .
5,965
def _cache_get_entry(self, entry_name, key=ENTIRE_ENTRY_KEY, default=False):
    """Return a cache entry, or one parameter of it by key.

    Passing the ENTIRE_ENTRY_KEY sentinel (identity-compared) returns
    the whole entry dict; otherwise missing keys yield ``default``.
    """
    entry = self._cache[entry_name]
    if key is self.ENTIRE_ENTRY_KEY:
        return entry
    return entry.get(key, default)
Returns cache entry parameter value by its name .
5,966
def sort_aliases(self, aliases):
    """Return the given aliases ordered as in the parents cache."""
    self._cache_init()
    if not aliases:
        return aliases
    known_parents = self._cache_get_entry(self.CACHE_NAME_PARENTS).keys()
    return [alias for alias in known_parents if alias in aliases]
Sorts the given aliases list returns a sorted list .
5,967
def get_parents_for(self, child_ids):
    """Return the set of parent aliases covering any of the child IDs."""
    self._cache_init()
    parents_map = self._cache_get_entry(self.CACHE_NAME_PARENTS)
    return {parent for parent, children in parents_map.items()
            if set(children).intersection(child_ids)}
Returns parent aliases for a list of child IDs .
5,968
def get_children_for ( self , parent_alias = None , only_with_aliases = False ) : self . _cache_init ( ) child_ids = self . get_child_ids ( parent_alias ) if only_with_aliases : children = [ ] for cid in child_ids : category = self . get_category_by_id ( cid ) if category . alias : children . append ( category ) return...
Returns a list with categories under the given parent .
5,969
def get_child_ids(self, parent_alias):
    """Return child IDs of the given parent category ([] when unknown)."""
    self._cache_init()
    children = self._cache_get_entry(self.CACHE_NAME_PARENTS, parent_alias, [])
    return children
Returns child IDs of the given parent category
5,970
def get_category_by_alias(self, alias):
    """Return the Category object for an alias, or None when unknown."""
    self._cache_init()
    category = self._cache_get_entry(self.CACHE_NAME_ALIASES, alias, None)
    return category
Returns Category object by its alias .
5,971
def get_category_by_id(self, cid):
    """Return the Category object for an id.

    NOTE(review): relies on _cache_get_entry's own default (False) for
    missing ids — confirm callers handle that.
    """
    self._cache_init()
    return self._cache_get_entry(self.CACHE_NAME_IDS, cid)
Returns Category object by its id .
5,972
def get_ties_stats ( self , categories , target_model = None ) : filter_kwargs = { 'category_id__in' : categories } if target_model is not None : is_cls = hasattr ( target_model , '__name__' ) if is_cls : concrete = False else : concrete = True filter_kwargs [ 'object_id' ] = target_model . id filter_kwargs [ 'content_...
Returns a dict with categories popularity stats .
5,973
def load_p2th_privkey_into_local_node ( provider : RpcNode , prod : bool = True ) -> None : assert isinstance ( provider , RpcNode ) , { "error" : "Import only works with local node." } error = { "error" : "Loading P2TH privkey failed." } pa_params = param_query ( provider . network ) if prod : provider . importprivkey...
Load PeerAssets P2TH privkey into the local node .
5,974
def find_deck_spawns ( provider : Provider , prod : bool = True ) -> Iterable [ str ] : pa_params = param_query ( provider . network ) if isinstance ( provider , RpcNode ) : if prod : decks = ( i [ "txid" ] for i in provider . listtransactions ( "PAPROD" ) ) else : decks = ( i [ "txid" ] for i in provider . listtransac...
find deck spawn transactions via Provider it requires that Deck spawn P2TH were imported in local node or that remote API knows about P2TH address .
5,975
def deck_parser ( args : Tuple [ Provider , dict , int , str ] , prod : bool = True ) -> Optional [ Deck ] : provider = args [ 0 ] raw_tx = args [ 1 ] deck_version = args [ 2 ] p2th = args [ 3 ] try : validate_deckspawn_p2th ( provider , raw_tx , p2th ) d = parse_deckspawn_metainfo ( read_tx_opreturn ( raw_tx [ 'vout' ...
deck parser function
5,976
def tx_serialization_order(provider: Provider, blockhash: str, txid: str) -> int:
    """Find the index of this tx within its block's tx list."""
    block_txs = provider.getblock(blockhash)["tx"]
    return block_txs.index(txid)
find index of this tx in the blockid
5,977
def deck_issue_mode(proto: DeckSpawnProto) -> Iterable[str]:
    """Interpret the deck's issue-mode bit flags, yielding mode names."""
    if proto.issue_mode == 0:
        yield "NONE"
        return
    for mode, value in proto.MODE.items():
        # Skip flag values larger than the whole field.
        if value > proto.issue_mode:
            continue
        if value & proto.issue_mode:
            yield mode
interpret issue mode bitflag
5,978
def parse_deckspawn_metainfo ( protobuf : bytes , version : int ) -> dict : deck = DeckSpawnProto ( ) deck . ParseFromString ( protobuf ) error = { "error" : "Deck ({deck}) metainfo incomplete, deck must have a name." . format ( deck = deck . name ) } if deck . name == "" : raise InvalidDeckMetainfo ( error ) if deck ....
Decode deck_spawn tx op_return protobuf message and validate it . Raise an error if the deck_spawn metainfo is incomplete or the version mismatches .
5,979
def load_deck_p2th_into_local_node ( provider : RpcNode , deck : Deck ) -> None : assert isinstance ( provider , RpcNode ) , { "error" : "You can load privkeys only into local node." } error = { "error" : "Deck P2TH import went wrong." } provider . importprivkey ( deck . p2th_wif , deck . id ) check_addr = provider . v...
load deck p2th into local node via importprivkey ; this allows building of a proof - of - timeline for this deck
5,980
def card_bundle_parser ( bundle : CardBundle , debug = False ) -> Iterator : try : validate_card_transfer_p2th ( bundle . deck , bundle . vouts [ 0 ] ) card_metainfo = parse_card_transfer_metainfo ( read_tx_opreturn ( bundle . vouts [ 1 ] ) , bundle . deck . version ) except ( InvalidCardTransferP2TH , CardVersionMisma...
this function wraps all the card transfer parsing
5,981
def param_query(name: str) -> PAParams:
    """Find the PAParams for a network by its long or short name.

    Raises UnsupportedNetwork when no PAParams matches.
    """
    for candidate in params:
        if name in (candidate.network_name, candidate.network_shortname):
            return candidate
    raise UnsupportedNetwork
Find the PAParams for a network by its long or short name . Raises UnsupportedNetwork if no PAParams is found .
5,982
def load_scripts ( self ) : for index in range ( self . tree_scripts . topLevelItemCount ( ) ) : script_item = self . tree_scripts . topLevelItem ( index ) self . update_script_from_item ( script_item ) dialog = LoadDialog ( elements_type = "scripts" , elements_old = self . scripts , filename = self . gui_settings [ 's...
opens file dialog to load scripts into gui
5,983
def getblockhash(self, index: int) -> str:
    """Return the hash of the block at the given index (0 = genesis)."""
    query = 'getblockhash?index=' + str(index)
    return cast(str, self.api_fetch(query))
Returns the hash of the block at the given index ; index 0 is the genesis block .
5,984
def getblock(self, hash: str) -> dict:
    """Return information about the block with the given hash."""
    query = 'getblock?hash=' + hash
    return cast(dict, self.api_fetch(query))
Returns information about the block with the given hash .
5,985
def getaddress(self, address: str) -> dict:
    """Return information for the given address."""
    endpoint = 'getaddress/' + address
    return cast(dict, self.ext_fetch(endpoint))
Returns information for given address .
5,986
def listunspent(self, address: str) -> list:
    """Return unspent transaction outputs for the given address.

    Raises InsufficientFunds when the response lacks 'unspent_outputs'.
    """
    response = self.ext_fetch('listunspent/' + address)
    try:
        return cast(dict, response)['unspent_outputs']
    except KeyError:
        raise InsufficientFunds('Insufficient funds.')
Returns unspent transactions for given address .
5,987
def txinfo(self, txid: str) -> dict:
    """Return information about the given transaction."""
    endpoint = 'txinfo/' + txid
    return cast(dict, self.ext_fetch(endpoint))
Returns information about given transaction .
5,988
def getbalance(self, address: str) -> Decimal:
    """Return the current balance of the given address.

    Falls back to Decimal(0) when the explorer returns a non-numeric
    response (TypeError from the Decimal constructor).
    """
    raw = self.ext_fetch('getbalance/' + address)
    try:
        return Decimal(cast(float, raw))
    except TypeError:
        return Decimal(0)
Returns current balance of given address .
5,989
def extract(self, obj, bypass_ref=False):
    """Extract the subelement of obj addressed by this pointer.

    Assumes ``obj`` is the document root; delegates to the pointer.
    """
    return self.pointer.extract(obj, bypass_ref)
Extract subelement from obj according to pointer . It assumes that the document is the object .
5,990
def parse ( self , pointer ) : if isinstance ( pointer , Pointer ) : return pointer . tokens [ : ] elif pointer == '' : return [ ] tokens = [ ] staged , _ , children = pointer . partition ( '/' ) if staged : try : token = StagesToken ( staged ) token . last = False tokens . append ( token ) except ValueError : raise Pa...
parse pointer into tokens
5,991
def extract(self, obj, bypass_ref=False):
    """Drill into obj by applying each pointer token in turn."""
    current = obj
    for token in self.tokens:
        current = token.extract(current, bypass_ref)
    return current
Extract subelement from obj according to tokens .
5,992
def extract(self, obj, bypass_ref=False):
    """Walk up ``self.stages`` parents of obj per the current token.

    When ``self.member`` is set, return the member name under which the
    final object is staged instead of the object itself. Raises
    UnstagedError if an object in the chain was never staged.
    """
    current = obj
    for _ in range(self.stages):
        try:
            current = current.parent_obj
        except AttributeError:
            raise UnstagedError(current, '{!r} must be staged before '
                                         'exploring its parents'.format(current))
    if self.member:
        return current.parent_member
    return current
Extract parent of obj according to current token .
5,993
def extract ( self , obj , bypass_ref = False ) : try : if isinstance ( obj , Mapping ) : if not bypass_ref and '$ref' in obj : raise RefError ( obj , 'presence of a $ref member' ) obj = self . extract_mapping ( obj ) elif isinstance ( obj , Sequence ) and not isinstance ( obj , string_types ) : obj = self . extract_se...
Extract subelement from obj according to current token .
5,994
def digester ( data ) : if not isinstance ( data , six . binary_type ) : data = data . encode ( 'utf_8' ) hashof = hashlib . sha1 ( data ) . digest ( ) encoded_hash = base64 . b64encode ( hashof ) if not isinstance ( encoded_hash , six . string_types ) : encoded_hash = encoded_hash . decode ( 'utf_8' ) chunked = splitt...
Create SHA - 1 hash get digest b64 encode split every 60 char .
5,995
def splitter(iterable, chunksize=60):
    """Lazily split an indexable iterable into chunks of chunksize."""
    return (iterable[start:start + chunksize]
            for start in range(0, len(iterable), chunksize))
Split an iterable that supports indexing into chunks of chunksize .
5,996
def canonical_request(self, method, path, content, timestamp):
    """Return the canonical request string for request signing."""
    fields = collections.OrderedDict([
        ('Method', method.upper()),
        ('Hashed Path', path),
        ('X-Ops-Content-Hash', content),
        ('X-Ops-Timestamp', timestamp),
        ('X-Ops-UserId', self.user_id),
    ])
    return '\n'.join('%s:%s' % item for item in fields.items())
Return the canonical request string .
5,997
def load_pem ( cls , private_key , password = None ) : maybe_path = normpath ( private_key ) if os . path . isfile ( maybe_path ) : with open ( maybe_path , 'rb' ) as pkf : private_key = pkf . read ( ) if not isinstance ( private_key , six . binary_type ) : private_key = private_key . encode ( 'utf-8' ) pkey = serializ...
Return a PrivateKey instance .
5,998
def sign ( self , data , b64 = True ) : padder = padding . PKCS1v15 ( ) signer = self . private_key . signer ( padder , None ) if not isinstance ( data , six . binary_type ) : data = data . encode ( 'utf_8' ) signer . update ( data ) signed = signer . finalize ( ) if b64 : signed = base64 . b64encode ( signed ) return ...
Sign data with the private key and return the signed data .
5,999
def dump(obj, fp, **kw):
    r"""Dump a python object to a file.

    ``fp`` may be a filename (the file is opened and written) or any
    writable file-like object. Extra keyword arguments pass through to
    ``dumps``.

    FIX: the flattened source contained a stray ``r`` token — the prefix
    of this raw docstring — which made the line invalid Python; the
    docstring is reconstructed here.
    """
    xml = dumps(obj, **kw)
    # NOTE(review): `basestring` is Python-2 only — confirm the project
    # still targets py2 (other rows here use six/iteritems).
    if isinstance(fp, basestring):
        with open(fp, 'w') as fobj:
            fobj.write(xml)
    else:
        fp.write(xml)
r Dump python object to file .