idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
17,400 | def combine_tax_scales ( node ) : combined_tax_scales = None for child_name in node : child = node [ child_name ] if not isinstance ( child , AbstractTaxScale ) : log . info ( 'Skipping {} with value {} because it is not a tax scale' . format ( child_name , child ) ) continue if combined_tax_scales is None : combined_tax_scales = MarginalRateTaxScale ( name = child_name ) combined_tax_scales . add_bracket ( 0 , 0 ) combined_tax_scales . add_tax_scale ( child ) return combined_tax_scales | Combine all the MarginalRateTaxScales in the node into a single MarginalRateTaxScale . |
17,401 | def inverse ( self ) : inverse = self . __class__ ( name = self . name + "'" , option = self . option , unit = self . unit ) net_threshold = 0 for threshold , rate in zip ( self . thresholds , self . rates ) : if threshold == 0 : previous_rate = 0 theta = 0 net_threshold = ( 1 - previous_rate ) * threshold + theta inverse . add_bracket ( net_threshold , 1 / ( 1 - rate ) ) theta = ( rate - previous_rate ) * threshold + theta previous_rate = rate return inverse | Returns a new instance of MarginalRateTaxScale |
17,402 | def scale_tax_scales ( self , factor ) : assert isinstance ( factor , ( float , int ) ) scaled_tax_scale = self . copy ( ) return scaled_tax_scale . multiply_thresholds ( factor ) | Scale all the MarginalRateTaxScales in the node . |
17,403 | def decode ( self ) : return np . select ( [ self == item . index for item in self . possible_values ] , [ item for item in self . possible_values ] ) | Return the array of enum items corresponding to self |
17,404 | def decode_to_str ( self ) : return np . select ( [ self == item . index for item in self . possible_values ] , [ item . name for item in self . possible_values ] ) | Return the array of string identifiers corresponding to self |
17,405 | def record_calculation_start ( self , variable_name , period , ** parameters ) : key = self . _get_key ( variable_name , period , ** parameters ) if self . stack : parent = self . stack [ - 1 ] self . trace [ parent ] [ 'dependencies' ] . append ( key ) else : self . requested_calculations . add ( key ) if not self . trace . get ( key ) : self . trace [ key ] = { 'dependencies' : [ ] , 'parameters' : { } } self . stack . append ( key ) self . _computation_log . append ( ( key , len ( self . stack ) ) ) self . usage_stats [ variable_name ] [ 'nb_requests' ] += 1 | Record that OpenFisca started computing a variable . |
17,406 | def record_calculation_end ( self , variable_name , period , result , ** parameters ) : key = self . _get_key ( variable_name , period , ** parameters ) expected_key = self . stack . pop ( ) if not key == expected_key : raise ValueError ( "Something went wrong with the simulation tracer: result of '{0}' was expected, got results for '{1}' instead. This does not make sense as the last variable we started computing was '{0}'." . format ( expected_key , key ) ) self . trace [ key ] [ 'value' ] = result | Record that OpenFisca finished computing a variable . |
17,407 | def print_computation_log ( self , aggregate = False ) : for line in self . computation_log ( aggregate ) : print ( line ) | Print the computation log of a simulation . |
17,408 | def make_simulation ( tax_benefit_system , nb_persons , nb_groups , ** kwargs ) : simulation = Simulation ( tax_benefit_system = tax_benefit_system , ** kwargs ) simulation . persons . ids = np . arange ( nb_persons ) simulation . persons . count = nb_persons adults = [ 0 ] + sorted ( random . sample ( range ( 1 , nb_persons ) , nb_groups - 1 ) ) members_entity_id = np . empty ( nb_persons , dtype = int ) members_legacy_role = np . empty ( nb_persons , dtype = int ) id_group = - 1 for id_person in range ( nb_persons ) : if id_person in adults : id_group += 1 legacy_role = 0 else : legacy_role = 2 if legacy_role == 0 else legacy_role + 1 members_legacy_role [ id_person ] = legacy_role members_entity_id [ id_person ] = id_group for entity in simulation . populations . values ( ) : if not entity . is_person : entity . members_entity_id = members_entity_id entity . count = nb_groups entity . members_role = np . where ( members_legacy_role == 0 , entity . flattened_roles [ 0 ] , entity . flattened_roles [ - 1 ] ) return simulation | Generate a simulation containing nb_persons persons spread in nb_groups groups . |
17,409 | def modify_parameters ( self , modifier_function ) : baseline_parameters = self . baseline . parameters baseline_parameters_copy = copy . deepcopy ( baseline_parameters ) reform_parameters = modifier_function ( baseline_parameters_copy ) if not isinstance ( reform_parameters , ParameterNode ) : return ValueError ( 'modifier_function {} in module {} must return a ParameterNode' . format ( modifier_function . __name__ , modifier_function . __module__ , ) ) self . parameters = reform_parameters self . _parameters_at_instant_cache = { } | Make modifications on the parameters of the legislation |
17,410 | def date ( self ) : instant_date = date_by_instant_cache . get ( self ) if instant_date is None : date_by_instant_cache [ self ] = instant_date = datetime . date ( * self ) return instant_date | Convert instant to a date . |
17,411 | def period ( self , unit , size = 1 ) : assert unit in ( DAY , MONTH , YEAR ) , 'Invalid unit: {} of type {}' . format ( unit , type ( unit ) ) assert isinstance ( size , int ) and size >= 1 , 'Invalid size: {} of type {}' . format ( size , type ( size ) ) return Period ( ( unit , self , size ) ) | Create a new period starting at instant . |
17,412 | def get_subperiods ( self , unit ) : if unit_weight ( self . unit ) < unit_weight ( unit ) : raise ValueError ( 'Cannot subdivide {0} into {1}' . format ( self . unit , unit ) ) if unit == YEAR : return [ self . this_year . offset ( i , YEAR ) for i in range ( self . size ) ] if unit == MONTH : return [ self . first_month . offset ( i , MONTH ) for i in range ( self . size_in_months ) ] if unit == DAY : return [ self . first_day . offset ( i , DAY ) for i in range ( self . size_in_days ) ] | Return the list of all the periods of unit unit contained in self . |
17,413 | def size_in_months ( self ) : if ( self [ 0 ] == MONTH ) : return self [ 2 ] if ( self [ 0 ] == YEAR ) : return self [ 2 ] * 12 raise ValueError ( "Cannot calculate number of months in {0}" . format ( self [ 0 ] ) ) | Return the size of the period in months . |
17,414 | def size_in_days ( self ) : unit , instant , length = self if unit == DAY : return length if unit in [ MONTH , YEAR ] : last_day = self . start . offset ( length , unit ) . offset ( - 1 , DAY ) return ( last_day . date - self . start . date ) . days + 1 raise ValueError ( "Cannot calculate number of days in {0}" . format ( unit ) ) | Return the size of the period in days . |
17,415 | def stop ( self ) : unit , start_instant , size = self year , month , day = start_instant if unit == ETERNITY : return Instant ( ( float ( "inf" ) , float ( "inf" ) , float ( "inf" ) ) ) if unit == 'day' : if size > 1 : day += size - 1 month_last_day = calendar . monthrange ( year , month ) [ 1 ] while day > month_last_day : month += 1 if month == 13 : year += 1 month = 1 day -= month_last_day month_last_day = calendar . monthrange ( year , month ) [ 1 ] else : if unit == 'month' : month += size while month > 12 : year += 1 month -= 12 else : assert unit == 'year' , 'Invalid unit: {} of type {}' . format ( unit , type ( unit ) ) year += size day -= 1 if day < 1 : month -= 1 if month == 0 : year -= 1 month = 12 day += calendar . monthrange ( year , month ) [ 1 ] else : month_last_day = calendar . monthrange ( year , month ) [ 1 ] if day > month_last_day : month += 1 if month == 13 : year += 1 month = 1 day -= month_last_day return Instant ( ( year , month , day ) ) | Return the last day of the period as an Instant instance . |
17,416 | def parse_formula_name ( self , attribute_name ) : def raise_error ( ) : raise ValueError ( 'Unrecognized formula name in variable "{}". Expecting "formula_YYYY" or "formula_YYYY_MM" or "formula_YYYY_MM_DD where YYYY, MM and DD are year, month and day. Found: "{}".' . format ( self . name , attribute_name ) ) if attribute_name == FORMULA_NAME_PREFIX : return date . min FORMULA_REGEX = r'formula_(\d{4})(?:_(\d{2}))?(?:_(\d{2}))?$' match = re . match ( FORMULA_REGEX , attribute_name ) if not match : raise_error ( ) date_str = '-' . join ( [ match . group ( 1 ) , match . group ( 2 ) or '01' , match . group ( 3 ) or '01' ] ) try : return datetime . datetime . strptime ( date_str , '%Y-%m-%d' ) . date ( ) except ValueError : raise_error ( ) | Returns the starting date of a formula based on its name . |
17,417 | def get_introspection_data ( cls , tax_benefit_system ) : comments = inspect . getcomments ( cls ) try : absolute_file_path = inspect . getsourcefile ( cls ) except TypeError : source_file_path = None else : source_file_path = absolute_file_path . replace ( tax_benefit_system . get_package_metadata ( ) [ 'location' ] , '' ) try : source_lines , start_line_number = inspect . getsourcelines ( cls ) source_code = textwrap . dedent ( '' . join ( source_lines ) ) except ( IOError , TypeError ) : source_code , start_line_number = None , None return comments , source_file_path , source_code , start_line_number | Get instrospection data about the code of the variable . |
17,418 | def get_formula ( self , period = None ) : if not self . formulas : return None if period is None : return self . formulas . peekitem ( index = 0 ) [ 1 ] if isinstance ( period , periods . Period ) : instant = period . start else : try : instant = periods . period ( period ) . start except ValueError : instant = periods . instant ( period ) if self . end and instant . date > self . end : return None instant = str ( instant ) for start_date in reversed ( self . formulas ) : if start_date <= instant : return self . formulas [ start_date ] return None | Returns the formula used to compute the variable at the given period . |
17,419 | def get_rank ( self , entity , criteria , condition = True ) : entity = entity if not isinstance ( entity , Projector ) else entity . reference_entity positions = entity . members_position biggest_entity_size = np . max ( positions ) + 1 filtered_criteria = np . where ( condition , criteria , np . inf ) ids = entity . members_entity_id matrix = np . asarray ( [ entity . value_nth_person ( k , filtered_criteria , default = np . inf ) for k in range ( biggest_entity_size ) ] ) . transpose ( ) sorted_matrix = np . argsort ( np . argsort ( matrix ) ) result = sorted_matrix [ ids , positions ] return np . where ( condition , result , - 1 ) | Get the rank of a person within an entity according to a criteria . The person with rank 0 has the minimum value of criteria . If condition is specified then the persons who don t respect it are not taken into account and their rank is - 1 . |
17,420 | def ordered_members_map ( self ) : if self . _ordered_members_map is None : return np . argsort ( self . members_entity_id ) return self . _ordered_members_map | Mask to group the persons by entity This function only caches the map value to see what the map is used for see value_nth_person method . |
17,421 | def sum ( self , array , role = None ) : self . entity . check_role_validity ( role ) self . members . check_array_compatible_with_entity ( array ) if role is not None : role_filter = self . members . has_role ( role ) return np . bincount ( self . members_entity_id [ role_filter ] , weights = array [ role_filter ] , minlength = self . count ) else : return np . bincount ( self . members_entity_id , weights = array ) | Return the sum of array for the members of the entity . |
17,422 | def any ( self , array , role = None ) : sum_in_entity = self . sum ( array , role = role ) return ( sum_in_entity > 0 ) | Return True if array is True for any members of the entity . |
17,423 | def all ( self , array , role = None ) : return self . reduce ( array , reducer = np . logical_and , neutral_element = True , role = role ) | Return True if array is True for all members of the entity . |
17,424 | def max ( self , array , role = None ) : return self . reduce ( array , reducer = np . maximum , neutral_element = - np . infty , role = role ) | Return the maximum value of array for the entity members . |
17,425 | def min ( self , array , role = None ) : return self . reduce ( array , reducer = np . minimum , neutral_element = np . infty , role = role ) | Return the minimum value of array for the entity members . |
17,426 | def nb_persons ( self , role = None ) : if role : if role . subroles : role_condition = np . logical_or . reduce ( [ self . members_role == subrole for subrole in role . subroles ] ) else : role_condition = self . members_role == role return self . sum ( role_condition ) else : return np . bincount ( self . members_entity_id ) | Returns the number of persons contained in the entity . |
17,427 | def value_from_person ( self , array , role , default = 0 ) : self . entity . check_role_validity ( role ) if role . max != 1 : raise Exception ( 'You can only use value_from_person with a role that is unique in {}. Role {} is not unique.' . format ( self . key , role . key ) ) self . members . check_array_compatible_with_entity ( array ) members_map = self . ordered_members_map result = self . filled_array ( default , dtype = array . dtype ) if isinstance ( array , EnumArray ) : result = EnumArray ( result , array . possible_values ) role_filter = self . members . has_role ( role ) entity_filter = self . any ( role_filter ) result [ entity_filter ] = array [ members_map ] [ role_filter [ members_map ] ] return result | Get the value of array for the person with the unique role role . |
17,428 | def value_nth_person ( self , n , array , default = 0 ) : self . members . check_array_compatible_with_entity ( array ) positions = self . members_position nb_persons_per_entity = self . nb_persons ( ) members_map = self . ordered_members_map result = self . filled_array ( default , dtype = array . dtype ) result [ nb_persons_per_entity > n ] = array [ members_map ] [ positions [ members_map ] == n ] return result | Get the value of array for the person whose position in the entity is n . |
17,429 | def data_storage_dir ( self ) : if self . _data_storage_dir is None : self . _data_storage_dir = tempfile . mkdtemp ( prefix = "openfisca_" ) log . warn ( ( "Intermediate results will be stored on disk in {} in case of memory overflow. " "You should remove this directory once you're done with your simulation." ) . format ( self . _data_storage_dir ) ) return self . _data_storage_dir | Temporary folder used to store intermediate calculation data in case the memory is saturated |
17,430 | def calculate ( self , variable_name , period , ** parameters ) : population = self . get_variable_population ( variable_name ) holder = population . get_holder ( variable_name ) variable = self . tax_benefit_system . get_variable ( variable_name , check_existence = True ) if period is not None and not isinstance ( period , periods . Period ) : period = periods . period ( period ) if self . trace : self . tracer . record_calculation_start ( variable . name , period , ** parameters ) self . _check_period_consistency ( period , variable ) cached_array = holder . get_array ( period ) if cached_array is not None : if self . trace : self . tracer . record_calculation_end ( variable . name , period , cached_array , ** parameters ) return cached_array array = None try : self . _check_for_cycle ( variable , period ) array = self . _run_formula ( variable , population , period ) if array is None : array = holder . default_array ( ) array = self . _cast_formula_result ( array , variable ) holder . put_in_cache ( array , period ) except SpiralError : array = holder . default_array ( ) finally : if self . trace : self . tracer . record_calculation_end ( variable . name , period , array , ** parameters ) self . _clean_cycle_detection_data ( variable . name ) self . purge_cache_of_invalid_values ( ) return array | Calculate the variable variable_name for the period period using the variable formula if it exists . |
17,431 | def calculate_output ( self , variable_name , period ) : variable = self . tax_benefit_system . get_variable ( variable_name , check_existence = True ) if variable . calculate_output is None : return self . calculate ( variable_name , period ) return variable . calculate_output ( self , variable_name , period ) | Calculate the value of a variable using the calculate_output attribute of the variable . |
17,432 | def _run_formula ( self , variable , population , period ) : formula = variable . get_formula ( period ) if formula is None : return None if self . trace : parameters_at = self . trace_parameters_at_instant else : parameters_at = self . tax_benefit_system . get_parameters_at_instant if formula . __code__ . co_argcount == 2 : array = formula ( population , period ) else : array = formula ( population , period , parameters_at ) self . _check_formula_result ( array , variable , population , period ) return array | Find the variable formula for the given period if it exists and apply it to population . |
17,433 | def _check_period_consistency ( self , period , variable ) : if variable . definition_period == periods . ETERNITY : return if variable . definition_period == periods . MONTH and period . unit != periods . MONTH : raise ValueError ( "Unable to compute variable '{0}' for period {1}: '{0}' must be computed for a whole month. You can use the ADD option to sum '{0}' over the requested period, or change the requested period to 'period.first_month'." . format ( variable . name , period ) ) if variable . definition_period == periods . YEAR and period . unit != periods . YEAR : raise ValueError ( "Unable to compute variable '{0}' for period {1}: '{0}' must be computed for a whole year. You can use the DIVIDE option to get an estimate of {0} by dividing the yearly value by 12, or change the requested period to 'period.this_year'." . format ( variable . name , period ) ) if period . size != 1 : raise ValueError ( "Unable to compute variable '{0}' for period {1}: '{0}' must be computed for a whole {2}. You can use the ADD option to sum '{0}' over the requested period." . format ( variable . name , period , 'month' if variable . definition_period == periods . MONTH else 'year' ) ) | Check that a period matches the variable definition_period |
17,434 | def get_memory_usage ( self , variables = None ) : result = dict ( total_nb_bytes = 0 , by_variable = { } ) for entity in self . populations . values ( ) : entity_memory_usage = entity . get_memory_usage ( variables = variables ) result [ 'total_nb_bytes' ] += entity_memory_usage [ 'total_nb_bytes' ] result [ 'by_variable' ] . update ( entity_memory_usage [ 'by_variable' ] ) return result | Get data about the virtual memory usage of the simulation |
17,435 | def set_input ( self , variable_name , period , value ) : variable = self . tax_benefit_system . get_variable ( variable_name , check_existence = True ) period = periods . period ( period ) if ( ( variable . end is not None ) and ( period . start . date > variable . end ) ) : return self . get_holder ( variable_name ) . set_input ( period , value ) | Set a variable s value for a given period |
17,436 | def clone ( self , debug = False , trace = False ) : new = empty_clone ( self ) new_dict = new . __dict__ for key , value in self . __dict__ . items ( ) : if key not in ( 'debug' , 'trace' , 'tracer' ) : new_dict [ key ] = value new . persons = self . persons . clone ( new ) setattr ( new , new . persons . entity . key , new . persons ) new . populations = { new . persons . entity . key : new . persons } for entity in self . tax_benefit_system . group_entities : population = self . populations [ entity . key ] . clone ( new ) new . populations [ entity . key ] = population setattr ( new , entity . key , population ) new . debug = debug new . trace = trace return new | Copy the simulation just enough to be able to run the copy without modifying the original simulation |
17,437 | def clone ( self , population ) : new = empty_clone ( self ) new_dict = new . __dict__ for key , value in self . __dict__ . items ( ) : if key not in ( 'population' , 'formula' , 'simulation' ) : new_dict [ key ] = value new_dict [ 'population' ] = population new_dict [ 'simulation' ] = population . simulation return new | Copy the holder just enough to be able to run a new simulation without modifying the original simulation . |
17,438 | def delete_arrays ( self , period = None ) : self . _memory_storage . delete ( period ) if self . _disk_storage : self . _disk_storage . delete ( period ) | If period is None remove all known values of the variable . |
17,439 | def get_array ( self , period ) : if self . variable . is_neutralized : return self . default_array ( ) value = self . _memory_storage . get ( period ) if value is not None : return value if self . _disk_storage : return self . _disk_storage . get ( period ) | Get the value of the variable for the given period . |
17,440 | def get_memory_usage ( self ) : usage = dict ( nb_cells_by_array = self . population . count , dtype = self . variable . dtype , ) usage . update ( self . _memory_storage . get_memory_usage ( ) ) if self . simulation . trace : usage_stats = self . simulation . tracer . usage_stats [ self . variable . name ] usage . update ( dict ( nb_requests = usage_stats [ 'nb_requests' ] , nb_requests_by_array = usage_stats [ 'nb_requests' ] / float ( usage [ 'nb_arrays' ] ) if usage [ 'nb_arrays' ] > 0 else np . nan ) ) return usage | Get data about the virtual memory usage of the holder . |
17,441 | def get_known_periods ( self ) : return list ( self . _memory_storage . get_known_periods ( ) ) + list ( ( self . _disk_storage . get_known_periods ( ) if self . _disk_storage else [ ] ) ) | Get the list of periods the variable value is known for . |
17,442 | def dump_simulation ( simulation , directory ) : parent_directory = os . path . abspath ( os . path . join ( directory , os . pardir ) ) if not os . path . isdir ( parent_directory ) : os . mkdir ( parent_directory ) if not os . path . isdir ( directory ) : os . mkdir ( directory ) if os . listdir ( directory ) : raise ValueError ( "Directory '{}' is not empty" . format ( directory ) ) entities_dump_dir = os . path . join ( directory , "__entities__" ) os . mkdir ( entities_dump_dir ) for entity in simulation . populations . values ( ) : _dump_entity ( entity , entities_dump_dir ) for holder in entity . _holders . values ( ) : _dump_holder ( holder , directory ) | Write simulation data to directory so that it can be restored later . |
17,443 | def restore_simulation ( directory , tax_benefit_system , ** kwargs ) : simulation = Simulation ( tax_benefit_system , tax_benefit_system . instantiate_entities ( ) ) entities_dump_dir = os . path . join ( directory , "__entities__" ) for population in simulation . populations . values ( ) : if population . entity . is_person : continue person_count = _restore_entity ( population , entities_dump_dir ) for population in simulation . populations . values ( ) : if not population . entity . is_person : continue _restore_entity ( population , entities_dump_dir ) population . count = person_count variables_to_restore = ( variable for variable in os . listdir ( directory ) if variable != "__entities__" ) for variable in variables_to_restore : _restore_holder ( simulation , variable , directory ) return simulation | Restore simulation from directory |
17,444 | def are_forms_valid ( self , forms ) : for form in six . itervalues ( forms ) : if not form . is_valid ( ) : return False return True | Check if all forms defined in form_classes are valid . |
17,445 | def get_context_data ( self , ** kwargs ) : context = { } if 'forms' not in kwargs : context [ 'forms' ] = self . get_forms ( ) else : context [ 'forms' ] = kwargs [ 'forms' ] return context | Add forms into the context dictionary . |
17,446 | def get_form_kwargs ( self ) : kwargs = { } for key in six . iterkeys ( self . form_classes ) : if self . request . method in ( 'POST' , 'PUT' ) : kwargs [ key ] = { 'data' : self . request . POST , 'files' : self . request . FILES , } else : kwargs [ key ] = { } return kwargs | Build the keyword arguments required to instantiate the form . |
17,447 | def get_initial ( self ) : initial = super ( MultiFormView , self ) . get_initial ( ) for key in six . iterkeys ( self . form_classes ) : initial [ key ] = { } return initial | Returns a copy of initial with empty initial data dictionaries for each form . |
17,448 | def get_objects ( self ) : objects = { } for key in six . iterkeys ( self . form_classes ) : objects [ key ] = None return objects | Returns dictionary with the instance objects for each form . Keys should match the corresponding form . |
17,449 | def ratio_split ( amount , ratios ) : ratio_total = sum ( ratios ) divided_value = amount / ratio_total values = [ ] for ratio in ratios : value = divided_value * ratio values . append ( value ) rounded = [ v . quantize ( Decimal ( "0.01" ) ) for v in values ] remainders = [ v - rounded [ i ] for i , v in enumerate ( values ) ] remainder = sum ( remainders ) rounded [ - 1 ] = ( rounded [ - 1 ] + remainder ) . quantize ( Decimal ( "0.01" ) ) assert sum ( rounded ) == amount return rounded | Split in_value according to the ratios specified in ratios |
17,450 | def create_columns ( self ) : reader = self . _get_csv_reader ( ) headings = six . next ( reader ) try : examples = six . next ( reader ) except StopIteration : examples = [ ] found_fields = set ( ) for i , value in enumerate ( headings ) : if i >= 20 : break infer_field = self . has_headings and value not in found_fields to_field = ( { "date" : "date" , "amount" : "amount" , "description" : "description" , "memo" : "description" , "notes" : "description" , } . get ( value . lower ( ) , "" ) if infer_field else "" ) if to_field : found_fields . add ( to_field ) TransactionCsvImportColumn . objects . update_or_create ( transaction_import = self , column_number = i + 1 , column_heading = value if self . has_headings else "" , to_field = to_field , example = examples [ i ] . strip ( ) if examples else "" , ) | For each column in file create a TransactionCsvImportColumn |
17,451 | def _get_num_similar_objects ( self , obj ) : return StatementLine . objects . filter ( date = obj . date , amount = obj . amount , description = obj . description ) . count ( ) | Get any statement lines which would be considered a duplicate of obj |
17,452 | def _get_num_similar_rows ( self , row , until = None ) : return len ( list ( filter ( lambda r : row == r , self . dataset [ : until ] ) ) ) | Get the number of rows similar to row which precede the index until |
17,453 | def do_import ( token , account_uuid , bank_account , since = None ) : response = requests . get ( url = "https://api.teller.io/accounts/{}/transactions" . format ( account_uuid ) , headers = { "Authorization" : "Bearer {}" . format ( token ) } , ) response . raise_for_status ( ) data = response . json ( ) statement_import = StatementImport . objects . create ( source = "teller.io" , extra = { "account_uuid" : account_uuid } , bank_account = bank_account ) for line_data in data : uuid = UUID ( hex = line_data [ "id" ] ) if StatementLine . objects . filter ( uuid = uuid ) : continue description = ", " . join ( filter ( bool , [ line_data [ "counterparty" ] , line_data [ "description" ] ] ) ) date = datetime . date ( * map ( int , line_data [ "date" ] . split ( "-" ) ) ) if not since or date >= since : StatementLine . objects . create ( uuid = uuid , date = line_data [ "date" ] , statement_import = statement_import , amount = line_data [ "amount" ] , type = line_data [ "type" ] , description = description , source_data = line_data , ) | Import data from teller . io |
17,454 | def validate_accounting_equation ( cls ) : balances = [ account . balance ( raw = True ) for account in Account . objects . root_nodes ( ) ] if sum ( balances , Balance ( ) ) != 0 : raise exceptions . AccountingEquationViolationError ( "Account balances do not sum to zero. They sum to {}" . format ( sum ( balances ) ) ) | Check that all accounts sum to 0 |
17,455 | def sign ( self ) : return - 1 if self . type in ( Account . TYPES . asset , Account . TYPES . expense ) else 1 | Returns 1 if a credit should increase the value of the account or - 1 if a credit should decrease the value of the account . |
17,456 | def balance ( self , as_of = None , raw = False , leg_query = None , ** kwargs ) : balances = [ account . simple_balance ( as_of = as_of , raw = raw , leg_query = leg_query , ** kwargs ) for account in self . get_descendants ( include_self = True ) ] return sum ( balances , Balance ( ) ) | Get the balance for this account including child accounts |
17,457 | def simple_balance ( self , as_of = None , raw = False , leg_query = None , ** kwargs ) : legs = self . legs if as_of : legs = legs . filter ( transaction__date__lte = as_of ) if leg_query or kwargs : leg_query = leg_query or models . Q ( ) legs = legs . filter ( leg_query , ** kwargs ) return legs . sum_to_balance ( ) * ( 1 if raw else self . sign ) + self . _zero_balance ( ) | Get the balance for this account ignoring all child accounts |
17,458 | def transfer_to ( self , to_account , amount , ** transaction_kwargs ) : if not isinstance ( amount , Money ) : raise TypeError ( "amount must be of type Money" ) if to_account . sign == 1 and to_account . type != self . TYPES . trading : direction = - 1 elif self . type == self . TYPES . liability and to_account . type == self . TYPES . expense : direction = - 1 else : direction = 1 transaction = Transaction . objects . create ( ** transaction_kwargs ) Leg . objects . create ( transaction = transaction , account = self , amount = + amount * direction ) Leg . objects . create ( transaction = transaction , account = to_account , amount = - amount * direction ) return transaction | Create a transaction which transfers amount to to_account |
17,459 | def sum_to_balance ( self ) : result = self . values ( "amount_currency" ) . annotate ( total = models . Sum ( "amount" ) ) return Balance ( [ Money ( r [ "total" ] , r [ "amount_currency" ] ) for r in result ] ) | Sum the Legs of the QuerySet to get a Balance _ object |
17,460 | def account_balance_after ( self ) : transaction_date = self . transaction . date return self . account . balance ( leg_query = ( models . Q ( transaction__date__lt = transaction_date ) | ( models . Q ( transaction__date = transaction_date ) & models . Q ( transaction_id__lte = self . transaction_id ) ) ) ) | Get the balance of the account associated with this leg following the transaction |
17,461 | def account_balance_before ( self ) : transaction_date = self . transaction . date return self . account . balance ( leg_query = ( models . Q ( transaction__date__lt = transaction_date ) | ( models . Q ( transaction__date = transaction_date ) & models . Q ( transaction_id__lt = self . transaction_id ) ) ) ) | Get the balance of the account associated with this leg before the transaction |
17,462 | def create_transaction ( self , to_account ) : from_account = self . statement_import . bank_account transaction = Transaction . objects . create ( ) Leg . objects . create ( transaction = transaction , account = from_account , amount = + ( self . amount * - 1 ) ) Leg . objects . create ( transaction = transaction , account = to_account , amount = - ( self . amount * - 1 ) ) transaction . date = self . date transaction . save ( ) self . transaction = transaction self . save ( ) return transaction | Create a transaction for this statement amount and account into to_account |
17,463 | def currency_exchange ( source , source_amount , destination , destination_amount , trading_account , fee_destination = None , fee_amount = None , date = None , description = None , ) : from hordak . models import Account , Transaction , Leg if trading_account . type != Account . TYPES . trading : raise TradingAccountRequiredError ( "Account {} must be a trading account" . format ( trading_account ) ) if ( fee_destination or fee_amount ) and not ( fee_destination and fee_amount ) : raise RuntimeError ( "You must specify either neither or both fee_destination and fee_amount." ) if fee_amount is None : fee_amount = Money ( 0 , source_amount . currency ) else : if fee_amount . currency != source_amount . currency : raise InvalidFeeCurrency ( "Fee amount currency ({}) must match source amount currency ({})" . format ( fee_amount . currency , source_amount . currency ) ) with db_transaction . atomic ( ) : transaction = Transaction . objects . create ( date = date or datetime . date . today ( ) , description = description or "Exchange of {} to {}, incurring {} fees" . format ( source_amount , destination_amount , "no" if fee_amount is None else fee_amount ) , ) Leg . objects . create ( transaction = transaction , account = source , amount = source_amount ) Leg . objects . create ( transaction = transaction , account = trading_account , amount = - ( source_amount - fee_amount ) ) if fee_amount and fee_destination : Leg . objects . create ( transaction = transaction , account = fee_destination , amount = - fee_amount , description = "Fees" , ) Leg . objects . create ( transaction = transaction , account = trading_account , amount = destination_amount ) Leg . objects . create ( transaction = transaction , account = destination , amount = - destination_amount ) return transaction | Exchange funds from one currency to another |
def cache_rate(self, currency, date, rate):
    """Cache an exchange rate for future use.

    Unsupported currencies are not cached; an info message is logged instead.

    Args:
        currency: Currency code the rate is for.
        date: Date the rate applies to.
        rate: The exchange rate value (stored as a string).
    """
    # BUG FIX: the original checked support for defaults.INTERNAL_CURRENCY
    # instead of the currency being cached, so the guard never depended on
    # the input at all.
    if not self.is_supported(currency):
        log.info('Tried to cache unsupported currency "{}". Ignoring.'.format(currency))
    else:
        cache.set(_cache_key(currency, date), str(rate), _cache_timeout(date))
def get_rate(self, currency, date):
    """Get the exchange rate for ``currency`` against the internal currency.

    Checks the cache first; falls back to the backend's ``_get_rate``.
    """
    if str(currency) == defaults.INTERNAL_CURRENCY:
        # The internal currency always trades 1:1 with itself.
        return Decimal(1)
    cached = cache.get(_cache_key(currency, date))
    return Decimal(cached) if cached else Decimal(self._get_rate(currency, date))
def convert(self, money, to_currency, date=None):
    """Convert ``money`` into ``to_currency`` using the exchange rate on ``date``.

    Defaults to today's rate. Returns a copy when the currencies already match.
    """
    if str(money.currency) == str(to_currency):
        # Nothing to convert; return a copy so callers cannot mutate the original.
        return copy.copy(money)
    when = date or datetime.date.today()
    rate = self.rate(money.currency, to_currency, when)
    return Money(amount=money.amount * rate, currency=to_currency)
def rate(self, from_currency, to_currency, date):
    """Get the exchange rate between the specified currencies on ``date``."""
    backend = self.backend
    # Convert out of from_currency into the internal currency (1 / rate), then
    # into to_currency. The operation order is kept identical to the original
    # because it matters for Decimal rounding.
    inverse_from = 1 / backend.get_rate(from_currency, date)
    return inverse_from * backend.get_rate(to_currency, date)
def currencies(self):
    """Get the currency codes of all monies with non-zero values."""
    codes = []
    for money in self.monies():
        if money.amount:
            codes.append(money.currency.code)
    return codes
def normalise(self, to_currency):
    """Normalise this balance into a single currency.

    Converts every held money object into ``to_currency`` and returns a new
    single-currency Balance with their sum.
    """
    total = Money(currency=to_currency)
    for money in self._money_obs:
        total = total + converter.convert(money, to_currency)
    return Balance([total])
def filter_active(self, *args, **kwargs):
    """Return only the active hits.

    A hit is active when it was created within the HITCOUNT_KEEP_HIT_ACTIVE
    grace period (default: 7 days). Extra filter arguments are applied on top.
    """
    grace = getattr(settings, 'HITCOUNT_KEEP_HIT_ACTIVE', {'days': 7})
    cutoff = timezone.now() - timedelta(**grace)
    return self.filter(created__gte=cutoff).filter(*args, **kwargs)
def unload_fixture(apps, schema_editor):
    """Migration helper: brutally delete all entries for the blog Post model."""
    post_model = apps.get_model("blog", "Post")
    post_model.objects.all().delete()
def get_ip(request):
    """Retrieve the remote IP address from the request data.

    Proxied requests may carry a comma-separated list of addresses in
    HTTP_X_FORWARDED_FOR (and some hosts put REMOTE_ADDR there); only the
    first IP is kept. Falls back to REMOTE_ADDR, then '127.0.0.1'. An
    unparseable value is replaced with the placeholder '10.0.0.1'.
    """
    ip_address = request.META.get('HTTP_X_FORWARDED_FOR',
                                  request.META.get('REMOTE_ADDR', '127.0.0.1'))
    if ip_address:
        try:
            match = IP_RE.match(ip_address)
            ip_address = match.group(0) if match else '10.0.0.1'
        except IndexError:
            # NOTE(review): group(0) cannot raise IndexError; kept for
            # behavioural parity with the original.
            pass
    return ip_address
def update_hit_count_ajax(request, *args, **kwargs):
    """Deprecated in 1.2 — use hitcount.views.HitCountJSONView instead.

    Emits a deprecation warning, then delegates to HitCountJSONView.
    """
    warnings.warn(
        "hitcount.views.update_hit_count_ajax is deprecated. "
        "Use hitcount.views.HitCountJSONView instead.",
        RemovedInHitCount13Warning)
    return HitCountJSONView.as_view()(request, *args, **kwargs)
def get_hit_count_from_obj_variable(context, obj_variable, tag_name):
    """Helper returning a HitCount for a given template object variable.

    Raises TemplateSyntaxError when the variable cannot be resolved or is
    not a model instance.
    """
    syntax_error = template.TemplateSyntaxError(
        "'%(a)s' requires a valid individual model variable "
        "in the form of '%(a)s for [model_obj]'.\n"
        "Got: %(b)s" % {'a': tag_name, 'b': obj_variable})
    try:
        obj = obj_variable.resolve(context)
    except template.VariableDoesNotExist:
        raise syntax_error
    try:
        ctype = ContentType.objects.get_for_model(obj)
    except AttributeError:
        # Not a model instance (get_for_model blew up on a plain object).
        raise syntax_error
    hit_count, _created = HitCount.objects.get_or_create(
        content_type=ctype, object_pk=obj.pk)
    return hit_count
def _start(self):
    """Start streaming the feed using the provided session and feed options."""
    # self._resp is assigned before raise_for_status() so a failed response
    # object remains inspectable, matching the original behaviour.
    self._resp = self._r_session.get(
        self._url, params=self._translate(self._options), stream=True)
    self._resp.raise_for_status()
    self._lines = self._resp.iter_lines(self._chunk_size)
def next(self):
    """Return the next item from the feed stream.

    Lazily starts the stream, honours the stop flag, and skips lines that
    _process_data marks as non-data (envelope lines, heartbeats, etc.).
    """
    while True:
        if not self._resp:
            self._start()
        if self._stop:
            raise StopIteration
        skip, data = self._process_data(next_(self._lines))
        if skip:
            continue
        return data
def _process_data(self, line):
    """Validate and process ``line``, converting it to a Python object if necessary.

    Returns a ``(skip, data)`` pair: ``skip`` tells the caller to discard this
    line; ``data`` is the decoded payload, ``None`` for a heartbeat, or an
    error dict when the line is not valid JSON.
    """
    skip = False
    if self._raw_data:
        # Raw mode: hand the line back untouched.
        return skip, line
    line = unicode_(line)
    if not line:
        # Empty lines are heartbeats on continuous/longpoll feeds; emit them
        # (as None) only while last_seq has not yet been seen.
        if (self._options.get('heartbeat', False)
                and self._options.get('feed') in ('continuous', 'longpoll')
                and not self._last_seq):
            line = None
        else:
            skip = True
    elif line in ('{"results":[', '],'):
        # Structural wrapper lines of the feed envelope — not documents.
        skip = True
    elif line[-1] == ',':
        # Strip the trailing comma separating entries in the results array.
        line = line[:-1]
    elif line[:10] == ('"last_seq"'):
        # The final last_seq line lost its opening brace with the wrapper;
        # restore it so it parses as an object.
        line = '{' + line
    try:
        if line:
            data = json.loads(line)
            if data.get('last_seq'):
                # Record the sequence for resuming, but do not emit it.
                self._last_seq = data['last_seq']
                skip = True
        else:
            data = None
    except ValueError:
        data = {"error": "Bad JSON line", "line": line}
    return skip, data
def next(self):
    """Return the next change from the stream, converting the response to JSON.

    When a previous response finished (last_seq was recorded), the feed is
    restarted from that sequence. Not supported against plain CouchDB.
    """
    while True:
        if self._source == 'CouchDB':
            raise CloudantFeedException(101)
        if self._last_seq:
            # The previous response completed; resume where it left off.
            self._options.update({'since': self._last_seq})
            self._resp = None
            self._last_seq = None
        if not self._resp:
            self._start()
        if self._stop:
            raise StopIteration
        skip, data = self._process_data(next_(self._lines))
        if skip:
            continue
        return data
def features(self):
    """Lazily fetch and cache the server's feature list.

    The (possibly empty) list is cached so metadata() is only called once.
    """
    if self._features is None:
        self._features = self.metadata().get("features", [])
    return self._features
def connect(self):
    """Start an authentication session for the client.

    Chooses the session type from the configured auth mode — admin party,
    basic auth, IAM, or cookie auth — mounts any custom transport adapter,
    applies the client user header, logs in, and installs a response hook
    that appends error content to raised HTTP errors.
    """
    if self.r_session:
        # Tear down any existing session before opening a new one.
        self.session_logout()
    if self.admin_party:
        # Admin party: no credentials at all; IAM is forced off.
        self._use_iam = False
        self.r_session = ClientSession(timeout=self._timeout)
    elif self._use_basic_auth:
        # Basic auth also excludes IAM.
        self._use_iam = False
        self.r_session = BasicSession(
            self._user,
            self._auth_token,
            self.server_url,
            timeout=self._timeout)
    elif self._use_iam:
        self.r_session = IAMSession(
            self._auth_token,
            self.server_url,
            auto_renew=self._auto_renew,
            client_id=self._iam_client_id,
            client_secret=self._iam_client_secret,
            timeout=self._timeout)
    else:
        # Default: cookie-based session authentication.
        self.r_session = CookieSession(
            self._user,
            self._auth_token,
            self.server_url,
            auto_renew=self._auto_renew,
            timeout=self._timeout)
    if self.adapter is not None:
        self.r_session.mount(self.server_url, self.adapter)
    if self._client_user_header is not None:
        self.r_session.headers.update(self._client_user_header)
    self.session_login()
    # Enrich raised HTTPErrors with the response body for easier debugging.
    self.r_session.hooks['response'].append(append_response_error_content)
def disconnect(self):
    """End the client authentication session: log out and clean up locally."""
    if self.r_session:
        self.session_logout()
    self.r_session = None
    self.clear()
def session_login(self, user=None, passwd=None):
    """Perform a session login by posting auth info to the _session endpoint.

    Thin wrapper around change_credentials().
    """
    self.change_credentials(user=user, auth_token=passwd)
def change_credentials(self, user=None, auth_token=None):
    """Change login credentials and re-authenticate the current session."""
    session = self.r_session
    session.set_credentials(user, auth_token)
    session.login()
def all_dbs(self):
    """Retrieve the list of all database names for the current client."""
    resp = self.r_session.get('/'.join((self.server_url, '_all_dbs')))
    resp.raise_for_status()
    return response_to_json_dict(resp)
def create_database(self, dbname, partitioned=False, **kwargs):
    """Create a database remotely, cache it locally, and return it.

    When ``throw_on_exists`` is truthy in ``kwargs`` and the database already
    exists, a CloudantClientException (412) is raised. Other database
    exceptions from create() are deliberately not propagated here, matching
    the library's existing behaviour.
    """
    database = self._DATABASE_CLASS(self, dbname, partitioned=partitioned)
    try:
        database.create(kwargs.get('throw_on_exists', False))
    except CloudantDatabaseException as ex:
        if ex.status_code == 412:
            raise CloudantClientException(412, dbname)
    super(CouchDB, self).__setitem__(dbname, database)
    return database
def delete_database(self, dbname):
    """Remove the named database remotely and from the local cache.

    Raises CloudantClientException (404) if the database does not exist.
    """
    database = self._DATABASE_CLASS(self, dbname)
    if not database.exists():
        raise CloudantClientException(404, dbname)
    database.delete()
    # Only evict from the local cache if it was actually cached.
    if dbname in list(self.keys()):
        super(CouchDB, self).__delitem__(dbname)
def metadata(self):
    """Retrieve the remote server metadata dictionary."""
    response = self.r_session.get(self.server_url)
    response.raise_for_status()
    return response_to_json_dict(response)
def keys(self, remote=False):
    """Return the database names for this client.

    By default only the locally cached names are returned; pass
    ``remote=True`` to query the server for all databases.
    """
    if remote:
        return self.all_dbs()
    return list(super(CouchDB, self).keys())
def get(self, key, default=None, remote=False):
    """Dictionary-style get for database objects, with a default.

    With ``remote=True`` the database is looked up on the server and cached
    locally when it exists; otherwise only the local cache is consulted.
    Returns ``default`` when the database is not found.
    """
    if not remote:
        return super(CouchDB, self).get(key, default)
    database = self._DATABASE_CLASS(self, key)
    if not database.exists():
        return default
    super(CouchDB, self).__setitem__(key, database)
    return database
def _usage_endpoint(self, endpoint, year=None, month=None):
    """Common helper for usage/billing reports with optional year/month.

    With no year and month the bare endpoint is queried; otherwise both must
    convert to a positive year and a month in 1..12, or
    CloudantArgumentError (101) is raised.
    """
    if year is None and month is None:
        resp = self.r_session.get(endpoint)
    else:
        try:
            valid = int(year) > 0 and int(month) in range(1, 13)
        except (ValueError, TypeError):
            valid = False
        if not valid:
            raise CloudantArgumentError(101, year, month)
        resp = self.r_session.get(
            '/'.join((endpoint, str(int(year)), str(int(month)))))
    resp.raise_for_status()
    return response_to_json_dict(resp)
def bill(self, year=None, month=None):
    """Retrieve Cloudant billing data, optionally for a given year and month."""
    return self._usage_endpoint(
        '/'.join((self.server_url, '_api', 'v2', 'bill')), year, month)
def volume_usage(self, year=None, month=None):
    """Retrieve Cloudant volume usage data, optionally for a given year and month."""
    return self._usage_endpoint(
        '/'.join((self.server_url, '_api', 'v2', 'usage', 'data_volume')),
        year, month)
def requests_usage(self, year=None, month=None):
    """Retrieve Cloudant requests usage data, optionally for a given year and month."""
    return self._usage_endpoint(
        '/'.join((self.server_url, '_api', 'v2', 'usage', 'requests')),
        year, month)
def shared_databases(self):
    """Retrieve the names of databases shared with this account."""
    endpoint = '/'.join(
        (self.server_url, '_api', 'v2', 'user', 'shared_databases'))
    resp = self.r_session.get(endpoint)
    resp.raise_for_status()
    # The payload nests the list under 'shared_databases'; default to [].
    return response_to_json_dict(resp).get('shared_databases', [])
def cors_configuration(self):
    """Retrieve the current CORS configuration."""
    endpoint = '/'.join(
        (self.server_url, '_api', 'v2', 'user', 'config', 'cors'))
    response = self.r_session.get(endpoint)
    response.raise_for_status()
    return response_to_json_dict(response)
def disable_cors(self):
    """Switch CORS off entirely, clearing credentials and all origins."""
    return self.update_cors_configuration(
        enable_cors=False,
        allow_credentials=False,
        origins=[],
        overwrite_origins=True)
def update_cors_configuration(self, enable_cors=True, allow_credentials=True,
                              origins=None, overwrite_origins=False):
    """Merge the existing CORS configuration with updated values.

    With ``overwrite_origins=True`` the supplied settings replace the stored
    configuration wholesale. Otherwise the flags are updated and origins are
    merged with the existing set — a ``["*"]`` wildcard replaces everything.
    """
    new_origins = [] if origins is None else origins
    if overwrite_origins:
        return self._write_cors_configuration({
            'enable_cors': enable_cors,
            'allow_credentials': allow_credentials,
            'origins': new_origins})
    current = self.cors_configuration()
    merged = current.copy()
    merged['enable_cors'] = enable_cors
    merged['allow_credentials'] = allow_credentials
    if new_origins == ["*"]:
        merged['origins'] = ["*"]
    elif current.get('origins') != new_origins:
        # Union of old and new origins (order not guaranteed, as before).
        merged['origins'] = list(
            set(current.get('origins')).union(set(new_origins)))
    return self._write_cors_configuration(merged)
def _write_cors_configuration(self, config):
    """PUT the full CORS config, overwriting whatever the server holds."""
    endpoint = '/'.join(
        (self.server_url, '_api', 'v2', 'user', 'config', 'cors'))
    response = self.r_session.put(
        endpoint,
        data=json.dumps(config, cls=self.encoder),
        headers={'Content-Type': 'application/json'})
    response.raise_for_status()
    return response_to_json_dict(response)
def bluemix(cls, vcap_services, instance_name=None, service_name=None, **kwargs):
    """Create a Cloudant session from a VCAP_SERVICES environment variable.

    Raises CloudantClientException (103) when the named service instance
    cannot be found in the VCAP_SERVICES document.
    """
    try:
        service = CloudFoundryService(
            vcap_services,
            instance_name=instance_name,
            service_name=service_name or 'cloudantNoSQLDB')
    except CloudantException:
        raise CloudantClientException(103)
    if hasattr(service, 'iam_api_key'):
        # IAM-enabled service instances authenticate via an API key.
        return Cloudant.iam(service.username, service.iam_api_key,
                            url=service.url, **kwargs)
    return Cloudant(service.username, service.password,
                    url=service.url, **kwargs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.