idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
23,700 | def relabel_variables ( self , mapping , inplace = True ) : if not inplace : return self . copy ( ) . relabel_variables ( mapping , inplace = True ) try : old_labels = set ( mapping ) new_labels = set ( mapping . values ( ) ) except TypeError : raise ValueError ( "mapping targets must be hashable objects" ) variables = self . variables for v in new_labels : if v in variables and v not in old_labels : raise ValueError ( ( 'A variable cannot be relabeled "{}" without also relabeling ' "the existing variable of the same name" ) . format ( v ) ) shared = old_labels & new_labels if shared : old_to_intermediate , intermediate_to_new = resolve_label_conflict ( mapping , old_labels , new_labels ) self . relabel_variables ( old_to_intermediate , inplace = True ) self . relabel_variables ( intermediate_to_new , inplace = True ) return self for oldterm , bias in list ( self . items ( ) ) : newterm = frozenset ( ( mapping . get ( v , v ) for v in oldterm ) ) if newterm != oldterm : self [ newterm ] = bias del self [ oldterm ] return self | Relabel variables of a binary polynomial as specified by mapping . |
23,701 | def scale ( self , scalar , ignored_terms = None ) : if ignored_terms is None : ignored_terms = set ( ) else : ignored_terms = { asfrozenset ( term ) for term in ignored_terms } for term in self : if term not in ignored_terms : self [ term ] *= scalar | Multiply the polynomial by the given scalar . |
23,702 | def from_hising ( cls , h , J , offset = None ) : poly = { ( k , ) : v for k , v in h . items ( ) } poly . update ( J ) if offset is not None : poly [ frozenset ( [ ] ) ] = offset return cls ( poly , Vartype . SPIN ) | Construct a binary polynomial from a higher - order Ising problem . |
23,703 | def to_hising ( self ) : if self . vartype is Vartype . BINARY : return self . to_spin ( ) . to_hising ( ) h = { } J = { } offset = 0 for term , bias in self . items ( ) : if len ( term ) == 0 : offset += bias elif len ( term ) == 1 : v , = term h [ v ] = bias else : J [ tuple ( term ) ] = bias return h , J , offset | Construct a higher - order Ising problem from a binary polynomial . |
23,704 | def greedy_coloring ( adj ) : coloring = { } colors = { } possible_colors = { n : set ( range ( len ( adj ) ) ) for n in adj } while possible_colors : n = min ( possible_colors , key = lambda n : len ( possible_colors [ n ] ) ) color = min ( possible_colors [ n ] ) coloring [ n ] = color if color not in colors : colors [ color ] = { n } else : colors [ color ] . add ( n ) for neighbor in adj [ n ] : if neighbor in possible_colors and color in possible_colors [ neighbor ] : possible_colors [ neighbor ] . remove ( color ) del possible_colors [ n ] return coloring , colors | Determines a vertex coloring . |
23,705 | def sample ( self , bqm , beta_range = None , num_reads = 10 , num_sweeps = 1000 ) : if not isinstance ( num_reads , int ) : raise TypeError ( "'samples' should be a positive integer" ) if num_reads < 1 : raise ValueError ( "'samples' should be a positive integer" ) h , J , offset = bqm . to_ising ( ) samples = [ ] energies = [ ] for __ in range ( num_reads ) : sample , energy = ising_simulated_annealing ( h , J , beta_range , num_sweeps ) samples . append ( sample ) energies . append ( energy ) response = SampleSet . from_samples ( samples , Vartype . SPIN , energies ) response . change_vartype ( bqm . vartype , offset , inplace = True ) return response | Sample from low - energy spin states using simulated annealing . |
23,706 | def _scale_back_response ( bqm , response , scalar , ignored_interactions , ignored_variables , ignore_offset ) : if len ( ignored_interactions ) + len ( ignored_variables ) + ignore_offset == 0 : response . record . energy = np . divide ( response . record . energy , scalar ) else : response . record . energy = bqm . energies ( ( response . record . sample , response . variables ) ) return response | Helper function to scale back the response of sample method |
23,707 | def _check_params ( ignored_variables , ignored_interactions ) : if ignored_variables is None : ignored_variables = set ( ) elif not isinstance ( ignored_variables , abc . Container ) : ignored_variables = set ( ignored_variables ) if ignored_interactions is None : ignored_interactions = set ( ) elif not isinstance ( ignored_interactions , abc . Container ) : ignored_interactions = set ( ignored_interactions ) return ignored_variables , ignored_interactions | Helper for sample methods |
23,708 | def _calc_norm_coeff ( h , J , bias_range , quadratic_range , ignored_variables , ignored_interactions ) : if ignored_variables is None or ignored_interactions is None : raise ValueError ( 'ignored interactions or variables cannot be None' ) def parse_range ( r ) : if isinstance ( r , Number ) : return - abs ( r ) , abs ( r ) return r def min_and_max ( iterable ) : if not iterable : return 0 , 0 return min ( iterable ) , max ( iterable ) if quadratic_range is None : linear_range , quadratic_range = bias_range , bias_range else : linear_range = bias_range lin_range , quad_range = map ( parse_range , ( linear_range , quadratic_range ) ) lin_min , lin_max = min_and_max ( [ v for k , v in h . items ( ) if k not in ignored_variables ] ) quad_min , quad_max = min_and_max ( [ v for k , v in J . items ( ) if not check_isin ( k , ignored_interactions ) ] ) inv_scalar = max ( lin_min / lin_range [ 0 ] , lin_max / lin_range [ 1 ] , quad_min / quad_range [ 0 ] , quad_max / quad_range [ 1 ] ) if inv_scalar != 0 : return 1. / inv_scalar else : return 1. | Helper function to calculate normalization coefficient |
23,709 | def _scaled_bqm ( bqm , scalar , bias_range , quadratic_range , ignored_variables , ignored_interactions , ignore_offset ) : bqm_copy = bqm . copy ( ) if scalar is None : scalar = _calc_norm_coeff ( bqm_copy . linear , bqm_copy . quadratic , bias_range , quadratic_range , ignored_variables , ignored_interactions ) bqm_copy . scale ( scalar , ignored_variables = ignored_variables , ignored_interactions = ignored_interactions , ignore_offset = ignore_offset ) bqm_copy . info . update ( { 'scalar' : scalar } ) return bqm_copy | Helper function of sample for scaling |
23,710 | def sample ( self , bqm , scalar = None , bias_range = 1 , quadratic_range = None , ignored_variables = None , ignored_interactions = None , ignore_offset = False , ** parameters ) : ignored_variables , ignored_interactions = _check_params ( ignored_variables , ignored_interactions ) child = self . child bqm_copy = _scaled_bqm ( bqm , scalar , bias_range , quadratic_range , ignored_variables , ignored_interactions , ignore_offset ) response = child . sample ( bqm_copy , ** parameters ) return _scale_back_response ( bqm , response , bqm_copy . info [ 'scalar' ] , ignored_variables , ignored_interactions , ignore_offset ) | Scale and sample from the provided binary quadratic model . |
23,711 | def sample_ising ( self , h , J , offset = 0 , scalar = None , bias_range = 1 , quadratic_range = None , ignored_variables = None , ignored_interactions = None , ignore_offset = False , ** parameters ) : if any ( len ( inter ) > 2 for inter in J ) : import warnings msg = ( "Support for higher order Ising models in ScaleComposite is " "deprecated and will be removed in dimod 0.9.0. Please use " "PolyScaleComposite.sample_hising instead." ) warnings . warn ( msg , DeprecationWarning ) from dimod . reference . composites . higherordercomposites import PolyScaleComposite from dimod . higherorder . polynomial import BinaryPolynomial poly = BinaryPolynomial . from_hising ( h , J , offset = offset ) ignored_terms = set ( ) if ignored_variables is not None : ignored_terms . update ( frozenset ( v ) for v in ignored_variables ) if ignored_interactions is not None : ignored_terms . update ( frozenset ( inter ) for inter in ignored_interactions ) if ignore_offset : ignored_terms . add ( frozenset ( ) ) return PolyScaleComposite ( self . child ) . sample_poly ( poly , scalar = scalar , bias_range = bias_range , poly_range = quadratic_range , ignored_terms = ignored_terms , ** parameters ) bqm = BinaryQuadraticModel . from_ising ( h , J , offset = offset ) return self . sample ( bqm , scalar = scalar , bias_range = bias_range , quadratic_range = quadratic_range , ignored_variables = ignored_variables , ignored_interactions = ignored_interactions , ignore_offset = ignore_offset , ** parameters ) | Scale and sample from the problem provided by h J offset |
23,712 | def chimera_anticluster ( m , n = None , t = 4 , multiplier = 3.0 , cls = BinaryQuadraticModel , subgraph = None , seed = None ) : if seed is None : seed = numpy . random . randint ( 2 ** 32 , dtype = np . uint32 ) r = numpy . random . RandomState ( seed ) m = int ( m ) if n is None : n = m else : n = int ( n ) t = int ( t ) ldata = np . zeros ( m * n * t * 2 ) if m and n and t : inrow , incol = zip ( * _iter_chimera_tile_edges ( m , n , t ) ) if m > 1 or n > 1 : outrow , outcol = zip ( * _iter_chimera_intertile_edges ( m , n , t ) ) else : outrow = outcol = tuple ( ) qdata = r . choice ( ( - 1. , 1. ) , size = len ( inrow ) + len ( outrow ) ) qdata [ len ( inrow ) : ] *= multiplier irow = inrow + outrow icol = incol + outcol else : irow = icol = qdata = tuple ( ) bqm = cls . from_numpy_vectors ( ldata , ( irow , icol , qdata ) , 0.0 , SPIN ) if subgraph is not None : nodes , edges = subgraph subbqm = cls . empty ( SPIN ) try : subbqm . add_variables_from ( ( v , bqm . linear [ v ] ) for v in nodes ) except KeyError : msg = "given 'subgraph' contains nodes not in Chimera({}, {}, {})" . format ( m , n , t ) raise ValueError ( msg ) try : subbqm . add_interactions_from ( ( u , v , bqm . adj [ u ] [ v ] ) for u , v in edges ) except KeyError : msg = "given 'subgraph' contains edges not in Chimera({}, {}, {})" . format ( m , n , t ) raise ValueError ( msg ) bqm = subbqm return bqm | Generate an anticluster problem on a Chimera lattice . |
23,713 | def dump ( bqm , fp , vartype_header = False ) : for triplet in _iter_triplets ( bqm , vartype_header ) : fp . write ( '%s\n' % triplet ) | Dump a binary quadratic model to a string in COOrdinate format . |
23,714 | def loads ( s , cls = BinaryQuadraticModel , vartype = None ) : return load ( s . split ( '\n' ) , cls = cls , vartype = vartype ) | Load a COOrdinate formatted binary quadratic model from a string . |
23,715 | def load ( fp , cls = BinaryQuadraticModel , vartype = None ) : pattern = re . compile ( _LINE_REGEX ) vartype_pattern = re . compile ( _VARTYPE_HEADER_REGEX ) triplets = [ ] for line in fp : triplets . extend ( pattern . findall ( line ) ) vt = vartype_pattern . findall ( line ) if vt : if vartype is None : vartype = vt [ 0 ] else : if isinstance ( vartype , str ) : vartype = Vartype [ vartype ] else : vartype = Vartype ( vartype ) if Vartype [ vt [ 0 ] ] != vartype : raise ValueError ( "vartypes from headers and/or inputs do not match" ) if vartype is None : raise ValueError ( "vartype must be provided either as a header or as an argument" ) bqm = cls . empty ( vartype ) for u , v , bias in triplets : if u == v : bqm . add_variable ( int ( u ) , float ( bias ) ) else : bqm . add_interaction ( int ( u ) , int ( v ) , float ( bias ) ) return bqm | Load a COOrdinate formatted binary quadratic model from a file . |
23,716 | def remove_variable ( self , v ) : if v not in self : return adj = self . adj while adj [ v ] : self . remove_interaction ( v , next ( iter ( adj [ v ] ) ) ) del self . linear [ v ] try : del self . _counterpart if self . vartype is not Vartype . BINARY and hasattr ( self , '_binary' ) : del self . _binary elif self . vartype is not Vartype . SPIN and hasattr ( self , '_spin' ) : del self . _spin except AttributeError : pass | Remove variable v and all its interactions from a binary quadratic model . |
23,717 | def remove_interaction ( self , u , v ) : try : del self . quadratic [ ( u , v ) ] except KeyError : return try : del self . _counterpart if self . vartype is not Vartype . BINARY and hasattr ( self , '_binary' ) : del self . _binary elif self . vartype is not Vartype . SPIN and hasattr ( self , '_spin' ) : del self . _spin except AttributeError : pass | Remove interaction of variables u v from a binary quadratic model . |
23,718 | def remove_interactions_from ( self , interactions ) : for u , v in interactions : self . remove_interaction ( u , v ) | Remove all specified interactions from the binary quadratic model . |
23,719 | def add_offset ( self , offset ) : self . offset += offset try : self . _counterpart . add_offset ( offset ) except AttributeError : pass | Add specified value to the offset of a binary quadratic model . |
23,720 | def scale ( self , scalar , ignored_variables = None , ignored_interactions = None , ignore_offset = False ) : if ignored_variables is None : ignored_variables = set ( ) elif not isinstance ( ignored_variables , abc . Container ) : ignored_variables = set ( ignored_variables ) if ignored_interactions is None : ignored_interactions = set ( ) elif not isinstance ( ignored_interactions , abc . Container ) : ignored_interactions = set ( ignored_interactions ) linear = self . linear for v in linear : if v in ignored_variables : continue linear [ v ] *= scalar quadratic = self . quadratic for u , v in quadratic : if ( u , v ) in ignored_interactions or ( v , u ) in ignored_interactions : continue quadratic [ ( u , v ) ] *= scalar if not ignore_offset : self . offset *= scalar try : self . _counterpart . scale ( scalar , ignored_variables = ignored_variables , ignored_interactions = ignored_interactions ) except AttributeError : pass | Multiply by the specified scalar all the biases and offset of a binary quadratic model . |
23,721 | def fix_variable ( self , v , value ) : adj = self . adj linear = self . linear if value not in self . vartype . value : raise ValueError ( "expected value to be in {}, received {} instead" . format ( self . vartype . value , value ) ) removed_interactions = [ ] for u in adj [ v ] : self . add_variable ( u , value * adj [ v ] [ u ] ) removed_interactions . append ( ( u , v ) ) self . remove_interactions_from ( removed_interactions ) self . add_offset ( value * linear [ v ] ) self . remove_variable ( v ) | Fix the value of a variable and remove it from a binary quadratic model . |
23,722 | def fix_variables ( self , fixed ) : for v , val in fixed . items ( ) : self . fix_variable ( v , val ) | Fix the value of the variables and remove it from a binary quadratic model . |
23,723 | def flip_variable ( self , v ) : adj = self . adj linear = self . linear quadratic = self . quadratic if v not in adj : return if self . vartype is Vartype . SPIN : linear [ v ] *= - 1. for u in adj [ v ] : adj [ v ] [ u ] *= - 1. adj [ u ] [ v ] *= - 1. if ( u , v ) in quadratic : quadratic [ ( u , v ) ] *= - 1. elif ( v , u ) in quadratic : quadratic [ ( v , u ) ] *= - 1. else : raise RuntimeError ( "quadratic is missing an interaction" ) elif self . vartype is Vartype . BINARY : self . offset += linear [ v ] linear [ v ] *= - 1 for u in adj [ v ] : bias = adj [ v ] [ u ] adj [ v ] [ u ] *= - 1. adj [ u ] [ v ] *= - 1. linear [ u ] += bias if ( u , v ) in quadratic : quadratic [ ( u , v ) ] *= - 1. elif ( v , u ) in quadratic : quadratic [ ( v , u ) ] *= - 1. else : raise RuntimeError ( "quadratic is missing an interaction" ) else : raise RuntimeError ( "Unexpected vartype" ) try : self . _counterpart . flip_variable ( v ) except AttributeError : pass | Flip variable v in a binary quadratic model . |
23,724 | def update ( self , bqm , ignore_info = True ) : self . add_variables_from ( bqm . linear , vartype = bqm . vartype ) self . add_interactions_from ( bqm . quadratic , vartype = bqm . vartype ) self . add_offset ( bqm . offset ) if not ignore_info : self . info . update ( bqm . info ) | Update one binary quadratic model from another . |
23,725 | def contract_variables ( self , u , v ) : adj = self . adj if u not in adj : raise ValueError ( "{} is not a variable in the binary quadratic model" . format ( u ) ) if v not in adj : raise ValueError ( "{} is not a variable in the binary quadratic model" . format ( v ) ) if v in adj [ u ] : if self . vartype is Vartype . BINARY : self . add_variable ( u , adj [ u ] [ v ] ) elif self . vartype is Vartype . SPIN : self . add_offset ( adj [ u ] [ v ] ) else : raise RuntimeError ( "unexpected vartype" ) self . remove_interaction ( u , v ) neighbors = list ( adj [ v ] ) for w in neighbors : self . add_interaction ( u , w , adj [ v ] [ w ] ) self . remove_interaction ( v , w ) self . remove_variable ( v ) | Enforce u v being the same variable in a binary quadratic model . |
23,726 | def relabel_variables ( self , mapping , inplace = True ) : try : old_labels = set ( mapping ) new_labels = set ( itervalues ( mapping ) ) except TypeError : raise ValueError ( "mapping targets must be hashable objects" ) for v in new_labels : if v in self . linear and v not in old_labels : raise ValueError ( ( 'A variable cannot be relabeled "{}" without also relabeling ' "the existing variable of the same name" ) . format ( v ) ) if inplace : shared = old_labels & new_labels if shared : old_to_intermediate , intermediate_to_new = resolve_label_conflict ( mapping , old_labels , new_labels ) self . relabel_variables ( old_to_intermediate , inplace = True ) self . relabel_variables ( intermediate_to_new , inplace = True ) return self linear = self . linear quadratic = self . quadratic adj = self . adj for old in list ( linear ) : if old not in mapping : continue new = mapping [ old ] new_interactions = [ ( new , v , adj [ old ] [ v ] ) for v in adj [ old ] ] self . add_variable ( new , linear [ old ] ) self . add_interactions_from ( new_interactions ) self . remove_variable ( old ) return self else : return BinaryQuadraticModel ( { mapping . get ( v , v ) : bias for v , bias in iteritems ( self . linear ) } , { ( mapping . get ( u , u ) , mapping . get ( v , v ) ) : bias for ( u , v ) , bias in iteritems ( self . quadratic ) } , self . offset , self . vartype ) | Relabel variables of a binary quadratic model as specified by mapping . |
23,727 | def change_vartype ( self , vartype , inplace = True ) : if not inplace : new_model = BinaryQuadraticModel ( { } , { } , 0.0 , vartype ) new_model . add_variables_from ( self . linear , vartype = self . vartype ) new_model . add_interactions_from ( self . quadratic , vartype = self . vartype ) new_model . add_offset ( self . offset ) return new_model if vartype is self . vartype : return self if self . vartype is Vartype . SPIN and vartype is Vartype . BINARY : linear , quadratic , offset = self . spin_to_binary ( self . linear , self . quadratic , self . offset ) elif self . vartype is Vartype . BINARY and vartype is Vartype . SPIN : linear , quadratic , offset = self . binary_to_spin ( self . linear , self . quadratic , self . offset ) else : raise RuntimeError ( "something has gone wrong. unknown vartype conversion." ) for v in linear : self . remove_variable ( v ) self . add_offset ( - self . offset ) self . vartype = vartype self . add_variables_from ( linear ) self . add_interactions_from ( quadratic ) self . add_offset ( offset ) return self | Create a binary quadratic model with the specified vartype . |
23,728 | def spin_to_binary ( linear , quadratic , offset ) : new_linear = { v : 2. * bias for v , bias in iteritems ( linear ) } new_quadratic = { } for ( u , v ) , bias in iteritems ( quadratic ) : new_quadratic [ ( u , v ) ] = 4. * bias new_linear [ u ] -= 2. * bias new_linear [ v ] -= 2. * bias offset += sum ( itervalues ( quadratic ) ) - sum ( itervalues ( linear ) ) return new_linear , new_quadratic , offset | convert linear quadratic and offset from spin to binary . Does no checking of vartype . Copies all of the values into new objects . |
23,729 | def binary_to_spin ( linear , quadratic , offset ) : h = { } J = { } linear_offset = 0.0 quadratic_offset = 0.0 for u , bias in iteritems ( linear ) : h [ u ] = .5 * bias linear_offset += bias for ( u , v ) , bias in iteritems ( quadratic ) : J [ ( u , v ) ] = .25 * bias h [ u ] += .25 * bias h [ v ] += .25 * bias quadratic_offset += bias offset += .5 * linear_offset + .25 * quadratic_offset return h , J , offset | convert linear quadratic and offset from binary to spin . Does no checking of vartype . Copies all of the values into new objects . |
23,730 | def copy ( self ) : return BinaryQuadraticModel ( self . linear , self . quadratic , self . offset , self . vartype , ** self . info ) | Create a copy of a BinaryQuadraticModel . |
23,731 | def energy ( self , sample ) : linear = self . linear quadratic = self . quadratic if isinstance ( sample , SampleView ) : sample = dict ( sample ) en = self . offset en += sum ( linear [ v ] * sample [ v ] for v in linear ) en += sum ( sample [ u ] * sample [ v ] * quadratic [ ( u , v ) ] for u , v in quadratic ) return en | Determine the energy of the specified sample of a binary quadratic model . |
23,732 | def energies ( self , samples_like , dtype = np . float ) : samples , labels = as_samples ( samples_like ) if all ( v == idx for idx , v in enumerate ( labels ) ) : ldata , ( irow , icol , qdata ) , offset = self . to_numpy_vectors ( dtype = dtype ) else : ldata , ( irow , icol , qdata ) , offset = self . to_numpy_vectors ( variable_order = labels , dtype = dtype ) energies = samples . dot ( ldata ) + ( samples [ : , irow ] * samples [ : , icol ] ) . dot ( qdata ) + offset return np . asarray ( energies , dtype = dtype ) | Determine the energies of the given samples . |
23,733 | def to_coo ( self , fp = None , vartype_header = False ) : import dimod . serialization . coo as coo if fp is None : return coo . dumps ( self , vartype_header ) else : coo . dump ( self , fp , vartype_header ) | Serialize the binary quadratic model to a COOrdinate_ format encoding . |
23,734 | def from_coo ( cls , obj , vartype = None ) : import dimod . serialization . coo as coo if isinstance ( obj , str ) : return coo . loads ( obj , cls = cls , vartype = vartype ) return coo . load ( obj , cls = cls , vartype = vartype ) | Deserialize a binary quadratic model from a COOrdinate_ format encoding . |
23,735 | def to_serializable ( self , use_bytes = False , bias_dtype = np . float32 , bytes_type = bytes ) : from dimod . package_info import __version__ schema_version = "2.0.0" try : variables = sorted ( self . variables ) except TypeError : variables = list ( self . variables ) num_variables = len ( variables ) index_dtype = np . uint16 if num_variables <= 2 ** 16 else np . uint32 ldata , ( irow , icol , qdata ) , offset = self . to_numpy_vectors ( dtype = bias_dtype , index_dtype = index_dtype , sort_indices = True , variable_order = variables ) doc = { "basetype" : "BinaryQuadraticModel" , "type" : type ( self ) . __name__ , "version" : { "dimod" : __version__ , "bqm_schema" : schema_version } , "variable_labels" : variables , "variable_type" : self . vartype . name , "info" : self . info , "offset" : float ( offset ) , "use_bytes" : bool ( use_bytes ) } if use_bytes : doc . update ( { 'linear_biases' : array2bytes ( ldata , bytes_type = bytes_type ) , 'quadratic_biases' : array2bytes ( qdata , bytes_type = bytes_type ) , 'quadratic_head' : array2bytes ( irow , bytes_type = bytes_type ) , 'quadratic_tail' : array2bytes ( icol , bytes_type = bytes_type ) } ) else : doc . update ( { 'linear_biases' : ldata . tolist ( ) , 'quadratic_biases' : qdata . tolist ( ) , 'quadratic_head' : irow . tolist ( ) , 'quadratic_tail' : icol . tolist ( ) } ) return doc | Convert the binary quadratic model to a serializable object . |
23,736 | def from_serializable ( cls , obj ) : if obj . get ( "version" , { "bqm_schema" : "1.0.0" } ) [ "bqm_schema" ] != "2.0.0" : return cls . _from_serializable_v1 ( obj ) variables = [ tuple ( v ) if isinstance ( v , list ) else v for v in obj [ "variable_labels" ] ] if obj [ "use_bytes" ] : ldata = bytes2array ( obj [ "linear_biases" ] ) qdata = bytes2array ( obj [ "quadratic_biases" ] ) irow = bytes2array ( obj [ "quadratic_head" ] ) icol = bytes2array ( obj [ "quadratic_tail" ] ) else : ldata = obj [ "linear_biases" ] qdata = obj [ "quadratic_biases" ] irow = obj [ "quadratic_head" ] icol = obj [ "quadratic_tail" ] offset = obj [ "offset" ] vartype = obj [ "variable_type" ] bqm = cls . from_numpy_vectors ( ldata , ( irow , icol , qdata ) , offset , str ( vartype ) , variable_order = variables ) bqm . info . update ( obj [ "info" ] ) return bqm | Deserialize a binary quadratic model . |
23,737 | def to_networkx_graph ( self , node_attribute_name = 'bias' , edge_attribute_name = 'bias' ) : import networkx as nx BQM = nx . Graph ( ) BQM . add_nodes_from ( ( ( v , { node_attribute_name : bias , 'vartype' : self . vartype } ) for v , bias in iteritems ( self . linear ) ) ) BQM . add_edges_from ( ( ( u , v , { edge_attribute_name : bias } ) for ( u , v ) , bias in iteritems ( self . quadratic ) ) ) BQM . offset = self . offset BQM . vartype = self . vartype return BQM | Convert a binary quadratic model to NetworkX graph format . |
23,738 | def from_networkx_graph ( cls , G , vartype = None , node_attribute_name = 'bias' , edge_attribute_name = 'bias' ) : if vartype is None : if not hasattr ( G , 'vartype' ) : msg = ( "either 'vartype' argument must be provided or " "the given graph should have a vartype attribute." ) raise ValueError ( msg ) vartype = G . vartype linear = G . nodes ( data = node_attribute_name , default = 0 ) quadratic = G . edges ( data = edge_attribute_name , default = 0 ) offset = getattr ( G , 'offset' , 0 ) return cls ( linear , quadratic , offset , vartype ) | Create a binary quadratic model from a NetworkX graph . |
23,739 | def to_ising ( self ) : return dict ( self . spin . linear ) , dict ( self . spin . quadratic ) , self . spin . offset | Converts a binary quadratic model to Ising format . |
23,740 | def from_ising ( cls , h , J , offset = 0.0 ) : if isinstance ( h , abc . Sequence ) : h = dict ( enumerate ( h ) ) return cls ( h , J , offset , Vartype . SPIN ) | Create a binary quadratic model from an Ising problem . |
23,741 | def to_qubo ( self ) : qubo = dict ( self . binary . quadratic ) qubo . update ( ( ( v , v ) , bias ) for v , bias in iteritems ( self . binary . linear ) ) return qubo , self . binary . offset | Convert a binary quadratic model to QUBO format . |
23,742 | def from_qubo ( cls , Q , offset = 0.0 ) : linear = { } quadratic = { } for ( u , v ) , bias in iteritems ( Q ) : if u == v : linear [ u ] = bias else : quadratic [ ( u , v ) ] = bias return cls ( linear , quadratic , offset , Vartype . BINARY ) | Create a binary quadratic model from a QUBO model . |
23,743 | def to_numpy_matrix ( self , variable_order = None ) : import numpy as np if variable_order is None : num_variables = len ( self ) mat = np . zeros ( ( num_variables , num_variables ) , dtype = float ) try : for v , bias in iteritems ( self . binary . linear ) : mat [ v , v ] = bias except IndexError : raise ValueError ( ( "if 'variable_order' is not provided, binary quadratic model must be " "index labeled [0, ..., N-1]" ) ) for ( u , v ) , bias in iteritems ( self . binary . quadratic ) : if u < v : mat [ u , v ] = bias else : mat [ v , u ] = bias else : num_variables = len ( variable_order ) idx = { v : i for i , v in enumerate ( variable_order ) } mat = np . zeros ( ( num_variables , num_variables ) , dtype = float ) try : for v , bias in iteritems ( self . binary . linear ) : mat [ idx [ v ] , idx [ v ] ] = bias except KeyError as e : raise ValueError ( ( "variable {} is missing from variable_order" . format ( e ) ) ) for ( u , v ) , bias in iteritems ( self . binary . quadratic ) : iu , iv = idx [ u ] , idx [ v ] if iu < iv : mat [ iu , iv ] = bias else : mat [ iv , iu ] = bias return mat | Convert a binary quadratic model to NumPy 2D array . |
23,744 | def from_numpy_matrix ( cls , mat , variable_order = None , offset = 0.0 , interactions = None ) : import numpy as np if mat . ndim != 2 : raise ValueError ( "expected input mat to be a square 2D numpy array" ) num_row , num_col = mat . shape if num_col != num_row : raise ValueError ( "expected input mat to be a square 2D numpy array" ) if variable_order is None : variable_order = list ( range ( num_row ) ) if interactions is None : interactions = [ ] bqm = cls ( { } , { } , offset , Vartype . BINARY ) for ( row , col ) , bias in np . ndenumerate ( mat ) : if row == col : bqm . add_variable ( variable_order [ row ] , bias ) elif bias : bqm . add_interaction ( variable_order [ row ] , variable_order [ col ] , bias ) for u , v in interactions : bqm . add_interaction ( u , v , 0.0 ) return bqm | Create a binary quadratic model from a NumPy array . |
23,745 | def to_numpy_vectors ( self , variable_order = None , dtype = np . float , index_dtype = np . int64 , sort_indices = False ) : linear = self . linear quadratic = self . quadratic num_variables = len ( linear ) num_interactions = len ( quadratic ) irow = np . empty ( num_interactions , dtype = index_dtype ) icol = np . empty ( num_interactions , dtype = index_dtype ) qdata = np . empty ( num_interactions , dtype = dtype ) if variable_order is None : try : ldata = np . fromiter ( ( linear [ v ] for v in range ( num_variables ) ) , count = num_variables , dtype = dtype ) except KeyError : raise ValueError ( ( "if 'variable_order' is not provided, binary quadratic model must be " "index labeled [0, ..., N-1]" ) ) for idx , ( ( u , v ) , bias ) in enumerate ( quadratic . items ( ) ) : irow [ idx ] = u icol [ idx ] = v qdata [ idx ] = bias else : try : ldata = np . fromiter ( ( linear [ v ] for v in variable_order ) , count = num_variables , dtype = dtype ) except KeyError : raise ValueError ( "provided 'variable_order' does not match binary quadratic model" ) label_to_idx = { v : idx for idx , v in enumerate ( variable_order ) } for idx , ( ( u , v ) , bias ) in enumerate ( quadratic . items ( ) ) : irow [ idx ] = label_to_idx [ u ] icol [ idx ] = label_to_idx [ v ] qdata [ idx ] = bias if sort_indices : swaps = irow > icol if swaps . any ( ) : irow [ swaps ] , icol [ swaps ] = icol [ swaps ] , irow [ swaps ] order = np . lexsort ( ( irow , icol ) ) if not ( order == range ( len ( order ) ) ) . all ( ) : irow = irow [ order ] icol = icol [ order ] qdata = qdata [ order ] return ldata , ( irow , icol , qdata ) , ldata . dtype . type ( self . offset ) | Convert a binary quadratic model to numpy arrays . |
23,746 | def from_numpy_vectors ( cls , linear , quadratic , offset , vartype , variable_order = None ) : try : heads , tails , values = quadratic except ValueError : raise ValueError ( "quadratic should be a 3-tuple" ) if not len ( heads ) == len ( tails ) == len ( values ) : raise ValueError ( "row, col, and bias should be of equal length" ) if variable_order is None : variable_order = list ( range ( len ( linear ) ) ) linear = { v : float ( bias ) for v , bias in zip ( variable_order , linear ) } quadratic = { ( variable_order [ u ] , variable_order [ v ] ) : float ( bias ) for u , v , bias in zip ( heads , tails , values ) } return cls ( linear , quadratic , offset , vartype ) | Create a binary quadratic model from vectors . |
23,747 | def to_pandas_dataframe ( self ) : import pandas as pd try : variable_order = sorted ( self . linear ) except TypeError : variable_order = list ( self . linear ) return pd . DataFrame ( self . to_numpy_matrix ( variable_order = variable_order ) , index = variable_order , columns = variable_order ) | Convert a binary quadratic model to pandas DataFrame format . |
23,748 | def from_pandas_dataframe ( cls , bqm_df , offset = 0.0 , interactions = None ) : if interactions is None : interactions = [ ] bqm = cls ( { } , { } , offset , Vartype . BINARY ) for u , row in bqm_df . iterrows ( ) : for v , bias in row . iteritems ( ) : if u == v : bqm . add_variable ( u , bias ) elif bias : bqm . add_interaction ( u , v , bias ) for u , v in interactions : bqm . add_interaction ( u , v , 0.0 ) return bqm | Create a binary quadratic model from a QUBO model formatted as a pandas DataFrame . |
def ising_energy(sample, h, J, offset=0.0):
    """Calculate the Ising energy of `sample`.

    Args:
        sample (dict): Spin value for each variable.
        h (dict): Linear biases keyed by variable.
        J (dict): Quadratic biases keyed by variable pairs.
        offset (float, optional): Constant energy offset.

    Returns:
        The energy offset + sum_v h[v]*s[v] + sum_{(u,v)} J[u,v]*s[u]*s[v].
    """
    energy = offset
    energy += sum(bias * sample[v] for v, bias in h.items())
    energy += sum(bias * sample[u] * sample[v] for (u, v), bias in J.items())
    return energy
def qubo_energy(sample, Q, offset=0.0):
    """Calculate the QUBO energy of `sample`.

    Args:
        sample (dict): Binary value (0/1) for each variable.
        Q (dict): QUBO coefficients keyed by variable pairs.
        offset (float, optional): Constant energy offset.

    Returns:
        The energy offset + sum_{(u,v)} Q[u,v]*x[u]*x[v].
    """
    return offset + sum(bias * sample[u] * sample[v]
                        for (u, v), bias in Q.items())
def ising_to_qubo(h, J, offset=0.0):
    """Convert an Ising problem (h, J) to a QUBO problem.

    Uses the spin/binary change of variables s = 2x - 1.

    Args:
        h (dict): Linear biases; every coupled variable is assumed to
            appear in h.
        J (dict): Quadratic biases; zero-bias terms are dropped.
        offset (float, optional): Constant offset of the Ising problem.

    Returns:
        tuple: (Q, offset) where Q is a dict of QUBO coefficients.
    """
    # six.iteritems/itervalues replaced with the Python 3 dict methods,
    # consistent with the rest of the module
    q = {(v, v): 2. * bias for v, bias in h.items()}

    for (u, v), bias in J.items():
        if bias == 0.0:
            continue
        q[(u, v)] = 4. * bias
        q[(u, u)] -= 2. * bias
        q[(v, v)] -= 2. * bias

    offset += sum(J.values()) - sum(h.values())

    return q, offset
def qubo_to_ising(Q, offset=0.0):
    """Convert a QUBO problem to an Ising problem.

    Uses the binary/spin change of variables x = (s + 1)/2.

    Args:
        Q (dict): QUBO coefficients keyed by variable pairs; diagonal
            entries are linear, off-diagonal entries quadratic.
        offset (float, optional): Constant offset of the QUBO problem.

    Returns:
        tuple: (h, J, offset) of the equivalent Ising problem.
    """
    h = {}
    J = {}
    linear_offset = 0.0
    quadratic_offset = 0.0

    # six.iteritems replaced with the Python 3 dict method
    for (u, v), bias in Q.items():
        if u == v:
            # diagonal entry: pure linear contribution
            h[u] = h.get(u, 0.0) + .5 * bias
            linear_offset += bias
        else:
            if bias != 0.0:
                J[(u, v)] = .25 * bias

            # each quadratic term also contributes to both linear biases
            h[u] = h.get(u, 0.0) + .25 * bias
            h[v] = h.get(v, 0.0) + .25 * bias
            quadratic_offset += bias

    offset += .5 * linear_offset + .25 * quadratic_offset

    return h, J, offset
def resolve_label_conflict(mapping, old_labels=None, new_labels=None):
    """Resolve a self-labeling conflict by creating an intermediate labeling.

    Args:
        mapping (dict): Maps old labels to new labels.
        old_labels (set, optional): The set of old labels; derived from
            `mapping` if not given.
        new_labels (set, optional): The set of new labels; derived from
            `mapping` if not given.

    Returns:
        tuple: (old_to_intermediate, intermediate_to_new) dicts such that
        applying them in order is equivalent to applying `mapping`.
    """
    if old_labels is None:
        old_labels = set(mapping)
    if new_labels is None:
        # six.itervalues replaced with the Python 3 dict method
        new_labels = set(mapping.values())

    # fresh integer labels guaranteed eventually not to collide
    counter = itertools.count(2 * len(mapping))

    old_to_intermediate = {}
    intermediate_to_new = {}

    for old, new in mapping.items():
        if old == new:
            # a self-map needs no relabeling at all
            continue

        if old in new_labels or new in old_labels:
            # conflicting relabel: route through a fresh intermediate label
            lbl = next(counter)
            while lbl in new_labels or lbl in old_labels:
                lbl = next(counter)

            old_to_intermediate[old] = lbl
            intermediate_to_new[lbl] = new
        else:
            # no conflict: relabel directly in the first pass
            old_to_intermediate[old] = new

    return old_to_intermediate, intermediate_to_new
def fix_variables(bqm, sampling_mode=True):
    """Determine assignments for some variables of a binary quadratic model.

    Uses the roof-duality C++ extension. With ``sampling_mode`` True
    (method 2) only variables fixed in all ground states are returned;
    otherwise (method 1) additional variables may be fixed.

    Args:
        bqm: Binary quadratic model.
        sampling_mode (bool, optional): Restrict fixing to variables that
            take the same value in every ground state.

    Returns:
        dict: Fixed variables mapped to values in the bqm's vartype.

    Raises:
        ImportError: If the roof_duality extension is not built.
    """
    try:
        from dimod.roof_duality._fix_variables import fix_variables_wrapper
    except ImportError:
        raise ImportError("c++ extension roof_duality is not built")

    if sampling_mode:
        method = 2  # sampling-mode fixing only
    else:
        method = 1  # also fix via the stronger (non-sampling) method

    linear = bqm.linear

    if all(v in linear for v in range(len(bqm))):
        # already index-labeled [0, n); the wrapper works on the binary form
        fixed = fix_variables_wrapper(bqm.binary, method)
    else:
        # relabel to integers (sorted when labels are sortable), fix, then
        # map the fixed variables back to the original labels
        try:
            inverse_mapping = dict(enumerate(sorted(linear)))
        except TypeError:
            # unsortable (mixed-type) labels
            inverse_mapping = dict(enumerate(linear))
        mapping = {v: i for i, v in inverse_mapping.items()}

        fixed = fix_variables_wrapper(bqm.relabel_variables(mapping, inplace=False).binary, method)
        fixed = {inverse_mapping[v]: val for v, val in fixed.items()}

    if bqm.vartype is Vartype.SPIN:
        # the wrapper returned 0/1 values; convert back to -1/+1
        return {v: 2 * val - 1 for v, val in fixed.items()}
    else:
        return fixed
def dimod_object_hook(obj):
    """JSON object hook that decodes serialized dimod objects.

    Recognizes serialized SampleSet and BinaryQuadraticModel mappings;
    anything else is returned unchanged.
    """
    if _is_sampleset_v2(obj):
        return SampleSet.from_serializable(obj)
    if _is_bqm_v2(obj):
        return BinaryQuadraticModel.from_serializable(obj)
    return obj
23,756 | def _decode_label ( label ) : if isinstance ( label , list ) : return tuple ( _decode_label ( v ) for v in label ) return label | Convert a list label into a tuple . Works recursively on nested lists . |
23,757 | def _encode_label ( label ) : if isinstance ( label , tuple ) : return [ _encode_label ( v ) for v in label ] return label | Convert a tuple label into a list . Works recursively on nested tuples . |
def make_quadratic(poly, strength, vartype=None, bqm=None):
    """Create a binary quadratic model from a higher-order polynomial.

    Args:
        poly (dict): Polynomial mapping terms (iterables of variables)
            to biases.
        strength (float): Strength of the product-constraint penalties.
        vartype (Vartype, optional): Variable type; required if `bqm` is
            not provided.
        bqm (BinaryQuadraticModel, optional): Model to build into; its
            vartype must agree with `vartype` when both are given.

    Returns:
        BinaryQuadraticModel: Quadratic model with product variables
        substituted for higher-order terms; the substitutions are
        recorded in ``bqm.info['reduction']``.

    Raises:
        ValueError: If neither vartype nor bqm is given, or they disagree.
        TypeError: If `bqm` is not a BinaryQuadraticModel.
    """
    if bqm is None:
        if vartype is None:
            raise ValueError("one of vartype and bqm must be provided")
        bqm = BinaryQuadraticModel.empty(vartype)
    else:
        if not isinstance(bqm, BinaryQuadraticModel):
            raise TypeError('create_using must be a BinaryQuadraticModel')
        # NOTE(review): this message reads like a copy-paste; the condition
        # is actually a vartype mismatch between `vartype` and `bqm`
        if vartype is not None and vartype is not bqm.vartype:
            raise ValueError("one of vartype and create_using must be provided")

    bqm.info['reduction'] = {}

    # constant and linear terms go straight into the bqm; everything of
    # degree >= 2 is handed to the recursive degree reduction
    new_poly = {}
    # six.iteritems replaced with the Python 3 dict method
    for term, bias in poly.items():
        if len(term) == 0:
            bqm.add_offset(bias)
        elif len(term) == 1:
            v, = term
            bqm.add_variable(v, bias)
        else:
            new_poly[term] = bias

    return _reduce_degree(bqm, new_poly, vartype, strength)
def _reduce_degree(bqm, poly, vartype, scale):
    """Helper for make_quadratic: recursively reduce polynomial degree.

    Repeatedly replaces the most frequent variable pair appearing in
    terms of degree > 2 with a product variable (plus a scaled penalty
    constraint) until every term has degree <= 2.
    """
    if all(len(term) <= 2 for term in poly):
        # termination: the polynomial is now quadratic
        bqm.add_interactions_from(poly)
        return bqm

    # count pair occurrences over the higher-order terms only
    paircounter = Counter()
    for term in poly:
        if len(term) > 2:
            for u, v in itertools.combinations(term, 2):
                pair = frozenset((u, v))
                paircounter[pair] += 1

    pair, __ = paircounter.most_common(1)[0]
    u, v = pair

    # label for the new product variable, made unique within the bqm
    p = '{}*{}'.format(u, v)
    while p in bqm.linear:
        p = '_' + p

    if vartype is Vartype.BINARY:
        constraint = _binary_product([u, v, p])

        bqm.info['reduction'][(u, v)] = {'product': p}
    else:
        # the SPIN reduction requires an auxiliary variable as well
        aux = 'aux{},{}'.format(u, v)
        while aux in bqm.linear:
            aux = '_' + aux
        constraint = _spin_product([u, v, p, aux])

        bqm.info['reduction'][(u, v)] = {'product': p, 'auxiliary': aux}

    # penalize assignments where p != u*v
    constraint.scale(scale)
    bqm.update(constraint)

    # rewrite the polynomial with p substituted for the chosen pair
    new_poly = {}
    for interaction, bias in poly.items():
        if u in interaction and v in interaction:
            if len(interaction) == 2:
                # the pair itself becomes a linear bias on the product var
                assert len(interaction) >= 2
                bqm.add_variable(p, bias)
                continue

            interaction = tuple(s for s in interaction if s not in pair)
            interaction += (p,)

        if interaction in new_poly:
            new_poly[interaction] += bias
        else:
            new_poly[interaction] = bias

    return _reduce_degree(bqm, new_poly, vartype, scale)
def poly_energy(sample_like, poly):
    """Calculate the energy of a sample under a higher-order polynomial.

    Deprecated in favor of BinaryPolynomial.energy.
    """
    msg = ("poly_energy is deprecated and will be removed in dimod 0.9.0."
           "In the future, use BinaryPolynomial.energy")
    warnings.warn(msg, DeprecationWarning)

    return BinaryPolynomial(poly, 'SPIN').energy(sample_like)
def poly_energies(samples_like, poly):
    """Calculate the energies of samples under a higher-order polynomial.

    Deprecated in favor of BinaryPolynomial.energies.
    """
    msg = ("poly_energies is deprecated and will be removed in dimod 0.9.0."
           "In the future, use BinaryPolynomial.energies")
    warnings.warn(msg, DeprecationWarning)

    return BinaryPolynomial(poly, 'SPIN').energies(samples_like)
def frustrated_loop(graph, num_cycles, R=float('inf'), cycle_predicates=tuple(), max_failed_cycles=100, seed=None):
    """Generate a frustrated-loop problem as a SPIN binary quadratic model.

    Args:
        graph (tuple): (nodes, edges) pair describing the graph.
        num_cycles (int): Number of frustrated cycles to embed.
        R (number, optional): Maximum absolute coupling; an edge reaching
            it is removed from further cycle searches.
        cycle_predicates (iterable of callables, optional): Each candidate
            cycle must satisfy all predicates to be accepted.
        max_failed_cycles (int, optional): Abort after this many rejected
            candidate cycles.
        seed (int, optional): Seed for the random number generator.

    Returns:
        BinaryQuadraticModel: SPIN model with the accumulated loop couplings.

    Raises:
        ValueError: On non-positive num_cycles, R, or max_failed_cycles.
        RuntimeError: If not enough cycles could be generated.
    """
    nodes, edges = graph

    if num_cycles <= 0:
        raise ValueError("num_cycles should be a positive integer")
    if R <= 0:
        raise ValueError("R should be a positive integer")
    if max_failed_cycles <= 0:
        raise ValueError("max_failed_cycles should be a positive integer")

    if seed is None:
        # NOTE(review): mixes the `numpy` and `np` aliases; presumably both
        # are bound at module level -- confirm
        seed = numpy.random.randint(2 ** 32, dtype=np.uint32)
    r = numpy.random.RandomState(seed)

    # mutable adjacency used by the random cycle search
    adj = {v: set() for v in nodes}
    for u, v in edges:
        if u in adj:
            adj[u].add(v)
        else:
            adj[u] = {v}
        if v in adj:
            adj[v].add(u)
        else:
            adj[v] = {u}

    bqm = BinaryQuadraticModel({v: 0.0 for v in nodes},
                               {edge: 0.0 for edge in edges},
                               0.0,
                               SPIN)

    failed_cycles = 0
    good_cycles = 0
    while good_cycles < num_cycles and failed_cycles < max_failed_cycles:
        cycle = _random_cycle(adj, r)

        if cycle is None or not all(pred(cycle) for pred in cycle_predicates):
            failed_cycles += 1
            continue
        good_cycles += 1

        # ferromagnetic couplings around the cycle, with one randomly
        # chosen edge flipped to frustrate it
        cycle_J = {(cycle[i - 1], cycle[i]): -1. for i in range(len(cycle))}
        idx = r.randint(len(cycle))
        cycle_J[(cycle[idx - 1], cycle[idx])] *= -1.

        bqm.add_interactions_from(cycle_J)

        # drop saturated edges from the search graph
        for u, v in cycle_J:
            if abs(bqm.adj[u][v]) >= R:
                adj[u].remove(v)
                adj[v].remove(u)

    if good_cycles < num_cycles:
        # NOTE(review): raised without a message; consider describing the
        # failure (too many rejected candidate cycles)
        raise RuntimeError

    return bqm
def _random_cycle(adj, random_state):
    """Find a cycle using a random walk over the adjacency dict.

    Returns the cycle as a list of nodes, or None if the walk dead-ends.
    """
    # pick a uniformly random starting node (iterating dict order)
    n = random_state.randint(len(adj))
    for idx, v in enumerate(adj):
        if idx == n:
            break
    start = v

    walk = [start]
    visited = {start: 0}  # node -> position of its first visit in the walk

    while True:
        if len(walk) > 1:
            # never immediately backtrack along the previous edge
            previous = walk[-2]
            neighbors = [u for u in adj[walk[-1]] if u != previous]
        else:
            neighbors = list(adj[walk[-1]])

        if not neighbors:
            # dead end: no cycle found from this walk
            return None

        u = random_state.choice(neighbors)
        if u in visited:
            # closed a loop: return the walk since u was first visited
            return walk[visited[u]:]
        else:
            walk.append(u)
            visited[u] = len(visited)
def sample(self, bqm, num_spin_reversal_transforms=2, spin_reversal_variables=None, **kwargs):
    """Sample from the bqm using randomized spin-reversal transforms.

    Args:
        bqm: Binary quadratic model.
        num_spin_reversal_transforms (int, optional): Number of transforms
            (and therefore child-sampler invocations).
        spin_reversal_variables: Deprecated; ignored.
        **kwargs: Passed through to the child sampler.

    Returns:
        Concatenation of the child responses with the flips undone.
    """
    if spin_reversal_variables is not None:
        import warnings
        warnings.warn("'spin_reversal_variables' kwarg is deprecated and no longer functions.",
                      DeprecationWarning)

    responses = []

    flipped_bqm = bqm.copy()
    transform = {v: False for v in bqm.variables}

    for ii in range(num_spin_reversal_transforms):
        # flip each variable with probability 1/2; `transform` tracks the
        # cumulative flip state across rounds
        for v in bqm:
            if random() > .5:
                transform[v] = not transform[v]
                flipped_bqm.flip_variable(v)

        flipped_response = self.child.sample(flipped_bqm, **kwargs)

        # indices of the columns that are currently flipped
        tf_idxs = [flipped_response.variables.index(v)
                   for v, flip in transform.items() if flip]

        # undo the flips in the returned samples
        if bqm.vartype is Vartype.SPIN:
            flipped_response.record.sample[:, tf_idxs] = -1 * flipped_response.record.sample[:, tf_idxs]
        else:
            flipped_response.record.sample[:, tf_idxs] = 1 - flipped_response.record.sample[:, tf_idxs]

        responses.append(flipped_response)

    return concatenate(responses)
def append_index(self, num_rows):
    """Add an index column wide enough for `num_rows` rows."""
    width = len(str(num_rows - 1))
    header = ' ' * width

    def fmt(datum):
        return str(datum.idx).ljust(width)

    self.append(header, fmt)
def append_sample(self, v, vartype, _left=False):
    """Add a column showing variable `v`'s value in each sample."""
    vstr = str(v).rjust(2)
    length = len(vstr)

    if vartype is dimod.SPIN:
        def fmt(datum):
            return _spinstr(datum.sample[v], rjust=length)
    else:
        def fmt(datum):
            return _binarystr(datum.sample[v], rjust=length)

    self.append(vstr, fmt, _left=_left)
def append_vector(self, name, vector, _left=False):
    """Add a column rendering the data vector `name`.

    Formatting depends on the vector's dtype: integers are right-justified
    to the widest value, floats are printed positionally with 6-digit
    precision, and anything else falls back to a truncated repr. Headers
    longer than the column width are truncated with a trailing '.'.
    """
    if np.issubdtype(vector.dtype, np.integer):
        # width fits the largest-magnitude value; header capped at 7 chars
        largest = str(max(vector.max(), vector.min(), key=abs))
        length = max(len(largest), min(7, len(name)))

        if len(name) > length:
            header = name[:length - 1] + '.'
        else:
            header = name.rjust(length)

        def f(datum):
            return str(getattr(datum, name)).rjust(length)
    elif np.issubdtype(vector.dtype, np.floating):
        largest = np.format_float_positional(max(vector.max(), vector.min(), key=abs),
                                             precision=6, trim='0')
        length = max(len(largest), min(7, len(name)))

        if len(name) > length:
            header = name[:length - 1] + '.'
        else:
            header = name.rjust(length)

        def f(datum):
            return np.format_float_positional(getattr(datum, name),
                                              precision=6, trim='0',
                                              ).rjust(length)
    else:
        # fallback: fixed 7-wide repr, truncated with an ellipsis
        length = 7

        if len(name) > length:
            header = name[:length - 1] + '.'
        else:
            header = name.rjust(length)

        def f(datum):
            r = repr(getattr(datum, name))
            if len(r) > length:
                r = r[:length - 3] + '...'
            return r.rjust(length)

    self.append(header, f, _left=_left)
def format(self, obj, **kwargs):
    """Return the formatted representation of `obj` as a string."""
    buffer = StringIO()
    self.fprint(obj, stream=buffer, **kwargs)
    return buffer.getvalue()
def fprint(self, obj, stream=None, **kwargs):
    """Print the formatted representation of `obj` on `stream`.

    Defaults to stdout; per-call kwargs override the stored options.

    Raises:
        TypeError: If `obj` is not a type this formatter can handle.
    """
    if stream is None:
        stream = sys.stdout

    options = self.options
    options.update(kwargs)

    if isinstance(obj, dimod.SampleSet):
        self._print_sampleset(obj, stream, **options)
        return

    raise TypeError("cannot format type {}".format(type(obj)))
def as_samples(samples_like, dtype=None, copy=False, order='C'):
    """Convert a samples_like object to a 2d NumPy array plus variable labels.

    Accepts a SampleSet, a (samples, labels) tuple, a single dict or flat
    list sample, or an iterable/array of samples.

    Args:
        samples_like: Samples in any supported form (iterators rejected).
        dtype (dtype, optional): Output dtype; defaults to int8 for
            non-array input.
        copy (bool, optional): Force a copy of array input.
        order ({'C', 'F'}, optional): Memory layout of the output array.

    Returns:
        tuple: (2d array of samples, list of variable labels).

    Raises:
        TypeError: If `samples_like` is an iterator.
        ValueError: On label/shape mismatches or >2 dimensions.
    """
    if isinstance(samples_like, SampleSet):
        # labels come straight from the sample set
        return samples_like.record.sample, list(samples_like.variables)

    if isinstance(samples_like, tuple) and len(samples_like) == 2:
        # explicit (samples, labels) pair
        samples_like, labels = samples_like
        if not isinstance(labels, list) and labels is not None:
            labels = list(labels)
    else:
        labels = None

    if isinstance(samples_like, abc.Iterator):
        raise TypeError('samples_like cannot be an iterator')

    if isinstance(samples_like, abc.Mapping):
        # a single sample given as a dict
        return as_samples(([samples_like], labels), dtype=dtype)

    if (isinstance(samples_like, list) and samples_like
            and isinstance(samples_like[0], numbers.Number)):
        # a single flat sample given as a list of numbers
        return as_samples(([samples_like], labels), dtype=dtype)

    if not isinstance(samples_like, np.ndarray):
        if any(isinstance(sample, abc.Mapping) for sample in samples_like):
            # replace dict samples with lists ordered by the labels
            samples_like, old = list(samples_like), samples_like

            if labels is None:
                first = samples_like[0]
                if isinstance(first, abc.Mapping):
                    labels = list(first)
                else:
                    labels = list(range(len(first)))

            for idx, sample in enumerate(old):
                if isinstance(sample, abc.Mapping):
                    try:
                        samples_like[idx] = [sample[v] for v in labels]
                    except KeyError:
                        raise ValueError("samples_like and labels do not match")

    if dtype is None and not hasattr(samples_like, 'dtype'):
        # only default the dtype for non-array input
        dtype = np.int8

    arr = np.array(samples_like, dtype=dtype, copy=copy, order=order)

    if arr.ndim > 2:
        raise ValueError("expected samples_like to be <= 2 dimensions")
    if arr.ndim < 2:
        if arr.size:
            # a single sample becomes a 1xN matrix
            arr = np.atleast_2d(arr)
        elif labels:
            # empty samples but labeled variables
            arr = arr.reshape((0, len(labels)))
        else:
            arr = arr.reshape((0, 0))

    if labels is None:
        # default integer labels
        return arr, list(range(arr.shape[1]))
    elif len(labels) != arr.shape[1]:
        raise ValueError("samples_like and labels dimensions do not match")
    else:
        return arr, labels
def concatenate(samplesets, defaults=None):
    """Combine sample sets into a single SampleSet.

    Args:
        samplesets (iterable): Sample sets to combine; the vartype and
            variable order of the first are used for all of them.
        defaults (dict, optional): Default values for record fields that
            are missing from some sample sets; passed to stack_arrays.

    Returns:
        SampleSet: All samples in order, with empty info.

    Raises:
        ValueError: If `samplesets` is empty.
    """
    itertup = iter(samplesets)

    try:
        first = next(itertup)
    except StopIteration:
        raise ValueError("samplesets must contain at least one SampleSet")

    vartype = first.vartype
    variables = first.variables

    # remaining sample sets are converted to the first's vartype/ordering
    records = [first.record]
    records.extend(_iter_records(itertup, vartype, variables))

    # merge the per-sample records; defaults fill any missing fields
    record = recfunctions.stack_arrays(records, defaults=defaults,
                                       asrecarray=True, usemask=False)

    return SampleSet(record, variables, {}, vartype)
def from_samples_bqm(cls, samples_like, bqm, **kwargs):
    """Build a SampleSet from raw samples, using `bqm` for energies and vartype."""
    # resolve to a (array, labels) pair so the bqm can compute energies
    samples_like = as_samples(samples_like)
    energies = bqm.energies(samples_like)
    return cls.from_samples(samples_like, energy=energies,
                            vartype=bqm.vartype, **kwargs)
def data_vectors(self):
    """Per-sample data as a dict of field name -> vector, excluding 'sample'."""
    record = self.record
    return {name: record[name]
            for name in record.dtype.names
            if name != 'sample'}
def first(self):
    """Return the lowest-energy sample datum.

    Raises:
        ValueError: If the sample set is empty.
    """
    data_iter = self.data(sorted_by='energy', name='Sample')
    try:
        return next(data_iter)
    except StopIteration:
        raise ValueError('{} is empty'.format(self.__class__.__name__))
def done(self):
    """Return True unless an attached future exists and is still pending."""
    future = getattr(self, '_future', None)
    if future is None or not hasattr(future, 'done'):
        # no pending computation attached
        return True
    return future.done()
def samples(self, n=None, sorted_by='energy'):
    """Return an indexable view over the samples.

    Args:
        n (int, optional): If given, only the first n samples.
        sorted_by (str, optional): Record field to sort by; None keeps
            record order.

    Returns:
        SamplesArray: A view of the (possibly reordered) samples.
    """
    if n is not None:
        return self.samples(sorted_by=sorted_by)[:n]

    if sorted_by is None:
        ordered = self.record.sample
    else:
        ordered = self.record.sample[np.argsort(self.record[sorted_by])]

    return SamplesArray(ordered, self.variables)
def copy(self):
    """Create a shallow copy (record and info copied, variables shared)."""
    cls = self.__class__
    return cls(self.record.copy(),
               self.variables,
               self.info.copy(),
               self.vartype)
def aggregate(self):
    """Create a new SampleSet with duplicate samples merged.

    The first occurrence (in record order) of each unique sample is kept
    and its ``num_occurrences`` becomes the sum over its duplicates.
    """
    # unique rows, the index of each row's first occurrence, and the map
    # from original rows to their unique-row index (sorted order)
    _, indices, inverse = np.unique(self.record.sample, axis=0,
                                    return_index=True, return_inverse=True)

    # np.unique sorts rows; reorder to first-appearance order
    order = np.argsort(indices)
    indices = indices[order]

    record = self.record[indices]

    # re-accumulate occurrences of every original row into the kept rows
    record.num_occurrences = 0
    for old_idx, new_idx in enumerate(inverse):
        # NOTE(review): `order[new_idx]` maps through the permutation, not
        # its inverse; verify that sorted-unique index -> kept-row position
        # is correct here (the two agree only when `order` is an involution)
        new_idx = order[new_idx]
        record[new_idx].num_occurrences += self.record[old_idx].num_occurrences

    return type(self)(record,
                      self.variables,
                      copy.deepcopy(self.info),
                      self.vartype)
def append_variables(self, samples_like, sort_labels=True):
    """Create a new sample set with the given variables and values appended.

    Args:
        samples_like: Values for the new variables; either one sample
            (broadcast to every row) or one sample per existing row.
        sort_labels (bool, optional): Passed through to from_samples.

    Returns:
        New sample set containing the additional variables.

    Raises:
        ValueError: On a shape mismatch or duplicated variables.
    """
    samples, labels = as_samples(samples_like)

    num_samples = len(self)

    if samples.shape[0] == num_samples:
        # one appended sample per existing row
        pass
    elif samples.shape[0] == 1 and num_samples:
        # broadcast a single sample to every row
        samples = np.repeat(samples, num_samples, axis=0)
    else:
        msg = ("mismatched shape. The samples to append should either be "
               "a single sample or should match the length of the sample "
               "set. Empty sample sets cannot be appended to.")
        raise ValueError(msg)

    # the appended variables must be disjoint from the existing ones
    variables = self.variables
    if any(v in variables for v in labels):
        msg = "Appended samples cannot contain variables in sample set"
        raise ValueError(msg)

    new_variables = list(variables) + labels
    new_samples = np.hstack((self.record.sample, samples))

    return type(self).from_samples((new_samples, new_variables),
                                   self.vartype,
                                   info=copy.deepcopy(self.info),
                                   sort_labels=sort_labels,
                                   **self.data_vectors)
def lowest(self, rtol=1.e-5, atol=1.e-8):
    """Return a sample set containing only the (approximately) lowest-energy samples.

    Args:
        rtol (float, optional): Relative tolerance for the energy comparison.
        atol (float, optional): Absolute tolerance for the energy comparison.

    Returns:
        Sample set restricted to rows whose energy is close to the minimum.
    """
    if not len(self):
        # nothing to filter
        return self.copy()

    record = self.record
    keep = np.isclose(record.energy, np.min(record.energy),
                      rtol=rtol, atol=atol)

    return type(self)(record[keep],
                      self.variables,
                      copy.deepcopy(self.info),
                      self.vartype)
def slice(self, *slice_args, **kwargs):
    """Create a new SampleSet with rows sliced by standard slice syntax.

    Args:
        *slice_args: As to built-in slice() (stop, or start/stop[/step]).
        sorted_by (str, optional, keyword-only): Record field to sort by
            before slicing; defaults to 'energy'. None keeps record order.

    Returns:
        The sliced sample set.

    Raises:
        TypeError: On unexpected keyword arguments.
    """
    # keyword-only 'sorted_by' emulated via **kwargs
    sorted_by = kwargs.pop('sorted_by', 'energy')
    if kwargs:
        raise TypeError('slice got an unexpected '
                        'keyword argument {!r}'.format(kwargs.popitem()[0]))

    if slice_args:
        selector = slice(*slice_args)
    else:
        # no args: take everything
        selector = slice(None)

    if sorted_by is None:
        record = self.record[selector]
    else:
        sort_indices = np.argsort(self.record[sorted_by])
        record = self.record[sort_indices[selector]]

    return type(self)(record,
                      self.variables,
                      copy.deepcopy(self.info),
                      self.vartype)
def to_pandas_dataframe(self, sample_column=False):
    """Convert the sample set to a pandas DataFrame.

    Args:
        sample_column (bool, optional): If True, samples appear as dicts
            in a single 'sample' column; otherwise one column per variable
            plus a column per data vector.

    Returns:
        pandas.DataFrame
    """
    import pandas as pd

    if sample_column:
        return pd.DataFrame(self.data(sorted_by=None, sample_dict_cast=True))

    df = pd.DataFrame(self.record.sample, columns=self.variables)
    for field in sorted(self.record.dtype.fields):
        if field != 'sample':
            df.loc[:, field] = self.record[field]
    return df
def penalty_satisfaction(response, bqm):
    """Return a 0/1 vector marking samples that satisfy all product constraints.

    For each reduction (qi, qj) -> product variable recorded in
    ``bqm.info['reduction']``, checks per sample that
    sample[qi] * sample[qj] == sample[product]; the result is the
    elementwise product (logical AND) over all reductions.
    """
    record = response.record
    label_dict = response.variables.index

    if len(bqm.info['reduction']) == 0:
        # no reductions: every sample trivially satisfies the penalties
        return np.array([1] * len(record.sample))

    penalty_vector = np.prod([record.sample[:, label_dict[qi]] *
                              record.sample[:, label_dict[qj]]
                              == record.sample[:, label_dict[valdict['product']]]
                              for (qi, qj), valdict in bqm.info['reduction'].items()],
                             axis=0)
    return penalty_vector
def polymorph_response(response, poly, bqm, penalty_strength=None, keep_penalty_variables=True, discard_unsatisfied=False):
    """Transform a quadratic sampleset back into one for the original polynomial.

    Recomputes energies against `poly`, records per-sample penalty
    satisfaction, and optionally drops the penalty (product/auxiliary)
    variables and/or constraint-violating samples.

    Args:
        response: Sampleset from the reduced quadratic model.
        poly: The original higher-order binary polynomial.
        bqm: The reduced model (with ``info['reduction']``).
        penalty_strength (float, optional): Recorded in the sampleset info.
        keep_penalty_variables (bool, optional): Keep product/aux columns.
        discard_unsatisfied (bool, optional): Drop violating samples.

    Returns:
        SampleSet: Over the polynomial's variables, with an extra
        'penalty_satisfaction' data vector.
    """
    record = response.record
    penalty_vector = penalty_satisfaction(response, bqm)
    original_variables = bqm.variables

    if discard_unsatisfied:
        # keep only the constraint-satisfying rows
        samples_to_keep = list(map(bool, list(penalty_vector)))
        penalty_vector = np.array([True] * np.sum(samples_to_keep))
    else:
        samples_to_keep = list(map(bool, [1] * len(record.sample)))

    samples = record.sample[samples_to_keep]
    energy_vector = poly.energies((samples, response.variables))

    if not keep_penalty_variables:
        # restrict the columns to the polynomial's own variables
        original_variables = poly.variables
        idxs = [response.variables.index[v] for v in original_variables]
        samples = np.asarray(samples[:, idxs])

    num_samples, num_variables = np.shape(samples)

    # rebuild the record with new energies and penalty flags, carrying
    # over all other data vectors from the original record
    datatypes = [('sample', np.dtype(np.int8), (num_variables,)),
                 ('energy', energy_vector.dtype),
                 ('penalty_satisfaction', penalty_vector.dtype)]
    datatypes.extend((name, record[name].dtype, record[name].shape[1:])
                     for name in record.dtype.names
                     if name not in {'sample', 'energy'})

    data = np.rec.array(np.empty(num_samples, dtype=datatypes))
    data.sample = samples
    data.energy = energy_vector
    for name in record.dtype.names:
        if name not in {'sample', 'energy'}:
            data[name] = record[name][samples_to_keep]

    data['penalty_satisfaction'] = penalty_vector
    response.info['reduction'] = bqm.info['reduction']
    if penalty_strength is not None:
        response.info['penalty_strength'] = penalty_strength

    return SampleSet(data, original_variables, response.info, response.vartype)
def sample_poly(self, poly, penalty_strength=1.0, keep_penalty_variables=False, discard_unsatisfied=False, **parameters):
    """Sample from a binary polynomial by reduction to a quadratic model.

    Args:
        poly: Binary polynomial to sample from.
        penalty_strength (float, optional): Strength of the reduction
            penalties.
        keep_penalty_variables (bool, optional): Keep product/aux
            variables in the returned samples.
        discard_unsatisfied (bool, optional): Drop samples that violate
            the reduction constraints.
        **parameters: Passed to the child sampler.

    Returns:
        SampleSet over the polynomial's variables.
    """
    bqm = make_quadratic(poly, penalty_strength, vartype=poly.vartype)
    response = self.child.sample(bqm, **parameters)
    return polymorph_response(response, poly, bqm,
                              penalty_strength=penalty_strength,
                              keep_penalty_variables=keep_penalty_variables,
                              discard_unsatisfied=discard_unsatisfied)
def sample_poly(self, poly, scalar=None, bias_range=1, poly_range=None, ignored_terms=None, **parameters):
    """Scale a binary polynomial into range, sample, then rescale energies.

    Args:
        poly: Binary polynomial to sample from.
        scalar (number, optional): Explicit scaling factor; if omitted the
            polynomial is normalized to the given ranges instead.
        bias_range (number or pair, optional): Target range for linear biases.
        poly_range (number or pair, optional): Target range for
            higher-order biases.
        ignored_terms (iterable, optional): Terms excluded from scaling.
        **parameters: Passed to the child sampler.

    Returns:
        SampleSet with energies expressed in the original (unscaled) units.
    """
    if ignored_terms is None:
        ignored_terms = set()
    else:
        ignored_terms = {frozenset(term) for term in ignored_terms}

    # scale a copy of the polynomial, keeping the original for rescaling
    original, poly = poly, poly.copy()
    if scalar is not None:
        poly.scale(scalar, ignored_terms=ignored_terms)
    else:
        poly.normalize(bias_range=bias_range, poly_range=poly_range,
                       ignored_terms=ignored_terms)

        # recover the factor normalize actually applied by comparing one
        # unignored nonzero bias before and after
        try:
            v = next(v for v, bias in original.items()
                     if bias and v not in ignored_terms)
        except StopIteration:
            # nothing was scaled
            scalar = 1
        else:
            scalar = poly[v] / original[v]

    sampleset = self.child.sample_poly(poly, **parameters)

    if ignored_terms:
        # unscaled terms mean a simple division is wrong; recompute in full
        sampleset.record.energy = original.energies((sampleset.record.sample,
                                                     sampleset.variables))
    else:
        sampleset.record.energy /= scalar

    return sampleset
def sample_poly(self, poly, **kwargs):
    """Sample from the binary polynomial, then (optionally aggregate and) truncate."""
    sampleset = self.child.sample_poly(poly, **kwargs)
    if self._aggregate:
        sampleset = sampleset.aggregate()
    return sampleset.truncate(**self._truncate_kwargs)
23,788 | def _samples_dicts_to_array ( samples_dicts , labels ) : itersamples = iter ( samples_dicts ) first_sample = next ( itersamples ) if labels is None : labels = list ( first_sample ) num_variables = len ( labels ) def _iter_samples ( ) : yield np . fromiter ( ( first_sample [ v ] for v in labels ) , count = num_variables , dtype = np . int8 ) try : for sample in itersamples : yield np . fromiter ( ( sample [ v ] for v in labels ) , count = num_variables , dtype = np . int8 ) except KeyError : msg = ( "Each dict in 'samples' must have the same keys." ) raise ValueError ( msg ) return np . stack ( list ( _iter_samples ( ) ) ) , labels | Convert an iterable of samples where each sample is a dict to a numpy 2d array . Also determines the labels is they are None . |
def data_struct_array(sample, **vectors):
    """Combine samples and per-sample data into a numpy structured array.

    Args:
        sample: 2d array-like of samples (cast to int8); a single 1d
            sample is promoted to a 1xN matrix.
        **vectors: Per-sample data vectors. 'energy' is required and cast
            to float; 'num_occurrences' defaults to all-ones.

    Returns:
        numpy record array with fields 'sample', 'energy',
        'num_occurrences', and any other given vectors.

    Raises:
        TypeError: If 'energy' is missing.
        ValueError: If a vector's first axis does not match the number of
            samples, or energy is not a flat vector.
    """
    if not len(sample):
        # empty input: 0 samples over 0 variables
        sample = np.zeros((0, 0), dtype=np.int8)
    else:
        sample = np.asarray(sample, dtype=np.int8)

        if sample.ndim < 2:
            # promote a single sample to a 1xN matrix
            sample = np.expand_dims(sample, 0)

    num_samples, num_variables = sample.shape

    if 'num_occurrences' not in vectors:
        vectors['num_occurrences'] = [1] * num_samples

    datavectors = {}
    datatypes = [('sample', np.dtype(np.int8), (num_variables,))]

    for kwarg, vector in vectors.items():
        # energy is always cast to float; other vectors keep their dtype
        dtype = float if kwarg == 'energy' else None
        datavectors[kwarg] = vector = np.asarray(vector, dtype)

        if len(vector.shape) < 1 or vector.shape[0] != num_samples:
            msg = ('{} and sample have a mismatched shape {}, {}. They must have the same size '
                   'in the first axis.').format(kwarg, vector.shape, sample.shape)
            raise ValueError(msg)

        datatypes.append((kwarg, vector.dtype, vector.shape[1:]))

    if 'energy' not in datavectors:
        # consistent with a keyword-only required argument
        raise TypeError('data_struct_array() needs keyword-only argument energy')
    elif datavectors['energy'].shape != (num_samples,):
        raise ValueError('energy should be a vector of length {}'.format(num_samples))

    data = np.rec.array(np.zeros(num_samples, dtype=datatypes))

    data['sample'] = sample

    for kwarg, vector in datavectors.items():
        data[kwarg] = vector

    return data
def from_samples(cls, samples_like, vectors, info, vartype, variable_labels=None):
    """Build a response from samples plus per-sample data vectors."""
    try:
        samples = np.asarray(samples_like, dtype=np.int8)
    except TypeError:
        # dict-like samples: flatten them via the labels
        samples, variable_labels = _samples_dicts_to_array(samples_like,
                                                           variable_labels)

    assert samples.dtype == np.int8, 'sanity check'

    record = data_struct_array(samples, **vectors)

    if variable_labels is None:
        # default to integer labels
        __, num_variables = record.sample.shape
        variable_labels = list(range(num_variables))

    return cls(record, variable_labels, info, vartype)
def breathe_identifier(self):
    """The unique identifier used for breathe directives.

    Functions include their parameter list; every other kind is just
    the node's name.
    """
    if self.kind != "function":
        return self.name
    return "{name}({parameters})".format(
        name=self.name,
        parameters=", ".join(self.parameters))
def full_signature(self):
    """The full signature of a function node (template, return type, name, params).

    Raises:
        RuntimeError: If called on a non-function node.
    """
    if self.kind != "function":
        raise RuntimeError(
            "full_signature may only be called for a 'function', but {name} is a '{kind}' node.".format(
                name=self.name, kind=self.kind))

    template = "template <{0}> ".format(", ".join(self.template)) if self.template else ""
    return "{template}{return_type} {name}({parameters})".format(
        template=template,
        return_type=self.return_type,
        name=self.name,
        parameters=", ".join(self.parameters))
def findNestedNamespaces(self, lst):
    """Recursively append every namespace node in this subtree to `lst`."""
    if self.kind == "namespace":
        lst.append(self)
    for child in self.children:
        child.findNestedNamespaces(lst)
def findNestedDirectories(self, lst):
    """Recursively append every directory node in this subtree to `lst`."""
    if self.kind == "dir":
        lst.append(self)
    for child in self.children:
        child.findNestedDirectories(lst)
def findNestedClassLike(self, lst):
    """Recursively append every class or struct node in this subtree to `lst`."""
    if self.kind in ("class", "struct"):
        lst.append(self)
    for child in self.children:
        child.findNestedClassLike(lst)
def generateDirectoryNodeDocuments(self):
    """Generate the reStructuredText document for every directory node."""
    all_dirs = []
    for d in self.dirs:
        d.findNestedDirectories(all_dirs)

    for directory in all_dirs:
        self.generateDirectoryNodeRST(directory)
def gerrymanderNodeFilenames(self):
    """Strip directory components from every node's file paths.

    Generated node files live in the same directory as the documents that
    include them, so only basenames are valid in toctrees and includes.
    """
    for node in self.all_nodes:
        node.file_name = os.path.basename(node.file_name)
        if node.kind == "file":
            node.program_file = os.path.basename(node.program_file)
def generateClassView(self):
    """Generate the class view hierarchy, returning it as a string.

    Namespaces are written first; class-like nodes, enums, and unions that
    never appeared in the namespace hierarchy are appended afterward.
    """
    class_view_stream = StringIO()

    for n in self.namespaces:
        n.toHierarchy(True, 0, class_view_stream)

    # gather orphans that did not make it into the hierarchy
    missing = []
    for cl in sorted(self.class_like):
        if not cl.in_class_hierarchy:
            missing.append(cl)
    for e in sorted(self.enums):
        if not e.in_class_hierarchy:
            missing.append(e)
    for u in sorted(self.unions):
        if not u.in_class_hierarchy:
            missing.append(u)

    if len(missing) > 0:
        idx = 0
        last_missing_child = len(missing) - 1
        for m in missing:
            # the last-child flag lets tree views close the hierarchy
            m.toHierarchy(True, 0, class_view_stream, idx == last_missing_child)
            idx += 1
    elif configs.createTreeView:
        # no orphans: regenerate so the final namespace carries the
        # last-child flag needed by the tree view
        class_view_stream.close()
        class_view_stream = StringIO()

        last_nspace_index = len(self.namespaces) - 1
        for idx in range(last_nspace_index + 1):
            nspace = self.namespaces[idx]
            nspace.toHierarchy(True, 0, class_view_stream, idx == last_nspace_index)

    class_view_string = class_view_stream.getvalue()
    class_view_stream.close()
    return class_view_string
def generateDirectoryView(self):
    """Generate the file view hierarchy, returning it as a string.

    Directories are written first; files that never appeared under any
    directory are appended afterward.
    """
    file_view_stream = StringIO()

    for d in self.dirs:
        d.toHierarchy(False, 0, file_view_stream)

    # gather files that did not make it into the directory hierarchy
    missing = []
    for f in sorted(self.files):
        if not f.in_file_hierarchy:
            missing.append(f)

    found_missing = len(missing) > 0
    if found_missing:
        idx = 0
        last_missing_child = len(missing) - 1
        for m in missing:
            # the last-child flag lets tree views close the hierarchy
            m.toHierarchy(False, 0, file_view_stream, idx == last_missing_child)
            idx += 1
    elif configs.createTreeView:
        # no orphans: regenerate so the final directory carries the
        # last-child flag needed by the tree view
        file_view_stream.close()
        file_view_stream = StringIO()

        last_dir_index = len(self.dirs) - 1
        for idx in range(last_dir_index + 1):
            curr_d = self.dirs[idx]
            curr_d.toHierarchy(False, 0, file_view_stream, idx == last_dir_index)

    file_view_string = file_view_stream.getvalue()
    file_view_stream.close()
    return file_view_string
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.