idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
25,900
def contains_empty(features):
    """Return True if the feature list is empty or any array has zero rows."""
    if not features:
        return True
    return any(feat.shape[0] == 0 for feat in features)
Check features data are not empty
25,901
def parse_dformat(dformat, check=True):
    """Return *dformat* unchanged; raise IOError unless it is 'dense' or 'sparse'.

    With check=False the value is returned without validation.
    """
    if check and dformat not in ('dense', 'sparse'):
        raise IOError(
            "{} is a bad features format, please choose 'dense' or 'sparse'"
            .format(dformat))
    return dformat
Return dformat or raise if it is not dense or sparse
25,902
def parse_dtype(features, check=True):
    """Return the scalar dtype shared by all feature arrays.

    Raises IOError when check is True and the arrays have mixed dtypes.
    """
    dtype = features[0].dtype
    if check and any(feat.dtype != dtype for feat in features):
        raise IOError('features must be homogeneous')
    return dtype
Return the features scalar type raise if error
25,903
def parse_dim ( features , check = True ) : dim = features [ 0 ] . shape [ 1 ] if check and not dim > 0 : raise IOError ( 'features dimension must be strictly positive' ) if check and not all ( [ d == dim for d in [ x . shape [ 1 ] for x in features ] ] ) : raise IOError ( 'all files must have the same feature dimensio...
Return the features dimension raise if error
25,904
def is_appendable_to(self, group):
    """Return True if these features can be appended to the HDF5 *group*.

    The group must already store the same format, scalar dtype and
    feature dimension. Checks short-circuit in that order.
    """
    if group.attrs['format'] != self.dformat:
        return False
    if group[self.name].dtype != self.dtype:
        return False
    return self._group_dim(group) == self.dim
Return True if features are appendable to a HDF5 group
25,905
def create_dataset ( self , group , chunk_size , compression = None , compression_opts = None ) : group . attrs [ 'format' ] = self . dformat super ( Features , self ) . _create_dataset ( group , chunk_size , compression , compression_opts ) if chunk_size != 'auto' : self . nb_per_chunk = nb_per_chunk ( self . dtype . ...
Initialize the features subgroup
25,906
def write_to ( self , group , append = False ) : if self . sparsetodense : self . data = [ x . todense ( ) if sp . issparse ( x ) else x for x in self . data ] nframes = sum ( [ d . shape [ 0 ] for d in self . data ] ) dim = self . _group_dim ( group ) feats = np . concatenate ( self . data , axis = 0 ) if append : nfr...
Write stored features to a given group
25,907
def create_dataset ( self , group , chunk_size ) : group . attrs [ 'format' ] = self . dformat group . attrs [ 'dim' ] = self . dim if chunk_size == 'auto' : group . create_dataset ( 'coordinates' , ( 0 , 2 ) , dtype = np . float64 , chunks = True , maxshape = ( None , 2 ) ) group . create_dataset ( self . name , ( 0 ,...
Initializes sparse specific datasets
25,908
def read_properties(group):
    """Load and unpickle the properties stored in *group*.

    Raises IOError if the group has no 'properties' entry.
    """
    if 'properties' not in group:
        raise IOError('no properties in group')
    # Null bytes were masked before storage; restore them before unpickling.
    raw = group['properties'][...][0].replace(b'__NULL__', b'\x00')
    return pickle.loads(raw)
Returns properties loaded from a group
25,909
def _eq_dicts ( d1 , d2 ) : if not d1 . keys ( ) == d2 . keys ( ) : return False for k , v1 in d1 . items ( ) : v2 = d2 [ k ] if not type ( v1 ) == type ( v2 ) : return False if isinstance ( v1 , np . ndarray ) : if not np . array_equal ( v1 , v2 ) : return False else : if not v1 == v2 : return False return True
Returns True if d1 == d2 , False otherwise
25,910
def write_to(self, group, append=False):
    """Write the pickled properties to *group*, optionally appending.

    When *append* is True, properties already stored in the group are
    prepended to the new ones; an EOFError while reading them is ignored.
    """
    data = self.data
    if append is True:
        try:
            data = read_properties(group) + data
        except EOFError:
            pass
    # Mask null bytes so the pickled blob survives HDF5 string storage.
    blob = pickle.dumps(data).replace(b'\x00', b'__NULL__')
    group['properties'][...] = np.void(blob)
Writes the properties to a group or append it
25,911
def generate_data ( nitem , nfeat = 2 , dim = 10 , labeldim = 1 , base = 'item' ) : import numpy as np items = [ base + '_' + str ( i ) for i in range ( nitem ) ] features = [ np . random . randn ( nfeat , dim ) for _ in range ( nitem ) ] if labeldim == 1 : labels = [ np . linspace ( 0 , 1 , nfeat ) ] * nitem else : t ...
Returns a randomly generated h5f . Data instance .
25,912
def create_index ( group , chunk_size , compression = None , compression_opts = None ) : dtype = np . int64 if chunk_size == 'auto' : chunks = True else : chunks = ( nb_per_chunk ( np . dtype ( dtype ) . itemsize , 1 , chunk_size ) , ) group . create_dataset ( 'index' , ( 0 , ) , dtype = dtype , chunks = chunks , maxsh...
Create an empty index dataset in the given group .
25,913
def write_index ( data , group , append ) : nitems = group [ 'items' ] . shape [ 0 ] if 'items' in group else 0 last_index = group [ 'index' ] [ - 1 ] if nitems > 0 else - 1 index = last_index + cumindex ( data . _entries [ 'features' ] ) if append : nidx = group [ 'index' ] . shape [ 0 ] group [ 'index' ] . resize ( (...
Write the data index to the given group .
25,914
def read_index(group, version='1.1'):
    """Return the index dataset of a h5features *group*.

    The dataset name and dtype handling depend on the format *version*.
    """
    if version == '0.1':
        # Legacy files: force the index to int64.
        return np.int64(group['index'][...])
    dataset = 'file_index' if version == '1.0' else 'index'
    return group[dataset][...]
Return the index stored in a h5features group .
25,915
def nb_per_chunk(item_size, item_dim, chunk_size):
    """Return how many items fit in one chunk (at least 10).

    *item_size* is the per-scalar byte size, *item_dim* the item
    dimension, *chunk_size* the chunk size in megabytes.
    """
    chunk_bytes = chunk_size * 10. ** 6
    return max(10, int(round(chunk_bytes / (item_size * item_dim))))
Return the number of items that can be stored in one chunk .
25,916
def is_appendable(self, entry):
    """Return True if *entry* matches self's name, dtype and dim.

    An entry lacking any of those attributes is never appendable.
    """
    try:
        if (self.name == entry.name
                and self.dtype == entry.dtype
                and self.dim == entry.dim):
            return True
    except AttributeError:
        return False
    return False
Return True if entry can be appended to self
25,917
def append(self, entry):
    """Append the data of *entry* to self in place.

    Raises ValueError when the entry is not appendable.
    """
    if not self.is_appendable(entry):
        raise ValueError('entry not appendable')
    # In-place extend: keeps any external aliases of self.data valid.
    self.data += entry.data
Append an entry to self
25,918
def write ( self , data , groupname = 'h5features' , append = False ) : if append and groupname in self . h5file : group = self . h5file [ groupname ] if not is_same_version ( self . version , group ) : raise IOError ( 'data is not appendable to the group {}: ' 'versions are different' . format ( group . name ) ) if no...
Write h5features data in a specified group of the file .
25,919
def _prepare(self, data, groupname):
    """Replace any existing *groupname* with a freshly initialized group.

    The new group gets the writer's version attribute and empty datasets
    initialized by data.init_group; the group is returned.
    """
    if groupname in self.h5file:
        # Start from a clean slate: drop the stale group entirely.
        del self.h5file[groupname]
    group = self.h5file.create_group(groupname)
    group.attrs['version'] = self.version
    data.init_group(group, self.chunk_size, self.compression,
                    self.compression_opts)
    return group
Clear the group if existing and initialize empty datasets .
25,920
def read_items ( group , version = '1.1' , check = False ) : if version == '0.1' : return '' . join ( [ unichr ( int ( c ) ) for c in group [ 'files' ] [ ... ] ] ) . replace ( '/-' , '/' ) . split ( '/\\' ) elif version == '1.0' : return Items ( list ( group [ 'files' ] [ ... ] ) , check ) else : return Items ( list ( ...
Return an Items instance initialized from a h5features group .
25,921
def write_to(self, group):
    """Append the stored items to the end of the group's items dataset."""
    dataset = group[self.name]
    offset = dataset.shape[0]
    # Grow the dataset first, then copy the new items into the fresh tail.
    dataset.resize((offset + len(self.data),))
    dataset[offset:] = self.data
Write stored items to the given HDF5 group .
25,922
def _create_dataset ( self , group , chunk_size , compression , compression_opts ) : if chunk_size == 'auto' : chunks = True else : per_chunk = ( nb_per_chunk ( 20 , 1 , chunk_size ) if self . dtype == np . dtype ( 'O' ) else nb_per_chunk ( np . dtype ( self . dtype ) . itemsize , 1 , chunk_size ) ) chunks = ( per_chun...
Create an empty dataset in a group .
25,923
def read_version(group):
    """Return the h5features version of *group*, defaulting to '0.1'.

    Byte-string attributes are decoded to str; an unsupported version
    raises IOError.
    """
    if 'version' in group.attrs:
        version = group.attrs['version']
    else:
        # Files written before versioning carry no attribute.
        version = '0.1'
    if isinstance(version, bytes):
        version = version.decode()
    if not is_supported_version(version):
        raise IOError('version {} is not supported'.format(version))
    return version
Return the h5features version of a given HDF5 group .
25,924
def read ( self , from_item = None , to_item = None , from_time = None , to_time = None ) : if to_item is None : to_item = self . items . data [ - 1 ] if from_item is None else from_item if from_item is None : from_item = self . items . data [ 0 ] if not self . items . is_valid_interval ( from_item , to_item ) : raise ...
Retrieve requested data coordinates from the h5features index .
25,925
def writesgf ( self , sgffilename ) : "Write the game to an SGF file after a game" size = self . size outfile = open ( sgffilename , "w" ) if not outfile : print "Couldn't create " + sgffilename return black_name = self . blackplayer . get_program_name ( ) white_name = self . whiteplayer . get_program_name ( ) black_se...
Write the game to an SGF file after a game
25,926
def _escapeText ( text ) : output = "" index = 0 match = reCharsToEscape . search ( text , index ) while match : output = output + text [ index : match . start ( ) ] + '\\' + text [ match . start ( ) ] index = match . end ( ) match = reCharsToEscape . search ( text , index ) output = output + text [ index : ] return ou...
Adds backslash - escapes to property value characters that need them .
25,927
def parse(self):
    """Parse self.data and return a Collection of the games found."""
    collection = Collection()
    while self.index < self.datalen:
        game = self.parseOneGame()
        if not game:
            break
        collection.append(game)
    return collection
Parses the SGF data stored in self . data and returns a Collection .
25,928
def parseOneGame(self):
    """Parse one game from self.data.

    Returns a GameTree, or None when the data is exhausted or no
    game-tree start token matches at the current position.
    """
    if self.index >= self.datalen:
        return None
    match = self.reGameTreeStart.match(self.data, self.index)
    if not match:
        return None
    self.index = match.end()
    return self.parseGameTree()
Parses one game from self . data . Returns a GameTree containing one game or None if the end of self . data has been reached .
25,929
def reset(self):
    """Rewind the cursor to the first node of the root GameTree self.game."""
    self.stack = []
    self.nodenum = 0
    self.index = 0
    self.gametree = self.game
    self.node = self.gametree[0]
    self._setChildren()
    self._setFlags()
Set Cursor to point to the start of the root GameTree self . game .
25,930
def previous ( self ) : if self . index - 1 >= 0 : self . index = self . index - 1 elif self . stack : self . gametree = self . stack . pop ( ) self . index = len ( self . gametree ) - 1 else : raise GameTreeEndError self . node = self . gametree [ self . index ] self . nodenum = self . nodenum - 1 self . _setChildren ...
Moves the Cursor to & returns the previous Node . Raises GameTreeEndError if the start of a branch is exceeded .
25,931
def _setChildren ( self ) : if self . index + 1 < len ( self . gametree ) : self . children = [ self . gametree [ self . index + 1 ] ] else : self . children = map ( lambda list : list [ 0 ] , self . gametree . variations )
Sets up self . children .
25,932
def _setFlags ( self ) : self . atEnd = not self . gametree . variations and ( self . index + 1 == len ( self . gametree ) ) self . atStart = not self . stack and ( self . index == 0 )
Sets up the flags self . atEnd and self . atStart .
25,933
def ripple_carry_add ( A , B , cin = 0 ) : if len ( A ) != len ( B ) : raise ValueError ( "expected A and B to be equal length" ) ss , cs = list ( ) , list ( ) for i , a in enumerate ( A ) : c = ( cin if i == 0 else cs [ i - 1 ] ) ss . append ( a ^ B [ i ] ^ c ) cs . append ( a & B [ i ] | a & c | B [ i ] & c ) return ...
Return symbolic logic for an N - bit ripple carry adder .
25,934
def kogge_stone_add ( A , B , cin = 0 ) : if len ( A ) != len ( B ) : raise ValueError ( "expected A and B to be equal length" ) N = len ( A ) gs = [ A [ i ] & B [ i ] for i in range ( N ) ] ps = [ A [ i ] ^ B [ i ] for i in range ( N ) ] for i in range ( clog2 ( N ) ) : start = 1 << i for j in range ( start , N ) : gs...
Return symbolic logic for an N - bit Kogge - Stone adder .
25,935
def brent_kung_add ( A , B , cin = 0 ) : if len ( A ) != len ( B ) : raise ValueError ( "expected A and B to be equal length" ) N = len ( A ) gs = [ A [ i ] & B [ i ] for i in range ( N ) ] ps = [ A [ i ] ^ B [ i ] for i in range ( N ) ] for i in range ( floor ( log ( N , 2 ) ) ) : step = 2 ** i for start in range ( 2 ...
Return symbolic logic for an N - bit Brent - Kung adder .
25,936
def _expect_token ( lexer , types ) : tok = next ( lexer ) if any ( isinstance ( tok , t ) for t in types ) : return tok else : raise Error ( "unexpected token: " + str ( tok ) )
Return the next token or raise an exception .
25,937
def parse_cnf(s, varname='x'):
    """Parse a DIMACS CNF formatted string into an expression AST.

    Lexer errors are re-raised as Error with position information.
    """
    tokens = iter(CNFLexer(s))
    try:
        tree = _cnf(tokens, varname)
    except lex.RunError as err:
        template = ("{0.args[0]}: "
                    "(line: {0.lineno}, offset: {0.offset}, text: {0.text})")
        raise Error(template.format(err))
    _expect_token(tokens, {EndToken})
    return tree
Parse an input string in DIMACS CNF format and return an expression abstract syntax tree .
25,938
def _cnf(lexer, varname):
    """Parse the DIMACS CNF preamble 'p cnf <vars> <clauses>' then the body."""
    _expect_token(lexer, {KW_p})
    _expect_token(lexer, {KW_cnf})
    num_vars = _expect_token(lexer, {IntegerToken}).value
    num_clauses = _expect_token(lexer, {IntegerToken}).value
    return _cnf_formula(lexer, varname, num_vars, num_clauses)
Return a DIMACS CNF .
25,939
def _cnf_formula ( lexer , varname , nvars , nclauses ) : clauses = _clauses ( lexer , varname , nvars ) if len ( clauses ) < nclauses : fstr = "formula has fewer than {} clauses" raise Error ( fstr . format ( nclauses ) ) if len ( clauses ) > nclauses : fstr = "formula has more than {} clauses" raise Error ( fstr . fo...
Return a DIMACS CNF formula .
25,940
def _clauses(lexer, varname, nvars):
    """Recursively collect DIMACS CNF clauses into a tuple."""
    tok = next(lexer)
    # Peek only: the token goes back either way.
    lexer.unpop_token(tok)
    if type(tok) in (OP_not, IntegerToken):
        head = _clause(lexer, varname, nvars)
        return (head,) + _clauses(lexer, varname, nvars)
    return tuple()
Return a tuple of DIMACS CNF clauses .
25,941
def _lits ( lexer , varname , nvars ) : tok = _expect_token ( lexer , { OP_not , IntegerToken } ) if isinstance ( tok , IntegerToken ) and tok . value == 0 : return tuple ( ) else : if isinstance ( tok , OP_not ) : neg = True tok = _expect_token ( lexer , { IntegerToken } ) else : neg = False index = tok . value if ind...
Return a tuple of DIMACS CNF clause literals .
25,942
def parse_sat(s, varname='x'):
    """Parse a DIMACS SAT formatted string into an expression.

    Lexer errors are re-raised as Error with position information.
    """
    tokens = iter(SATLexer(s))
    try:
        tree = _sat(tokens, varname)
    except lex.RunError as err:
        template = ("{0.args[0]}: "
                    "(line: {0.lineno}, offset: {0.offset}, text: {0.text})")
        raise Error(template.format(err))
    _expect_token(tokens, {EndToken})
    return tree
Parse an input string in DIMACS SAT format and return an expression .
25,943
def _sat(lexer, varname):
    """Parse the DIMACS SAT preamble 'p <fmt> <vars>' then the formula."""
    _expect_token(lexer, {KW_p})
    fmt = _expect_token(lexer, {KW_sat, KW_satx, KW_sate, KW_satex}).value
    num_vars = _expect_token(lexer, {IntegerToken}).value
    return _sat_formula(lexer, varname, fmt, num_vars)
Return a DIMACS SAT .
25,944
def _sat_formula ( lexer , varname , fmt , nvars ) : types = { IntegerToken , LPAREN } | _SAT_TOKS [ fmt ] tok = _expect_token ( lexer , types ) if isinstance ( tok , IntegerToken ) : index = tok . value if not 0 < index <= nvars : fstr = "formula literal {} outside valid range: (0, {}]" raise Error ( fstr . format ( i...
Return a DIMACS SAT formula .
25,945
def _formulas ( lexer , varname , fmt , nvars ) : types = { IntegerToken , LPAREN } | _SAT_TOKS [ fmt ] tok = lexer . peek_token ( ) if any ( isinstance ( tok , t ) for t in types ) : first = _sat_formula ( lexer , varname , fmt , nvars ) rest = _formulas ( lexer , varname , fmt , nvars ) return ( first , ) + rest else...
Return a tuple of DIMACS SAT formulas .
25,946
def keyword(self, text):
    """Look up the keyword token class for *text* and push an instance."""
    token_cls = self.KEYWORDS[text]
    self.push_token(token_cls(text, self.lineno, self.offset))
Push a keyword onto the token queue .
25,947
def operator(self, text):
    """Look up the operator token class for *text* and push an instance."""
    token_cls = self.OPERATORS[text]
    self.push_token(token_cls(text, self.lineno, self.offset))
Push an operator onto the token queue .
25,948
def punct(self, text):
    """Look up the punctuation token class for *text* and push an instance."""
    token_cls = self.PUNCTUATION[text]
    self.push_token(token_cls(text, self.lineno, self.offset))
Push punctuation onto the token queue .
25,949
def parse ( s ) : d = dict ( ninputs = None , noutputs = None , input_labels = None , output_labels = None , intype = None , cover = set ( ) ) lines = [ line . strip ( ) for line in s . splitlines ( ) ] for i , line in enumerate ( lines , start = 1 ) : if not line or _COMMENT . match ( line ) : continue m_in = _NINS . ...
Parse an input string in PLA format and return an intermediate representation dict .
25,950
def action(toktype):
    """Return a decorator that turns a function into a lexer rule action.

    The decorated rule computes a value from (lexer, text); the wrapper
    appends a *toktype* token carrying that value and the current lexer
    position to lexer.tokens.
    """
    import functools

    def outer(func):
        # functools.wraps keeps the rule's name/docstring for debugging;
        # the original decorator discarded them.
        @functools.wraps(func)
        def inner(lexer, text):
            value = func(lexer, text)
            lexer.tokens.append(toktype(value, lexer.lineno, lexer.offset))
        return inner
    return outer
Return a parser action property .
25,951
def _compile_rules ( self ) : for state , table in self . RULES . items ( ) : patterns = list ( ) actions = list ( ) nextstates = list ( ) for i , row in enumerate ( table ) : if len ( row ) == 2 : pattern , _action = row nextstate = None elif len ( row ) == 3 : pattern , _action , nextstate = row else : fstr = "invali...
Compile the rules into the internal lexer state .
25,952
def _iter_tokens ( self ) : reobj , actions , nextstates = self . _rules [ self . states [ - 1 ] ] mobj = reobj . match ( self . string , self . pos ) while mobj is not None : text = mobj . group ( 0 ) idx = mobj . lastindex - 1 nextstate = nextstates [ idx ] actions [ idx ] ( self , text ) while self . tokens : yield ...
Iterate through all tokens in the input string .
25,953
def parity(num: int) -> int:
    """Return the parity (XOR of all bits) of a non-negative integer.

    Raises ValueError for negative input.
    """
    if num < 0:
        raise ValueError("expected num >= 0")
    # Parity is the popcount modulo 2.
    return bin(num).count('1') & 1
Return the parity of a non - negative integer .
25,954
def cached_property ( func ) : def get ( self ) : try : return self . _property_cache [ func ] except AttributeError : self . _property_cache = dict ( ) prop = self . _property_cache [ func ] = func ( self ) return prop except KeyError : prop = self . _property_cache [ func ] = func ( self ) return prop get . __doc__ =...
Return a cached property calculated by the input function .
25,955
def var ( name , index = None ) : tname = type ( name ) if tname is str : names = ( name , ) elif tname is tuple : names = name else : fstr = "expected name to be a str or tuple, got {0.__name__}" raise TypeError ( fstr . format ( tname ) ) if not names : raise ValueError ( "expected at least one name" ) for name in na...
Return a unique Variable instance .
25,956
def iter_cofactors(self, vs=None):
    """Iterate through the cofactors of the function over variables *vs*.

    NOTE(review): the source line carried a stray 'r' (raw-docstring
    extraction artifact) that made it a SyntaxError; removed here.
    """
    vs = self._expect_vars(vs)
    for point in iter_points(vs):
        yield self.restrict(point)
r Iterate through the cofactors of a function over N variables .
25,957
def smoothing(self, vs=None):
    """Return the smoothing (OR of all cofactors) over variables *vs*.

    NOTE(review): removed a stray 'r' docstring artifact that made the
    source line a SyntaxError.
    """
    return functools.reduce(operator.or_, self.iter_cofactors(vs))
r Return the smoothing of a function over a sequence of N variables .
25,958
def consensus(self, vs=None):
    """Return the consensus (AND of all cofactors) over variables *vs*.

    NOTE(review): removed a stray 'r' docstring artifact that made the
    source line a SyntaxError.
    """
    return functools.reduce(operator.and_, self.iter_cofactors(vs))
r Return the consensus of a function over a sequence of N variables .
25,959
def derivative(self, vs=None):
    """Return the derivative (XOR of all cofactors) over variables *vs*.

    NOTE(review): removed a stray 'r' docstring artifact that made the
    source line a SyntaxError.
    """
    return functools.reduce(operator.xor, self.iter_cofactors(vs))
r Return the derivative of a function over a sequence of N variables .
25,960
def _expect_vars ( vs = None ) : if vs is None : return list ( ) elif isinstance ( vs , Variable ) : return [ vs ] else : checked = list ( ) for v in vs : if isinstance ( v , Variable ) : checked . append ( v ) else : fstr = "expected Variable, got {0.__name__}" raise TypeError ( fstr . format ( type ( v ) ) ) return c...
Verify the input type and return a list of Variables .
25,961
def solve(self, grid):
    """Return the solution point for the given Sudoku *grid* string."""
    assumptions = self._parse_grid(grid)
    solution = self.S.satisfy_one(assumptions=assumptions)
    return self.S.soln2point(solution, self.litmap)
Return a solution point for a Sudoku grid .
25,962
def _parse_grid(self, grid):
    """Convert a 9x9 grid string into SAT input assumptions.

    Cells may be digits or blanks written as '0' or '.'; every other
    character is ignored. Raises ValueError unless exactly 81 cells
    remain; blanks produce no assumption.
    """
    cells = [c for c in grid if c in DIGITS or c in "0."]
    if len(cells) != 81:
        raise ValueError("expected 9x9 grid")
    return [self.litmap[self.X[i // 9 + 1, i % 9 + 1, int(c)]]
            for i, c in enumerate(cells) if c in DIGITS]
Return the input constraints for a Sudoku grid .
25,963
def _soln2str ( self , soln , fancy = False ) : chars = list ( ) for r in range ( 1 , 10 ) : for c in range ( 1 , 10 ) : if fancy and c in ( 4 , 7 ) : chars . append ( "|" ) chars . append ( self . _get_val ( soln , r , c ) ) if fancy and r != 9 : chars . append ( "\n" ) if r in ( 3 , 6 ) : chars . append ( "---+---+--...
Convert a Sudoku solution point to a string .
25,964
def _get_val ( self , soln , r , c ) : for v in range ( 1 , 10 ) : if soln [ self . X [ r , c , v ] ] : return DIGITS [ v - 1 ] return "X"
Return the string value for a solution coordinate .
25,965
def ttvar(name, index=None):
    """Return the unique TTVariable for (name, index), creating it on demand."""
    bvar = boolfunc.var(name, index)
    try:
        return _VARS[bvar.uniqid]
    except KeyError:
        # First request for this variable: memoize it.
        var = _VARS[bvar.uniqid] = TTVariable(bvar)
        return var
Return a TruthTable variable .
25,966
def expr2truthtable(expr):
    """Convert an expression into an equivalent truth table."""
    tt_inputs = [ttvar(v.names, v.indices) for v in expr.inputs]
    return truthtable(tt_inputs, expr.iter_image())
Convert an expression into a truth table .
25,967
def truthtable2expr ( tt , conj = False ) : if conj : outer , inner = ( And , Or ) nums = tt . pcdata . iter_zeros ( ) else : outer , inner = ( Or , And ) nums = tt . pcdata . iter_ones ( ) inputs = [ exprvar ( v . names , v . indices ) for v in tt . inputs ] terms = [ boolfunc . num2term ( num , inputs , conj ) for nu...
Convert a truth table into an expression .
25,968
def _bin_zfill ( num , width = None ) : s = bin ( num ) [ 2 : ] return s if width is None else s . zfill ( width )
Convert a base - 10 number to a binary string .
25,969
def zero_mask(self):
    """Return a mask with 0x55 repeated across each byte of an array chunk.

    Used to test whether any two-bit item in a chunk holds a zero.
    """
    mask = 0
    for byte_idx in range(self.data.itemsize):
        mask |= 0x55 << (byte_idx * 8)
    return mask
Return a mask to determine whether an array chunk has any zeros .
25,970
def one_mask(self):
    """Return a mask with 0xAA repeated across each byte of an array chunk.

    Used to test whether any two-bit item in a chunk holds a one.
    """
    mask = 0
    for byte_idx in range(self.data.itemsize):
        mask |= 0xAA << (byte_idx * 8)
    return mask
Return a mask to determine whether an array chunk has any ones .
25,971
def iter_zeros ( self ) : num = quotient = 0 while num < self . _len : chunk = self . data [ quotient ] if chunk & self . zero_mask : remainder = 0 while remainder < self . width and num < self . _len : item = ( chunk >> remainder ) & 3 if item == PC_ZERO : yield num remainder += 2 num += 1 else : num += ( self . width...
Iterate through the indices of all zero items .
25,972
def find_one ( self ) : num = quotient = 0 while num < self . _len : chunk = self . data [ quotient ] if chunk & self . one_mask : remainder = 0 while remainder < self . width and num < self . _len : item = ( chunk >> remainder ) & 3 if item == PC_ONE : return num remainder += 2 num += 1 else : num += ( self . width >>...
Return the first index of an entry that is either one or DC . If no item is found return None .
25,973
def is_neg_unate ( self , vs = None ) : r vs = self . _expect_vars ( vs ) basis = self . support - set ( vs ) maxcov = [ PC_ONE ] * ( 1 << len ( basis ) ) for cf in self . iter_cofactors ( vs ) : for i , item in enumerate ( cf . pcdata ) : if maxcov [ i ] == PC_ZERO and item == PC_ONE : return False maxcov [ i ] = item...
r Return whether a function is negative unate .
25,974
def _iter_restrict ( self , zeros , ones ) : inputs = list ( self . inputs ) unmapped = dict ( ) for i , v in enumerate ( self . inputs ) : if v in zeros : inputs [ i ] = 0 elif v in ones : inputs [ i ] = 1 else : unmapped [ v ] = i vs = sorted ( unmapped . keys ( ) ) for num in range ( 1 << len ( vs ) ) : for v , val ...
Iterate through indices of all table entries that vary .
25,975
def bddvar(name, index=None):
    """Return the unique BDD variable for (name, index), creating it on demand.

    NOTE(review): the source line carried a stray 'r' (raw-docstring
    extraction artifact) that made it a SyntaxError; removed here.
    """
    bvar = boolfunc.var(name, index)
    try:
        var = _VARS[bvar.uniqid]
    except KeyError:
        var = _VARS[bvar.uniqid] = BDDVariable(bvar)
        _BDDS[var.node] = var
    return var
r Return a unique BDD variable .
25,976
def _expr2bddnode ( expr ) : if expr . is_zero ( ) : return BDDNODEZERO elif expr . is_one ( ) : return BDDNODEONE else : top = expr . top _ = bddvar ( top . names , top . indices ) root = top . uniqid lo = _expr2bddnode ( expr . restrict ( { top : 0 } ) ) hi = _expr2bddnode ( expr . restrict ( { top : 1 } ) ) return _...
Convert an expression into a BDD node .
25,977
def bdd2expr ( bdd , conj = False ) : if conj : outer , inner = ( And , Or ) paths = _iter_all_paths ( bdd . node , BDDNODEZERO ) else : outer , inner = ( Or , And ) paths = _iter_all_paths ( bdd . node , BDDNODEONE ) terms = list ( ) for path in paths : expr_point = { exprvar ( v . names , v . indices ) : val for v , ...
Convert a binary decision diagram into an expression .
25,978
def upoint2bddpoint(upoint):
    """Convert an untyped point (zero-uniqids, one-uniqids) into a BDD point."""
    zeros, ones = upoint[0], upoint[1]
    point = {_VARS[uniqid]: 0 for uniqid in zeros}
    point.update((_VARS[uniqid], 1) for uniqid in ones)
    return point
Convert an untyped point into a BDD point .
25,979
def _bddnode(root, lo, hi):
    """Return the unique BDD node for (root, lo, hi).

    When both branches are the same node the test is redundant and the
    branch itself is returned.
    """
    if lo is hi:
        return lo
    key = (root, lo, hi)
    try:
        return _NODES[key]
    except KeyError:
        node = _NODES[key] = BDDNode(*key)
        return node
Return a unique BDD node .
25,980
def _bdd(node):
    """Return the unique BinaryDecisionDiagram wrapping *node*."""
    try:
        return _BDDS[node]
    except KeyError:
        bdd = _BDDS[node] = BinaryDecisionDiagram(node)
        return bdd
Return a unique BDD .
25,981
def _path2point(path):
    """Convert a BDD path (sequence of nodes) into a {var: 0/1} point."""
    point = {}
    for i, node in enumerate(path[:-1]):
        # 1 when the path followed the hi edge out of this node, else 0.
        point[_VARS[node.root]] = int(node.hi is path[i + 1])
    return point
Convert a BDD path to a BDD point .
25,982
def _find_path ( start , end , path = tuple ( ) ) : path = path + ( start , ) if start is end : return path else : ret = None if start . lo is not None : ret = _find_path ( start . lo , end , path ) if ret is None and start . hi is not None : ret = _find_path ( start . hi , end , path ) return ret
Return the path from start to end .
25,983
def _iter_all_paths ( start , end , rand = False , path = tuple ( ) ) : path = path + ( start , ) if start is end : yield path else : nodes = [ start . lo , start . hi ] if rand : random . shuffle ( nodes ) for node in nodes : if node is not None : yield from _iter_all_paths ( node , end , rand , path )
Iterate through all paths from start to end .
25,984
def _dfs_preorder ( node , visited ) : if node not in visited : visited . add ( node ) yield node if node . lo is not None : yield from _dfs_preorder ( node . lo , visited ) if node . hi is not None : yield from _dfs_preorder ( node . hi , visited )
Iterate through nodes in DFS pre - order .
25,985
def _dfs_postorder ( node , visited ) : if node . lo is not None : yield from _dfs_postorder ( node . lo , visited ) if node . hi is not None : yield from _dfs_postorder ( node . hi , visited ) if node not in visited : visited . add ( node ) yield node
Iterate through nodes in DFS post - order .
25,986
def _bfs ( node , visited ) : queue = collections . deque ( ) queue . appendleft ( node ) while queue : node = queue . pop ( ) if node not in visited : if node . lo is not None : queue . appendleft ( node . lo ) if node . hi is not None : queue . appendleft ( node . hi ) visited . add ( node ) yield node
Iterate through nodes in BFS order .
25,987
def parse(s):
    """Parse a Boolean expression string and return its AST.

    Lexer errors are re-raised as Error with position information.
    """
    tokens = iter(BoolExprLexer(s))
    try:
        tree = _expr(tokens)
    except lex.RunError as err:
        template = ("{0.args[0]}: "
                    "(line: {0.lineno}, offset: {0.offset}, text: {0.text})")
        raise Error(template.format(err))
    _expect_token(tokens, {EndToken})
    return tree
Parse a Boolean expression string and return an expression abstract syntax tree .
25,988
def _ite(lexer):
    """Parse an if-then-else expression: s ? d1 : d0."""
    selector = _impl(lexer)
    tok = next(lexer)
    if not isinstance(tok, OP_question):
        lexer.unpop_token(tok)
        return selector
    dif1 = _ite(lexer)
    _expect_token(lexer, {OP_colon})
    dif0 = _ite(lexer)
    return ('ite', selector, dif1, dif0)
Return an ITE expression .
25,989
def _impl(lexer):
    """Parse an implication (->) or equivalence (<->) expression."""
    lhs = _sumterm(lexer)
    tok = next(lexer)
    if isinstance(tok, OP_rarrow):
        return ('implies', lhs, _impl(lexer))
    if isinstance(tok, OP_lrarrow):
        return ('equal', lhs, _impl(lexer))
    lexer.unpop_token(tok)
    return lhs
Return an Implies expression .
25,990
def _sumterm(lexer):
    """Parse a sum (OR) term."""
    left = _xorterm(lexer)
    rest = _sumterm_prime(lexer)
    return left if rest is None else ('or', left, rest)
Return a sum term expression .
25,991
def _sumterm_prime(lexer):
    """Parse the tail of a sum term (left recursion eliminated)."""
    tok = next(lexer)
    if not isinstance(tok, OP_or):
        lexer.unpop_token(tok)
        return None
    left = _xorterm(lexer)
    rest = _sumterm_prime(lexer)
    return left if rest is None else ('or', left, rest)
Return a sum term expression ( eliminates left recursion ) .
25,992
def _xorterm(lexer):
    """Parse an xor term."""
    left = _prodterm(lexer)
    rest = _xorterm_prime(lexer)
    return left if rest is None else ('xor', left, rest)
Return an xor term expression .
25,993
def _xorterm_prime(lexer):
    """Parse the tail of an xor term (left recursion eliminated)."""
    tok = next(lexer)
    if not isinstance(tok, OP_xor):
        lexer.unpop_token(tok)
        return None
    left = _prodterm(lexer)
    rest = _xorterm_prime(lexer)
    return left if rest is None else ('xor', left, rest)
Return an xor term expression ( eliminates left recursion ) .
25,994
def _prodterm(lexer):
    """Parse a product (AND) term."""
    left = _factor(lexer)
    rest = _prodterm_prime(lexer)
    return left if rest is None else ('and', left, rest)
Return a product term expression .
25,995
def _prodterm_prime(lexer):
    """Parse the tail of a product term (left recursion eliminated)."""
    tok = next(lexer)
    if not isinstance(tok, OP_and):
        lexer.unpop_token(tok)
        return None
    left = _factor(lexer)
    rest = _prodterm_prime(lexer)
    return left if rest is None else ('and', left, rest)
Return a product term expression eliminates left recursion .
25,996
def _factor ( lexer ) : tok = _expect_token ( lexer , FACTOR_TOKS ) toktype = type ( tok ) if toktype is OP_not : return ( 'not' , _factor ( lexer ) ) elif toktype is LPAREN : expr = _expr ( lexer ) _expect_token ( lexer , { RPAREN } ) return expr elif any ( toktype is t for t in OPN_TOKS ) : op = tok . ASTOP _expect_t...
Return a factor expression .
25,997
def _zom_arg(lexer):
    """Parse zero or more comma-prefixed arguments; return them as a tuple."""
    tok = next(lexer)
    if not isinstance(tok, COMMA):
        lexer.unpop_token(tok)
        return tuple()
    return (_expr(lexer),) + _zom_arg(lexer)
Return zero or more arguments .
25,998
def _variable(lexer):
    """Parse a variable reference with optional [indices]."""
    names = _names(lexer)
    tok = next(lexer)
    if isinstance(tok, LBRACK):
        indices = _indices(lexer)
        _expect_token(lexer, {RBRACK})
    else:
        # No subscript: push the token back and use empty indices.
        lexer.unpop_token(tok)
        indices = tuple()
    return ('var', names, indices)
Return a variable expression .
25,999
def _names(lexer):
    """Parse one or more names and return them as a tuple, innermost first."""
    first = _expect_token(lexer, {NameToken}).value
    rest = _zom_name(lexer)
    # Names are collected outermost-first, then reversed for the caller.
    return tuple(reversed((first,) + rest))
Return a tuple of names .