text stringlengths 0 828 |
|---|
# absolutely necessary. Notice that even if Equivalence queries are |
# expensive in general caching the result will be able to discover that |
# this iteration required a new state in the next equivalence query. |
exp = w_string[diff:] |
self.observation_table.em_vector.append(exp) |
for row in self.observation_table.sm_vector + self.observation_table.smi_vector: |
self._fill_table_entry(row, exp)" |
def _get_predicate_guards(self, state, state_training_data):
    """
    Build the guarded transitions for a single DFA state.

    Args:
        state (DFA state): The dfa state
        state_training_data (list): The training data set, a list of
            (target_state, symbols) pairs observed for this state.
    Returns:
        list: A list of (target_state, SetPredicate) transitions.
    """
    # Sink selection heuristics (based on RE-filter properties):
    # prefer a unique maximal transition, then the empty state, then a
    # self loop, and otherwise fall back to a random maximal candidate.
    best_len = max(len(symbols) for _, symbols in state_training_data)
    candidates = [entry for entry in state_training_data
                  if len(entry[1]) == best_len]
    candidate_targets = [entry[0] for entry in candidates]
    if len(candidates) == 1:
        sink = candidate_targets[0]
    elif '' in candidate_targets:
        sink = ''
    elif state in candidate_targets:
        sink = state
    else:
        sink = random.choice(candidate_targets)

    # Every non-sink transition gets an explicit symbol-set guard; the
    # sink absorbs all alphabet symbols not claimed by another guard.
    covered = set()
    guards = []
    for target, symbols in state_training_data:
        if target == sink:
            continue
        guards.append((target, SetPredicate(symbols)))
        covered.update(symbols)
    guards.append((sink, SetPredicate(set(self.alphabet) - covered)))
    return guards
def get_sfa_conjecture(self):
    """
    Utilize the observation table to construct an SFA hypothesis.

    For each access string in sm_vector a state is created; its outgoing
    arcs are the predicate guards computed from the state's training
    data, and its finality comes from the table entry at (s, epsilon).

    Returns:
        SFA: A symbolic finite automaton built from a closed and
        consistent observation table.
    """
    # NOTE(review): the previous docstring claimed a Mealy machine built
    # on pyFST; the code constructs and returns an SFA.
    sfa = SFA(self.alphabet)
    for s in self.observation_table.sm_vector:
        # The source id depends only on s — hoisted out of the inner
        # loop (it was an O(n) list search per transition).
        src_id = self.observation_table.sm_vector.index(s)
        transitions = self._get_predicate_guards(
            s, self.observation_table.training_data[s])
        for (t, pred) in transitions:
            dst_id = self.observation_table.sm_vector.index(t)
            assert isinstance(
                pred, SetPredicate), "Invalid type for predicate {}".format(pred)
            sfa.add_arc(src_id, dst_id, pred)
    # Mark the final states in the hypothesis automaton.
    for i, s in enumerate(self.observation_table.sm_vector):
        sfa.states[i].final = self.observation_table[s, self.epsilon]
    return sfa
def _init_table(self):
    """
    Initialize the observation table.

    Seeds the rows with epsilon plus one randomly chosen alphabet
    symbol, seeds the columns with epsilon, and fills the resulting
    table entries via membership queries.
    """
    table = self.observation_table
    table.sm_vector.append(self.epsilon)
    table.smi_vector = [random.choice(self.alphabet)]
    table.em_vector.append(self.epsilon)
    self._fill_table_entry(self.epsilon, self.epsilon)
    for access_string in table.smi_vector:
        self._fill_table_entry(access_string, self.epsilon)
def _init_table_from_dfa(self, mma):
    """
    Initializes the observation table from a DFA.

    Args:
        mma: The input automaton
    Returns:
        None
    """
    initializer = ObservationTableInit(self.epsilon, self.alphabet)
    # Unpack the precomputed row/column vectors straight into the table.
    (self.observation_table.sm_vector,
     self.observation_table.smi_vector,
     self.observation_table.em_vector) = initializer.initialize(mma, True)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.