_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q268500
QasmLexer.input
test
def input(self, data):
    """Load *data* as the text to be tokenized.

    Stores the raw text on the instance and forwards it to the
    underlying PLY lexer.
    """
    self.data = data
    self.lexer.input(data)
python
{ "resource": "" }
q268501
QasmLexer.pop
test
def pop(self):
    """Restore the most recently pushed PLY lexer from the stack.

    The current filename and line number are refreshed from the
    state saved on the restored lexer.
    """
    restored = self.stack.pop()
    self.lexer = restored
    self.filename = restored.qasm_file
    self.lineno = restored.qasm_line
python
{ "resource": "" }
q268502
QasmLexer.push
test
def push(self, filename):
    """Save the current PLY lexer on the stack and start a new one.

    The current filename/line number are recorded on the outgoing
    lexer so ``pop`` can restore them, then a fresh lexer is built
    for *filename*.
    """
    outgoing = self.lexer
    outgoing.qasm_file = self.filename
    outgoing.qasm_line = self.lineno
    self.stack.append(outgoing)
    self.__mklexer__(filename)
python
{ "resource": "" }
q268503
ConsolidateBlocks.run
test
def run(self, dag):
    """iterate over each block and replace it with an equivalent Unitary
    on the same wires.

    Reads the 'block_list' entry of the property_set (list of lists of
    DAG op nodes) and returns a new DAGCircuit in which each block is
    collapsed into a single UnitaryGate.
    """
    new_dag = DAGCircuit()
    # Mirror the register structure of the input DAG.
    for qreg in dag.qregs.values():
        new_dag.add_qreg(qreg)
    for creg in dag.cregs.values():
        new_dag.add_creg(creg)

    # compute ordered indices for the global circuit wires
    # NOTE(review): this adds the register's position in the qreg list to
    # the bit index, not the cumulative size of preceding registers —
    # presumably only valid for single-register circuits; confirm.
    global_index_map = {}
    for wire in dag.wires:
        if not isinstance(wire[0], QuantumRegister):
            continue
        global_qregs = list(dag.qregs.values())
        global_index_map[wire] = global_qregs.index(wire[0]) + wire[1]

    blocks = self.property_set['block_list']
    nodes_seen = set()

    for node in dag.topological_op_nodes():
        # skip already-visited nodes or input/output nodes
        if node in nodes_seen or node.type == 'in' or node.type == 'out':
            continue
        # check if the node belongs to the next block
        if blocks and node in blocks[0]:
            block = blocks[0]
            # find the qubits involved in this block
            block_qargs = set()
            for nd in block:
                block_qargs |= set(nd.qargs)
            # convert block to a sub-circuit, then simulate unitary and add
            block_width = len(block_qargs)
            q = QuantumRegister(block_width)
            subcirc = QuantumCircuit(q)
            block_index_map = self._block_qargs_to_indices(block_qargs,
                                                           global_index_map)
            for nd in block:
                nodes_seen.add(nd)
                subcirc.append(nd.op, [q[block_index_map[i]] for i in nd.qargs])
            unitary = UnitaryGate(Operator(subcirc))  # simulates the circuit
            new_dag.apply_operation_back(
                unitary, sorted(block_qargs, key=lambda x: block_index_map[x]))
            del blocks[0]
        else:
            # the node could belong to some future block, but in that case
            # we simply skip it. It is guaranteed that we will revisit that
            # future block, via its other nodes
            for block in blocks[1:]:
                if node in block:
                    break
            # freestanding nodes can just be added
            else:
                nodes_seen.add(node)
                new_dag.apply_operation_back(node.op, node.qargs, node.cargs)
    return new_dag
python
{ "resource": "" }
q268504
ConversionMethodBinder.get_bound_method
test
def get_bound_method(self, instruction):
    """Look up the conversion method registered for *instruction*'s type.

    Raises:
        PulseError: if no conversion method is registered for the type.
    """
    try:
        method = self._bound_instructions[type(instruction)]
    except KeyError:
        raise PulseError('Qobj conversion method for %s is not found.' % instruction)
    return method
python
{ "resource": "" }
q268505
PulseQobjConverter.convert_acquire
test
def convert_acquire(self, shift, instruction):
    """Return converted `AcquireInstruction`.

    Args:
        shift(int): Offset time.
        instruction (AcquireInstruction): acquire instruction.

    Returns:
        dict: Dictionary of required parameters.
    """
    # Default measurement level is 2 (discriminated) when not configured.
    meas_level = self._run_config.get('meas_level', 2)

    command_dict = {
        'name': 'acquire',
        't0': shift+instruction.start_time,
        'duration': instruction.duration,
        'qubits': [q.index for q in instruction.acquires],
        'memory_slot': [m.index for m in instruction.mem_slots]
    }
    if meas_level == 2:
        # setup discriminators
        if instruction.command.discriminator:
            command_dict.update({
                'discriminators': [
                    QobjMeasurementOption(
                        name=instruction.command.discriminator.name,
                        params=instruction.command.discriminator.params)
                ]
            })
        # setup register_slots
        # NOTE: register slots are only emitted at meas_level 2.
        command_dict.update({
            'register_slot': [regs.index for regs in instruction.reg_slots]
        })
    if meas_level >= 1:
        # setup kernels (emitted for both kerneled and discriminated levels)
        if instruction.command.kernel:
            command_dict.update({
                'kernels': [
                    QobjMeasurementOption(
                        name=instruction.command.kernel.name,
                        params=instruction.command.kernel.params)
                ]
            })
    return self._qobj_model(**command_dict)
python
{ "resource": "" }
q268506
PulseQobjConverter.convert_frame_change
test
def convert_frame_change(self, shift, instruction):
    """Return converted `FrameChangeInstruction`.

    Args:
        shift (int): Offset time.
        instruction (FrameChangeInstruction): frame change instruction.

    Returns:
        dict: Dictionary of required parameters.
    """
    start_time = shift + instruction.start_time
    command_dict = dict(
        name='fc',
        t0=start_time,
        ch=instruction.channels[0].name,
        phase=instruction.command.phase,
    )
    return self._qobj_model(**command_dict)
python
{ "resource": "" }
q268507
PulseQobjConverter.convert_persistent_value
test
def convert_persistent_value(self, shift, instruction):
    """Return converted `PersistentValueInstruction`.

    Args:
        shift (int): Offset time.
        instruction (PersistentValueInstruction): persistent value instruction.

    Returns:
        dict: Dictionary of required parameters.
    """
    start_time = shift + instruction.start_time
    command_dict = dict(
        name='pv',
        t0=start_time,
        ch=instruction.channels[0].name,
        val=instruction.command.value,
    )
    return self._qobj_model(**command_dict)
python
{ "resource": "" }
q268508
PulseQobjConverter.convert_drive
test
def convert_drive(self, shift, instruction):
    """Return converted `PulseInstruction`.

    Args:
        shift (int): Offset time.
        instruction (PulseInstruction): drive instruction.

    Returns:
        dict: Dictionary of required parameters.
    """
    start_time = shift + instruction.start_time
    command_dict = dict(
        name=instruction.command.name,
        t0=start_time,
        ch=instruction.channels[0].name,
    )
    return self._qobj_model(**command_dict)
python
{ "resource": "" }
q268509
PulseQobjConverter.convert_snapshot
test
def convert_snapshot(self, shift, instruction):
    """Return converted `Snapshot`.

    Args:
        shift (int): Offset time.
        instruction (Snapshot): snapshot instruction.

    Returns:
        dict: Dictionary of required parameters.
    """
    start_time = shift + instruction.start_time
    command_dict = dict(
        name='snapshot',
        t0=start_time,
        label=instruction.name,
        type=instruction.type,
    )
    return self._qobj_model(**command_dict)
python
{ "resource": "" }
q268510
_update_annotations
test
def _update_annotations(discretized_pulse: Callable) -> Callable:
    """Update annotations of discretized continuous pulse function with duration.

    The first annotation (the continuous sample-times argument) is replaced
    by a ``duration: int`` entry; all remaining annotations are preserved.

    Args:
        discretized_pulse: Discretized decorated continuous pulse.
    """
    remaining = list(discretized_pulse.__annotations__.items())[1:]
    discretized_pulse.__annotations__ = dict([('duration', int)] + remaining)
    return discretized_pulse
python
{ "resource": "" }
q268511
sampler
test
def sampler(sample_function: Callable) -> Callable:
    """Sampler decorator base method.

    Samplers are used for converting an continuous function to a discretized
    pulse. They operate on a function with the signature:
        `def f(times: np.ndarray, *args, **kwargs) -> np.ndarray`
    Where `times` is a numpy array of floats with length n_times and the
    output array is a complex numpy array with length n_times. The output
    of the decorator is an instance of `FunctionalPulse` with signature:
        `def g(duration: int, *args, **kwargs) -> SamplePulse`

    Note if your continuous pulse function outputs a `complex` scalar rather
    than a `np.ndarray`, you should first vectorize it before applying a
    sampler.

    This class implements the sampler boilerplate for the sampler.

    Args:
        sample_function: A sampler function to be decorated.
    """
    def generate_sampler(continuous_pulse: Callable) -> Callable:
        """Return a decorated sampler function."""
        @functools.wraps(continuous_pulse)
        def call_sampler(duration: int, *args, **kwargs) -> commands.SamplePulse:
            """Replace the call to the continuous function with a call to the
            sampler applied to the analytic pulse function."""
            sampled_pulse = sample_function(continuous_pulse, duration, *args, **kwargs)
            return np.asarray(sampled_pulse, dtype=np.complex_)

        # Update type annotations for wrapped continuous function to be discrete
        call_sampler = _update_annotations(call_sampler)
        # Update docstring with that of the sampler and include sampled function documentation.
        call_sampler = _update_docstring(call_sampler, sample_function)
        # Unset wrapped to return base sampler signature
        # but still get rest of benefits of wraps
        # such as __name__, __qualname__
        call_sampler.__dict__.pop('__wrapped__')
        # wrap with functional pulse
        return commands.functional_pulse(call_sampler)

    return generate_sampler
python
{ "resource": "" }
q268512
filter_backends
test
def filter_backends(backends, filters=None, **kwargs):
    """Return the backends matching the specified filtering.

    Filter the `backends` list by their `configuration` or `status`
    attributes, or from a boolean callable. The criteria for filtering can
    be specified via `**kwargs` or as a callable via `filters`, and the
    backends must fulfill all specified conditions.

    Args:
        backends (list[BaseBackend]): list of backends.
        filters (callable): filtering conditions as a callable.
        **kwargs (dict): dict of criteria.

    Returns:
        list[BaseBackend]: a list of backend instances matching the
            conditions.
    """
    def _match_all(obj, criteria):
        """Return True if all items in criteria matches items in obj."""
        return all(getattr(obj, key_, None) == value_
                   for key_, value_ in criteria.items())

    # Split the criteria without querying the API: a key present in every
    # backend's configuration is a configuration filter; anything else is
    # checked against backend.status (one API call per backend).
    configuration_filters = {}
    status_filters = {}
    for key, value in kwargs.items():
        if all(key in backend.configuration() for backend in backends):
            configuration_filters[key] = value
        else:
            status_filters[key] = value

    if configuration_filters:
        backends = [b for b in backends
                    if _match_all(b.configuration(), configuration_filters)]

    if status_filters:
        backends = [b for b in backends
                    if _match_all(b.status(), status_filters)]

    # Finally apply the user-supplied acceptor callable, if any.
    return list(filter(filters, backends))
python
{ "resource": "" }
q268513
resolve_backend_name
test
def resolve_backend_name(name, backends, deprecated, aliased):
    """Resolve backend name from a deprecated name or an alias.

    A group will be resolved in order of member priorities, depending on
    availability.

    Args:
        name (str): name of backend to resolve
        backends (list[BaseBackend]): list of available backends.
        deprecated (dict[str: str]): dict of deprecated names.
        aliased (dict[str: list[str]]): dict of aliased names.

    Returns:
        str: resolved name (name of an available backend)

    Raises:
        LookupError: if name cannot be resolved through regular available
            names, nor deprecated, nor alias names.
    """
    available = [backend.name() for backend in backends]

    candidate = deprecated.get(name, aliased.get(name, name))
    if isinstance(candidate, list):
        # Alias groups resolve to the first available member, else "".
        resolved_name = ""
        for member in candidate:
            if member in available:
                resolved_name = member
                break
    else:
        resolved_name = candidate

    if resolved_name not in available:
        raise LookupError("backend '{}' not found.".format(name))

    if name in deprecated:
        logger.warning("WARNING: '%s' is deprecated. Use '%s'.",
                       name, resolved_name)

    return resolved_name
python
{ "resource": "" }
q268514
dag_to_circuit
test
def dag_to_circuit(dag):
    """Build a ``QuantumCircuit`` object from a ``DAGCircuit``.

    Args:
        dag (DAGCircuit): the input dag.

    Return:
        QuantumCircuit: the circuit representing the input dag.
    """
    # Recreate the registers of the DAG, preserving insertion order.
    qregs = collections.OrderedDict(
        (qreg.name, QuantumRegister(qreg.size, name=qreg.name))
        for qreg in dag.qregs.values())
    cregs = collections.OrderedDict(
        (creg.name, ClassicalRegister(creg.size, name=creg.name))
        for creg in dag.cregs.values())

    circuit = QuantumCircuit(*qregs.values(), *cregs.values(),
                             name=dag.name or None)

    for node in dag.topological_op_nodes():
        qubits = [qregs[qarg[0].name][qarg[1]] for qarg in node.qargs]
        clbits = [cregs[carg[0].name][carg[1]] for carg in node.cargs]

        # Carry over any classical-control condition attached to the node.
        if node.condition is None:
            control = None
        else:
            control = (node.condition[0], node.condition[1])

        inst = node.op.copy()
        inst.control = control
        circuit.append(inst, qubits, clbits)
    return circuit
python
{ "resource": "" }
q268515
make_dict_observable
test
def make_dict_observable(matrix_observable):
    """Convert an observable in matrix form to dictionary form.

    Takes in a diagonal observable as a matrix and converts it to a
    dictionary form. Can also handle a list sorted of the diagonal elements.

    Args:
        matrix_observable (list): The observable to be converted to
            dictionary form. Can be a matrix or just an ordered list of
            observed values.

    Returns:
        Dict: A dictionary with all observable states as keys, and
            corresponding values being the observed value for that state.
    """
    observable = np.array(matrix_observable)
    observable_size = len(observable)
    # Bit-string width needed to label every state of the observable.
    num_bits = int(np.ceil(np.log2(observable_size)))
    binary_format = '0{}b'.format(num_bits)
    if observable.ndim == 2:
        # Only the diagonal carries information for a diagonal observable.
        observable = observable.diagonal()
    return {format(state, binary_format): observable[state]
            for state in range(observable_size)}
python
{ "resource": "" }
q268516
QasmParser.update_symtab
test
def update_symtab(self, obj):
    """Update a node in the symbol table.

    Everything in the symtab must be a node with these attributes:
    name - the string name of the object
    type - the string type of the object
    line - the source line where the type was first found
    file - the source file where the type was first found
    """
    symtab = self.current_symtab
    if obj.name not in symtab:
        symtab[obj.name] = obj
        return
    previous = symtab[obj.name]
    raise QasmError("Duplicate declaration for",
                    obj.type + " '" + obj.name + "' at line",
                    str(obj.line) + ', file',
                    obj.file + '.\nPrevious occurrence at line',
                    str(previous.line) + ', file',
                    previous.file)
python
{ "resource": "" }
q268517
QasmParser.verify_declared_bit
test
def verify_declared_bit(self, obj):
    """Verify a qubit id against the gate prototype."""
    # Gate args are checked against the formal parameters of the gate
    # prototype, so the symbol must exist in the current scope.
    if obj.name not in self.current_symtab:
        raise QasmError("Cannot find symbol '" + obj.name
                        + "' in argument list for gate, line",
                        str(obj.line), 'file', obj.file)

    # Ensure the symbol comes from the bitlist and not from the
    # argument list.
    sym = self.current_symtab[obj.name]
    if sym.type != 'id' or not sym.is_bit:
        raise QasmError("Bit", obj.name,
                        'is not declared as a bit in the gate.')
python
{ "resource": "" }
q268518
QasmParser.verify_exp_list
test
def verify_exp_list(self, obj):
    """Verify each expression in a list."""
    # Each child may be the head of an expression tree; walk them
    # recursively and ensure every Id resolves in the current symtab
    # (external functions are exempt).
    if obj.children is None:
        return
    for child in obj.children:
        if isinstance(child, node.Id):
            if child.name in self.external_functions:
                continue
            if child.name not in self.current_symtab:
                raise QasmError("Argument '" + child.name
                                + "' in expression cannot be "
                                + "found, line", str(child.line),
                                "file", child.file)
        elif hasattr(child, "children"):
            self.verify_exp_list(child)
python
{ "resource": "" }
q268519
QasmParser.verify_as_gate
test
def verify_as_gate(self, obj, bitlist, arglist=None):
    """Verify a user defined gate call."""
    # The symbol must have been declared globally as a gate or opaque.
    if obj.name not in self.global_symtab:
        raise QasmError("Cannot find gate definition for '" + obj.name
                        + "', line", str(obj.line), 'file', obj.file)
    g_sym = self.global_symtab[obj.name]
    if g_sym.type not in ('gate', 'opaque'):
        raise QasmError("'" + obj.name + "' is used as a gate "
                        + "or opaque call but the symbol is neither;"
                        + " it is a '" + g_sym.type + "' line",
                        str(obj.line), 'file', obj.file)

    # The qubit count must match the declaration.
    if g_sym.n_bits() != bitlist.size():
        raise QasmError("Gate or opaque call to '" + obj.name
                        + "' uses", str(bitlist.size()),
                        "qubits but is declared for",
                        str(g_sym.n_bits()), "qubits", "line",
                        str(obj.line), 'file', obj.file)

    # The classical argument count must match as well.
    if arglist:
        if g_sym.n_args() != arglist.size():
            raise QasmError("Gate or opaque call to '" + obj.name
                            + "' uses", str(arglist.size()),
                            "qubits but is declared for",
                            str(g_sym.n_args()), "qubits", "line",
                            str(obj.line), 'file', obj.file)
    elif g_sym.n_args() > 0:
        raise QasmError("Gate or opaque call to '" + obj.name
                        + "' has no arguments but is declared for",
                        str(g_sym.n_args()), "qubits", "line",
                        str(obj.line), 'file', obj.file)
python
{ "resource": "" }
q268520
QasmParser.verify_reg
test
def verify_reg(self, obj, object_type):
    """Verify a register.

    The register must be declared, its type must match, and any index
    must fall inside the declared size.
    """
    if obj.name not in self.global_symtab:
        raise QasmError('Cannot find definition for', object_type,
                        "'" + obj.name + "'", 'at line', str(obj.line),
                        'file', obj.file)

    g_sym = self.global_symtab[obj.name]
    if g_sym.type != object_type:
        raise QasmError("Type for '" + g_sym.name + "' should be '"
                        + object_type + "' but was found to be '"
                        + g_sym.type + "'", "line", str(obj.line),
                        "file", obj.file)

    if obj.type != 'indexed_id':
        return
    index = obj.index
    bound = g_sym.index
    if not 0 <= index < bound:
        raise QasmError("Register index for '" + g_sym.name
                        + "' out of bounds. Index is", str(index),
                        "bound is 0 <= index <", str(bound),
                        "at line", str(obj.line), "file", obj.file)
python
{ "resource": "" }
q268521
QasmParser.verify_reg_list
test
def verify_reg_list(self, obj, object_type):
    """Verify a list of registers.

    The object is either a bitlist or an idlist; every child must be a
    properly declared register of the requested type.
    """
    for child in obj.children:
        self.verify_reg(child, object_type)
python
{ "resource": "" }
q268522
QasmParser.find_column
test
def find_column(self, input_, token):
    """Compute the 1-based column of *token* within *input_*.

    Args:
        input_ (str): the full input text being lexed.
        token: a PLY token instance with a ``lexpos`` attribute, or
            ``None`` (returns column 0).

    Returns:
        int: 1-based column number of the token on its line.
    """
    if token is None:
        return 0
    # rfind returns -1 when the token is on the first line, which makes
    # the subtraction below yield a 1-based column without special-casing.
    # (The previous version clamped last_cr to 0 and added 1, which
    # over-reported by one column on every line after the first.)
    last_cr = input_.rfind('\n', 0, token.lexpos)
    return token.lexpos - last_cr
python
{ "resource": "" }
q268523
QasmParser.parse_debug
test
def parse_debug(self, val):
    """Set the parse_deb field.

    Only the literal booleans True/False are accepted; anything else
    (including truthy/falsy non-bools) raises QasmError.
    """
    if val is True or val is False:
        self.parse_deb = val
    else:
        raise QasmError("Illegal debug value '" + str(val)
                        + "' must be True or False.")
python
{ "resource": "" }
q268524
QasmParser.parse
test
def parse(self, data):
    """Parse some data.

    The PLY parser populates ``self.qasm`` as a side effect; a missing
    result means an exception was swallowed during parsing.
    """
    self.parser.parse(data, lexer=self.lexer, debug=self.parse_deb)
    if self.qasm is not None:
        return self.qasm
    raise QasmError("Uncaught exception in parser; "
                    + "see previous messages for details.")
python
{ "resource": "" }
q268525
QasmParser.run
test
def run(self, data):
    """Parser runner.

    To use this module stand-alone: parses *data* once and prints the
    resulting AST.

    Args:
        data (str): OPENQASM source text.
    """
    # The previous version called self.parser.parse(data, debug=True)
    # twice, discarding the second result and doing all the parse work
    # a second time; a single call is sufficient.
    ast = self.parser.parse(data, debug=True)
    ast.to_string(0)
python
{ "resource": "" }
q268526
Qasm.parse
test
def parse(self):
    """Parse the data.

    If a filename was supplied at construction, its contents are
    (re)loaded into self._data before parsing.

    Returns:
        the AST produced by QasmParser.parse (project-defined type).
    """
    if self._filename:
        with open(self._filename) as ifile:
            self._data = ifile.read()

    with QasmParser(self._filename) as qasm_p:
        # Debug output is disabled for normal parsing.
        qasm_p.parse_debug(False)
        return qasm_p.parse(self._data)
python
{ "resource": "" }
q268527
crz
test
def crz(self, theta, ctl, tgt):
    """Apply crz from ctl to tgt with angle theta.

    Args:
        theta (float): rotation angle of the controlled-RZ gate.
        ctl: control qubit.
        tgt: target qubit.

    Returns:
        the result of appending the CrzGate to this circuit.
    """
    return self.append(CrzGate(theta), [ctl, tgt], [])
python
{ "resource": "" }
q268528
basis_state
test
def basis_state(str_state, num):
    """Return a basis state ndarray.

    Args:
        str_state (string): a string representing the state.
        num (int): the number of qubits

    Returns:
        ndarray: state(2**num) a quantum state with basis basis state.

    Raises:
        QiskitError: if the dimensions is wrong
    """
    # Interpret the bitstring first so malformed strings fail early.
    index = int(str_state, 2)
    if num < len(str_state):
        raise QiskitError('size of bitstring is greater than num.')
    state = np.zeros(1 << num, dtype=complex)
    state[index] = 1
    return state
python
{ "resource": "" }
q268529
projector
test
def projector(state, flatten=False):
    """maps a pure state to a state matrix

    Args:
        state (ndarray): the number of qubits
        flatten (bool): determine if state matrix of column work

    Returns:
        ndarray: state_mat(2**num, 2**num) if flatten is false
        ndarray: state_mat(4**num) if flatten is true stacked on by the column
    """
    rho = np.outer(state.conjugate(), state)
    # Column-major ('F') flattening stacks the matrix column by column.
    return rho.flatten(order='F') if flatten else rho
python
{ "resource": "" }
q268530
purity
test
def purity(state):
    """Calculate the purity of a quantum state.

    Args:
        state (ndarray): a quantum state
    Returns:
        float: purity.
    """
    rho = np.array(state)
    # A 1-D array is a pure state vector, whose purity is exactly 1.
    if rho.ndim == 1:
        return 1.0
    # For a density matrix: purity = Tr(rho^2).
    return np.real(np.trace(rho @ rho))
python
{ "resource": "" }
q268531
CommutationAnalysis.run
test
def run(self, dag): """ Run the pass on the DAG, and write the discovered commutation relations into the property_set. """ # Initiate the commutation set self.property_set['commutation_set'] = defaultdict(list) # Build a dictionary to keep track of the gates on each qubit for wire in dag.wires: wire_name = "{0}[{1}]".format(str(wire[0].name), str(wire[1])) self.property_set['commutation_set'][wire_name] = [] # Add edges to the dictionary for each qubit for node in dag.topological_op_nodes(): for (_, _, edge_data) in dag.edges(node): edge_name = edge_data['name'] self.property_set['commutation_set'][(node, edge_name)] = -1 for wire in dag.wires: wire_name = "{0}[{1}]".format(str(wire[0].name), str(wire[1])) for current_gate in dag.nodes_on_wire(wire): current_comm_set = self.property_set['commutation_set'][wire_name] if not current_comm_set: current_comm_set.append([current_gate]) if current_gate not in current_comm_set[-1]: prev_gate = current_comm_set[-1][-1] if _commute(current_gate, prev_gate): current_comm_set[-1].append(current_gate) else: current_comm_set.append([current_gate]) temp_len = len(current_comm_set) self.property_set['commutation_set'][(current_gate, wire_name)] = temp_len - 1
python
{ "resource": "" }
q268532
backend_widget
test
def backend_widget(backend):
    """Creates a backend widget.

    Builds a VBox summarizing one backend: name, coupling map, qubit
    count, pending-jobs bar, least-busy/operational flags, and average
    T1/T2 values computed from the backend properties.

    Args:
        backend: a backend exposing configuration(), properties() and
            name() (presumably an IBMQ backend — confirm with caller).
    """
    config = backend.configuration().to_dict()
    props = backend.properties().to_dict()

    name = widgets.HTML(value="<h4>{name}</h4>".format(name=backend.name()),
                        layout=widgets.Layout())

    n_qubits = config['n_qubits']

    qubit_count = widgets.HTML(value="<h5><b>{qubits}</b></h5>".format(qubits=n_qubits),
                               layout=widgets.Layout(justify_content='center'))

    cmap = widgets.Output(layout=widgets.Layout(min_width='250px',
                                                max_width='250px',
                                                max_height='250px',
                                                min_height='250px',
                                                justify_content='center',
                                                align_items='center',
                                                margin='0px 0px 0px 0px'))

    with cmap:
        _cmap_fig = plot_gate_map(backend,
                                  plot_directed=False,
                                  label_qubits=False)
        if _cmap_fig is not None:
            display(_cmap_fig)
            # Prevents plot from showing up twice.
            plt.close(_cmap_fig)

    pending = generate_jobs_pending_widget()

    # Placeholders; filled in later by the monitor update thread.
    is_oper = widgets.HTML(value="<h5></h5>",
                           layout=widgets.Layout(justify_content='center'))

    least_busy = widgets.HTML(value="<h5></h5>",
                              layout=widgets.Layout(justify_content='center'))

    # Average T1/T2 over all qubits; entry [0] is T1, [1] is T2 per qubit.
    t1_units = props['qubits'][0][0]['unit']
    avg_t1 = round(sum([q[0]['value'] for q in props['qubits']])/n_qubits, 1)
    t1_widget = widgets.HTML(value="<h5>{t1} {units}</h5>".format(t1=avg_t1, units=t1_units),
                             layout=widgets.Layout())

    t2_units = props['qubits'][0][1]['unit']
    avg_t2 = round(sum([q[1]['value'] for q in props['qubits']])/n_qubits, 1)
    t2_widget = widgets.HTML(value="<h5>{t2} {units}</h5>".format(t2=avg_t2, units=t2_units),
                             layout=widgets.Layout())

    out = widgets.VBox([name, cmap, qubit_count, pending,
                        least_busy, is_oper, t1_widget, t2_widget],
                       layout=widgets.Layout(display='inline-flex',
                                             flex_flow='column',
                                             align_items='center'))

    # Flag used by the monitor thread to know the widget is still live.
    out._is_alive = True
    return out
python
{ "resource": "" }
q268533
update_backend_info
test
def update_backend_info(self, interval=60):
    """Updates the monitor info Called from another thread.

    Polls every backend's status roughly every *interval* seconds
    (checking once per second whether an update is due), refreshes the
    per-backend widgets, and stops when the owning thread clears its
    ``do_run`` flag or all backend widgets are dead.
    """
    my_thread = threading.currentThread()
    current_interval = 0
    started = False
    all_dead = False
    stati = [None]*len(self._backends)
    while getattr(my_thread, "do_run", True) and not all_dead:
        if current_interval == interval or started is False:
            for ind, back in enumerate(self._backends):
                _value = self.children[ind].children[2].value
                _head = _value.split('<b>')[0]
                try:
                    _status = back.status()
                    stati[ind] = _status
                except Exception:  # pylint: disable=W0703
                    # Status call failed: grey out the widget and mark dead.
                    self.children[ind].children[2].value = _value.replace(
                        _head, "<h5 style='color:#ff5c49'>")
                    self.children[ind]._is_alive = False
                else:
                    self.children[ind]._is_alive = True
                    self.children[ind].children[2].value = _value.replace(
                        _head, "<h5>")

            # Sort backend indices by pending job count (ascending).
            # NOTE(review): if a status call failed above, stati keeps a
            # stale/None entry and this may raise — confirm intended.
            idx = list(range(len(self._backends)))
            pending = [s.pending_jobs for s in stati]
            _, least_idx = zip(*sorted(zip(pending, idx)))

            # Make sure least pending is operational
            # NOTE(review): if no backend is operational,
            # least_pending_idx is left unbound (or stale) below.
            for ind in least_idx:
                if stati[ind].operational:
                    least_pending_idx = ind
                    break

            for var in idx:
                if var == least_pending_idx:
                    self.children[var].children[4].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[4].value = "<h5 style='color:#dc267f'>False</h5>"
                # Update the pending-jobs progress bar and grow its max
                # with headroom so the bar never pins at 100%.
                self.children[var].children[3].children[1].value = pending[var]
                self.children[var].children[3].children[1].max = max(
                    self.children[var].children[3].children[1].max, pending[var]+10)
                if stati[var].operational:
                    self.children[var].children[5].value = "<h5 style='color:#34bc6e'>True</h5>"
                else:
                    self.children[var].children[5].value = "<h5 style='color:#dc267f'>False</h5>"
            started = True
            current_interval = 0
        time.sleep(1)
        all_dead = not any([wid._is_alive for wid in self.children])
        current_interval += 1
python
{ "resource": "" }
q268534
generate_jobs_pending_widget
test
def generate_jobs_pending_widget():
    """Build the pending-jobs widget: a progress bar flanked by
    current-value and maximum labels that track the bar's traits.
    """
    progress = widgets.IntProgress(
        value=0,
        min=0,
        max=50,
        description='',
        orientation='horizontal',
        layout=widgets.Layout(max_width='180px'))
    progress.style.bar_color = '#71cddd'

    current_label = widgets.Label(
        value=str(progress.value),
        layout=widgets.Layout(min_width='auto'))

    max_label = widgets.Label(
        value=str(progress.max),
        layout=widgets.Layout(min_width='auto'))

    def _on_max_change(change):
        max_label.value = str(change['new'])

    def _on_val_change(change):
        current_label.value = str(change['new'])

    # Keep the labels in sync with the bar's max/value traits.
    progress.observe(_on_max_change, names='max')
    progress.observe(_on_val_change, names='value')

    return widgets.HBox([current_label, progress, max_label],
                        layout=widgets.Layout(max_width='250px',
                                              min_width='250px',
                                              justify_content='center'))
python
{ "resource": "" }
q268535
CXCancellation.run
test
def run(self, dag):
    """Run one pass of cx cancellation on the circuit

    Args:
        dag (DAGCircuit): the directed acyclic graph to run on.
    Returns:
        DAGCircuit: Transformed DAG.
    """
    for cx_run in dag.collect_runs(["cx"]):
        # Group consecutive gates that act on identical qubit arguments.
        partition = []
        chunk = []
        for this_op, next_op in zip(cx_run, cx_run[1:]):
            chunk.append(this_op)
            if this_op.qargs != next_op.qargs:
                partition.append(chunk)
                chunk = []
        chunk.append(cx_run[-1])
        partition.append(chunk)

        # An even-length chunk of identical CX gates cancels entirely;
        # an odd-length chunk reduces to its first gate.
        for chunk in partition:
            keep = 0 if len(chunk) % 2 == 0 else 1
            for op_node in chunk[keep:]:
                dag.remove_op_node(op_node)
    return dag
python
{ "resource": "" }
q268536
BaseProvider.get_backend
test
def get_backend(self, name=None, **kwargs):
    """Return a single backend matching the specified filtering.

    Args:
        name (str): name of the backend.
        **kwargs (dict): dict used for filtering.

    Returns:
        BaseBackend: a backend matching the filtering.

    Raises:
        QiskitBackendNotFoundError: if no backend could be found or
            more than one backend matches.
    """
    matches = self.backends(name, **kwargs)
    if len(matches) > 1:
        raise QiskitBackendNotFoundError('More than one backend matches the criteria')
    if not matches:
        raise QiskitBackendNotFoundError('No backend matches the criteria')
    return matches[0]
python
{ "resource": "" }
q268537
Choi._bipartite_shape
test
def _bipartite_shape(self):
    """Return the shape for bipartite matrix"""
    # Row and column dimensions are each (input_dim, output_dim).
    dims = (self._input_dim, self._output_dim)
    return dims + dims
python
{ "resource": "" }
q268538
_get_register_specs
test
def _get_register_specs(bit_labels):
    """Get the number and size of unique registers from bit_labels list.

    Args:
        bit_labels (list): this list is of the form::

            [['reg1', 0], ['reg1', 1], ['reg2', 0]]

        which indicates a register named "reg1" of size 2
        and a register named "reg2" of size 1. This is the
        format of classic and quantum bit labels in qobj
        header.

    Yields:
        tuple: iterator of register_name:size pairs.
    """
    for name, group in itertools.groupby(bit_labels, operator.itemgetter(0)):
        # Register size is one more than the largest index seen.
        yield name, max(label[1] for label in group) + 1
python
{ "resource": "" }
q268539
_truncate_float
test
def _truncate_float(matchobj, format_str='0.2g'):
    """Truncate long floats

    Args:
        matchobj (re.Match): contains original float
        format_str (str): format specifier
    Returns:
        str: returns truncated float
    """
    text = matchobj.group(0)
    if not text:
        return ''
    return format(float(text), format_str)
python
{ "resource": "" }
q268540
QCircuitImage.latex
test
def latex(self, aliases=None):
    """Return LaTeX string representation of circuit.

    This method uses the LaTeX Qconfig package to create a graphical
    representation of the circuit.

    Returns:
        string: for writing to a LaTeX file.
    """
    self._initialize_latex_array(aliases)
    self._build_latex_array(aliases)
    header_1 = r"""% \documentclass[preview]{standalone}
% If the image is too large to fit on this documentclass use
\documentclass[draft]{beamer}
"""
    beamer_line = "\\usepackage[size=custom,height=%d,width=%d,scale=%.1f]{beamerposter}\n"
    header_2 = r"""% instead and customize the height and width (in cm) to fit.
% Large images may run out of memory quickly.
% To fix this use the LuaLaTeX compiler, which dynamically
% allocates memory.
\usepackage[braket, qm]{qcircuit}
\usepackage{amsmath}
\pdfmapfile{+sansmathaccent.map}
% \usepackage[landscape]{geometry}
% Comment out the above line if using the beamer documentclass.
\begin{document}
\begin{equation*}"""
    qcircuit_line = r"""
\Qcircuit @C=%.1fem @R=%.1fem @!R {
"""
    output = io.StringIO()
    output.write(header_1)
    output.write('%% img_width = %d, img_depth = %d\n' % (self.img_width,
                                                          self.img_depth))
    output.write(beamer_line % self._get_beamer_page())
    output.write(header_2)
    output.write(qcircuit_line % (self.column_separation,
                                  self.row_separation))
    for i in range(self.img_width):
        output.write("\t \t")
        for j in range(self.img_depth + 1):
            cell_str = self._latex[i][j]
            # Don't truncate offset float if drawing a barrier
            if 'barrier' in cell_str:
                output.write(cell_str)
            else:
                # floats can cause "Dimension too large" latex error in
                # xymatrix this truncates floats to avoid issue.
                cell_str = re.sub(r'[-+]?\d*\.\d{2,}|\d{2,}',
                                  _truncate_float,
                                  cell_str)
                output.write(cell_str)
            if j != self.img_depth:
                output.write(" & ")
            else:
                output.write(r'\\' + '\n')
    output.write('\t }\n')
    output.write('\\end{equation*}\n\n')
    output.write('\\end{document}')
    contents = output.getvalue()
    output.close()
    return contents
python
{ "resource": "" }
q268541
QCircuitImage._get_image_depth
test
def _get_image_depth(self): """Get depth information for the circuit. Returns: int: number of columns in the circuit int: total size of columns in the circuit """ max_column_widths = [] for layer in self.ops: # store the max width for the layer current_max = 0 for op in layer: # update current op width arg_str_len = 0 # the wide gates for arg in op.op.params: arg_str = re.sub(r'[-+]?\d*\.\d{2,}|\d{2,}', _truncate_float, str(arg)) arg_str_len += len(arg_str) # the width of the column is the max of all the gates in the column current_max = max(arg_str_len, current_max) max_column_widths.append(current_max) # wires in the beginning and end columns = 2 # each layer is one column columns += len(self.ops) # every 3 characters is roughly one extra 'unit' of width in the cell # the gate name is 1 extra 'unit' # the qubit/cbit labels plus initial states is 2 more # the wires poking out at the ends is 2 more sum_column_widths = sum(1 + v / 3 for v in max_column_widths) # could be a fraction so ceil return columns, math.ceil(sum_column_widths) + 4
python
{ "resource": "" }
q268542
QCircuitImage._get_beamer_page
test
def _get_beamer_page(self): """Get height, width & scale attributes for the beamer page. Returns: tuple: (height, width, scale) desirable page attributes """ # PIL python package limits image size to around a quarter gigabyte # this means the beamer image should be limited to < 50000 # if you want to avoid a "warning" too, set it to < 25000 PIL_limit = 40000 # the beamer latex template limits each dimension to < 19 feet # (i.e. 575cm) beamer_limit = 550 # columns are roughly twice as big as rows aspect_ratio = self.sum_row_heights / self.sum_column_widths # choose a page margin so circuit is not cropped margin_factor = 1.5 height = min(self.sum_row_heights * margin_factor, beamer_limit) width = min(self.sum_column_widths * margin_factor, beamer_limit) # if too large, make it fit if height * width > PIL_limit: height = min(np.sqrt(PIL_limit * aspect_ratio), beamer_limit) width = min(np.sqrt(PIL_limit / aspect_ratio), beamer_limit) # if too small, give it a minimum size height = max(height, 10) width = max(width, 10) return (height, width, self.scale)
python
{ "resource": "" }
q268543
_load_schema
test
def _load_schema(file_path, name=None):
    """Loads the QObj schema for use in future validations.

    Caches schema in _SCHEMAS module attribute.

    Args:
        file_path(str): Path to schema.
        name(str): Given name for schema. Defaults to file_path filename
            without schema.

    Return:
        schema(dict): Loaded schema.
    """
    if name is None:
        # Default the cache key to the file name minus its extension.
        name = os.path.splitext(os.path.basename(file_path))[0]
    try:
        return _SCHEMAS[name]
    except KeyError:
        # Not cached yet: read it from disk and memoize.
        with open(file_path, 'r') as schema_file:
            _SCHEMAS[name] = json.load(schema_file)
        return _SCHEMAS[name]
python
{ "resource": "" }
q268544
_get_validator
test
def _get_validator(name, schema=None, check_schema=True,
                   validator_class=None, **validator_kwargs):
    """Generate validator for JSON schema.

    Args:
        name (str): Name for validator. Will be validator key in
            `_VALIDATORS` dict.
        schema (dict): JSON schema `dict`. If not provided searches for
            schema in `_SCHEMAS`.
        check_schema (bool): Verify schema is valid.
        validator_class (jsonschema.IValidator): jsonschema IValidator
            instance. Default behavior is to determine this from the
            schema `$schema` field.
        **validator_kwargs (dict): Additional keyword arguments for
            validator.

    Return:
        jsonschema.IValidator: Validator for JSON schema.

    Raises:
        SchemaValidationError: Raised if validation fails.
    """
    if schema is None:
        if name not in _SCHEMAS:
            raise SchemaValidationError("Valid schema name or schema must "
                                        "be provided.")
        schema = _SCHEMAS[name]
    if name not in _VALIDATORS:
        # Resolve the validator implementation from the schema's `$schema`
        # field when one was not supplied explicitly.
        if validator_class is None:
            validator_class = jsonschema.validators.validator_for(schema)
        # Generate and store validator in _VALIDATORS
        _VALIDATORS[name] = validator_class(schema, **validator_kwargs)
    validator = _VALIDATORS[name]
    if check_schema:
        validator.check_schema(schema)
    return validator
python
{ "resource": "" }
q268545
_load_schemas_and_validators
test
def _load_schemas_and_validators():
    """Load all default schemas into `_SCHEMAS`."""
    # Schemas live at the package root, two levels above this module.
    schema_base_path = os.path.join(os.path.dirname(__file__), '../..')
    for name, relative_path in _DEFAULT_SCHEMA_PATHS.items():
        _load_schema(os.path.join(schema_base_path, relative_path), name)
        _get_validator(name)
python
{ "resource": "" }
q268546
validate_json_against_schema
test
def validate_json_against_schema(json_dict, schema, err_msg=None):
    """Validates JSON dict against a schema.

    Args:
        json_dict (dict): JSON to be validated.
        schema (dict or str): JSON schema dictionary or the name of one of the
            standards schemas in Qiskit to validate against it. The list of
            standard schemas is: ``backend_configuration``,
            ``backend_properties``, ``backend_status``,
            ``default_pulse_configuration``, ``job_status``, ``qobj``,
            ``result``.
        err_msg (str): Optional error message.

    Raises:
        SchemaValidationError: Raised if validation fails.
    """
    try:
        if isinstance(schema, str):
            # A standard-schema name was given: use the cached schema and
            # its memoized validator.
            schema_name = schema
            schema = _SCHEMAS[schema_name]
            validator = _get_validator(schema_name)
            validator.validate(json_dict)
        else:
            jsonschema.validate(json_dict, schema)
    except jsonschema.ValidationError as err:
        if err_msg is None:
            err_msg = "JSON failed validation. Set Qiskit log level to DEBUG " \
                      "for further information."
        # Surface a short summary to the caller; the full cascading cause
        # goes to the DEBUG log only.
        newerr = SchemaValidationError(err_msg)
        newerr.__cause__ = _SummaryValidationError(err)
        logger.debug('%s', _format_causes(err))
        raise newerr
python
{ "resource": "" }
q268547
_format_causes
test
def _format_causes(err, level=0): """Return a cascading explanation of the validation error. Returns a cascading explanation of the validation error in the form of:: <validator> failed @ <subfield_path> because of: <validator> failed @ <subfield_path> because of: ... <validator> failed @ <subfield_path> because of: ... ... For example:: 'oneOf' failed @ '<root>' because of: 'required' failed @ '<root>.config' because of: 'meas_level' is a required property Meaning the validator 'oneOf' failed while validating the whole object because of the validator 'required' failing while validating the property 'config' because its 'meas_level' field is missing. The cascade repeats the format "<validator> failed @ <path> because of" until there are no deeper causes. In this case, the string representation of the error is shown. Args: err (jsonschema.ValidationError): the instance to explain. level (int): starting level of indentation for the cascade of explanations. Return: str: a formatted string with the explanation of the error. """ lines = [] def _print(string, offset=0): lines.append(_pad(string, offset=offset)) def _pad(string, offset=0): padding = ' ' * (level + offset) padded_lines = [padding + line for line in string.split('\n')] return '\n'.join(padded_lines) def _format_path(path): def _format(item): if isinstance(item, str): return '.{}'.format(item) return '[{}]'.format(item) return ''.join(['<root>'] + list(map(_format, path))) _print('\'{}\' failed @ \'{}\' because of:'.format( err.validator, _format_path(err.absolute_path))) if not err.context: _print(str(err.message), offset=1) else: for suberr in err.context: lines.append(_format_causes(suberr, level+1)) return '\n'.join(lines)
python
{ "resource": "" }
q268548
majority
test
def majority(p, a, b, c):
    """Majority gate.

    Appends the 3-qubit majority circuit (two CNOTs followed by a
    Toffoli) to circuit ``p`` acting on qubits ``a``, ``b``, ``c``.
    """
    for gate, qubits in ((p.cx, (c, b)), (p.cx, (c, a)), (p.ccx, (a, b, c))):
        gate(*qubits)
python
{ "resource": "" }
q268549
unmajority
test
def unmajority(p, a, b, c):
    """Unmajority gate.

    Appends the inverse of the majority circuit (a Toffoli followed by
    two CNOTs) to circuit ``p`` acting on qubits ``a``, ``b``, ``c``.
    """
    for gate, qubits in ((p.ccx, (a, b, c)), (p.cx, (c, a)), (p.cx, (a, b))):
        gate(*qubits)
python
{ "resource": "" }
q268550
_generate_latex_source
test
def _generate_latex_source(circuit, filename=None,
                           scale=0.7, style=None, reverse_bits=False,
                           plot_barriers=True, justify=None):
    """Convert QuantumCircuit to LaTeX string.

    Args:
        circuit (QuantumCircuit): input circuit
        scale (float): image scaling
        filename (str): optional filename to write latex
        style (dict or str): dictionary of style or file name of style file
        reverse_bits (bool): When set to True reverse the bit order inside
            registers for the output visualization.
        plot_barriers (bool): Enable/disable drawing barriers in the output
            circuit. Defaults to True.
        justify (str) : `left`, `right` or `none`. Defaults to `left`. Says
            how the circuit should be justified.

    Returns:
        str: Latex string appropriate for writing to file.
    """
    qregs, cregs, ops = utils._get_layered_instructions(
        circuit, reverse_bits=reverse_bits, justify=justify)
    image = _latex.QCircuitImage(qregs, cregs, ops, scale, style=style,
                                 plot_barriers=plot_barriers,
                                 reverse_bits=reverse_bits)
    latex_source = image.latex()
    # Persist the LaTeX source when an output path was supplied.
    if filename:
        with open(filename, 'w') as latex_file:
            latex_file.write(latex_source)
    return latex_source
python
{ "resource": "" }
q268551
_matplotlib_circuit_drawer
test
def _matplotlib_circuit_drawer(circuit, scale=0.7, filename=None, style=None,
                               plot_barriers=True, reverse_bits=False,
                               justify=None):
    """Draw a quantum circuit based on matplotlib.

    If `%matplotlib inline` is invoked in a Jupyter notebook, it visualizes
    a circuit inline. We recommend `%config InlineBackend.figure_format =
    'svg'` for the inline visualization.

    Args:
        circuit (QuantumCircuit): a quantum circuit
        scale (float): scaling factor
        filename (str): file path to save image to
        style (dict or str): dictionary of style or file name of style file
        reverse_bits (bool): When set to True reverse the bit order inside
            registers for the output visualization.
        plot_barriers (bool): Enable/disable drawing barriers in the output
            circuit. Defaults to True.
        justify (str) : `left`, `right` or `none`. Defaults to `left`. Says
            how the circuit should be justified.

    Returns:
        matplotlib.figure: a matplotlib figure object for the circuit diagram
    """
    qregs, cregs, ops = utils._get_layered_instructions(
        circuit, reverse_bits=reverse_bits, justify=justify)
    drawer = _matplotlib.MatplotlibDrawer(qregs, cregs, ops, scale=scale,
                                          style=style,
                                          plot_barriers=plot_barriers,
                                          reverse_bits=reverse_bits)
    return drawer.draw(filename)
python
{ "resource": "" }
q268552
random_unitary
test
def random_unitary(dim, seed=None):
    """
    Return a random dim x dim unitary Operator from the Haar measure.

    Args:
        dim (int): the dim of the state space.
        seed (int): Optional. To set a random seed.

    Returns:
        Operator: (dim, dim) unitary operator.

    Raises:
        QiskitError: if dim is not a positive power of 2.
    """
    # Validate before calling math.log2: dim <= 0 would otherwise raise a
    # bare ValueError from log2 instead of the documented QiskitError.
    if dim < 1 or not math.log2(dim).is_integer():
        raise QiskitError("Desired unitary dimension not a positive power of 2.")
    matrix = np.zeros([dim, dim], dtype=complex)
    for j in range(dim):
        # Only the first draw consumes the seed; later columns stay random.
        if j == 0:
            a = random_state(dim, seed)
        else:
            a = random_state(dim)
        matrix[:, j] = np.copy(a)
        # Gram-Schmidt: orthogonalize column j against all previous columns.
        i = j - 1
        while i >= 0:
            dc = np.vdot(matrix[:, i], a)
            matrix[:, j] = matrix[:, j] - dc * matrix[:, i]
            i = i - 1
        # Normalize the orthogonalized column to unit length.
        matrix[:, j] = matrix[:, j] * \
            (1.0 / np.sqrt(np.vdot(matrix[:, j], matrix[:, j])))
    return Operator(matrix)
python
{ "resource": "" }
q268553
random_density_matrix
test
def random_density_matrix(length, rank=None, method='Hilbert-Schmidt',
                          seed=None):
    """
    Generate a random density matrix rho.

    Args:
        length (int): the length of the density matrix.
        rank (int or None): the rank of the density matrix. The default
            value is full-rank.
        method (string): the method to use.
            'Hilbert-Schmidt': sample rho from the Hilbert-Schmidt metric.
            'Bures': sample rho from the Bures metric.
        seed (int): Optional. To set a random seed.

    Returns:
        ndarray: rho (length, length) a density matrix.

    Raises:
        QiskitError: if the method is not valid.
    """
    # Pick the sampler for the requested metric, then delegate.
    if method == 'Hilbert-Schmidt':
        sampler = __random_density_hs
    elif method == 'Bures':
        sampler = __random_density_bures
    else:
        raise QiskitError('Error: unrecognized method {}'.format(method))
    return sampler(length, rank, seed)
python
{ "resource": "" }
q268554
__ginibre_matrix
test
def __ginibre_matrix(nrow, ncol=None, seed=None): """ Return a normally distributed complex random matrix. Args: nrow (int): number of rows in output matrix. ncol (int): number of columns in output matrix. seed (int): Optional. To set a random seed. Returns: ndarray: A complex rectangular matrix where each real and imaginary entry is sampled from the normal distribution. """ if ncol is None: ncol = nrow if seed is not None: np.random.seed(seed) G = np.random.normal(size=(nrow, ncol)) + \ np.random.normal(size=(nrow, ncol)) * 1j return G
python
{ "resource": "" }
q268555
__random_density_hs
test
def __random_density_hs(N, rank=None, seed=None):
    """
    Generate a random density matrix from the Hilbert-Schmidt metric.

    Args:
        N (int): the length of the density matrix.
        rank (int or None): the rank of the density matrix. The default
            value is full-rank.
        seed (int): Optional. To set a random seed.

    Returns:
        ndarray: rho (N,N a density matrix.
    """
    # G G^dagger is positive semidefinite; normalizing by its trace makes
    # it a valid density matrix.
    mat = __ginibre_matrix(N, rank, seed)
    mat = mat.dot(mat.conj().T)
    return mat / np.trace(mat)
python
{ "resource": "" }
q268556
__random_density_bures
test
def __random_density_bures(N, rank=None, seed=None):
    """
    Generate a random density matrix from the Bures metric.

    Args:
        N (int): the length of the density matrix.
        rank (int or None): the rank of the density matrix. The default
            value is full-rank.
        seed (int): Optional. To set a random seed.

    Returns:
        ndarray: rho (N,N) a density matrix.
    """
    # Mix in a Haar-random unitary, then proceed as in the
    # Hilbert-Schmidt construction.
    shifted = np.eye(N) + random_unitary(N).data
    mat = shifted.dot(__ginibre_matrix(N, rank, seed))
    mat = mat.dot(mat.conj().T)
    return mat / np.trace(mat)
python
{ "resource": "" }
q268557
GateBody.calls
test
def calls(self):
    """Return a list of custom gate names in this gate body."""
    # Only custom_unitary children are calls to user-defined gates.
    return [child.name for child in self.children
            if child.type == "custom_unitary"]
python
{ "resource": "" }
q268558
SuperOp.power
test
def power(self, n):
    """Return the compose of a QuantumChannel with itself n times.

    Args:
        n (int): compute the matrix power of the superoperator matrix.

    Returns:
        SuperOp: the n-times composition channel as a SuperOp object.

    Raises:
        QiskitError: if the input and output dimensions of the
        QuantumChannel are not equal, or the power is not an integer.
    """
    if not isinstance(n, (int, np.integer)):
        raise QiskitError("Can only power with integer powers.")
    if self._input_dim != self._output_dim:
        raise QiskitError("Can only power with input_dim = output_dim.")
    # Overrides the base-class power: np.linalg.matrix_power on the
    # superoperator matrix is more efficient than repeated composition.
    powered = np.linalg.matrix_power(self._data, n)
    return SuperOp(powered, self.input_dims(), self.output_dims())
python
{ "resource": "" }
q268559
SuperOp._compose_subsystem
test
def _compose_subsystem(self, other, qargs, front=False):
    """Return the composition channel on specified subsystems.

    Args:
        other (SuperOp): the channel to compose onto the qargs subsystems.
        qargs (list): subsystem positions to compose on.
        front (bool): if True compose in front (other applied first),
            otherwise compose behind. [Default: False]

    Returns:
        SuperOp: the composed channel.
    """
    # Compute tensor contraction indices from qargs
    input_dims = list(self.input_dims())
    output_dims = list(self.output_dims())
    if front:
        num_indices = len(self.input_dims())
        shift = 2 * len(self.output_dims())
        right_mul = True
        for pos, qubit in enumerate(qargs):
            input_dims[qubit] = other._input_dims[pos]
    else:
        num_indices = len(self.output_dims())
        shift = 0
        right_mul = False
        for pos, qubit in enumerate(qargs):
            output_dims[qubit] = other._output_dims[pos]
    # Reshape current matrix
    # Note that we must reverse the subsystem dimension order as
    # qubit 0 corresponds to the right-most position in the tensor
    # product, which is the last tensor wire index.
    tensor = np.reshape(self.data, self._shape)
    mat = np.reshape(other.data, other._shape)
    # Add first set of indices
    indices = [2 * num_indices - 1 - qubit for qubit in qargs
               ] + [num_indices - 1 - qubit for qubit in qargs]
    # np.prod replaces np.product, an alias removed in NumPy 2.0.
    final_shape = [np.prod(output_dims)**2, np.prod(input_dims)**2]
    data = np.reshape(
        self._einsum_matmul(tensor, mat, indices, shift, right_mul),
        final_shape)
    return SuperOp(data, input_dims, output_dims)
python
{ "resource": "" }
q268560
SuperOp._instruction_to_superop
test
def _instruction_to_superop(cls, instruction):
    """Convert a QuantumCircuit or Instruction to a SuperOp."""
    # Circuits are first flattened into a single composite instruction.
    if isinstance(instruction, QuantumCircuit):
        instruction = instruction.to_instruction()
    # Start from the identity channel on the instruction's qubits
    # (superoperator dimension is 4**n) and fold the instruction into it.
    op = SuperOp(np.eye(4 ** instruction.num_qubits))
    op._append_instruction(instruction)
    return op
python
{ "resource": "" }
q268561
BarrierBeforeFinalMeasurements.run
test
def run(self, dag): """Return a circuit with a barrier before last measurements.""" # Collect DAG nodes which are followed only by barriers or other measures. final_op_types = ['measure', 'barrier'] final_ops = [] for candidate_node in dag.named_nodes(*final_op_types): is_final_op = True for _, child_successors in dag.bfs_successors(candidate_node): if any(suc.type == 'op' and suc.name not in final_op_types for suc in child_successors): is_final_op = False break if is_final_op: final_ops.append(candidate_node) if not final_ops: return dag # Create a layer with the barrier and add registers from the original dag. barrier_layer = DAGCircuit() for qreg in dag.qregs.values(): barrier_layer.add_qreg(qreg) for creg in dag.cregs.values(): barrier_layer.add_creg(creg) final_qubits = set(final_op.qargs[0] for final_op in final_ops) barrier_layer.apply_operation_back( Barrier(len(final_qubits)), list(final_qubits), []) # Preserve order of final ops collected earlier from the original DAG. ordered_final_nodes = [node for node in dag.topological_op_nodes() if node in set(final_ops)] # Move final ops to the new layer and append the new layer to the DAG. for final_node in ordered_final_nodes: barrier_layer.apply_operation_back(final_node.op, final_node.qargs, final_node.cargs) for final_op in final_ops: dag.remove_op_node(final_op) dag.extend_back(barrier_layer) # Merge the new barrier into any other barriers adjacent_pass = MergeAdjacentBarriers() return adjacent_pass.run(dag)
python
{ "resource": "" }
q268562
circuits_to_qobj
test
def circuits_to_qobj(circuits, qobj_header=None, qobj_id=None,
                     backend_name=None, config=None, shots=None,
                     max_credits=None, basis_gates=None, coupling_map=None,
                     seed=None, memory=None):
    """Convert a list of circuits into a qobj.

    Deprecated shim around ``qiskit.compiler.assemble``.

    Args:
        circuits (list[QuantumCircuits] or QuantumCircuit): circuits to compile
        qobj_header (QobjHeader): header to pass to the results
        qobj_id (int): TODO: delete after qiskit-terra 0.8
        backend_name (str): TODO: delete after qiskit-terra 0.8
        config (dict): TODO: delete after qiskit-terra 0.8
        shots (int): TODO: delete after qiskit-terra 0.8
        max_credits (int): TODO: delete after qiskit-terra 0.8
        basis_gates (str): TODO: delete after qiskit-terra 0.8
        coupling_map (list): TODO: delete after qiskit-terra 0.8
        seed (int): TODO: delete after qiskit-terra 0.8
        memory (bool): TODO: delete after qiskit-terra 0.8

    Returns:
        Qobj: the Qobj to be run on the backends
    """
    warnings.warn('circuits_to_qobj is deprecated and will be removed in Qiskit Terra 0.9. '
                  'Use qiskit.compiler.assemble() to serialize circuits into a qobj.',
                  DeprecationWarning)
    qobj_header = qobj_header or QobjHeader()
    if backend_name:
        qobj_header.backend_name = backend_name
    # basis_gates and coupling_map were never consumed here; warn so
    # callers stop passing them.
    if basis_gates:
        warnings.warn('basis_gates was unused and will be removed.',
                      DeprecationWarning)
    if coupling_map:
        warnings.warn('coupling_map was unused and will be removed.',
                      DeprecationWarning)
    # Delegate the actual serialization to the supported API.
    qobj = assemble(experiments=circuits,
                    qobj_id=qobj_id,
                    qobj_header=qobj_header,
                    shots=shots,
                    memory=memory,
                    max_credits=max_credits,
                    seed_simulator=seed,
                    config=config)
    return qobj
python
{ "resource": "" }
q268563
Unroll3qOrMore.run
test
def run(self, dag):
    """Expand 3+ qubit gates using their decomposition rules.

    Args:
        dag(DAGCircuit): input dag
    Returns:
        DAGCircuit: output dag with maximum node degrees of 2
    Raises:
        QiskitError: if a 3q+ gate is not decomposable
    """
    for node in dag.threeQ_or_more_gates():
        # TODO: allow choosing other possible decompositions
        rule = node.op.definition
        if not rule:
            # Gates without a definition (e.g. opaque gates) cannot be
            # expanded further.
            raise QiskitError("Cannot unroll all 3q or more gates. "
                              "No rule to expand instruction %s." %
                              node.op.name)
        # hacky way to build a dag on the same register as the rule is defined
        # TODO: need anonymous rules to address wires by index
        decomposition = DAGCircuit()
        decomposition.add_qreg(rule[0][1][0][0])
        for inst in rule:
            decomposition.apply_operation_back(*inst)
        # The rule itself may contain 3q+ gates, so unroll recursively
        # before substituting.
        decomposition = self.run(decomposition)  # recursively unroll
        dag.substitute_node_with_dag(node, decomposition)
    return dag
python
{ "resource": "" }
q268564
Decompose.run
test
def run(self, dag): """Expand a given gate into its decomposition. Args: dag(DAGCircuit): input dag Returns: DAGCircuit: output dag where gate was expanded. """ # Walk through the DAG and expand each non-basis node for node in dag.op_nodes(self.gate): # opaque or built-in gates are not decomposable if not node.op.definition: continue # TODO: allow choosing among multiple decomposition rules rule = node.op.definition # hacky way to build a dag on the same register as the rule is defined # TODO: need anonymous rules to address wires by index decomposition = DAGCircuit() decomposition.add_qreg(rule[0][1][0][0]) if rule[0][2]: decomposition.add_creg(rule[0][2][0][0]) for inst in rule: decomposition.apply_operation_back(*inst) dag.substitute_node_with_dag(node, decomposition) return dag
python
{ "resource": "" }
q268565
UnitaryGate._define
test
def _define(self):
    """Calculate a subcircuit that implements this unitary.

    One-qubit unitaries decompose to a single U3 via Euler angles;
    two-qubit unitaries use the KAK decomposition. Other sizes leave
    ``self.definition`` unset, as before.
    """
    if self.num_qubits == 1:
        qreg = QuantumRegister(1, "q")
        theta, phi, lam = euler_angles_1q(self.to_matrix())
        self.definition = [(U3Gate(theta, phi, lam), [qreg[0]], [])]
    elif self.num_qubits == 2:
        self.definition = two_qubit_kak(self.to_matrix())
python
{ "resource": "" }
q268566
Nested.check_type
test
def check_type(self, value, attr, data):
    """Validate if the value is of the type of the schema's model.

    Assumes the nested schema is a ``BaseSchema``.
    """
    # With many=True the value itself must be a collection before the
    # per-item checks can run.
    if self.many and not is_collection(value):
        raise self._not_expected_type(
            value, Iterable, fields=[self], field_names=attr, data=data)
    _check_type = super().check_type
    errors = []
    # Normalize to a list so single values and collections share one path.
    values = value if self.many else [value]
    for idx, v in enumerate(values):
        try:
            _check_type(v, idx, values)
        except ValidationError as err:
            errors.append(err.messages)
    if errors:
        # For a single value, unwrap the one-element error list.
        errors = errors if self.many else errors[0]
        raise ValidationError(errors)
    return value
python
{ "resource": "" }
q268567
List.check_type
test
def check_type(self, value, attr, data):
    """Validate if it's a list of valid item-field values.

    Check if each element in the list can be validated by the item-field
    passed during construction.
    """
    # First make sure the value itself is acceptable for a List field.
    super().check_type(value, attr, data)
    errors = []
    # Then validate every element with the container item-field,
    # collecting all per-index error messages rather than failing fast.
    for idx, v in enumerate(value):
        try:
            self.container.check_type(v, idx, value)
        except ValidationError as err:
            errors.append(err.messages)
    if errors:
        raise ValidationError(errors)
    return value
python
{ "resource": "" }
q268568
BaseOperator._atol
test
def _atol(self, atol): """Set the absolute tolerence parameter for float comparisons.""" # NOTE: that this overrides the class value so applies to all # instances of the class. max_tol = self.__class__.MAX_TOL if atol < 0: raise QiskitError("Invalid atol: must be non-negative.") if atol > max_tol: raise QiskitError( "Invalid atol: must be less than {}.".format(max_tol)) self.__class__.ATOL = atol
python
{ "resource": "" }
q268569
BaseOperator._rtol
test
def _rtol(self, rtol): """Set the relative tolerence parameter for float comparisons.""" # NOTE: that this overrides the class value so applies to all # instances of the class. max_tol = self.__class__.MAX_TOL if rtol < 0: raise QiskitError("Invalid rtol: must be non-negative.") if rtol > max_tol: raise QiskitError( "Invalid rtol: must be less than {}.".format(max_tol)) self.__class__.RTOL = rtol
python
{ "resource": "" }
q268570
BaseOperator._reshape
test
def _reshape(self, input_dims=None, output_dims=None): """Reshape input and output dimensions of operator. Arg: input_dims (tuple): new subsystem input dimensions. output_dims (tuple): new subsystem output dimensions. Returns: Operator: returns self with reshaped input and output dimensions. Raises: QiskitError: if combined size of all subsystem input dimension or subsystem output dimensions is not constant. """ if input_dims is not None: if np.product(input_dims) != self._input_dim: raise QiskitError( "Reshaped input_dims are incompatible with combined input dimension." ) self._input_dims = tuple(input_dims) if output_dims is not None: if np.product(output_dims) != self._output_dim: raise QiskitError( "Reshaped input_dims are incompatible with combined input dimension." ) self._output_dims = tuple(output_dims) return self
python
{ "resource": "" }
q268571
BaseOperator.input_dims
test
def input_dims(self, qargs=None):
    """Return tuple of input dimension for specified subsystems."""
    all_dims = self._input_dims
    # No selection: report every subsystem dimension.
    if qargs is None:
        return all_dims
    return tuple(all_dims[index] for index in qargs)
python
{ "resource": "" }
q268572
BaseOperator.output_dims
test
def output_dims(self, qargs=None):
    """Return tuple of output dimension for specified subsystems."""
    all_dims = self._output_dims
    # No selection: report every subsystem dimension.
    if qargs is None:
        return all_dims
    return tuple(all_dims[index] for index in qargs)
python
{ "resource": "" }
q268573
BaseOperator.copy
test
def copy(self):
    """Make a copy of current operator."""
    # pylint: disable=no-value-for-parameter
    # Subclass constructors accept (data, input_dims, output_dims), so
    # re-invoking the concrete class on the raw data produces a copy.
    cls = type(self)
    return cls(self.data, self.input_dims(), self.output_dims())
python
{ "resource": "" }
q268574
BaseOperator.power
test
def power(self, n):
    """Return the compose of a operator with itself n times.

    Args:
        n (int): the number of times to compose with self (n>0).

    Returns:
        BaseOperator: the n-times composed operator.

    Raises:
        QiskitError: if the input and output dimensions of the operator
        are not equal, or the power is not a positive integer.
    """
    # NOTE: subclasses supporting negative or non-integer powers should
    # override this method.
    if not isinstance(n, (int, np.integer)) or n < 1:
        raise QiskitError("Can only power with positive integer powers.")
    if self._input_dim != self._output_dim:
        raise QiskitError("Can only power with input_dim = output_dim.")
    result = self.copy()
    # Fold self into the result n-1 more times.
    for _ in range(n - 1):
        result = result.compose(self)
    return result
python
{ "resource": "" }
q268575
BaseOperator._automatic_dims
test
def _automatic_dims(cls, dims, size): """Check if input dimension corresponds to qubit subsystems.""" if dims is None: dims = size elif np.product(dims) != size: raise QiskitError("dimensions do not match size.") if isinstance(dims, (int, np.integer)): num_qubits = int(np.log2(dims)) if 2 ** num_qubits == size: return num_qubits * (2,) return (dims,) return tuple(dims)
python
{ "resource": "" }
q268576
BaseOperator._einsum_matmul
test
def _einsum_matmul(cls, tensor, mat, indices, shift=0, right_mul=False): """Perform a contraction using Numpy.einsum Args: tensor (np.array): a vector or matrix reshaped to a rank-N tensor. mat (np.array): a matrix reshaped to a rank-2M tensor. indices (list): tensor indices to contract with mat. shift (int): shift for indicies of tensor to contract [Default: 0]. right_mul (bool): if True right multiply tensor by mat (else left multiply) [Default: False]. Returns: Numpy.ndarray: the matrix multiplied rank-N tensor. Raises: QiskitError: if mat is not an even rank tensor. """ rank = tensor.ndim rank_mat = mat.ndim if rank_mat % 2 != 0: raise QiskitError( "Contracted matrix must have an even number of indices.") # Get einsum indices for tensor indices_tensor = list(range(rank)) for j, index in enumerate(indices): indices_tensor[index + shift] = rank + j # Get einsum indces for mat mat_contract = list(reversed(range(rank, rank + len(indices)))) mat_free = [index + shift for index in reversed(indices)] if right_mul: indices_mat = mat_contract + mat_free else: indices_mat = mat_free + mat_contract return np.einsum(tensor, indices_tensor, mat, indices_mat)
python
{ "resource": "" }
q268577
BasePolyField._deserialize
test
def _deserialize(self, value, attr, data):
    """Override ``_deserialize`` for customizing the exception raised."""
    try:
        return super()._deserialize(value, attr, data)
    except ValidationError as ex:
        # Replace the polyfield's internal selector message with a
        # friendlier one; other messages propagate unchanged.
        if 'deserialization_schema_selector' in ex.messages[0]:
            ex.messages[0] = 'Cannot find a valid schema among the choices'
        raise
python
{ "resource": "" }
q268578
BasePolyField._serialize
test
def _serialize(self, value, key, obj):
    """Override ``_serialize`` for customizing the exception raised."""
    try:
        return super()._serialize(value, key, obj)
    except TypeError as ex:
        # The polyfield selector signals a failed schema lookup via a
        # TypeError; convert it to a ValidationError for callers.
        if 'serialization_schema_selector' in str(ex):
            raise ValidationError('Data from an invalid schema')
        raise
python
{ "resource": "" }
q268579
ByType.check_type
test
def check_type(self, value, attr, data):
    """Check if at least one of the possible choices validates the value.

    Possible choices are assumed to be ``ModelTypeValidator`` fields.
    """
    # Return on the first choice that accepts the value.
    for candidate in self.choices:
        if not isinstance(candidate, ModelTypeValidator):
            continue
        try:
            return candidate.check_type(value, attr, data)
        except ValidationError:
            pass
    # No choice matched: report all candidate classes.
    raise self._not_expected_type(
        value, [candidate.__class__ for candidate in self.choices],
        fields=[self], field_names=attr, data=data)
python
{ "resource": "" }
q268580
state_fidelity
test
def state_fidelity(state1, state2):
    """Return the state fidelity between two quantum states.

    Either input may be a state vector, or a density matrix. The state
    fidelity (F) for two density matrices is defined as::

        F(rho1, rho2) = Tr[sqrt(sqrt(rho1).rho2.sqrt(rho1))] ^ 2

    For a pure state and mixed state the fidelity is given by::

        F(|psi1>, rho2) = <psi1|rho2|psi1>

    For two pure states the fidelity is given by::

        F(|psi1>, |psi2>) = |<psi1|psi2>|^2

    Args:
        state1 (array_like): a quantum state vector or density matrix.
        state2 (array_like): a quantum state vector or density matrix.

    Returns:
        array_like: The state fidelity F(state1, state2).
    """
    arr1 = np.array(state1)
    arr2 = np.array(state2)
    # Both pure states: |<psi1|psi2>|^2.
    if arr1.ndim == 1 and arr2.ndim == 1:
        return np.abs(arr2.conj().dot(arr1)) ** 2
    # One pure, one mixed: <psi|rho|psi>.
    if arr1.ndim == 1:
        return np.abs(arr1.conj().dot(arr2).dot(arr1))
    if arr2.ndim == 1:
        return np.abs(arr2.conj().dot(arr1).dot(arr2))
    # Both mixed: Uhlmann fidelity via matrix square roots.
    sqrt1 = _funm_svd(arr1, np.sqrt)
    sqrt2 = _funm_svd(arr2, np.sqrt)
    return np.linalg.norm(sqrt1.dot(sqrt2), ord='nuc') ** 2
python
{ "resource": "" }
q268581
_funm_svd
test
def _funm_svd(a, func): """Apply real scalar function to singular values of a matrix. Args: a (array_like): (N, N) Matrix at which to evaluate the function. func (callable): Callable object that evaluates a scalar function f. Returns: ndarray: funm (N, N) Value of the matrix function specified by func evaluated at `A`. """ U, s, Vh = la.svd(a, lapack_driver='gesvd') S = np.diag(func(s)) return U.dot(S).dot(Vh)
python
{ "resource": "" }
q268582
Snapshot.inverse
test
def inverse(self):
    """Special case. Return self."""
    # A snapshot performs no quantum action, so its inverse is an
    # equivalent snapshot rebuilt from the stored parameters.
    label, snapshot_type = self.params[0], self.params[1]
    return Snapshot(self.num_qubits, self.num_clbits, label, snapshot_type)
python
{ "resource": "" }
q268583
Snapshot.label
test
def label(self, name):
    """Set snapshot label to name

    Args:
        name (str or None): label to assign unitary

    Raises:
        TypeError: name is not string or None.
    """
    # Guard clause: reject anything that is not a string.
    if not isinstance(name, str):
        raise TypeError('label expects a string')
    self._label = name
python
{ "resource": "" }
q268584
QuantumChannel.is_unitary
test
def is_unitary(self, atol=None, rtol=None):
    """Return True if QuantumChannel is a unitary channel."""
    # A channel is unitary iff it converts to an Operator that is itself
    # unitary; a failed conversion means the channel is not unitary.
    try:
        return self.to_operator().is_unitary(atol=atol, rtol=rtol)
    except QiskitError:
        return False
python
{ "resource": "" }
q268585
QuantumChannel.to_operator
test
def to_operator(self):
    """Try to convert channel to a unitary representation Operator."""
    # Delegate the representation conversion, then wrap the resulting
    # matrix with this channel's subsystem dimensions.
    matrix = _to_operator(self.rep, self._data, *self.dim)
    return Operator(matrix, self.input_dims(), self.output_dims())
python
{ "resource": "" }
q268586
QuantumChannel.to_instruction
test
def to_instruction(self):
    """Convert to a Kraus or UnitaryGate circuit instruction.

    If the channel is unitary it will be added as a unitary gate,
    otherwise it will be added as a kraus simulator instruction.

    Returns:
        Instruction: A kraus instruction for the channel.

    Raises:
        QiskitError: if input data is not an N-qubit CPTP quantum channel.
    """
    from qiskit.circuit.instruction import Instruction
    # The channel must be square and act on a power-of-two dimension.
    n_qubits = int(np.log2(self._input_dim))
    if self._input_dim != self._output_dim or 2**n_qubits != self._input_dim:
        raise QiskitError(
            'Cannot convert QuantumChannel to Instruction: channel is not an N-qubit channel.'
        )
    if not self.is_cptp():
        raise QiskitError(
            'Cannot convert QuantumChannel to Instruction: channel is not CPTP.'
        )
    # A CPTP channel has a single canonical set of Kraus operators.
    kraus_ops, _ = _to_kraus(self.rep, self._data, *self.dim)
    if len(kraus_ops) == 1:
        # A single Kraus operator means the channel is unitary, so emit
        # a UnitaryGate via the Operator conversion path instead.
        return Operator(kraus_ops[0]).to_instruction()
    return Instruction('kraus', n_qubits, 0, kraus_ops)
python
{ "resource": "" }
q268587
QuantumChannel._init_transformer
test
def _init_transformer(cls, data):
    """Convert input into a QuantumChannel subclass or Operator object.

    Args:
        data: a QuantumChannel, an object exposing a conversion hook
            (``to_quantumchannel`` or the legacy ``to_channel``), or raw
            matrix data accepted by Operator.

    Returns:
        QuantumChannel or Operator: the converted object.
    """
    # If the input is already a QuantumChannel subclass return it as-is.
    if isinstance(data, QuantumChannel):
        return data
    if hasattr(data, 'to_quantumchannel'):
        # Give preference to a 'to_quantumchannel' attribute that allows
        # an arbitrary object to define its own conversion to any
        # quantum channel subclass.
        # BUG FIX: this branch previously called data.to_channel() even
        # though it had just checked for 'to_quantumchannel'.
        return data.to_quantumchannel()
    if hasattr(data, 'to_channel'):
        # Legacy conversion hook used by current versions of Aer. It
        # should be removed once Aer is updated to use
        # 'to_quantumchannel' instead of 'to_channel'.
        return data.to_channel()
    # Finally, try to initialize the input as a matrix Operator, which
    # can itself be converted into a QuantumChannel.
    return Operator(data)
python
{ "resource": "" }
q268588
sort_enum_for_model
test
def sort_enum_for_model(cls, name=None, symbol_name=_symbol_name):
    """Create a Graphene Enum for sorting a SQLAlchemy model query.

    Parameters
    - cls : SQLAlchemy model class used to create the sort enumerator.
    - name : str, optional, default None. Name for the enumerator; when
      omitted it is set to ``cls.__name__ + 'SortEnum'``.
    - symbol_name : function, optional, default ``_symbol_name``. Maps a
      column name plus an ascending/descending flag to the enum symbol
      name; the default produces e.g. 'foo_asc' and 'foo_desc' for a
      column named 'foo'.

    Returns
    - Enum : the Graphene enumerator.
    """
    # The helper also returns a custom-sort marker; only the enum is
    # exposed here.
    return _sort_enum_for_model(cls, name, symbol_name)[0]
python
{ "resource": "" }
q268589
patch_strptime
test
def patch_strptime():
    """Monkey patch ``_strptime`` so date parsing is locale independent.

    Loads private copies of the ``_strptime`` and ``calendar`` modules,
    forces the reported language to English and replaces the calendar
    name tables with lowercase English names, so that date strings
    already translated to English parse correctly even when the system
    locale is e.g. fr_FR.

    Returns:
        callable: the patched ``_strptime_time`` function.
    """
    _strptime = imp.load_module(
        'strptime_patched', *imp.find_module('_strptime')
    )
    # BUG FIX: the calendar copy must come from the 'calendar' module;
    # previously this line reloaded '_strptime' a second time instead.
    _calendar = imp.load_module(
        'calendar_patched', *imp.find_module('calendar')
    )
    _strptime._getlang = lambda: ('en_US', 'UTF-8')
    _strptime.calendar = _calendar
    # Lowercase English names so translated date strings match directly.
    _strptime.calendar.day_abbr = [
        'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'
    ]
    _strptime.calendar.day_name = [
        'monday', 'tuesday', 'wednesday', 'thursday', 'friday',
        'saturday', 'sunday'
    ]
    _strptime.calendar.month_abbr = [
        '', 'jan', 'feb', 'mar', 'apr', 'may', 'jun',
        'jul', 'aug', 'sep', 'oct', 'nov', 'dec'
    ]
    _strptime.calendar.month_name = [
        '', 'january', 'february', 'march', 'april', 'may', 'june',
        'july', 'august', 'september', 'october', 'november', 'december'
    ]
    return _strptime._strptime_time
python
{ "resource": "" }
q268590
LocaleDataLoader.get_locale_map
test
def get_locale_map(self, languages=None, locales=None, region=None,
                   use_given_order=False, allow_conflicting_locales=False):
    """Get an ordered mapping from locale codes to locale instances.

    :param languages:
        A list of language codes, e.g. ['en', 'es', 'zh-Hant']. When
        locales are not given, languages and region are combined to
        construct the locales to load.
    :type languages: list
    :param locales:
        A list of codes of locales to load, e.g. ['fr-PF', 'qu-EC', 'af-NA'].
    :type locales: list
    :param region:
        A region code, e.g. 'IN', '001', 'NE'. When locales are not
        given, languages and region are combined to construct the
        locales to load.
    :type region: str|unicode
    :param use_given_order:
        If True, the returned mapping is ordered as the locales were given.
    :type use_given_order: bool
    :param allow_conflicting_locales:
        If True, locales with the same language and different regions
        can be loaded.
    :type allow_conflicting_locales: bool
    :return: ordered locale code to locale instance mapping
    """
    pairs = self._load_data(
        languages=languages, locales=locales, region=region,
        use_given_order=use_given_order,
        allow_conflicting_locales=allow_conflicting_locales)
    return OrderedDict(pairs)
python
{ "resource": "" }
q268591
LocaleDataLoader.get_locales
test
def get_locales(self, languages=None, locales=None, region=None,
                use_given_order=False, allow_conflicting_locales=False):
    """Yield locale instances.

    :param languages:
        A list of language codes, e.g. ['en', 'es', 'zh-Hant']. When
        locales are not given, languages and region are combined to
        construct the locales to load.
    :type languages: list
    :param locales:
        A list of codes of locales to load, e.g. ['fr-PF', 'qu-EC', 'af-NA'].
    :type locales: list
    :param region:
        A region code, e.g. 'IN', '001', 'NE'. When locales are not
        given, languages and region are combined to construct the
        locales to load.
    :type region: str|unicode
    :param use_given_order:
        If True, the locales are yielded in the order they were given.
    :type use_given_order: bool
    :param allow_conflicting_locales:
        If True, locales with the same language and different regions
        can be loaded.
    :type allow_conflicting_locales: bool
    :yield: locale instances
    """
    loaded = self._load_data(
        languages=languages, locales=locales, region=region,
        use_given_order=use_given_order,
        allow_conflicting_locales=allow_conflicting_locales)
    # _load_data yields (shortname, locale) pairs; only the locale is needed.
    for pair in loaded:
        yield pair[1]
python
{ "resource": "" }
q268592
Dictionary.are_tokens_valid
test
def are_tokens_valid(self, tokens):
    """Check that every token is known to the locale.

    A token is valid when it matches the relative-date regex, is present
    in the dictionary, or is purely numeric.

    :param tokens: a list of string or unicode tokens.
    :type tokens: list
    :return: True if all tokens are valid, False otherwise.
    """
    match_relative_regex = self._get_match_relative_regex_cache()
    return all(
        match_relative_regex.match(token) or token in self or token.isdigit()
        for token in tokens
    )
python
{ "resource": "" }
q268593
Dictionary.split
test
def split(self, string, keep_formatting=False):
    """Split the date string using translations in locale info.

    :param string: Date string to be splitted.
    :type string: str|unicode
    :param keep_formatting:
        If True, retain formatting of the date string.
    :type keep_formatting: bool
    :return: A list of string tokens formed after splitting the date string.
    """
    if not string:
        # Empty input is returned unchanged (including '' vs None).
        return string
    splitter = self._get_split_relative_regex_cache()
    matcher = self._get_match_relative_regex_cache()
    # Relative-date chunks are kept whole; everything else is split on
    # the known words of the locale.
    token_groups = [
        [chunk] if matcher.match(chunk)
        else self._split_by_known_words(chunk, keep_formatting)
        for chunk in splitter.split(string)
    ]
    return [token for token in chain.from_iterable(token_groups) if token]
python
{ "resource": "" }
q268594
parse
test
def parse(date_string, date_formats=None, languages=None, locales=None,
          region=None, settings=None):
    """Parse date and time from given date string.

    :param date_string:
        A string representing date and/or time in a recognizably valid format.
    :type date_string: str|unicode
    :param date_formats:
        A list of strptime-style format strings. The parser applies the
        formats one by one, taking into account the detected
        languages/locales.
    :type date_formats: list
    :param languages:
        A list of language codes, e.g. ['en', 'es', 'zh-Hant']. If
        locales are not given, languages and region are used to
        construct locales for translation.
    :type languages: list
    :param locales:
        A list of locale codes, e.g. ['fr-PF', 'qu-EC', 'af-NA']. The
        parser uses locales to translate the date string.
    :type locales: list
    :param region:
        A region code, e.g. 'IN', '001', 'NE'. If locales are not given,
        languages and region are used to construct locales for translation.
    :type region: str|unicode
    :param settings:
        Configure customized behavior using settings defined in
        :mod:`dateparser.conf.Settings`.
    :type settings: dict

    :return: a :class:`datetime <datetime.datetime>` representing the
        parsed date if successful, else None
    :rtype: :class:`datetime <datetime.datetime>`.
    :raises: ValueError - Unknown Language
    """
    # The shared module-level parser is only usable with all-default
    # arguments; any customization needs a dedicated parser instance.
    # (The list keeps the original eager evaluation of settings._default.)
    needs_custom_parser = any(
        [languages, locales, region, not settings._default])
    if needs_custom_parser:
        parser = DateDataParser(languages=languages, locales=locales,
                                region=region, settings=settings)
    else:
        parser = _default_parser
    data = parser.get_date_data(date_string, date_formats)
    return data['date_obj'] if data else None
python
{ "resource": "" }
q268595
FreshnessDateDataParser._parse_time
test
def _parse_time(self, date_string, settings):
    """Attempt to parse the time part of strings like '1 day ago, 2 PM'.

    Strips the freshness words (PATTERN) and the 'ago'/'in' markers,
    then delegates to ``time_parser``.

    :return: the parsed time on success, None when no time is found.
    """
    date_string = PATTERN.sub('', date_string)
    date_string = re.sub(r'\b(?:ago|in)\b', '', date_string)
    try:
        return time_parser(date_string)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; unparsable time is a normal outcome.
        return None
python
{ "resource": "" }
q268596
Locale.is_applicable
test
def is_applicable(self, date_string, strip_timezone=False, settings=None):
    """Check if the locale is applicable to translate date string.

    :param date_string:
        A string representing date and/or time in a recognizably valid format.
    :type date_string: str|unicode
    :param strip_timezone:
        If True, timezone is stripped from date string.
    :type strip_timezone: bool

    :return: boolean value representing if the locale is applicable for
        the date string or not.
    """
    if strip_timezone:
        date_string, _ = pop_tz_offset_from_string(date_string, as_offset=False)

    date_string = self._translate_numerals(date_string)
    if settings.NORMALIZE:
        date_string = normalize_unicode(date_string)
    date_string = self._simplify(date_string, settings=settings)

    # The locale applies when every token of the simplified string is
    # known to its dictionary.
    dictionary = self._get_dictionary(settings)
    return dictionary.are_tokens_valid(dictionary.split(date_string))
python
{ "resource": "" }
q268597
Locale.translate
test
def translate(self, date_string, keep_formatting=False, settings=None):
    """
    Translate the date string to its English equivalent.

    :param date_string:
        A string representing date and/or time in a recognizably valid format.
    :type date_string: str|unicode

    :param keep_formatting:
        If True, retain formatting of the date string after translation.
    :type keep_formatting: bool

    :return: translated date string.
    """
    # Normalize digits first, then optionally Unicode-normalize and
    # simplify before tokenizing with the locale dictionary.
    date_string = self._translate_numerals(date_string)
    if settings.NORMALIZE:
        date_string = normalize_unicode(date_string)
    date_string = self._simplify(date_string, settings=settings)
    dictionary = self._get_dictionary(settings)
    date_string_tokens = dictionary.split(date_string, keep_formatting)

    relative_translations = self._get_relative_translations(settings=settings)

    for i, word in enumerate(date_string_tokens):
        word = word.lower()
        # Apply every matching relative-date pattern to this token.
        for pattern, replacement in relative_translations.items():
            if pattern.match(word):
                date_string_tokens[i] = pattern.sub(replacement, word)
        else:
            # NOTE(review): for-else with no break — this branch always
            # runs after the inner loop, so a plain dictionary
            # translation can overwrite a relative translation applied
            # above. Presumably intentional; confirm before changing.
            if word in dictionary:
                date_string_tokens[i] = dictionary[word] or ''
    # 'in' signals a future-relative date; drop the redundant words.
    if "in" in date_string_tokens:
        date_string_tokens = self._clear_future_words(date_string_tokens)

    return self._join(list(filter(bool, date_string_tokens)),
                      separator="" if keep_formatting else " ",
                      settings=settings)
python
{ "resource": "" }
q268598
parse_with_formats
test
def parse_with_formats(date_string, date_formats, settings):
    """Parse with formats and return a dictionary with 'period' and 'date_obj'.

    Tries each strptime format in order; the first one that matches wins.

    :returns: dict with keys 'date_obj' (:class:`datetime.datetime` or
        None) and 'period' (str).
    """
    period = 'day'
    for fmt in date_formats:
        try:
            date_obj = datetime.strptime(date_string, fmt)
        except ValueError:
            continue
        if '%d' not in fmt:
            # Format has no day: use the last day of the month instead
            # of the first, because the first is usually out of range.
            period = 'month'
            date_obj = date_obj.replace(
                day=get_last_day_of_month(date_obj.year, date_obj.month))
        if '%y' not in fmt and '%Y' not in fmt:
            # Format has no year: assume the current year.
            date_obj = date_obj.replace(year=datetime.today().year)
        date_obj = apply_timezone_from_settings(date_obj, settings)
        return {'date_obj': date_obj, 'period': period}
    # No format matched.
    return {'date_obj': None, 'period': period}
python
{ "resource": "" }
q268599
ComponentFactory.get_ammo_generator
test
def get_ammo_generator(self):
    """Build and return the ammo generator for the configured source.

    Chooses between a URI-style generator (``self.uris``) and a
    file-based reader (``self.ammo_file``); exactly one of the two must
    be configured.  For ammo files with the default 'phantom' type the
    first line is sniffed to auto-detect URI-style ammo.
    """
    # Map of supported ammo file formats to their reader classes.
    af_readers = {
        'phantom': missile.AmmoFileReader,
        'slowlog': missile.SlowLogReader,
        'line': missile.LineReader,
        'uri': missile.UriReader,
        'uripost': missile.UriPostReader,
        'access': missile.AccessLogReader,
        'caseline': missile.CaseLineReader,
    }
    if self.uris and self.ammo_file:
        raise StepperConfigurationError(
            'Both uris and ammo file specified. You must specify only one of them'
        )
    elif self.uris:
        ammo_gen = missile.UriStyleGenerator(
            self.uris, self.headers, http_ver=self.http_ver)
    elif self.ammo_file:
        if self.ammo_type in af_readers:
            if self.ammo_type == 'phantom':
                # Sniff the first line: phantom-format ammo starts with a
                # digit (the chunk length); otherwise fall back to 'uri'.
                # NOTE: mutates self.ammo_type as a side effect.
                opener = resource.get_opener(self.ammo_file)
                with opener(self.use_cache) as ammo:
                    try:
                        # py2-style iterator protocol (ammo.next()).
                        if not ammo.next()[0].isdigit():
                            self.ammo_type = 'uri'
                            self.log.info(
                                "Setting ammo_type 'uri' because ammo is not started with digit and you did not specify ammo format"
                            )
                        else:
                            self.log.info(
                                "Default ammo type ('phantom') used, use 'phantom.ammo_type' option to override it"
                            )
                    except StopIteration:
                        # Empty ammo file: nothing to read at all.
                        self.log.exception(
                            "Couldn't read first line of ammo file")
                        raise AmmoFileError(
                            "Couldn't read first line of ammo file")
        else:
            raise NotImplementedError(
                'No such ammo type implemented: "%s"' % self.ammo_type)
        # Instantiate the reader for the (possibly re-detected) ammo type.
        ammo_gen = af_readers[self.ammo_type](
            self.ammo_file,
            headers=self.headers,
            http_ver=self.http_ver,
            use_cache=self.use_cache)
    else:
        raise StepperConfigurationError(
            'Ammo not found. Specify uris or ammo file')
    self.log.info("Using %s ammo reader" % type(ammo_gen).__name__)
    return ammo_gen
python
{ "resource": "" }