_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
def add_requirements(self, metadata_path):
    """Merge extra requirements from setup.cfg into the file at metadata_path."""
    extra_requirements = list(self.setupcfg_requirements())
    if not extra_requirements:
        return
    pkg_info = read_pkg_info(metadata_path)
    # setup.cfg wins over anything setup.py already wrote
    if 'Provides-Extra' in pkg_info or 'Requires-Dist' in pkg_info:
        warnings.warn('setup.cfg requirements overwrite values from setup.py')
        del pkg_info['Provides-Extra']
        del pkg_info['Requires-Dist']
    for field, value in extra_requirements:
        pkg_info[field] = value
    write_pkg_info(metadata_path, pkg_info)
"resource": ""
} |
def egg2dist(self, egginfo_path, distinfo_path):
    """Convert an .egg-info directory into a .dist-info directory.

    :param egginfo_path: path to the source .egg-info file or directory.
    :param distinfo_path: path of the .dist-info directory to create.
    :raises ValueError: if no egg metadata is found at ``egginfo_path``.
    """
    def adios(p):
        """Appropriately delete directory, file or link."""
        if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
            shutil.rmtree(p)
        elif os.path.exists(p):
            os.unlink(p)

    adios(distinfo_path)

    if not os.path.exists(egginfo_path):
        # There is no egg-info. This is probably because the egg-info
        # file/directory is not named matching the distribution name used
        # to name the archive file. Check for this case and report
        # accordingly.
        import glob
        pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
        possible = glob.glob(pat)
        err = "Egg metadata expected at %s but not found" % (egginfo_path,)
        if possible:
            alt = os.path.basename(possible[0])
            err += " (%s found - possible misnamed archive file?)" % (alt,)
        raise ValueError(err)

    if os.path.isfile(egginfo_path):
        # .egg-info is a single file
        pkginfo_path = egginfo_path
        pkg_info = self._pkginfo_to_metadata(egginfo_path, egginfo_path)
        os.mkdir(distinfo_path)
    else:
        # .egg-info is a directory
        pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
        pkg_info = self._pkginfo_to_metadata(egginfo_path, pkginfo_path)
        # ignore common egg metadata that is useless to wheel
        shutil.copytree(egginfo_path, distinfo_path,
                        ignore=lambda x, y: set(('PKG-INFO',
                                                 'requires.txt',
                                                 'SOURCES.txt',
                                                 'not-zip-safe',)))

    # delete dependency_links if it is only whitespace
    # FIX: guard on existence -- in the single-file .egg-info case the fresh
    # .dist-info directory is empty (and copytree only copies what exists),
    # so opening unconditionally raised FileNotFoundError.
    dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
    if os.path.exists(dependency_links_path):
        with open(dependency_links_path, 'r') as dependency_links_file:
            dependency_links = dependency_links_file.read().strip()
        if not dependency_links:
            adios(dependency_links_path)

    write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)
    # XXX deprecated. Still useful for current distribute/setuptools.
    metadata_path = os.path.join(distinfo_path, 'METADATA')
    self.add_requirements(metadata_path)

    # XXX intentionally a different path than the PEP.
    metadata_json_path = os.path.join(distinfo_path, 'metadata.json')
    pymeta = pkginfo_to_dict(metadata_path,
                             distribution=self.distribution)

    if 'description' in pymeta:
        # long description goes into its own file, not the JSON metadata
        description_filename = 'DESCRIPTION.rst'
        description_text = pymeta.pop('description')
        description_path = os.path.join(distinfo_path,
                                        description_filename)
        with open(description_path, "wb") as description_file:
            description_file.write(description_text.encode('utf-8'))
        pymeta['extensions']['python.details']['document_names']['description'] = description_filename

    # XXX heuristically copy any LICENSE/LICENSE.txt?
    # renamed local from `license` to avoid shadowing the builtin
    license_path = self.license_file()
    if license_path:
        license_filename = 'LICENSE.txt'
        # NOTE(review): every other write targets distinfo_path; assumes
        # self.distinfo_dir is the same directory -- confirm against class.
        shutil.copy(license_path, os.path.join(self.distinfo_dir, license_filename))
        pymeta['extensions']['python.details']['document_names']['license'] = license_filename

    with open(metadata_json_path, "w") as metadata_json:
        json.dump(pymeta, metadata_json, sort_keys=True)

    adios(egginfo_path)
"resource": ""
} |
def text(text: str, speak: str = None, input_hint: Union[InputHints, str] = InputHints.accepting_input) -> Activity:
    """
    Returns a simple text message.

    :Example:
    message = MessageFactory.text('Greetings from example message')
    await context.send_activity(message)

    :param text: text content of the message.
    :param speak: optional spoken (SSML) text.
    :param input_hint: input hint to send to the channel.
    :return: a message Activity.
    """
    activity = Activity(type=ActivityTypes.message, text=text, input_hint=input_hint)
    if speak:
        activity.speak = speak
    return activity
"resource": ""
} |
def suggested_actions(actions: List[CardAction], text: str = None, speak: str = None,
                      input_hint: Union[InputHints, str] = InputHints.accepting_input) -> Activity:
    """
    Returns a message that includes a set of suggested actions and optional text.

    :Example:
    message = MessageFactory.suggested_actions([CardAction(title='a', type=ActionTypes.im_back, value='a'),
                                                CardAction(title='b', type=ActionTypes.im_back, value='b'),
                                                CardAction(title='c', type=ActionTypes.im_back, value='c')], 'Choose a color')
    await context.send_activity(message)

    :param actions: card actions to suggest.
    :param text: optional message text.
    :param speak: optional spoken (SSML) text.
    :param input_hint: input hint to send to the channel.
    :return: a message Activity.
    """
    wrapped = SuggestedActions(actions=actions)
    activity = Activity(type=ActivityTypes.message, input_hint=input_hint, suggested_actions=wrapped)
    if text:
        activity.text = text
    if speak:
        activity.speak = speak
    return activity
"resource": ""
} |
def attachment(attachment: Attachment, text: str = None, speak: str = None,
               input_hint: Union[InputHints, str] = None):
    """
    Returns a single message activity containing an attachment.

    :Example:
    message = MessageFactory.attachment(CardFactory.hero_card(HeroCard(title='White T-Shirt',
                                        images=[CardImage(url='https://example.com/whiteShirt.jpg')],
                                        buttons=[CardAction(title='buy')])))
    await context.send_activity(message)

    :param attachment: the attachment to send.
    :param text: optional message text.
    :param speak: optional spoken (SSML) text.
    :param input_hint: input hint to send to the channel.
    :return: a message Activity with the attachment in list layout.
    """
    # delegate to the shared helper with a single-element attachment list
    return attachment_activity(AttachmentLayoutTypes.list, [attachment], text, speak, input_hint)
"resource": ""
} |
def list(attachments: List[Attachment], text: str = None, speak: str = None,
         input_hint: Union[InputHints, str] = None) -> Activity:
    """
    Returns a message that will display a set of attachments in list form.

    :Example:
    message = MessageFactory.list([CardFactory.hero_card(HeroCard(title='title1',
                                   images=[CardImage(url='imageUrl1')],
                                   buttons=[CardAction(title='button1')])),
                                   CardFactory.hero_card(HeroCard(title='title2',
                                   images=[CardImage(url='imageUrl2')],
                                   buttons=[CardAction(title='button2')])),
                                   CardFactory.hero_card(HeroCard(title='title3',
                                   images=[CardImage(url='imageUrl3')],
                                   buttons=[CardAction(title='button3')]))])
    await context.send_activity(message)

    :param attachments: attachments to render as a list.
    :param text: optional message text.
    :param speak: optional spoken (SSML) text.
    :param input_hint: input hint to send to the channel.
    :return: a message Activity in list layout.
    """
    # forward to the shared helper with the list layout
    return attachment_activity(AttachmentLayoutTypes.list, attachments, text, speak, input_hint)
"resource": ""
} |
def content_url(url: str, content_type: str, name: str = None, text: str = None, speak: str = None,
                input_hint: Union[InputHints, str] = None):
    """
    Returns a message that will display a single image or video to a user.

    :Example:
    message = MessageFactory.content_url('https://example.com/hawaii.jpg', 'image/jpeg',
                                         'Hawaii Trip', 'A photo from our family vacation.')
    await context.send_activity(message)

    :param url: URL of the media content.
    :param content_type: MIME type of the content.
    :param name: optional attachment name.
    :param text: optional message text.
    :param speak: optional spoken (SSML) text.
    :param input_hint: input hint to send to the channel.
    :return: a message Activity with the media attachment.
    """
    media = Attachment(content_type=content_type, content_url=url)
    if name:
        media.name = name
    return attachment_activity(AttachmentLayoutTypes.list, [media], text, speak, input_hint)
"resource": ""
} |
def create_trace(
    turn_activity: Activity,
    name: str,
    value: object = None,
    value_type: str = None,
    label: str = None,
) -> Activity:
    """Creates a trace activity based on this activity.

    :param turn_activity: the activity the trace is derived from.
    :type turn_activity: Activity
    :param name: The value to assign to the trace activity's <see cref="Activity.name"/> property.
    :type name: str
    :param value: The value to assign to the trace activity's <see cref="Activity.value"/> property., defaults to None
    :param value: object, optional
    :param value_type: The value to assign to the trace activity's <see cref="Activity.value_type"/> property, defaults to None
    :param value_type: str, optional
    :param label: The value to assign to the trace activity's <see cref="Activity.label"/> property, defaults to None
    :param label: str, optional
    :return: The created trace activity.
    :rtype: Activity
    """
    # The trace is "from" the original recipient (i.e. the bot) ...
    recipient = turn_activity.recipient
    if recipient is not None:
        trace_from = ChannelAccount(id=recipient.id, name=recipient.name)
    else:
        trace_from = ChannelAccount()

    # ... default the value_type to the Python type name of the value
    if value_type is None and value is not None:
        value_type = type(value).__name__

    return Activity(
        type=ActivityTypes.trace,
        timestamp=datetime.utcnow(),
        from_property=trace_from,
        recipient=ChannelAccount(
            id=turn_activity.from_property.id, name=turn_activity.from_property.name
        ),
        reply_to_id=turn_activity.id,
        service_url=turn_activity.service_url,
        channel_id=turn_activity.channel_id,
        conversation=ConversationAccount(
            is_group=turn_activity.conversation.is_group,
            id=turn_activity.conversation.id,
            name=turn_activity.conversation.name,
        ),
        name=name,
        label=label,
        value_type=value_type,
        value=value,
    )
"resource": ""
} |
q268108 | Dialog.telemetry_client | test | def telemetry_client(self, value: BotTelemetryClient) -> None:
"""
Sets the telemetry client for logging events.
"""
if value is None:
self._telemetry_client = NullTelemetryClient()
else:
self._telemetry_client = value | python | {
"resource": ""
} |
async def read(self, keys: List[str]) -> dict:
    """Read storeitems from storage.

    :param keys: keys of the items to fetch.
    :return dict: mapping of original key -> StoreItem.
    """
    try:
        # check if the database and container exists and if not create
        if not self.__container_exists:
            self.__create_db_and_container()
        if len(keys) == 0:
            raise Exception('cosmosdb_storage.read(): \
provide at least one key')
        # one named parameter per (sanitized) key
        parameters = [
            {'name': f'@id{i}', 'value': f'{self.__sanitize_key(key)}'}
            for i, key in enumerate(keys)
        ]
        parameter_sequence = ','.join(param.get('name')
                                      for param in parameters)
        # assemble the parameterized cross-partition query
        query = {
            "query":
                f"SELECT c.id, c.realId, c.document, c._etag \
FROM c WHERE c.id in ({parameter_sequence})",
            "parameters": parameters
        }
        options = {'enableCrossPartitionQuery': True}
        results = list(
            self.client.QueryItems(
                self.__container_link, query, options)
        )
        # key each StoreItem by the original (unsanitized) id
        return {
            r.get('realId'): self.__create_si(r) for r in results
        }
    except TypeError as e:
        raise e
"resource": ""
} |
async def write(self, changes: Dict[str, StoreItem]):
    """Save storeitems to storage.

    :param changes: mapping of key -> StoreItem to persist.
    :return:
    """
    try:
        # check if the database and container exists and if not create
        if not self.__container_exists:
            self.__create_db_and_container()
        for key, change in changes.items():
            e_tag = change.e_tag
            doc = {
                'id': self.__sanitize_key(key),
                'realId': key,
                'document': self.__create_dict(change),
            }
            if e_tag == '*' or not e_tag:
                # '*' (or missing) etag means a new document: plain upsert
                self.client.UpsertItem(
                    database_or_Container_link=self.__container_link,
                    document=doc,
                    options={'disableAutomaticIdGeneration': True}
                )
            elif len(e_tag) > 0:
                # optimistic-concurrency replace keyed on the etag
                access_condition = {'type': 'IfMatch', 'condition': e_tag}
                self.client.ReplaceItem(
                    document_link=self.__item_link(
                        self.__sanitize_key(key)),
                    new_document=doc,
                    options={'accessCondition': access_condition}
                )
            else:
                raise Exception('cosmosdb_storage.write(): etag missing')
    except Exception as e:
        raise e
"resource": ""
} |
async def delete(self, keys: List[str]):
    """Remove storeitems from storage.

    :param keys: keys of the items to delete.
    :return:
    """
    try:
        # check if the database and container exists and if not create
        if not self.__container_exists:
            self.__create_db_and_container()
        for key in keys:
            self.client.DeleteItem(
                document_link=self.__item_link(self.__sanitize_key(key)))
    except cosmos_errors.HTTPFailure as h:
        # a 404 just means the item was already gone; ignore it
        if h.status_code != 404:
            raise h
    except TypeError as e:
        raise e
"resource": ""
} |
def __create_si(self, result) -> StoreItem:
    """Create a StoreItem from a result out of CosmosDB.

    :param result: raw query result row.
    :return StoreItem:
    """
    doc = result.get('document')
    # re-attach the Cosmos-managed etag to the stored document
    doc['e_tag'] = result.get('_etag')
    return StoreItem(**doc)
"resource": ""
} |
def __create_dict(self, si: StoreItem) -> Dict:
    """Return the dict of a StoreItem.

    This eliminates non_magic attributes and the e_tag.

    :param si: the StoreItem to serialize.
    :return dict:
    """
    # keep public attributes plus 'e_tag'; drop all other underscored names
    return {
        attr: getattr(si, attr)
        for attr in dir(si)
        if not attr.startswith('_') or attr == 'e_tag'
    }
"resource": ""
} |
def __sanitize_key(self, key) -> str:
    """Return the sanitized key.

    Replace characters that are not allowed in keys in Cosmos.

    :param key: the raw key.
    :return str:
    """
    # forbidden characters
    bad_chars = ['\\', '?', '/', '#', '\t', '\n', '\r']
    # map each forbidden character to '*' + its Unicode code point
    replacements = {ord(c): '*' + str(ord(c)) for c in bad_chars}
    return key.translate(replacements)
"resource": ""
} |
def __create_db_and_container(self):
    """Call the get or create methods for the database and container."""
    self.db = self.__get_or_create_database(self.client, self.config.database)
    self.container = self.__get_or_create_container(
        self.client, self.config.container
    )
"resource": ""
} |
def __get_or_create_database(self, doc_client, id) -> str:
    """Return the database link.

    Check if the database exists or create the db.

    :param doc_client: Cosmos document client.
    :param id: database name/id.
    :return str:
    """
    # query CosmosDB for a database with that name/id
    matches = list(doc_client.QueryDatabases({
        "query": "SELECT * FROM r WHERE r.id=@id",
        "parameters": [
            {"name": "@id", "value": id}
        ]
    }))
    # db names are unique, so any hit is the one we want
    if matches:
        return matches[0]['id']
    # create the database if it didn't exist
    created = doc_client.CreateDatabase({'id': id})
    return created['id']
"resource": ""
} |
def __get_or_create_container(self, doc_client, container) -> str:
    """Return the container link.

    Check if the container exists or create the container.

    :param doc_client: Cosmos document client.
    :param container: container name.
    :return str:
    """
    # query CosmosDB for a container in the database with that name
    matches = list(doc_client.QueryContainers(
        self.__database_link,
        {
            "query": "SELECT * FROM r WHERE r.id=@id",
            "parameters": [
                {"name": "@id", "value": container}
            ]
        }
    ))
    # container names are unique, so any hit is the one we want
    if matches:
        return matches[0]['id']
    # Create a container if it didn't exist
    created = doc_client.CreateContainer(
        self.__database_link, {'id': container})
    return created['id']
"resource": ""
} |
def fill_qna_event(
    self,
    query_results: [QueryResult],
    turn_context: TurnContext,
    telemetry_properties: Dict[str, str] = None,
    telemetry_metrics: Dict[str, float] = None
) -> EventData:
    """
    Fills the event properties and metrics for the QnaMessage event for telemetry.

    :return: A tuple of event data properties and metrics that will be sent to the BotTelemetryClient.track_event() method for the QnAMessage event. The properties and metrics returned the standard properties logged with any properties passed from the get_answers() method.
    :rtype: EventData
    """
    properties: Dict[str, str] = {}
    metrics: Dict[str, float] = {}

    properties[QnATelemetryConstants.knowledge_base_id_property] = self._endpoint.knowledge_base_id

    text: str = turn_context.activity.text
    user_name: str = turn_context.activity.from_property.name
    # Use the LogPersonalInformation flag to toggle logging PII data; text and username are common examples.
    if self.log_personal_information:
        if text:
            properties[QnATelemetryConstants.question_property] = text
        if user_name:
            properties[QnATelemetryConstants.username_property] = user_name

    # Fill in Qna Results (found or not).
    if query_results:
        top = query_results[0]
        properties.update({
            QnATelemetryConstants.matched_question_property: json.dumps(top.questions),
            QnATelemetryConstants.question_id_property: str(top.id),
            QnATelemetryConstants.answer_property: top.answer,
            QnATelemetryConstants.score_metric: top.score,
            QnATelemetryConstants.article_found_property: 'true'
        })
    else:
        properties.update({
            QnATelemetryConstants.matched_question_property: 'No Qna Question matched',
            QnATelemetryConstants.question_id_property: 'No Qna Question Id matched',
            QnATelemetryConstants.answer_property: 'No Qna Answer matched',
            QnATelemetryConstants.article_found_property: 'false'
        })

    # Additional Properties can override "stock" properties.
    if telemetry_properties:
        properties.update(telemetry_properties)
    # Additional Metrics can override "stock" metrics.
    if telemetry_metrics:
        metrics.update(telemetry_metrics)

    return EventData(properties=properties, metrics=metrics)
"resource": ""
} |
def get_conversation_reference(activity: Activity) -> ConversationReference:
    """
    Returns the conversation reference for an activity. This can be saved as a plain old JSON
    object and then later used to message the user proactively.

    Usage Example:
    reference = TurnContext.get_conversation_reference(context.request)

    :param activity: activity to derive the reference from.
    :return: a ConversationReference with copied endpoints.
    """
    # copy() so later mutation of the activity doesn't alter the reference
    return ConversationReference(
        activity_id=activity.id,
        user=copy(activity.from_property),
        bot=copy(activity.recipient),
        conversation=copy(activity.conversation),
        channel_id=activity.channel_id,
        service_url=activity.service_url,
    )
"resource": ""
} |
def get_step_name(self, index: int) -> str:
    """
    Give the waterfall step a unique name.

    Falls back to a positional "StepXofY" name when the step callable has no
    usable qualified name (e.g. lambdas / locals contain '>').
    """
    name = self._steps[index].__qualname__
    if not name or ">" in name:
        return f"Step{index + 1}of{len(self._steps)}"
    return name
"resource": ""
} |
def supports_suggested_actions(channel_id: str, button_cnt: int = 100) -> bool:
    """Determine if a number of Suggested Actions are supported by a Channel.

    Args:
        channel_id (str): The Channel to check the if Suggested Actions are supported in.
        button_cnt (int, optional): Defaults to 100. The number of Suggested Actions to check for the Channel.

    Returns:
        bool: True if the Channel supports the button_cnt total Suggested Actions, False if the Channel does not support that number of Suggested Actions.
    """
    max_actions = {
        # https://developers.facebook.com/docs/messenger-platform/send-messages/quick-replies
        Channels.facebook: 10,
        Channels.skype: 10,
        # https://developers.line.biz/en/reference/messaging-api/#items-object
        Channels.line: 13,
        # https://dev.kik.com/#/docs/messaging#text-response-object
        Channels.kik: 20,
        Channels.telegram: 100,
        Channels.slack: 100,
        Channels.emulator: 100,
        Channels.direct_line: 100,
        Channels.webchat: 100,
    }
    limit = max_actions.get(channel_id)
    # unknown channels are treated as unsupported
    return limit is not None and button_cnt <= limit
"resource": ""
} |
def supports_card_actions(channel_id: str, button_cnt: int = 100) -> bool:
    """Determine if a number of Card Actions are supported by a Channel.

    Args:
        channel_id (str): The Channel to check if the Card Actions are supported in.
        button_cnt (int, optional): Defaults to 100. The number of Card Actions to check for the Channel.

    Returns:
        bool: True if the Channel supports the button_cnt total Card Actions, False if the Channel does not support that number of Card Actions.
    """
    max_actions = {
        Channels.facebook: 3,
        Channels.skype: 3,
        Channels.ms_teams: 3,
        Channels.line: 99,
        Channels.slack: 100,
        Channels.emulator: 100,
        Channels.direct_line: 100,
        Channels.webchat: 100,
        Channels.cortana: 100,
    }
    limit = max_actions.get(channel_id)
    # unknown channels are treated as unsupported
    return limit is not None and button_cnt <= limit
"resource": ""
} |
q268123 | Channel.get_channel_id | test | def get_channel_id(turn_context: TurnContext) -> str:
"""Get the Channel Id from the current Activity on the Turn Context.
Args:
turn_context (TurnContext): The Turn Context to retrieve the Activity's Channel Id from.
Returns:
str: The Channel Id from the Turn Context's Activity.
"""
if turn_context.activity.channel_id is None:
return ""
else:
return turn_context.activity.channel_id | python | {
"resource": ""
} |
def is_token_from_emulator(auth_header: str) -> bool:
    """ Determines if a given Auth header is from the Bot Framework Emulator

    :param auth_header: Bearer Token, in the 'Bearer [Long String]' Format.
    :type auth_header: str

    :return: True, if the token was issued by the Emulator. Otherwise, false.
    """
    # The Auth Header generally looks like this:
    # "Bearer eyJ0e[...Big Long String...]XAiO"
    if not auth_header:
        # No token. Can't be an emulator token.
        return False

    parts = auth_header.split(' ')
    if len(parts) != 2:
        # Emulator tokens MUST have exactly 2 parts.
        # If we don't have 2 parts, it's not an emulator token
        return False

    auth_scheme = parts[0]
    bearer_token = parts[1]

    # We now have an array that should be:
    # [0] = "Bearer"
    # [1] = "[Big Long String]"
    if auth_scheme != 'Bearer':
        # The scheme from the emulator MUST be "Bearer"
        return False

    # Parse the Big Long String into an actual token.
    token = jwt.decode(bearer_token, verify=False)
    if not token:
        return False

    # Is there an Issuer?
    # FIX: use .get() -- token['iss'] raised KeyError on tokens with no
    # issuer claim, but the intent (per the check below) is to return False.
    issuer = token.get('iss')
    if not issuer:
        # No Issuer, means it's not from the Emulator.
        return False

    # Is the token issues by a source we consider to be the emulator?
    issuer_list = EmulatorValidation.TO_BOT_FROM_EMULATOR_TOKEN_VALIDATION_PARAMETERS.issuer
    if issuer_list and not issuer in issuer_list:
        # Not a Valid Issuer. This is NOT a Bot Framework Emulator Token.
        return False

    # The Token is from the Bot Framework Emulator. Success!
    return True
"resource": ""
} |
def hero_card(card: HeroCard) -> Attachment:
    """
    Returns an attachment for a hero card. Will raise a TypeError if 'card' argument is not a HeroCard.

    Hero cards tend to have one dominant full width image and the cards text & buttons can
    usually be found below the image.

    :return: an Attachment wrapping the hero card.
    """
    # fail fast on the wrong payload type so the error surfaces at build time
    if not isinstance(card, HeroCard):
        raise TypeError('CardFactory.hero_card(): `card` argument is not an instance of an HeroCard, '
                        'unable to prepare attachment.')
    return Attachment(content_type=CardFactory.content_types.hero_card,
                      content=card)
"resource": ""
} |
def params(self):
    """return instruction params"""
    # if params already defined don't attempt to get them from definition
    if self._definition and not self._params:
        self._params = []
        for sub_instr, _, _ in self._definition:
            self._params.extend(sub_instr.params)  # recursive call
    return self._params
"resource": ""
} |
def mirror(self):
    """For a composite instruction, reverse the order of sub-gates.

    This is done by recursively mirroring all sub-instructions.
    It does not invert any gate.

    Returns:
        Instruction: a fresh gate with sub-gates reversed
    """
    if not self._definition:
        # primitive instruction: nothing to reverse
        return self.copy()
    mirrored = self.copy(name=self.name + '_mirror')
    mirrored.definition = []
    for sub_inst, qargs, cargs in reversed(self._definition):
        mirrored._definition.append((sub_inst.mirror(), qargs, cargs))
    return mirrored
"resource": ""
} |
def inverse(self):
    """Invert this instruction.

    If the instruction is composite (i.e. has a definition),
    then its definition will be recursively inverted.

    Special instructions inheriting from Instruction can
    implement their own inverse (e.g. T and Tdg, Barrier, etc.)

    Returns:
        Instruction: a fresh instruction for the inverse

    Raises:
        QiskitError: if the instruction is not composite
            and an inverse has not been implemented for it.
    """
    if not self.definition:
        raise QiskitError("inverse() not implemented for %s." % self.name)
    inverted = self.copy(name=self.name + '_dg')
    inverted._definition = []
    # invert each sub-instruction and reverse their order
    for sub_inst, qargs, cargs in reversed(self._definition):
        inverted._definition.append((sub_inst.inverse(), qargs, cargs))
    return inverted
"resource": ""
} |
def c_if(self, classical, val):
    """Add classical control on register classical and value val.

    Returns self so calls can be chained fluently.
    """
    # validate inputs before mutating state
    if not isinstance(classical, ClassicalRegister):
        raise QiskitError("c_if must be used with a classical register")
    if val < 0:
        raise QiskitError("control value should be non-negative")
    self.control = (classical, val)
    return self
"resource": ""
} |
def copy(self, name=None):
    """
    shallow copy of the instruction.

    Args:
        name (str): name to be given to the copied circuit,
            if None then the name stays the same

    Returns:
        Instruction: a shallow copy of the current instruction, with the name
        updated if it was provided
    """
    duplicate = copy.copy(self)
    if name:
        duplicate.name = name
    return duplicate
"resource": ""
} |
def _qasmif(self, string):
    """Prefix string with an OpenQASM if-clause when the instruction is conditioned."""
    if self.control is None:
        return string
    register, value = self.control
    return "if(%s==%d) " % (register.name, value) + string
"resource": ""
} |
def qasm(self):
    """Return a default OpenQASM string for the instruction.

    Derived instructions may override this to print in a
    different format (e.g. measure q[0] -> c[0];).
    """
    rendered = self.name
    if self.params:
        # append the parameter list: name(p1,p2,...)
        rendered = "%s(%s)" % (rendered,
                               ",".join(str(p) for p in self.params))
    return self._qasmif(rendered)
"resource": ""
} |
def run(self, circuit):
    """Run all the passes on a QuantumCircuit.

    Args:
        circuit (QuantumCircuit): circuit to transform via all the registered passes

    Returns:
        QuantumCircuit: Transformed circuit.
    """
    circuit_name = circuit.name
    dag = circuit_to_dag(circuit)
    del circuit  # free the original; we only work on the DAG from here
    for passset in self.working_list:
        for pass_ in passset:
            dag = self._do_pass(pass_, dag, passset.options)
    transformed = dag_to_circuit(dag)
    transformed.name = circuit_name
    return transformed
"resource": ""
} |
def _do_pass(self, pass_, dag, options):
    """Do a pass and its "requires".

    Args:
        pass_ (BasePass): Pass to do.
        dag (DAGCircuit): The dag on which the pass is ran.
        options (dict): PassManager options.

    Returns:
        DAGCircuit: The transformed dag in case of a transformation pass.
        The same input dag in case of an analysis pass.

    Raises:
        TranspilerError: If the pass is not a proper pass instance.
    """
    # First, do the requires of pass_
    if not options["ignore_requires"]:
        for required_pass in pass_.requires:
            dag = self._do_pass(required_pass, dag, options)

    # Run the pass itself, if not already run
    if pass_ not in self.valid_passes:
        if pass_.is_transformation_pass:
            pass_.property_set = self.fenced_property_set
            new_dag = pass_.run(dag)
            if not isinstance(new_dag, DAGCircuit):
                raise TranspilerError("Transformation passes should return a transformed dag."
                                      "The pass %s is returning a %s" % (type(pass_).__name__,
                                                                         type(new_dag)))
            dag = new_dag
        elif pass_.is_analysis_pass:
            # analysis passes may write the property set but not the dag
            pass_.property_set = self.property_set
            pass_.run(FencedDAGCircuit(dag))
        else:
            raise TranspilerError("I dont know how to handle this type of pass")

        # update the valid_passes property
        self._update_valid_passes(pass_, options['ignore_preserves'])

    return dag
"resource": ""
} |
def passes(self):
    """
    Returns a list structure of the appended passes and its options.

    Returns (list): The appended passes.
    """
    return [passset.dump_passes() for passset in self.working_list]
"resource": ""
} |
def dump_passes(self):
    """
    Fetches the passes added to this flow controller.

    Returns (dict): {'options': self.options, 'passes': [passes], 'type': type(self)}
    """
    # nested controllers are expanded recursively; plain passes are kept as-is
    return {
        'options': self.options,
        'passes': [item.dump_passes() if isinstance(item, FlowController) else item
                   for item in self._passes],
        'type': type(self),
    }
"resource": ""
} |
def controller_factory(cls, passes, options, **partial_controller):
    """
    Constructs a flow controller based on the partially evaluated controller arguments.

    Args:
        passes (list[BasePass]): passes to add to the flow controller.
        options (dict): PassManager options.
        **partial_controller (dict): Partially evaluated controller arguments in the form
            `{name:partial}`

    Raises:
        TranspilerError: When partial_controller is not well-formed.

    Returns:
        FlowController: A FlowController instance.
    """
    if None in partial_controller.values():
        raise TranspilerError('The controller needs a condition.')
    if not partial_controller:
        # no condition given: fall back to plain linear execution
        return FlowControllerLinear(passes, options)
    for controller_name in cls.registered_controllers:
        if controller_name in partial_controller:
            return cls.registered_controllers[controller_name](passes, options,
                                                               **partial_controller)
    raise TranspilerError("The controllers for %s are not registered" % partial_controller)
"resource": ""
} |
def u_base(self, theta, phi, lam, q):
    """Apply the base U(theta, phi, lam) gate to qubit q."""
    return self.append(UBase(theta, phi, lam), [q], [])
"resource": ""
} |
def single_gate_params(gate, params=None):
    """Apply a single qubit gate to the qubit.

    Args:
        gate(str): the single qubit gate name
        params(list): the operation parameters op['params']

    Returns:
        tuple: a tuple of U gate parameters (theta, phi, lam)

    Raises:
        QiskitError: if the gate name is not valid
    """
    # guard clauses: each supported gate maps onto U(theta, phi, lam)
    if gate in ('U', 'u3'):
        return params[0], params[1], params[2]
    if gate == 'u2':
        return np.pi / 2, params[0], params[1]
    if gate == 'u1':
        return 0, 0, params[0]
    if gate == 'id':
        return 0, 0, 0
    raise QiskitError('Gate is not among the valid types: %s' % gate)
"resource": ""
} |
def single_gate_matrix(gate, params=None):
    """Get the matrix for a single qubit.

    Args:
        gate(str): the single qubit gate name
        params(list): the operation parameters op['params']

    Returns:
        array: A numpy array representing the matrix
    """
    # Converting sym to floats improves the performance of the simulator 10x.
    # This a is a probable a FIXME since it might show bugs in the simulator.
    (theta, phi, lam) = map(float, single_gate_params(gate, params))

    half_cos = np.cos(theta / 2)
    half_sin = np.sin(theta / 2)
    return np.array([
        [half_cos, -np.exp(1j * lam) * half_sin],
        [np.exp(1j * phi) * half_sin, np.exp(1j * phi + 1j * lam) * half_cos],
    ])
"resource": ""
} |
def einsum_matmul_index(gate_indices, number_of_qubits):
    """Return the index string for Numpy.eignsum matrix-matrix multiplication.

    The returned indices are to perform a matrix multiplication A.B where
    the matrix A is an M-qubit matrix, matrix B is an N-qubit matrix, and
    M <= N, and identity matrices are implied on the subsystems where A has no
    support on B.

    Args:
        gate_indices (list[int]): the indices of the right matrix subsystems
                                  to contract with the left matrix.
        number_of_qubits (int): the total number of qubits for the right matrix.

    Returns:
        str: An indices string for the Numpy.einsum function.
    """
    mat_l, mat_r, tens_lin, tens_lout = _einsum_matmul_index_helper(gate_indices,
                                                                    number_of_qubits)

    # Right indices for the N-qubit input and output tensor
    tens_r = ascii_uppercase[:number_of_qubits]

    # Combine indices into matrix multiplication string format
    # for numpy.einsum function
    return (f"{mat_l}{mat_r}, "
            f"{tens_lin}{tens_r}->{tens_lout}{tens_r}")
"resource": ""
} |
def einsum_vecmul_index(gate_indices, number_of_qubits):
    """Return the index string for np.einsum matrix-vector multiplication.

    The returned indices are to perform a matrix multiplication A.v where
    the matrix A is an M-qubit matrix, vector v is an N-qubit vector, and
    M <= N, and identity matrices are implied on the subsystems where A has no
    support on v.

    Args:
        gate_indices (list[int]): the indices of the right matrix subsystems
                                  to contract with the left matrix.
        number_of_qubits (int): the total number of qubits for the right matrix.

    Returns:
        str: An indices string for the np.einsum function.
    """
    mat_l, mat_r, tens_lin, tens_lout = _einsum_matmul_index_helper(gate_indices,
                                                                    number_of_qubits)

    # Combine indices into matrix multiplication string format
    # for numpy.einsum function
    return "{mat_l}{mat_r}, ".format(mat_l=mat_l, mat_r=mat_r) + \
           "{tens_lin}->{tens_lout}".format(tens_lin=tens_lin,
                                            tens_lout=tens_lout)
"resource": ""
} |
q268143 | _einsum_matmul_index_helper | test | def _einsum_matmul_index_helper(gate_indices, number_of_qubits):
"""Return the index string for Numpy.eignsum matrix multiplication.
The returned indices are to perform a matrix multiplication A.v where
the matrix A is an M-qubit matrix, matrix v is an N-qubit vector, and
M <= N, and identity matrices are implied on the subsystems where A has no
support on v.
Args:
gate_indices (list[int]): the indices of the right matrix subsystems
to contract with the left matrix.
number_of_qubits (int): the total number of qubits for the right matrix.
Returns:
tuple: (mat_left, mat_right, tens_in, tens_out) of index strings for
that may be combined into a Numpy.einsum function string.
Raises:
QiskitError: if the total number of qubits plus the number of
contracted indices is greater than 26.
"""
# Since we use ASCII alphabet for einsum index labels we are limited
# to 26 total free left (lowercase) and 26 right (uppercase) indexes.
# The rank of the contracted tensor reduces this as we need to use that
# many characters for the contracted indices
if len(gate_indices) + number_of_qubits > 26:
raise QiskitError("Total number of free indexes limited to 26")
# Indicies for N-qubit input tensor
tens_in = ascii_lowercase[:number_of_qubits]
# Indices for the N-qubit output tensor
tens_out = list(tens_in)
# Left and right indices for the M-qubit multiplying tensor
mat_left = ""
mat_right = ""
# Update left indices for mat and output
for pos, idx in enumerate(reversed(gate_indices)):
mat_left += ascii_lowercase[-1 - pos]
mat_right += tens_in[-1 - idx]
tens_out[-1 - idx] = ascii_lowercase[-1 - pos]
tens_out = "".join(tens_out)
# Combine indices into matrix multiplication string format
# for numpy.einsum function
return mat_left, mat_right, tens_in, tens_out | python | {
"resource": ""
} |
def circuit_to_dag(circuit):
    """Build a ``DAGCircuit`` object from a ``QuantumCircuit``.

    Args:
        circuit (QuantumCircuit): the input circuit.

    Return:
        DAGCircuit: the DAG representing the input circuit.
    """
    dag = DAGCircuit()
    dag.name = circuit.name

    # Registers are copied over verbatim, in circuit order.
    for qreg in circuit.qregs:
        dag.add_qreg(qreg)
    for creg in circuit.cregs:
        dag.add_creg(creg)

    # Append each instruction, preserving any classical-control condition.
    for instruction, qargs, cargs in circuit.data:
        if instruction.control is None:
            condition = None
        else:
            condition = (instruction.control[0], instruction.control[1])
        dag.apply_operation_back(instruction.copy(), qargs, cargs, condition)
    return dag
"resource": ""
} |
def exp_fit_fun(x, a, tau, c):
    """Exponential decay model ``a * exp(-x / tau) + c`` used for curve fits."""
    # pylint: disable=invalid-name
    decay = np.exp(-x / tau)
    return c + a * decay
"resource": ""
} |
def osc_fit_fun(x, a, tau, f, phi, c):
    """Damped-cosine model ``a * exp(-x/tau) * cos(2*pi*f*x + phi) + c``."""
    # pylint: disable=invalid-name
    envelope = a * np.exp(-x / tau)
    oscillation = np.cos(2 * np.pi * f * x + phi)
    return envelope * oscillation + c
"resource": ""
} |
def plot_coherence(xdata, ydata, std_error, fit, fit_function, xunit, exp_str,
                   qubit_label):
    """Plot coherence data with error bars and the fitted decay curve.

    Args:
        xdata (array): measurement times, plotted on the x axis.
        ydata (array): measured excited-state populations P(1).
        std_error (array): error bar for each point of ``ydata``.
        fit (list): fitted parameters, splatted into ``fit_function``;
            ``fit[1]`` is shown in the legend as the decay constant.
        fit_function (callable): model evaluated as ``fit_function(xdata, *fit)``.
        xunit (str): time unit label for the x axis and legend.
        exp_str (str): experiment name used in the legend and title (e.g. 'T1').
        qubit_label (int or str): qubit index shown in the plot title.

    Raises:
        ImportError: If matplotlib is not installed.
    """
    if not HAS_MATPLOTLIB:
        raise ImportError('The function plot_coherence needs matplotlib. '
                          'Run "pip install matplotlib" before.')
    # Raw data with error bars, then the fitted curve on top.
    plt.errorbar(xdata, ydata, std_error, marker='.',
                 markersize=9, c='b', linestyle='')
    plt.plot(xdata, fit_function(xdata, *fit), c='r', linestyle='--',
             label=(exp_str + '= %s %s' % (str(round(fit[1])), xunit)))

    plt.xticks(fontsize=14, rotation=70)
    plt.yticks(fontsize=14)
    plt.xlabel('time [%s]' % (xunit), fontsize=16)
    plt.ylabel('P(1)', fontsize=16)
    plt.title(exp_str + ' measurement of Q$_{%s}$' % (str(qubit_label)), fontsize=18)
    plt.legend(fontsize=12)
    plt.grid(True)
    plt.show()
"resource": ""
} |
def shape_rb_data(raw_rb):
    """Reduce raw randomized-benchmarking data to per-sequence statistics.

    Args:
        raw_rb (numpy.array): m x n x l array where m is the number of seeds,
            n is the number of Clifford sequences and l is the number of qubits

    Return:
        numpy_array: 2 x n x l list where index 0 is the mean over seeds,
            1 is the std dev over seeds
    """
    # Collapse the seed axis (axis 0) into mean and standard deviation.
    return [np.mean(raw_rb, 0), np.std(raw_rb, 0)]
"resource": ""
} |
def plot_rb_data(xdata, ydatas, yavg, yerr, fit, survival_prob, ax=None,
                 show_plt=True):
    """Plot randomized benchmarking data.

    Args:
        xdata (list): list of subsequence lengths
        ydatas (list): list of lists of survival probabilities for each
            sequence
        yavg (list): mean of the survival probabilities at each sequence
            length
        yerr (list): error of the survival
        fit (list): fit parameters, splatted into ``survival_prob``
        survival_prob (callable): function that computes survival probability
        ax (Axes or None): plot axis (a new figure is created if None)
        show_plt (bool): display the plot.

    Raises:
        ImportError: If matplotlib is not installed.
    """
    # pylint: disable=invalid-name
    if not HAS_MATPLOTLIB:
        raise ImportError('The function plot_rb_data needs matplotlib. '
                          'Run "pip install matplotlib" before.')
    if ax is None:
        plt.figure()
        ax = plt.gca()
    # Plot the result for each sequence
    for ydata in ydatas:
        ax.plot(xdata, ydata, color='gray', linestyle='none', marker='x')
    # Plot the mean with error bars
    ax.errorbar(xdata, yavg, yerr=yerr, color='r', linestyle='--', linewidth=3)
    # Plot the fit
    ax.plot(xdata, survival_prob(xdata, *fit), color='blue', linestyle='-', linewidth=2)
    ax.tick_params(labelsize=14)
    # ax.tick_params(axis='x',labelrotation=70)
    ax.set_xlabel('Clifford Length', fontsize=16)
    ax.set_ylabel('Z', fontsize=16)
    ax.grid(True)
    if show_plt:
        plt.show()
"resource": ""
} |
def _split_runs_on_parameters(runs):
    """Finds runs containing parameterized gates and splits them into sequential
    runs excluding the parameterized gates.
    """
    def _has_parameter(node):
        # A node is parameterized if any of its op params is a Parameter.
        return any(isinstance(param, Parameter) for param in node.op.params)

    split = []
    for run in runs:
        # groupby yields maximal consecutive stretches with the same
        # parameterized/unparameterized status; keep only the latter.
        for is_parameterized, stretch in groupby(run, _has_parameter):
            if not is_parameterized:
                split.append(list(stretch))
    return split
"resource": ""
} |
def compose_u3(theta1, phi1, lambda1, theta2, phi2, lambda2):
    """Return a triple theta, phi, lambda for the product.

    u3(theta, phi, lambda)
       = u3(theta1, phi1, lambda1).u3(theta2, phi2, lambda2)
       = Rz(phi1).Ry(theta1).Rz(lambda1+phi2).Ry(theta2).Rz(lambda2)
       = Rz(phi1).Rz(phi').Ry(theta').Rz(lambda').Rz(lambda2)
       = u3(theta', phi1 + phi', lambda2 + lambda')

    Args:
        theta1, phi1, lambda1 (float): angles of the left u3 gate.
        theta2, phi2, lambda2 (float): angles of the right u3 gate.

    Return theta, phi, lambda of the combined u3 gate.
    """
    # Careful with the factor of two in yzy_to_zyz
    thetap, phip, lambdap = Optimize1qGates.yzy_to_zyz((lambda1 + phi2), theta1, theta2)
    # Outer Rz angles fold directly into phi and lambda of the product.
    (theta, phi, lamb) = (thetap, phi1 + phip, lambda2 + lambdap)

    return (theta, phi, lamb)
"resource": ""
} |
def yzy_to_zyz(xi, theta1, theta2, eps=1e-9):  # pylint: disable=invalid-name
    """Express a Y.Z.Y single qubit gate as a Z.Y.Z gate.

    Solve the equation

    .. math::

    Ry(theta1).Rz(xi).Ry(theta2) = Rz(phi).Ry(theta).Rz(lambda)

    for theta, phi, and lambda.

    Return a solution theta, phi, and lambda.

    Raises:
        TranspilerError: if the ZYZ decomposition does not reproduce the
            original YZY rotation within tolerance.
    """
    quaternion_yzy = quaternion_from_euler([theta1, xi, theta2], 'yzy')
    euler = quaternion_yzy.to_zyz()
    quaternion_zyz = quaternion_from_euler(euler, 'zyz')
    # output order different than rotation order
    out_angles = (euler[1], euler[0], euler[2])
    # The absolute inner product of two unit quaternions is 1 iff they
    # describe the same rotation (up to global sign).
    # NOTE(review): ``eps`` is passed positionally as np.allclose's *rtol*;
    # confirm a relative (not absolute) tolerance is intended here.
    abs_inner = abs(quaternion_zyz.data.dot(quaternion_yzy.data))
    if not np.allclose(abs_inner, 1, eps):
        raise TranspilerError('YZY and ZYZ angles do not give same rotation matrix.')
    # Snap numerically-tiny angles to exactly zero.
    out_angles = tuple(0 if np.abs(angle) < _CHOP_THRESHOLD else angle
                       for angle in out_angles)
    return out_angles
"resource": ""
} |
q268153 | _validate_input_state | test | def _validate_input_state(quantum_state):
"""Validates the input to state visualization functions.
Args:
quantum_state (ndarray): Input state / density matrix.
Returns:
rho: A 2d numpy array for the density matrix.
Raises:
VisualizationError: Invalid input.
"""
rho = np.asarray(quantum_state)
if rho.ndim == 1:
rho = np.outer(rho, np.conj(rho))
# Check the shape of the input is a square matrix
shape = np.shape(rho)
if len(shape) != 2 or shape[0] != shape[1]:
raise VisualizationError("Input is not a valid quantum state.")
# Check state is an n-qubit state
num = int(np.log2(rho.shape[0]))
if 2 ** num != rho.shape[0]:
raise VisualizationError("Input is not a multi-qubit quantum state.")
return rho | python | {
"resource": ""
} |
def _trim(image):
    """Trim a PIL image and remove white space.

    Args:
        image (PIL.Image): the image to crop.

    Returns:
        PIL.Image: the cropped image (unchanged if no content box is found).
    """
    # Use the top-left pixel as the background color; the difference image
    # is then nonzero only where there is actual content.
    background = PIL.Image.new(image.mode, image.size, image.getpixel((0, 0)))
    diff = PIL.ImageChops.difference(image, background)
    # Scale up the difference (and bias it down) so faint borders survive
    # the bounding-box detection.
    diff = PIL.ImageChops.add(diff, diff, 2.0, -100)
    bbox = diff.getbbox()
    if bbox:
        image = image.crop(bbox)
    return image
"resource": ""
} |
q268155 | _get_gate_span | test | def _get_gate_span(qregs, instruction):
"""Get the list of qubits drawing this gate would cover"""
min_index = len(qregs)
max_index = 0
for qreg in instruction.qargs:
index = qregs.index(qreg)
if index < min_index:
min_index = index
if index > max_index:
max_index = index
if instruction.cargs:
return qregs[min_index:]
return qregs[min_index:max_index + 1] | python | {
"resource": ""
} |
def circuit_to_instruction(circuit):
    """Build an ``Instruction`` object from a ``QuantumCircuit``.

    The instruction is anonymous (not tied to a named quantum register),
    and so can be inserted into another circuit. The instruction will
    have the same string name as the circuit.

    Args:
        circuit (QuantumCircuit): the input circuit.

    Return:
        Instruction: an instruction equivalent to the action of the
        input circuit. Upon decomposition, this instruction will
        yield the components comprising the original circuit.
    """
    # Instruction width is the total bit count over all registers.
    instruction = Instruction(name=circuit.name,
                              num_qubits=sum([qreg.size for qreg in circuit.qregs]),
                              num_clbits=sum([creg.size for creg in circuit.cregs]),
                              params=[])
    instruction.control = None

    def find_bit_position(bit):
        """find the index of a given bit (Register, int) within
        a flat ordered list of bits of the circuit
        """
        if isinstance(bit[0], QuantumRegister):
            ordered_regs = circuit.qregs
        else:
            ordered_regs = circuit.cregs
        # Offset of the containing register plus the index inside it.
        reg_index = ordered_regs.index(bit[0])
        return sum([reg.size for reg in ordered_regs[:reg_index]]) + bit[1]

    definition = circuit.data.copy()
    # Remap every (register, index) bit reference onto single anonymous
    # registers 'q' and 'c' spanning the whole instruction.
    if instruction.num_qubits > 0:
        q = QuantumRegister(instruction.num_qubits, 'q')
    if instruction.num_clbits > 0:
        c = ClassicalRegister(instruction.num_clbits, 'c')

    definition = list(map(lambda x:
                          (x[0],
                           list(map(lambda y: (q, find_bit_position(y)), x[1])),
                           list(map(lambda y: (c, find_bit_position(y)), x[2]))), definition))
    instruction.definition = definition

    return instruction
"resource": ""
} |
def run(self, dag):
    """
    Pick a convenient layout depending on the best matching
    qubit connectivity, and set the property `layout`.

    Args:
        dag (DAGCircuit): DAG to find layout for.

    Raises:
        TranspilerError: if dag wider than self.coupling_map
    """
    num_dag_qubits = sum([qreg.size for qreg in dag.qregs.values()])
    if num_dag_qubits > self.coupling_map.size():
        raise TranspilerError('Number of qubits greater than device.')
    # Find the most densely connected device subset of the required size.
    best_sub = self._best_subset(num_dag_qubits)
    layout = Layout()
    map_iter = 0
    # Assign virtual qubits to the chosen physical qubits in register order.
    for qreg in dag.qregs.values():
        for i in range(qreg.size):
            layout[(qreg, i)] = int(best_sub[map_iter])
            map_iter += 1
    self.property_set['layout'] = layout
"resource": ""
} |
def _best_subset(self, n_qubits):
    """Computes the qubit mapping with the best connectivity.

    Args:
        n_qubits (int): Number of subset qubits to consider.

    Returns:
        ndarray: Array of qubits to use for best connectivity mapping.
    """
    if n_qubits == 1:
        return np.array([0])

    device_qubits = self.coupling_map.size()

    # Sparse adjacency matrix of the device coupling graph.
    cmap = np.asarray(self.coupling_map.get_edges())
    data = np.ones_like(cmap[:, 0])
    sp_cmap = sp.coo_matrix((data, (cmap[:, 0], cmap[:, 1])),
                            shape=(device_qubits, device_qubits)).tocsr()
    best = 0
    best_map = None
    # do bfs with each node as starting point; the first n_qubits visited
    # form a candidate subset, scored by the number of internal edges.
    for k in range(sp_cmap.shape[0]):
        bfs = cs.breadth_first_order(sp_cmap, i_start=k, directed=False,
                                     return_predecessors=False)

        connection_count = 0
        sub_graph = []
        for i in range(n_qubits):
            node_idx = bfs[i]
            # Walk the CSR row of node_idx and count neighbors that are
            # also inside the candidate subset.
            for j in range(sp_cmap.indptr[node_idx],
                           sp_cmap.indptr[node_idx + 1]):
                node = sp_cmap.indices[j]
                for counter in range(n_qubits):
                    if node == bfs[counter]:
                        connection_count += 1
                        sub_graph.append([node_idx, node])
                        break

        if connection_count > best:
            best = connection_count
            best_map = bfs[0:n_qubits]

    # Return a best mapping that has reduced bandwidth
    # (reverse Cuthill-McKee reordering of the chosen subgraph).
    mapping = {}
    for edge in range(best_map.shape[0]):
        mapping[best_map[edge]] = edge
    new_cmap = [[mapping[c[0]], mapping[c[1]]] for c in sub_graph]
    rows = [edge[0] for edge in new_cmap]
    cols = [edge[1] for edge in new_cmap]
    data = [1]*len(rows)
    sp_sub_graph = sp.coo_matrix((data, (rows, cols)),
                                 shape=(n_qubits, n_qubits)).tocsr()
    perm = cs.reverse_cuthill_mckee(sp_sub_graph)
    best_map = best_map[perm]
    return best_map
"resource": ""
} |
def barrier(self, *qargs):
    """Apply barrier to circuit.

    If qargs is None, applies to all the qbits.
    Args is a list of QuantumRegister or single qubits.
    For QuantumRegister, applies barrier to all the qubits in that register."""
    qubits = []
    qargs = _convert_to_bits(qargs, [qbit for qreg in self.qregs for qbit in qreg])

    if not qargs:  # None
        # No arguments: the barrier spans every qubit of every register.
        for qreg in self.qregs:
            for j in range(qreg.size):
                qubits.append((qreg, j))

    # Flatten registers and bit lists into individual (register, index) bits.
    for qarg in qargs:
        if isinstance(qarg, (QuantumRegister, list)):
            if isinstance(qarg, QuantumRegister):
                qubits.extend([(qarg, j) for j in range(qarg.size)])
            else:
                qubits.extend(qarg)
        else:
            qubits.append(qarg)

    return self.append(Barrier(len(qubits)), qubits, [])
"resource": ""
} |
def average_data(counts, observable):
    """Compute the mean value of a diagonal observable.

    Takes in a diagonal observable in dictionary, list or matrix format and
    calculates sum_i value(i) P(i), where value(i) is the value of the
    observable for state i.

    Args:
        counts (dict): a dict of outcomes from an experiment
        observable (dict or matrix or list): The observable to be averaged over.
            As an example, ZZ on qubits can be given as:
            * dict: {"00": 1, "11": 1, "01": -1, "10": -1}
            * matrix: [[1, 0, 0, 0], [0, -1, 0, 0, ], [0, 0, -1, 0], [0, 0, 0, 1]]
            * matrix diagonal (list): [1, -1, -1, 1]

    Returns:
        Double: Average of the observable
    """
    if not isinstance(observable, dict):
        observable = make_dict_observable(observable)
    shots = sum(counts.values())
    # Outcomes absent from the observable contribute nothing, matching the
    # original membership check.
    return sum(counts[outcome] * observable[outcome] / shots
               for outcome in counts if outcome in observable)
"resource": ""
} |
def _process_bit_id(self, node):
    """Process an Id or IndexedId node as a bit or register type.

    Return a list of tuples (Register, index).

    Raises:
        QiskitError: if the name is not a known register, or a local bit
            name is not bound in the current gate-body scope.
    """
    # pylint: disable=inconsistent-return-statements
    reg = None

    # Resolve the name against the DAG's quantum, then classical registers.
    if node.name in self.dag.qregs:
        reg = self.dag.qregs[node.name]
    elif node.name in self.dag.cregs:
        reg = self.dag.cregs[node.name]
    else:
        raise QiskitError("expected qreg or creg name:",
                          "line=%s" % node.line,
                          "file=%s" % node.file)

    if node.type == "indexed_id":
        # An indexed bit or qubit
        return [(reg, node.index)]
    elif node.type == "id":
        # A qubit or qreg or creg
        if not self.bit_stack[-1]:
            # Global scope: a bare name expands to the whole register.
            return [(reg, j) for j in range(reg.size)]
        else:
            # local scope: the name must be a bound gate-body bit.
            if node.name in self.bit_stack[-1]:
                return [self.bit_stack[-1][node.name]]
            raise QiskitError("expected local bit name:",
                              "line=%s" % node.line,
                              "file=%s" % node.file)
    return None
"resource": ""
} |
def _process_custom_unitary(self, node):
    """Process a custom unitary node.

    Applies a previously-defined gate (from ``self.gates``) to its bit
    arguments, broadcasting over whole registers where given.

    Raises:
        QiskitError: if the gate name was never defined.
    """
    name = node.name
    if node.arguments is not None:
        args = self._process_node(node.arguments)
    else:
        args = []
    bits = [self._process_bit_id(node_element)
            for node_element in node.bitlist.children]
    if name in self.gates:
        gargs = self.gates[name]["args"]
        gbits = self.gates[name]["bits"]
        # Loop over register arguments, if any: a register argument of
        # size maxidx broadcasts the gate maxidx times.
        maxidx = max(map(len, bits))
        for idx in range(maxidx):
            # Push the formal->actual bindings for this application onto
            # the argument and bit stacks, emit the op, then pop.
            self.arg_stack.append({gargs[j]: args[j]
                                   for j in range(len(gargs))})
            # Only index into register arguments (single bits use index 0).
            element = [idx*x for x in
                       [len(bits[j]) > 1 for j in range(len(bits))]]
            self.bit_stack.append({gbits[j]: bits[j][element[j]]
                                   for j in range(len(gbits))})
            self._create_dag_op(name,
                                [self.arg_stack[-1][s].sym() for s in gargs],
                                [self.bit_stack[-1][s] for s in gbits])
            self.arg_stack.pop()
            self.bit_stack.pop()
    else:
        raise QiskitError("internal error undefined gate:",
                          "line=%s" % node.line, "file=%s" % node.file)
"resource": ""
} |
q268163 | AstInterpreter._process_gate | test | def _process_gate(self, node, opaque=False):
"""Process a gate node.
If opaque is True, process the node as an opaque gate node.
"""
self.gates[node.name] = {}
de_gate = self.gates[node.name]
de_gate["print"] = True # default
de_gate["opaque"] = opaque
de_gate["n_args"] = node.n_args()
de_gate["n_bits"] = node.n_bits()
if node.n_args() > 0:
de_gate["args"] = [element.name for element in node.arguments.children]
else:
de_gate["args"] = []
de_gate["bits"] = [c.name for c in node.bitlist.children]
if opaque:
de_gate["body"] = None
else:
de_gate["body"] = node.body | python | {
"resource": ""
} |
def _process_cnot(self, node):
    """Process a CNOT gate node.

    Broadcasts over register arguments: either both operands are registers
    of equal size, or one is a single bit reused against every bit of the
    other.

    Raises:
        QiskitError: if the two operand registers have mismatched sizes.
    """
    id0 = self._process_bit_id(node.children[0])
    id1 = self._process_bit_id(node.children[1])
    if not(len(id0) == len(id1) or len(id0) == 1 or len(id1) == 1):
        raise QiskitError("internal error: qreg size mismatch",
                          "line=%s" % node.line, "file=%s" % node.file)
    maxidx = max([len(id0), len(id1)])
    for idx in range(maxidx):
        # Pairwise when both are registers; otherwise pin the single bit.
        if len(id0) > 1 and len(id1) > 1:
            self.dag.apply_operation_back(CXBase(), [id0[idx], id1[idx]], [], self.condition)
        elif len(id0) > 1:
            self.dag.apply_operation_back(CXBase(), [id0[idx], id1[0]], [], self.condition)
        else:
            self.dag.apply_operation_back(CXBase(), [id0[0], id1[idx]], [], self.condition)
"resource": ""
} |
def _process_measure(self, node):
    """Process a measurement node.

    Measures each qubit of the first operand into the matching classical
    bit of the second.

    Raises:
        QiskitError: if the quantum and classical registers differ in size.
    """
    id0 = self._process_bit_id(node.children[0])
    id1 = self._process_bit_id(node.children[1])
    if len(id0) != len(id1):
        raise QiskitError("internal error: reg size mismatch",
                          "line=%s" % node.line, "file=%s" % node.file)
    for idx, idy in zip(id0, id1):
        self.dag.apply_operation_back(Measure(), [idx], [idy], self.condition)
"resource": ""
} |
q268166 | AstInterpreter._process_if | test | def _process_if(self, node):
"""Process an if node."""
creg_name = node.children[0].name
creg = self.dag.cregs[creg_name]
cval = node.children[1].value
self.condition = (creg, cval)
self._process_node(node.children[2])
self.condition = None | python | {
"resource": ""
} |
def _create_dag_op(self, name, params, qargs):
    """
    Create a DAG node out of a parsed AST op node.

    Args:
        name (str): operation name to apply to the dag.
        params (list): op parameters
        qargs (list(QuantumRegister, int)): qubits to attach to

    Raises:
        QiskitError: if encountering a non-basis opaque gate
    """
    # Dispatch table mapping qasm basis-gate names to their gate classes;
    # replaces the original 27-branch if/elif chain.
    known_ops = {
        "u0": U0Gate,
        "u1": U1Gate,
        "u2": U2Gate,
        "u3": U3Gate,
        "x": XGate,
        "y": YGate,
        "z": ZGate,
        "t": TGate,
        "tdg": TdgGate,
        "s": SGate,
        "sdg": SdgGate,
        "swap": SwapGate,
        "rx": RXGate,
        "ry": RYGate,
        "rz": RZGate,
        "rzz": RZZGate,
        "id": IdGate,
        "h": HGate,
        "cx": CnotGate,
        "cy": CyGate,
        "cz": CzGate,
        "ch": CHGate,
        "crz": CrzGate,
        "cu1": Cu1Gate,
        "cu3": Cu3Gate,
        "ccx": ToffoliGate,
        "cswap": FredkinGate,
    }
    if name not in known_ops:
        raise QiskitError("unknown operation for ast node name %s" % name)
    op = known_ops[name](*params)
    self.dag.apply_operation_back(op, qargs, [], condition=self.condition)
"resource": ""
} |
def ch_duration(self, *channels: List[Channel]) -> int:
    """Return duration of supplied channels.

    Args:
        *channels: Supplied channels

    Returns:
        int: duration, delegated to this schedule's timeslot collection.
    """
    return self.timeslots.ch_duration(*channels)
"resource": ""
} |
def ch_start_time(self, *channels: List[Channel]) -> int:
    """Return minimum start time for supplied channels.

    Args:
        *channels: Supplied channels

    Returns:
        int: earliest start time, delegated to the timeslot collection.
    """
    return self.timeslots.ch_start_time(*channels)
"resource": ""
} |
def ch_stop_time(self, *channels: List[Channel]) -> int:
    """Return maximum stop time for supplied channels.

    Args:
        *channels: Supplied channels

    Returns:
        int: latest stop time, delegated to the timeslot collection.
    """
    return self.timeslots.ch_stop_time(*channels)
"resource": ""
} |
def _instructions(self, time: int = 0) -> Iterable[Tuple[int, 'Instruction']]:
    """Iterable for flattening Schedule tree.

    Args:
        time: Shifted time due to parent

    Yields:
        Tuple[int, ScheduleComponent]: Tuple containing time `ScheduleComponent` starts
            at and the flattened `ScheduleComponent`.
    """
    # Recurse into children, accumulating each child's offset so yielded
    # times are absolute rather than relative to this node.
    for insert_time, child_sched in self.children:
        yield from child_sched._instructions(time + insert_time)
"resource": ""
} |
def check_type(self, value, attr, data):
    """Validates a value against the correct type of the field.

    It calls ``_expected_types`` to get a list of valid types.

    Subclasses can do one of the following:

    1. They can override the ``valid_types`` property with a tuple with
       the expected types for this field.

    2. They can override the ``_expected_types`` method to return a
       tuple of expected types for the field.

    3. They can change ``check_type`` completely to customize
       validation.

    Args:
        value: the value to type-check.
        attr: the attribute/field name, forwarded for error reporting.
        data: the containing data, forwarded for error reporting.

    This method or the overrides must return the ``value`` parameter
    untouched.
    """
    expected_types = self._expected_types()
    if not isinstance(value, expected_types):
        # _not_expected_type builds the validation error to raise.
        raise self._not_expected_type(
            value, expected_types, fields=[self], field_names=attr, data=data)
    return value
"resource": ""
} |
def dump_additional_data(self, valid_data, many, original_data):
    """Include unknown fields after dumping.

    Unknown fields are added with no processing at all.

    Args:
        valid_data (dict or list): data collected and returned by ``dump()``.
        many (bool): if True, data and original_data are a list.
        original_data (object or list): object passed to ``dump()`` in the
            first place.

    Returns:
        dict: the same ``valid_data`` extended with the unknown attributes.

    Inspired by https://github.com/marshmallow-code/marshmallow/pull/595.
    """
    def _merge_unknown(data, source):
        # Copy over attributes of `source` that `dump()` did not emit.
        for key in set(source.__dict__) - set(data):
            data[key] = getattr(source, key)

    if many:
        for i, entry in enumerate(valid_data):
            _merge_unknown(entry, original_data[i])
    else:
        _merge_unknown(valid_data, original_data)
    return valid_data
"resource": ""
} |
def load_additional_data(self, valid_data, many, original_data):
    """Include unknown fields after load.

    Unknown fields are added with no processing at all.

    Args:
        valid_data (dict or list): validated data returned by ``load()``.
        many (bool): if True, data and original_data are a list.
        original_data (dict or list): data passed to ``load()`` in the
            first place.

    Returns:
        dict: the same ``valid_data`` extended with the unknown attributes.

    Inspired by https://github.com/marshmallow-code/marshmallow/pull/595.
    """
    def _merge_unknown(data, source):
        # Copy over keys of `source` that `load()` did not validate.
        for key in set(source) - set(data):
            data[key] = source[key]

    if many:
        for i, entry in enumerate(valid_data):
            _merge_unknown(entry, original_data[i])
    else:
        _merge_unknown(valid_data, original_data)
    return valid_data
"resource": ""
} |
def _create_validation_schema(schema_cls):
    """Create a patched Schema for validating models.

    Model validation is not part of Marshmallow. Schemas have a ``validate``
    method but this delegates execution on ``load`` and discards the result.
    Similarly, ``load`` will call ``_deserialize`` on every field in the
    schema.

    This function patches the ``_deserialize`` instance method of each
    field to make it call a custom defined method ``check_type``
    provided by Qiskit in the different fields at
    ``qiskit.validation.fields``.

    Args:
        schema_cls (type): the Schema class to instantiate and patch.

    Returns:
        BaseSchema: a copy of the original Schema, overriding the
            ``_deserialize()`` call of its fields.
    """
    validation_schema = schema_cls()
    for _, field in validation_schema.fields.items():
        if isinstance(field, ModelTypeValidator):
            # Rebind check_type as this field instance's _deserialize so
            # "loading" performs type validation instead of conversion.
            validate_function = field.__class__.check_type
            field._deserialize = MethodType(validate_function, field)

    return validation_schema
"resource": ""
} |
def _validate(instance):
    """Validate the internal representation of the instance.

    Raises:
        ModelValidationError: re-raised from any marshmallow
            ``ValidationError`` produced while validating the instance's
            dict form.
    """
    try:
        _ = instance.schema.validate(instance.to_dict())
    except ValidationError as ex:
        raise ModelValidationError(
            ex.messages, ex.field_names, ex.fields, ex.data, **ex.kwargs)
"resource": ""
} |
def _validate_after_init(init_method):
    """Add validation after instantiation.

    Decorator that validates the keyword arguments against the model's
    shallow schema before delegating to the wrapped ``__init__``.
    """
    @wraps(init_method)
    def _decorated(self, **kwargs):
        try:
            _ = self.shallow_schema.validate(kwargs)
        except ValidationError as ex:
            # ``from None`` hides the marshmallow traceback from callers.
            raise ModelValidationError(
                ex.messages, ex.field_names, ex.fields, ex.data, **ex.kwargs) from None

        init_method(self, **kwargs)

    return _decorated
"resource": ""
} |
def to_dict(self):
    """Serialize the model into a Python dict of simple types.

    Note that this method requires that the model is bound with
    ``@bind_schema``.
    """
    try:
        serialized, _ = self.schema.dump(self)
    except ValidationError as ex:
        raise ModelValidationError(
            ex.messages, ex.field_names, ex.fields, ex.data, **ex.kwargs) from None
    return serialized
"resource": ""
} |
def from_dict(cls, dict_):
    """Deserialize a dict of simple types into an instance of this class.

    Note that this method requires that the model is bound with
    ``@bind_schema``.
    """
    try:
        instance, _ = cls.schema.load(dict_)
    except ValidationError as ex:
        raise ModelValidationError(
            ex.messages, ex.field_names, ex.fields, ex.data, **ex.kwargs) from None
    return instance
"resource": ""
} |
def qft(circ, q, n):
    """Apply the n-qubit quantum Fourier transform to register ``q`` in ``circ``."""
    for target in range(n):
        # Controlled phase rotations from every earlier qubit, then Hadamard.
        for control in range(target):
            angle = math.pi / float(2 ** (target - control))
            circ.cu1(angle, q[target], q[control])
        circ.h(q[target])
"resource": ""
} |
def __partial_trace_vec(vec, trace_systems, dimensions, reverse=True):
    """Partial trace over subsystems of a multi-partite state vector.

    Args:
        vec (vector_like): complex state vector of length prod(dimensions).
        trace_systems (list(int)): subsystems (starting from 0) to trace over.
        dimensions (list(int)): dimensions of the individual subsystems.
        reverse (bool): ordering of systems in the tensor product.
            If True system-0 is the right-most system; if False it is the
            left-most system.

    Returns:
        ndarray: density matrix with the requested subsystems traced out.
    """
    if reverse:
        # Re-index so subsystem 0 refers to the right-most tensor factor.
        dimensions = dimensions[::-1]
        trace_systems = len(dimensions) - 1 - np.array(trace_systems)
    tensor = vec.reshape(dimensions)
    # Contracting the traced axes of |psi> against <psi| yields the reduced
    # density matrix directly, without forming the full outer product.
    reduced = np.tensordot(tensor, tensor.conj(),
                           axes=(trace_systems, trace_systems))
    dim = int(np.sqrt(np.prod(reduced.shape)))
    return reduced.reshape(dim, dim)
"resource": ""
} |
def vectorize(density_matrix, method='col'):
    """Flatten an operator to a vector in a specified basis.

    Args:
        density_matrix (ndarray): a density matrix.
        method (str): vectorization method:
            - 'col' (default): column-major (Fortran-order) flattening.
            - 'row': row-major (C-order) flattening.
            - 'pauli': n-qubit Pauli basis, lexicographic order.
            - 'pauli_weights': n-qubit Pauli basis ordered by weight.

    Returns:
        ndarray: the resulting vector, or None for an unknown method.

    Raises:
        Exception: if the input is not an n-qubit state (Pauli methods only).
    """
    density_matrix = np.array(density_matrix)
    if method == 'col':
        return density_matrix.flatten(order='F')
    if method == 'row':
        return density_matrix.flatten(order='C')
    if method in ('pauli', 'pauli_weights'):
        num = int(np.log2(len(density_matrix)))  # number of qubits
        if len(density_matrix) != 2 ** num:
            raise Exception('Input state must be n-qubit state')
        case = 'weight' if method == 'pauli_weights' else 'tensor'
        pgroup = pauli_group(num, case=case)
        return np.array([np.trace(np.dot(p.to_matrix(), density_matrix))
                         for p in pgroup])
    return None
"resource": ""
} |
def devectorize(vectorized_mat, method='col'):
    """Devectorize a vectorized square matrix.

    Args:
        vectorized_mat (ndarray): a vectorized density matrix.
        method (str): devectorization method:
            - 'col' (default): column-major (Fortran-order) reshaping.
            - 'row': row-major (C-order) reshaping.
            - 'pauli': n-qubit Pauli basis, lexicographic order.
            - 'pauli_weights': n-qubit Pauli basis ordered by weight.

    Returns:
        ndarray: the resulting matrix, or None for an unknown method.

    Raises:
        Exception: if the input is not a vectorized square matrix, or not
            an n-qubit state for the Pauli methods.
    """
    vectorized_mat = np.array(vectorized_mat)
    dimension = int(np.sqrt(vectorized_mat.size))
    if len(vectorized_mat) != dimension * dimension:
        raise Exception('Input is not a vectorized square matrix')
    if method == 'col':
        return vectorized_mat.reshape(dimension, dimension, order='F')
    if method == 'row':
        return vectorized_mat.reshape(dimension, dimension, order='C')
    if method in ('pauli', 'pauli_weights'):
        num_qubits = int(np.log2(dimension))  # number of qubits
        if dimension != 2 ** num_qubits:
            raise Exception('Input state must be n-qubit state')
        case = 'weight' if method == 'pauli_weights' else 'tensor'
        pgroup = pauli_group(num_qubits, case=case)
        pbasis = np.array([p.to_matrix() for p in pgroup]) / 2 ** num_qubits
        return np.tensordot(vectorized_mat, pbasis, axes=1)
    return None
"resource": ""
} |
def choi_to_rauli(choi, order=1):
    """Convert a Choi-matrix to a Pauli-basis superoperator.

    Assumes the Choi-matrix uses the standard column-stacking convention
    and is normalized to trace 1: for a channel E,
    choi = (I \\otimes E)(bell_state).
    The resulting 'rauli' R acts on Pauli-vectorized input states,
    |rho_out>_p = R.|rho_in>_p, where the vectorization ordering matches
    ``order``.

    Args:
        choi (matrix): the input Choi-matrix.
        order (int): ordering of the Pauli group vector.
            order=1 (default) is standard lexicographic ordering,
            e.g. [II, IX, IY, IZ, XI, XX, XY, ...].
            order=0 is ordered by weights,
            e.g. [II, IX, IY, IZ, XI, XY, XZ, XX, XY, ...].

    Returns:
        np.array: a superoperator in the Pauli basis.
    """
    if order == 0:
        order = 'weight'
    elif order == 1:
        order = 'tensor'
    # System dimension is sqrt(len(choi)); take log2 for the qubit count.
    num_qubits = int(np.log2(np.sqrt(len(choi))))
    pgp = pauli_group(num_qubits, case=order)
    entries = [np.trace(np.dot(choi, np.kron(col.to_matrix().T, row.to_matrix())))
               for row in pgp
               for col in pgp]
    return np.array(entries).reshape(4 ** num_qubits, 4 ** num_qubits)
"resource": ""
} |
def chop(array, epsilon=1e-10):
    """Truncate small values of a complex array.

    Args:
        array (array_like): array whose small values should be truncated.
        epsilon (float): magnitude threshold below which values become 0.

    Returns:
        np.array: a new array with small entries set to zero.
    """
    result = np.array(array)
    if np.isrealobj(result):
        result[np.abs(result) < epsilon] = 0.0
    else:
        # Chop real and imaginary parts independently.
        result.real[np.abs(result.real) < epsilon] = 0.0
        result.imag[np.abs(result.imag) < epsilon] = 0.0
    return result
"resource": ""
} |
def outer(vector1, vector2=None):
    """Construct the outer product |v1><v2| of two vectors.

    The second argument is optional; if absent, the projector of the first
    vector (|v1><v1|) is returned.

    Args:
        vector1 (ndarray): the first vector.
        vector2 (ndarray): the (optional) second vector.

    Returns:
        np.array: the matrix |v1><v2|.
    """
    if vector2 is None:
        vector2 = vector1
    return np.outer(vector1, np.array(vector2).conj())
"resource": ""
} |
def concurrence(state):
    """Calculate the concurrence of a two-qubit state.

    Args:
        state (np.array): a two-qubit state-vector (length-4 array) or
            density matrix (4x4 array).

    Returns:
        float: the concurrence, in [0, 1].

    Raises:
        Exception: if the input is not a two-qubit state.
    """
    rho = np.array(state)
    if rho.ndim == 1:
        # Promote a state vector to its projector |psi><psi|.
        rho = np.outer(rho, rho.conj())
    if len(state) != 4:
        # Fixed message: the original said "more than two qubits", but
        # concurrence is defined exactly for two-qubit states.
        raise Exception("Concurrence is only defined for two-qubit states")
    # YY = sigma_y (x) sigma_y, written as an anti-diagonal matrix.
    YY = np.fliplr(np.diag([-1, 1, 1, -1]))
    # Wootters' formula: eigenvalues of rho . YY . rho* . YY.
    A = rho.dot(YY).dot(rho.conj()).dot(YY)
    w = la.eigh(A, eigvals_only=True)
    w = np.sqrt(np.maximum(w, 0))  # clip tiny negative eigenvalues
    return max(0.0, w[-1] - np.sum(w[0:-1]))
"resource": ""
} |
def shannon_entropy(pvec, base=2):
    """Compute the Shannon entropy of a probability vector.

    The Shannon entropy of a probability vector pv is defined as
    $H(pv) = - \\sum_j pv[j] log_b (pv[j])$ where $0 log_b 0 = 0$.

    Args:
        pvec (array_like): a probability vector.
        base (int): the base of the logarithm.

    Returns:
        float: the Shannon entropy H(pvec).
    """
    # pylint: disable=missing-docstring
    if base == 2:
        def logfn(x):
            return - x * np.log2(x)
    elif base == np.e:
        def logfn(x):
            return - x * np.log(x)
    else:
        def logfn(x):
            return -x * np.log(x) / np.log(base)
    # Entries equal to 0 or 1 contribute nothing, so only sum over (0, 1).
    return sum((logfn(p) for p in pvec if 0 < p < 1), 0.)
"resource": ""
} |
def entropy(state):
    """Compute the von Neumann entropy of a quantum state.

    Args:
        state (array_like): a density matrix or state vector.

    Returns:
        float: the von Neumann entropy S(rho) in nats.
    """
    rho = np.array(state)
    if rho.ndim == 1:
        # A state vector is a pure state, which has zero entropy.
        return 0
    eigenvalues = np.maximum(np.linalg.eigvalsh(state), 0.)
    return shannon_entropy(eigenvalues, base=np.e)
"resource": ""
} |
def mutual_information(state, d0, d1=None):
    """Compute the mutual information of a bipartite state.

    Args:
        state (array_like): a bipartite state-vector or density-matrix.
        d0 (int): dimension of the first subsystem.
        d1 (int or None): dimension of the second subsystem; inferred from
            ``len(state) / d0`` when omitted.

    Returns:
        float: the mutual information S(rho_A) + S(rho_B) - S(rho_AB).
    """
    if d1 is None:
        d1 = int(len(state) / d0)
    dims = [d0, d1]
    reduced_0 = partial_trace(state, [0], dimensions=dims)
    reduced_1 = partial_trace(state, [1], dimensions=dims)
    return entropy(reduced_0) + entropy(reduced_1) - entropy(state)
"resource": ""
} |
def entanglement_of_formation(state, d0, d1=None):
    """Compute the entanglement of formation of a quantum state.

    The input must be either a bipartite state vector, or a 2-qubit
    density matrix.

    Args:
        state (array_like): (N) array_like or (4,4) array_like, a
            bipartite quantum state.
        d0 (int): the dimension of the first subsystem.
        d1 (int or None): the dimension of the second subsystem; inferred
            from ``len(state) / d0`` when omitted.

    Returns:
        float: the entanglement of formation, or None for invalid input.
    """
    state = np.array(state)
    if d1 is None:
        d1 = int(len(state) / d0)
    if state.ndim == 2 and len(state) == 4 and d0 == 2 and d1 == 2:
        # 2-qubit density matrix: use the closed-form via concurrence.
        return __eof_qubit(state)
    if state.ndim == 1:
        # Pure bipartite state: entropy of the smaller reduced state.
        traced = [1] if d0 < d1 else [0]
        reduced = partial_trace(state, traced, dimensions=[d0, d1])
        return entropy(reduced)
    print('Input must be a state-vector or 2-qubit density matrix.')
    return None
"resource": ""
} |
def __eof_qubit(rho):
    """Entanglement of formation of a 2-qubit density matrix.

    Uses the closed-form expression in terms of the concurrence.

    Args:
        rho (array_like): (4,4) array_like, input density matrix.

    Returns:
        float: the entanglement of formation.
    """
    conc = concurrence(rho)
    prob = 0.5 + 0.5 * np.sqrt(1 - conc * conc)
    return shannon_entropy([prob, 1 - prob])
"resource": ""
} |
def flatten(schedule: ScheduleComponent, name: str = None) -> Schedule:
    """Create a flattened schedule.

    Args:
        schedule: Schedule to flatten.
        name: Name of the new schedule; defaults to ``schedule.name``.
    """
    new_name = schedule.name if name is None else name
    return Schedule(*schedule.instructions, name=new_name)
"resource": ""
} |
def shift(schedule: ScheduleComponent, time: int, name: str = None) -> Schedule:
    """Return ``schedule`` shifted forward by ``time``.

    Args:
        schedule: The schedule to shift.
        time: The amount of time to shift by.
        name: Name of the shifted schedule; defaults to ``schedule.name``.
    """
    new_name = schedule.name if name is None else name
    return union((time, schedule), name=new_name)
"resource": ""
} |
def insert(parent: ScheduleComponent, time: int, child: ScheduleComponent,
           name: str = None) -> Schedule:
    """Return a new schedule with ``child`` inserted into ``parent`` at ``time``.

    Args:
        parent: Schedule to be inserted into.
        time: Insertion time, defined with respect to ``parent``.
        child: Schedule to insert.
        name: Name of the new schedule; defaults to the parent's name.
    """
    combined = union(parent, (time, child), name=name)
    return combined
"resource": ""
} |
def append(parent: ScheduleComponent, child: ScheduleComponent,
           name: str = None) -> Schedule:
    r"""Return a new schedule with ``child`` appended to ``parent``.

    The child is inserted at the last stop time of the parent over the
    channels the two schedules share:
    $t = \textrm{max}({x.stop\_time |x \in parent.channels \cap child.channels})$

    Args:
        parent: The schedule to be inserted into.
        child: The schedule to insert.
        name: Name of the new schedule; defaults to the parent's name.
    """
    shared_channels = set(parent.channels) & set(child.channels)
    insertion_time = parent.ch_stop_time(*shared_channels)
    return insert(parent, insertion_time, child, name=name)
"resource": ""
} |
def u3(self, theta, phi, lam, q):
    """Apply the U3(theta, phi, lam) gate to qubit ``q``."""
    gate = U3Gate(theta, phi, lam)
    return self.append(gate, [q], [])
"resource": ""
} |
def status(self):
    """Return backend status.

    Returns:
        BackendStatus: the status of the backend.
    """
    # Base implementation reports a static, always-operational status.
    info = dict(backend_name=self.name(),
                backend_version=__version__,
                operational=True,
                pending_jobs=0,
                status_msg='')
    return BackendStatus(**info)
"resource": ""
} |
def start(self, iterations):
    """Start the progress bar.

    Parameters:
        iterations (int): Number of iterations.
    """
    self.iter = int(iterations)   # total number of iterations to track
    self.t_start = time.time()    # wall-clock start time
    self.touched = True           # mark the bar as having been started
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.