repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
bulkan/robotframework-requests
src/RequestsLibrary/RequestsKeywords.py
RequestsKeywords.delete_request
def delete_request( self, alias, uri, data=None, json=None, params=None, headers=None, allow_redirects=None, timeout=None): """ Send a DELETE request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the DELETE request to ``json`` a value that will be json encoded and sent as request data if data is not specified ``headers`` a dictionary of headers to use with the request ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout """ session = self._cache.switch(alias) data = self._format_data_according_to_header(session, data, headers) redir = True if allow_redirects is None else allow_redirects response = self._delete_request( session, uri, data, json, params, headers, redir, timeout) if isinstance(data, bytes): data = data.decode('utf-8') logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \ headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir)) return response
python
def delete_request( self, alias, uri, data=None, json=None, params=None, headers=None, allow_redirects=None, timeout=None): """ Send a DELETE request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the DELETE request to ``json`` a value that will be json encoded and sent as request data if data is not specified ``headers`` a dictionary of headers to use with the request ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout """ session = self._cache.switch(alias) data = self._format_data_according_to_header(session, data, headers) redir = True if allow_redirects is None else allow_redirects response = self._delete_request( session, uri, data, json, params, headers, redir, timeout) if isinstance(data, bytes): data = data.decode('utf-8') logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \ headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir)) return response
[ "def", "delete_request", "(", "self", ",", "alias", ",", "uri", ",", "data", "=", "None", ",", "json", "=", "None", ",", "params", "=", "None", ",", "headers", "=", "None", ",", "allow_redirects", "=", "None", ",", "timeout", "=", "None", ")", ":", ...
Send a DELETE request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the DELETE request to ``json`` a value that will be json encoded and sent as request data if data is not specified ``headers`` a dictionary of headers to use with the request ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout
[ "Send", "a", "DELETE", "request", "on", "the", "session", "object", "found", "using", "the", "given", "alias" ]
11baa3277f1cb728712e26d996200703c15254a8
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L864-L902
train
229,600
bulkan/robotframework-requests
src/RequestsLibrary/RequestsKeywords.py
RequestsKeywords.head_request
def head_request( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ Send a HEAD request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the HEAD request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ session = self._cache.switch(alias) redir = False if allow_redirects is None else allow_redirects response = self._head_request(session, uri, headers, redir, timeout) logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \ allow_redirects=%s ' % (alias, uri, headers, redir)) return response
python
def head_request( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ Send a HEAD request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the HEAD request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ session = self._cache.switch(alias) redir = False if allow_redirects is None else allow_redirects response = self._head_request(session, uri, headers, redir, timeout) logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \ allow_redirects=%s ' % (alias, uri, headers, redir)) return response
[ "def", "head_request", "(", "self", ",", "alias", ",", "uri", ",", "headers", "=", "None", ",", "allow_redirects", "=", "None", ",", "timeout", "=", "None", ")", ":", "session", "=", "self", ".", "_cache", ".", "switch", "(", "alias", ")", "redir", "...
Send a HEAD request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the HEAD request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request
[ "Send", "a", "HEAD", "request", "on", "the", "session", "object", "found", "using", "the", "given", "alias" ]
11baa3277f1cb728712e26d996200703c15254a8
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L937-L961
train
229,601
bulkan/robotframework-requests
src/RequestsLibrary/RequestsKeywords.py
RequestsKeywords.options_request
def options_request( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ Send an OPTIONS request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the OPTIONS request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ session = self._cache.switch(alias) redir = True if allow_redirects is None else allow_redirects response = self._options_request(session, uri, headers, redir, timeout) logger.info( 'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' % (alias, uri, headers, redir)) return response
python
def options_request( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ Send an OPTIONS request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the OPTIONS request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ session = self._cache.switch(alias) redir = True if allow_redirects is None else allow_redirects response = self._options_request(session, uri, headers, redir, timeout) logger.info( 'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' % (alias, uri, headers, redir)) return response
[ "def", "options_request", "(", "self", ",", "alias", ",", "uri", ",", "headers", "=", "None", ",", "allow_redirects", "=", "None", ",", "timeout", "=", "None", ")", ":", "session", "=", "self", ".", "_cache", ".", "switch", "(", "alias", ")", "redir", ...
Send an OPTIONS request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the OPTIONS request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request
[ "Send", "an", "OPTIONS", "request", "on", "the", "session", "object", "found", "using", "the", "given", "alias" ]
11baa3277f1cb728712e26d996200703c15254a8
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L990-L1015
train
229,602
bulkan/robotframework-requests
src/RequestsLibrary/RequestsKeywords.py
RequestsKeywords._get_url
def _get_url(self, session, uri): """ Helper method to get the full url """ url = session.url if uri: slash = '' if uri.startswith('/') else '/' url = "%s%s%s" % (session.url, slash, uri) return url
python
def _get_url(self, session, uri): """ Helper method to get the full url """ url = session.url if uri: slash = '' if uri.startswith('/') else '/' url = "%s%s%s" % (session.url, slash, uri) return url
[ "def", "_get_url", "(", "self", ",", "session", ",", "uri", ")", ":", "url", "=", "session", ".", "url", "if", "uri", ":", "slash", "=", "''", "if", "uri", ".", "startswith", "(", "'/'", ")", "else", "'/'", "url", "=", "\"%s%s%s\"", "%", "(", "se...
Helper method to get the full url
[ "Helper", "method", "to", "get", "the", "full", "url" ]
11baa3277f1cb728712e26d996200703c15254a8
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L1174-L1182
train
229,603
bulkan/robotframework-requests
src/RequestsLibrary/RequestsKeywords.py
RequestsKeywords._json_pretty_print
def _json_pretty_print(self, content): """ Pretty print a JSON object ``content`` JSON object to pretty print """ temp = json.loads(content) return json.dumps( temp, sort_keys=True, indent=4, separators=( ',', ': '))
python
def _json_pretty_print(self, content): """ Pretty print a JSON object ``content`` JSON object to pretty print """ temp = json.loads(content) return json.dumps( temp, sort_keys=True, indent=4, separators=( ',', ': '))
[ "def", "_json_pretty_print", "(", "self", ",", "content", ")", ":", "temp", "=", "json", ".", "loads", "(", "content", ")", "return", "json", ".", "dumps", "(", "temp", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ",", "separators", "=", "...
Pretty print a JSON object ``content`` JSON object to pretty print
[ "Pretty", "print", "a", "JSON", "object" ]
11baa3277f1cb728712e26d996200703c15254a8
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L1215-L1228
train
229,604
coddingtonbear/python-myfitnesspal
myfitnesspal/client.py
Client.get_measurements
def get_measurements( self, measurement='Weight', lower_bound=None, upper_bound=None ): """ Returns measurements of a given name between two dates.""" if upper_bound is None: upper_bound = datetime.date.today() if lower_bound is None: lower_bound = upper_bound - datetime.timedelta(days=30) # If they entered the dates in the opposite order, let's # just flip them around for them as a convenience if lower_bound > upper_bound: lower_bound, upper_bound = upper_bound, lower_bound # get the URL for the main check in page document = self._get_document_for_url( self._get_url_for_measurements() ) # gather the IDs for all measurement types measurement_ids = self._get_measurement_ids(document) # select the measurement ID based on the input if measurement in measurement_ids.keys(): measurement_id = measurement_ids[measurement] else: raise ValueError( "Measurement '%s' does not exist." % measurement ) page = 1 measurements = OrderedDict() # retrieve entries until finished while True: # retrieve the HTML from MyFitnessPal document = self._get_document_for_url( self._get_url_for_measurements(page, measurement_id) ) # parse the HTML for measurement entries and add to dictionary results = self._get_measurements(document) measurements.update(results) # stop if there are no more entries if len(results) == 0: break # continue if the lower bound has not been reached elif list(results.keys())[-1] > lower_bound: page += 1 continue # otherwise stop else: break # remove entries that are not within the dates specified for date in list(measurements.keys()): if not upper_bound >= date >= lower_bound: del measurements[date] return measurements
python
def get_measurements( self, measurement='Weight', lower_bound=None, upper_bound=None ): """ Returns measurements of a given name between two dates.""" if upper_bound is None: upper_bound = datetime.date.today() if lower_bound is None: lower_bound = upper_bound - datetime.timedelta(days=30) # If they entered the dates in the opposite order, let's # just flip them around for them as a convenience if lower_bound > upper_bound: lower_bound, upper_bound = upper_bound, lower_bound # get the URL for the main check in page document = self._get_document_for_url( self._get_url_for_measurements() ) # gather the IDs for all measurement types measurement_ids = self._get_measurement_ids(document) # select the measurement ID based on the input if measurement in measurement_ids.keys(): measurement_id = measurement_ids[measurement] else: raise ValueError( "Measurement '%s' does not exist." % measurement ) page = 1 measurements = OrderedDict() # retrieve entries until finished while True: # retrieve the HTML from MyFitnessPal document = self._get_document_for_url( self._get_url_for_measurements(page, measurement_id) ) # parse the HTML for measurement entries and add to dictionary results = self._get_measurements(document) measurements.update(results) # stop if there are no more entries if len(results) == 0: break # continue if the lower bound has not been reached elif list(results.keys())[-1] > lower_bound: page += 1 continue # otherwise stop else: break # remove entries that are not within the dates specified for date in list(measurements.keys()): if not upper_bound >= date >= lower_bound: del measurements[date] return measurements
[ "def", "get_measurements", "(", "self", ",", "measurement", "=", "'Weight'", ",", "lower_bound", "=", "None", ",", "upper_bound", "=", "None", ")", ":", "if", "upper_bound", "is", "None", ":", "upper_bound", "=", "datetime", ".", "date", ".", "today", "(",...
Returns measurements of a given name between two dates.
[ "Returns", "measurements", "of", "a", "given", "name", "between", "two", "dates", "." ]
29aad88d31adc025eacaddd3390cb521b6012b73
https://github.com/coddingtonbear/python-myfitnesspal/blob/29aad88d31adc025eacaddd3390cb521b6012b73/myfitnesspal/client.py#L524-L586
train
229,605
coddingtonbear/python-myfitnesspal
myfitnesspal/client.py
Client.set_measurements
def set_measurements( self, measurement='Weight', value=None ): """ Sets measurement for today's date.""" if value is None: raise ValueError( "Cannot update blank value." ) # get the URL for the main check in page # this is left in because we need to parse # the 'measurement' name to set the value. document = self._get_document_for_url( self._get_url_for_measurements() ) # gather the IDs for all measurement types measurement_ids = self._get_measurement_ids(document) # check if the measurement exists before going too far if measurement not in measurement_ids.keys(): raise ValueError( "Measurement '%s' does not exist." % measurement ) # build the update url. update_url = parse.urljoin( self.BASE_URL, 'measurements/save' ) # setup a dict for the post data = {} # here's where we need that required element data['authenticity_token'] = self._authenticity_token # Weight has it's own key value pair if measurement == 'Weight': data['weight[display_value]'] = value # the other measurements have generic names with # an incrementing numeric index. measurement_index = 0 # iterate all the measurement_ids for measurement_id in measurement_ids.keys(): # create the measurement_type[n] # key value pair n = str(measurement_index) meas_type = 'measurement_type[' + n + ']' meas_val = 'measurement_value[' + n + ']' data[meas_type] = measurement_ids[measurement_id] # and if it corresponds to the value we want to update if measurement == measurement_id: # create the measurement_value[n] # key value pair and assign it the value. data[meas_val] = value else: # otherwise, create the key value pair and leave it blank data[meas_val] = "" measurement_index += 1 # now post it. result = self.session.post( update_url, data=data ) # throw an error if it failed. if not result.ok: raise RuntimeError( "Unable to update measurement in MyFitnessPal: " "status code: {status}".format( status=result.status_code ) )
python
def set_measurements( self, measurement='Weight', value=None ): """ Sets measurement for today's date.""" if value is None: raise ValueError( "Cannot update blank value." ) # get the URL for the main check in page # this is left in because we need to parse # the 'measurement' name to set the value. document = self._get_document_for_url( self._get_url_for_measurements() ) # gather the IDs for all measurement types measurement_ids = self._get_measurement_ids(document) # check if the measurement exists before going too far if measurement not in measurement_ids.keys(): raise ValueError( "Measurement '%s' does not exist." % measurement ) # build the update url. update_url = parse.urljoin( self.BASE_URL, 'measurements/save' ) # setup a dict for the post data = {} # here's where we need that required element data['authenticity_token'] = self._authenticity_token # Weight has it's own key value pair if measurement == 'Weight': data['weight[display_value]'] = value # the other measurements have generic names with # an incrementing numeric index. measurement_index = 0 # iterate all the measurement_ids for measurement_id in measurement_ids.keys(): # create the measurement_type[n] # key value pair n = str(measurement_index) meas_type = 'measurement_type[' + n + ']' meas_val = 'measurement_value[' + n + ']' data[meas_type] = measurement_ids[measurement_id] # and if it corresponds to the value we want to update if measurement == measurement_id: # create the measurement_value[n] # key value pair and assign it the value. data[meas_val] = value else: # otherwise, create the key value pair and leave it blank data[meas_val] = "" measurement_index += 1 # now post it. result = self.session.post( update_url, data=data ) # throw an error if it failed. if not result.ok: raise RuntimeError( "Unable to update measurement in MyFitnessPal: " "status code: {status}".format( status=result.status_code ) )
[ "def", "set_measurements", "(", "self", ",", "measurement", "=", "'Weight'", ",", "value", "=", "None", ")", ":", "if", "value", "is", "None", ":", "raise", "ValueError", "(", "\"Cannot update blank value.\"", ")", "# get the URL for the main check in page", "# this...
Sets measurement for today's date.
[ "Sets", "measurement", "for", "today", "s", "date", "." ]
29aad88d31adc025eacaddd3390cb521b6012b73
https://github.com/coddingtonbear/python-myfitnesspal/blob/29aad88d31adc025eacaddd3390cb521b6012b73/myfitnesspal/client.py#L588-L667
train
229,606
coddingtonbear/python-myfitnesspal
myfitnesspal/client.py
Client.get_measurement_id_options
def get_measurement_id_options(self): """ Returns list of measurement choices.""" # get the URL for the main check in page document = self._get_document_for_url( self._get_url_for_measurements() ) # gather the IDs for all measurement types measurement_ids = self._get_measurement_ids(document) return measurement_ids
python
def get_measurement_id_options(self): """ Returns list of measurement choices.""" # get the URL for the main check in page document = self._get_document_for_url( self._get_url_for_measurements() ) # gather the IDs for all measurement types measurement_ids = self._get_measurement_ids(document) return measurement_ids
[ "def", "get_measurement_id_options", "(", "self", ")", ":", "# get the URL for the main check in page", "document", "=", "self", ".", "_get_document_for_url", "(", "self", ".", "_get_url_for_measurements", "(", ")", ")", "# gather the IDs for all measurement types", "measurem...
Returns list of measurement choices.
[ "Returns", "list", "of", "measurement", "choices", "." ]
29aad88d31adc025eacaddd3390cb521b6012b73
https://github.com/coddingtonbear/python-myfitnesspal/blob/29aad88d31adc025eacaddd3390cb521b6012b73/myfitnesspal/client.py#L709-L718
train
229,607
joerick/pyinstrument
pyinstrument/__main__.py
file_supports_color
def file_supports_color(file_obj): """ Returns True if the running system's terminal supports color. Borrowed from Django https://github.com/django/django/blob/master/django/core/management/color.py """ plat = sys.platform supported_platform = plat != 'Pocket PC' and (plat != 'win32' or 'ANSICON' in os.environ) is_a_tty = file_is_a_tty(file_obj) return (supported_platform and is_a_tty)
python
def file_supports_color(file_obj): """ Returns True if the running system's terminal supports color. Borrowed from Django https://github.com/django/django/blob/master/django/core/management/color.py """ plat = sys.platform supported_platform = plat != 'Pocket PC' and (plat != 'win32' or 'ANSICON' in os.environ) is_a_tty = file_is_a_tty(file_obj) return (supported_platform and is_a_tty)
[ "def", "file_supports_color", "(", "file_obj", ")", ":", "plat", "=", "sys", ".", "platform", "supported_platform", "=", "plat", "!=", "'Pocket PC'", "and", "(", "plat", "!=", "'win32'", "or", "'ANSICON'", "in", "os", ".", "environ", ")", "is_a_tty", "=", ...
Returns True if the running system's terminal supports color. Borrowed from Django https://github.com/django/django/blob/master/django/core/management/color.py
[ "Returns", "True", "if", "the", "running", "system", "s", "terminal", "supports", "color", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/__main__.py#L198-L211
train
229,608
joerick/pyinstrument
pyinstrument/__main__.py
load_report
def load_report(identifier=None): ''' Returns the session referred to by identifier ''' path = os.path.join( report_dir(), identifier + '.pyireport' ) return ProfilerSession.load(path)
python
def load_report(identifier=None): ''' Returns the session referred to by identifier ''' path = os.path.join( report_dir(), identifier + '.pyireport' ) return ProfilerSession.load(path)
[ "def", "load_report", "(", "identifier", "=", "None", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "report_dir", "(", ")", ",", "identifier", "+", "'.pyireport'", ")", "return", "ProfilerSession", ".", "load", "(", "path", ")" ]
Returns the session referred to by identifier
[ "Returns", "the", "session", "referred", "to", "by", "identifier" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/__main__.py#L245-L253
train
229,609
joerick/pyinstrument
pyinstrument/__main__.py
save_report
def save_report(session): ''' Saves the session to a temp file, and returns that path. Also prunes the number of reports to 10 so there aren't loads building up. ''' # prune this folder to contain the last 10 sessions previous_reports = glob.glob(os.path.join(report_dir(), '*.pyireport')) previous_reports.sort(reverse=True) while len(previous_reports) > 10: report_file = previous_reports.pop() os.remove(report_file) identifier = time.strftime('%Y-%m-%dT%H-%M-%S', time.localtime(session.start_time)) path = os.path.join( report_dir(), identifier + '.pyireport' ) session.save(path) return path, identifier
python
def save_report(session): ''' Saves the session to a temp file, and returns that path. Also prunes the number of reports to 10 so there aren't loads building up. ''' # prune this folder to contain the last 10 sessions previous_reports = glob.glob(os.path.join(report_dir(), '*.pyireport')) previous_reports.sort(reverse=True) while len(previous_reports) > 10: report_file = previous_reports.pop() os.remove(report_file) identifier = time.strftime('%Y-%m-%dT%H-%M-%S', time.localtime(session.start_time)) path = os.path.join( report_dir(), identifier + '.pyireport' ) session.save(path) return path, identifier
[ "def", "save_report", "(", "session", ")", ":", "# prune this folder to contain the last 10 sessions", "previous_reports", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "report_dir", "(", ")", ",", "'*.pyireport'", ")", ")", "previous_reports...
Saves the session to a temp file, and returns that path. Also prunes the number of reports to 10 so there aren't loads building up.
[ "Saves", "the", "session", "to", "a", "temp", "file", "and", "returns", "that", "path", ".", "Also", "prunes", "the", "number", "of", "reports", "to", "10", "so", "there", "aren", "t", "loads", "building", "up", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/__main__.py#L255-L274
train
229,610
joerick/pyinstrument
pyinstrument/session.py
ProfilerSession.root_frame
def root_frame(self, trim_stem=True): ''' Parses the internal frame records and returns a tree of Frame objects ''' root_frame = None frame_stack = [] for frame_tuple in self.frame_records: identifier_stack = frame_tuple[0] time = frame_tuple[1] # now we must create a stack of frame objects and assign this time to the leaf for stack_depth, frame_identifier in enumerate(identifier_stack): if stack_depth < len(frame_stack): if frame_identifier != frame_stack[stack_depth].identifier: # trim any frames after and including this one del frame_stack[stack_depth:] if stack_depth >= len(frame_stack): frame = Frame(frame_identifier) frame_stack.append(frame) if stack_depth == 0: # There should only be one root frame, as far as I know assert root_frame is None, ASSERTION_MESSAGE root_frame = frame else: parent = frame_stack[stack_depth-1] parent.add_child(frame) # trim any extra frames del frame_stack[stack_depth+1:] # pylint: disable=W0631 # assign the time to the final frame frame_stack[-1].add_child(SelfTimeFrame(self_time=time)) if root_frame is None: return None if trim_stem: root_frame = self._trim_stem(root_frame) return root_frame
python
def root_frame(self, trim_stem=True): ''' Parses the internal frame records and returns a tree of Frame objects ''' root_frame = None frame_stack = [] for frame_tuple in self.frame_records: identifier_stack = frame_tuple[0] time = frame_tuple[1] # now we must create a stack of frame objects and assign this time to the leaf for stack_depth, frame_identifier in enumerate(identifier_stack): if stack_depth < len(frame_stack): if frame_identifier != frame_stack[stack_depth].identifier: # trim any frames after and including this one del frame_stack[stack_depth:] if stack_depth >= len(frame_stack): frame = Frame(frame_identifier) frame_stack.append(frame) if stack_depth == 0: # There should only be one root frame, as far as I know assert root_frame is None, ASSERTION_MESSAGE root_frame = frame else: parent = frame_stack[stack_depth-1] parent.add_child(frame) # trim any extra frames del frame_stack[stack_depth+1:] # pylint: disable=W0631 # assign the time to the final frame frame_stack[-1].add_child(SelfTimeFrame(self_time=time)) if root_frame is None: return None if trim_stem: root_frame = self._trim_stem(root_frame) return root_frame
[ "def", "root_frame", "(", "self", ",", "trim_stem", "=", "True", ")", ":", "root_frame", "=", "None", "frame_stack", "=", "[", "]", "for", "frame_tuple", "in", "self", ".", "frame_records", ":", "identifier_stack", "=", "frame_tuple", "[", "0", "]", "time"...
Parses the internal frame records and returns a tree of Frame objects
[ "Parses", "the", "internal", "frame", "records", "and", "returns", "a", "tree", "of", "Frame", "objects" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/session.py#L52-L95
train
229,611
joerick/pyinstrument
pyinstrument/frame.py
BaseFrame.remove_from_parent
def remove_from_parent(self): ''' Removes this frame from its parent, and nulls the parent link ''' if self.parent: self.parent._children.remove(self) self.parent._invalidate_time_caches() self.parent = None
python
def remove_from_parent(self): ''' Removes this frame from its parent, and nulls the parent link ''' if self.parent: self.parent._children.remove(self) self.parent._invalidate_time_caches() self.parent = None
[ "def", "remove_from_parent", "(", "self", ")", ":", "if", "self", ".", "parent", ":", "self", ".", "parent", ".", "_children", ".", "remove", "(", "self", ")", "self", ".", "parent", ".", "_invalidate_time_caches", "(", ")", "self", ".", "parent", "=", ...
Removes this frame from its parent, and nulls the parent link
[ "Removes", "this", "frame", "from", "its", "parent", "and", "nulls", "the", "parent", "link" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L11-L18
train
229,612
joerick/pyinstrument
pyinstrument/frame.py
Frame.add_child
def add_child(self, frame, after=None): ''' Adds a child frame, updating the parent link. Optionally, insert the frame in a specific position by passing the frame to insert this one after. ''' frame.remove_from_parent() frame.parent = self if after is None: self._children.append(frame) else: index = self._children.index(after) + 1 self._children.insert(index, frame) self._invalidate_time_caches()
python
def add_child(self, frame, after=None): ''' Adds a child frame, updating the parent link. Optionally, insert the frame in a specific position by passing the frame to insert this one after. ''' frame.remove_from_parent() frame.parent = self if after is None: self._children.append(frame) else: index = self._children.index(after) + 1 self._children.insert(index, frame) self._invalidate_time_caches()
[ "def", "add_child", "(", "self", ",", "frame", ",", "after", "=", "None", ")", ":", "frame", ".", "remove_from_parent", "(", ")", "frame", ".", "parent", "=", "self", "if", "after", "is", "None", ":", "self", ".", "_children", ".", "append", "(", "fr...
Adds a child frame, updating the parent link. Optionally, insert the frame in a specific position by passing the frame to insert this one after.
[ "Adds", "a", "child", "frame", "updating", "the", "parent", "link", ".", "Optionally", "insert", "the", "frame", "in", "a", "specific", "position", "by", "passing", "the", "frame", "to", "insert", "this", "one", "after", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L99-L113
train
229,613
joerick/pyinstrument
pyinstrument/frame.py
Frame.add_children
def add_children(self, frames, after=None): ''' Convenience method to add multiple frames at once. ''' if after is not None: # if there's an 'after' parameter, add the frames in reverse so the order is # preserved. for frame in reversed(frames): self.add_child(frame, after=after) else: for frame in frames: self.add_child(frame)
python
def add_children(self, frames, after=None):
    """Add several child frames in one call, preserving their order."""
    if after is None:
        for frame in frames:
            self.add_child(frame)
    else:
        # inserting after a fixed anchor would reverse the order, so feed
        # the frames in reverse to keep the final ordering intact
        for frame in reversed(frames):
            self.add_child(frame, after=after)
[ "def", "add_children", "(", "self", ",", "frames", ",", "after", "=", "None", ")", ":", "if", "after", "is", "not", "None", ":", "# if there's an 'after' parameter, add the frames in reverse so the order is", "# preserved.", "for", "frame", "in", "reversed", "(", "f...
Convenience method to add multiple frames at once.
[ "Convenience", "method", "to", "add", "multiple", "frames", "at", "once", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L115-L126
train
229,614
joerick/pyinstrument
pyinstrument/frame.py
Frame.file_path_short
def file_path_short(self): """ Return the path resolved against the closest entry in sys.path """ if not hasattr(self, '_file_path_short'): if self.file_path: result = None for path in sys.path: # On Windows, if self.file_path and path are on different drives, relpath # will result in exception, because it cannot compute a relpath in this case. # The root cause is that on Windows, there is no root dir like '/' on Linux. try: candidate = os.path.relpath(self.file_path, path) except ValueError: continue if not result or (len(candidate.split(os.sep)) < len(result.split(os.sep))): result = candidate self._file_path_short = result else: self._file_path_short = None return self._file_path_short
python
def file_path_short(self):
    """Return ``self.file_path`` relative to the closest ``sys.path`` entry.

    The result (possibly None when there is no file path) is cached on the
    instance after the first computation.
    """
    if not hasattr(self, '_file_path_short'):
        if self.file_path:
            shortest = None
            for base in sys.path:
                # On Windows, relpath raises ValueError when the two paths
                # are on different drives (there is no common root), so
                # simply skip those sys.path entries.
                try:
                    relative = os.path.relpath(self.file_path, base)
                except ValueError:
                    continue
                if shortest is None or len(relative.split(os.sep)) < len(shortest.split(os.sep)):
                    shortest = relative
            self._file_path_short = shortest
        else:
            self._file_path_short = None
    return self._file_path_short
[ "def", "file_path_short", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_file_path_short'", ")", ":", "if", "self", ".", "file_path", ":", "result", "=", "None", "for", "path", "in", "sys", ".", "path", ":", "# On Windows, if self.file...
Return the path resolved against the closest entry in sys.path
[ "Return", "the", "path", "resolved", "against", "the", "closest", "entry", "in", "sys", ".", "path" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L151-L173
train
229,615
joerick/pyinstrument
pyinstrument/frame.py
FrameGroup.exit_frames
def exit_frames(self): ''' Returns a list of frames whose children include a frame outside of the group ''' if self._exit_frames is None: exit_frames = [] for frame in self.frames: if any(c.group != self for c in frame.children): exit_frames.append(frame) self._exit_frames = exit_frames return self._exit_frames
python
def exit_frames(self):
    """List the frames of this group that have a child outside the group.

    The result is computed lazily and cached in ``self._exit_frames``.
    """
    if self._exit_frames is None:
        self._exit_frames = [
            frame for frame in self.frames
            if any(child.group != self for child in frame.children)
        ]
    return self._exit_frames
[ "def", "exit_frames", "(", "self", ")", ":", "if", "self", ".", "_exit_frames", "is", "None", ":", "exit_frames", "=", "[", "]", "for", "frame", "in", "self", ".", "frames", ":", "if", "any", "(", "c", ".", "group", "!=", "self", "for", "c", "in", ...
Returns a list of frames whose children include a frame outside of the group
[ "Returns", "a", "list", "of", "frames", "whose", "children", "include", "a", "frame", "outside", "of", "the", "group" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L286-L297
train
229,616
joerick/pyinstrument
pyinstrument/profiler.py
Profiler.first_interesting_frame
def first_interesting_frame(self): """ Traverse down the frame hierarchy until a frame is found with more than one child """ root_frame = self.root_frame() frame = root_frame while len(frame.children) <= 1: if frame.children: frame = frame.children[0] else: # there are no branches return root_frame return frame
python
def first_interesting_frame(self):
    """Walk down the frame tree until a frame with multiple children appears.

    If the tree never branches, the root frame is returned instead.
    """
    root_frame = self.root_frame()
    frame = root_frame
    while len(frame.children) <= 1:
        if not frame.children:
            # reached a leaf without ever branching
            return root_frame
        frame = frame.children[0]
    return frame
[ "def", "first_interesting_frame", "(", "self", ")", ":", "root_frame", "=", "self", ".", "root_frame", "(", ")", "frame", "=", "root_frame", "while", "len", "(", "frame", ".", "children", ")", "<=", "1", ":", "if", "frame", ".", "children", ":", "frame",...
Traverse down the frame hierarchy until a frame is found with more than one child
[ "Traverse", "down", "the", "frame", "hierarchy", "until", "a", "frame", "is", "found", "with", "more", "than", "one", "child" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/profiler.py#L119-L133
train
229,617
joerick/pyinstrument
pyinstrument/processors.py
aggregate_repeated_calls
def aggregate_repeated_calls(frame, options): ''' Converts a timeline into a time-aggregate summary. Adds together calls along the same call stack, so that repeated calls appear as the same frame. Removes time-linearity - frames are sorted according to total time spent. Useful for outputs that display a summary of execution (e.g. text and html outputs) ''' if frame is None: return None children_by_identifier = {} # iterate over a copy of the children since it's going to mutate while we're iterating for child in frame.children: if child.identifier in children_by_identifier: aggregate_frame = children_by_identifier[child.identifier] # combine the two frames, putting the children and self_time into the aggregate frame. aggregate_frame.self_time += child.self_time if child.children: aggregate_frame.add_children(child.children) # remove this frame, it's been incorporated into aggregate_frame child.remove_from_parent() else: # never seen this identifier before. It becomes the aggregate frame. children_by_identifier[child.identifier] = child # recurse into the children for child in frame.children: aggregate_repeated_calls(child, options=options) # sort the children by time # it's okay to use the internal _children list, sinde we're not changing the tree # structure. frame._children.sort(key=methodcaller('time'), reverse=True) # pylint: disable=W0212 return frame
python
def aggregate_repeated_calls(frame, options):
    """Convert a timeline into a time-aggregate summary.

    Sibling frames that share the same identifier are merged into one frame
    (self_time summed, children re-parented), removing time-linearity.
    Children are then ordered by total time, most expensive first.  Useful
    for outputs that display an execution summary (e.g. text and html).
    """
    if frame is None:
        return None

    seen_by_identifier = {}
    # frame.children is iterated as a copy here, since merging removes
    # entries while we are looping
    for child in frame.children:
        aggregate = seen_by_identifier.get(child.identifier)
        if aggregate is not None:
            # fold this duplicate into the frame we saw earlier
            aggregate.self_time += child.self_time
            if child.children:
                aggregate.add_children(child.children)
            child.remove_from_parent()
        else:
            # first frame with this identifier becomes the aggregate
            seen_by_identifier[child.identifier] = child

    # aggregate each (possibly merged) subtree
    for child in frame.children:
        aggregate_repeated_calls(child, options=options)

    # sorting via the private list is acceptable since the tree structure
    # itself is untouched, only the ordering changes
    frame._children.sort(key=methodcaller('time'), reverse=True)  # pylint: disable=W0212

    return frame
[ "def", "aggregate_repeated_calls", "(", "frame", ",", "options", ")", ":", "if", "frame", "is", "None", ":", "return", "None", "children_by_identifier", "=", "{", "}", "# iterate over a copy of the children since it's going to mutate while we're iterating", "for", "child", ...
Converts a timeline into a time-aggregate summary. Adds together calls along the same call stack, so that repeated calls appear as the same frame. Removes time-linearity - frames are sorted according to total time spent. Useful for outputs that display a summary of execution (e.g. text and html outputs)
[ "Converts", "a", "timeline", "into", "a", "time", "-", "aggregate", "summary", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/processors.py#L31-L70
train
229,618
joerick/pyinstrument
pyinstrument/processors.py
merge_consecutive_self_time
def merge_consecutive_self_time(frame, options): ''' Combines consecutive 'self time' frames ''' if frame is None: return None previous_self_time_frame = None for child in frame.children: if isinstance(child, SelfTimeFrame): if previous_self_time_frame: # merge previous_self_time_frame.self_time += child.self_time child.remove_from_parent() else: # keep a reference, maybe it'll be added to on the next loop previous_self_time_frame = child else: previous_self_time_frame = None for child in frame.children: merge_consecutive_self_time(child, options=options) return frame
python
def merge_consecutive_self_time(frame, options):
    """Collapse runs of adjacent 'self time' frames into a single frame."""
    if frame is None:
        return None

    run_head = None
    for child in frame.children:
        if not isinstance(child, SelfTimeFrame):
            # a non-self-time frame breaks the current run
            run_head = None
        elif run_head is None:
            # start of a new run; later members get folded into this one
            run_head = child
        else:
            run_head.self_time += child.self_time
            child.remove_from_parent()

    for child in frame.children:
        merge_consecutive_self_time(child, options=options)

    return frame
[ "def", "merge_consecutive_self_time", "(", "frame", ",", "options", ")", ":", "if", "frame", "is", "None", ":", "return", "None", "previous_self_time_frame", "=", "None", "for", "child", "in", "frame", ".", "children", ":", "if", "isinstance", "(", "child", ...
Combines consecutive 'self time' frames
[ "Combines", "consecutive", "self", "time", "frames" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/processors.py#L101-L125
train
229,619
joerick/pyinstrument
pyinstrument/processors.py
remove_unnecessary_self_time_nodes
def remove_unnecessary_self_time_nodes(frame, options): ''' When a frame has only one child, and that is a self-time frame, remove that node, since it's unnecessary - it clutters the output and offers no additional information. ''' if frame is None: return None if len(frame.children) == 1 and isinstance(frame.children[0], SelfTimeFrame): child = frame.children[0] frame.self_time += child.self_time child.remove_from_parent() for child in frame.children: remove_unnecessary_self_time_nodes(child, options=options) return frame
python
def remove_unnecessary_self_time_nodes(frame, options):
    """Drop a lone self-time child, folding its time into the parent.

    A frame whose only child is a self-time frame carries no extra
    information — the child just clutters the output.
    """
    if frame is None:
        return None

    children = frame.children
    if len(children) == 1 and isinstance(children[0], SelfTimeFrame):
        only_child = children[0]
        frame.self_time += only_child.self_time
        only_child.remove_from_parent()

    for child in frame.children:
        remove_unnecessary_self_time_nodes(child, options=options)

    return frame
[ "def", "remove_unnecessary_self_time_nodes", "(", "frame", ",", "options", ")", ":", "if", "frame", "is", "None", ":", "return", "None", "if", "len", "(", "frame", ".", "children", ")", "==", "1", "and", "isinstance", "(", "frame", ".", "children", "[", ...
When a frame has only one child, and that is a self-time frame, remove that node, since it's unnecessary - it clutters the output and offers no additional information.
[ "When", "a", "frame", "has", "only", "one", "child", "and", "that", "is", "a", "self", "-", "time", "frame", "remove", "that", "node", "since", "it", "s", "unnecessary", "-", "it", "clutters", "the", "output", "and", "offers", "no", "additional", "inform...
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/processors.py#L128-L144
train
229,620
joerick/pyinstrument
pyinstrument/renderers/html.py
HTMLRenderer.open_in_browser
def open_in_browser(self, session, output_filename=None): """ Open the rendered HTML in a webbrowser. If output_filename=None (the default), a tempfile is used. The filename of the HTML file is returned. """ if output_filename is None: output_file = tempfile.NamedTemporaryFile(suffix='.html', delete=False) output_filename = output_file.name with codecs.getwriter('utf-8')(output_file) as f: f.write(self.render(session)) else: with codecs.open(output_filename, 'w', 'utf-8') as f: f.write(self.render(session)) from pyinstrument.vendor.six.moves import urllib url = urllib.parse.urlunparse(('file', '', output_filename, '', '', '')) webbrowser.open(url) return output_filename
python
def open_in_browser(self, session, output_filename=None):
    """Render *session* to HTML and open it in the default web browser.

    If *output_filename* is None (the default), the HTML is written to a
    temporary file.  Returns the filename that was written.
    """
    if output_filename is None:
        # delete=False so the file survives for the browser to load it
        output_file = tempfile.NamedTemporaryFile(suffix='.html', delete=False)
        output_filename = output_file.name
        writer = codecs.getwriter('utf-8')(output_file)
    else:
        writer = codecs.open(output_filename, 'w', 'utf-8')

    with writer as f:
        f.write(self.render(session))

    from pyinstrument.vendor.six.moves import urllib
    url = urllib.parse.urlunparse(('file', '', output_filename, '', '', ''))
    webbrowser.open(url)
    return output_filename
[ "def", "open_in_browser", "(", "self", ",", "session", ",", "output_filename", "=", "None", ")", ":", "if", "output_filename", "is", "None", ":", "output_file", "=", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.html'", ",", "delete", "=", "F...
Open the rendered HTML in a webbrowser. If output_filename=None (the default), a tempfile is used. The filename of the HTML file is returned.
[ "Open", "the", "rendered", "HTML", "in", "a", "webbrowser", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/renderers/html.py#L43-L64
train
229,621
joerick/pyinstrument
setup.py
BuildPyCommand.run
def run(self): '''compile the JS, then run superclass implementation''' if subprocess.call(['npm', '--version']) != 0: raise RuntimeError('npm is required to build the HTML renderer.') self.check_call(['npm', 'install'], cwd=HTML_RENDERER_DIR) self.check_call(['npm', 'run', 'build'], cwd=HTML_RENDERER_DIR) self.copy_file(HTML_RENDERER_DIR+'/dist/js/app.js', 'pyinstrument/renderers/html_resources/app.js') setuptools.command.build_py.build_py.run(self)
python
def run(self):
    '''Build the JS bundle with npm, then defer to the stock build_py.'''
    if subprocess.call(['npm', '--version']) != 0:
        raise RuntimeError('npm is required to build the HTML renderer.')

    self.check_call(['npm', 'install'], cwd=HTML_RENDERER_DIR)
    self.check_call(['npm', 'run', 'build'], cwd=HTML_RENDERER_DIR)
    self.copy_file(
        HTML_RENDERER_DIR + '/dist/js/app.js',
        'pyinstrument/renderers/html_resources/app.js',
    )

    setuptools.command.build_py.build_py.run(self)
[ "def", "run", "(", "self", ")", ":", "if", "subprocess", ".", "call", "(", "[", "'npm'", ",", "'--version'", "]", ")", "!=", "0", ":", "raise", "RuntimeError", "(", "'npm is required to build the HTML renderer.'", ")", "self", ".", "check_call", "(", "[", ...
compile the JS, then run superclass implementation
[ "compile", "the", "JS", "then", "run", "superclass", "implementation" ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/setup.py#L19-L30
train
229,622
joerick/pyinstrument
pyinstrument/util.py
deprecated
def deprecated(func, *args, **kwargs): ''' Marks a function as deprecated. ''' warnings.warn( '{} is deprecated and should no longer be used.'.format(func), DeprecationWarning, stacklevel=3 ) return func(*args, **kwargs)
python
def deprecated(func, *args, **kwargs):
    '''Call *func* after emitting a DeprecationWarning about its use.'''
    message = '{} is deprecated and should no longer be used.'.format(func)
    # stacklevel=3 points the warning at the original caller
    warnings.warn(message, DeprecationWarning, stacklevel=3)
    return func(*args, **kwargs)
[ "def", "deprecated", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "warnings", ".", "warn", "(", "'{} is deprecated and should no longer be used.'", ".", "format", "(", "func", ")", ",", "DeprecationWarning", ",", "stacklevel", "=", "3", ...
Marks a function as deprecated.
[ "Marks", "a", "function", "as", "deprecated", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/util.py#L18-L25
train
229,623
joerick/pyinstrument
pyinstrument/util.py
deprecated_option
def deprecated_option(option_name, message=''): ''' Marks an option as deprecated. ''' def caller(func, *args, **kwargs): if option_name in kwargs: warnings.warn( '{} is deprecated. {}'.format(option_name, message), DeprecationWarning, stacklevel=3 ) return func(*args, **kwargs) return decorator(caller)
python
def deprecated_option(option_name, message=''):
    '''Decorator factory that warns whenever *option_name* is passed as a kwarg.'''
    warning_text = '{} is deprecated. {}'.format(option_name, message)

    def caller(func, *args, **kwargs):
        if option_name in kwargs:
            # stacklevel=3 points the warning at the original caller
            warnings.warn(warning_text, DeprecationWarning, stacklevel=3)
        return func(*args, **kwargs)

    return decorator(caller)
[ "def", "deprecated_option", "(", "option_name", ",", "message", "=", "''", ")", ":", "def", "caller", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "option_name", "in", "kwargs", ":", "warnings", ".", "warn", "(", "'{} is depr...
Marks an option as deprecated.
[ "Marks", "an", "option", "as", "deprecated", "." ]
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/util.py#L27-L38
train
229,624
jrief/django-angular
djng/app_settings.py
AppSettings.THUMBNAIL_OPTIONS
def THUMBNAIL_OPTIONS(self): """ Set the size as a 2-tuple for thumbnailed images after uploading them. """ from django.core.exceptions import ImproperlyConfigured size = self._setting('DJNG_THUMBNAIL_SIZE', (200, 200)) if not (isinstance(size, (list, tuple)) and len(size) == 2 and isinstance(size[0], int) and isinstance(size[1], int)): raise ImproperlyConfigured("'DJNG_THUMBNAIL_SIZE' must be a 2-tuple of integers.") return {'crop': True, 'size': size}
python
def THUMBNAIL_OPTIONS(self):
    """Crop/size options for thumbnailing uploaded images.

    Reads the ``DJNG_THUMBNAIL_SIZE`` setting (default ``(200, 200)``),
    which must be a 2-tuple of integers.
    """
    from django.core.exceptions import ImproperlyConfigured

    size = self._setting('DJNG_THUMBNAIL_SIZE', (200, 200))
    valid = (
        isinstance(size, (list, tuple))
        and len(size) == 2
        and isinstance(size[0], int)
        and isinstance(size[1], int)
    )
    if not valid:
        raise ImproperlyConfigured("'DJNG_THUMBNAIL_SIZE' must be a 2-tuple of integers.")
    return {'crop': True, 'size': size}
[ "def", "THUMBNAIL_OPTIONS", "(", "self", ")", ":", "from", "django", ".", "core", ".", "exceptions", "import", "ImproperlyConfigured", "size", "=", "self", ".", "_setting", "(", "'DJNG_THUMBNAIL_SIZE'", ",", "(", "200", ",", "200", ")", ")", "if", "not", "...
Set the size as a 2-tuple for thumbnailed images after uploading them.
[ "Set", "the", "size", "as", "a", "2", "-", "tuple", "for", "thumbnailed", "images", "after", "uploading", "them", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/app_settings.py#L20-L29
train
229,625
jrief/django-angular
djng/forms/angular_base.py
NgWidgetMixin.get_context
def get_context(self, name, value, attrs): """ Some widgets require a modified rendering context, if they contain angular directives. """ context = super(NgWidgetMixin, self).get_context(name, value, attrs) if callable(getattr(self._field, 'update_widget_rendering_context', None)): self._field.update_widget_rendering_context(context) return context
python
def get_context(self, name, value, attrs):
    """Build the widget rendering context, letting the field adjust it.

    Fields containing angular directives can expose an
    ``update_widget_rendering_context`` method to rework the context.
    """
    context = super(NgWidgetMixin, self).get_context(name, value, attrs)
    updater = getattr(self._field, 'update_widget_rendering_context', None)
    if callable(updater):
        self._field.update_widget_rendering_context(context)
    return context
[ "def", "get_context", "(", "self", ",", "name", ",", "value", ",", "attrs", ")", ":", "context", "=", "super", "(", "NgWidgetMixin", ",", "self", ")", ".", "get_context", "(", "name", ",", "value", ",", "attrs", ")", "if", "callable", "(", "getattr", ...
Some widgets require a modified rendering context, if they contain angular directives.
[ "Some", "widgets", "require", "a", "modified", "rendering", "context", "if", "they", "contain", "angular", "directives", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L156-L163
train
229,626
jrief/django-angular
djng/forms/angular_base.py
NgBoundField.errors
def errors(self): """ Returns a TupleErrorList for this field. This overloaded method adds additional error lists to the errors as detected by the form validator. """ if not hasattr(self, '_errors_cache'): self._errors_cache = self.form.get_field_errors(self) return self._errors_cache
python
def errors(self):
    """Return a TupleErrorList for this field.

    Overloaded to include the additional error lists detected by the form
    validator; the result is cached on the bound field after the first call.
    """
    try:
        return self._errors_cache
    except AttributeError:
        self._errors_cache = self.form.get_field_errors(self)
        return self._errors_cache
[ "def", "errors", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_errors_cache'", ")", ":", "self", ".", "_errors_cache", "=", "self", ".", "form", ".", "get_field_errors", "(", "self", ")", "return", "self", ".", "_errors_cache" ]
Returns a TupleErrorList for this field. This overloaded method adds additional error lists to the errors as detected by the form validator.
[ "Returns", "a", "TupleErrorList", "for", "this", "field", ".", "This", "overloaded", "method", "adds", "additional", "error", "lists", "to", "the", "errors", "as", "detected", "by", "the", "form", "validator", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L168-L175
train
229,627
jrief/django-angular
djng/forms/angular_base.py
NgBoundField.css_classes
def css_classes(self, extra_classes=None): """ Returns a string of space-separated CSS classes for the wrapping element of this input field. """ if hasattr(extra_classes, 'split'): extra_classes = extra_classes.split() extra_classes = set(extra_classes or []) # field_css_classes is an optional member of a Form optimized for django-angular field_css_classes = getattr(self.form, 'field_css_classes', None) if hasattr(field_css_classes, 'split'): extra_classes.update(field_css_classes.split()) elif isinstance(field_css_classes, (list, tuple)): extra_classes.update(field_css_classes) elif isinstance(field_css_classes, dict): extra_field_classes = [] for key in ('*', self.name): css_classes = field_css_classes.get(key) if hasattr(css_classes, 'split'): extra_field_classes = css_classes.split() elif isinstance(css_classes, (list, tuple)): if '__default__' in css_classes: css_classes.remove('__default__') extra_field_classes.extend(css_classes) else: extra_field_classes = css_classes extra_classes.update(extra_field_classes) return super(NgBoundField, self).css_classes(extra_classes)
python
def css_classes(self, extra_classes=None):
    """
    Return a string of space-separated CSS classes for the wrapping element
    of this input field.

    Besides the classes passed in ``extra_classes``, this honours the
    optional ``field_css_classes`` attribute on the form, which may be a
    string, a list/tuple, or a dict mapping field names (and the wildcard
    ``'*'``) to classes.  Inside a dict entry, the marker ``'__default__'``
    extends the previously collected classes instead of replacing them.
    """
    if hasattr(extra_classes, 'split'):
        extra_classes = extra_classes.split()
    extra_classes = set(extra_classes or [])
    # field_css_classes is an optional member of a Form optimized for django-angular
    field_css_classes = getattr(self.form, 'field_css_classes', None)
    if hasattr(field_css_classes, 'split'):
        extra_classes.update(field_css_classes.split())
    elif isinstance(field_css_classes, (list, tuple)):
        extra_classes.update(field_css_classes)
    elif isinstance(field_css_classes, dict):
        extra_field_classes = []
        for key in ('*', self.name):
            css_classes = field_css_classes.get(key)
            if hasattr(css_classes, 'split'):
                extra_field_classes = css_classes.split()
            elif isinstance(css_classes, (list, tuple)):
                if '__default__' in css_classes:
                    # BUGFIX: filter on a copy instead of list.remove(), which
                    # mutated the (usually class-level) field_css_classes dict
                    # in place, stripping the marker after the first render
                    extra_field_classes.extend(
                        cls for cls in css_classes if cls != '__default__')
                else:
                    extra_field_classes = css_classes
        extra_classes.update(extra_field_classes)
    return super(NgBoundField, self).css_classes(extra_classes)
[ "def", "css_classes", "(", "self", ",", "extra_classes", "=", "None", ")", ":", "if", "hasattr", "(", "extra_classes", ",", "'split'", ")", ":", "extra_classes", "=", "extra_classes", ".", "split", "(", ")", "extra_classes", "=", "set", "(", "extra_classes",...
Returns a string of space-separated CSS classes for the wrapping element of this input field.
[ "Returns", "a", "string", "of", "space", "-", "separated", "CSS", "classes", "for", "the", "wrapping", "element", "of", "this", "input", "field", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L177-L203
train
229,628
jrief/django-angular
djng/forms/angular_base.py
NgFormBaseMixin.get_field_errors
def get_field_errors(self, field): """ Return server side errors. Shall be overridden by derived forms to add their extra errors for AngularJS. """ identifier = format_html('{0}[\'{1}\']', self.form_name, field.name) errors = self.errors.get(field.html_name, []) return self.error_class([SafeTuple( (identifier, self.field_error_css_classes, '$pristine', '$pristine', 'invalid', e)) for e in errors])
python
def get_field_errors(self, field):
    """Collect the server-side errors for *field* as a TupleErrorList.

    Derived forms may override this to append their own AngularJS-specific
    error entries.
    """
    identifier = format_html('{0}[\'{1}\']', self.form_name, field.name)
    field_errors = self.errors.get(field.html_name, [])
    return self.error_class([
        SafeTuple((identifier, self.field_error_css_classes,
                   '$pristine', '$pristine', 'invalid', e))
        for e in field_errors
    ])
[ "def", "get_field_errors", "(", "self", ",", "field", ")", ":", "identifier", "=", "format_html", "(", "'{0}[\\'{1}\\']'", ",", "self", ".", "form_name", ",", "field", ".", "name", ")", "errors", "=", "self", ".", "errors", ".", "get", "(", "field", ".",...
Return server side errors. Shall be overridden by derived forms to add their extra errors for AngularJS.
[ "Return", "server", "side", "errors", ".", "Shall", "be", "overridden", "by", "derived", "forms", "to", "add", "their", "extra", "errors", "for", "AngularJS", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L330-L338
train
229,629
jrief/django-angular
djng/forms/angular_base.py
NgFormBaseMixin.update_widget_attrs
def update_widget_attrs(self, bound_field, attrs): """ Updated the widget attributes which shall be added to the widget when rendering this field. """ if bound_field.field.has_subwidgets() is False: widget_classes = getattr(self, 'widget_css_classes', None) if widget_classes: if 'class' in attrs: attrs['class'] += ' ' + widget_classes else: attrs.update({'class': widget_classes}) return attrs
python
def update_widget_attrs(self, bound_field, attrs):
    """Merge the form's widget CSS classes into *attrs* and return it.

    Only applies to widgets without subwidgets; the configured
    ``widget_css_classes`` are appended to any existing ``class`` entry.
    """
    if bound_field.field.has_subwidgets() is False:
        widget_classes = getattr(self, 'widget_css_classes', None)
        if widget_classes:
            if 'class' in attrs:
                attrs['class'] += ' ' + widget_classes
            else:
                attrs['class'] = widget_classes
    return attrs
[ "def", "update_widget_attrs", "(", "self", ",", "bound_field", ",", "attrs", ")", ":", "if", "bound_field", ".", "field", ".", "has_subwidgets", "(", ")", "is", "False", ":", "widget_classes", "=", "getattr", "(", "self", ",", "'widget_css_classes'", ",", "N...
Updated the widget attributes which shall be added to the widget when rendering this field.
[ "Updated", "the", "widget", "attributes", "which", "shall", "be", "added", "to", "the", "widget", "when", "rendering", "this", "field", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L354-L365
train
229,630
jrief/django-angular
djng/forms/angular_base.py
NgFormBaseMixin.rectify_multipart_form_data
def rectify_multipart_form_data(self, data): """ If a widget was converted and the Form data was submitted through a multipart request, then these data fields must be converted to suit the Django Form validation """ for name, field in self.base_fields.items(): try: field.implode_multi_values(name, data) except AttributeError: pass return data
python
def rectify_multipart_form_data(self, data):
    """Normalize multipart POST data for converted widgets.

    Fields providing ``implode_multi_values`` get a chance to merge their
    multiple submitted values back into the shape Django's form validation
    expects; fields without the hook are left untouched.
    """
    for field_name, field in self.base_fields.items():
        try:
            field.implode_multi_values(field_name, data)
        except AttributeError:
            # most fields don't implement the hook; skip them
            pass
    return data
[ "def", "rectify_multipart_form_data", "(", "self", ",", "data", ")", ":", "for", "name", ",", "field", "in", "self", ".", "base_fields", ".", "items", "(", ")", ":", "try", ":", "field", ".", "implode_multi_values", "(", "name", ",", "data", ")", "except...
If a widget was converted and the Form data was submitted through a multipart request, then these data fields must be converted to suit the Django Form validation
[ "If", "a", "widget", "was", "converted", "and", "the", "Form", "data", "was", "submitted", "through", "a", "multipart", "request", "then", "these", "data", "fields", "must", "be", "converted", "to", "suit", "the", "Django", "Form", "validation" ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L380-L390
train
229,631
jrief/django-angular
djng/forms/angular_base.py
NgFormBaseMixin.rectify_ajax_form_data
def rectify_ajax_form_data(self, data): """ If a widget was converted and the Form data was submitted through an Ajax request, then these data fields must be converted to suit the Django Form validation """ for name, field in self.base_fields.items(): try: data[name] = field.convert_ajax_data(data.get(name, {})) except AttributeError: pass return data
python
def rectify_ajax_form_data(self, data): """ If a widget was converted and the Form data was submitted through an Ajax request, then these data fields must be converted to suit the Django Form validation """ for name, field in self.base_fields.items(): try: data[name] = field.convert_ajax_data(data.get(name, {})) except AttributeError: pass return data
[ "def", "rectify_ajax_form_data", "(", "self", ",", "data", ")", ":", "for", "name", ",", "field", "in", "self", ".", "base_fields", ".", "items", "(", ")", ":", "try", ":", "data", "[", "name", "]", "=", "field", ".", "convert_ajax_data", "(", "data", ...
If a widget was converted and the Form data was submitted through an Ajax request, then these data fields must be converted to suit the Django Form validation
[ "If", "a", "widget", "was", "converted", "and", "the", "Form", "data", "was", "submitted", "through", "an", "Ajax", "request", "then", "these", "data", "fields", "must", "be", "converted", "to", "suit", "the", "Django", "Form", "validation" ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L392-L402
train
229,632
jrief/django-angular
djng/templatetags/djng_tags.py
djng_locale_script
def djng_locale_script(context, default_language='en'): """ Returns a script tag for including the proper locale script in any HTML page. This tag determines the current language with its locale. Usage: <script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script %}"></script> or, if used with a default language: <script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script 'de' %}"></script> """ language = get_language_from_request(context['request']) if not language: language = default_language return format_html('angular-locale_{}.js', language.lower())
python
def djng_locale_script(context, default_language='en'): """ Returns a script tag for including the proper locale script in any HTML page. This tag determines the current language with its locale. Usage: <script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script %}"></script> or, if used with a default language: <script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script 'de' %}"></script> """ language = get_language_from_request(context['request']) if not language: language = default_language return format_html('angular-locale_{}.js', language.lower())
[ "def", "djng_locale_script", "(", "context", ",", "default_language", "=", "'en'", ")", ":", "language", "=", "get_language_from_request", "(", "context", "[", "'request'", "]", ")", "if", "not", "language", ":", "language", "=", "default_language", "return", "f...
Returns a script tag for including the proper locale script in any HTML page. This tag determines the current language with its locale. Usage: <script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script %}"></script> or, if used with a default language: <script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script 'de' %}"></script>
[ "Returns", "a", "script", "tag", "for", "including", "the", "proper", "locale", "script", "in", "any", "HTML", "page", ".", "This", "tag", "determines", "the", "current", "language", "with", "its", "locale", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/templatetags/djng_tags.py#L101-L114
train
229,633
jrief/django-angular
djng/forms/fields.py
DefaultFieldMixin.update_widget_attrs
def update_widget_attrs(self, bound_field, attrs): """ Update the dictionary of attributes used while rendering the input widget """ bound_field.form.update_widget_attrs(bound_field, attrs) widget_classes = self.widget.attrs.get('class', None) if widget_classes: if 'class' in attrs: attrs['class'] += ' ' + widget_classes else: attrs.update({'class': widget_classes}) return attrs
python
def update_widget_attrs(self, bound_field, attrs): """ Update the dictionary of attributes used while rendering the input widget """ bound_field.form.update_widget_attrs(bound_field, attrs) widget_classes = self.widget.attrs.get('class', None) if widget_classes: if 'class' in attrs: attrs['class'] += ' ' + widget_classes else: attrs.update({'class': widget_classes}) return attrs
[ "def", "update_widget_attrs", "(", "self", ",", "bound_field", ",", "attrs", ")", ":", "bound_field", ".", "form", ".", "update_widget_attrs", "(", "bound_field", ",", "attrs", ")", "widget_classes", "=", "self", ".", "widget", ".", "attrs", ".", "get", "(",...
Update the dictionary of attributes used while rendering the input widget
[ "Update", "the", "dictionary", "of", "attributes", "used", "while", "rendering", "the", "input", "widget" ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/fields.py#L100-L111
train
229,634
jrief/django-angular
djng/forms/fields.py
MultipleChoiceField.implode_multi_values
def implode_multi_values(self, name, data): """ Due to the way Angular organizes it model, when Form data is sent via a POST request, then for this kind of widget, the posted data must to be converted into a format suitable for Django's Form validation. """ mkeys = [k for k in data.keys() if k.startswith(name + '.')] mvls = [data.pop(k)[0] for k in mkeys] if mvls: data.setlist(name, mvls)
python
def implode_multi_values(self, name, data): """ Due to the way Angular organizes it model, when Form data is sent via a POST request, then for this kind of widget, the posted data must to be converted into a format suitable for Django's Form validation. """ mkeys = [k for k in data.keys() if k.startswith(name + '.')] mvls = [data.pop(k)[0] for k in mkeys] if mvls: data.setlist(name, mvls)
[ "def", "implode_multi_values", "(", "self", ",", "name", ",", "data", ")", ":", "mkeys", "=", "[", "k", "for", "k", "in", "data", ".", "keys", "(", ")", "if", "k", ".", "startswith", "(", "name", "+", "'.'", ")", "]", "mvls", "=", "[", "data", ...
Due to the way Angular organizes it model, when Form data is sent via a POST request, then for this kind of widget, the posted data must to be converted into a format suitable for Django's Form validation.
[ "Due", "to", "the", "way", "Angular", "organizes", "it", "model", "when", "Form", "data", "is", "sent", "via", "a", "POST", "request", "then", "for", "this", "kind", "of", "widget", "the", "posted", "data", "must", "to", "be", "converted", "into", "a", ...
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/fields.py#L355-L364
train
229,635
jrief/django-angular
djng/forms/fields.py
MultipleChoiceField.convert_ajax_data
def convert_ajax_data(self, field_data): """ Due to the way Angular organizes it model, when this Form data is sent using Ajax, then for this kind of widget, the sent data has to be converted into a format suitable for Django's Form validation. """ data = [key for key, val in field_data.items() if val] return data
python
def convert_ajax_data(self, field_data): """ Due to the way Angular organizes it model, when this Form data is sent using Ajax, then for this kind of widget, the sent data has to be converted into a format suitable for Django's Form validation. """ data = [key for key, val in field_data.items() if val] return data
[ "def", "convert_ajax_data", "(", "self", ",", "field_data", ")", ":", "data", "=", "[", "key", "for", "key", ",", "val", "in", "field_data", ".", "items", "(", ")", "if", "val", "]", "return", "data" ]
Due to the way Angular organizes it model, when this Form data is sent using Ajax, then for this kind of widget, the sent data has to be converted into a format suitable for Django's Form validation.
[ "Due", "to", "the", "way", "Angular", "organizes", "it", "model", "when", "this", "Form", "data", "is", "sent", "using", "Ajax", "then", "for", "this", "kind", "of", "widget", "the", "sent", "data", "has", "to", "be", "converted", "into", "a", "format", ...
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/fields.py#L366-L373
train
229,636
jrief/django-angular
djng/middleware.py
AngularUrlMiddleware.process_request
def process_request(self, request): """ Reads url name, args, kwargs from GET parameters, reverses the url and resolves view function Returns the result of resolved view function, called with provided args and kwargs Since the view function is called directly, it isn't ran through middlewares, so the middlewares must be added manually The final result is exactly the same as if the request was for the resolved view. Parametrized urls: djangoUrl.reverse can be used with parametrized urls of $resource In that case the reverse url is something like: /angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=:id $resource can either replace the ':id' part with say 2 and we can proceed as usual, reverse with reverse('orders', kwargs={'id': 2}). If it's not replaced we want to reverse to url we get a request to url '/angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=' which gives a request.GET QueryDict {u'djng_url_name': [u'orders'], u'djng_url_kwarg_id': [u'']} In that case we want to ignore the id param and only reverse to url with name 'orders' and no params. So we ignore args and kwargs that are empty strings. 
""" if request.path == self.ANGULAR_REVERSE: url_name = request.GET.get('djng_url_name') url_args = request.GET.getlist('djng_url_args', []) url_kwargs = {} # Remove falsy values (empty strings) url_args = filter(lambda x: x, url_args) # Read kwargs for param in request.GET: if param.startswith('djng_url_kwarg_'): # Ignore kwargs that are empty strings if request.GET[param]: url_kwargs[param[15:]] = request.GET[param] # [15:] to remove 'djng_url_kwarg' prefix url = unquote(reverse(url_name, args=url_args, kwargs=url_kwargs)) assert not url.startswith(self.ANGULAR_REVERSE), "Prevent recursive requests" # rebuild the request object with a different environ request.path = request.path_info = url request.environ['PATH_INFO'] = url query = request.GET.copy() for key in request.GET: if key.startswith('djng_url'): query.pop(key, None) if six.PY3: request.environ['QUERY_STRING'] = query.urlencode() else: request.environ['QUERY_STRING'] = query.urlencode().encode('utf-8') # Reconstruct GET QueryList in the same way WSGIRequest.GET function works request.GET = http.QueryDict(request.environ['QUERY_STRING'])
python
def process_request(self, request): """ Reads url name, args, kwargs from GET parameters, reverses the url and resolves view function Returns the result of resolved view function, called with provided args and kwargs Since the view function is called directly, it isn't ran through middlewares, so the middlewares must be added manually The final result is exactly the same as if the request was for the resolved view. Parametrized urls: djangoUrl.reverse can be used with parametrized urls of $resource In that case the reverse url is something like: /angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=:id $resource can either replace the ':id' part with say 2 and we can proceed as usual, reverse with reverse('orders', kwargs={'id': 2}). If it's not replaced we want to reverse to url we get a request to url '/angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=' which gives a request.GET QueryDict {u'djng_url_name': [u'orders'], u'djng_url_kwarg_id': [u'']} In that case we want to ignore the id param and only reverse to url with name 'orders' and no params. So we ignore args and kwargs that are empty strings. 
""" if request.path == self.ANGULAR_REVERSE: url_name = request.GET.get('djng_url_name') url_args = request.GET.getlist('djng_url_args', []) url_kwargs = {} # Remove falsy values (empty strings) url_args = filter(lambda x: x, url_args) # Read kwargs for param in request.GET: if param.startswith('djng_url_kwarg_'): # Ignore kwargs that are empty strings if request.GET[param]: url_kwargs[param[15:]] = request.GET[param] # [15:] to remove 'djng_url_kwarg' prefix url = unquote(reverse(url_name, args=url_args, kwargs=url_kwargs)) assert not url.startswith(self.ANGULAR_REVERSE), "Prevent recursive requests" # rebuild the request object with a different environ request.path = request.path_info = url request.environ['PATH_INFO'] = url query = request.GET.copy() for key in request.GET: if key.startswith('djng_url'): query.pop(key, None) if six.PY3: request.environ['QUERY_STRING'] = query.urlencode() else: request.environ['QUERY_STRING'] = query.urlencode().encode('utf-8') # Reconstruct GET QueryList in the same way WSGIRequest.GET function works request.GET = http.QueryDict(request.environ['QUERY_STRING'])
[ "def", "process_request", "(", "self", ",", "request", ")", ":", "if", "request", ".", "path", "==", "self", ".", "ANGULAR_REVERSE", ":", "url_name", "=", "request", ".", "GET", ".", "get", "(", "'djng_url_name'", ")", "url_args", "=", "request", ".", "G...
Reads url name, args, kwargs from GET parameters, reverses the url and resolves view function Returns the result of resolved view function, called with provided args and kwargs Since the view function is called directly, it isn't ran through middlewares, so the middlewares must be added manually The final result is exactly the same as if the request was for the resolved view. Parametrized urls: djangoUrl.reverse can be used with parametrized urls of $resource In that case the reverse url is something like: /angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=:id $resource can either replace the ':id' part with say 2 and we can proceed as usual, reverse with reverse('orders', kwargs={'id': 2}). If it's not replaced we want to reverse to url we get a request to url '/angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=' which gives a request.GET QueryDict {u'djng_url_name': [u'orders'], u'djng_url_kwarg_id': [u'']} In that case we want to ignore the id param and only reverse to url with name 'orders' and no params. So we ignore args and kwargs that are empty strings.
[ "Reads", "url", "name", "args", "kwargs", "from", "GET", "parameters", "reverses", "the", "url", "and", "resolves", "view", "function", "Returns", "the", "result", "of", "resolved", "view", "function", "called", "with", "provided", "args", "and", "kwargs", "Si...
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/middleware.py#L21-L73
train
229,637
jrief/django-angular
djng/views/crud.py
NgCRUDView.ng_delete
def ng_delete(self, request, *args, **kwargs): """ Delete object and return it's data in JSON encoding The response is build before the object is actually deleted so that we can still retrieve a serialization in the response even with a m2m relationship """ if 'pk' not in request.GET: raise NgMissingParameterError("Object id is required to delete.") obj = self.get_object() response = self.build_json_response(obj) obj.delete() return response
python
def ng_delete(self, request, *args, **kwargs): """ Delete object and return it's data in JSON encoding The response is build before the object is actually deleted so that we can still retrieve a serialization in the response even with a m2m relationship """ if 'pk' not in request.GET: raise NgMissingParameterError("Object id is required to delete.") obj = self.get_object() response = self.build_json_response(obj) obj.delete() return response
[ "def", "ng_delete", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'pk'", "not", "in", "request", ".", "GET", ":", "raise", "NgMissingParameterError", "(", "\"Object id is required to delete.\"", ")", "obj", "=", "...
Delete object and return it's data in JSON encoding The response is build before the object is actually deleted so that we can still retrieve a serialization in the response even with a m2m relationship
[ "Delete", "object", "and", "return", "it", "s", "data", "in", "JSON", "encoding", "The", "response", "is", "build", "before", "the", "object", "is", "actually", "deleted", "so", "that", "we", "can", "still", "retrieve", "a", "serialization", "in", "the", "...
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/views/crud.py#L170-L183
train
229,638
jrief/django-angular
djng/forms/angular_model.py
NgModelFormMixin._post_clean
def _post_clean(self): """ Rewrite the error dictionary, so that its keys correspond to the model fields. """ super(NgModelFormMixin, self)._post_clean() if self._errors and self.prefix: self._errors = ErrorDict((self.add_prefix(name), value) for name, value in self._errors.items())
python
def _post_clean(self): """ Rewrite the error dictionary, so that its keys correspond to the model fields. """ super(NgModelFormMixin, self)._post_clean() if self._errors and self.prefix: self._errors = ErrorDict((self.add_prefix(name), value) for name, value in self._errors.items())
[ "def", "_post_clean", "(", "self", ")", ":", "super", "(", "NgModelFormMixin", ",", "self", ")", ".", "_post_clean", "(", ")", "if", "self", ".", "_errors", "and", "self", ".", "prefix", ":", "self", ".", "_errors", "=", "ErrorDict", "(", "(", "self", ...
Rewrite the error dictionary, so that its keys correspond to the model fields.
[ "Rewrite", "the", "error", "dictionary", "so", "that", "its", "keys", "correspond", "to", "the", "model", "fields", "." ]
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_model.py#L42-L48
train
229,639
WoLpH/python-progressbar
progressbar/bar.py
ProgressBar.percentage
def percentage(self): '''Return current percentage, returns None if no max_value is given >>> progress = ProgressBar() >>> progress.max_value = 10 >>> progress.min_value = 0 >>> progress.value = 0 >>> progress.percentage 0.0 >>> >>> progress.value = 1 >>> progress.percentage 10.0 >>> progress.value = 10 >>> progress.percentage 100.0 >>> progress.min_value = -10 >>> progress.percentage 100.0 >>> progress.value = 0 >>> progress.percentage 50.0 >>> progress.value = 5 >>> progress.percentage 75.0 >>> progress.value = -5 >>> progress.percentage 25.0 >>> progress.max_value = None >>> progress.percentage ''' if self.max_value is None or self.max_value is base.UnknownLength: return None elif self.max_value: todo = self.value - self.min_value total = self.max_value - self.min_value percentage = todo / total else: percentage = 1 return percentage * 100
python
def percentage(self): '''Return current percentage, returns None if no max_value is given >>> progress = ProgressBar() >>> progress.max_value = 10 >>> progress.min_value = 0 >>> progress.value = 0 >>> progress.percentage 0.0 >>> >>> progress.value = 1 >>> progress.percentage 10.0 >>> progress.value = 10 >>> progress.percentage 100.0 >>> progress.min_value = -10 >>> progress.percentage 100.0 >>> progress.value = 0 >>> progress.percentage 50.0 >>> progress.value = 5 >>> progress.percentage 75.0 >>> progress.value = -5 >>> progress.percentage 25.0 >>> progress.max_value = None >>> progress.percentage ''' if self.max_value is None or self.max_value is base.UnknownLength: return None elif self.max_value: todo = self.value - self.min_value total = self.max_value - self.min_value percentage = todo / total else: percentage = 1 return percentage * 100
[ "def", "percentage", "(", "self", ")", ":", "if", "self", ".", "max_value", "is", "None", "or", "self", ".", "max_value", "is", "base", ".", "UnknownLength", ":", "return", "None", "elif", "self", ".", "max_value", ":", "todo", "=", "self", ".", "value...
Return current percentage, returns None if no max_value is given >>> progress = ProgressBar() >>> progress.max_value = 10 >>> progress.min_value = 0 >>> progress.value = 0 >>> progress.percentage 0.0 >>> >>> progress.value = 1 >>> progress.percentage 10.0 >>> progress.value = 10 >>> progress.percentage 100.0 >>> progress.min_value = -10 >>> progress.percentage 100.0 >>> progress.value = 0 >>> progress.percentage 50.0 >>> progress.value = 5 >>> progress.percentage 75.0 >>> progress.value = -5 >>> progress.percentage 25.0 >>> progress.max_value = None >>> progress.percentage
[ "Return", "current", "percentage", "returns", "None", "if", "no", "max_value", "is", "given" ]
963617a1bb9d81624ecf31f3457185992cd97bfa
https://github.com/WoLpH/python-progressbar/blob/963617a1bb9d81624ecf31f3457185992cd97bfa/progressbar/bar.py#L297-L337
train
229,640
WoLpH/python-progressbar
examples.py
example
def example(fn): '''Wrap the examples so they generate readable output''' @functools.wraps(fn) def wrapped(): try: sys.stdout.write('Running: %s\n' % fn.__name__) fn() sys.stdout.write('\n') except KeyboardInterrupt: sys.stdout.write('\nSkipping example.\n\n') # Sleep a bit to make killing the script easier time.sleep(0.2) examples.append(wrapped) return wrapped
python
def example(fn): '''Wrap the examples so they generate readable output''' @functools.wraps(fn) def wrapped(): try: sys.stdout.write('Running: %s\n' % fn.__name__) fn() sys.stdout.write('\n') except KeyboardInterrupt: sys.stdout.write('\nSkipping example.\n\n') # Sleep a bit to make killing the script easier time.sleep(0.2) examples.append(wrapped) return wrapped
[ "def", "example", "(", "fn", ")", ":", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "wrapped", "(", ")", ":", "try", ":", "sys", ".", "stdout", ".", "write", "(", "'Running: %s\\n'", "%", "fn", ".", "__name__", ")", "fn", "(", ")", "sys...
Wrap the examples so they generate readable output
[ "Wrap", "the", "examples", "so", "they", "generate", "readable", "output" ]
963617a1bb9d81624ecf31f3457185992cd97bfa
https://github.com/WoLpH/python-progressbar/blob/963617a1bb9d81624ecf31f3457185992cd97bfa/examples.py#L16-L31
train
229,641
rigetti/quantumflow
quantumflow/datasets/__init__.py
load_stdgraphs
def load_stdgraphs(size: int) -> List[nx.Graph]: """Load standard graph validation sets For each size (from 6 to 32 graph nodes) the dataset consists of 100 graphs drawn from the Erdős-Rényi ensemble with edge probability 50%. """ from pkg_resources import resource_stream if size < 6 or size > 32: raise ValueError('Size out of range.') filename = 'datasets/data/graph{}er100.g6'.format(size) fdata = resource_stream('quantumflow', filename) return nx.read_graph6(fdata)
python
def load_stdgraphs(size: int) -> List[nx.Graph]: """Load standard graph validation sets For each size (from 6 to 32 graph nodes) the dataset consists of 100 graphs drawn from the Erdős-Rényi ensemble with edge probability 50%. """ from pkg_resources import resource_stream if size < 6 or size > 32: raise ValueError('Size out of range.') filename = 'datasets/data/graph{}er100.g6'.format(size) fdata = resource_stream('quantumflow', filename) return nx.read_graph6(fdata)
[ "def", "load_stdgraphs", "(", "size", ":", "int", ")", "->", "List", "[", "nx", ".", "Graph", "]", ":", "from", "pkg_resources", "import", "resource_stream", "if", "size", "<", "6", "or", "size", ">", "32", ":", "raise", "ValueError", "(", "'Size out of ...
Load standard graph validation sets For each size (from 6 to 32 graph nodes) the dataset consists of 100 graphs drawn from the Erdős-Rényi ensemble with edge probability 50%.
[ "Load", "standard", "graph", "validation", "sets" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/datasets/__init__.py#L23-L37
train
229,642
rigetti/quantumflow
quantumflow/datasets/__init__.py
load_mnist
def load_mnist(size: int = None, border: int = _MNIST_BORDER, blank_corners: bool = False, nums: List[int] = None) \ -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: """Download and rescale the MNIST database of handwritten digits MNIST is a dataset of 60,000 28x28 grayscale images handwritten digits, along with a test set of 10,000 images. We use Keras to download and access the dataset. The first invocation of this method may take a while as the dataset has to be downloaded and cached. If size is None, then we return the original MNIST data. For rescaled MNIST, we chop off the border, downsample to the desired size with Lanczos resampling, and then (optionally) zero out the corner pixels. Returns (x_train, y_train, x_test, y_test) x_train ndarray of shape (60000, size, size) y_train ndarray of shape (60000,) x_test ndarray of shape (10000, size, size) y_test ndarray of shape (10000,) """ # DOCME: Fix up formatting above, # DOCME: Explain nums argument # JIT import since keras startup is slow from keras.datasets import mnist def _filter_mnist(x: np.ndarray, y: np.ndarray, nums: List[int] = None) \ -> Tuple[np.ndarray, np.ndarray]: xt = [] yt = [] items = len(y) for n in range(items): if nums is not None and y[n] in nums: xt.append(x[n]) yt.append(y[n]) xt = np.stack(xt) yt = np.stack(yt) return xt, yt def _rescale(imgarray: np.ndarray, size: int) -> np.ndarray: N = imgarray.shape[0] # Chop off border imgarray = imgarray[:, border:-border, border:-border] rescaled = np.zeros(shape=(N, size, size), dtype=np.float) for n in range(0, N): img = Image.fromarray(imgarray[n]) img = img.resize((size, size), Image.LANCZOS) rsc = np.asarray(img).reshape((size, size)) rsc = 256.*rsc/rsc.max() rescaled[n] = rsc return rescaled.astype(dtype=np.uint8) def _blank_corners(imgarray: np.ndarray) -> None: # Zero out corners sz = imgarray.shape[1] corner = (sz//2)-1 for x in range(0, corner): for y in range(0, corner-x): imgarray[:, x, y] = 0 imgarray[:, -(1+x), y] = 0 
imgarray[:, -(1+x), -(1+y)] = 0 imgarray[:, x, -(1+y)] = 0 (x_train, y_train), (x_test, y_test) = mnist.load_data() if nums: x_train, y_train = _filter_mnist(x_train, y_train, nums) x_test, y_test = _filter_mnist(x_test, y_test, nums) if size: x_train = _rescale(x_train, size) x_test = _rescale(x_test, size) if blank_corners: _blank_corners(x_train) _blank_corners(x_test) return x_train, y_train, x_test, y_test
python
def load_mnist(size: int = None, border: int = _MNIST_BORDER, blank_corners: bool = False, nums: List[int] = None) \ -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: """Download and rescale the MNIST database of handwritten digits MNIST is a dataset of 60,000 28x28 grayscale images handwritten digits, along with a test set of 10,000 images. We use Keras to download and access the dataset. The first invocation of this method may take a while as the dataset has to be downloaded and cached. If size is None, then we return the original MNIST data. For rescaled MNIST, we chop off the border, downsample to the desired size with Lanczos resampling, and then (optionally) zero out the corner pixels. Returns (x_train, y_train, x_test, y_test) x_train ndarray of shape (60000, size, size) y_train ndarray of shape (60000,) x_test ndarray of shape (10000, size, size) y_test ndarray of shape (10000,) """ # DOCME: Fix up formatting above, # DOCME: Explain nums argument # JIT import since keras startup is slow from keras.datasets import mnist def _filter_mnist(x: np.ndarray, y: np.ndarray, nums: List[int] = None) \ -> Tuple[np.ndarray, np.ndarray]: xt = [] yt = [] items = len(y) for n in range(items): if nums is not None and y[n] in nums: xt.append(x[n]) yt.append(y[n]) xt = np.stack(xt) yt = np.stack(yt) return xt, yt def _rescale(imgarray: np.ndarray, size: int) -> np.ndarray: N = imgarray.shape[0] # Chop off border imgarray = imgarray[:, border:-border, border:-border] rescaled = np.zeros(shape=(N, size, size), dtype=np.float) for n in range(0, N): img = Image.fromarray(imgarray[n]) img = img.resize((size, size), Image.LANCZOS) rsc = np.asarray(img).reshape((size, size)) rsc = 256.*rsc/rsc.max() rescaled[n] = rsc return rescaled.astype(dtype=np.uint8) def _blank_corners(imgarray: np.ndarray) -> None: # Zero out corners sz = imgarray.shape[1] corner = (sz//2)-1 for x in range(0, corner): for y in range(0, corner-x): imgarray[:, x, y] = 0 imgarray[:, -(1+x), y] = 0 
imgarray[:, -(1+x), -(1+y)] = 0 imgarray[:, x, -(1+y)] = 0 (x_train, y_train), (x_test, y_test) = mnist.load_data() if nums: x_train, y_train = _filter_mnist(x_train, y_train, nums) x_test, y_test = _filter_mnist(x_test, y_test, nums) if size: x_train = _rescale(x_train, size) x_test = _rescale(x_test, size) if blank_corners: _blank_corners(x_train) _blank_corners(x_test) return x_train, y_train, x_test, y_test
[ "def", "load_mnist", "(", "size", ":", "int", "=", "None", ",", "border", ":", "int", "=", "_MNIST_BORDER", ",", "blank_corners", ":", "bool", "=", "False", ",", "nums", ":", "List", "[", "int", "]", "=", "None", ")", "->", "Tuple", "[", "np", ".",...
Download and rescale the MNIST database of handwritten digits MNIST is a dataset of 60,000 28x28 grayscale images handwritten digits, along with a test set of 10,000 images. We use Keras to download and access the dataset. The first invocation of this method may take a while as the dataset has to be downloaded and cached. If size is None, then we return the original MNIST data. For rescaled MNIST, we chop off the border, downsample to the desired size with Lanczos resampling, and then (optionally) zero out the corner pixels. Returns (x_train, y_train, x_test, y_test) x_train ndarray of shape (60000, size, size) y_train ndarray of shape (60000,) x_test ndarray of shape (10000, size, size) y_test ndarray of shape (10000,)
[ "Download", "and", "rescale", "the", "MNIST", "database", "of", "handwritten", "digits" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/datasets/__init__.py#L43-L127
train
229,643
rigetti/quantumflow
quantumflow/backend/tensorflow2bk.py
astensor
def astensor(array: TensorLike) -> BKTensor: """Covert numpy array to tensorflow tensor""" tensor = tf.convert_to_tensor(value=array, dtype=CTYPE) return tensor
python
def astensor(array: TensorLike) -> BKTensor: """Covert numpy array to tensorflow tensor""" tensor = tf.convert_to_tensor(value=array, dtype=CTYPE) return tensor
[ "def", "astensor", "(", "array", ":", "TensorLike", ")", "->", "BKTensor", ":", "tensor", "=", "tf", ".", "convert_to_tensor", "(", "value", "=", "array", ",", "dtype", "=", "CTYPE", ")", "return", "tensor" ]
Covert numpy array to tensorflow tensor
[ "Covert", "numpy", "array", "to", "tensorflow", "tensor" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/tensorflow2bk.py#L74-L77
train
229,644
rigetti/quantumflow
quantumflow/backend/tensorflow2bk.py
inner
def inner(tensor0: BKTensor, tensor1: BKTensor) -> BKTensor: """Return the inner product between two states""" # Note: Relying on fact that vdot flattens arrays N = rank(tensor0) axes = list(range(N)) return tf.tensordot(tf.math.conj(tensor0), tensor1, axes=(axes, axes))
python
def inner(tensor0: BKTensor, tensor1: BKTensor) -> BKTensor: """Return the inner product between two states""" # Note: Relying on fact that vdot flattens arrays N = rank(tensor0) axes = list(range(N)) return tf.tensordot(tf.math.conj(tensor0), tensor1, axes=(axes, axes))
[ "def", "inner", "(", "tensor0", ":", "BKTensor", ",", "tensor1", ":", "BKTensor", ")", "->", "BKTensor", ":", "# Note: Relying on fact that vdot flattens arrays", "N", "=", "rank", "(", "tensor0", ")", "axes", "=", "list", "(", "range", "(", "N", ")", ")", ...
Return the inner product between two states
[ "Return", "the", "inner", "product", "between", "two", "states" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/tensorflow2bk.py#L92-L97
train
229,645
rigetti/quantumflow
quantumflow/qaoa.py
graph_cuts
def graph_cuts(graph: nx.Graph) -> np.ndarray: """For the given graph, return the cut value for all binary assignments of the graph. """ N = len(graph) diag_hamiltonian = np.zeros(shape=([2]*N), dtype=np.double) for q0, q1 in graph.edges(): for index, _ in np.ndenumerate(diag_hamiltonian): if index[q0] != index[q1]: weight = graph[q0][q1].get('weight', 1) diag_hamiltonian[index] += weight return diag_hamiltonian
python
def graph_cuts(graph: nx.Graph) -> np.ndarray: """For the given graph, return the cut value for all binary assignments of the graph. """ N = len(graph) diag_hamiltonian = np.zeros(shape=([2]*N), dtype=np.double) for q0, q1 in graph.edges(): for index, _ in np.ndenumerate(diag_hamiltonian): if index[q0] != index[q1]: weight = graph[q0][q1].get('weight', 1) diag_hamiltonian[index] += weight return diag_hamiltonian
[ "def", "graph_cuts", "(", "graph", ":", "nx", ".", "Graph", ")", "->", "np", ".", "ndarray", ":", "N", "=", "len", "(", "graph", ")", "diag_hamiltonian", "=", "np", ".", "zeros", "(", "shape", "=", "(", "[", "2", "]", "*", "N", ")", ",", "dtype...
For the given graph, return the cut value for all binary assignments of the graph.
[ "For", "the", "given", "graph", "return", "the", "cut", "value", "for", "all", "binary", "assignments", "of", "the", "graph", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/qaoa.py#L68-L81
train
229,646
rigetti/quantumflow
quantumflow/dagcircuit.py
DAGCircuit.depth
def depth(self, local: bool = True) -> int: """Return the circuit depth. Args: local: If True include local one-qubit gates in depth calculation. Else return the multi-qubit gate depth. """ G = self.graph if not local: def remove_local(dagc: DAGCircuit) \ -> Generator[Operation, None, None]: for elem in dagc: if dagc.graph.degree[elem] > 2: yield elem G = DAGCircuit(remove_local(self)).graph return nx.dag_longest_path_length(G) - 1
python
def depth(self, local: bool = True) -> int: """Return the circuit depth. Args: local: If True include local one-qubit gates in depth calculation. Else return the multi-qubit gate depth. """ G = self.graph if not local: def remove_local(dagc: DAGCircuit) \ -> Generator[Operation, None, None]: for elem in dagc: if dagc.graph.degree[elem] > 2: yield elem G = DAGCircuit(remove_local(self)).graph return nx.dag_longest_path_length(G) - 1
[ "def", "depth", "(", "self", ",", "local", ":", "bool", "=", "True", ")", "->", "int", ":", "G", "=", "self", ".", "graph", "if", "not", "local", ":", "def", "remove_local", "(", "dagc", ":", "DAGCircuit", ")", "->", "Generator", "[", "Operation", ...
Return the circuit depth. Args: local: If True include local one-qubit gates in depth calculation. Else return the multi-qubit gate depth.
[ "Return", "the", "circuit", "depth", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/dagcircuit.py#L97-L113
train
229,647
rigetti/quantumflow
quantumflow/dagcircuit.py
DAGCircuit.components
def components(self) -> List['DAGCircuit']: """Split DAGCircuit into independent components""" comps = nx.weakly_connected_component_subgraphs(self.graph) return [DAGCircuit(comp) for comp in comps]
python
def components(self) -> List['DAGCircuit']: """Split DAGCircuit into independent components""" comps = nx.weakly_connected_component_subgraphs(self.graph) return [DAGCircuit(comp) for comp in comps]
[ "def", "components", "(", "self", ")", "->", "List", "[", "'DAGCircuit'", "]", ":", "comps", "=", "nx", ".", "weakly_connected_component_subgraphs", "(", "self", ".", "graph", ")", "return", "[", "DAGCircuit", "(", "comp", ")", "for", "comp", "in", "comps"...
Split DAGCircuit into independent components
[ "Split", "DAGCircuit", "into", "independent", "components" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/dagcircuit.py#L124-L127
train
229,648
rigetti/quantumflow
quantumflow/states.py
zero_state
def zero_state(qubits: Union[int, Qubits]) -> State: """Return the all-zero state on N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.zeros(shape=[2] * N) ket[(0,) * N] = 1 return State(ket, qubits)
python
def zero_state(qubits: Union[int, Qubits]) -> State: """Return the all-zero state on N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.zeros(shape=[2] * N) ket[(0,) * N] = 1 return State(ket, qubits)
[ "def", "zero_state", "(", "qubits", ":", "Union", "[", "int", ",", "Qubits", "]", ")", "->", "State", ":", "N", ",", "qubits", "=", "qubits_count_tuple", "(", "qubits", ")", "ket", "=", "np", ".", "zeros", "(", "shape", "=", "[", "2", "]", "*", "...
Return the all-zero state on N qubits
[ "Return", "the", "all", "-", "zero", "state", "on", "N", "qubits" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L186-L191
train
229,649
rigetti/quantumflow
quantumflow/states.py
w_state
def w_state(qubits: Union[int, Qubits]) -> State: """Return a W state on N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.zeros(shape=[2] * N) for n in range(N): idx = np.zeros(shape=N, dtype=int) idx[n] += 1 ket[tuple(idx)] = 1 / sqrt(N) return State(ket, qubits)
python
def w_state(qubits: Union[int, Qubits]) -> State: """Return a W state on N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.zeros(shape=[2] * N) for n in range(N): idx = np.zeros(shape=N, dtype=int) idx[n] += 1 ket[tuple(idx)] = 1 / sqrt(N) return State(ket, qubits)
[ "def", "w_state", "(", "qubits", ":", "Union", "[", "int", ",", "Qubits", "]", ")", "->", "State", ":", "N", ",", "qubits", "=", "qubits_count_tuple", "(", "qubits", ")", "ket", "=", "np", ".", "zeros", "(", "shape", "=", "[", "2", "]", "*", "N",...
Return a W state on N qubits
[ "Return", "a", "W", "state", "on", "N", "qubits" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L194-L202
train
229,650
rigetti/quantumflow
quantumflow/states.py
ghz_state
def ghz_state(qubits: Union[int, Qubits]) -> State: """Return a GHZ state on N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.zeros(shape=[2] * N) ket[(0, ) * N] = 1 / sqrt(2) ket[(1, ) * N] = 1 / sqrt(2) return State(ket, qubits)
python
def ghz_state(qubits: Union[int, Qubits]) -> State: """Return a GHZ state on N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.zeros(shape=[2] * N) ket[(0, ) * N] = 1 / sqrt(2) ket[(1, ) * N] = 1 / sqrt(2) return State(ket, qubits)
[ "def", "ghz_state", "(", "qubits", ":", "Union", "[", "int", ",", "Qubits", "]", ")", "->", "State", ":", "N", ",", "qubits", "=", "qubits_count_tuple", "(", "qubits", ")", "ket", "=", "np", ".", "zeros", "(", "shape", "=", "[", "2", "]", "*", "N...
Return a GHZ state on N qubits
[ "Return", "a", "GHZ", "state", "on", "N", "qubits" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L205-L211
train
229,651
rigetti/quantumflow
quantumflow/states.py
random_state
def random_state(qubits: Union[int, Qubits]) -> State: """Return a random state from the space of N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.random.normal(size=([2] * N)) \ + 1j * np.random.normal(size=([2] * N)) return State(ket, qubits).normalize()
python
def random_state(qubits: Union[int, Qubits]) -> State: """Return a random state from the space of N qubits""" N, qubits = qubits_count_tuple(qubits) ket = np.random.normal(size=([2] * N)) \ + 1j * np.random.normal(size=([2] * N)) return State(ket, qubits).normalize()
[ "def", "random_state", "(", "qubits", ":", "Union", "[", "int", ",", "Qubits", "]", ")", "->", "State", ":", "N", ",", "qubits", "=", "qubits_count_tuple", "(", "qubits", ")", "ket", "=", "np", ".", "random", ".", "normal", "(", "size", "=", "(", "...
Return a random state from the space of N qubits
[ "Return", "a", "random", "state", "from", "the", "space", "of", "N", "qubits" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L214-L219
train
229,652
rigetti/quantumflow
quantumflow/states.py
join_states
def join_states(*states: State) -> State: """Join two state vectors into a larger qubit state""" vectors = [ket.vec for ket in states] vec = reduce(outer_product, vectors) return State(vec.tensor, vec.qubits)
python
def join_states(*states: State) -> State: """Join two state vectors into a larger qubit state""" vectors = [ket.vec for ket in states] vec = reduce(outer_product, vectors) return State(vec.tensor, vec.qubits)
[ "def", "join_states", "(", "*", "states", ":", "State", ")", "->", "State", ":", "vectors", "=", "[", "ket", ".", "vec", "for", "ket", "in", "states", "]", "vec", "=", "reduce", "(", "outer_product", ",", "vectors", ")", "return", "State", "(", "vec"...
Join two state vectors into a larger qubit state
[ "Join", "two", "state", "vectors", "into", "a", "larger", "qubit", "state" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L225-L229
train
229,653
rigetti/quantumflow
quantumflow/states.py
print_state
def print_state(state: State, file: TextIO = None) -> None: """Print a state vector""" state = state.vec.asarray() for index, amplitude in np.ndenumerate(state): ket = "".join([str(n) for n in index]) print(ket, ":", amplitude, file=file)
python
def print_state(state: State, file: TextIO = None) -> None: """Print a state vector""" state = state.vec.asarray() for index, amplitude in np.ndenumerate(state): ket = "".join([str(n) for n in index]) print(ket, ":", amplitude, file=file)
[ "def", "print_state", "(", "state", ":", "State", ",", "file", ":", "TextIO", "=", "None", ")", "->", "None", ":", "state", "=", "state", ".", "vec", ".", "asarray", "(", ")", "for", "index", ",", "amplitude", "in", "np", ".", "ndenumerate", "(", "...
Print a state vector
[ "Print", "a", "state", "vector" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L234-L239
train
229,654
rigetti/quantumflow
quantumflow/states.py
print_probabilities
def print_probabilities(state: State, ndigits: int = 4, file: TextIO = None) -> None: """ Pretty print state probabilities. Args: state: ndigits: Number of digits of accuracy file: Output stream (Defaults to stdout) """ prob = bk.evaluate(state.probabilities()) for index, prob in np.ndenumerate(prob): prob = round(prob, ndigits) if prob == 0.0: continue ket = "".join([str(n) for n in index]) print(ket, ":", prob, file=file)
python
def print_probabilities(state: State, ndigits: int = 4, file: TextIO = None) -> None: """ Pretty print state probabilities. Args: state: ndigits: Number of digits of accuracy file: Output stream (Defaults to stdout) """ prob = bk.evaluate(state.probabilities()) for index, prob in np.ndenumerate(prob): prob = round(prob, ndigits) if prob == 0.0: continue ket = "".join([str(n) for n in index]) print(ket, ":", prob, file=file)
[ "def", "print_probabilities", "(", "state", ":", "State", ",", "ndigits", ":", "int", "=", "4", ",", "file", ":", "TextIO", "=", "None", ")", "->", "None", ":", "prob", "=", "bk", ".", "evaluate", "(", "state", ".", "probabilities", "(", ")", ")", ...
Pretty print state probabilities. Args: state: ndigits: Number of digits of accuracy file: Output stream (Defaults to stdout)
[ "Pretty", "print", "state", "probabilities", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L243-L259
train
229,655
rigetti/quantumflow
quantumflow/states.py
mixed_density
def mixed_density(qubits: Union[int, Qubits]) -> Density: """Returns the completely mixed density matrix""" N, qubits = qubits_count_tuple(qubits) matrix = np.eye(2**N) / 2**N return Density(matrix, qubits)
python
def mixed_density(qubits: Union[int, Qubits]) -> Density: """Returns the completely mixed density matrix""" N, qubits = qubits_count_tuple(qubits) matrix = np.eye(2**N) / 2**N return Density(matrix, qubits)
[ "def", "mixed_density", "(", "qubits", ":", "Union", "[", "int", ",", "Qubits", "]", ")", "->", "Density", ":", "N", ",", "qubits", "=", "qubits_count_tuple", "(", "qubits", ")", "matrix", "=", "np", ".", "eye", "(", "2", "**", "N", ")", "/", "2", ...
Returns the completely mixed density matrix
[ "Returns", "the", "completely", "mixed", "density", "matrix" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L322-L326
train
229,656
rigetti/quantumflow
quantumflow/states.py
join_densities
def join_densities(*densities: Density) -> Density: """Join two mixed states into a larger qubit state""" vectors = [rho.vec for rho in densities] vec = reduce(outer_product, vectors) memory = dict(ChainMap(*[rho.memory for rho in densities])) # TESTME return Density(vec.tensor, vec.qubits, memory)
python
def join_densities(*densities: Density) -> Density: """Join two mixed states into a larger qubit state""" vectors = [rho.vec for rho in densities] vec = reduce(outer_product, vectors) memory = dict(ChainMap(*[rho.memory for rho in densities])) # TESTME return Density(vec.tensor, vec.qubits, memory)
[ "def", "join_densities", "(", "*", "densities", ":", "Density", ")", "->", "Density", ":", "vectors", "=", "[", "rho", ".", "vec", "for", "rho", "in", "densities", "]", "vec", "=", "reduce", "(", "outer_product", ",", "vectors", ")", "memory", "=", "di...
Join two mixed states into a larger qubit state
[ "Join", "two", "mixed", "states", "into", "a", "larger", "qubit", "state" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L349-L355
train
229,657
rigetti/quantumflow
quantumflow/states.py
State.normalize
def normalize(self) -> 'State': """Normalize the state""" tensor = self.tensor / bk.ccast(bk.sqrt(self.norm())) return State(tensor, self.qubits, self._memory)
python
def normalize(self) -> 'State': """Normalize the state""" tensor = self.tensor / bk.ccast(bk.sqrt(self.norm())) return State(tensor, self.qubits, self._memory)
[ "def", "normalize", "(", "self", ")", "->", "'State'", ":", "tensor", "=", "self", ".", "tensor", "/", "bk", ".", "ccast", "(", "bk", ".", "sqrt", "(", "self", ".", "norm", "(", ")", ")", ")", "return", "State", "(", "tensor", ",", "self", ".", ...
Normalize the state
[ "Normalize", "the", "state" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L108-L111
train
229,658
rigetti/quantumflow
quantumflow/states.py
State.sample
def sample(self, trials: int) -> np.ndarray: """Measure the state in the computational basis the the given number of trials, and return the counts of each output configuration. """ # TODO: Can we do this within backend? probs = np.real(bk.evaluate(self.probabilities())) res = np.random.multinomial(trials, probs.ravel()) res = res.reshape(probs.shape) return res
python
def sample(self, trials: int) -> np.ndarray: """Measure the state in the computational basis the the given number of trials, and return the counts of each output configuration. """ # TODO: Can we do this within backend? probs = np.real(bk.evaluate(self.probabilities())) res = np.random.multinomial(trials, probs.ravel()) res = res.reshape(probs.shape) return res
[ "def", "sample", "(", "self", ",", "trials", ":", "int", ")", "->", "np", ".", "ndarray", ":", "# TODO: Can we do this within backend?", "probs", "=", "np", ".", "real", "(", "bk", ".", "evaluate", "(", "self", ".", "probabilities", "(", ")", ")", ")", ...
Measure the state in the computational basis the the given number of trials, and return the counts of each output configuration.
[ "Measure", "the", "state", "in", "the", "computational", "basis", "the", "the", "given", "number", "of", "trials", "and", "return", "the", "counts", "of", "each", "output", "configuration", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L121-L129
train
229,659
rigetti/quantumflow
quantumflow/states.py
State.expectation
def expectation(self, diag_hermitian: bk.TensorLike, trials: int = None) -> bk.BKTensor: """Return the expectation of a measurement. Since we can only measure our computer in the computational basis, we only require the diagonal of the Hermitian in that basis. If the number of trials is specified, we sample the given number of times. Else we return the exact expectation (as if we'd performed an infinite number of trials. ) """ if trials is None: probs = self.probabilities() else: probs = bk.real(bk.astensorproduct(self.sample(trials) / trials)) diag_hermitian = bk.astensorproduct(diag_hermitian) return bk.sum(bk.real(diag_hermitian) * probs)
python
def expectation(self, diag_hermitian: bk.TensorLike, trials: int = None) -> bk.BKTensor: """Return the expectation of a measurement. Since we can only measure our computer in the computational basis, we only require the diagonal of the Hermitian in that basis. If the number of trials is specified, we sample the given number of times. Else we return the exact expectation (as if we'd performed an infinite number of trials. ) """ if trials is None: probs = self.probabilities() else: probs = bk.real(bk.astensorproduct(self.sample(trials) / trials)) diag_hermitian = bk.astensorproduct(diag_hermitian) return bk.sum(bk.real(diag_hermitian) * probs)
[ "def", "expectation", "(", "self", ",", "diag_hermitian", ":", "bk", ".", "TensorLike", ",", "trials", ":", "int", "=", "None", ")", "->", "bk", ".", "BKTensor", ":", "if", "trials", "is", "None", ":", "probs", "=", "self", ".", "probabilities", "(", ...
Return the expectation of a measurement. Since we can only measure our computer in the computational basis, we only require the diagonal of the Hermitian in that basis. If the number of trials is specified, we sample the given number of times. Else we return the exact expectation (as if we'd performed an infinite number of trials. )
[ "Return", "the", "expectation", "of", "a", "measurement", ".", "Since", "we", "can", "only", "measure", "our", "computer", "in", "the", "computational", "basis", "we", "only", "require", "the", "diagonal", "of", "the", "Hermitian", "in", "that", "basis", "."...
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L131-L147
train
229,660
rigetti/quantumflow
quantumflow/states.py
State.measure
def measure(self) -> np.ndarray: """Measure the state in the computational basis. Returns: A [2]*bits array of qubit states, either 0 or 1 """ # TODO: Can we do this within backend? probs = np.real(bk.evaluate(self.probabilities())) indices = np.asarray(list(np.ndindex(*[2] * self.qubit_nb))) res = np.random.choice(probs.size, p=probs.ravel()) res = indices[res] return res
python
def measure(self) -> np.ndarray: """Measure the state in the computational basis. Returns: A [2]*bits array of qubit states, either 0 or 1 """ # TODO: Can we do this within backend? probs = np.real(bk.evaluate(self.probabilities())) indices = np.asarray(list(np.ndindex(*[2] * self.qubit_nb))) res = np.random.choice(probs.size, p=probs.ravel()) res = indices[res] return res
[ "def", "measure", "(", "self", ")", "->", "np", ".", "ndarray", ":", "# TODO: Can we do this within backend?", "probs", "=", "np", ".", "real", "(", "bk", ".", "evaluate", "(", "self", ".", "probabilities", "(", ")", ")", ")", "indices", "=", "np", ".", ...
Measure the state in the computational basis. Returns: A [2]*bits array of qubit states, either 0 or 1
[ "Measure", "the", "state", "in", "the", "computational", "basis", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L149-L160
train
229,661
rigetti/quantumflow
quantumflow/states.py
State.asdensity
def asdensity(self) -> 'Density': """Convert a pure state to a density matrix""" matrix = bk.outer(self.tensor, bk.conj(self.tensor)) return Density(matrix, self.qubits, self._memory)
python
def asdensity(self) -> 'Density': """Convert a pure state to a density matrix""" matrix = bk.outer(self.tensor, bk.conj(self.tensor)) return Density(matrix, self.qubits, self._memory)
[ "def", "asdensity", "(", "self", ")", "->", "'Density'", ":", "matrix", "=", "bk", ".", "outer", "(", "self", ".", "tensor", ",", "bk", ".", "conj", "(", "self", ".", "tensor", ")", ")", "return", "Density", "(", "matrix", ",", "self", ".", "qubits...
Convert a pure state to a density matrix
[ "Convert", "a", "pure", "state", "to", "a", "density", "matrix" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L162-L165
train
229,662
rigetti/quantumflow
tools/benchmark.py
benchmark
def benchmark(N, gates): """Create and run a circuit with N qubits and given number of gates""" qubits = list(range(0, N)) ket = qf.zero_state(N) for n in range(0, N): ket = qf.H(n).run(ket) for _ in range(0, (gates-N)//3): qubit0, qubit1 = random.sample(qubits, 2) ket = qf.X(qubit0).run(ket) ket = qf.T(qubit1).run(ket) ket = qf.CNOT(qubit0, qubit1).run(ket) return ket.vec.tensor
python
def benchmark(N, gates): """Create and run a circuit with N qubits and given number of gates""" qubits = list(range(0, N)) ket = qf.zero_state(N) for n in range(0, N): ket = qf.H(n).run(ket) for _ in range(0, (gates-N)//3): qubit0, qubit1 = random.sample(qubits, 2) ket = qf.X(qubit0).run(ket) ket = qf.T(qubit1).run(ket) ket = qf.CNOT(qubit0, qubit1).run(ket) return ket.vec.tensor
[ "def", "benchmark", "(", "N", ",", "gates", ")", ":", "qubits", "=", "list", "(", "range", "(", "0", ",", "N", ")", ")", "ket", "=", "qf", ".", "zero_state", "(", "N", ")", "for", "n", "in", "range", "(", "0", ",", "N", ")", ":", "ket", "="...
Create and run a circuit with N qubits and given number of gates
[ "Create", "and", "run", "a", "circuit", "with", "N", "qubits", "and", "given", "number", "of", "gates" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/tools/benchmark.py#L31-L45
train
229,663
rigetti/quantumflow
examples/weyl.py
sandwich_decompositions
def sandwich_decompositions(coords0, coords1, samples=SAMPLES): """Create composite gates, decompose, and return a list of canonical coordinates""" decomps = [] for _ in range(samples): circ = qf.Circuit() circ += qf.CANONICAL(*coords0, 0, 1) circ += qf.random_gate([0]) circ += qf.random_gate([1]) circ += qf.CANONICAL(*coords1, 0, 1) gate = circ.asgate() coords = qf.canonical_coords(gate) decomps.append(coords) return decomps
python
def sandwich_decompositions(coords0, coords1, samples=SAMPLES): """Create composite gates, decompose, and return a list of canonical coordinates""" decomps = [] for _ in range(samples): circ = qf.Circuit() circ += qf.CANONICAL(*coords0, 0, 1) circ += qf.random_gate([0]) circ += qf.random_gate([1]) circ += qf.CANONICAL(*coords1, 0, 1) gate = circ.asgate() coords = qf.canonical_coords(gate) decomps.append(coords) return decomps
[ "def", "sandwich_decompositions", "(", "coords0", ",", "coords1", ",", "samples", "=", "SAMPLES", ")", ":", "decomps", "=", "[", "]", "for", "_", "in", "range", "(", "samples", ")", ":", "circ", "=", "qf", ".", "Circuit", "(", ")", "circ", "+=", "qf"...
Create composite gates, decompose, and return a list of canonical coordinates
[ "Create", "composite", "gates", "decompose", "and", "return", "a", "list", "of", "canonical", "coordinates" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/examples/weyl.py#L82-L97
train
229,664
rigetti/quantumflow
quantumflow/paulialgebra.py
sX
def sX(qubit: Qubit, coefficient: complex = 1.0) -> Pauli: """Return the Pauli sigma_X operator acting on the given qubit""" return Pauli.sigma(qubit, 'X', coefficient)
python
def sX(qubit: Qubit, coefficient: complex = 1.0) -> Pauli: """Return the Pauli sigma_X operator acting on the given qubit""" return Pauli.sigma(qubit, 'X', coefficient)
[ "def", "sX", "(", "qubit", ":", "Qubit", ",", "coefficient", ":", "complex", "=", "1.0", ")", "->", "Pauli", ":", "return", "Pauli", ".", "sigma", "(", "qubit", ",", "'X'", ",", "coefficient", ")" ]
Return the Pauli sigma_X operator acting on the given qubit
[ "Return", "the", "Pauli", "sigma_X", "operator", "acting", "on", "the", "given", "qubit" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/paulialgebra.py#L224-L226
train
229,665
rigetti/quantumflow
quantumflow/paulialgebra.py
sY
def sY(qubit: Qubit, coefficient: complex = 1.0) -> Pauli: """Return the Pauli sigma_Y operator acting on the given qubit""" return Pauli.sigma(qubit, 'Y', coefficient)
python
def sY(qubit: Qubit, coefficient: complex = 1.0) -> Pauli: """Return the Pauli sigma_Y operator acting on the given qubit""" return Pauli.sigma(qubit, 'Y', coefficient)
[ "def", "sY", "(", "qubit", ":", "Qubit", ",", "coefficient", ":", "complex", "=", "1.0", ")", "->", "Pauli", ":", "return", "Pauli", ".", "sigma", "(", "qubit", ",", "'Y'", ",", "coefficient", ")" ]
Return the Pauli sigma_Y operator acting on the given qubit
[ "Return", "the", "Pauli", "sigma_Y", "operator", "acting", "on", "the", "given", "qubit" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/paulialgebra.py#L229-L231
train
229,666
rigetti/quantumflow
quantumflow/paulialgebra.py
sZ
def sZ(qubit: Qubit, coefficient: complex = 1.0) -> Pauli: """Return the Pauli sigma_Z operator acting on the given qubit""" return Pauli.sigma(qubit, 'Z', coefficient)
python
def sZ(qubit: Qubit, coefficient: complex = 1.0) -> Pauli: """Return the Pauli sigma_Z operator acting on the given qubit""" return Pauli.sigma(qubit, 'Z', coefficient)
[ "def", "sZ", "(", "qubit", ":", "Qubit", ",", "coefficient", ":", "complex", "=", "1.0", ")", "->", "Pauli", ":", "return", "Pauli", ".", "sigma", "(", "qubit", ",", "'Z'", ",", "coefficient", ")" ]
Return the Pauli sigma_Z operator acting on the given qubit
[ "Return", "the", "Pauli", "sigma_Z", "operator", "acting", "on", "the", "given", "qubit" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/paulialgebra.py#L234-L236
train
229,667
rigetti/quantumflow
quantumflow/paulialgebra.py
pauli_sum
def pauli_sum(*elements: Pauli) -> Pauli: """Return the sum of elements of the Pauli algebra""" terms = [] key = itemgetter(0) for term, grp in groupby(heapq.merge(*elements, key=key), key=key): coeff = sum(g[1] for g in grp) if not isclose(coeff, 0.0): terms.append((term, coeff)) return Pauli(tuple(terms))
python
def pauli_sum(*elements: Pauli) -> Pauli: """Return the sum of elements of the Pauli algebra""" terms = [] key = itemgetter(0) for term, grp in groupby(heapq.merge(*elements, key=key), key=key): coeff = sum(g[1] for g in grp) if not isclose(coeff, 0.0): terms.append((term, coeff)) return Pauli(tuple(terms))
[ "def", "pauli_sum", "(", "*", "elements", ":", "Pauli", ")", "->", "Pauli", ":", "terms", "=", "[", "]", "key", "=", "itemgetter", "(", "0", ")", "for", "term", ",", "grp", "in", "groupby", "(", "heapq", ".", "merge", "(", "*", "elements", ",", "...
Return the sum of elements of the Pauli algebra
[ "Return", "the", "sum", "of", "elements", "of", "the", "Pauli", "algebra" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/paulialgebra.py#L245-L255
train
229,668
rigetti/quantumflow
quantumflow/paulialgebra.py
pauli_product
def pauli_product(*elements: Pauli) -> Pauli: """Return the product of elements of the Pauli algebra""" result_terms = [] for terms in product(*elements): coeff = reduce(mul, [term[1] for term in terms]) ops = (term[0] for term in terms) out = [] key = itemgetter(0) for qubit, qops in groupby(heapq.merge(*ops, key=key), key=key): res = next(qops)[1] # Operator: X Y Z for op in qops: pair = res + op[1] res, rescoeff = PAULI_PROD[pair] coeff *= rescoeff if res != 'I': out.append((qubit, res)) p = Pauli(((tuple(out), coeff),)) result_terms.append(p) return pauli_sum(*result_terms)
python
def pauli_product(*elements: Pauli) -> Pauli: """Return the product of elements of the Pauli algebra""" result_terms = [] for terms in product(*elements): coeff = reduce(mul, [term[1] for term in terms]) ops = (term[0] for term in terms) out = [] key = itemgetter(0) for qubit, qops in groupby(heapq.merge(*ops, key=key), key=key): res = next(qops)[1] # Operator: X Y Z for op in qops: pair = res + op[1] res, rescoeff = PAULI_PROD[pair] coeff *= rescoeff if res != 'I': out.append((qubit, res)) p = Pauli(((tuple(out), coeff),)) result_terms.append(p) return pauli_sum(*result_terms)
[ "def", "pauli_product", "(", "*", "elements", ":", "Pauli", ")", "->", "Pauli", ":", "result_terms", "=", "[", "]", "for", "terms", "in", "product", "(", "*", "elements", ")", ":", "coeff", "=", "reduce", "(", "mul", ",", "[", "term", "[", "1", "]"...
Return the product of elements of the Pauli algebra
[ "Return", "the", "product", "of", "elements", "of", "the", "Pauli", "algebra" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/paulialgebra.py#L258-L279
train
229,669
rigetti/quantumflow
quantumflow/paulialgebra.py
pauli_pow
def pauli_pow(pauli: Pauli, exponent: int) -> Pauli: """ Raise an element of the Pauli algebra to a non-negative integer power. """ if not isinstance(exponent, int) or exponent < 0: raise ValueError("The exponent must be a non-negative integer.") if exponent == 0: return Pauli.identity() if exponent == 1: return pauli # https://en.wikipedia.org/wiki/Exponentiation_by_squaring y = Pauli.identity() x = pauli n = exponent while n > 1: if n % 2 == 0: # Even x = x * x n = n // 2 else: # Odd y = x * y x = x * x n = (n - 1) // 2 return x * y
python
def pauli_pow(pauli: Pauli, exponent: int) -> Pauli: """ Raise an element of the Pauli algebra to a non-negative integer power. """ if not isinstance(exponent, int) or exponent < 0: raise ValueError("The exponent must be a non-negative integer.") if exponent == 0: return Pauli.identity() if exponent == 1: return pauli # https://en.wikipedia.org/wiki/Exponentiation_by_squaring y = Pauli.identity() x = pauli n = exponent while n > 1: if n % 2 == 0: # Even x = x * x n = n // 2 else: # Odd y = x * y x = x * x n = (n - 1) // 2 return x * y
[ "def", "pauli_pow", "(", "pauli", ":", "Pauli", ",", "exponent", ":", "int", ")", "->", "Pauli", ":", "if", "not", "isinstance", "(", "exponent", ",", "int", ")", "or", "exponent", "<", "0", ":", "raise", "ValueError", "(", "\"The exponent must be a non-ne...
Raise an element of the Pauli algebra to a non-negative integer power.
[ "Raise", "an", "element", "of", "the", "Pauli", "algebra", "to", "a", "non", "-", "negative", "integer", "power", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/paulialgebra.py#L282-L308
train
229,670
rigetti/quantumflow
quantumflow/paulialgebra.py
pauli_commuting_sets
def pauli_commuting_sets(element: Pauli) -> Tuple[Pauli, ...]: """Gather the terms of a Pauli polynomial into commuting sets. Uses the algorithm defined in (Raeisi, Wiebe, Sanders, arXiv:1108.4318, 2011) to find commuting sets. Except uses commutation check from arXiv:1405.5749v2 """ if len(element) < 2: return (element,) groups: List[Pauli] = [] # typing: List[Pauli] for term in element: pterm = Pauli((term,)) assigned = False for i, grp in enumerate(groups): if paulis_commute(grp, pterm): groups[i] = grp + pterm assigned = True break if not assigned: groups.append(pterm) return tuple(groups)
python
def pauli_commuting_sets(element: Pauli) -> Tuple[Pauli, ...]: """Gather the terms of a Pauli polynomial into commuting sets. Uses the algorithm defined in (Raeisi, Wiebe, Sanders, arXiv:1108.4318, 2011) to find commuting sets. Except uses commutation check from arXiv:1405.5749v2 """ if len(element) < 2: return (element,) groups: List[Pauli] = [] # typing: List[Pauli] for term in element: pterm = Pauli((term,)) assigned = False for i, grp in enumerate(groups): if paulis_commute(grp, pterm): groups[i] = grp + pterm assigned = True break if not assigned: groups.append(pterm) return tuple(groups)
[ "def", "pauli_commuting_sets", "(", "element", ":", "Pauli", ")", "->", "Tuple", "[", "Pauli", ",", "...", "]", ":", "if", "len", "(", "element", ")", "<", "2", ":", "return", "(", "element", ",", ")", "groups", ":", "List", "[", "Pauli", "]", "=",...
Gather the terms of a Pauli polynomial into commuting sets. Uses the algorithm defined in (Raeisi, Wiebe, Sanders, arXiv:1108.4318, 2011) to find commuting sets. Except uses commutation check from arXiv:1405.5749v2
[ "Gather", "the", "terms", "of", "a", "Pauli", "polynomial", "into", "commuting", "sets", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/paulialgebra.py#L348-L372
train
229,671
rigetti/quantumflow
quantumflow/backend/numpybk.py
astensor
def astensor(array: TensorLike) -> BKTensor: """Converts a numpy array to the backend's tensor object """ array = np.asarray(array, dtype=CTYPE) return array
python
def astensor(array: TensorLike) -> BKTensor: """Converts a numpy array to the backend's tensor object """ array = np.asarray(array, dtype=CTYPE) return array
[ "def", "astensor", "(", "array", ":", "TensorLike", ")", "->", "BKTensor", ":", "array", "=", "np", ".", "asarray", "(", "array", ",", "dtype", "=", "CTYPE", ")", "return", "array" ]
Converts a numpy array to the backend's tensor object
[ "Converts", "a", "numpy", "array", "to", "the", "backend", "s", "tensor", "object" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/numpybk.py#L98-L102
train
229,672
rigetti/quantumflow
quantumflow/backend/numpybk.py
productdiag
def productdiag(tensor: BKTensor) -> BKTensor: """Returns the matrix diagonal of the product tensor""" # DOCME: Explain N = rank(tensor) tensor = reshape(tensor, [2**(N//2), 2**(N//2)]) tensor = np.diag(tensor) tensor = reshape(tensor, [2]*(N//2)) return tensor
python
def productdiag(tensor: BKTensor) -> BKTensor: """Returns the matrix diagonal of the product tensor""" # DOCME: Explain N = rank(tensor) tensor = reshape(tensor, [2**(N//2), 2**(N//2)]) tensor = np.diag(tensor) tensor = reshape(tensor, [2]*(N//2)) return tensor
[ "def", "productdiag", "(", "tensor", ":", "BKTensor", ")", "->", "BKTensor", ":", "# DOCME: Explain", "N", "=", "rank", "(", "tensor", ")", "tensor", "=", "reshape", "(", "tensor", ",", "[", "2", "**", "(", "N", "//", "2", ")", ",", "2", "**", "(",...
Returns the matrix diagonal of the product tensor
[ "Returns", "the", "matrix", "diagonal", "of", "the", "product", "tensor" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/numpybk.py#L150-L156
train
229,673
rigetti/quantumflow
quantumflow/backend/numpybk.py
tensormul
def tensormul(tensor0: BKTensor, tensor1: BKTensor, indices: typing.List[int]) -> BKTensor: r""" Generalization of matrix multiplication to product tensors. A state vector in product tensor representation has N dimension, one for each contravariant index, e.g. for 3-qubit states :math:`B^{b_0,b_1,b_2}`. An operator has K dimensions, K/2 for contravariant indices (e.g. ket components) and K/2 for covariant (bra) indices, e.g. :math:`A^{a_0,a_1}_{a_2,a_3}` for a 2-qubit gate. The given indices of A are contracted against B, replacing the given positions. E.g. ``tensormul(A, B, [0,2])`` is equivalent to .. math:: C^{a_0,b_1,a_1} =\sum_{i_0,i_1} A^{a_0,a_1}_{i_0,i_1} B^{i_0,b_1,i_1} Args: tensor0: A tensor product representation of a gate tensor1: A tensor product representation of a gate or state indices: List of indices of tensor1 on which to act. Returns: Resultant state or gate tensor """ # Note: This method is the critical computational core of QuantumFlow # We currently have two implementations, one that uses einsum, the other # using matrix multiplication # # numpy: # einsum is much faster particularly for small numbers of qubits # tensorflow: # Little different is performance, but einsum would restrict the # maximum number of qubits to 26 (Because tensorflow only allows 26 # einsum subscripts at present] # torch: # einsum is slower than matmul N = rank(tensor1) K = rank(tensor0) // 2 assert K == len(indices) out = list(EINSUM_SUBSCRIPTS[0:N]) left_in = list(EINSUM_SUBSCRIPTS[N:N+K]) left_out = [out[idx] for idx in indices] right = list(EINSUM_SUBSCRIPTS[0:N]) for idx, s in zip(indices, left_in): right[idx] = s subscripts = ''.join(left_out + left_in + [','] + right + ['->'] + out) # print('>>>', K, N, subscripts) tensor = einsum(subscripts, tensor0, tensor1) return tensor
python
def tensormul(tensor0: BKTensor, tensor1: BKTensor, indices: typing.List[int]) -> BKTensor: r""" Generalization of matrix multiplication to product tensors. A state vector in product tensor representation has N dimension, one for each contravariant index, e.g. for 3-qubit states :math:`B^{b_0,b_1,b_2}`. An operator has K dimensions, K/2 for contravariant indices (e.g. ket components) and K/2 for covariant (bra) indices, e.g. :math:`A^{a_0,a_1}_{a_2,a_3}` for a 2-qubit gate. The given indices of A are contracted against B, replacing the given positions. E.g. ``tensormul(A, B, [0,2])`` is equivalent to .. math:: C^{a_0,b_1,a_1} =\sum_{i_0,i_1} A^{a_0,a_1}_{i_0,i_1} B^{i_0,b_1,i_1} Args: tensor0: A tensor product representation of a gate tensor1: A tensor product representation of a gate or state indices: List of indices of tensor1 on which to act. Returns: Resultant state or gate tensor """ # Note: This method is the critical computational core of QuantumFlow # We currently have two implementations, one that uses einsum, the other # using matrix multiplication # # numpy: # einsum is much faster particularly for small numbers of qubits # tensorflow: # Little different is performance, but einsum would restrict the # maximum number of qubits to 26 (Because tensorflow only allows 26 # einsum subscripts at present] # torch: # einsum is slower than matmul N = rank(tensor1) K = rank(tensor0) // 2 assert K == len(indices) out = list(EINSUM_SUBSCRIPTS[0:N]) left_in = list(EINSUM_SUBSCRIPTS[N:N+K]) left_out = [out[idx] for idx in indices] right = list(EINSUM_SUBSCRIPTS[0:N]) for idx, s in zip(indices, left_in): right[idx] = s subscripts = ''.join(left_out + left_in + [','] + right + ['->'] + out) # print('>>>', K, N, subscripts) tensor = einsum(subscripts, tensor0, tensor1) return tensor
[ "def", "tensormul", "(", "tensor0", ":", "BKTensor", ",", "tensor1", ":", "BKTensor", ",", "indices", ":", "typing", ".", "List", "[", "int", "]", ")", "->", "BKTensor", ":", "# Note: This method is the critical computational core of QuantumFlow", "# We currently have...
r""" Generalization of matrix multiplication to product tensors. A state vector in product tensor representation has N dimension, one for each contravariant index, e.g. for 3-qubit states :math:`B^{b_0,b_1,b_2}`. An operator has K dimensions, K/2 for contravariant indices (e.g. ket components) and K/2 for covariant (bra) indices, e.g. :math:`A^{a_0,a_1}_{a_2,a_3}` for a 2-qubit gate. The given indices of A are contracted against B, replacing the given positions. E.g. ``tensormul(A, B, [0,2])`` is equivalent to .. math:: C^{a_0,b_1,a_1} =\sum_{i_0,i_1} A^{a_0,a_1}_{i_0,i_1} B^{i_0,b_1,i_1} Args: tensor0: A tensor product representation of a gate tensor1: A tensor product representation of a gate or state indices: List of indices of tensor1 on which to act. Returns: Resultant state or gate tensor
[ "r", "Generalization", "of", "matrix", "multiplication", "to", "product", "tensors", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/numpybk.py#L159-L214
train
229,674
rigetti/quantumflow
quantumflow/utils.py
invert_map
def invert_map(mapping: dict, one_to_one: bool = True) -> dict: """Invert a dictionary. If not one_to_one then the inverted map will contain lists of former keys as values. """ if one_to_one: inv_map = {value: key for key, value in mapping.items()} else: inv_map = {} for key, value in mapping.items(): inv_map.setdefault(value, set()).add(key) return inv_map
python
def invert_map(mapping: dict, one_to_one: bool = True) -> dict: """Invert a dictionary. If not one_to_one then the inverted map will contain lists of former keys as values. """ if one_to_one: inv_map = {value: key for key, value in mapping.items()} else: inv_map = {} for key, value in mapping.items(): inv_map.setdefault(value, set()).add(key) return inv_map
[ "def", "invert_map", "(", "mapping", ":", "dict", ",", "one_to_one", ":", "bool", "=", "True", ")", "->", "dict", ":", "if", "one_to_one", ":", "inv_map", "=", "{", "value", ":", "key", "for", "key", ",", "value", "in", "mapping", ".", "items", "(", ...
Invert a dictionary. If not one_to_one then the inverted map will contain lists of former keys as values.
[ "Invert", "a", "dictionary", ".", "If", "not", "one_to_one", "then", "the", "inverted", "map", "will", "contain", "lists", "of", "former", "keys", "as", "values", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/utils.py#L38-L49
train
229,675
rigetti/quantumflow
quantumflow/utils.py
bitlist_to_int
def bitlist_to_int(bitlist: Sequence[int]) -> int: """Converts a sequence of bits to an integer. >>> from quantumflow.utils import bitlist_to_int >>> bitlist_to_int([1, 0, 0]) 4 """ return int(''.join([str(d) for d in bitlist]), 2)
python
def bitlist_to_int(bitlist: Sequence[int]) -> int: """Converts a sequence of bits to an integer. >>> from quantumflow.utils import bitlist_to_int >>> bitlist_to_int([1, 0, 0]) 4 """ return int(''.join([str(d) for d in bitlist]), 2)
[ "def", "bitlist_to_int", "(", "bitlist", ":", "Sequence", "[", "int", "]", ")", "->", "int", ":", "return", "int", "(", "''", ".", "join", "(", "[", "str", "(", "d", ")", "for", "d", "in", "bitlist", "]", ")", ",", "2", ")" ]
Converts a sequence of bits to an integer. >>> from quantumflow.utils import bitlist_to_int >>> bitlist_to_int([1, 0, 0]) 4
[ "Converts", "a", "sequence", "of", "bits", "to", "an", "integer", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/utils.py#L52-L59
train
229,676
rigetti/quantumflow
quantumflow/utils.py
int_to_bitlist
def int_to_bitlist(x: int, pad: int = None) -> Sequence[int]: """Converts an integer to a binary sequence of bits. Pad prepends with sufficient zeros to ensures that the returned list contains at least this number of bits. >>> from quantumflow.utils import int_to_bitlist >>> int_to_bitlist(4, 4)) [0, 1, 0, 0] """ if pad is None: form = '{:0b}' else: form = '{:0' + str(pad) + 'b}' return [int(b) for b in form.format(x)]
python
def int_to_bitlist(x: int, pad: int = None) -> Sequence[int]: """Converts an integer to a binary sequence of bits. Pad prepends with sufficient zeros to ensures that the returned list contains at least this number of bits. >>> from quantumflow.utils import int_to_bitlist >>> int_to_bitlist(4, 4)) [0, 1, 0, 0] """ if pad is None: form = '{:0b}' else: form = '{:0' + str(pad) + 'b}' return [int(b) for b in form.format(x)]
[ "def", "int_to_bitlist", "(", "x", ":", "int", ",", "pad", ":", "int", "=", "None", ")", "->", "Sequence", "[", "int", "]", ":", "if", "pad", "is", "None", ":", "form", "=", "'{:0b}'", "else", ":", "form", "=", "'{:0'", "+", "str", "(", "pad", ...
Converts an integer to a binary sequence of bits. Pad prepends with sufficient zeros to ensures that the returned list contains at least this number of bits. >>> from quantumflow.utils import int_to_bitlist >>> int_to_bitlist(4, 4)) [0, 1, 0, 0]
[ "Converts", "an", "integer", "to", "a", "binary", "sequence", "of", "bits", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/utils.py#L62-L77
train
229,677
rigetti/quantumflow
quantumflow/utils.py
spanning_tree_count
def spanning_tree_count(graph: nx.Graph) -> int: """Return the number of unique spanning trees of a graph, using Kirchhoff's matrix tree theorem. """ laplacian = nx.laplacian_matrix(graph).toarray() comatrix = laplacian[:-1, :-1] det = np.linalg.det(comatrix) count = int(round(det)) return count
python
def spanning_tree_count(graph: nx.Graph) -> int: """Return the number of unique spanning trees of a graph, using Kirchhoff's matrix tree theorem. """ laplacian = nx.laplacian_matrix(graph).toarray() comatrix = laplacian[:-1, :-1] det = np.linalg.det(comatrix) count = int(round(det)) return count
[ "def", "spanning_tree_count", "(", "graph", ":", "nx", ".", "Graph", ")", "->", "int", ":", "laplacian", "=", "nx", ".", "laplacian_matrix", "(", "graph", ")", ".", "toarray", "(", ")", "comatrix", "=", "laplacian", "[", ":", "-", "1", ",", ":", "-",...
Return the number of unique spanning trees of a graph, using Kirchhoff's matrix tree theorem.
[ "Return", "the", "number", "of", "unique", "spanning", "trees", "of", "a", "graph", "using", "Kirchhoff", "s", "matrix", "tree", "theorem", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/utils.py#L108-L116
train
229,678
rigetti/quantumflow
quantumflow/utils.py
rationalize
def rationalize(flt: float, denominators: Set[int] = None) -> Fraction: """Convert a floating point number to a Fraction with a small denominator. Args: flt: A floating point number denominators: Collection of standard denominators. Default is 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 Raises: ValueError: If cannot rationalize float """ if denominators is None: denominators = _DENOMINATORS frac = Fraction.from_float(flt).limit_denominator() if frac.denominator not in denominators: raise ValueError('Cannot rationalize') return frac
python
def rationalize(flt: float, denominators: Set[int] = None) -> Fraction: """Convert a floating point number to a Fraction with a small denominator. Args: flt: A floating point number denominators: Collection of standard denominators. Default is 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 Raises: ValueError: If cannot rationalize float """ if denominators is None: denominators = _DENOMINATORS frac = Fraction.from_float(flt).limit_denominator() if frac.denominator not in denominators: raise ValueError('Cannot rationalize') return frac
[ "def", "rationalize", "(", "flt", ":", "float", ",", "denominators", ":", "Set", "[", "int", "]", "=", "None", ")", "->", "Fraction", ":", "if", "denominators", "is", "None", ":", "denominators", "=", "_DENOMINATORS", "frac", "=", "Fraction", ".", "from_...
Convert a floating point number to a Fraction with a small denominator. Args: flt: A floating point number denominators: Collection of standard denominators. Default is 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 Raises: ValueError: If cannot rationalize float
[ "Convert", "a", "floating", "point", "number", "to", "a", "Fraction", "with", "a", "small", "denominator", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/utils.py#L171-L189
train
229,679
rigetti/quantumflow
quantumflow/utils.py
symbolize
def symbolize(flt: float) -> sympy.Symbol: """Attempt to convert a real number into a simpler symbolic representation. Returns: A sympy Symbol. (Convert to string with str(sym) or to latex with sympy.latex(sym) Raises: ValueError: If cannot simplify float """ try: ratio = rationalize(flt) res = sympy.simplify(ratio) except ValueError: ratio = rationalize(flt/np.pi) res = sympy.simplify(ratio) * sympy.pi return res
python
def symbolize(flt: float) -> sympy.Symbol: """Attempt to convert a real number into a simpler symbolic representation. Returns: A sympy Symbol. (Convert to string with str(sym) or to latex with sympy.latex(sym) Raises: ValueError: If cannot simplify float """ try: ratio = rationalize(flt) res = sympy.simplify(ratio) except ValueError: ratio = rationalize(flt/np.pi) res = sympy.simplify(ratio) * sympy.pi return res
[ "def", "symbolize", "(", "flt", ":", "float", ")", "->", "sympy", ".", "Symbol", ":", "try", ":", "ratio", "=", "rationalize", "(", "flt", ")", "res", "=", "sympy", ".", "simplify", "(", "ratio", ")", "except", "ValueError", ":", "ratio", "=", "ratio...
Attempt to convert a real number into a simpler symbolic representation. Returns: A sympy Symbol. (Convert to string with str(sym) or to latex with sympy.latex(sym) Raises: ValueError: If cannot simplify float
[ "Attempt", "to", "convert", "a", "real", "number", "into", "a", "simpler", "symbolic", "representation", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/utils.py#L192-L208
train
229,680
rigetti/quantumflow
quantumflow/forest/__init__.py
pyquil_to_image
def pyquil_to_image(program: pyquil.Program) -> PIL.Image: # pragma: no cover """Returns an image of a pyquil circuit. See circuit_to_latex() for more details. """ circ = pyquil_to_circuit(program) latex = circuit_to_latex(circ) img = render_latex(latex) return img
python
def pyquil_to_image(program: pyquil.Program) -> PIL.Image: # pragma: no cover """Returns an image of a pyquil circuit. See circuit_to_latex() for more details. """ circ = pyquil_to_circuit(program) latex = circuit_to_latex(circ) img = render_latex(latex) return img
[ "def", "pyquil_to_image", "(", "program", ":", "pyquil", ".", "Program", ")", "->", "PIL", ".", "Image", ":", "# pragma: no cover", "circ", "=", "pyquil_to_circuit", "(", "program", ")", "latex", "=", "circuit_to_latex", "(", "circ", ")", "img", "=", "render...
Returns an image of a pyquil circuit. See circuit_to_latex() for more details.
[ "Returns", "an", "image", "of", "a", "pyquil", "circuit", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L160-L168
train
229,681
rigetti/quantumflow
quantumflow/forest/__init__.py
circuit_to_pyquil
def circuit_to_pyquil(circuit: Circuit) -> pyquil.Program: """Convert a QuantumFlow circuit to a pyQuil program""" prog = pyquil.Program() for elem in circuit.elements: if isinstance(elem, Gate) and elem.name in QUIL_GATES: params = list(elem.params.values()) if elem.params else [] prog.gate(elem.name, params, elem.qubits) elif isinstance(elem, Measure): prog.measure(elem.qubit, elem.cbit) else: # FIXME: more informative error message raise ValueError('Cannot convert operation to pyquil') return prog
python
def circuit_to_pyquil(circuit: Circuit) -> pyquil.Program: """Convert a QuantumFlow circuit to a pyQuil program""" prog = pyquil.Program() for elem in circuit.elements: if isinstance(elem, Gate) and elem.name in QUIL_GATES: params = list(elem.params.values()) if elem.params else [] prog.gate(elem.name, params, elem.qubits) elif isinstance(elem, Measure): prog.measure(elem.qubit, elem.cbit) else: # FIXME: more informative error message raise ValueError('Cannot convert operation to pyquil') return prog
[ "def", "circuit_to_pyquil", "(", "circuit", ":", "Circuit", ")", "->", "pyquil", ".", "Program", ":", "prog", "=", "pyquil", ".", "Program", "(", ")", "for", "elem", "in", "circuit", ".", "elements", ":", "if", "isinstance", "(", "elem", ",", "Gate", "...
Convert a QuantumFlow circuit to a pyQuil program
[ "Convert", "a", "QuantumFlow", "circuit", "to", "a", "pyQuil", "program" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L171-L185
train
229,682
rigetti/quantumflow
quantumflow/forest/__init__.py
pyquil_to_circuit
def pyquil_to_circuit(program: pyquil.Program) -> Circuit: """Convert a protoquil pyQuil program to a QuantumFlow Circuit""" circ = Circuit() for inst in program.instructions: # print(type(inst)) if isinstance(inst, pyquil.Declare): # Ignore continue if isinstance(inst, pyquil.Halt): # Ignore continue if isinstance(inst, pyquil.Pragma): # TODO Barrier? continue elif isinstance(inst, pyquil.Measurement): circ += Measure(inst.qubit.index) # elif isinstance(inst, pyquil.ResetQubit): # TODO # continue elif isinstance(inst, pyquil.Gate): defgate = STDGATES[inst.name] gate = defgate(*inst.params) qubits = [q.index for q in inst.qubits] gate = gate.relabel(qubits) circ += gate else: raise ValueError('PyQuil program is not protoquil') return circ
python
def pyquil_to_circuit(program: pyquil.Program) -> Circuit: """Convert a protoquil pyQuil program to a QuantumFlow Circuit""" circ = Circuit() for inst in program.instructions: # print(type(inst)) if isinstance(inst, pyquil.Declare): # Ignore continue if isinstance(inst, pyquil.Halt): # Ignore continue if isinstance(inst, pyquil.Pragma): # TODO Barrier? continue elif isinstance(inst, pyquil.Measurement): circ += Measure(inst.qubit.index) # elif isinstance(inst, pyquil.ResetQubit): # TODO # continue elif isinstance(inst, pyquil.Gate): defgate = STDGATES[inst.name] gate = defgate(*inst.params) qubits = [q.index for q in inst.qubits] gate = gate.relabel(qubits) circ += gate else: raise ValueError('PyQuil program is not protoquil') return circ
[ "def", "pyquil_to_circuit", "(", "program", ":", "pyquil", ".", "Program", ")", "->", "Circuit", ":", "circ", "=", "Circuit", "(", ")", "for", "inst", "in", "program", ".", "instructions", ":", "# print(type(inst))", "if", "isinstance", "(", "inst", ",", "...
Convert a protoquil pyQuil program to a QuantumFlow Circuit
[ "Convert", "a", "protoquil", "pyQuil", "program", "to", "a", "QuantumFlow", "Circuit" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L188-L213
train
229,683
rigetti/quantumflow
quantumflow/forest/__init__.py
quil_to_program
def quil_to_program(quil: str) -> Program: """Parse a quil program and return a Program object""" pyquil_instructions = pyquil.parser.parse(quil) return pyquil_to_program(pyquil_instructions)
python
def quil_to_program(quil: str) -> Program: """Parse a quil program and return a Program object""" pyquil_instructions = pyquil.parser.parse(quil) return pyquil_to_program(pyquil_instructions)
[ "def", "quil_to_program", "(", "quil", ":", "str", ")", "->", "Program", ":", "pyquil_instructions", "=", "pyquil", ".", "parser", ".", "parse", "(", "quil", ")", "return", "pyquil_to_program", "(", "pyquil_instructions", ")" ]
Parse a quil program and return a Program object
[ "Parse", "a", "quil", "program", "and", "return", "a", "Program", "object" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L216-L219
train
229,684
rigetti/quantumflow
quantumflow/forest/__init__.py
state_to_wavefunction
def state_to_wavefunction(state: State) -> pyquil.Wavefunction: """Convert a QuantumFlow state to a pyQuil Wavefunction""" # TODO: qubits? amplitudes = state.vec.asarray() # pyQuil labels states backwards. amplitudes = amplitudes.transpose() amplitudes = amplitudes.reshape([amplitudes.size]) return pyquil.Wavefunction(amplitudes)
python
def state_to_wavefunction(state: State) -> pyquil.Wavefunction: """Convert a QuantumFlow state to a pyQuil Wavefunction""" # TODO: qubits? amplitudes = state.vec.asarray() # pyQuil labels states backwards. amplitudes = amplitudes.transpose() amplitudes = amplitudes.reshape([amplitudes.size]) return pyquil.Wavefunction(amplitudes)
[ "def", "state_to_wavefunction", "(", "state", ":", "State", ")", "->", "pyquil", ".", "Wavefunction", ":", "# TODO: qubits?", "amplitudes", "=", "state", ".", "vec", ".", "asarray", "(", ")", "# pyQuil labels states backwards.", "amplitudes", "=", "amplitudes", "....
Convert a QuantumFlow state to a pyQuil Wavefunction
[ "Convert", "a", "QuantumFlow", "state", "to", "a", "pyQuil", "Wavefunction" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L350-L358
train
229,685
rigetti/quantumflow
quantumflow/forest/__init__.py
QuantumFlowQVM.load
def load(self, binary: pyquil.Program) -> 'QuantumFlowQVM': """ Load a pyQuil program, and initialize QVM into a fresh state. Args: binary: A pyQuil program """ assert self.status in ['connected', 'done'] prog = quil_to_program(str(binary)) self._prog = prog self.program = binary self.status = 'loaded' return self
python
def load(self, binary: pyquil.Program) -> 'QuantumFlowQVM': """ Load a pyQuil program, and initialize QVM into a fresh state. Args: binary: A pyQuil program """ assert self.status in ['connected', 'done'] prog = quil_to_program(str(binary)) self._prog = prog self.program = binary self.status = 'loaded' return self
[ "def", "load", "(", "self", ",", "binary", ":", "pyquil", ".", "Program", ")", "->", "'QuantumFlowQVM'", ":", "assert", "self", ".", "status", "in", "[", "'connected'", ",", "'done'", "]", "prog", "=", "quil_to_program", "(", "str", "(", "binary", ")", ...
Load a pyQuil program, and initialize QVM into a fresh state. Args: binary: A pyQuil program
[ "Load", "a", "pyQuil", "program", "and", "initialize", "QVM", "into", "a", "fresh", "state", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L379-L394
train
229,686
rigetti/quantumflow
quantumflow/forest/__init__.py
QuantumFlowQVM.run
def run(self) -> 'QuantumFlowQVM': """Run a previously loaded program""" assert self.status in ['loaded'] self.status = 'running' self._ket = self._prog.run() # Should set state to 'done' after run complete. # Makes no sense to keep status at running. But pyQuil's # QuantumComputer calls wait() after run, which expects state to be # 'running', and whose only effect to is to set state to 'done' return self
python
def run(self) -> 'QuantumFlowQVM': """Run a previously loaded program""" assert self.status in ['loaded'] self.status = 'running' self._ket = self._prog.run() # Should set state to 'done' after run complete. # Makes no sense to keep status at running. But pyQuil's # QuantumComputer calls wait() after run, which expects state to be # 'running', and whose only effect to is to set state to 'done' return self
[ "def", "run", "(", "self", ")", "->", "'QuantumFlowQVM'", ":", "assert", "self", ".", "status", "in", "[", "'loaded'", "]", "self", ".", "status", "=", "'running'", "self", ".", "_ket", "=", "self", ".", "_prog", ".", "run", "(", ")", "# Should set sta...
Run a previously loaded program
[ "Run", "a", "previously", "loaded", "program" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L401-L410
train
229,687
rigetti/quantumflow
quantumflow/forest/__init__.py
QuantumFlowQVM.wavefunction
def wavefunction(self) -> pyquil.Wavefunction: """ Return the wavefunction of a completed program. """ assert self.status == 'done' assert self._ket is not None wavefn = state_to_wavefunction(self._ket) return wavefn
python
def wavefunction(self) -> pyquil.Wavefunction: """ Return the wavefunction of a completed program. """ assert self.status == 'done' assert self._ket is not None wavefn = state_to_wavefunction(self._ket) return wavefn
[ "def", "wavefunction", "(", "self", ")", "->", "pyquil", ".", "Wavefunction", ":", "assert", "self", ".", "status", "==", "'done'", "assert", "self", ".", "_ket", "is", "not", "None", "wavefn", "=", "state_to_wavefunction", "(", "self", ".", "_ket", ")", ...
Return the wavefunction of a completed program.
[ "Return", "the", "wavefunction", "of", "a", "completed", "program", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/forest/__init__.py#L437-L444
train
229,688
rigetti/quantumflow
quantumflow/backend/torchbk.py
evaluate
def evaluate(tensor: BKTensor) -> TensorLike: """Return the value of a tensor""" if isinstance(tensor, _DTYPE): if torch.numel(tensor) == 1: return tensor.item() if tensor.numel() == 2: return tensor[0].cpu().numpy() + 1.0j * tensor[1].cpu().numpy() return tensor[0].cpu().numpy() + 1.0j * tensor[1].cpu().numpy() return tensor
python
def evaluate(tensor: BKTensor) -> TensorLike: """Return the value of a tensor""" if isinstance(tensor, _DTYPE): if torch.numel(tensor) == 1: return tensor.item() if tensor.numel() == 2: return tensor[0].cpu().numpy() + 1.0j * tensor[1].cpu().numpy() return tensor[0].cpu().numpy() + 1.0j * tensor[1].cpu().numpy() return tensor
[ "def", "evaluate", "(", "tensor", ":", "BKTensor", ")", "->", "TensorLike", ":", "if", "isinstance", "(", "tensor", ",", "_DTYPE", ")", ":", "if", "torch", ".", "numel", "(", "tensor", ")", "==", "1", ":", "return", "tensor", ".", "item", "(", ")", ...
Return the value of a tensor
[ "Return", "the", "value", "of", "a", "tensor" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/torchbk.py#L91-L100
train
229,689
rigetti/quantumflow
quantumflow/backend/torchbk.py
rank
def rank(tensor: BKTensor) -> int: """Return the number of dimensions of a tensor""" if isinstance(tensor, np.ndarray): return len(tensor.shape) return len(tensor[0].size())
python
def rank(tensor: BKTensor) -> int: """Return the number of dimensions of a tensor""" if isinstance(tensor, np.ndarray): return len(tensor.shape) return len(tensor[0].size())
[ "def", "rank", "(", "tensor", ":", "BKTensor", ")", "->", "int", ":", "if", "isinstance", "(", "tensor", ",", "np", ".", "ndarray", ")", ":", "return", "len", "(", "tensor", ".", "shape", ")", "return", "len", "(", "tensor", "[", "0", "]", ".", "...
Return the number of dimensions of a tensor
[ "Return", "the", "number", "of", "dimensions", "of", "a", "tensor" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/torchbk.py#L136-L141
train
229,690
rigetti/quantumflow
quantumflow/measures.py
state_fidelity
def state_fidelity(state0: State, state1: State) -> bk.BKTensor: """Return the quantum fidelity between pure states.""" assert state0.qubits == state1.qubits # FIXME tensor = bk.absolute(bk.inner(state0.tensor, state1.tensor))**bk.fcast(2) return tensor
python
def state_fidelity(state0: State, state1: State) -> bk.BKTensor: """Return the quantum fidelity between pure states.""" assert state0.qubits == state1.qubits # FIXME tensor = bk.absolute(bk.inner(state0.tensor, state1.tensor))**bk.fcast(2) return tensor
[ "def", "state_fidelity", "(", "state0", ":", "State", ",", "state1", ":", "State", ")", "->", "bk", ".", "BKTensor", ":", "assert", "state0", ".", "qubits", "==", "state1", ".", "qubits", "# FIXME", "tensor", "=", "bk", ".", "absolute", "(", "bk", ".",...
Return the quantum fidelity between pure states.
[ "Return", "the", "quantum", "fidelity", "between", "pure", "states", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L32-L36
train
229,691
rigetti/quantumflow
quantumflow/measures.py
state_angle
def state_angle(ket0: State, ket1: State) -> bk.BKTensor: """The Fubini-Study angle between states. Equal to the Burrs angle for pure states. """ return fubini_study_angle(ket0.vec, ket1.vec)
python
def state_angle(ket0: State, ket1: State) -> bk.BKTensor: """The Fubini-Study angle between states. Equal to the Burrs angle for pure states. """ return fubini_study_angle(ket0.vec, ket1.vec)
[ "def", "state_angle", "(", "ket0", ":", "State", ",", "ket1", ":", "State", ")", "->", "bk", ".", "BKTensor", ":", "return", "fubini_study_angle", "(", "ket0", ".", "vec", ",", "ket1", ".", "vec", ")" ]
The Fubini-Study angle between states. Equal to the Burrs angle for pure states.
[ "The", "Fubini", "-", "Study", "angle", "between", "states", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L39-L44
train
229,692
rigetti/quantumflow
quantumflow/measures.py
states_close
def states_close(state0: State, state1: State, tolerance: float = TOLERANCE) -> bool: """Returns True if states are almost identical. Closeness is measured with the metric Fubini-Study angle. """ return vectors_close(state0.vec, state1.vec, tolerance)
python
def states_close(state0: State, state1: State, tolerance: float = TOLERANCE) -> bool: """Returns True if states are almost identical. Closeness is measured with the metric Fubini-Study angle. """ return vectors_close(state0.vec, state1.vec, tolerance)
[ "def", "states_close", "(", "state0", ":", "State", ",", "state1", ":", "State", ",", "tolerance", ":", "float", "=", "TOLERANCE", ")", "->", "bool", ":", "return", "vectors_close", "(", "state0", ".", "vec", ",", "state1", ".", "vec", ",", "tolerance", ...
Returns True if states are almost identical. Closeness is measured with the metric Fubini-Study angle.
[ "Returns", "True", "if", "states", "are", "almost", "identical", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L47-L53
train
229,693
rigetti/quantumflow
quantumflow/measures.py
purity
def purity(rho: Density) -> bk.BKTensor: """ Calculate the purity of a mixed quantum state. Purity, defined as tr(rho^2), has an upper bound of 1 for a pure state, and a lower bound of 1/D (where D is the Hilbert space dimension) for a competently mixed state. Two closely related measures are the linear entropy, 1- purity, and the participation ratio, 1/purity. """ tensor = rho.tensor N = rho.qubit_nb matrix = bk.reshape(tensor, [2**N, 2**N]) return bk.trace(bk.matmul(matrix, matrix))
python
def purity(rho: Density) -> bk.BKTensor: """ Calculate the purity of a mixed quantum state. Purity, defined as tr(rho^2), has an upper bound of 1 for a pure state, and a lower bound of 1/D (where D is the Hilbert space dimension) for a competently mixed state. Two closely related measures are the linear entropy, 1- purity, and the participation ratio, 1/purity. """ tensor = rho.tensor N = rho.qubit_nb matrix = bk.reshape(tensor, [2**N, 2**N]) return bk.trace(bk.matmul(matrix, matrix))
[ "def", "purity", "(", "rho", ":", "Density", ")", "->", "bk", ".", "BKTensor", ":", "tensor", "=", "rho", ".", "tensor", "N", "=", "rho", ".", "qubit_nb", "matrix", "=", "bk", ".", "reshape", "(", "tensor", ",", "[", "2", "**", "N", ",", "2", "...
Calculate the purity of a mixed quantum state. Purity, defined as tr(rho^2), has an upper bound of 1 for a pure state, and a lower bound of 1/D (where D is the Hilbert space dimension) for a competently mixed state. Two closely related measures are the linear entropy, 1- purity, and the participation ratio, 1/purity.
[ "Calculate", "the", "purity", "of", "a", "mixed", "quantum", "state", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L59-L73
train
229,694
rigetti/quantumflow
quantumflow/measures.py
bures_distance
def bures_distance(rho0: Density, rho1: Density) -> float: """Return the Bures distance between mixed quantum states Note: Bures distance cannot be calculated within the tensor backend. """ fid = fidelity(rho0, rho1) op0 = asarray(rho0.asoperator()) op1 = asarray(rho1.asoperator()) tr0 = np.trace(op0) tr1 = np.trace(op1) return np.sqrt(tr0 + tr1 - 2.*np.sqrt(fid))
python
def bures_distance(rho0: Density, rho1: Density) -> float: """Return the Bures distance between mixed quantum states Note: Bures distance cannot be calculated within the tensor backend. """ fid = fidelity(rho0, rho1) op0 = asarray(rho0.asoperator()) op1 = asarray(rho1.asoperator()) tr0 = np.trace(op0) tr1 = np.trace(op1) return np.sqrt(tr0 + tr1 - 2.*np.sqrt(fid))
[ "def", "bures_distance", "(", "rho0", ":", "Density", ",", "rho1", ":", "Density", ")", "->", "float", ":", "fid", "=", "fidelity", "(", "rho0", ",", "rho1", ")", "op0", "=", "asarray", "(", "rho0", ".", "asoperator", "(", ")", ")", "op1", "=", "as...
Return the Bures distance between mixed quantum states Note: Bures distance cannot be calculated within the tensor backend.
[ "Return", "the", "Bures", "distance", "between", "mixed", "quantum", "states" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L95-L106
train
229,695
rigetti/quantumflow
quantumflow/measures.py
bures_angle
def bures_angle(rho0: Density, rho1: Density) -> float: """Return the Bures angle between mixed quantum states Note: Bures angle cannot be calculated within the tensor backend. """ return np.arccos(np.sqrt(fidelity(rho0, rho1)))
python
def bures_angle(rho0: Density, rho1: Density) -> float: """Return the Bures angle between mixed quantum states Note: Bures angle cannot be calculated within the tensor backend. """ return np.arccos(np.sqrt(fidelity(rho0, rho1)))
[ "def", "bures_angle", "(", "rho0", ":", "Density", ",", "rho1", ":", "Density", ")", "->", "float", ":", "return", "np", ".", "arccos", "(", "np", ".", "sqrt", "(", "fidelity", "(", "rho0", ",", "rho1", ")", ")", ")" ]
Return the Bures angle between mixed quantum states Note: Bures angle cannot be calculated within the tensor backend.
[ "Return", "the", "Bures", "angle", "between", "mixed", "quantum", "states" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L110-L115
train
229,696
rigetti/quantumflow
quantumflow/measures.py
density_angle
def density_angle(rho0: Density, rho1: Density) -> bk.BKTensor: """The Fubini-Study angle between density matrices""" return fubini_study_angle(rho0.vec, rho1.vec)
python
def density_angle(rho0: Density, rho1: Density) -> bk.BKTensor: """The Fubini-Study angle between density matrices""" return fubini_study_angle(rho0.vec, rho1.vec)
[ "def", "density_angle", "(", "rho0", ":", "Density", ",", "rho1", ":", "Density", ")", "->", "bk", ".", "BKTensor", ":", "return", "fubini_study_angle", "(", "rho0", ".", "vec", ",", "rho1", ".", "vec", ")" ]
The Fubini-Study angle between density matrices
[ "The", "Fubini", "-", "Study", "angle", "between", "density", "matrices" ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L118-L120
train
229,697
rigetti/quantumflow
quantumflow/measures.py
densities_close
def densities_close(rho0: Density, rho1: Density, tolerance: float = TOLERANCE) -> bool: """Returns True if densities are almost identical. Closeness is measured with the metric Fubini-Study angle. """ return vectors_close(rho0.vec, rho1.vec, tolerance)
python
def densities_close(rho0: Density, rho1: Density, tolerance: float = TOLERANCE) -> bool: """Returns True if densities are almost identical. Closeness is measured with the metric Fubini-Study angle. """ return vectors_close(rho0.vec, rho1.vec, tolerance)
[ "def", "densities_close", "(", "rho0", ":", "Density", ",", "rho1", ":", "Density", ",", "tolerance", ":", "float", "=", "TOLERANCE", ")", "->", "bool", ":", "return", "vectors_close", "(", "rho0", ".", "vec", ",", "rho1", ".", "vec", ",", "tolerance", ...
Returns True if densities are almost identical. Closeness is measured with the metric Fubini-Study angle.
[ "Returns", "True", "if", "densities", "are", "almost", "identical", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L123-L129
train
229,698
rigetti/quantumflow
quantumflow/measures.py
entropy
def entropy(rho: Density, base: float = None) -> float: """ Returns the von-Neumann entropy of a mixed quantum state. Args: rho: A density matrix base: Optional logarithm base. Default is base e, and entropy is measures in nats. For bits set base to 2. Returns: The von-Neumann entropy of rho """ op = asarray(rho.asoperator()) probs = np.linalg.eigvalsh(op) probs = np.maximum(probs, 0.0) # Compensate for floating point errors return scipy.stats.entropy(probs, base=base)
python
def entropy(rho: Density, base: float = None) -> float: """ Returns the von-Neumann entropy of a mixed quantum state. Args: rho: A density matrix base: Optional logarithm base. Default is base e, and entropy is measures in nats. For bits set base to 2. Returns: The von-Neumann entropy of rho """ op = asarray(rho.asoperator()) probs = np.linalg.eigvalsh(op) probs = np.maximum(probs, 0.0) # Compensate for floating point errors return scipy.stats.entropy(probs, base=base)
[ "def", "entropy", "(", "rho", ":", "Density", ",", "base", ":", "float", "=", "None", ")", "->", "float", ":", "op", "=", "asarray", "(", "rho", ".", "asoperator", "(", ")", ")", "probs", "=", "np", ".", "linalg", ".", "eigvalsh", "(", "op", ")",...
Returns the von-Neumann entropy of a mixed quantum state. Args: rho: A density matrix base: Optional logarithm base. Default is base e, and entropy is measures in nats. For bits set base to 2. Returns: The von-Neumann entropy of rho
[ "Returns", "the", "von", "-", "Neumann", "entropy", "of", "a", "mixed", "quantum", "state", "." ]
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/measures.py#L133-L148
train
229,699