idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
40,700
def ping(self, peer, *peers, **kwargs):
    """Provides round-trip latency information for the routing system."""
    # A bare ``count=`` keyword is shorthand for the API's ``opts`` mapping.
    if "count" in kwargs:
        count = kwargs.pop("count")
        kwargs.setdefault("opts", {"count": count})
    targets = (peer,) + peers
    return self._client.request('/ping', targets, decoder='json', **kwargs)
Provides round - trip latency information for the routing system .
40,701
def config(self, key, value=None, **kwargs):
    """Controls configuration variables."""
    return self._client.request('/config', (key, value), decoder='json', **kwargs)
Controls configuration variables .
40,702
def config_replace(self, *args, **kwargs):
    """Replaces the existing config with a user-defined config."""
    return self._client.request('/config/replace', args,
                                decoder='json', **kwargs)
Replaces the existing config with a user - defined config .
40,703
def log_level(self, subsystem, level, **kwargs):
    r"""Changes the logging output of a running daemon.

    The stray ``r`` token in the original was the orphaned prefix of this
    raw docstring; restoring it makes the function valid again.
    """
    args = (subsystem, level)
    return self._client.request('/log/level', args, decoder='json', **kwargs)
Changes the logging output of a running daemon.
40,704
def log_tail(self, **kwargs):
    r"""Reads log outputs as they are written.

    The stray ``r`` token in the original was the orphaned prefix of this
    raw docstring; restoring it makes the function valid again.
    """
    return self._client.request('/log/tail', decoder='json',
                                stream=True, **kwargs)
Reads log outputs as they are written.
40,705
def files_cp(self, source, dest, **kwargs):
    """Copies files within the MFS."""
    return self._client.request('/files/cp', (source, dest), **kwargs)
Copies files within the MFS .
40,706
def files_ls(self, path, **kwargs):
    """Lists contents of a directory in the MFS."""
    return self._client.request('/files/ls', (path,), decoder='json', **kwargs)
Lists contents of a directory in the MFS .
40,707
def files_mkdir(self, path, parents=False, **kwargs):
    """Creates a directory within the MFS."""
    kwargs.setdefault("opts", {"parents": parents})
    return self._client.request('/files/mkdir', (path,), **kwargs)
Creates a directory within the MFS .
40,708
def files_rm(self, path, recursive=False, **kwargs):
    """Removes a file from the MFS."""
    kwargs.setdefault("opts", {"recursive": recursive})
    return self._client.request('/files/rm', (path,), **kwargs)
Removes a file from the MFS .
40,709
def files_read(self, path, offset=0, count=None, **kwargs):
    """Reads a file stored in the MFS."""
    read_opts = {"offset": offset}
    # ``count`` is only sent when the caller limits the read length.
    if count is not None:
        read_opts["count"] = count
    kwargs.setdefault("opts", read_opts)
    return self._client.request('/files/read', (path,), **kwargs)
Reads a file stored in the MFS .
40,710
def files_write(self, path, file, offset=0, create=False, truncate=False,
                count=None, **kwargs):
    """Writes to a mutable file in the MFS."""
    write_opts = {"offset": offset, "create": create, "truncate": truncate}
    if count is not None:
        write_opts["count"] = count
    kwargs.setdefault("opts", write_opts)
    payload, headers = multipart.stream_files(file, self.chunk_size)
    return self._client.request('/files/write', (path,),
                                data=payload, headers=headers, **kwargs)
Writes to a mutable file in the MFS .
40,711
def files_mv(self, source, dest, **kwargs):
    """Moves files and directories within the MFS."""
    return self._client.request('/files/mv', (source, dest), **kwargs)
Moves files and directories within the MFS .
40,712
def add_bytes(self, data, **kwargs):
    """Adds a set of bytes as a file to IPFS."""
    payload, headers = multipart.stream_bytes(data, self.chunk_size)
    return self._client.request('/add', decoder='json',
                                data=payload, headers=headers, **kwargs)
Adds a set of bytes as a file to IPFS .
40,713
def add_str(self, string, **kwargs):
    """Adds a Python string as a file to IPFS."""
    payload, headers = multipart.stream_text(string, self.chunk_size)
    return self._client.request('/add', decoder='json',
                                data=payload, headers=headers, **kwargs)
Adds a Python string as a file to IPFS .
40,714
def add_json(self, json_obj, **kwargs):
    """Adds a json-serializable Python dict as a json file to IPFS."""
    serialized = encoding.Json().encode(json_obj)
    return self.add_bytes(serialized, **kwargs)
Adds a json - serializable Python dict as a json file to IPFS .
40,715
def add_pyobj(self, py_obj, **kwargs):
    """Adds a picklable Python object as a file to IPFS.

    Deprecated: unpickling untrusted data is a security risk.
    """
    warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
                  DeprecationWarning)
    pickled = encoding.Pickle().encode(py_obj)
    return self.add_bytes(pickled, **kwargs)
Adds a picklable Python object as a file to IPFS .
40,716
def get_pyobj(self, multihash, **kwargs):
    """Loads a pickled Python object from IPFS.

    Deprecated: unpickling untrusted data is a security risk.
    """
    warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
                  DeprecationWarning)
    return self.cat(multihash, decoder='pickle', **kwargs)
Loads a pickled Python object from IPFS .
40,717
def pubsub_peers(self, topic=None, **kwargs):
    """List the peers we are pubsubbing with."""
    # Omitting the topic lists peers across all topics.
    if topic is None:
        args = ()
    else:
        args = (topic,)
    return self._client.request('/pubsub/peers', args, decoder='json', **kwargs)
List the peers we are pubsubbing with .
40,718
def pubsub_pub(self, topic, payload, **kwargs):
    """Publish a message to a given pubsub topic."""
    return self._client.request('/pubsub/pub', (topic, payload),
                                decoder='json', **kwargs)
Publish a message to a given pubsub topic
40,719
def pubsub_sub(self, topic, discover=False, **kwargs):
    """Subscribe to messages on a given topic.

    Fix: ``**kwargs`` were accepted but silently dropped; they are now
    forwarded to the underlying request like every other method here.
    """
    args = (topic, discover)
    return SubChannel(self._client.request('/pubsub/sub', args,
                                           stream=True, decoder='json',
                                           **kwargs))
Subscribe to messages on a given topic.
40,720
def guess_mimetype(filename):
    """Guesses the mimetype of a file based on the given filename."""
    base_name = os.path.basename(filename)
    mime_type, _encoding = mimetypes.guess_type(base_name)
    # Fall back to the generic binary type when nothing matches.
    return mime_type or 'application/octet-stream'
Guesses the mimetype of a file based on the given filename .
40,721
def ls_dir(dirname):
    """Returns files and subdirectories within a given directory."""
    entries = os.listdir(dirname)
    files = [e for e in entries if os.path.isfile(os.path.join(dirname, e))]
    subdirs = [e for e in entries if os.path.isdir(os.path.join(dirname, e))]
    return files, subdirs
Returns files and subdirectories within a given directory .
40,722
def clean_files(files):
    """Generates tuples with a file-like object and a close indicator."""
    # A single path/file is treated as a one-element collection.
    items = files if isinstance(files, (list, tuple)) else (files,)
    for item in items:
        yield clean_file(item)
Generates tuples with a file - like object and a close indicator .
40,723
def merge(directory, message, branch_label, rev_id, revisions):
    """Merge two revisions together, creating a new revision file."""
    # Delegate to the shared implementation; note the argument reorder.
    _merge(directory, revisions, message, branch_label, rev_id)
Merge two revisions together creating a new revision file
40,724
def downgrade(directory, sql, tag, x_arg, revision):
    """Revert to a previous version."""
    # Delegate to the shared implementation; note the argument reorder.
    _downgrade(directory, revision, sql, tag, x_arg)
Revert to a previous version
40,725
def get_metadata(bind):
    """Return the metadata for a bind."""
    # An empty bind key is treated the same as the default (None) bind.
    if bind == '':
        bind = None
    result = MetaData()
    for table in target_metadata.tables.values():
        if table.info.get('bind_key') == bind:
            table.tometadata(result)
    return result
Return the metadata for a bind .
40,726
def init(directory=None, multidb=False):
    """Creates a new migration repository."""
    if directory is None:
        directory = current_app.extensions['migrate'].directory
    config = Config()
    config.set_main_option('script_location', directory)
    config.config_file_name = os.path.join(directory, 'alembic.ini')
    config = current_app.extensions['migrate'].migrate \
        .call_configure_callbacks(config)
    # Pick the repository template based on single/multi database mode.
    template = 'flask-multidb' if multidb else 'flask'
    command.init(config, directory, template)
Creates a new migration repository
40,727
def edit(directory=None, revision='current'):
    """Edit current revision."""
    # Guard clause: this subcommand only exists from Alembic 0.8.0 on.
    if alembic_version < (0, 8, 0):
        raise RuntimeError('Alembic 0.8.0 or greater is required')
    config = current_app.extensions['migrate'].migrate.get_config(directory)
    command.edit(config, revision)
Edit current revision .
40,728
def merge(directory=None, revisions='', message=None,
          branch_label=None, rev_id=None):
    """Merge two revisions together. Creates a new migration file."""
    # Guard clause: this subcommand only exists from Alembic 0.7.0 on.
    if alembic_version < (0, 7, 0):
        raise RuntimeError('Alembic 0.7.0 or greater is required')
    config = current_app.extensions['migrate'].migrate.get_config(directory)
    command.merge(config, revisions, message=message,
                  branch_label=branch_label, rev_id=rev_id)
Merge two revisions together . Creates a new migration file
40,729
def heads(directory=None, verbose=False, resolve_dependencies=False):
    """Show current available heads in the script directory."""
    # Guard clause: this subcommand only exists from Alembic 0.7.0 on.
    if alembic_version < (0, 7, 0):
        raise RuntimeError('Alembic 0.7.0 or greater is required')
    config = current_app.extensions['migrate'].migrate.get_config(directory)
    command.heads(config, verbose=verbose,
                  resolve_dependencies=resolve_dependencies)
Show current available heads in the script directory
40,730
def branches(directory=None, verbose=False):
    """Show current branch points."""
    config = current_app.extensions['migrate'].migrate.get_config(directory)
    # Older Alembic releases do not accept the ``verbose`` keyword.
    if alembic_version < (0, 7, 0):
        command.branches(config)
    else:
        command.branches(config, verbose=verbose)
Show current branch points
40,731
def current(directory=None, verbose=False, head_only=False):
    """Display the current revision for each database."""
    config = current_app.extensions['migrate'].migrate.get_config(directory)
    # Older Alembic releases do not accept the extra keywords.
    if alembic_version < (0, 7, 0):
        command.current(config)
    else:
        command.current(config, verbose=verbose, head_only=head_only)
Display the current revision for each database .
40,732
def to_json(self, content, pretty_print=False):
    """Convert a string to a JSON object."""
    # On Python 3 the payload may arrive as bytes; decode it first.
    if PY3 and isinstance(content, bytes):
        content = content.decode(encoding='utf-8')
    if pretty_print:
        json_ = self._json_pretty_print(content)
    else:
        json_ = json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return json_
Convert a string to a JSON object
40,733
def get_request(self, alias, uri, headers=None, json=None, params=None,
                allow_redirects=None, timeout=None):
    """Send a GET request on the session object found using the given alias."""
    session = self._cache.switch(alias)
    # Redirects are followed by default unless explicitly disabled.
    redir = allow_redirects if allow_redirects is not None else True
    response = self._get_request(session, uri, params, headers, json,
                                 redir, timeout)
    logger.info('Get Request using : alias=%s, uri=%s, headers=%s json=%s'
                % (alias, uri, headers, json))
    return response
Send a GET request on the session object found using the given alias
40,734
def post_request(self, alias, uri, data=None, json=None, params=None,
                 headers=None, files=None, allow_redirects=None, timeout=None):
    """Send a POST request on the session object found using the given alias."""
    session = self._cache.switch(alias)
    # File uploads keep the raw data; otherwise reformat per the headers.
    if not files:
        data = self._format_data_according_to_header(session, data, headers)
    redir = allow_redirects if allow_redirects is not None else True
    response = self._body_request("post", session, uri, data, json, params,
                                  files, headers, redir, timeout)
    dataStr = self._format_data_to_log_string_according_to_header(data, headers)
    logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s ' % (alias, uri, dataStr, headers, files, redir))
    return response
Send a POST request on the session object found using the given alias
40,735
def delete_request(self, alias, uri, data=None, json=None, params=None, headers=None, allow_redirects=None, timeout=None):
    """Send a DELETE request on the session object found using the given alias."""
    session = self._cache.switch(alias)
    data = self._format_data_according_to_header(session, data, headers)
    # Redirects are followed by default unless explicitly disabled.
    redir = True if allow_redirects is None else allow_redirects
    response = self._delete_request(session, uri, data, json, params, headers, redir, timeout)
    # Decode only for logging; the request above already used the raw bytes.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
    return response
Send a DELETE request on the session object found using the given alias
40,736
def head_request(self, alias, uri, headers=None, allow_redirects=None, timeout=None):
    """Send a HEAD request on the session object found using the given alias."""
    session = self._cache.switch(alias)
    # NOTE: unlike the other verbs, HEAD defaults to NOT following redirects.
    redir = False if allow_redirects is None else allow_redirects
    response = self._head_request(session, uri, headers, redir, timeout)
    logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
Send a HEAD request on the session object found using the given alias
40,737
def options_request(self, alias, uri, headers=None,
                    allow_redirects=None, timeout=None):
    """Send an OPTIONS request on the session object found using the given alias."""
    session = self._cache.switch(alias)
    # Redirects are followed by default unless explicitly disabled.
    redir = allow_redirects if allow_redirects is not None else True
    response = self._options_request(session, uri, headers, redir, timeout)
    logger.info('Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' % (alias, uri, headers, redir))
    return response
Send an OPTIONS request on the session object found using the given alias
40,738
def _get_url ( self , session , uri ) : url = session . url if uri : slash = '' if uri . startswith ( '/' ) else '/' url = "%s%s%s" % ( session . url , slash , uri ) return url
Helper method to get the full url
40,739
def _json_pretty_print ( self , content ) : temp = json . loads ( content ) return json . dumps ( temp , sort_keys = True , indent = 4 , separators = ( ',' , ': ' ) )
Pretty print a JSON object
40,740
def get_measurements(self, measurement='Weight', lower_bound=None, upper_bound=None):
    """Returns measurements of a given name between two dates.

    Defaults to the 30-day window ending today when no bounds are given.
    Raises ValueError when ``measurement`` is not a known measurement type.
    """
    if upper_bound is None:
        upper_bound = datetime.date.today()
    if lower_bound is None:
        lower_bound = upper_bound - datetime.timedelta(days=30)
    # Tolerate bounds passed in the opposite order.
    if lower_bound > upper_bound:
        lower_bound, upper_bound = upper_bound, lower_bound
    document = self._get_document_for_url(self._get_url_for_measurements())
    measurement_ids = self._get_measurement_ids(document)
    # Resolve the display name to the site's internal measurement id.
    if measurement in measurement_ids.keys():
        measurement_id = measurement_ids[measurement]
    else:
        raise ValueError("Measurement '%s' does not exist." % measurement)
    page = 1
    measurements = OrderedDict()
    # Walk result pages (newest first) until we pass the lower bound
    # or run out of data.
    while True:
        document = self._get_document_for_url(self._get_url_for_measurements(page, measurement_id))
        results = self._get_measurements(document)
        measurements.update(results)
        if len(results) == 0:
            break
        elif list(results.keys())[-1] > lower_bound:
            page += 1
            continue
        else:
            break
    # Drop any fetched entries that fall outside the requested window.
    for date in list(measurements.keys()):
        if not upper_bound >= date >= lower_bound:
            del measurements[date]
    return measurements
Returns measurements of a given name between two dates .
40,741
def set_measurements(self, measurement='Weight', value=None):
    """Sets a measurement for today's date.

    Raises ValueError for a blank value or unknown measurement name, and
    RuntimeError when the remote update fails.
    """
    if value is None:
        raise ValueError("Cannot update blank value.")
    document = self._get_document_for_url(self._get_url_for_measurements())
    measurement_ids = self._get_measurement_ids(document)
    # Validate the measurement name before posting anything.
    if measurement not in measurement_ids.keys():
        raise ValueError("Measurement '%s' does not exist." % measurement)
    update_url = parse.urljoin(self.BASE_URL, 'measurements/save')
    data = {}
    # CSRF token scraped from the measurements page.
    data['authenticity_token'] = self._authenticity_token
    if measurement == 'Weight':
        data['weight[display_value]'] = value
    # The form expects every measurement type to be present; only the
    # targeted one gets a value, the rest are submitted empty.
    measurement_index = 0
    for measurement_id in measurement_ids.keys():
        n = str(measurement_index)
        meas_type = 'measurement_type[' + n + ']'
        meas_val = 'measurement_value[' + n + ']'
        data[meas_type] = measurement_ids[measurement_id]
        if measurement == measurement_id:
            data[meas_val] = value
        else:
            data[meas_val] = ""
        measurement_index += 1
    result = self.session.post(update_url, data=data)
    if not result.ok:
        raise RuntimeError("Unable to update measurement in MyFitnessPal: "
                           "status code: {status}".format(status=result.status_code))
Sets a measurement for today's date.
40,742
def get_measurement_id_options(self):
    """Returns the available measurement choices."""
    document = self._get_document_for_url(self._get_url_for_measurements())
    return self._get_measurement_ids(document)
Returns list of measurement choices .
40,743
def file_supports_color(file_obj):
    """Returns True if the running system's terminal supports color."""
    platform = sys.platform
    # Windows consoles only support ANSI colors when ANSICON is present.
    supported = platform != 'Pocket PC' and (platform != 'win32'
                                             or 'ANSICON' in os.environ)
    is_tty = file_is_a_tty(file_obj)
    return supported and is_tty
Returns True if the running system's terminal supports color.
40,744
def load_report(identifier=None):
    """Returns the session referred to by identifier."""
    report_path = os.path.join(report_dir(), identifier + '.pyireport')
    return ProfilerSession.load(report_path)
Returns the session referred to by identifier
40,745
def save_report(session):
    """Saves the session to a report file and returns (path, identifier).

    Also prunes stored reports down to the 10 most recent.
    """
    old_reports = glob.glob(os.path.join(report_dir(), '*.pyireport'))
    old_reports.sort(reverse=True)
    # Newest-first ordering means pop() removes the oldest report.
    while len(old_reports) > 10:
        os.remove(old_reports.pop())
    identifier = time.strftime('%Y-%m-%dT%H-%M-%S',
                               time.localtime(session.start_time))
    path = os.path.join(report_dir(), identifier + '.pyireport')
    session.save(path)
    return path, identifier
Saves the session to a temp file and returns that path. Also prunes the number of reports to 10 so there aren't loads building up.
40,746
def root_frame(self, trim_stem=True):
    """Parses the internal frame records and returns a tree of Frame objects.

    Each record is (identifier_stack, time). The running ``frame_stack``
    mirrors the previous record's stack so shared prefixes reuse the same
    Frame objects; only the diverging suffix creates new frames.
    """
    root_frame = None
    frame_stack = []
    for frame_tuple in self.frame_records:
        identifier_stack = frame_tuple[0]
        time = frame_tuple[1]
        for stack_depth, frame_identifier in enumerate(identifier_stack):
            if stack_depth < len(frame_stack):
                # The stacks diverge here: discard the stale suffix.
                if frame_identifier != frame_stack[stack_depth].identifier:
                    del frame_stack[stack_depth:]
            if stack_depth >= len(frame_stack):
                frame = Frame(frame_identifier)
                frame_stack.append(frame)
                if stack_depth == 0:
                    # There can only ever be one root.
                    assert root_frame is None, ASSERTION_MESSAGE
                    root_frame = frame
                else:
                    parent = frame_stack[stack_depth - 1]
                    parent.add_child(frame)
        # Trim any leftover frames deeper than this record's stack.
        del frame_stack[stack_depth + 1:]
        # Record the sample's time as self-time on the deepest frame.
        frame_stack[-1].add_child(SelfTimeFrame(self_time=time))
    if root_frame is None:
        return None
    if trim_stem:
        root_frame = self._trim_stem(root_frame)
    return root_frame
Parses the internal frame records and returns a tree of Frame objects
40,747
def remove_from_parent(self):
    """Removes this frame from its parent and nulls the parent link."""
    parent = self.parent
    if parent:
        parent._children.remove(self)
        parent._invalidate_time_caches()
        self.parent = None
Removes this frame from its parent and nulls the parent link
40,748
def add_child(self, frame, after=None):
    """Adds a child frame, updating the parent link.

    Optionally insert the frame after a given sibling instead of appending.
    """
    frame.remove_from_parent()
    frame.parent = self
    if after is None:
        self._children.append(frame)
    else:
        insert_at = self._children.index(after) + 1
        self._children.insert(insert_at, frame)
    self._invalidate_time_caches()
Adds a child frame updating the parent link . Optionally insert the frame in a specific position by passing the frame to insert this one after .
40,749
def add_children(self, frames, after=None):
    """Convenience method to add multiple frames at once."""
    if after is None:
        for child in frames:
            self.add_child(child)
    else:
        # Insert in reverse so the frames end up in their original order.
        for child in reversed(frames):
            self.add_child(child, after=after)
Convenience method to add multiple frames at once .
40,750
def file_path_short(self):
    """Return the path resolved against the closest entry in sys.path."""
    if not hasattr(self, '_file_path_short'):
        if self.file_path:
            shortest = None
            # Pick the relative path with the fewest components.
            for base in sys.path:
                try:
                    candidate = os.path.relpath(self.file_path, base)
                except ValueError:
                    continue
                if (not shortest
                        or len(candidate.split(os.sep)) < len(shortest.split(os.sep))):
                    shortest = candidate
            self._file_path_short = shortest
        else:
            self._file_path_short = None
    return self._file_path_short
Return the path resolved against the closest entry in sys . path
40,751
def exit_frames(self):
    """Returns a list of frames whose children include a frame outside of the group."""
    if self._exit_frames is None:
        # Cache the computation on first access.
        self._exit_frames = [
            frame for frame in self.frames
            if any(child.group != self for child in frame.children)
        ]
    return self._exit_frames
Returns a list of frames whose children include a frame outside of the group
40,752
def first_interesting_frame(self):
    """Traverse down the frame hierarchy until a frame is found with more
    than one child."""
    root = self.root_frame()
    frame = root
    while len(frame.children) <= 1:
        # A childless chain means nothing interesting: fall back to root.
        if not frame.children:
            return root
        frame = frame.children[0]
    return frame
Traverse down the frame hierarchy until a frame is found with more than one child
40,753
def aggregate_repeated_calls(frame, options):
    """Converts a timeline into a time-aggregate summary.

    Siblings with the same identifier are merged into one frame (summing
    self time and adopting the duplicate's children), then the same
    aggregation is applied recursively and children are sorted by time.
    """
    if frame is None:
        return None
    children_by_identifier = {}
    for child in frame.children:
        if child.identifier in children_by_identifier:
            # Duplicate sibling: fold it into the first occurrence.
            aggregate_frame = children_by_identifier[child.identifier]
            aggregate_frame.self_time += child.self_time
            if child.children:
                aggregate_frame.add_children(child.children)
            child.remove_from_parent()
        else:
            children_by_identifier[child.identifier] = child
    # Recurse into the (now de-duplicated) children.
    for child in frame.children:
        aggregate_repeated_calls(child, options=options)
    # Biggest contributors first.
    frame._children.sort(key=methodcaller('time'), reverse=True)
    return frame
Converts a timeline into a time - aggregate summary .
40,754
def merge_consecutive_self_time(frame, options):
    """Combines consecutive self-time frames.

    Adjacent SelfTimeFrame children are collapsed into the first one of
    each run; any non-self-time child breaks the run.
    """
    if frame is None:
        return None
    previous_self_time_frame = None
    for child in frame.children:
        if isinstance(child, SelfTimeFrame):
            if previous_self_time_frame:
                # Fold this frame's time into the run's first frame.
                previous_self_time_frame.self_time += child.self_time
                child.remove_from_parent()
            else:
                previous_self_time_frame = child
        else:
            # A normal frame ends the current run.
            previous_self_time_frame = None
    for child in frame.children:
        merge_consecutive_self_time(child, options=options)
    return frame
Combines consecutive self time frames
40,755
def remove_unnecessary_self_time_nodes(frame, options):
    """When a frame has only one child, and that is a self-time frame,
    remove that node and fold its time into the parent, since it clutters
    the output and offers no additional information."""
    if frame is None:
        return None
    if len(frame.children) == 1 and isinstance(frame.children[0], SelfTimeFrame):
        child = frame.children[0]
        frame.self_time += child.self_time
        child.remove_from_parent()
    for child in frame.children:
        remove_unnecessary_self_time_nodes(child, options=options)
    return frame
When a frame has only one child and that is a self-time frame, remove that node, since it's unnecessary - it clutters the output and offers no additional information.
40,756
def open_in_browser(self, session, output_filename=None):
    """Open the rendered HTML in a webbrowser.

    Renders into ``output_filename`` if given, otherwise into a temp file.
    Returns the filename that was written.
    """
    if output_filename is None:
        tmp_file = tempfile.NamedTemporaryFile(suffix='.html', delete=False)
        output_filename = tmp_file.name
        with codecs.getwriter('utf-8')(tmp_file) as f:
            f.write(self.render(session))
    else:
        with codecs.open(output_filename, 'w', 'utf-8') as f:
            f.write(self.render(session))
    from pyinstrument.vendor.six.moves import urllib
    url = urllib.parse.urlunparse(('file', '', output_filename, '', '', ''))
    webbrowser.open(url)
    return output_filename
Open the rendered HTML in a webbrowser .
40,757
def run(self):
    """Compile the JS, then run the superclass implementation."""
    if subprocess.call(['npm', '--version']) != 0:
        raise RuntimeError('npm is required to build the HTML renderer.')
    for npm_command in (['npm', 'install'], ['npm', 'run', 'build']):
        self.check_call(npm_command, cwd=HTML_RENDERER_DIR)
    self.copy_file(HTML_RENDERER_DIR + '/dist/js/app.js',
                   'pyinstrument/renderers/html_resources/app.js')
    setuptools.command.build_py.build_py.run(self)
compile the JS then run superclass implementation
40,758
def deprecated(func, *args, **kwargs):
    """Marks a function as deprecated."""
    message = '{} is deprecated and should no longer be used.'.format(func)
    warnings.warn(message, DeprecationWarning, stacklevel=3)
    return func(*args, **kwargs)
Marks a function as deprecated .
40,759
def deprecated_option(option_name, message=''):
    """Marks an option as deprecated."""
    def caller(func, *args, **kwargs):
        # Warn only when the deprecated option is actually supplied.
        if option_name in kwargs:
            warn_text = '{} is deprecated. {}'.format(option_name, message)
            warnings.warn(warn_text, DeprecationWarning, stacklevel=3)
        return func(*args, **kwargs)
    return decorator(caller)
Marks an option as deprecated .
40,760
def THUMBNAIL_OPTIONS(self):
    """Set the size as a 2-tuple for thumbnailed images after uploading them."""
    from django.core.exceptions import ImproperlyConfigured
    size = self._setting('DJNG_THUMBNAIL_SIZE', (200, 200))
    valid = (isinstance(size, (list, tuple)) and len(size) == 2
             and isinstance(size[0], int) and isinstance(size[1], int))
    if not valid:
        raise ImproperlyConfigured("'DJNG_THUMBNAIL_SIZE' must be a 2-tuple of integers.")
    return {'crop': True, 'size': size}
Set the size as a 2 - tuple for thumbnailed images after uploading them .
40,761
def get_context(self, name, value, attrs):
    """Some widgets require a modified rendering context if they contain
    angular directives."""
    context = super(NgWidgetMixin, self).get_context(name, value, attrs)
    updater = getattr(self._field, 'update_widget_rendering_context', None)
    if callable(updater):
        self._field.update_widget_rendering_context(context)
    return context
Some widgets require a modified rendering context if they contain angular directives .
40,762
def errors(self):
    """Returns a TupleErrorList for this field.

    This overloaded method adds additional error lists to the errors as
    detected by the form validator; the result is cached per instance.
    """
    try:
        return self._errors_cache
    except AttributeError:
        self._errors_cache = self.form.get_field_errors(self)
        return self._errors_cache
Returns a TupleErrorList for this field . This overloaded method adds additional error lists to the errors as detected by the form validator .
40,763
def css_classes(self, extra_classes=None):
    """Returns a string of space-separated CSS classes for the wrapping
    element of this input field.

    Fix: the original called ``css_classes.remove('__default__')`` on the
    form's configuration list, permanently mutating shared state so later
    renders behaved differently. The list is now filtered without mutation.
    """
    if hasattr(extra_classes, 'split'):
        extra_classes = extra_classes.split()
    extra_classes = set(extra_classes or [])
    field_css_classes = getattr(self.form, 'field_css_classes', None)
    if hasattr(field_css_classes, 'split'):
        extra_classes.update(field_css_classes.split())
    elif isinstance(field_css_classes, (list, tuple)):
        extra_classes.update(field_css_classes)
    elif isinstance(field_css_classes, dict):
        extra_field_classes = []
        for key in ('*', self.name):
            css_classes = field_css_classes.get(key)
            if hasattr(css_classes, 'split'):
                extra_field_classes = css_classes.split()
            elif isinstance(css_classes, (list, tuple)):
                if '__default__' in css_classes:
                    # '__default__' means: extend the defaults rather than
                    # replace them. Filter instead of remove() so the
                    # form's configuration list is never mutated.
                    extra_field_classes.extend(
                        c for c in css_classes if c != '__default__')
                else:
                    extra_field_classes = css_classes
            else:
                extra_field_classes = css_classes
            extra_classes.update(extra_field_classes)
    return super(NgBoundField, self).css_classes(extra_classes)
Returns a string of space - separated CSS classes for the wrapping element of this input field .
40,764
def get_field_errors(self, field):
    """Return server side errors. Shall be overridden by derived forms to
    add their extra errors for AngularJS."""
    identifier = format_html('{0}[\'{1}\']', self.form_name, field.name)
    field_errors = self.errors.get(field.html_name, [])
    error_tuples = [
        SafeTuple((identifier, self.field_error_css_classes,
                   '$pristine', '$pristine', 'invalid', e))
        for e in field_errors
    ]
    return self.error_class(error_tuples)
Return server side errors . Shall be overridden by derived forms to add their extra errors for AngularJS .
40,765
def update_widget_attrs(self, bound_field, attrs):
    """Updates the widget attributes which shall be added to the widget
    when rendering this field."""
    if bound_field.field.has_subwidgets() is False:
        widget_classes = getattr(self, 'widget_css_classes', None)
        if widget_classes:
            # Append to an existing class attribute, or create one.
            if 'class' in attrs:
                attrs['class'] += ' ' + widget_classes
            else:
                attrs['class'] = widget_classes
    return attrs
Updated the widget attributes which shall be added to the widget when rendering this field .
40,766
def rectify_multipart_form_data(self, data):
    """If a widget was converted, and the Form data was submitted through a
    multipart request, these data fields must be converted to suit the
    Django Form validation."""
    for name, field in self.base_fields.items():
        # Fields without implode_multi_values are skipped (AttributeError
        # is also swallowed if raised from within the conversion itself,
        # matching the original best-effort behavior).
        try:
            field.implode_multi_values(name, data)
        except AttributeError:
            pass
    return data
If a widget was converted and the Form data was submitted through a multipart request then these data fields must be converted to suit the Django Form validation
40,767
def rectify_ajax_form_data(self, data):
    """If a widget was converted, and the Form data was submitted through an
    Ajax request, these data fields must be converted to suit the Django
    Form validation."""
    for name, field in self.base_fields.items():
        # Fields without convert_ajax_data are skipped (best-effort).
        try:
            data[name] = field.convert_ajax_data(data.get(name, {}))
        except AttributeError:
            pass
    return data
If a widget was converted and the Form data was submitted through an Ajax request then these data fields must be converted to suit the Django Form validation
40,768
def djng_locale_script(context, default_language='en'):
    """Returns a script tag for including the proper locale script in any
    HTML page. This tag determines the current language with its locale."""
    language = get_language_from_request(context['request']) or default_language
    return format_html('angular-locale_{}.js', language.lower())
Returns a script tag for including the proper locale script in any HTML page . This tag determines the current language with its locale .
40,769
def update_widget_attrs(self, bound_field, attrs):
    """Update the dictionary of attributes used while rendering the input
    widget."""
    # Let the form contribute its attributes first.
    bound_field.form.update_widget_attrs(bound_field, attrs)
    widget_classes = self.widget.attrs.get('class', None)
    if widget_classes:
        if 'class' in attrs:
            attrs['class'] += ' ' + widget_classes
        else:
            attrs['class'] = widget_classes
    return attrs
Update the dictionary of attributes used while rendering the input widget
40,770
def implode_multi_values(self, name, data):
    """Due to the way Angular organizes its model, when Form data is sent
    via a POST request, the posted data for this kind of widget must be
    converted into a format suitable for Django's Form validation."""
    prefix = name + '.'
    matched_keys = [key for key in data.keys() if key.startswith(prefix)]
    # Each matched key holds a single-element list; collect the values.
    values = [data.pop(key)[0] for key in matched_keys]
    if values:
        data.setlist(name, values)
Due to the way Angular organizes its model, when Form data is sent via a POST request, the posted data for this kind of widget must be converted into a format suitable for Django's Form validation.
40,771
def convert_ajax_data(self, field_data):
    """Due to the way Angular organizes its model, when this Form data is
    sent using Ajax, the sent data has to be converted into a format
    suitable for Django's Form validation."""
    # Keep only the keys whose value is truthy (checked options).
    return [key for key, checked in field_data.items() if checked]
Due to the way Angular organizes its model, when this Form data is sent using Ajax, the sent data has to be converted into a format suitable for Django's Form validation.
40,772
def process_request(self, request):
    """Reads the url name, args and kwargs from the GET parameters, reverses
    the url and rewrites the request in place so it targets the resolved
    view, stripping the ``djng_url*`` bookkeeping parameters."""
    if request.path == self.ANGULAR_REVERSE:
        url_name = request.GET.get('djng_url_name')
        url_args = request.GET.getlist('djng_url_args', [])
        url_kwargs = {}
        # Drop empty positional args.
        url_args = filter(lambda x: x, url_args)
        # Collect kwargs from djng_url_kwarg_<name> parameters.
        for param in request.GET:
            if param.startswith('djng_url_kwarg_'):
                if request.GET[param]:
                    url_kwargs[param[15:]] = request.GET[param]
        url = unquote(reverse(url_name, args=url_args, kwargs=url_kwargs))
        assert not url.startswith(self.ANGULAR_REVERSE), "Prevent recursive requests"
        # Point the request at the resolved view's path.
        request.path = request.path_info = url
        request.environ['PATH_INFO'] = url
        # Rebuild the query string without the djng_url* parameters.
        query = request.GET.copy()
        for key in request.GET:
            if key.startswith('djng_url'):
                query.pop(key, None)
        if six.PY3:
            request.environ['QUERY_STRING'] = query.urlencode()
        else:
            request.environ['QUERY_STRING'] = query.urlencode().encode('utf-8')
        request.GET = http.QueryDict(request.environ['QUERY_STRING'])
Reads the url name, args and kwargs from the GET parameters, reverses the url and resolves the view function. Returns the result of the resolved view function called with the provided args and kwargs. Since the view function is called directly, it isn't run through the middlewares, so the middlewares must be added manually. The final result is exactly the same as if the request had been made for the resolved view.
40,773
def ng_delete(self, request, *args, **kwargs):
    """Delete the object and return its data in JSON encoding."""
    if 'pk' not in request.GET:
        raise NgMissingParameterError("Object id is required to delete.")
    obj = self.get_object()
    # Serialize before deleting so the response can still include the
    # object's data (m2m relations would be gone after delete()).
    response = self.build_json_response(obj)
    obj.delete()
    return response
Delete the object and return its data in JSON encoding. The response is built before the object is actually deleted, so that we can still retrieve a serialization in the response, even with an m2m relationship.
40,774
def _post_clean(self):
    """Rewrite the error dictionary so that its keys correspond to the
    model fields."""
    super(NgModelFormMixin, self)._post_clean()
    if self._errors and self.prefix:
        prefixed = ((self.add_prefix(name), value)
                    for name, value in self._errors.items())
        self._errors = ErrorDict(prefixed)
Rewrite the error dictionary so that its keys correspond to the model fields .
40,775
def percentage(self):
    """Return current percentage; returns None if no max_value is given."""
    if self.max_value is None or self.max_value is base.UnknownLength:
        return None
    if self.max_value:
        done = self.value - self.min_value
        total = self.max_value - self.min_value
        fraction = done / total
    else:
        # A zero-width range counts as complete.
        fraction = 1
    return fraction * 100
Return current percentage returns None if no max_value is given
40,776
def example(fn):
    """Wrap the examples so they generate readable output."""
    def runner():
        try:
            sys.stdout.write('Running: %s\n' % fn.__name__)
            fn()
            sys.stdout.write('\n')
        except KeyboardInterrupt:
            # Allow Ctrl-C to skip a single example.
            sys.stdout.write('\nSkipping example.\n\n')
            time.sleep(0.2)
    wrapped = functools.wraps(fn)(runner)
    examples.append(wrapped)
    return wrapped
Wrap the examples so they generate readable output
40,777
def load_stdgraphs(size: int) -> List[nx.Graph]:
    """Load the standard graph validation set for the given node count.

    Args:
        size: Number of graph nodes; must be between 6 and 32 inclusive.

    Returns:
        List of networkx graphs read from the packaged graph6 data file.

    Raises:
        ValueError: if ``size`` is outside the supported range.
    """
    from pkg_resources import resource_stream

    if not 6 <= size <= 32:
        raise ValueError('Size out of range.')
    filename = 'datasets/data/graph{}er100.g6'.format(size)
    stream = resource_stream('quantumflow', filename)
    return nx.read_graph6(stream)
Load standard graph validation sets
40,778
def load_mnist(size: int = None,
               border: int = _MNIST_BORDER,
               blank_corners: bool = False,
               nums: List[int] = None
               ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
    """Download and preprocess the MNIST database of handwritten digits.

    Args:
        size: If given, crop ``border`` pixels from every edge and rescale
            each image to ``size`` x ``size`` pixels.
        border: Border width (in pixels) removed before rescaling.
        blank_corners: If True, zero out the four triangular corner regions
            of every image (in place, after any rescaling).
        nums: If given, keep only samples whose label is in this list.

    Returns:
        A tuple ``(x_train, y_train, x_test, y_test)`` of numpy arrays.
    """
    # Imported lazily so keras is only required when this loader is used.
    from keras.datasets import mnist

    def _filter_mnist(x: np.ndarray, y: np.ndarray,
                      nums: List[int] = None) -> Tuple[np.ndarray, np.ndarray]:
        # Keep only the samples whose label appears in `nums`.
        xt = []
        yt = []
        items = len(y)
        for n in range(items):
            if nums is not None and y[n] in nums:
                xt.append(x[n])
                yt.append(y[n])
        xt = np.stack(xt)
        yt = np.stack(yt)
        return xt, yt

    def _rescale(imgarray: np.ndarray, size: int) -> np.ndarray:
        # Crop the border, then resize each image with Lanczos resampling.
        N = imgarray.shape[0]
        imgarray = imgarray[:, border:-border, border:-border]

        # `np.float` was removed in NumPy 1.24; np.float64 is the identical type.
        rescaled = np.zeros(shape=(N, size, size), dtype=np.float64)
        for n in range(0, N):
            img = Image.fromarray(imgarray[n])
            img = img.resize((size, size), Image.LANCZOS)
            rsc = np.asarray(img).reshape((size, size))
            # NOTE(review): scaling by 256 makes the brightest pixel exactly
            # 256.0, which wraps to 0 in the uint8 cast below (255 looks
            # intended). Left unchanged to preserve existing behavior.
            # Also assumes rsc.max() > 0 — an all-black image would divide
            # by zero; TODO confirm this cannot occur for MNIST.
            rsc = 256. * rsc / rsc.max()
            rescaled[n] = rsc
        return rescaled.astype(dtype=np.uint8)

    def _blank_corners(imgarray: np.ndarray) -> None:
        # Zero the four triangular corners of every image, in place.
        sz = imgarray.shape[1]
        corner = (sz // 2) - 1
        for x in range(0, corner):
            for y in range(0, corner - x):
                imgarray[:, x, y] = 0
                imgarray[:, -(1 + x), y] = 0
                imgarray[:, -(1 + x), -(1 + y)] = 0
                imgarray[:, x, -(1 + y)] = 0

    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    if nums:
        x_train, y_train = _filter_mnist(x_train, y_train, nums)
        x_test, y_test = _filter_mnist(x_test, y_test, nums)

    if size:
        x_train = _rescale(x_train, size)
        x_test = _rescale(x_test, size)

    if blank_corners:
        _blank_corners(x_train)
        _blank_corners(x_test)

    return x_train, y_train, x_test, y_test
Download and rescale the MNIST database of handwritten digits
40,779
def astensor(array: TensorLike) -> BKTensor:
    """Convert an array-like object to a tensorflow tensor with the
    backend complex dtype (CTYPE)."""
    return tf.convert_to_tensor(value=array, dtype=CTYPE)
Convert a numpy array to a tensorflow tensor.
40,780
def inner(tensor0: BKTensor, tensor1: BKTensor) -> BKTensor:
    """Return the inner product between two states.

    Contracts conj(tensor0) against tensor1 over every axis, i.e.
    <tensor0|tensor1>.
    """
    axes = list(range(rank(tensor0)))
    return tf.tensordot(tf.math.conj(tensor0), tensor1, axes=(axes, axes))
Return the inner product between two states
40,781
def graph_cuts(graph: 'nx.Graph') -> np.ndarray:
    """Return the cut value for every binary assignment of the graph nodes.

    The result is a rank-N tensor (one axis of size 2 per node) whose entry
    at a given 0/1 index tuple is the total weight of edges crossing that
    cut. Edges without an explicit 'weight' attribute count as weight 1.
    """
    n_nodes = len(graph)
    cuts = np.zeros(shape=[2] * n_nodes, dtype=np.double)
    for node0, node1 in graph.edges():
        weight = graph[node0][node1].get('weight', 1)
        # Add the edge weight to every assignment that separates its ends.
        for index, _ in np.ndenumerate(cuts):
            if index[node0] != index[node1]:
                cuts[index] += weight
    return cuts
For the given graph return the cut value for all binary assignments of the graph .
40,782
def depth(self, local: bool = True) -> int:
    """Return the circuit depth.

    Args:
        local: If False, operations whose DAG node degree is <= 2
            (presumably purely local, single-qubit operations — confirm
            against DAGCircuit's edge construction) are excluded before
            measuring depth.
    """
    G = self.graph
    if not local:
        # Rebuild the DAG from only the non-local operations.
        def remove_local(dagc: DAGCircuit) \
                -> Generator[Operation, None, None]:
            for elem in dagc:
                if dagc.graph.degree[elem] > 2:
                    yield elem
        G = DAGCircuit(remove_local(self)).graph

    # NOTE(review): the -1 converts the longest-path length into circuit
    # depth; presumably this accounts for the DAG's terminal node layer —
    # verify against how DAGCircuit builds its graph.
    return nx.dag_longest_path_length(G) - 1
Return the circuit depth .
40,783
def components(self) -> List['DAGCircuit']:
    """Split this DAGCircuit into its independent components.

    Returns:
        A list of DAGCircuits, one per weakly connected component of the
        underlying DAG.
    """
    # networkx removed weakly_connected_component_subgraphs() in v2.4;
    # build the component subgraphs explicitly from the node sets instead.
    # Works on both old and new networkx versions.
    node_sets = nx.weakly_connected_components(self.graph)
    return [DAGCircuit(self.graph.subgraph(nodes).copy())
            for nodes in node_sets]
Split DAGCircuit into independent components
40,784
def zero_state(qubits: Union[int, Qubits]) -> State:
    """Return the all-zero computational basis state |00...0> on N qubits."""
    N, qubits = qubits_count_tuple(qubits)
    amplitudes = np.zeros(shape=[2] * N)
    amplitudes[(0,) * N] = 1
    return State(amplitudes, qubits)
Return the all - zero state on N qubits
40,785
def w_state(qubits: Union[int, Qubits]) -> State:
    """Return a W state on N qubits: an equal superposition of the N
    basis states with exactly one qubit set to 1."""
    N, qubits = qubits_count_tuple(qubits)
    amplitude = 1 / sqrt(N)
    ket = np.zeros(shape=[2] * N)
    for n in range(N):
        # Basis state with only qubit n excited.
        index = [0] * N
        index[n] = 1
        ket[tuple(index)] = amplitude
    return State(ket, qubits)
Return a W state on N qubits
40,786
def ghz_state(qubits: Union[int, Qubits]) -> State:
    """Return a GHZ state on N qubits: (|00...0> + |11...1>) / sqrt(2)."""
    N, qubits = qubits_count_tuple(qubits)
    amplitude = 1 / sqrt(2)
    ket = np.zeros(shape=[2] * N)
    ket[(0,) * N] = amplitude
    ket[(1,) * N] = amplitude
    return State(ket, qubits)
Return a GHZ state on N qubits
40,787
def random_state(qubits: Union[int, Qubits]) -> State:
    """Return a random state from the space of N qubits.

    Amplitudes are drawn from complex Gaussians and the result is
    normalized.
    """
    N, qubits = qubits_count_tuple(qubits)
    shape = [2] * N
    real = np.random.normal(size=shape)
    imag = np.random.normal(size=shape)
    return State(real + 1j * imag, qubits).normalize()
Return a random state from the space of N qubits
40,788
def join_states(*states: State) -> State:
    """Join state vectors into one larger qubit state via the tensor product."""
    joined = reduce(outer_product, (ket.vec for ket in states))
    return State(joined.tensor, joined.qubits)
Join two state vectors into a larger qubit state
40,789
def print_state(state: State, file: TextIO = None) -> None:
    """Print every basis-state amplitude of the state vector, one per line."""
    amplitudes = state.vec.asarray()
    for index, amplitude in np.ndenumerate(amplitudes):
        label = "".join(str(n) for n in index)
        print(label, ":", amplitude, file=file)
Print a state vector
40,790
def print_probabilities(state: State, ndigits: int = 4,
                        file: TextIO = None) -> None:
    """Pretty-print the measurement probability of each basis state.

    Probabilities are rounded to ``ndigits``; entries that round to zero
    are omitted.
    """
    probabilities = bk.evaluate(state.probabilities())
    for index, prob in np.ndenumerate(probabilities):
        prob = round(prob, ndigits)
        if prob == 0.0:
            continue
        label = "".join(str(n) for n in index)
        print(label, ":", prob, file=file)
Pretty print state probabilities .
40,791
def mixed_density(qubits: Union[int, Qubits]) -> Density:
    """Return the completely (maximally) mixed density matrix I / 2**N."""
    N, qubits = qubits_count_tuple(qubits)
    dim = 2 ** N
    return Density(np.eye(dim) / dim, qubits)
Returns the completely mixed density matrix
40,792
def join_densities(*densities: Density) -> Density:
    """Join mixed states into one larger qubit state via the tensor product."""
    joined = reduce(outer_product, (rho.vec for rho in densities))
    # Merge classical memories; earlier arguments win on key collisions
    # (ChainMap lookup order).
    memory = dict(ChainMap(*[rho.memory for rho in densities]))
    return Density(joined.tensor, joined.qubits, memory)
Join two mixed states into a larger qubit state
40,793
def normalize(self) -> 'State':
    """Return a copy of this state rescaled to unit norm."""
    scale = bk.ccast(bk.sqrt(self.norm()))
    return State(self.tensor / scale, self.qubits, self._memory)
Normalize the state
40,794
def sample(self, trials: int) -> np.ndarray:
    """Measure the state in the computational basis ``trials`` times.

    Returns:
        An integer array with the same shape as the probability tensor,
        holding the count observed for each output configuration.
    """
    probs = np.real(bk.evaluate(self.probabilities()))
    counts = np.random.multinomial(trials, probs.ravel())
    return counts.reshape(probs.shape)
Measure the state in the computational basis for the given number of trials, and return the counts of each output configuration.
40,795
def expectation(self, diag_hermitian: bk.TensorLike,
                trials: int = None) -> bk.BKTensor:
    """Return the expectation of a measurement.

    Since measurement happens in the computational basis, only the
    diagonal of the Hermitian operator in that basis is required.

    Args:
        diag_hermitian: Diagonal of the observable in the computational
            basis.
        trials: If given, estimate probabilities empirically from this
            many sampled measurements instead of using exact amplitudes.
    """
    if trials is None:
        probs = self.probabilities()
    else:
        # Empirical probability estimate from measurement counts.
        probs = bk.real(bk.astensorproduct(self.sample(trials) / trials))

    diagonal = bk.astensorproduct(diag_hermitian)
    return bk.sum(bk.real(diagonal) * probs)
Return the expectation of a measurement . Since we can only measure our computer in the computational basis we only require the diagonal of the Hermitian in that basis .
40,796
def measure(self) -> np.ndarray:
    """Perform a single measurement in the computational basis.

    Returns:
        The observed bit values, one per qubit.
    """
    probs = np.real(bk.evaluate(self.probabilities()))
    # All basis-state index tuples, in the same order as probs.ravel().
    basis = np.asarray(list(np.ndindex(*[2] * self.qubit_nb)))
    choice = np.random.choice(probs.size, p=probs.ravel())
    return basis[choice]
Measure the state in the computational basis .
40,797
def asdensity(self) -> 'Density':
    """Return this pure state as a density matrix |psi><psi|."""
    rho = bk.outer(self.tensor, bk.conj(self.tensor))
    return Density(rho, self.qubits, self._memory)
Convert a pure state to a density matrix
40,798
def benchmark(N, gates):
    """Create and run a random benchmark circuit.

    Args:
        N: Number of qubits.
        gates: Approximate total gate count; after the initial layer of
            Hadamards, gates are added in X/T/CNOT triples.

    Returns:
        The final state's backing tensor.
    """
    qubits = list(range(N))
    ket = qf.zero_state(N)

    # Initial layer: a Hadamard on every qubit.
    for q in range(N):
        ket = qf.H(q).run(ket)

    # Remaining budget spent in triples on random qubit pairs.
    for _ in range((gates - N) // 3):
        q0, q1 = random.sample(qubits, 2)
        ket = qf.X(q0).run(ket)
        ket = qf.T(q1).run(ket)
        ket = qf.CNOT(q0, q1).run(ket)

    return ket.vec.tensor
Create and run a circuit with N qubits and given number of gates
40,799
def sandwich_decompositions(coords0, coords1, samples=SAMPLES):
    """Sample "sandwich" circuits and return their canonical coordinates.

    Each sample composes CANONICAL(coords0) -- random single-qubit gates --
    CANONICAL(coords1) on qubits 0 and 1, then decomposes the resulting
    two-qubit gate into canonical coordinates.
    """
    decompositions = []
    for _ in range(samples):
        circ = qf.Circuit()
        circ += qf.CANONICAL(*coords0, 0, 1)
        circ += qf.random_gate([0])
        circ += qf.random_gate([1])
        circ += qf.CANONICAL(*coords1, 0, 1)

        decompositions.append(qf.canonical_coords(circ.asgate()))
    return decompositions
Create composite gates decompose and return a list of canonical coordinates