idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
16,100
# Look up the psiturk.org ad id registered for this HIT id.
# Returns the 'ad_id' field on HTTP 200, the string "error" otherwise;
# raises ExperimentError('api_server_not_reachable') when the request fails.
# NOTE(review): the bare `except` also hides auth/DNS errors -- consider requests.RequestException.
def get_ad_via_hitid ( hit_id ) : username = CONFIG . get ( 'psiTurk Access' , 'psiturk_access_key_id' ) password = CONFIG . get ( 'psiTurk Access' , 'psiturk_secret_access_id' ) try : req = requests . get ( 'https://api.psiturk.org/api/ad/lookup/' + hit_id , auth = ( username , password ) ) except : raise ExperimentError ( 'api_server_not_reachable' ) else : if req . status_code == 200 : return req . json ( ) [ 'ad_id' ] else : return "error"
Get ad via HIT id
16,101
# GET /sync handler: return the participant's stored datastring as JSON,
# falling back to a dict of basic participant fields when the datastring is
# not valid JSON.
# NOTE(review): a DB miss is only logged -- `user` is then unbound and the
# following access raises NameError; confirm this is the intended failure mode.
def load ( uid = None ) : app . logger . info ( "GET /sync route with id: %s" % uid ) try : user = Participant . query . filter ( Participant . uniqueid == uid ) . one ( ) except exc . SQLAlchemyError : app . logger . error ( "DB error: Unique user not found." ) try : resp = json . loads ( user . datastring ) except : resp = { "condition" : user . cond , "counterbalance" : user . counterbalance , "assignmentId" : user . assignmentid , "workerId" : user . workerid , "hitId" : user . hitid , "bonus" : user . bonus } return jsonify ( ** resp )
Load experiment data which should be a JSON object and will be stored after converting to string .
16,102
# PUT /sync handler: persist the request body as the participant's datastring
# (xmlcharrefreplace-encoded to ASCII), commit, then log the current trial
# number when the datastring parses as JSON.
def update ( uid = None ) : app . logger . info ( "PUT /sync route with id: %s" % uid ) try : user = Participant . query . filter ( Participant . uniqueid == uid ) . one ( ) except exc . SQLAlchemyError : app . logger . error ( "DB error: Unique user not found." ) if hasattr ( request , 'json' ) : user . datastring = request . data . decode ( 'utf-8' ) . encode ( 'ascii' , 'xmlcharrefreplace' ) db_session . add ( user ) db_session . commit ( ) try : data = json . loads ( user . datastring ) except : data = { } trial = data . get ( "currenttrial" , None ) app . logger . info ( "saved data for %s (current trial: %s)" , uid , trial ) resp = { "status" : "user data saved" } return jsonify ( ** resp )
Save experiment data which should be a JSON object and will be stored after converting to string .
16,103
# Mark a participant as having quit early (status QUITEARLY), unless the
# uniqueId has the "debug" prefix, in which case nothing is recorded.
def quitter ( ) : unique_id = request . form [ 'uniqueId' ] if unique_id [ : 5 ] == "debug" : debug_mode = True else : debug_mode = False if debug_mode : resp = { "status" : "didn't mark as quitter since this is debugging" } return jsonify ( ** resp ) else : try : unique_id = request . form [ 'uniqueId' ] app . logger . info ( "Marking quitter %s" % unique_id ) user = Participant . query . filter ( Participant . uniqueid == unique_id ) . one ( ) user . status = QUITEARLY db_session . add ( user ) db_session . commit ( ) except exc . SQLAlchemyError : raise ExperimentError ( 'tried_to_quit' ) else : resp = { "status" : "marked as quitter" } return jsonify ( ** resp )
Mark quitter as such .
16,104
# Mark the participant COMPLETED and stamp endhit; renders closepopup.html
# in sandbox/live mode and complete.html otherwise.
# NOTE(review): the bare `except` converts any failure (including typos)
# into 'error_setting_worker_complete'.
def debug_complete ( ) : if not 'uniqueId' in request . args : raise ExperimentError ( 'improper_inputs' ) else : unique_id = request . args [ 'uniqueId' ] mode = request . args [ 'mode' ] try : user = Participant . query . filter ( Participant . uniqueid == unique_id ) . one ( ) user . status = COMPLETED user . endhit = datetime . datetime . now ( ) db_session . add ( user ) db_session . commit ( ) except : raise ExperimentError ( 'error_setting_worker_complete' ) else : if ( mode == 'sandbox' or mode == 'live' ) : return render_template ( 'closepopup.html' ) else : return render_template ( 'complete.html' )
Debugging route for complete .
16,105
def regularpage(foldername=None, pagename=None):
    """Catch-all route: render a static template.

    Renders ``pagename`` directly, or ``foldername/pagename`` when a folder
    is given; raises ExperimentError('page_not_found') when neither is set.
    """
    if foldername is None:
        if pagename is None:
            raise ExperimentError('page_not_found')
        return render_template(pagename)
    return render_template(foldername + "/" + pagename)
Route not found by the other routes above . May point to a static template .
16,106
# Start the Flask dev server on 0.0.0.0 at the configured port with template
# auto-reload and debug mode enabled (Python 2 print statement).
def run_webserver ( ) : host = "0.0.0.0" port = CONFIG . getint ( 'Server Parameters' , 'port' ) print "Serving on " , "http://" + host + ":" + str ( port ) app . config [ 'TEMPLATES_AUTO_RELOAD' ] = True app . jinja_env . auto_reload = True app . run ( debug = True , host = host , port = port )
Run web server
16,107
def random_id_generator(self, size=6, chars=string.ascii_uppercase + string.digits):
    """Return a random identifier of `size` characters drawn from `chars`."""
    picks = [random.choice(chars) for _ in range(size)]
    return ''.join(picks)
Generate random id numbers
16,108
def add_bonus(worker_dict):
    """Adds DB-logged worker bonus to worker list data."""
    try:
        uid = '{}:{}'.format(worker_dict['workerId'], worker_dict['assignmentId'])
        record = Participant.query.filter(Participant.uniqueid == uid).one()
        worker_dict['bonus'] = record.bonus
    except sa.exc.InvalidRequestError:
        # No (unique) DB row for this worker -- bonus unknown.
        worker_dict['bonus'] = 'N/A'
    return worker_dict
Adds DB - logged worker bonus to worker list data
16,109
# Fetch workers either by explicit assignment ids or from the AMT service by
# status/HITs; unless all_studies, restrict to this study's HITs, then attach
# each worker's bonus. `status`, if set, should be Submitted/Approved/Rejected.
def get_workers ( self , status = None , chosen_hits = None , assignment_ids = None , all_studies = False ) : if assignment_ids : workers = [ self . get_worker ( assignment_id ) for assignment_id in assignment_ids ] else : workers = self . amt_services . get_workers ( assignment_status = status , chosen_hits = chosen_hits ) if workers is False : raise Exception ( '*** failed to get workers' ) if not all_studies : my_hitids = self . _get_my_hitids ( ) workers = [ worker for worker in workers if worker [ 'hitId' ] in my_hitids ] workers = [ self . add_bonus ( worker ) for worker in workers ] return workers
Status, if set, can be one of Submitted, Approved, or Rejected.
16,110
# Add worker assignments and/or minutes to a HIT.
# NOTE(review): although a list is required, only hit_id[0] is extended.
def hit_extend ( self , hit_id , assignments , minutes ) : assert type ( hit_id ) is list assert type ( hit_id [ 0 ] ) is str if self . amt_services . extend_hit ( hit_id [ 0 ] , assignments , minutes ) : print "HIT extended."
Add additional worker assignments or minutes to a HIT .
16,111
# Delete the given HITs (or all Reviewable HITs when all_hits is True);
# aborts on the first HIT whose status cannot be fetched or is not
# 'Reviewable' (Python 2 print statements).
def hit_delete ( self , all_hits , hit_ids = None ) : if all_hits : hits_data = self . amt_services . get_all_hits ( ) hit_ids = [ hit . options [ 'hitid' ] for hit in hits_data if hit . options [ 'status' ] == "Reviewable" ] for hit in hit_ids : status = self . amt_services . get_hit_status ( hit ) if not status : print "*** Error getting hit status" return if self . amt_services . get_hit_status ( hit ) != "Reviewable" : print ( "*** This hit is not 'Reviewable' and so can not be " "deleted" ) return else : success = self . amt_services . delete_hit ( hit ) if success : if self . sandbox : print "deleting sandbox HIT" , hit else : print "deleting live HIT" , hit
Delete HIT .
16,112
# Expire the given HITs, or every active HIT when all_hits is True.
def hit_expire ( self , all_hits , hit_ids = None ) : if all_hits : hits_data = self . get_active_hits ( ) hit_ids = [ hit . options [ 'hitid' ] for hit in hits_data ] for hit in hit_ids : success = self . amt_services . expire_hit ( hit ) if success : if self . sandbox : print "expiring sandbox HIT" , hit else : print "expiring live HIT" , hit
Expire all HITs .
16,113
# Create a HIT, either via the psiturk.org ad server (credential checks, ad
# registration, ad/HIT linking) or with a locally configured ad_location.
# Returns (hit_id, ad_id); raises Exception on credential or creation failure.
def hit_create ( self , numWorkers , reward , duration ) : if self . sandbox : mode = 'sandbox' else : mode = 'live' server_loc = str ( self . config . get ( 'Server Parameters' , 'host' ) ) use_psiturk_ad_server = self . config . getboolean ( 'Shell Parameters' , 'use_psiturk_ad_server' ) if use_psiturk_ad_server : if not self . web_services . check_credentials ( ) : error_msg = '\n' . join ( [ '*****************************' , ' Sorry, your psiTurk Credentials are invalid.\n ' , ' You cannot create ads and hits until you enter valid credentials in ' , ' the \'psiTurk Access\' section of ~/.psiturkconfig. You can obtain your' , ' credentials or sign up at https://www.psiturk.org/login.\n' ] ) raise Exception ( error_msg ) if not self . amt_services . verify_aws_login ( ) : error_msg = '\n' . join ( [ '*****************************' , ' Sorry, your AWS Credentials are invalid.\n ' , ' You cannot create ads and hits until you enter valid credentials in ' , ' the \'AWS Access\' section of ~/.psiturkconfig. You can obtain your ' , ' credentials via the Amazon AMT requester website.\n' ] ) raise Exception ( error_msg ) ad_id = None if use_psiturk_ad_server : ad_id = self . create_psiturk_ad ( ) create_failed = False fail_msg = None if ad_id is not False : ad_location = self . web_services . get_ad_url ( ad_id , int ( self . sandbox ) ) hit_config = self . generate_hit_config ( ad_location , numWorkers , reward , duration ) hit_id = self . amt_services . create_hit ( hit_config ) if hit_id is not False : if not self . web_services . set_ad_hitid ( ad_id , hit_id , int ( self . sandbox ) ) : create_failed = True fail_msg = " Unable to update Ad on http://ad.psiturk.org to point at HIT." else : create_failed = True fail_msg = " Unable to create HIT on Amazon Mechanical Turk." else : create_failed = True fail_msg = " Unable to create Ad on http://ad.psiturk.org." else : ad_location = "{}?mode={}" . format ( self . config . 
get ( 'Shell Parameters' , 'ad_location' ) , mode ) hit_config = self . generate_hit_config ( ad_location , numWorkers , reward , duration ) create_failed = False hit_id = self . amt_services . create_hit ( hit_config ) if hit_id is False : create_failed = True fail_msg = " Unable to create HIT on Amazon Mechanical Turk." if create_failed : print '\n' . join ( [ '*****************************' , ' Sorry, there was an error creating hit and registering ad.' ] ) if fail_msg is None : fail_msg = '' raise Exception ( fail_msg ) return ( hit_id , ad_id )
Create a HIT
16,114
# Print the available AWS regions, marking the currently selected one.
def db_aws_list_regions ( self ) : regions = self . db_services . list_regions ( ) if regions != [ ] : print "Avaliable AWS regions:" for reg in regions : print '\t' + reg , if reg == self . db_services . get_region ( ) : print "(currently selected)" else : print ''
List AWS DB regions
16,115
# Set the AWS region (prompting interactively until a valid name is given,
# giving up after 5 tries), persist it to config, and restart the server if
# it is running.
def db_aws_set_region ( self , region_name ) : if region_name is None : self . db_aws_list_regions ( ) allowed_regions = self . db_services . list_regions ( ) region_name = "NONSENSE WORD1234" tries = 0 while region_name not in allowed_regions : if tries == 0 : region_name = raw_input ( 'Enter the name of the region you ' 'would like to use: ' ) else : print ( "*** The region name (%s) you entered is not allowed, " "please choose from the list printed above (use type 'db " "aws_list_regions'." % region_name ) region_name = raw_input ( 'Enter the name of the region you ' 'would like to use: ' ) tries += 1 if tries > 5 : print ( "*** Error, region you are requesting not available. " "No changes made to regions." ) return self . db_services . set_region ( region_name ) print "Region updated to " , region_name self . config . set ( 'AWS Access' , 'aws_region' , region_name , True ) if self . server . is_server_running ( ) == 'yes' : self . server_restart ( )
Set AWS region
16,116
# List the RDS instances for this account/region with their id and status.
def db_aws_list_instances ( self ) : instances = self . db_services . get_db_instances ( ) if not instances : print ( "There are no DB instances associated with your AWS account " "in region " + self . db_services . get_region ( ) ) else : print ( "Here are the current DB instances associated with your AWS " "account in region " + self . db_services . get_region ( ) ) for dbinst in instances : print '\t' + '-' * 20 print "\tInstance ID: " + dbinst . id print "\tStatus: " + dbinst . status
List AWS DB instances
16,117
# Delete an RDS instance, interactively validating the id when not supplied;
# requires an explicit 'y' confirmation since deletion erases the database.
def db_aws_delete_instance ( self , instance_id ) : interactive = False if instance_id is None : interactive = True instances = self . db_services . get_db_instances ( ) instance_list = [ dbinst . id for dbinst in instances ] if interactive : valid = False if len ( instances ) == 0 : print ( "There are no instances you can delete currently. Use " "`db aws_create_instance` to make one." ) return print "Here are the available instances you can delete:" for inst in instances : print "\t " , inst . id , "(" , inst . status , ")" while not valid : instance_id = raw_input ( 'Enter the instance identity you would ' 'like to delete: ' ) res = self . db_services . validate_instance_id ( instance_id ) if res is True : valid = True else : print ( res + " Try again, instance name not valid. Check " "for typos." ) if instance_id in instance_list : valid = True else : valid = False print ( "Try again, instance not present in this account. " "Try again checking for typos." ) else : res = self . db_services . validate_instance_id ( instance_id ) if res is not True : print ( "*** Error, instance name either not valid. Try again " "checking for typos." ) return if instance_id not in instance_list : print ( "*** Error, This instance not present in this account. " "Try again checking for typos. Run `db aws_list_instances` to " "see valid list." ) return user_input = raw_input ( "Deleting an instance will erase all your data associated with the " "database in that instance. Really quit? y or n:" ) if user_input == 'y' : res = self . db_services . delete_db_instance ( instance_id ) if res : print ( "AWS RDS database instance %s deleted. Run `db " "aws_list_instances` for current status." % instance_id ) else : print ( "*** Error deleting database instance %s. " "It maybe because it is still being created, deleted, or is " "being backed up. Run `db aws_list_instances` for current " "status." % instance_id ) else : return
Delete AWS DB instance
16,118
def init(self, *args):
    """Build a config dict from self.options.

    Keeps only keys that are recognized settings (present in
    self.cfg.settings, compared lower-cased) and have a non-None value.
    """
    return {
        key.lower(): value
        for key, value in self.options.items()
        if key.lower() in self.cfg.settings and value is not None
    }
init method Takes our custom options from self . options and creates a config dict which specifies custom settings .
16,119
def wait_until_online(function, ip, port):
    """Run `function` once the server comes online (ip:port stops being free).

    Returns the started Wait_For_State watcher.
    """
    def _port_taken():
        return not is_port_available(ip, port)
    awaiting_service = Wait_For_State(_port_taken, function)
    awaiting_service.start()
    return awaiting_service
Uses Wait_For_State to wait for the server to come online then runs the given function .
16,120
def process():
    """Dispatch to the right entry point based on the invoked script name."""
    launchers = {
        "psiturk": launch_shell,
        "psiturk-server": launch_server,
        "psiturk-shell": launch_shell,
        "psiturk-setup-example": setup_example,
        "psiturk-install": install_from_exchange,
    }
    invoked_as = os.path.basename(sys.argv[0])
    action = launchers.get(invoked_as)
    if action is not None:
        action()
Figure out how we were invoked
16,121
# CLI entry point: parse an experiment id from argv and download that
# experiment from the psiturk.org experiment exchange.
def install_from_exchange ( ) : parser = argparse . ArgumentParser ( description = 'Download experiment from the psiturk.org experiment\ exchange (http://psiturk.org/ee).' ) parser . add_argument ( 'exp_id' , metavar = 'exp_id' , type = str , help = 'the id number of the\ experiment in the exchange' ) args = parser . parse_args ( ) exp_exch = ExperimentExchangeServices ( ) exp_exch . download_experiment ( args . exp_id )
Install from experiment exchange .
16,122
# CLI entry point: print the version number with -v/--version, otherwise
# scaffold the default stroop example project in the current directory.
def setup_example ( ) : parser = argparse . ArgumentParser ( description = 'Creates a simple default project (stroop) in the current\ directory with the necessary psiTurk files.' ) parser . add_argument ( '-v' , '--version' , help = 'Print version number.' , action = "store_true" ) args = parser . parse_args ( ) if args . version : print version_number else : import psiturk . setup_example as se se . setup_example ( )
Add commands for testing etc .
16,123
def colorize(target, color, use_escape=True):
    """Colorize `target` with ANSI SGR escape codes.

    Args:
        target: string to colorize.
        color: one of the named colors/styles below; unknown names add no
            color code (but the reset suffix is still appended).
        use_escape: when True, wrap the codes in \001/\002 readline markers
            so they do not count toward the visible prompt width; set False
            for text not interpreted by readline.
    """
    # Bug fix: 'blue' previously used '\033[93m', which is yellow's SGR
    # code; the correct bright-blue code is '\033[94m'.
    codes = {
        'purple': '\033[95m',
        'cyan': '\033[96m',
        'darkcyan': '\033[36m',
        'blue': '\033[94m',
        'green': '\033[92m',
        'yellow': '\033[93m',
        'red': '\033[91m',
        'white': '\033[37m',
        'bold': '\033[1m',
        'underline': '\033[4m',
    }
    color_code = codes.get(color, '')
    if use_escape:
        def escape(code):
            return '\001%s\002' % code
        return escape(color_code) + target + escape('\033[0m')
    return color_code + target + '\033[m'
Colorize target string . Set use_escape to false when text will not be interpreted by readline such as in intro message .
16,124
# Fallback for unknown shell commands: report the error and suggest the
# closest valid command via fuzzy matching (process.extractOne).
def default ( self , cmd ) : choices = [ "help" , "mode" , "psiturk_status" , "server" , "shortcuts" , "worker" , "db" , "edit" , "open" , "config" , "show" , "debug" , "setup_example" , "status" , "tunnel" , "amt_balance" , "download_datafiles" , "exit" , "hit" , "load" , "quit" , "save" , "shell" , "version" ] print "%s is not a psiTurk command. See 'help'." % ( cmd ) print "Did you mean this?\n %s" % ( process . extractOne ( cmd , choices ) [ 0 ] )
Collect incorrect and mistyped commands
16,125
# Cabin-mode sanity checks: require a sqlite database_url and host=localhost;
# exits the process when either condition is violated.
def check_offline_configuration ( self ) : quit_on_start = False database_url = self . config . get ( 'Database Parameters' , 'database_url' ) host = self . config . get ( 'Server Parameters' , 'host' , 'localhost' ) if database_url [ : 6 ] != 'sqlite' : print ( "*** Error: config.txt option 'database_url' set to use " "mysql://. Please change this sqllite:// while in cabin mode." ) quit_on_start = True if host != 'localhost' : print ( "*** Error: config option 'host' is not set to localhost. " "Please change this to localhost while in cabin mode." ) quit_on_start = True if quit_on_start : exit ( )
Check offline configuration file
16,126
# Read the cabin-mode banner from help_path and append the colorized
# version/help footer.
# NOTE(review): the file handle is never closed -- consider a `with` block.
def get_intro_prompt ( self ) : sys_status = open ( self . help_path + 'cabin.txt' , 'r' ) server_msg = sys_status . read ( ) return server_msg + colorize ( 'psiTurk version ' + version_number + '\nType "help" for more information.' , 'green' , False )
Print cabin mode message
16,127
def color_prompt(self):
    """Rebuild self.prompt with a colorized server-status segment."""
    status_labels = {
        'yes': ('on', 'green'),
        'no': ('off', 'red'),
        'maybe': ('unknown', 'yellow'),
        'blocked': ('blocked', 'red'),
    }
    server_status = self.server.is_server_running()
    server_string = ''
    if server_status in status_labels:
        label, color = status_labels[server_status]
        server_string = colorize(label, color)
    prompt = '[' + colorize('psiTurk', 'bold')
    prompt += ' server:' + server_string
    prompt += ' mode:' + colorize('cabin', 'bold')
    prompt += ']$ '
    self.prompt = prompt
Construct psiTurk shell prompt
16,128
# One-time preloop setup: ensure .psiturk_history exists, load it into
# readline, and mirror the non-empty entries into self.history.
def preloop ( self ) : if not self . already_prelooped : self . already_prelooped = True open ( '.psiturk_history' , 'a' ) . close ( ) readline . read_history_file ( '.psiturk_history' ) for i in range ( readline . get_current_history_length ( ) ) : if readline . get_history_item ( i ) is not None : self . history . append ( readline . get_history_item ( i ) )
Keep persistent command history .
16,129
def onecmd_plus_hooks(self, line):
    """Route empty input to emptyline(); otherwise defer to Cmd's hooks."""
    if line:
        return Cmd.onecmd_plus_hooks(self, line)
    return self.emptyline()
Trigger hooks after command .
16,130
# Refresh the colored prompt after every command, then defer to Cmd.postcmd.
def postcmd ( self , stop , line ) : self . color_prompt ( ) return Cmd . postcmd ( self , stop , line )
Exit cmd cleanly .
16,131
# Print active, reviewable, or all HITs depending on the flags
# (Python 2 print statements).
def hit_list ( self , active_hits , reviewable_hits , all_studies ) : if active_hits : hits_data = self . amt_services_wrapper . get_active_hits ( all_studies ) elif reviewable_hits : hits_data = self . amt_services_wrapper . get_reviewable_hits ( all_studies ) else : hits_data = self . amt_services_wrapper . get_all_hits ( all_studies ) if not hits_data : print '*** no hits retrieved' else : for hit in hits_data : print hit
List hits .
16,132
def _confirm_dialog(self, prompt):
    """Prompt until the user answers yes or no; return True/False.

    Accepts y/ye/yes and n/no (case-insensitive, whitespace-stripped).
    """
    valid = {'y': True, 'ye': True, 'yes': True, 'n': False, 'no': False}
    response = raw_input(prompt).strip().lower()
    while True:
        try:
            return valid[response]
        # Bug fix: a bare `except` also swallowed KeyboardInterrupt/EOFError,
        # making the dialog impossible to abort; only an unrecognized answer
        # (KeyError) should re-prompt.
        except KeyError:
            response = raw_input("Please respond 'y' or 'n': ").strip().lower()
Prompts for a yes or no to given prompt .
16,133
# Shut down the experiment server if it is (or may be) running, then pause
# briefly to let the shutdown begin.
def server_off ( self ) : if ( self . server . is_server_running ( ) == 'yes' or self . server . is_server_running ( ) == 'maybe' ) : self . server . shutdown ( ) print 'Please wait. This could take a few seconds.' time . sleep ( 0.5 )
Stop experiment server
16,134
def complete_config(self, text, line, begidx, endidx):
    """Tab-complete `config` subcommands that start with `text`."""
    matches = []
    for command in PsiturkShell.config_commands:
        if command.startswith(text):
            matches.append(command)
    return matches
Tab - complete config command
16,135
# Print every config section followed by its key=value pairs.
def print_config ( self , _ ) : for section in self . config . sections ( ) : print '[%s]' % section items = dict ( self . config . items ( section ) ) for k in items : print "%(a)s=%(b)s" % { 'a' : k , 'b' : items [ k ] } print ''
Print configuration .
16,136
# Reload config.txt; when the server is running, ask for confirmation first
# and restart the server afterwards.
def reload_config ( self , _ ) : restart_server = False if ( self . server . is_server_running ( ) == 'yes' or self . server . is_server_running ( ) == 'maybe' ) : user_input = raw_input ( "Reloading configuration requires the server " "to restart. Really reload? y or n: " ) if user_input != 'y' : return restart_server = True self . config . load_config ( ) if restart_server : self . server_restart ( )
Reload config .
16,137
# Print a colorized one-line server status (online/offline/unknown/blocked).
def do_status ( self , _ ) : server_status = self . server . is_server_running ( ) if server_status == 'yes' : print 'Server: ' + colorize ( 'currently online' , 'green' ) elif server_status == 'no' : print 'Server: ' + colorize ( 'currently offline' , 'red' ) elif server_status == 'maybe' : print 'Server: ' + colorize ( 'status unknown' , 'yellow' ) elif server_status == 'blocked' : print 'Server: ' + colorize ( 'blocked' , 'red' )
Notify user of server status .
16,138
# Point database_url at a local SQLite file (prompting for the filename when
# not supplied, defaulting to participants.db) and restart a running server.
def db_use_local_file ( self , arg , filename = None ) : if filename is None : filename = raw_input ( 'Enter the filename of the local SQLLite ' 'database you would like to use ' '[default=participants.db]: ' ) if filename == '' : filename = 'participants.db' base_url = "sqlite:///" + filename self . config . set ( "Database Parameters" , "database_url" , base_url ) print "Updated database setting (database_url): \n\t" , self . config . get ( "Database Parameters" , "database_url" ) if self . server . is_server_running ( ) == 'yes' : self . server_restart ( )
Use local file for DB .
16,139
# Write trialdata/eventdata/questiondata CSV files, concatenating the
# corresponding data for every participant in the database.
def do_download_datafiles ( self , _ ) : contents = { "trialdata" : lambda p : p . get_trial_data ( ) , "eventdata" : lambda p : p . get_event_data ( ) , "questiondata" : lambda p : p . get_question_data ( ) } query = Participant . query . all ( ) for k in contents : ret = "" . join ( [ contents [ k ] ( p ) for p in query ] ) temp_file = open ( k + '.csv' , 'w' ) temp_file . write ( ret ) temp_file . close ( )
Download datafiles .
16,140
def complete_server(self, text, line, begidx, endidx):
    """Tab-complete `server` subcommands that start with `text`."""
    return list(filter(lambda cmd: cmd.startswith(text),
                       PsiturkShell.server_commands))
Tab - complete server command
16,141
# Confirm before quitting while the server is running; shuts the server down
# on 'y' and aborts the quit (returns False) otherwise.
def do_quit ( self , _ ) : if ( self . server . is_server_running ( ) == 'yes' or self . server . is_server_running ( ) == 'maybe' ) : user_input = raw_input ( "Quitting shell will shut down experiment " "server. Really quit? y or n: " ) if user_input == 'y' : self . server_off ( ) else : return False return True
Override do_quit for network clean up .
16,142
# Close the SSH tunnel if it is open; otherwise do nothing.
def clean_up ( self ) : if self . tunnel . is_open : print 'Closing tunnel...' self . tunnel . close ( ) print 'Done.' else : pass
Clean up child and orphaned processes .
16,143
# Network-aware intro prompt: fetch the psiturk.org system status message and
# append the colorized version/help footer.
def get_intro_prompt ( self ) : server_msg = self . web_services . get_system_status ( ) return server_msg + colorize ( 'psiTurk version ' + version_number + '\nType "help" for more information.' , 'green' , False )
Overloads the intro prompt with a network-aware version: if psiTurk.org is reachable, requests and prepends the system status message.
16,144
def complete_db(self, text, line, begidx, endidx):
    """Tab-complete `db` subcommands that start with `text`."""
    return [candidate for candidate in PsiturkNetworkShell.db_commands
            if candidate.startswith(text)]
Tab - complete db command
16,145
def complete_hit(self, text, line, begidx, endidx):
    """Tab-complete `hit` subcommands that start with `text`."""
    matches = []
    for candidate in PsiturkNetworkShell.hit_commands:
        if not candidate.startswith(text):
            continue
        matches.append(candidate)
    return matches
Tab - complete hit command .
16,146
def complete_worker(self, text, line, begidx, endidx):
    """Tab-complete `worker` subcommands that start with `text`."""
    return list(filter(lambda cmd: cmd.startswith(text),
                       PsiturkNetworkShell.worker_commands))
Tab - complete worker command .
16,147
# Return the first boto RDS instance description for dbid, or False when the
# connection or lookup fails.
# NOTE(review): the bare `except` hides the real boto error.
def get_db_instance_info ( self , dbid ) : if not self . connect_to_aws_rds ( ) : return False try : instances = self . rdsc . describe_db_instances ( dbid ) . get ( 'DBInstances' ) except : return False else : myinstance = instances [ 0 ] return myinstance
Get DB instance info
16,148
# Authorize MySQL (tcp/3306) access to the default EC2 security group for the
# given IP (/32); a duplicate-permission error from EC2 counts as success.
# (Python 2 `except E, e` syntax.)
def allow_access_to_instance ( self , _ , ip_address ) : if not self . connect_to_aws_rds ( ) : return False try : conn = boto . ec2 . connect_to_region ( self . region , aws_access_key_id = self . aws_access_key_id , aws_secret_access_key = self . aws_secret_access_key ) sgs = conn . get_all_security_groups ( 'default' ) default_sg = sgs [ 0 ] default_sg . authorize ( ip_protocol = 'tcp' , from_port = 3306 , to_port = 3306 , cidr_ip = str ( ip_address ) + '/32' ) except EC2ResponseError , exception : if exception . error_code == "InvalidPermission.Duplicate" : return True else : return False else : return True
Allow access to instance .
16,149
def validate_instance_id(self, instid):
    """Return True when `instid` is a legal instance id, else an error string.

    Legal ids are 1-63 word characters or hyphens, starting with a letter.
    """
    well_formed = (
        re.match('[\w-]+$', instid) is not None
        and 1 <= len(instid) <= 63
        and instid[0].isalpha()
    )
    if well_formed:
        return True
    return "*** Error: Instance ids must be 1-63 alphanumeric characters, \ first is a letter."
Validate instance ID
16,150
def validate_instance_username(self, username):
    """Return True for a legal DB username, else an error string.

    Legal usernames are 1-16 word characters or hyphens, start with a
    letter, and are not reserved MySQL words.
    """
    well_formed = (
        re.match('[\w-]+$', username) is not None
        and 1 <= len(username) <= 16
        and username[0].isalpha()
        and username not in MYSQL_RESERVED_WORDS
    )
    if well_formed:
        return True
    return '*** Error: Usernames must be 1-16 alphanumeric chracters, \ first a letter, cannot be reserved MySQL word.'
Validate instance username
16,151
def validate_instance_password(self, password):
    """Return True for a legal DB password (8-41 word chars/hyphens),
    else an error string."""
    well_formed = (
        re.match('[\w-]+$', password) is not None
        and 8 <= len(password) <= 41
    )
    if well_formed:
        return True
    return '*** Error: Passwords must be 8-41 alphanumeric characters'
Validate instance passwords
16,152
def validate_instance_dbname(self, dbname):
    """Return True for a legal database name, else an error string.

    Legal names are 1-41 word characters or hyphens and are not reserved
    MySQL words (checked case-insensitively).
    """
    well_formed = (
        re.match('[\w-]+$', dbname) is not None
        and 1 <= len(dbname) <= 41
        and dbname.lower() not in MYSQL_RESERVED_WORDS
    )
    if well_formed:
        return True
    return '*** Error: Database names must be 1-64 alphanumeric characters,\ cannot be a reserved MySQL word.'
Validate instance database name
16,153
# Create a db.t1.micro MySQL RDS instance from params (id, size, username,
# password, dbname); returns True on success, False on any failure.
# NOTE(review): the bare `except` hides boto validation errors.
def create_db_instance ( self , params ) : if not self . connect_to_aws_rds ( ) : return False try : database = self . rdsc . create_dbinstance ( id = params [ 'id' ] , allocated_storage = params [ 'size' ] , instance_class = 'db.t1.micro' , engine = 'MySQL' , master_username = params [ 'username' ] , master_password = params [ 'password' ] , db_name = params [ 'dbname' ] , multi_az = False ) except : return False else : return True
Create db instance
16,154
# Page through the MTurk list_hits API, collect every HIT, and return them
# converted by _hit_xml_to_object; False on connection/API failure
# (Python 2 `print e`).
def get_all_hits ( self ) : if not self . connect_to_turk ( ) : return False try : hits = [ ] paginator = self . mtc . get_paginator ( 'list_hits' ) for page in paginator . paginate ( ) : hits . extend ( page [ 'HITs' ] ) except Exception as e : print e return False hits_data = self . _hit_xml_to_object ( hits ) return hits_data
Get all HITs
16,155
# Build a boto3 MTurk client against the sandbox or live endpoint using the
# configured AWS credentials.
# NOTE(review): `assert False` for placeholder keys disappears under -O;
# raising an exception would be safer.
def setup_mturk_connection ( self ) : if ( ( self . aws_access_key_id == 'YourAccessKeyId' ) or ( self . aws_secret_access_key == 'YourSecretAccessKey' ) ) : print "AWS access key not set in ~/.psiturkconfig; please enter a valid access key." assert False if self . is_sandbox : endpoint_url = 'https://mturk-requester-sandbox.us-east-1.amazonaws.com' else : endpoint_url = 'https://mturk-requester.us-east-1.amazonaws.com' self . mtc = boto3 . client ( 'mturk' , region_name = 'us-east-1' , aws_access_key_id = self . aws_access_key_id , aws_secret_access_key = self . aws_secret_access_key , endpoint_url = endpoint_url ) return True
Connect to turk
16,156
def get_hit_status(self, hitid):
    """Return the HITStatus string for `hitid`, or False when lookup fails."""
    hitdata = self.get_hit(hitid)
    return hitdata['HITStatus'] if hitdata else False
Get HIT status
16,157
def query_records_no_auth(self, name, query=''):
    """GET /api/<name>/<query> from the configured API server without auth;
    returns the raw requests response."""
    url = self.api_server + '/api/' + name + "/" + query
    return requests.get(url)
Query records without authorization
16,158
# Fetch the tunnel hostname from psiturk.org; on 401/403/500 the response
# body is printed and False is returned.
def get_tunnel_ad_url ( self ) : req = requests . get ( 'https://api.psiturk.org/api/tunnel' , auth = ( self . access_key , self . secret_key ) ) if req . status_code in [ 401 , 403 , 500 ] : print ( req . content ) return False else : return req . json ( ) [ 'tunnel_hostname' ]
Get tunnel hostname from psiturk.org.
16,159
# Close the current tunnel (if open) and delete its registration on
# psiturk.org; on 401/403/500 the response body is printed and False returned.
def change_tunnel_ad_url ( self ) : if self . is_open : self . close ( ) req = requests . delete ( 'https://api.psiturk.org/api/tunnel/' , auth = ( self . access_key , self . secret_key ) ) if req . status_code in [ 401 , 403 , 500 ] : print ( req . content ) return False
Change tunnel ad url .
16,160
def traveling_salesman(G, sampler=None, lagrange=2, weight='weight', **sampler_args):
    """Return an approximate minimum traveling-salesperson route.

    Builds the TSP QUBO for G, samples it, and decodes the lowest-energy
    sample into an ordered list of nodes.
    """
    Q = traveling_salesman_qubo(G, lagrange, weight)
    response = sampler.sample_qubo(Q, **sampler_args)
    best = next(iter(response))
    # Keep only the (node, position) variables switched on, ordered by position.
    chosen = [var for var in best if best[var] > 0]
    chosen.sort(key=lambda var: var[1])
    return [node for node, _ in chosen]
Returns an approximate minimum traveling salesperson route .
16,161
def traveling_salesman_qubo(G, lagrange=2, weight='weight'):
    """Return the QUBO (as a defaultdict(float)) whose ground states are
    minimum TSP routes of the complete graph G.

    Raises ValueError unless G is empty or complete with at least 3 nodes.
    """
    N = G.number_of_nodes()
    if N in (1, 2) or len(G.edges) != N * (N - 1) // 2:
        msg = "graph must be a complete graph with at least 3 nodes or empty"
        raise ValueError(msg)
    Q = defaultdict(float)
    # Constraint: every node occupies exactly one route position.
    for node in G:
        for pos_1 in range(N):
            Q[((node, pos_1), (node, pos_1))] -= lagrange
            for pos_2 in range(pos_1 + 1, N):
                Q[((node, pos_1), (node, pos_2))] += 2.0 * lagrange
    # Constraint: every route position holds exactly one node.
    for pos in range(N):
        for node_1 in G:
            Q[((node_1, pos), (node_1, pos))] -= lagrange
            for node_2 in set(G) - {node_1}:
                Q[((node_1, pos), (node_2, pos))] += 2.0 * lagrange
    # Objective: edge weight between nodes in consecutive (cyclic) positions.
    for u, v in itertools.combinations(G.nodes, 2):
        for pos in range(N):
            nextpos = (pos + 1) % N
            edge_weight = G[u][v][weight]
            Q[((u, pos), (v, nextpos))] += edge_weight
            Q[((v, pos), (u, nextpos))] += edge_weight
    return Q
Return the QUBO with ground states corresponding to a minimum TSP route .
16,162
def markov_network(potentials):
    """Create a Markov network (nx.Graph) from a dict of clique potentials.

    Size-1 cliques become nodes and size-2 cliques become edges, each
    carrying its potential; larger cliques are rejected.  Every 0/1
    configuration must be present in each potential mapping.
    """
    G = nx.Graph()
    G.name = 'markov_network({!r})'.format(potentials)
    for clique, phis in potentials.items():
        num_vars = len(clique)
        if not isinstance(phis, abc.Mapping):
            raise TypeError("phis should be a dict")
        required = itertools.product((0, 1), repeat=num_vars)
        if not all(config in phis for config in required):
            raise ValueError("not all potentials provided for {!r}".format(clique))
        if num_vars == 1:
            u, = clique
            G.add_node(u, potential=phis)
        elif num_vars == 2:
            u, v = clique
            G.add_edge(u, v, potential=phis, order=(u, v))
        else:
            raise ValueError("Only supports cliques up to size 2")
    return G
Creates a Markov Network from potentials .
16,163
def maximum_cut(G, sampler=None, **sampler_args):
    """Approximate a maximum cut of G; returns one side of the cut as a set.

    Samples the antiferromagnetic Ising model (all couplings +1) and keeps
    the nodes with non-negative spin in the first sample.
    """
    h = {node: 0. for node in G}
    J = {edge: 1 for edge in G.edges}
    response = sampler.sample_ising(h, J, **sampler_args)
    sample = next(iter(response))
    return {node for node in G if sample[node] >= 0}
Returns an approximate maximum cut .
16,164
def weighted_maximum_cut(G, sampler=None, **sampler_args):
    """Approximate a weighted maximum cut of G; returns one side as a set.

    Couplings come from each edge's 'weight' attribute; a missing weight
    raises DWaveNetworkXException.
    """
    h = {node: 0. for node in G}
    try:
        J = {(u, v): G[u][v]['weight'] for u, v in G.edges}
    except KeyError:
        raise DWaveNetworkXException("edges must have 'weight' attribute")
    response = sampler.sample_ising(h, J, **sampler_args)
    sample = next(iter(response))
    return {node for node in G if sample[node] >= 0}
Returns an approximate weighted maximum cut .
16,165
def get_pegasus_to_nice_fn(*args, **kwargs):
    """Return a translator from 4-term pegasus coordinates to 5-term nice coordinates.

    The returned function maps (u, w, k, z) to (t, y, x, u, k').  Takes
    no arguments; passing any is deprecated.
    """
    if args or kwargs:
        warnings.warn("Deprecation warning: get_pegasus_to_nice_fn does not need / use parameters anymore")

    # one converter per value of the nice coordinate t
    def _to_t0(u, w, k, z):
        # note: the original 'k - 4 if u else k - 4' collapses to 'k - 4'
        return (0, w - 1 if u else z, z if u else w, u, k - 4)

    def _to_t1(u, w, k, z):
        return (1, w - 1 if u else z, z if u else w, u, k if u else k - 8)

    def _to_t2(u, w, k, z):
        return (2, w if u else z, z if u else w - 1, u, k - 8 if u else k)

    converters = (_to_t0, _to_t1, _to_t2)

    def p2n(u, w, k, z):
        # the orientation u and the third of the k-range select the t value
        return converters[(2 - u - (2 * u - 1) * (k // 4)) % 3](u, w, k, z)

    return p2n
Returns a coordinate translation function from the 4 - term pegasus_index coordinates to the 5 - term nice coordinates .
16,166
def get_nice_to_pegasus_fn(*args, **kwargs):
    """Return a translator from 5-term nice coordinates to 4-term pegasus coordinates.

    The returned function maps (t, y, x, u, k) to (u, w, k', z).  Takes
    no arguments; passing any is deprecated.
    """
    if args or kwargs:
        # Bug fix: the deprecation message previously named the wrong
        # function (get_pegasus_to_nice_fn).
        warnings.warn("Deprecation warning: get_nice_to_pegasus_fn does not need / use parameters anymore")

    # one converter per value of the nice coordinate t
    def c2p0(y, x, u, k):
        # the original '4 + k if u else 4 + k' collapses to '4 + k'
        return (u, y + 1 if u else x, 4 + k, x if u else y)

    def c2p1(y, x, u, k):
        return (u, y + 1 if u else x, k if u else 8 + k, x if u else y)

    def c2p2(y, x, u, k):
        return (u, y if u else x + 1, 8 + k if u else k, x if u else y)

    def n2p(t, y, x, u, k):
        # t directly selects the converter
        return [c2p0, c2p1, c2p2][t](y, x, u, k)

    return n2p
Returns a coordinate translation function from the 5 - term nice coordinates to the 4 - term pegasus_index coordinates .
16,167
def tuple(self, r):
    """Convert the linear index r into a 4-term pegasus_index (u, w, k, z)."""
    m, m1 = self.args
    # peel the coordinates off, least significant first
    quotient, z = divmod(r, m1)
    quotient, k = divmod(quotient, 12)
    u, w = divmod(quotient, m)
    return u, w, k, z
Converts the linear index r into a 4-term pegasus_index tuple (u, w, k, z)
16,168
def ints(self, qlist):
    """Lazily convert pegasus_index labels (u, w, k, z) to linear indices, preserving order."""
    m, m1 = self.args

    def linear(u, w, k, z):
        # inverse of tuple(): nest the coordinates back into one integer
        return ((m * u + w) * 12 + k) * m1 + z

    return (linear(u, w, k, z) for (u, w, k, z) in qlist)
Converts a sequence of pegasus_index node labels into linear_index node labels preserving order
16,169
def tuples(self, rlist):
    """Lazily convert linear indices into pegasus_index labels, preserving order."""
    m, m1 = self.args
    for index in rlist:
        # peel the coordinates off, least significant first
        remainder, z = divmod(index, m1)
        remainder, k = divmod(remainder, 12)
        u, w = divmod(remainder, m)
        yield u, w, k, z
Converts a sequence of linear_index node labels into pegasus_index node labels preserving order
16,170
def __pair_repack(self, f, plist):
    """Flatten a sequence of pairs, pass it through f, and re-pair f's output.

    ``f`` is assumed to map a flat iterable to an iterator of even length.
    """
    flat = f(item for pair in plist for item in pair)
    # consume the resulting iterator two items at a time
    for first in flat:
        yield first, next(flat)
Flattens a sequence of pairs to pass through f and then re - pairs the result .
16,171
def sample_markov_network(MN, sampler=None, fixed_variables=None, return_sampleset=False, **sampler_args):
    """Sample from a Markov network using the given dimod sampler.

    Returns a list of sample dicts, or the raw sampleset when
    ``return_sampleset`` is true.  ``fixed_variables`` are clamped
    before sampling.
    """
    bqm = markov_network_bqm(MN)

    # clamp any user-fixed variables before handing off to the sampler
    composed = dimod.FixedVariableComposite(sampler)
    sampleset = composed.sample(bqm, fixed_variables=fixed_variables, **sampler_args)

    if return_sampleset:
        return sampleset
    return [dict(sample) for sample in sampleset.samples()]
Samples from a markov network using the provided sampler .
16,172
def markov_network_bqm(MN):
    """Construct a binary quadratic model from a Markov network.

    MN is a graph as built by ``markov_network``: nodes and edges may
    carry a 'potential' dict over the binary configurations of their
    variables, and edges carry an 'order' tuple naming which endpoint the
    first configuration index refers to.  Returns a dimod BINARY BQM
    whose energy equals the sum of the potentials.
    """
    bqm = dimod.BinaryQuadraticModel.empty(dimod.BINARY)

    # linear terms from the single-variable potentials
    for v, ddict in MN.nodes(data=True, default=None):
        potential = ddict.get('potential', None)

        if potential is None:
            continue

        # phi0/phi1: potential value when v == 0 / v == 1
        phi0 = potential[(0,)]
        phi1 = potential[(1,)]

        # energy(v) = phi0 + (phi1 - phi0) * v
        bqm.add_variable(v, phi1 - phi0)
        bqm.add_offset(phi0)

    # quadratic terms from the pairwise potentials
    for u, v, ddict in MN.edges(data=True, default=None):
        potential = ddict.get('potential', None)

        if potential is None:
            continue

        # respect the variable order the configurations were specified in
        order = ddict['order']
        u, v = order

        phi00 = potential[(0, 0)]
        phi01 = potential[(0, 1)]
        phi10 = potential[(1, 0)]
        phi11 = potential[(1, 1)]

        # decompose the 2x2 potential table into offset/linear/quadratic parts
        bqm.add_variable(u, phi10 - phi00)
        bqm.add_variable(v, phi01 - phi00)
        bqm.add_interaction(u, v, phi11 - phi10 - phi01 + phi00)
        bqm.add_offset(phi00)

    return bqm
Construct a binary quadratic model for a markov network .
16,173
def chimera_layout(G, scale=1., center=None, dim=2):
    """Position the nodes of graph G in a Chimera cross layout.

    Accepts a NetworkX graph or an edge list.  If G carries Chimera
    metadata (family/rows/columns/tile), positions come straight from its
    coordinates; otherwise the Chimera indices are read from node data or
    inferred with find_chimera_indices.  Returns a dict mapping each node
    to a length-``dim`` coordinate array.
    """
    # allow an edge list in place of a graph
    if not isinstance(G, nx.Graph):
        empty_graph = nx.Graph()
        empty_graph.add_edges_from(G)
        G = empty_graph

    # if the graph was built with Chimera metadata, trust it
    if G.graph.get("family") == "chimera":
        m = G.graph['rows']
        n = G.graph['columns']
        t = G.graph['tile']
        xy_coords = chimera_node_placer_2d(m, n, t, scale, center, dim)

        if G.graph.get('labels') == 'coordinate':
            # nodes are already (i, j, u, k) tuples
            pos = {v: xy_coords(*v) for v in G.nodes()}
        elif G.graph.get('data'):
            # chimera indices stored in each node's data dict
            pos = {v: xy_coords(*dat['chimera_index']) for v, dat in G.nodes(data=True)}
        else:
            # linear labels: translate through chimera_coordinates
            coord = chimera_coordinates(m, n, t)
            pos = {v: xy_coords(*coord.tuple(v)) for v in G.nodes()}
    else:
        # no metadata: recover indices from node data, or infer them
        if all('chimera_index' in dat for __, dat in G.nodes(data=True)):
            chimera_indices = {v: dat['chimera_index'] for v, dat in G.nodes(data=True)}
        else:
            chimera_indices = find_chimera_indices(G)

        # lattice dimensions implied by the largest indices present
        m = max(idx[0] for idx in itervalues(chimera_indices)) + 1
        n = max(idx[1] for idx in itervalues(chimera_indices)) + 1
        t = max(idx[3] for idx in itervalues(chimera_indices)) + 1
        xy_coords = chimera_node_placer_2d(m, n, t, scale, center, dim)
        pos = {v: xy_coords(i, j, u, k) for v, (i, j, u, k) in iteritems(chimera_indices)}

    return pos
Positions the nodes of graph G in a Chimera cross topology .
16,174
def chimera_node_placer_2d(m, n, t, scale=1., center=None, dim=2):
    """Create a function mapping Chimera indices (i, j, u, k) to layout coordinates.

    m, n, t give the Chimera lattice dimensions; scale/center/dim control
    the output coordinate system (dim must be >= 2; extra dimensions are
    zero-padded).
    """
    import numpy as np

    tile_center = t // 2
    tile_length = t + 3  # qubits per shore plus a gap between tiles

    # rescale so the whole lattice fits in the requested extent
    scale /= max(m, n) * tile_length - 3

    grid_offsets = {}  # cached per-tile translation vectors

    if center is None:
        center = np.zeros(dim)
    else:
        center = np.asarray(center)

    paddims = dim - 2
    if paddims < 0:
        raise ValueError("layout must have at least two dimensions")
    if len(center) != dim:
        raise ValueError("length of center coordinates must match dimension of layout")

    def _xy_coords(i, j, u, k):
        # skip the tile midpoint so the two shores draw as a cross
        p = k if k < tile_center else k + 1

        # horizontal qubits sit on the tile's center column, vertical on its center row
        xy = np.array([tile_center, -1 * p]) if u else np.array([p, -1 * tile_center])

        # translate into tile (i, j), caching the offset vector
        if i > 0 or j > 0:
            if (i, j) in grid_offsets:
                xy += grid_offsets[(i, j)]
            else:
                off = np.array([j * tile_length, -1 * i * tile_length])
                xy += off
                grid_offsets[(i, j)] = off

        return np.hstack((xy * scale, np.zeros(paddims))) + center

    return _xy_coords
Generates a function that converts Chimera indices to x y coordinates for a plot .
16,175
def draw_chimera_embedding(G, *args, **kwargs):
    """Draw an embedding onto the Chimera graph G, positioned by chimera_layout."""
    layout = chimera_layout(G)
    draw_embedding(G, layout, *args, **kwargs)
Draws an embedding onto the chimera graph G according to layout .
16,176
def find_chimera_indices(G):
    """Attempt to determine the Chimera indices of the nodes in graph G.

    Currently only handles graphs that fit in a single Chimera tile.
    Returns a dict mapping each node to an (i, j, u, k) tuple; raises
    DWaveNetworkXException when G cannot be Chimera-structured.
    """
    # sort the nodes when possible so the assignment is deterministic
    try:
        nlist = sorted(G.nodes)
    except TypeError:
        nlist = G.nodes()

    n_nodes = len(nlist)

    # trivial cases: empty graph, singleton, single edge
    chimera_indices = {}
    if n_nodes == 0:
        return chimera_indices
    elif n_nodes == 1:
        raise DWaveNetworkXException('Singleton graphs are not Chimera-structured')
    elif n_nodes == 2:
        return {nlist[0]: (0, 0, 0, 0), nlist[1]: (0, 0, 1, 0)}

    # `color` presumably 2-colors the (bipartite) graph, giving each
    # node's shore u -- see the color helper
    coloring = color(G)

    # normalize so the first node lands on shore 0
    if coloring[nlist[0]] == 1:
        coloring = {v: 1 - coloring[v] for v in coloring}

    # diameter 2 indicates the graph fits in a single tile (i = j = 0)
    dia = diameter(G)
    if dia == 2:
        # assign k values in node order within each shore
        shore_indices = [0, 0]
        for v in nlist:
            u = coloring[v]
            chimera_indices[v] = (0, 0, u, shore_indices[u])
            shore_indices[u] += 1
        return chimera_indices

    raise Exception('not yet implemented for Chimera graphs with more than one tile')
Attempts to determine the Chimera indices of the nodes in graph G .
16,177
def chimera_elimination_order(m, n=None, t=None):
    """Return a variable elimination order for the Chimera graph C(m, n, t).

    n defaults to m and t to 4.  Nodes are linear indices.
    """
    if n is None:
        n = m
    if t is None:
        t = 4

    # sweep along the shorter lattice dimension first
    index_flip = m > n
    if index_flip:
        m, n = n, m

    def chimeraI(m0, n0, k0, l0):
        # linear index of qubit (m0, n0, k0, l0), undoing the swap above
        if index_flip:
            return m * 2 * t * n0 + 2 * t * m0 + t * (1 - k0) + l0
        return n * 2 * t * m0 + 2 * t * n0 + t * k0 + l0

    # shore 0 first, then shore 1, each swept column-by-column
    order = [chimeraI(m_i, n_i, 0, t_i)
             for n_i in range(n)
             for t_i in range(t)
             for m_i in range(m)]
    order.extend(chimeraI(m_i, n_i, 1, t_i)
                 for n_i in range(n)
                 for m_i in range(m)
                 for t_i in range(t))
    return order
Provides a variable elimination order for a Chimera graph .
16,178
def tuple(self, r):
    """Convert the linear index r into a chimera_index (i, j, u, k)."""
    m, n, t = self.args
    # peel the coordinates off, least significant first
    remainder, k = divmod(r, t)
    remainder, u = divmod(remainder, 2)
    i, j = divmod(remainder, n)
    return i, j, u, k
Converts the linear index r into a chimera_index tuple (i, j, u, k)
16,179
def ints(self, qlist):
    """Lazily convert chimera_index labels (i, j, u, k) to linear indices, preserving order."""
    m, n, t = self.args

    def linear(i, j, u, k):
        # inverse of tuple(): ((n*i + j)*2 + u)*t + k
        return ((n * i + j) * 2 + u) * t + k

    return (linear(i, j, u, k) for (i, j, u, k) in qlist)
Converts a sequence of chimera_index node labels into linear_index node labels preserving order
16,180
def tuples(self, rlist):
    """Lazily convert linear indices into chimera_index labels, preserving order."""
    m, n, t = self.args
    for index in rlist:
        # peel the coordinates off, least significant first
        remainder, k = divmod(index, t)
        remainder, u = divmod(remainder, 2)
        i, j = divmod(remainder, n)
        yield i, j, u, k
Converts a sequence of linear_index node labels into chimera_index node labels preserving order
16,181
def structural_imbalance(S, sampler=None, **sampler_args):
    """Approximate the frustrated edges and a bicoloring of signed network S.

    Returns (frustrated_edges, colors) where colors maps each node to 0/1
    and frustrated_edges maps edge tuples to their data dicts.
    """
    h, J = structural_imbalance_ising(S)

    # a minimum-energy sample is a minimally frustrated bicoloring
    sample = next(iter(sampler.sample_ising(h, J, **sampler_args)))

    # map spins {-1, +1} onto colors {0, 1}
    colors = {v: (spin + 1) // 2 for v, spin in iteritems(sample)}

    # an edge is frustrated when its sign disagrees with the coloring
    frustrated_edges = {}
    for u, v, data in S.edges(data=True):
        same_color = colors[u] == colors[v]
        if data['sign'] > 0 and not same_color:
            frustrated_edges[(u, v)] = data
        elif data['sign'] < 0 and same_color:
            frustrated_edges[(u, v)] = data
        # zero-sign edges are never frustrated

    return frustrated_edges, colors
Returns an approximate set of frustrated edges and a bicoloring .
16,182
def structural_imbalance_ising(S):
    """Build the Ising problem whose ground states minimize the structural
    imbalance of the signed social network S.

    Returns (h, J); every edge of S must carry a 'sign' attribute.
    """
    h = {v: 0.0 for v in S}

    J = {}
    for u, v, data in S.edges(data=True):
        try:
            # friendly (+) edges become ferromagnetic (-) couplings and vice versa
            J[(u, v)] = -1. * data['sign']
        except KeyError:
            raise ValueError(("graph should be a signed social graph,"
                              "each edge should have a 'sign' attr"))

    return h, J
Construct the Ising problem to calculate the structural imbalance of a signed social network .
16,183
def is_simplicial(G, n):
    """Return True if node n is simplicial, i.e. its neighborhood is a clique."""
    neighbors = G[n]
    return all(u in G[v] for u, v in itertools.combinations(neighbors, 2))
Determines whether a node n in G is simplicial .
16,184
def is_almost_simplicial(G, n):
    """Return True if some neighbor w of n makes N(n) - {w} a clique."""
    for w in G[n]:
        # check whether the neighborhood excluding w is fully connected
        others_clique = all(u in G[v]
                            for u, v in itertools.combinations(G[n], 2)
                            if u != w and v != w)
        if others_clique:
            return True
    return False
Determines whether a node n in G is almost simplicial .
16,185
def minor_min_width(G):
    """Compute a lower bound for the treewidth of G (minor-min-width heuristic).

    Repeatedly contracts a minimum-degree vertex into its
    lowest-connectivity neighbor, tracking the largest minimum degree seen.
    """
    # operate on a local adjacency copy so G is not mutated
    adj = {v: set(G[v]) for v in G}

    lb = 0  # lower bound on treewidth
    while len(adj) > 1:
        # v: a vertex of minimum degree
        v = min(adj, key=lambda node: len(adj[node]))
        neighbors = adj[v]

        if not neighbors:
            # isolated vertices cannot raise the bound
            del adj[v]
            continue

        def neighborhood_degree(u):
            # number of v's neighbors that u is also adjacent to
            Gu = adj[u]
            return sum(w in Gu for w in neighbors)

        # u: the neighbor sharing the fewest common neighbors with v
        u = min(neighbors, key=neighborhood_degree)

        # the bound is the largest minimum degree encountered
        lb = max(lb, len(adj[v]))

        # contract the edge (u, v): v absorbs u's other neighbors
        adj[v] = adj[v].union(w for w in adj[u] if w != v)
        for w in adj[v]:
            adj[w].add(v)
        for w in adj[u]:
            adj[w].discard(u)
        del adj[u]

    return lb
Computes a lower bound for the treewidth of graph G .
16,186
def min_fill_heuristic(G):
    """Upper-bound the treewidth of G with a min-fill elimination order.

    Returns (upper_bound, order).
    """
    # operate on a local adjacency copy so G is not mutated
    adj = {v: set(G[v]) for v in G}

    num_nodes = len(adj)
    order = [0] * num_nodes

    upper_bound = 0
    for i in range(num_nodes):
        # eliminate the vertex whose removal needs the fewest fill-in edges
        v = min(adj, key=lambda x: _min_fill_needed_edges(adj, x))

        # the width of the order is the largest degree at elimination time
        upper_bound = max(upper_bound, len(adj[v]))

        _elim_adj(adj, v)
        order[i] = v

    return upper_bound, order
Computes an upper bound on the treewidth of graph G based on the min - fill heuristic for the elimination ordering .
16,187
def min_width_heuristic(G):
    """Upper-bound the treewidth of G with a min-width elimination order.

    Returns (upper_bound, order).
    """
    # operate on a local adjacency copy so G is not mutated
    adj = {v: set(G[v]) for v in G}

    num_nodes = len(adj)
    order = [0] * num_nodes

    upper_bound = 0
    for i in range(num_nodes):
        # pick a minimum-degree vertex; random() breaks ties arbitrarily
        v = min(adj, key=lambda u: len(adj[u]) + random())

        # the width of the order is the largest degree at elimination time
        upper_bound = max(upper_bound, len(adj[v]))

        _elim_adj(adj, v)
        order[i] = v

    return upper_bound, order
Computes an upper bound on the treewidth of graph G based on the min - width heuristic for the elimination ordering .
16,188
def max_cardinality_heuristic(G):
    """Upper-bound the treewidth of G with a max-cardinality elimination order.

    Returns (upper_bound, order).
    """
    # operate on a local adjacency copy so G is not mutated
    adj = {v: set(G[v]) for v in G}

    num_nodes = len(adj)
    order = [0] * num_nodes

    upper_bound = 0

    # number of already-labelled neighbors of each unlabelled vertex
    labelled_neighbors = {v: 0 for v in adj}

    # build the order back-to-front, always taking the vertex with the most
    # labelled neighbors; random() breaks ties arbitrarily
    for i in range(num_nodes):
        v = max(labelled_neighbors, key=lambda u: labelled_neighbors[u] + random())
        del labelled_neighbors[v]

        for u in adj[v]:
            if u in labelled_neighbors:
                labelled_neighbors[u] += 1

        order[-(i + 1)] = v

    # the width of the resulting order is the bound
    for v in order:
        upper_bound = max(upper_bound, len(adj[v]))
        _elim_adj(adj, v)

    return upper_bound, order
Computes an upper bound on the treewidth of graph G based on the max - cardinality heuristic for the elimination ordering .
16,189
def _elim_adj(adj, n):
    """Eliminate vertex n from adj in place, cliquing its neighborhood.

    Returns the set of edge tuples that were added (both orientations).
    """
    neighbors = adj[n]

    # connect every pair of n's neighbors that is not already adjacent
    new_edges = set()
    for u, v in itertools.combinations(neighbors, 2):
        if v in adj[u]:
            continue
        adj[u].add(v)
        adj[v].add(u)
        new_edges.update(((u, v), (v, u)))

    # detach and remove n itself
    for v in neighbors:
        adj[v].discard(n)
    del adj[n]

    return new_edges
Eliminates a variable, acting on the adjacency structure of G, and returns the set of edges that were added .
16,190
def elimination_order_width(G, order):
    """Return the width of the tree decomposition induced by elimination order.

    Raises ValueError when order and G do not contain the same vertices.
    """
    # operate on a local adjacency copy so G is not mutated
    adj = {v: set(G[v]) for v in G}

    treewidth = 0
    for v in order:
        try:
            degree = len(adj[v])
        except KeyError:
            raise ValueError('{} is in order but not in G'.format(v))

        # the width is the largest degree at elimination time
        if degree > treewidth:
            treewidth = degree

        _elim_adj(adj, v)

    # every vertex must have been eliminated
    if adj:
        raise ValueError('not all nodes in G were in order')

    return treewidth
Calculates the width of the tree decomposition induced by a variable elimination order .
16,191
def treewidth_branch_and_bound(G, elimination_order=None, treewidth_upperbound=None):
    """Compute the treewidth of G and a corresponding perfect elimination order.

    Optionally seeded with a known elimination order and/or a treewidth
    upper bound.  Returns (treewidth, order); NOTE the order is empty when
    ``treewidth_upperbound`` is given and cannot be improved upon.
    """
    # edgeless graphs have treewidth 0 and any order is perfect
    if not any(G[v] for v in G):
        return 0, list(G)

    # x: partial elimination order; f: lower bound; g: width of the partial
    # order so far -- presumably following the branch-and-bound treewidth
    # paper that _theorem5p4/_theorem6p1 refer to (confirm against it)
    x = []
    f = minor_min_width(G)
    g = 0

    # start from the min-fill heuristic's bound...
    ub, order = min_fill_heuristic(G)

    # ...and tighten with any user-provided order
    if elimination_order is not None:
        upperbound = elimination_order_width(G, elimination_order)
        if upperbound <= ub:
            ub, order = upperbound, elimination_order

    # ...or user-provided bound (for which no order can be reported)
    if treewidth_upperbound is not None and treewidth_upperbound < ub:
        ub, order = treewidth_upperbound, []

    best_found = ub, order

    assert f <= ub, "Logic error"

    # only search when the lower bound has not already met the upper bound
    if f < ub:
        adj = {v: set(G[v]) for v in G}
        best_found = _branch_and_bound(adj, x, g, f, best_found)

    return best_found
Computes the treewidth of graph G and a corresponding perfect elimination ordering .
16,192
def _graph_reduction(adj, x, g, f):
    """Eliminate simplicial/almost-simplicial vertices from adj in place.

    Appends eliminated vertices to the partial order x and returns the
    updated (g, f, as_list), where as_list is the set of vertices removed.
    """
    as_list = set()

    # vertices of degree <= f that are almost simplicial can be eliminated
    # without pushing the width beyond f
    as_nodes = {v for v in adj
                if len(adj[v]) <= f and is_almost_simplicial(adj, v)}
    while as_nodes:
        # BUG FIX: set.union returns a new set; the original discarded its
        # result, so as_list was always returned empty.  update() mutates
        # as_list in place.
        as_list.update(as_nodes)

        for n in as_nodes:
            # update g (width so far) and f (lower bound) as we eliminate
            dv = len(adj[n])
            if dv > g:
                g = dv
            if g > f:
                f = g

            x.append(n)
            _elim_adj(adj, n)

        # elimination may create new almost-simplicial vertices
        as_nodes = {v for v in adj
                    if len(adj[v]) <= f and is_almost_simplicial(adj, v)}

    return g, f, as_list
We can go ahead and remove any simplicial or almost-simplicial vertices from adj .
16,193
def _theorem5p4(adj, ub):
    """Apply Theorem 5.4: connect any two non-adjacent vertices that share
    more than ub common neighbors; repeat until a fixpoint is reached.

    Mutates adj in place; returns None.
    """
    def _candidate_edges():
        # non-adjacent pairs sharing more than ub neighbors
        return {(u, v) for u, v in itertools.combinations(adj, 2)
                if u not in adj[v]
                and len(adj[u].intersection(adj[v])) > ub}

    new_edges = _candidate_edges()
    while new_edges:
        # adding edges can create new qualifying pairs, so iterate
        for u, v in new_edges:
            adj[u].add(v)
            adj[v].add(u)
        new_edges = _candidate_edges()
By Theorem 5 . 4 if any two vertices have ub + 1 common neighbors then we can add an edge between them .
16,194
def _theorem6p1():
    """Build (prune, explored) callbacks implementing Theorem 6.1.

    ``explored(x)`` records the tail of a visited partial order so that
    ``prune(x)`` can reject orders that differ only by swapping the last
    two vertices.
    """
    pruning_set = set()

    def _prune(x):
        if len(x) <= 2:
            return False
        # True when the same prefix with the last two vertices swapped
        # has already been explored
        return (tuple(x[:-2]), x[-2], x[-1]) in pruning_set

    def _explored(x):
        if len(x) >= 3:
            # store the swapped tail so the symmetric order gets pruned
            pruning_set.add((tuple(x[:-2]), x[-1], x[-2]))

    return _prune, _explored
See Theorem 6 . 1 in paper .
16,195
def draw_qubit_graph(G, layout, linear_biases={}, quadratic_biases={},
                     nodelist=None, edgelist=None, cmap=None, edge_cmap=None,
                     vmin=None, vmax=None, edge_vmin=None, edge_vmax=None,
                     **kwargs):
    """Draw graph G at the given layout, coloring nodes/edges by Ising biases.

    NOTE(review): linear_biases/quadratic_biases are mutable default
    arguments; they are only read here, so this is safe but unidiomatic.
    """
    if linear_biases or quadratic_biases:
        # matplotlib is only needed when biases are to be color-mapped
        try:
            import matplotlib.pyplot as plt
            import matplotlib as mpl
        except ImportError:
            raise ImportError("Matplotlib and numpy required for draw_qubit_graph()")

        if nodelist is None:
            nodelist = G.nodes()

        if edgelist is None:
            edgelist = G.edges()

        if cmap is None:
            cmap = plt.get_cmap('coolwarm')

        if edge_cmap is None:
            edge_cmap = plt.get_cmap('coolwarm')

        # an edge's color sums the biases given for either orientation
        def edge_color(u, v):
            c = 0.
            if (u, v) in quadratic_biases:
                c += quadratic_biases[(u, v)]
            if (v, u) in quadratic_biases:
                c += quadratic_biases[(v, u)]
            return c

        # a node's color combines its linear bias with any (v, v)
        # self-loop quadratic bias
        def node_color(v):
            c = 0.
            if v in linear_biases:
                c += linear_biases[v]
            if (v, v) in quadratic_biases:
                c += quadratic_biases[(v, v)]
            return c

        # NOTE: rebinds node_color/edge_color from functions to lists
        node_color = [node_color(v) for v in nodelist]
        edge_color = [edge_color(u, v) for u, v in edgelist]

        kwargs['edge_color'] = edge_color
        kwargs['node_color'] = node_color

        # symmetric color range centered on zero
        vmag = max(max(abs(c) for c in node_color),
                   max(abs(c) for c in edge_color))
        if vmin is None:
            vmin = -1 * vmag
        if vmax is None:
            vmax = vmag
        if edge_vmin is None:
            edge_vmin = -1 * vmag
        if edge_vmax is None:
            edge_vmax = vmag

    draw(G, layout, nodelist=nodelist, edgelist=edgelist,
         cmap=cmap, edge_cmap=edge_cmap,
         vmin=vmin, vmax=vmax, edge_vmin=edge_vmin, edge_vmax=edge_vmax,
         **kwargs)

    # add a colorbar keyed to the bias magnitudes
    if linear_biases or quadratic_biases:
        fig = plt.figure(1)
        cax = fig.add_axes([.9, 0.2, 0.04, 0.6])
        mpl.colorbar.ColorbarBase(cax, cmap=cmap,
                                  norm=mpl.colors.Normalize(vmin=-1 * vmag,
                                                            vmax=vmag,
                                                            clip=False),
                                  orientation='vertical')
Draws graph G according to layout .
16,196
def min_vertex_coloring(G, sampler=None, **sampler_args):
    """Return an approximate minimum vertex coloring of G via a QUBO sampler.

    Returns a dict mapping each node to an integer color.
    """
    # color each connected component independently
    if not nx.is_connected(G):
        coloring = {}
        for subG in (G.subgraph(c).copy() for c in nx.connected_components(G)):
            sub_coloring = min_vertex_coloring(subG, sampler, **sampler_args)
            coloring.update(sub_coloring)
        return coloring

    n_nodes = len(G)
    n_edges = len(G.edges)

    # easy special cases: edgeless -> one color; complete -> all distinct
    if not n_edges:
        return {node: 0 for node in G}
    if n_edges == n_nodes * (n_nodes - 1) // 2:
        return {node: color for color, node in enumerate(G)}

    # bound the chromatic number and precolor part of the graph to break
    # symmetry; each remaining node gets one binary variable per allowed color
    chi_ub = _chromatic_number_upper_bound(G, n_nodes, n_edges)
    partial_coloring, possible_colors, chi_lb = _partial_precolor(G, chi_ub)

    counter = itertools.count()
    x_vars = {v: {c: next(counter) for c in possible_colors[v]}
              for v in possible_colors}

    # QUBO = neighbor-difference + one-color-per-vertex + color-minimization
    Q_neighbor = _vertex_different_colors_qubo(G, x_vars)
    Q_vertex = _vertex_one_color_qubo(x_vars)
    Q_min_color = _minimum_coloring_qubo(x_vars, chi_lb, chi_ub, magnitude=.75)

    # merge the three QUBOs; NOTE: Q aliases and mutates Q_neighbor
    Q = Q_neighbor
    for (u, v), bias in iteritems(Q_vertex):
        if (u, v) in Q:
            Q[(u, v)] += bias
        elif (v, u) in Q:
            Q[(v, u)] += bias
        else:
            Q[(u, v)] = bias
    # Q_min_color only has diagonal (v, v) keys, hence the repeated name
    for (v, v), bias in iteritems(Q_min_color):
        if (v, v) in Q:
            Q[(v, v)] += bias
        else:
            Q[(v, v)] = bias

    # take the lowest-energy sample and decode the one-hot color choice
    response = sampler.sample_qubo(Q, **sampler_args)
    sample = next(iter(response))

    for v in x_vars:
        for c in x_vars[v]:
            if sample[x_vars[v][c]]:
                partial_coloring[v] = c

    return partial_coloring
Returns an approximate minimum vertex coloring .
16,197
def _minimum_coloring_qubo(x_vars, chi_lb, chi_ub, magnitude=1.):
    """Generate QUBO terms that disincentivize colors beyond chi_lb.

    Higher optional colors receive proportionally larger penalties, scaled
    so the largest equals ``magnitude``.
    """
    if chi_lb == chi_ub:
        # no optional colors to penalize
        return {}

    scaling = magnitude / (chi_ub - chi_lb)

    Q = {}
    for v in x_vars:
        # penalize the optional colors [chi_lb, chi_ub) with increasing bias
        for rank, color in enumerate(range(chi_lb, chi_ub)):
            idx = x_vars[v][color]
            Q[(idx, idx)] = (rank + 1) * scaling

    return Q
We want to disincentivize unneeded colors . Generates the QUBO that does that .
16,198
def _vertex_different_colors_qubo(G, x_vars):
    """Generate QUBO terms penalizing adjacent vertices that share a color."""
    Q = {}
    for u, v in G.edges:
        # skip precolored vertices, which have no variables
        if u not in x_vars or v not in x_vars:
            continue
        for color in x_vars[u]:
            if color in x_vars[v]:
                # both endpoints choosing this color costs 1
                Q[(x_vars[u][color], x_vars[v][color])] = 1.
    return Q
For each vertex it should not have the same color as any of its neighbors . Generates the QUBO to enforce this constraint .
16,199
def _vertex_one_color_qubo(x_vars):
    """Generate QUBO terms enforcing exactly one color per vertex.

    Energy is minimized (-1 per vertex) exactly when a single color
    variable is on; each pair of simultaneously-on variables costs +2.
    """
    Q = {}
    for v in x_vars:
        # linear reward for turning any single color on
        for color in x_vars[v]:
            idx = x_vars[v][color]
            Q[(idx, idx)] = -1

        # quadratic penalty for turning two colors on at once
        for color0, color1 in itertools.combinations(x_vars[v], 2):
            Q[(x_vars[v][color0], x_vars[v][color1])] = 2

    return Q
For each vertex it should have exactly one color . Generates the QUBO to enforce this constraint .