idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
30,900
def status ( self ) -> Status : errors = STATUS_VALID if not self . occupied : errors |= STATUS_EMPTY if not self . occupied_co [ WHITE ] & self . kings : errors |= STATUS_NO_WHITE_KING if not self . occupied_co [ BLACK ] & self . kings : errors |= STATUS_NO_BLACK_KING if popcount ( self . occupied & self . kings ) > 2...
Gets a bitmask of possible problems with the position .
30,901
def remove(self, square: Square) -> None:
    """Remove *square* from the set.

    Raises:
        KeyError: if the square is not a member of the set.
    """
    bit = BB_SQUARES[square]
    if not (self.mask & bit):
        raise KeyError(square)
    # XOR with a bit known to be set clears exactly that bit.
    self.mask ^= bit
Removes a square from the set .
30,902
def pop(self) -> Square:
    """Remove the least significant square from the set and return it.

    Raises:
        KeyError: if the set is empty.
    """
    if not self.mask:
        raise KeyError("pop from empty SquareSet")
    lowest = lsb(self.mask)
    # n & (n - 1) clears the lowest set bit of n.
    self.mask &= self.mask - 1
    return lowest
Removes a square from the set and returns it .
30,903
def tolist(self) -> List[bool]:
    """Convert the set to a list of 64 booleans, one per board square."""
    flags = [False] * 64
    for sq in self:
        flags[sq] = True
    return flags
Convert the set to a list of 64 bools .
30,904
def find_variant(name: str) -> Type[chess.Board]:
    """Look up a variant board class by case-insensitive variant name.

    Raises:
        ValueError: if no registered variant matches *name*.
    """
    wanted = name.lower()
    for variant in VARIANTS:
        if any(alias.lower() == wanted for alias in variant.aliases):
            return variant
    raise ValueError("unsupported variant: {}".format(name))
Looks for a variant board class by variant name .
30,905
def zobrist_hash(board: chess.Board, *, _hasher: Callable[[chess.Board], int] = ZobristHasher(POLYGLOT_RANDOM_ARRAY)) -> int:
    """Calculate the Polyglot Zobrist hash of the position.

    The default ``_hasher`` is constructed once at import time from the
    Polyglot random array and shared across calls; callers normally pass
    only ``board``.
    """
    return _hasher(board)
Calculates the Polyglot Zobrist hash of the position .
30,906
def find_all ( self , board : Union [ chess . Board , int ] , * , minimum_weight : int = 1 , exclude_moves : Container [ chess . Move ] = ( ) ) -> Iterator [ Entry ] : try : key = int ( board ) context = None except ( TypeError , ValueError ) : context = typing . cast ( chess . Board , board ) key = zobrist_hash ( cont...
Seeks a specific position and yields corresponding entries .
30,907
def find ( self , board : Union [ chess . Board , int ] , * , minimum_weight : int = 1 , exclude_moves : Container [ chess . Move ] = ( ) ) -> Entry : try : return max ( self . find_all ( board , minimum_weight = minimum_weight , exclude_moves = exclude_moves ) , key = lambda entry : entry . weight ) except ValueError ...
Finds the main entry for the given position or Zobrist hash .
30,908
def choice ( self , board : Union [ chess . Board , int ] , * , minimum_weight : int = 1 , exclude_moves : Container [ chess . Move ] = ( ) , random = random ) -> Entry : chosen_entry = None for i , entry in enumerate ( self . find_all ( board , minimum_weight = minimum_weight , exclude_moves = exclude_moves ) ) : if c...
Uniformly selects a random entry for the given position .
30,909
def weighted_choice ( self , board : Union [ chess . Board , int ] , * , exclude_moves : Container [ chess . Move ] = ( ) , random = random ) -> Entry : total_weights = sum ( entry . weight for entry in self . find_all ( board , exclude_moves = exclude_moves ) ) if not total_weights : raise IndexError ( ) choice = rand...
Selects a random entry for the given position distributed by the weights of the entries .
30,910
def close(self) -> None:
    """Close the reader, releasing the mmap and the underlying file descriptor.

    Errors from ``os.close`` (e.g. an already-closed descriptor) are
    deliberately swallowed so closing is effectively idempotent.
    """
    if self.mmap is not None:
        self.mmap.close()
    try:
        os.close(self.fd)
    except OSError:
        pass
Closes the reader .
30,911
def _geom_points ( geom ) : if geom [ 'type' ] == 'Point' : yield tuple ( geom [ 'coordinates' ] ) elif geom [ 'type' ] in ( 'MultiPoint' , 'LineString' ) : for position in geom [ 'coordinates' ] : yield tuple ( position ) else : raise InvalidFeatureError ( "Unsupported geometry type:{0}" . format ( geom [ 'type' ] ) )
GeoJSON geometry to a sequence of point tuples
30,912
def read_points ( features ) : for feature in features : if isinstance ( feature , ( tuple , list ) ) and len ( feature ) == 2 : yield feature elif hasattr ( feature , '__geo_interface__' ) : try : geom = feature . __geo_interface__ [ 'geometry' ] for pt in _geom_points ( geom ) : yield pt except KeyError : for pt in _...
Iterable of features to a sequence of point tuples Where features can be either GeoJSON mappings or objects implementing the geo_interface
30,913
def encode_polyline(features):
    """Encode an iterable of features as a polyline string.

    Points are read as (lon, lat) and swapped to the (lat, lon) order
    the polyline codec expects.
    """
    latlon_points = [(pt[1], pt[0]) for pt in read_points(features)]
    return polyline.encode(latlon_points)
Encode an iterable of features as a polyline
30,914
def directions ( self , features , profile = 'mapbox/driving' , alternatives = None , geometries = None , overview = None , steps = None , continue_straight = None , waypoint_snapping = None , annotations = None , language = None , ** kwargs ) : if 'geometry' in kwargs and geometries is None : geometries = kwargs [ 'ge...
Request directions for waypoints encoded as GeoJSON features .
30,915
def matrix ( self , coordinates , profile = 'mapbox/driving' , sources = None , destinations = None , annotations = None ) : annotations = self . _validate_annotations ( annotations ) profile = self . _validate_profile ( profile ) coords = encode_waypoints ( coordinates ) params = self . _make_query ( sources , destina...
Request a directions matrix for trips between coordinates
30,916
def _attribs ( self , name = None , description = None ) : a = { } if name : a [ 'name' ] = name if description : a [ 'description' ] = description return a
Form an attributes dictionary from keyword args .
30,917
def create(self, name=None, description=None):
    """Create a new empty dataset for the current user.

    Returns the POST response from the datasets API.
    """
    payload = self._attribs(name, description)
    endpoint = URITemplate(self.baseuri + '/{owner}').expand(owner=self.username)
    return self.session.post(endpoint, json=payload)
Creates a new empty dataset .
30,918
def list(self):
    """List all datasets for the current account.

    Returns the GET response from the datasets API.
    """
    endpoint = URITemplate(self.baseuri + '/{owner}').expand(owner=self.username)
    return self.session.get(endpoint)
Lists all datasets for a particular account .
30,919
def update_dataset(self, dataset, name=None, description=None):
    """Update the name and/or description of a single dataset.

    Returns the PATCH response from the datasets API.
    """
    endpoint = URITemplate(self.baseuri + '/{owner}/{id}').expand(
        owner=self.username, id=dataset)
    return self.session.patch(endpoint, json=self._attribs(name, description))
Updates a single dataset .
30,920
def delete_dataset(self, dataset):
    """Delete a single dataset, including all features it contains.

    Returns the DELETE response from the datasets API.
    """
    endpoint = URITemplate(self.baseuri + '/{owner}/{id}').expand(
        owner=self.username, id=dataset)
    return self.session.delete(endpoint)
Deletes a single dataset including all of the features that it contains .
30,921
def list_features ( self , dataset , reverse = False , start = None , limit = None ) : uri = URITemplate ( self . baseuri + '/{owner}/{id}/features' ) . expand ( owner = self . username , id = dataset ) params = { } if reverse : params [ 'reverse' ] = 'true' if start : params [ 'start' ] = start if limit : params [ 'li...
Lists features in a dataset .
30,922
def delete_feature(self, dataset, fid):
    """Remove a single feature from a dataset.

    Returns the DELETE response from the datasets API.
    """
    endpoint = URITemplate(self.baseuri + '/{owner}/{did}/features/{fid}').expand(
        owner=self.username, did=dataset, fid=fid)
    return self.session.delete(endpoint)
Removes a feature from a dataset .
30,923
def _get_credentials ( self ) : uri = URITemplate ( self . baseuri + '/{username}/credentials' ) . expand ( username = self . username ) resp = self . session . post ( uri ) self . handle_http_error ( resp , custom_messages = { 401 : "Token is not authorized" , 404 : "Token does not have upload scope" , 429 : "Too many...
Gets temporary S3 credentials to stage user - uploaded files
30,924
def _validate_tileset ( self , tileset ) : if '.' not in tileset : tileset = "{0}.{1}" . format ( self . username , tileset ) pattern = '^[a-z0-9-_]{1,32}\.[a-z0-9-_]{1,32}$' if not re . match ( pattern , tileset , flags = re . IGNORECASE ) : raise ValidationError ( 'tileset {0} is invalid, must match r"{1}"' . format ...
Validate the tileset name and ensure that it includes the username
30,925
def _resolve_username ( self , account , username ) : if account is not None : warnings . warn ( "Use keyword argument 'username' instead of 'account'" , DeprecationWarning ) return username or account or self . username
Resolve username and handle deprecation of account kwarg
30,926
def stage ( self , fileobj , creds = None , callback = None ) : if not hasattr ( fileobj , 'read' ) : fileobj = open ( fileobj , 'rb' ) if not creds : res = self . _get_credentials ( ) creds = res . json ( ) session = boto3_session ( aws_access_key_id = creds [ 'accessKeyId' ] , aws_secret_access_key = creds [ 'secretA...
Stages data in a Mapbox - owned S3 bucket
30,927
def create ( self , stage_url , tileset , name = None , patch = False , bypass = False ) : tileset = self . _validate_tileset ( tileset ) username , _name = tileset . split ( "." ) msg = { 'tileset' : tileset , 'url' : stage_url } if patch : msg [ 'patch' ] = patch if bypass : msg [ 'bypass_mbtiles_validation' ] = bypa...
Create a tileset
30,928
def list(self, account=None, username=None):
    """List all uploads for the resolved username.

    Returns the GET response after HTTP error handling.
    """
    owner = self._resolve_username(account, username)
    endpoint = URITemplate(self.baseuri + '/{username}').expand(username=owner)
    resp = self.session.get(endpoint)
    self.handle_http_error(resp)
    return resp
List of all uploads
30,929
def status ( self , upload , account = None , username = None ) : username = self . _resolve_username ( account , username ) if isinstance ( upload , dict ) : upload_id = upload [ 'id' ] else : upload_id = upload uri = URITemplate ( self . baseuri + '/{username}/{upload_id}' ) . expand ( username = username , upload_id...
Check status of upload
30,930
def upload(self, fileobj, tileset, name=None, patch=False, callback=None, bypass=False):
    """Stage *fileobj* on S3 and create a Mapbox tileset from it.

    Returns the response of the upload-creation request.
    """
    tileset = self._validate_tileset(tileset)
    staged_url = self.stage(fileobj, callback=callback)
    return self.create(staged_url, tileset, name=name, patch=patch, bypass=bypass)
Upload data and create a Mapbox tileset
30,931
def _validate_country_codes ( self , ccs ) : for cc in ccs : if cc not in self . country_codes : raise InvalidCountryCodeError ( cc ) return { 'country' : "," . join ( ccs ) }
Validate country code filters for use in requests .
30,932
def _validate_place_types ( self , types ) : for pt in types : if pt not in self . place_types : raise InvalidPlaceTypeError ( pt ) return { 'types' : "," . join ( types ) }
Validate place types and return a mapping for use in requests .
30,933
def forward ( self , address , types = None , lon = None , lat = None , country = None , bbox = None , limit = None , languages = None ) : uri = URITemplate ( self . baseuri + '/{dataset}/{query}.json' ) . expand ( dataset = self . name , query = address . encode ( 'utf-8' ) ) params = { } if country : params . update ...
Returns a Requests response object that contains a GeoJSON collection of places matching the given address .
30,934
def reverse ( self , lon , lat , types = None , limit = None ) : uri = URITemplate ( self . baseuri + '/{dataset}/{lon},{lat}.json' ) . expand ( dataset = self . name , lon = str ( round ( float ( lon ) , self . precision . get ( 'reverse' , 5 ) ) ) , lat = str ( round ( float ( lat ) , self . precision . get ( 'revers...
Returns a Requests response object that contains a GeoJSON collection of places near the given longitude and latitude .
30,935
def Session ( access_token = None , env = None ) : if env is None : env = os . environ . copy ( ) access_token = ( access_token or env . get ( 'MapboxAccessToken' ) or env . get ( 'MAPBOX_ACCESS_TOKEN' ) ) session = requests . Session ( ) session . params . update ( access_token = access_token ) session . headers . upd...
Create an HTTP session .
30,936
def username ( self ) : token = self . session . params . get ( 'access_token' ) if not token : raise errors . TokenError ( "session does not have a valid access_token param" ) data = token . split ( '.' ) [ 1 ] data = data . replace ( '-' , '+' ) . replace ( '_' , '/' ) + "===" try : return json . loads ( base64 . b64...
The username in the service's access token
30,937
def handle_http_error ( self , response , custom_messages = None , raise_for_status = False ) : if not custom_messages : custom_messages = { } if response . status_code in custom_messages . keys ( ) : raise errors . HTTPError ( custom_messages [ response . status_code ] ) if raise_for_status : response . raise_for_stat...
Converts service errors to Python exceptions
30,938
def match ( self , feature , gps_precision = None , profile = 'mapbox.driving' ) : profile = self . _validate_profile ( profile ) feature = self . _validate_feature ( feature ) geojson_line_feature = json . dumps ( feature ) uri = URITemplate ( self . baseuri + '/{profile}.json' ) . expand ( profile = profile ) params ...
Match features to OpenStreetMap data .
30,939
def _validate_geometry ( self , geometry ) : if geometry is not None and geometry not in self . valid_geometries : raise InvalidParameterError ( "{} is not a valid geometry" . format ( geometry ) ) return geometry
Validates geometry raising error if invalid .
30,940
def tilequery ( self , map_id , lon = None , lat = None , radius = None , limit = None , dedupe = None , geometry = None , layers = None , ) : if isinstance ( map_id , list ) : map_id = "," . join ( map_id ) lon = self . _validate_lon ( lon ) lat = self . _validate_lat ( lat ) path_values = dict ( api_name = self . api...
Returns data about specific features from a vector tileset .
30,941
def _validate_file_format ( self , file_format ) : if file_format not in self . valid_file_formats : raise InvalidFileFormatError ( "{} is not a valid file format" . format ( file_format ) ) return file_format
Validates file format raising error if invalid .
30,942
def _validate_feature_format ( self , feature_format ) : if feature_format not in self . valid_feature_formats : raise InvalidFeatureFormatError ( "{} is not a valid feature format" . format ( feature_format ) ) return feature_format
Validates feature format raising error if invalid .
30,943
def _validate_marker_name ( self , marker_name ) : if marker_name not in self . valid_marker_names : raise InvalidMarkerNameError ( "{} is not a valid marker name" . format ( marker_name ) ) return marker_name
Validates marker name raising error if invalid .
30,944
def _validate_label ( self , label ) : letter_pattern = compile ( "^[a-z]{1}$" ) number_pattern = compile ( "^[0]{1}$|^[1-9]{1,2}$" ) icon_pattern = compile ( "^[a-zA-Z ]{1,}$" ) if not match ( letter_pattern , label ) and not match ( number_pattern , label ) and not match ( icon_pattern , label ) : raise InvalidLabelE...
Validates label raising error if invalid .
30,945
def _validate_color ( self , color ) : three_digit_pattern = compile ( "^[a-f0-9]{3}$" ) six_digit_pattern = compile ( "^[a-f0-9]{6}$" ) if not match ( three_digit_pattern , color ) and not match ( six_digit_pattern , color ) : raise InvalidColorError ( "{} is not a valid color" . format ( color ) ) return color
Validates color raising error if invalid .
30,946
def tile ( self , map_id , x , y , z , retina = False , file_format = "png" , style_id = None , timestamp = None ) : if x is None or y is None or z is None : raise ValidationError ( "x, y, and z must be not be None" ) x = self . _validate_x ( x , z ) y = self . _validate_y ( y , z ) z = self . _validate_z ( z ) retina ...
Returns an image tile vector tile or UTFGrid in the specified file format .
30,947
def features ( self , map_id , feature_format = "json" ) : feature_format = self . _validate_feature_format ( feature_format ) path_values = dict ( map_id = map_id , feature_format = feature_format ) path_part = "/{map_id}/features.{feature_format}" uri = URITemplate ( self . base_uri + path_part ) . expand ( ** path_v...
Returns vector features from Mapbox Editor projects as GeoJSON or KML .
30,948
def metadata ( self , map_id , secure = False ) : path_values = dict ( map_id = map_id ) path_part = "/{map_id}.json" uri = URITemplate ( self . base_uri + path_part ) . expand ( ** path_values ) query_parameters = dict ( ) if secure : query_parameters [ "secure" ] = "" response = self . session . get ( uri , params = ...
Returns TileJSON metadata for a tileset .
30,949
def marker ( self , marker_name = None , label = None , color = None , retina = False ) : if marker_name is None : raise ValidationError ( "marker_name is a required argument" ) marker_name = self . _validate_marker_name ( marker_name ) retina = self . _validate_retina ( retina ) path_values = dict ( marker_name = mark...
Returns a single marker image without any background map .
30,950
def set_werkzeug_log_color ( ) : from django . core . management . color import color_style from werkzeug . serving import WSGIRequestHandler from werkzeug . _internal import _log _style = color_style ( ) _orig_log = WSGIRequestHandler . log def werk_log ( self , type , message , * args ) : try : msg = '%s - - [%s] %s'...
Try to set color to the werkzeug log .
30,951
def _slug_strip ( self , value ) : re_sep = '(?:-|%s)' % re . escape ( self . separator ) value = re . sub ( '%s+' % re_sep , self . separator , value ) return re . sub ( r'^%s+|%s+$' % ( re_sep , re_sep ) , '' , value )
Clean up a slug by removing slug separator characters that occur at the beginning or end of a slug .
30,952
def full_name(first_name, last_name, username, **extra):
    """Return "first last" (skipping empty parts), or *username* when
    both name parts are empty. Extra keyword args are ignored."""
    parts = [part for part in (first_name, last_name) if part]
    return " ".join(parts) if parts else username
Return full name or username .
30,953
def google(self, qs):
    """Write a CSV (Name, Email) to stdout, importable into Google GMail."""
    out = writer(sys.stdout)
    out.writerow(['Name', 'Email'])
    for entry in qs:
        out.writerow([full_name(**entry), entry['email']])
CSV format suitable for importing into google GMail
30,954
def linkedin(self, qs):
    """Write a CSV (First Name, Last Name, Email) to stdout, suitable
    for pre-approving members of a LinkedIn group."""
    out = writer(sys.stdout)
    out.writerow(['First Name', 'Last Name', 'Email'])
    for entry in qs:
        out.writerow([entry['first_name'], entry['last_name'], entry['email']])
CSV format suitable for importing into linkedin Groups . perfect for pre - approving members of a linkedin group .
30,955
def outlook ( self , qs ) : csvf = writer ( sys . stdout ) columns = [ 'Name' , 'E-mail Address' , 'Notes' , 'E-mail 2 Address' , 'E-mail 3 Address' , 'Mobile Phone' , 'Pager' , 'Company' , 'Job Title' , 'Home Phone' , 'Home Phone 2' , 'Home Fax' , 'Home Address' , 'Business Phone' , 'Business Phone 2' , 'Business Fax'...
CSV format suitable for importing into outlook
30,956
def vcard ( self , qs ) : try : import vobject except ImportError : print ( self . style . ERROR ( "Please install vobject to use the vcard export format." ) ) sys . exit ( 1 ) out = sys . stdout for ent in qs : card = vobject . vCard ( ) card . add ( 'fn' ) . value = full_name ( ** ent ) if not ent [ 'last_name' ] and...
VCARD format .
30,957
def parse_mysql_cnf ( dbinfo ) : read_default_file = dbinfo . get ( 'OPTIONS' , { } ) . get ( 'read_default_file' ) if read_default_file : config = configparser . RawConfigParser ( { 'user' : '' , 'password' : '' , 'database' : '' , 'host' : '' , 'port' : '' , 'socket' : '' , } ) import os config . read ( os . path . e...
Attempt to parse mysql database config file for connection settings . Ideally we would hook into django s code to do this but read_default_file is handled by the mysql C libs so we have to emulate the behaviour
30,958
def get_jobs ( when = None , only_scheduled = False ) : try : cpath = os . path . dirname ( os . path . realpath ( sys . argv [ 0 ] ) ) ppath = os . path . dirname ( cpath ) if ppath not in sys . path : sys . path . append ( ppath ) except Exception : pass _jobs = { } for app_name in [ app . name for app in apps . get_...
Return a dictionary mapping of job names together with their respective application class .
30,959
def runjobs_by_signals ( self , when , options ) : from django_extensions . management import signals from django . conf import settings verbosity = options [ "verbosity" ] for app_name in settings . INSTALLED_APPS : try : __import__ ( app_name + '.management' , '' , '' , [ '' ] ) except ImportError : pass for app in (...
Run jobs from the signals
30,960
def get_version(version):
    """Build a version string from a VERSION tuple.

    A present, non-None third component is joined with "." when it is
    an int and with "_" otherwise (e.g. alpha/beta tags); otherwise the
    result is just "major.minor".
    """
    base = "%s.%s" % version[:2]
    if len(version) > 2 and version[2] is not None:
        sep = "." if isinstance(version[2], int) else "_"
        return base + sep + str(version[2])
    return base
Dynamically calculate the version based on VERSION tuple .
30,961
def indentby ( parser , token ) : args = token . split_contents ( ) largs = len ( args ) if largs not in ( 2 , 4 ) : raise template . TemplateSyntaxError ( "indentby tag requires 1 or 3 arguments" ) indent_level = args [ 1 ] if_statement = None if largs == 4 : if_statement = args [ 3 ] nodelist = parser . parse ( ( 'en...
Add indentation to text between the tags by the given indentation level .
30,962
def _urlopen_as_json ( self , url , headers = None ) : req = Request ( url , headers = headers ) return json . loads ( urlopen ( req ) . read ( ) )
Shortcut for returning the contents as JSON
30,963
def check_pypi ( self ) : for dist in get_installed_distributions ( ) : name = dist . project_name if name in self . reqs . keys ( ) : self . reqs [ name ] [ "dist" ] = dist pypi = ServerProxy ( "https://pypi.python.org/pypi" ) for name , req in list ( self . reqs . items ( ) ) : if req [ "url" ] : continue elif "dist"...
If the requirement is frozen to PyPI, check for a new version.
30,964
def check_other ( self ) : if self . reqs : self . stdout . write ( self . style . ERROR ( "\nOnly pypi and github based requirements are supported:" ) ) for name , req in self . reqs . items ( ) : if "dist" in req : pkg_info = "{dist.project_name} {dist.version}" . format ( dist = req [ "dist" ] ) elif "url" in req : ...
If the requirement is frozen somewhere other than pypi or github skip .
30,965
def get_crypt_class ( self ) : crypt_type = getattr ( settings , 'ENCRYPTED_FIELD_MODE' , 'DECRYPT_AND_ENCRYPT' ) if crypt_type == 'ENCRYPT' : crypt_class_name = 'Encrypter' elif crypt_type == 'DECRYPT_AND_ENCRYPT' : crypt_class_name = 'Crypter' else : raise ImproperlyConfigured ( 'ENCRYPTED_FIELD_MODE must be either D...
Get the Keyczar class to use .
30,966
def _postgresql ( self , dbhost , dbport , dbname , dbuser , dbpass , dsn_style = None ) : dsn = [ ] if dsn_style is None or dsn_style == 'all' or dsn_style == 'keyvalue' : dsnstr = "host='{0}' dbname='{2}' user='{3}' password='{4}'" if dbport is not None : dsnstr += " port='{1}'" dsn . append ( dsnstr . format ( dbhos...
PostgreSQL psycopg2 driver accepts two syntaxes
30,967
def check_dependencies ( model , model_queue , avaliable_models ) : allowed_links = [ m . model . __name__ for m in model_queue ] + [ model . __name__ , 'ContentType' ] for field in model . _meta . fields : if not field . remote_field : continue if field . remote_field . model . __name__ not in allowed_links : if field...
Check that all the dependencies for this model are already in the queue.
30,968
def get_import_lines(self):
    """Render the stored imports mapping as "from X import Y" lines.

    ``self.imports`` maps imported name -> module path; an empty mapping
    yields an empty list.
    """
    if not self.imports:
        return []
    return ["from %s import %s" % (module, name)
            for name, module in self.imports.items()]
Take the stored imports and converts them to lines
30,969
def skip ( self ) : if self . skip_me is not None : return self . skip_me cls = self . instance . __class__ using = router . db_for_write ( cls , instance = self . instance ) collector = Collector ( using = using ) collector . collect ( [ self . instance ] , collect_related = False ) sub_objects = sum ( [ list ( i ) fo...
Determine whether or not this object should be skipped . If this model instance is a parent of a single subclassed instance skip it . The subclassed instance will create this parent instance for us .
30,970
def instantiate ( self ) : code_lines = [ ] if not self . instantiated : code_lines . append ( "%s = %s()" % ( self . variable_name , self . model . __name__ ) ) self . instantiated = True pk_name = self . instance . _meta . pk . name key = '%s_%s' % ( self . model . __name__ , getattr ( self . instance , pk_name ) ) s...
Write lines for instantiation
30,971
def get_waiting_list ( self , force = False ) : code_lines = [ ] skip_autofield = self . options [ 'skip_autofield' ] for field in list ( self . waiting_list ) : try : value = get_attribute_value ( self . instance , field , self . context , force = force , skip_autofield = skip_autofield ) code_lines . append ( '%s.%s ...
Add lines for any waiting fields that can be completed now .
30,972
def get_many_to_many_lines ( self , force = False ) : lines = [ ] for field , rel_items in self . many_to_many_waiting_list . items ( ) : for rel_item in list ( rel_items ) : try : pk_name = rel_item . _meta . pk . name key = '%s_%s' % ( rel_item . __class__ . __name__ , getattr ( rel_item , pk_name ) ) value = "%s" % ...
Generate lines that define many to many relations for this instance .
30,973
def _queue_models ( self , models , context ) : model_queue = [ ] number_remaining_models = len ( models ) MAX_CYCLES = number_remaining_models allowed_cycles = MAX_CYCLES while number_remaining_models > 0 : previous_number_remaining_models = number_remaining_models model = models . pop ( 0 ) if check_dependencies ( mo...
Work out an appropriate ordering for the models. This isn't essential, but it makes the script look nicer because more instances can be defined on their first try.
30,974
def sql_to_dict ( self , query , param ) : cursor = connection . cursor ( ) cursor . execute ( query , param ) fieldnames = [ name [ 0 ] for name in cursor . description ] result = [ ] for row in cursor . fetchall ( ) : rowset = [ ] for field in zip ( fieldnames , row ) : rowset . append ( field ) result . append ( dic...
Execute query and return a dict
30,975
def print_diff(self, style=no_style()):
    """Print schema differences to stdout, as SQL or as plain text
    depending on the ``sql`` option."""
    printer = self.print_diff_sql if self.options['sql'] else self.print_diff_text
    printer(style)
Print differences to stdout
30,976
def pygments_required(func):
    """Decorator that raises ImportError at call time when the optional
    'pygments' dependency is not installed.

    Args:
        func: the function to guard.

    Returns:
        A wrapper with the same metadata as *func*.
    """
    import functools

    # functools.wraps preserves __name__/__doc__ so the decorated
    # template tags keep their identity for registration and debugging.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not HAS_PYGMENTS:
            raise ImportError(
                "Please install 'pygments' library to use syntax_color.")
        return func(*args, **kwargs)
    return wrapper
Raise ImportError if pygments is not installed .
30,977
def addparentstofks(rels, fks):
    """Patch *fks* in place so each child class references its parent.

    For every (parent, child) relation the child's "models.Model" base
    is replaced with the parent's name, and the parent is appended to
    the child's FK list when not already present — yielding a usable
    output ordering for the generated classes.
    """
    for relation in rels:
        child = index(fks, relation[1])
        parent = index(fks, relation[0])
        fks[child][2] = fks[child][2].replace("models.Model", parent)
        if parent not in fks[child][0]:
            fks[child][0].append(parent)
Get a list of relations between parents and sons and a dict of clases named in dia and modifies the fks to add the parent as fk to get order on the output of classes and replaces the base class of the son to put the class parent name .
30,978
def get_app_name(mod_name):
    """Retrieve the application name from a models-module path.

    Looks for the component just above the models module; falls back to
    the second-from-last component, and to the whole path when it is
    too short.
    """
    parts = mod_name.split('.')
    parts.reverse()
    try:
        anchor = parts.index(MODELS_MODULE_NAME)
    except ValueError:
        # No models module in the path; assume "<app>.<module>" layout.
        anchor = 0
    try:
        return parts[anchor + 1]
    except IndexError:
        return mod_name
Retrieve application name from models . py module path
30,979
def get_generic_fields():
    """Return a list of all GenericForeignKey descriptors across all
    installed models."""
    return [
        attr
        for model in apps.get_models()
        for attr in model.__dict__.values()
        if isinstance(attr, GenericForeignKey)
    ]
Return a list of all GenericForeignKeys in all models .
30,980
def merge_model_instances ( self , primary_object , alias_objects ) : generic_fields = get_generic_fields ( ) related_fields = list ( filter ( lambda x : x . is_relation is True , primary_object . _meta . get_fields ( ) ) ) many_to_many_fields = list ( filter ( lambda x : x . many_to_many is True , related_fields ) ) r...
Merge several model instances into one the primary_object . Use this function to merge model objects and migrate all of the related fields from the alias objects the primary object .
30,981
def add_arguments(self, parser):
    """Register the positional app_label list plus every entry of
    ``self.arguments`` (keys are space-separated flag strings, values
    are kwargs for ``parser.add_argument``)."""
    parser.add_argument('app_label', nargs='*')
    for flags, options in self.arguments.items():
        parser.add_argument(*flags.split(' '), **options)
Unpack self . arguments for parser . add_arguments .
30,982
def render_output_json(self, graph_data, output_file=None):
    """Write model graph data as JSON to *output_file*, or to
    ``self.stdout`` when no file is given."""
    if not output_file:
        self.stdout.write(json.dumps(graph_data))
        return
    with open(output_file, 'wt') as handle:
        json.dump(graph_data, handle)
Write model data to file or stdout in JSON format .
30,983
def render_output_pygraphviz ( self , dotdata , ** kwargs ) : if not HAS_PYGRAPHVIZ : raise CommandError ( "You need to install pygraphviz python module" ) version = pygraphviz . __version__ . rstrip ( "-svn" ) try : if tuple ( int ( v ) for v in version . split ( '.' ) ) < ( 0 , 36 ) : tmpfile = tempfile . NamedTempor...
Render model data as image using pygraphviz
30,984
def render_output_pydot ( self , dotdata , ** kwargs ) : if not HAS_PYDOT : raise CommandError ( "You need to install pydot python module" ) graph = pydot . graph_from_dot_data ( dotdata ) if not graph : raise CommandError ( "pydot returned an error" ) if isinstance ( graph , ( list , tuple ) ) : if len ( graph ) > 1 :...
Render model data as image using pydot
30,985
def extract_views_from_urlpatterns ( self , urlpatterns , base = '' , namespace = None ) : views = [ ] for p in urlpatterns : if isinstance ( p , ( URLPattern , RegexURLPattern ) ) : try : if not p . name : name = p . name elif namespace : name = '{0}:{1}' . format ( namespace , p . name ) else : name = p . name patter...
Return a list of views from a list of urlpatterns .
30,986
def foreignkey_autocomplete ( self , request ) : query = request . GET . get ( 'q' , None ) app_label = request . GET . get ( 'app_label' , None ) model_name = request . GET . get ( 'model_name' , None ) search_fields = request . GET . get ( 'search_fields' , None ) object_pk = request . GET . get ( 'object_pk' , None ...
Search in the fields of the given related model and returns the result as a simple string to be used by the jQuery Autocomplete plugin
30,987
def formfield_for_dbfield ( self , db_field , ** kwargs ) : if isinstance ( db_field , models . ForeignKey ) and db_field . name in self . related_search_fields : help_text = self . get_help_text ( db_field . name , db_field . remote_field . model . _meta . object_name ) if kwargs . get ( 'help_text' ) : help_text = si...
Override the default widget for Foreignkey fields if they are specified in the related_search_fields class attribute .
30,988
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
    """Alternative for django.views.debug.technical_500_response that
    re-raises the exception instead of rendering a debug page.

    When the frame below the top of the traceback belongs to the WSGI
    handler itself, the traceback is remembered on what appears to be
    thread-local storage (``tld``) so that a later call can substitute
    the previously captured WSGI-level traceback.
    """
    try:
        # tb.tb_next may be None and f_locals may lack 'self'; any
        # resulting AttributeError just means the traceback is kept as-is.
        if isinstance(tb.tb_next.tb_frame.f_locals.get('self'), WSGIHandler):
            tld.wsgi_tb = tb
        elif tld.wsgi_tb:
            tb = tld.wsgi_tb
    except AttributeError:
        pass
    six.reraise(exc_type, exc_value, tb)
Alternative function for django . views . debug . technical_500_response .
30,989
def invalidate_objects_cf ( self ) : if not self . AWS_CLOUDFRONT_DISTRIBUTION : raise CommandError ( 'An object invalidation was requested but the variable ' 'AWS_CLOUDFRONT_DISTRIBUTION is not present in your settings.' ) chunk = 1000 conn = self . open_cf ( ) objs = self . uploaded_files chunks = [ objs [ i : i + ch...
Split the invalidation request in groups of 1000 objects
30,990
def open_s3(self):
    """Open a connection to S3 and return the target bucket, creating
    the bucket when it does not already exist."""
    conn = boto.connect_s3(
        self.AWS_ACCESS_KEY_ID,
        self.AWS_SECRET_ACCESS_KEY,
        **self.get_s3connection_kwargs())
    try:
        return conn.get_bucket(self.AWS_BUCKET_NAME)
    except boto.exception.S3ResponseError:
        return conn.create_bucket(self.AWS_BUCKET_NAME)
Open connection to S3 returning bucket and key
30,991
def set_application_name ( self , options ) : supported_backends = [ 'django.db.backends.postgresql' , 'django.db.backends.postgresql_psycopg2' ] opt_name = 'fallback_application_name' default_app_name = 'django_shell' app_name = default_app_name dbs = getattr ( settings , 'DATABASES' , [ ] ) for db in dbs . keys ( ) :...
Set the application_name on PostgreSQL connection
30,992
def process_message ( self , peer , mailfrom , rcpttos , data , ** kwargs ) : inheaders = 1 lines = data . split ( '\n' ) logger . info ( '---------- MESSAGE FOLLOWS ----------' ) for line in lines : if inheaders and not line : logger . info ( 'X-Peer: %s' % peer [ 0 ] ) inheaders = 0 logger . info ( line ) logger . in...
Output will be sent to the module logger at INFO level .
30,993
def run_from_argv(self, argv):
    """Overridden in order to capture the full command line.

    The joined argv is stored on ``self.argv_string`` before delegating
    to the normal command processing.
    """
    self.argv_string = ' '.join(argv)
    super(EmailNotificationCommand, self).run_from_argv(argv)
Overridden in order to access the command line arguments.
30,994
def execute(self, *args, **options):
    """Overridden in order to send emails on unhandled exceptions.

    A notification (with traceback) is sent when enabled via the
    ``email_exception`` option or the class attribute of the same name;
    the exception is then re-raised unchanged.
    """
    try:
        super(EmailNotificationCommand, self).execute(*args, **options)
    except Exception:
        if options['email_exception'] or getattr(self, 'email_exception', False):
            self.send_email_notification(include_traceback=True)
        raise
Overridden in order to send emails on unhandled exceptions.
30,995
def send_email_notification ( self , notification_id = None , include_traceback = False , verbosity = 1 ) : if notification_id is not None : try : email_settings = settings . EMAIL_NOTIFICATIONS . get ( notification_id , { } ) except AttributeError : email_settings = { } else : email_settings = { } if not include_trace...
Send email notifications .
30,996
def load_tag_library ( libname ) : from django . template . backends . django import get_installed_libraries from django . template . library import InvalidTemplateLibrary try : lib = get_installed_libraries ( ) [ libname ] lib = importlib . import_module ( lib ) . register return lib except ( InvalidTemplateLibrary , ...
Load a templatetag library on multiple Django versions .
30,997
def get_template_setting(template_key, default=None):
    """Return the first value of *template_key* found in the TEMPLATES
    setting, or *default* when the setting is missing or has no match."""
    for config in getattr(settings, 'TEMPLATES', None) or ():
        if template_key in config:
            return config[template_key]
    return default
Read template settings
30,998
def use_model ( self , model_name ) : if self . exclude_models : for model_pattern in self . exclude_models : model_pattern = '^%s$' % model_pattern . replace ( '*' , '.*' ) if re . search ( model_pattern , model_name ) : return False elif self . include_models : for model_pattern in self . include_models : model_patte...
Decide whether to use a model based on the model name and the lists of models to exclude and include .
30,999
def GetClosestPoint ( x , a , b ) : assert ( x . IsUnitLength ( ) ) assert ( a . IsUnitLength ( ) ) assert ( b . IsUnitLength ( ) ) a_cross_b = a . RobustCrossProd ( b ) p = x . Minus ( a_cross_b . Times ( x . DotProd ( a_cross_b ) / a_cross_b . Norm2 ( ) ) ) if SimpleCCW ( a_cross_b , a , p ) and SimpleCCW ( p , b , a...
Returns the point on the great circle segment ab closest to x .