idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
245,000
def startDataStoreMachine(self, dataStoreItemName, machineName):
    """Start the database instance running on a Data Store machine.

    Args:
        dataStoreItemName: name of the registered enterprise database item.
        machineName: machine hosting the database instance.
    Returns:
        JSON response from the server's ``start`` operation.
    """
    start_url = "%s/items/enterpriseDatabases/%s/machines/%s/start" % (
        self._url, dataStoreItemName, machineName)
    return self._post(url=start_url,
                      param_dict={"f": "json"},
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Starts the database instance running on the Data Store machine .
112
12
245,001
def unregisterDataItem(self, path):
    """Unregister a data item previously registered with the server's data store.

    Args:
        path: item path to unregister.
    Returns:
        JSON response; removal is always forced (``force="true"``).
    """
    payload = {"f": "json", "itempath": path, "force": "true"}
    return self._post(self._url + "/unregisterItem",
                      param_dict=payload,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Unregisters a data item that has been previously registered with the server's data store .
94
18
245,002
def validateDataStore(self, dataStoreName, machineName):
    """Check the status of ArcGIS Data Store on a machine (health check).

    Args:
        dataStoreName: name of the registered data store.
        machineName: machine to validate.
    Returns:
        JSON health-check response.
    """
    validate_url = "%s/items/enterpriseDatabases/%s/machines/%s/validate" % (
        self._url, dataStoreName, machineName)
    return self._post(url=validate_url,
                      param_dict={"f": "json"},
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
Checks the status of ArcGIS Data Store and provides a health check response .
110
17
245,003
def layers(self):
    """Get the globe service layers.

    Lazily initializes the service on first access, then attaches a
    ``GlobeServiceLayer`` wrapper to each layer dict under ``'object'``.
    NOTE: this annotates the cached layer dicts in place.
    """
    if self._layers is None:
        self.__init()
    wrapped = []
    for entry in self._layers:
        entry['object'] = GlobeServiceLayer(
            url=self._url + "/%s" % entry['id'],
            securityHandler=self._securityHandler,
            proxy_port=self._proxy_port,
            proxy_url=self._proxy_url)
        wrapped.append(entry)
    return wrapped
gets the globe service layers
106
5
245,004
def loadFeatures(self, path_to_fc):
    """Load a feature class's features into this object's ``value``.

    Args:
        path_to_fc: path to the source feature class.
    """
    from ..common.spatial import featureclass_to_json
    self.value = json.loads(featureclass_to_json(path_to_fc))
loads a feature class features to the object
51
8
245,005
def fromFeatureClass(fc, paramName):
    """Build a GPFeatureRecordSetLayer from a feature class.

    Args:
        fc: feature class to convert.
        paramName: GP parameter name to assign.
    Returns:
        A populated GPFeatureRecordSetLayer.
    """
    from ..common.spatial import featureclass_to_json
    layer = GPFeatureRecordSetLayer()
    layer.value = json.loads(featureclass_to_json(fc))
    layer.paramName = paramName
    return layer
returns a GPFeatureRecordSetLayer object from a feature class
65
13
245,006
def asDictionary(self):
    """Return a dictionary representation of this simple renderer."""
    return {
        "type": "simple",
        "symbol": self._symbol.asDictionary,
        "label": self._label,
        "description": self._description,
        "rotationType": self._rotationType,
        "rotationExpression": self._rotationExpression,
    }
provides a dictionary representation of the object
82
8
245,007
def searchDiagrams(self, whereClause=None, relatedObjects=None,
                   relatedSchematicObjects=None):
    """Search diagrams on the schematic service.

    Any truthy filter argument is forwarded; the result is an array of
    Schematic Diagram Information objects.
    """
    params = {"f": "json"}
    for key, val in (("where", whereClause),
                     ("relatedObjects", relatedObjects),
                     ("relatedSchematicObjects", relatedSchematicObjects)):
        if val:
            params[key] = val
    return self._get(url=self._url + "/searchDiagrams",
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The Schematic Search Diagrams operation is performed on the schematic service resource . The result of this operation is an array of Schematic Diagram Information Object .
157
32
245,008
def _validateurl(self, url):
    """Assemble and normalize the server url.

    Sets ``self._adminUrl`` and returns the ``.../rest/services`` url.

    Bug fixed: the original compared popped path parts with
    ``not in "services"`` — a *substring* test, so any part that happened
    to be a substring of "services" (e.g. "vic") stopped the trim early.
    The comparison is now exact equality.  The bare ``next`` no-op
    expression is replaced with ``pass``.
    """
    parsed = urlparse(url)
    path = parsed.path.strip("/")
    if path:
        parts = path.split("/")
        url_types = ("admin", "manager", "rest")
        if any(marker in parts for marker in url_types):
            # drop everything after (and including) the first marker found
            while parts.pop() not in url_types:
                pass
        elif "services" in parts:
            while parts.pop() != "services":
                pass
        path = "/".join(parts)
    else:
        path = "arcgis"
    self._adminUrl = "%s://%s/%s/admin" % (parsed.scheme, parsed.netloc, path)
    return "%s://%s/%s/rest/services" % (parsed.scheme, parsed.netloc, path)
assembles the server url
181
5
245,009
def admin(self):
    """Access the administrative side of ArcGIS Server.

    Raises:
        Exception: when no security handler is configured.
    """
    if self._securityHandler is None:
        raise Exception(
            "Cannot connect to adminstrative server without authentication")
    from ..manageags import AGSAdministration
    return AGSAdministration(url=self._adminUrl,
                             securityHandler=self._securityHandler,
                             proxy_url=self._proxy_url,
                             proxy_port=self._proxy_port,
                             initialize=False)
points to the administrative side of ArcGIS Server
90
12
245,010
def addUser(self, username, password, firstname, lastname, email, role):
    """Append a user record to the invitation list.

    The record also carries a ``fullname`` composed from the first and
    last names.
    """
    record = {
        "username": username,
        "password": password,
        "firstname": firstname,
        "lastname": lastname,
        "fullname": "%s %s" % (firstname, lastname),
        "email": email,
        "role": role,
    }
    self._invites.append(record)
adds a user to the invitation list
90
8
245,011
def removeByIndex(self, index):
    """Remove the invitation at position ``index`` from the invitation list.

    Out-of-range indexes are ignored silently (matching the original's
    intent).

    Bugs fixed: the original bound check ``index < len(...) - 1`` rejected
    the last valid position, and ``list.remove(index)`` deletes by *value*,
    not position (raising ValueError unless the integer happened to be an
    element of the list).
    """
    if 0 <= index < len(self._invites):
        del self._invites[index]
removes a user from the invitation list by position
38
10
245,012
def fromDictionary(value):
    """Create a PortalParameters object from a dictionary.

    Each key ``k`` is stored as a private attribute ``_k``.

    Raises:
        AttributeError: if ``value`` is not a dict.
    """
    if not isinstance(value, dict):
        raise AttributeError("Invalid input.")
    params = PortalParameters()
    for key, val in value.items():
        setattr(params, "_%s" % key, val)
    return params
creates the portal properties object from a dictionary
62
9
245,013
def value(self):
    """Return the allowed, non-None attributes as a dictionary."""
    result = {}
    for key in self.__allowed_keys:
        attr = getattr(self, "_" + key)
        if attr is not None:
            result[key] = attr
    return result
returns the values as a dictionary
46
7
245,014
def tile_fonts(self, fontstack, stack_range, out_folder=None):
    """Download glyphs in PBF format for a font stack.

    Args:
        fontstack: font stack name.
        stack_range: glyph range, e.g. ``"0-255"``.
        out_folder: download target; defaults to the system temp dir.
    """
    pbf_url = "{url}/resources/fonts/{fontstack}/{stack_range}.pbf".format(
        url=self._url, fontstack=fontstack, stack_range=stack_range)
    if out_folder is None:
        out_folder = tempfile.gettempdir()
    # NOTE(review): this class passes self._proxy_host (not _proxy_url) as
    # the proxy url — preserved as-is; confirm against the class definition.
    return self._get(url=pbf_url,
                     param_dict={},
                     out_folder=out_folder,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_host)
This resource returns glyphs in PBF format . The template url for this fonts resource is represented in Vector Tile Style resource .
148
25
245,015
def tile_sprite(self, out_format="sprite.json", out_folder=None):
    """Download the vector-tile sprite resource (image and metadata).

    Args:
        out_format: sprite resource name to fetch.
        out_folder: download target; defaults to the system temp dir.
    """
    sprite_url = "{url}/resources/sprites/{f}".format(url=self._url,
                                                      f=out_format)
    if out_folder is None:
        out_folder = tempfile.gettempdir()
    return self._get(url=sprite_url,
                     param_dict={},
                     out_folder=out_folder,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_host)
This resource returns sprite image and metadata
131
7
245,016
def layers(self):
    """Get the layers for the feature service, loading them on first use."""
    if self._layers is None:
        self.__init()
        self._getLayers()
    return self._layers
gets the layers for the feature service
35
7
245,017
def _getLayers(self):
    """Fetch the service definition and populate ``self._layers`` with a
    FeatureLayer per layer id."""
    response = self._get(self._url, {"f": "json"},
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
    self._layers = []
    if 'layers' in response:
        for info in response["layers"]:
            self._layers.append(
                layer.FeatureLayer(url=self._url + "/%s" % info['id'],
                                   securityHandler=self._securityHandler,
                                   proxy_port=self._proxy_port,
                                   proxy_url=self._proxy_url))
gets layers for the feature service
162
7
245,018
def query(self, layerDefsFilter=None, geometryFilter=None, timeFilter=None,
          returnGeometry=True, returnIdsOnly=False, returnCountOnly=False,
          returnZ=False, returnM=False, outSR=None):
    """Run the feature-service-level Query operation.

    Returns a list of FeatureSet objects (one per layer in the response)
    unless ``returnIdsOnly``/``returnCountOnly`` is requested or the
    response has an unexpected type, in which case the raw response is
    returned.
    """
    params = {
        "f": "json",
        "returnGeometry": returnGeometry,
        "returnIdsOnly": returnIdsOnly,
        "returnCountOnly": returnCountOnly,
        "returnZ": returnZ,
        "returnM": returnM,
    }
    if layerDefsFilter is not None and isinstance(layerDefsFilter,
                                                  LayerDefinitionFilter):
        params['layerDefs'] = layerDefsFilter.filter
    if geometryFilter is not None and isinstance(geometryFilter,
                                                 GeometryFilter):
        gf = geometryFilter.filter
        params['geometryType'] = gf['geometryType']
        params['spatialRel'] = gf['spatialRel']
        params['geometry'] = gf['geometry']
        params['inSR'] = gf['inSR']
    if outSR is not None and isinstance(outSR, SpatialReference):
        params['outSR'] = outSR.asDictionary
    if timeFilter is not None and isinstance(timeFilter, TimeFilter):
        params['time'] = timeFilter.filter
    res = self._post(url=self._url + "/query",
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
    # `== False` kept deliberately: matches the original's exact semantics
    if returnIdsOnly == False and returnCountOnly == False:
        if isinstance(res, str):
            jd = json.loads(res)
            return [FeatureSet.fromJSON(json.dumps(lyr))
                    for lyr in jd['layers']]
        elif isinstance(res, dict):
            return [FeatureSet.fromJSON(json.dumps(lyr))
                    for lyr in res['layers']]
        else:
            return res
    return res
The Query operation is performed on a feature service resource
467
10
245,019
def create_feature_layer(ds, sql, name="layer"):
    """Create an in-memory feature layer from a dataset and a where clause.

    Raises:
        Exception: when ArcPy is not available.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    layer_result = arcpy.MakeFeatureLayer_management(in_features=ds,
                                                     out_layer=name,
                                                     where_clause=sql)
    return layer_result[0]
creates a feature layer object
76
6
245,020
def featureclass_to_json(fc):
    """Convert a feature class (or table) to a JSON string.

    Tables have no geometry, so they are serialized as a record set.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    desc = arcpy.Describe(fc)
    if desc.dataType in ("Table", "TableView"):
        return recordset_to_json(table=fc)
    return arcpy.FeatureSet(fc).JSON
converts a feature class to JSON
91
7
245,021
def get_attachment_data(attachmentTable, sql, nameField="ATT_NAME",
                        blobField="DATA", contentTypeField="CONTENT_TYPE",
                        rel_object_field="REL_OBJECTID"):
    """Extract attachment blobs from an attachment table into temp files.

    Args:
        attachmentTable: path to the attachment table.
        sql: where clause selecting the rows to export.
        nameField/blobField/contentTypeField/rel_object_field: column names.
    Returns:
        A list of dicts with keys ``name``, ``blob`` (temp file path),
        ``content`` and ``rel_oid``, suitable for uploading to a feature
        service.

    Fix: each blob file is now written via a ``with`` block so the handle
    is closed even when the write raises (the original closed it manually).
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    ret_rows = []
    cursor_fields = [nameField, blobField, contentTypeField, rel_object_field]
    with arcpy.da.SearchCursor(attachmentTable, cursor_fields,
                               where_clause=sql) as rows:
        for row in rows:
            # NOTE(review): relies on the Windows-only 'temp' env var —
            # TODO consider tempfile.gettempdir() for portability.
            temp_f = os.environ['temp'] + os.sep + row[0]
            with open(temp_f, 'wb') as writer:
                writer.write(row[1])
                writer.flush()
            ret_rows.append({
                "name": row[0],
                "blob": temp_f,
                "content": row[2],
                "rel_oid": row[3],
            })
    return ret_rows
gets all the data to pass to a feature service
235
10
245,022
def get_records_with_attachments(attachment_table,
                                 rel_object_field="REL_OBJECTID"):
    """Return the distinct related ObjectIDs (as strings) present in an
    attachment table."""
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    oids = []
    with arcpy.da.SearchCursor(attachment_table, [rel_object_field]) as rows:
        for row in rows:
            oid = str(row[0])
            if oid not in oids:
                oids.append("%s" % oid)
    return oids
returns a list of ObjectIDs for rows in the attachment table
123
13
245,023
def get_OID_field(fs):
    """Return a featureset's ObjectID field name, or None when it has no OID."""
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    description = arcpy.Describe(fs)
    return description.OIDFieldName if description.hasOID else None
returns a featureset's object id field
58
9
245,024
def merge_feature_class(merges, out_fc, cleanUp=True):
    """Merge feature classes (or tables) into a single output dataset.

    Args:
        merges: list of input datasets; an empty list returns None.
        out_fc: output dataset path.
        cleanUp: when True, delete the inputs after merging.
    Returns:
        The merged output path, or None when ``merges`` is empty.

    Refactor: the copy/merge logic was duplicated verbatim across the two
    cleanUp branches; it now lives in one helper.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    if len(merges) == 0:
        return None
    merged = _copy_or_merge(merges, out_fc)
    if cleanUp:
        for m in merges:
            arcpy.Delete_management(m)
    return merged


def _copy_or_merge(merges, out_fc):
    """Copy a single input (features or rows) or merge several into out_fc."""
    if len(merges) == 1:
        desc = arcpy.Describe(merges[0])
        if hasattr(desc, 'shapeFieldName'):
            return arcpy.CopyFeatures_management(merges[0], out_fc)[0]
        return arcpy.CopyRows_management(merges[0], out_fc)[0]
    return arcpy.Merge_management(inputs=merges, output=out_fc)[0]
merges featureclass into a single feature class
309
9
245,025
def insert_rows(fc, features, fields, includeOIDField=False, oidField=None):
    """Insert a list of feature objects into a feature class.

    Args:
        fc: target feature class path.
        features: feature objects exposing ``asRow``, ``fields`` and
            ``geometry``.
        fields: field names to write (geometry is appended automatically).
        includeOIDField: when True, copy the source OID into a new
            ``FSL_OID`` long field.
        oidField: name of the source OID field (used with includeOIDField).
    Returns:
        ``fc``.

    Fix: the caller's ``fields`` list is no longer mutated — the original
    appended "FSL_OID"/"SHAPE@" to the list passed in, which leaked into
    subsequent calls reusing the same list.
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    fields = list(fields)  # work on a copy; never mutate the caller's list
    if includeOIDField:
        arcpy.AddField_management(fc, "FSL_OID", "LONG")
        fields.append("FSL_OID")
    if len(features) == 0:
        return fc
    fields.append("SHAPE@")
    workspace = os.path.dirname(fc)
    with arcpy.da.Editor(workspace) as edit:
        date_fields = getDateFields(fc)
        icur = arcpy.da.InsertCursor(fc, fields)
        for feat in features:
            row = [""] * len(fields)
            drow = feat.asRow[0]
            dfields = feat.fields
            for field in fields:
                if field in dfields or (includeOIDField and
                                        field == "FSL_OID"):
                    if field in date_fields:
                        row[fields.index(field)] = toDateTime(
                            drow[dfields.index(field)])
                    elif field == "FSL_OID":
                        row[fields.index("FSL_OID")] = drow[
                            dfields.index(oidField)]
                    else:
                        row[fields.index(field)] = drow[
                            dfields.index(field)]
            row[fields.index("SHAPE@")] = feat.geometry
            icur.insertRow(row)
    return fc
inserts rows based on a list features object
377
9
245,026
def create_feature_class(out_path, out_name, geom_type, wkid, fields,
                         objectIdField):
    """Create a feature class in a gdb/folder and add the requested fields.

    Returns:
        (path to the new feature class, list of added field names)
    """
    if arcpyFound == False:
        raise Exception("ArcPy is required to use this function")
    arcpy.env.overwriteOutput = True
    fc = arcpy.CreateFeatureclass_management(
        out_path=out_path,
        out_name=out_name,
        geometry_type=lookUpGeometry(geom_type),
        spatial_reference=arcpy.SpatialReference(wkid))[0]
    field_names = []
    for fld in fields:
        if fld['name'] == objectIdField:
            continue  # the OID field is created automatically
        field_names.append(fld['name'])
        arcpy.AddField_management(out_path + os.sep + out_name,
                                  fld['name'],
                                  lookUpFieldType(fld['type']))
    return fc, field_names
creates a feature class in a given gdb or folder
204
12
245,027
def download_arcrest():
    """Download the ArcREST master branch and repackage its two source
    packages as separate zip files.

    Returns:
        (path to arcrest.zip, path to arcresthelper.zip)
    """
    url = "https://github.com/Esri/ArcREST/archive/master.zip"
    file_name = os.path.join(arcpy.env.scratchFolder, os.path.basename(url))
    scratch_folder = os.path.join(arcpy.env.scratchFolder, "temp34asdf3d")
    arcrest_zip = os.path.join(scratch_folder, "arcrest.zip")
    arcresthelper_zip = os.path.join(scratch_folder, "arcresthelper.zip")
    if sys.version_info.major == 3:
        import urllib.request
        urllib.request.urlretrieve(url, file_name)
    else:
        import urllib
        urllib.urlretrieve(url, file_name)
    # start from a clean scratch folder
    if os.path.isdir(scratch_folder):
        shutil.rmtree(scratch_folder)
    os.makedirs(scratch_folder)
    # unpack the downloaded archive
    with zipfile.ZipFile(file_name, 'r') as zip_obj:
        zip_obj.extractall(scratch_folder)
    # re-zip each package separately
    for target, pkg in ((arcrest_zip, "arcrest"),
                        (arcresthelper_zip, "arcresthelper")):
        with zipfile.ZipFile(target, 'w') as zip_obj:
            zipws(path=os.path.join(scratch_folder, "arcrest-master",
                                    "src", pkg),
                  zip=zip_obj, keep=True)
    shutil.rmtree(os.path.join(scratch_folder, "arcrest-master"))
    return arcrest_zip, arcresthelper_zip
downloads arcrest to disk
467
6
245,028
def handler(self):
    """Get the NTLM auth handler, creating and caching it on first use.

    Raises:
        Exception: when the ntlm package is unavailable.
    """
    if not hasNTLM:
        raise Exception("Missing Ntlm python package.")
    if self._handler is None:
        passman = request.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, self._parsed_org_url,
                             self._login_username, self._password)
        self._handler = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(passman)
    return self._handler
gets the security handler for the class
107
7
245,029
def token(self):
    """Get an AGS server token from the portal token handler."""
    return self._portalTokenHandler.servertoken(serverURL=self._serverUrl,
                                                referer=self._referer)
gets the AGS server token
37
6
245,030
def token(self):
    """Return a valid OAuth token, regenerating it when missing or expired."""
    if (self._token is None
            or datetime.datetime.now() >= self._token_expires_on):
        self._generateForOAuthSecurity(self._client_id,
                                       self._secret_id,
                                       self._token_url)
    return self._token
obtains a token from the site
70
7
245,031
def _generateForOAuthSecurity(self, client_id, secret_id, token_url=None):
    """Generate a token via the OAuth2 client-credentials flow.

    On success caches the token and its expiry; on failure records the
    raw error response in ``self._message`` and marks the handler invalid.
    """
    if token_url is None:
        token_url = "https://www.arcgis.com/sharing/rest/oauth2/token"
    params = {
        "client_id": client_id,
        "client_secret": secret_id,
        "grant_type": "client_credentials",
        "f": "json",
    }
    token = self._post(url=token_url,
                       param_dict=params,
                       securityHandler=None,
                       proxy_port=self._proxy_port,
                       proxy_url=self._proxy_url)
    if 'access_token' in token:
        self._token = token['access_token']
        self._expires_in = token['expires_in']
        self._token_created_on = datetime.datetime.now()
        self._token_expires_on = self._token_created_on + datetime.timedelta(
            seconds=int(token['expires_in']))
        self._valid = True
        self._message = "Token Generated"
    else:
        self._token = None
        self._expires_in = None
        self._token_created_on = None
        self._token_expires_on = None
        self._valid = False
        self._message = token
generates a token based on the OAuth security model
320
11
245,032
def referer_url(self, value):
    """Set the referer url, invalidating the cached token on change."""
    if self._referer_url == value:
        return
    self._token = None
    self._referer_url = value
sets the referer url
37
5
245,033
def __getRefererUrl(self, url=None):
    """Determine the referer url for the token handler.

    Fetches the portal self resource, then pins the referer to
    "arcgis.com" and clears the cached token.
    """
    if url is None:
        url = "http://www.arcgis.com/sharing/rest/portals/self"
    params = {"f": "json", "token": self.token}
    val = self._get(url=url,
                    param_dict=params,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    # hard-coded referer; the derived value is intentionally unused:
    # "http://%s.%s" % (val['urlKey'], val['customBaseUrl'])
    self._referer_url = "arcgis.com"
    self._token = None
    return self._referer_url
gets the referer url for the token handler
153
9
245,034
def servertoken(self, serverURL, referer):
    """Return the server token, regenerating it when absent, expired, or
    requested for a different server."""
    stale = (self._server_token is None
             or self._server_token_expires_on is None
             or datetime.datetime.now() >= self._server_token_expires_on
             or self._server_url != serverURL)
    if stale:
        self._server_url = serverURL
        result = self._generateForServerTokenSecurity(
            serverURL=serverURL,
            token=self.token,
            tokenUrl=self._token_url,
            referer=referer)
        if 'error' in result:
            self._valid = False
            self._message = result
        else:
            self._valid = True
            self._message = "Server Token Generated"
    return self._server_token
returns the server token for the server
164
8
245,035
def exportCertificate(self, certificate, folder):
    """Download an SSL certificate from the machine into ``folder``."""
    cert_url = self._url + "/sslcertificates/%s/export" % certificate
    return self._get(url=cert_url,
                     param_dict={"f": "json"},
                     out_folder=folder)
gets the SSL Certificates for a given machine
66
10
245,036
def currentVersion(self):
    """Return the site's current version, initializing on first access."""
    if self._currentVersion is None:
        self.__init(self._url)
    return self._currentVersion
returns the current version of the site
32
8
245,037
def portals(self):
    """Return the Portals administration entry point for this organization."""
    portals_url = "%s/portals" % self.root
    return _portals.Portals(url=portals_url,
                            securityHandler=self._securityHandler,
                            proxy_url=self._proxy_url,
                            proxy_port=self._proxy_port)
returns the Portals class that provides administration access into a given organization
61
14
245,038
def oauth2(self):
    """Return the oauth2 helper rooted at this url's /oauth2 endpoint."""
    if self._url.endswith("/oauth2"):
        oauth_url = self._url
    else:
        oauth_url = self._url + "/oauth2"
    return _oauth2.oauth2(oauth_url=oauth_url,
                          securityHandler=self._securityHandler,
                          proxy_url=self._proxy_url,
                          proxy_port=self._proxy_port)
returns the oauth2 class
93
7
245,039
def community(self):
    """Return the portal community root (user and group resources)."""
    return _community.Community(url=self._url + "/community",
                                securityHandler=self._securityHandler,
                                proxy_url=self._proxy_url,
                                proxy_port=self._proxy_port)
The portal community root covers user and group resources and operations .
54
12
245,040
def content(self):
    """Return access into the site's content."""
    return _content.Content(url=self._url + "/content",
                            securityHandler=self._securityHandler,
                            proxy_url=self._proxy_url,
                            proxy_port=self._proxy_port)
returns access into the site's content
54
8
245,041
def search(self, q, t=None, focus=None, bbox=None, start=1, num=10,
           sortField=None, sortOrder="asc", useSecurity=True):
    """Search for content items in the portal.

    Results only contain items the querying user may access; see the
    portal Search reference for the query syntax.  When ``useSecurity``
    is True and a token-based handler is attached, its token is sent.
    """
    if self._url.endswith("/rest"):
        search_url = self._url + "/search"
    else:
        search_url = self._url + "/rest/search"
    params = {
        "f": "json",
        "q": q,
        "sortOrder": sortOrder,
        "num": num,
        "start": start,
        'restrict': useSecurity,
    }
    if focus is not None:
        params['focus'] = focus
    if t is not None:
        params['t'] = t
    if (useSecurity and self._securityHandler is not None
            and self._securityHandler.method == "token"):
        params["token"] = self._securityHandler.token
    if sortField is not None:
        params['sortField'] = sortField
    if bbox is not None:
        params['bbox'] = bbox
    return self._get(url=search_url,
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
This operation searches for content items in the portal . The searches are performed against a high performance index that indexes the most popular fields of an item . See the Search reference page for information on the fields and the syntax of the query . The search index is updated whenever users add update or delete content . There can be a lag between the time that the content is updated and the time when it s reflected in the search results . The results of a search only contain items that the user has permission to access .
278
100
245,042
def hostingServers(self):
    """Return the objects that manage the site's hosted services.

    Returns AGSAdministration objects when the site is Portal, and
    hostedservice Services objects when it is ArcGIS Online; falls back
    to the portal's registered servers when no feature urls are listed.
    Prints a message and returns early (None) on an error response, or
    prints "Publishing servers not found" when the url structure is
    missing pieces.
    """
    portals = self.portals
    portal = portals.portalSelf
    urls = portal.urls
    if 'error' in urls:
        print(urls)
        return
    services = []
    if urls != {}:
        # walk the nested urls dict: urls -> features -> https/http lists
        if 'urls' in urls:
            if 'features' in urls['urls']:
                if 'https' in urls['urls']['features']:
                    for https in urls['urls']['features']['https']:
                        if portal.isPortal == True:
                            # Portal: admin endpoint lives on the server itself
                            url = "%s/admin" % https
                            #url = https
                            services.append(AGSAdministration(url=url,
                                                              securityHandler=self._securityHandler,
                                                              proxy_url=self._proxy_url,
                                                              proxy_port=self._proxy_port))
                        else:
                            # AGOL: hosted-services admin url includes the portal id
                            url = "https://%s/%s/ArcGIS/rest/admin" % (https, portal.portalId)
                            services.append(Services(url=url,
                                                     securityHandler=self._securityHandler,
                                                     proxy_url=self._proxy_url,
                                                     proxy_port=self._proxy_port))
                elif 'http' in urls['urls']['features']:
                    for http in urls['urls']['features']['http']:
                        if (portal.isPortal == True):
                            url = "%s/admin" % http
                            services.append(AGSAdministration(url=url,
                                                              securityHandler=self._securityHandler,
                                                              proxy_url=self._proxy_url,
                                                              proxy_port=self._proxy_port,
                                                              initialize=True))
                        else:
                            url = "http://%s/%s/ArcGIS/rest/admin" % (http, portal.portalId)
                            services.append(Services(url=url,
                                                     securityHandler=self._securityHandler,
                                                     proxy_url=self._proxy_url,
                                                     proxy_port=self._proxy_port))
                else:
                    print("Publishing servers not found")
            else:
                print("Publishing servers not found")
        else:
            print("Publishing servers not found")
        return services
    else:
        # no feature urls published: fall back to the portal's server list
        for server in portal.servers['servers']:
            url = server['adminUrl'] + "/admin"
            # referer swaps the http port for the https one
            sh = PortalServerSecurityHandler(tokenHandler=self._securityHandler,
                                             serverUrl=url,
                                             referer=server['name'].replace(":6080", ":6443"))
            services.append(AGSAdministration(url=url,
                                              securityHandler=sh,
                                              proxy_url=self._proxy_url,
                                              proxy_port=self._proxy_port,
                                              initialize=False))
        return services
Returns the objects to manage site s hosted services . It returns AGSAdministration object if the site is Portal and it returns a hostedservice . Services object if it is AGOL .
596
37
245,043
def add_codedValue(self, name, code):
    """Append a name/code pair to the coded value list, creating the list
    on first use."""
    if self._codedValues is None:
        self._codedValues = []
    self._codedValues.append({"name": name, "code": code})
adds a value to the coded value list
52
9
245,044
def __init(self):
    """Load the resource's JSON and mirror each response key as an
    attribute on this object."""
    res = self._get(url=self._url,
                    param_dict={"f": "json"},
                    securityHandler=self._securityHandler,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    self._json_dict = res
    self._json_string = json.dumps(self._json_dict)
    for key, val in self._json_dict.items():
        setattr(self, key, val)
loads the json values
117
4
245,045
def areasAndLengths(self, polygons, lengthUnit, areaUnit, calculationType):
    """Calculate areas and perimeter lengths for each input polygon.

    Args:
        polygons: non-empty list of Polygon geometries.
        lengthUnit/areaUnit/calculationType: geometry-service parameters.
    Returns:
        The service response, or an error string when ``polygons`` is not
        a non-empty list.
    """
    if not (isinstance(polygons, list) and len(polygons) > 0):
        return "No polygons provided, please submit a list of polygon geometries"
    params = {
        "f": "json",
        "lengthUnit": lengthUnit,
        "areaUnit": {"areaUnit": areaUnit},
        "calculationType": calculationType,
    }
    first = polygons[0]
    if isinstance(first, Polygon):
        params['sr'] = first.spatialReference['wkid']
        params['polygons'] = [poly.asDictionary for poly in polygons]
    return self._get(url=self._url + "/areasAndLengths",
                     param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The areasAndLengths operation is performed on a geometry service resource . This operation calculates areas and perimeter lengths for each polygon specified in the input array .
221
31
245,046
def __geometryToGeomTemplate(self, geometry):
    """Convert one geometry object into a geometry-service template dict.

    Raises:
        AttributeError: for unsupported geometry types.
    """
    type_map = ((Polyline, "esriGeometryPolyline"),
                (Polygon, "esriGeometryPolygon"),
                (Point, "esriGeometryPoint"),
                (MultiPoint, "esriGeometryMultipoint"),
                (Envelope, "esriGeometryEnvelope"))
    for geom_cls, type_name in type_map:
        if isinstance(geometry, geom_cls):
            return {"geometryType": type_name,
                    "geometry": geometry.asDictionary}
    raise AttributeError("Invalid geometry type")
Converts a single geometry object to a geometry service geometry template value .
200
14
245,047
def __geomToStringArray(self, geometries, returnType="str"):
    """Serialize geometries to a JSON string (default) or a list of dicts.

    Polylines contribute only their ``paths``; unsupported geometry types
    are silently skipped.
    """
    listGeoms = []
    for geom in geometries:
        if isinstance(geom, Point):
            listGeoms.append(geom.asDictionary)
        elif isinstance(geom, Polygon):
            listGeoms.append(geom.asDictionary)
        elif isinstance(geom, Polyline):
            listGeoms.append({'paths': geom.asDictionary['paths']})
    if returnType == "list":
        return listGeoms
    # "str" and any unrecognized returnType fall back to a JSON string
    return json.dumps(listGeoms)
function to convert the geometries to strings
162
10
245,048
def autoComplete(self, polygons=None, polylines=None, sr=None):
    """Construct polygons that fill the gaps between existing polygons
    and a set of polylines.

    Args:
        polygons: list of polygon geometries (default: empty list).
        polylines: list of polyline geometries (default: empty list).
        sr: optional spatial reference for the request.

    Fix: the mutable default arguments ``[]`` are replaced with ``None``
    sentinels (shared-mutable-default pitfall); behavior is unchanged for
    all callers.
    """
    if polygons is None:
        polygons = []
    if polylines is None:
        polylines = []
    url = self._url + "/autoComplete"
    params = {"f": "json"}
    if sr is not None:
        params['sr'] = sr
    params['polygons'] = self.__geomToStringArray(polygons)
    params['polylines'] = self.__geomToStringArray(polylines)
    return self._get(url, param_dict=params,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url,
                     proxy_port=self._proxy_port)
The autoComplete operation simplifies the process of constructing new polygons that are adjacent to other polygons . It constructs polygons that fill in the gaps between existing polygons and a set of polylines .
145
41
245,049
def buffer(self, geometries, inSR, distances, units, outSR=None,
           bufferSR=None, unionResults=True, geodesic=True):
    """Buffer the input geometries at the specified distance(s).

    Buffers may be unioned and may use geodesic distance.  Returns None
    when the (non-empty) geometry list holds an unsupported type.
    """
    params = {
        "f": "json",
        "inSR": inSR,
        "geodesic": geodesic,
        "unionResults": unionResults,
    }
    if isinstance(geometries, list) and len(geometries) > 0:
        sample = geometries[0]
        if isinstance(sample, Polygon):
            geom_type = "esriGeometryPolygon"
        elif isinstance(sample, Point):
            geom_type = "esriGeometryPoint"
        elif isinstance(sample, Polyline):
            geom_type = "esriGeometryPolyline"
        else:
            return None
        params['geometries'] = {
            "geometryType": geom_type,
            "geometries": self.__geomToStringArray(geometries, "list"),
        }
    if isinstance(distances, list):
        params['distances'] = ",".join(str(d) for d in distances)
    else:
        params['distances'] = str(distances)
    params['units'] = units
    if bufferSR is not None:
        params['bufferSR'] = bufferSR
    if outSR is not None:
        params['outSR'] = outSR
    return self._get(self._url + "/buffer",
                     param_dict=params,
                     proxy_port=self._proxy_port,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url)
The buffer operation is performed on a geometry service resource The result of this operation is buffered polygons at the specified distances for the input geometry array . Options are available to union buffers and to use geodesic distance .
431
44
245,050
def findTransformation(self, inSR, outSR, extentOfInterest=None,
                       numOfResults=1):
    """List the geographic transformations applicable when projecting from
    ``inSR`` to ``outSR``, ordered most to least applicable.

    Returns an empty list when the spatial references share the same
    underlying geographic coordinate system (per the service contract).
    """
    params = {"f": "json", "inSR": inSR, "outSR": outSR}
    if isinstance(numOfResults, int):
        params['numOfResults'] = numOfResults
    if isinstance(extentOfInterest, Envelope):
        params['extentOfInterest'] = extentOfInterest.asDictionary
    return self._post(url=self._url + "/findTransformations",
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
The findTransformations operation is performed on a geometry service resource . This operation returns a list of applicable geographic transformations you should use when projecting geometries from the input spatial reference to the output spatial reference . The transformations are in JSON format and are returned in order of most applicable to least applicable . Recall that a geographic transformation is not needed when the input and output spatial references have the same underlying geographic coordinate systems . In this case findTransformations returns an empty list . Every returned geographic transformation is a forward transformation meaning that it can be used as - is to project from the input spatial reference to the output spatial reference . In the case where a predefined transformation needs to be applied in the reverse direction it is returned as a forward composite transformation containing one transformation and a transformForward element with a value of false .
165
161
245,051
def fromGeoCoordinateString(self, sr, strings, conversionType,
                            conversionMode=None):
    """Convert well-known coordinate strings into xy-coordinates.

    Args:
        sr: spatial reference of the output coordinates.
        strings: array of well-known strings to convert.
        conversionType: e.g. "MGRS", "USNG".
        conversionMode: optional mode for some conversion types.
    """
    params = {
        "f": "json",
        "sr": sr,
        "strings": strings,
        "conversionType": conversionType,
    }
    if conversionMode is not None:
        params['conversionMode'] = conversionMode
    return self._post(url=self._url + "/fromGeoCoordinateString",
                      param_dict=params,
                      securityHandler=self._securityHandler,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port)
The fromGeoCoordinateString operation is performed on a geometry service resource . The operation converts an array of well - known strings into xy - coordinates based on the conversion type and spatial reference supplied by the user . An optional conversion mode parameter is available for some conversion types .
135
56
245,052
def toGeoCoordinateString(self, sr, coordinates, conversionType,
                          conversionMode="mgrsDefault", numOfDigits=None,
                          rounding=True, addSpaces=True):
    """Convert x,y coordinates into well-known coordinate strings.

    Wraps the geometry service's ``toGeoCoordinateString`` operation.
    Optional parameters that do not apply to the chosen conversion
    type are ignored by the service.
    """
    url = self._url + "/toGeoCoordinateString"
    params = {
        "f": "json",
        "sr": sr,
        "coordinates": coordinates,
        "conversionType": conversionType,
    }
    if conversionMode is not None:
        params['conversionMode'] = conversionMode
    if isinstance(numOfDigits, int):
        params['numOfDigits'] = numOfDigits
    if isinstance(rounding, int):
        params['rounding'] = rounding
    if isinstance(addSpaces, bool):
        params['addSpaces'] = addSpaces
    return self._post(url=url,
                      param_dict=params,
                      proxy_url=self._proxy_url,
                      proxy_port=self._proxy_port,
                      securityHandler=self._securityHandler)
The toGeoCoordinateString operation is performed on a geometry service resource . The operation converts an array of xy - coordinates into well - known strings based on the conversion type and spatial reference supplied by the user . Optional parameters are available for some conversion types . Note that if an optional parameter is not applicable for a particular conversion type but a value is supplied for that parameter the value will be ignored .
225
81
245,053
def __init_url(self):
    """Load the elevation analysis service URL from the portal's
    self-description and build the backing GP service (uninitialized).

    Bug fix: the presence check previously tested for the 'hydrology'
    helper service while reading the 'elevation' entry — so a portal
    with 'hydrology' but no 'elevation' would crash on the ``"url" in
    None`` test, and a portal with only 'elevation' was skipped.  Both
    the check and the lookup now use 'elevation'.
    """
    portals_self_url = "{}/portals/self".format(self._url)
    params = {"f": "json"}
    if not self._securityHandler is None:
        params['token'] = self._securityHandler.token
    res = self._get(url=portals_self_url,
                    param_dict=params,
                    securityHandler=self._securityHandler,
                    proxy_url=self._proxy_url,
                    proxy_port=self._proxy_port)
    if "helperServices" in res:
        helper_services = res.get("helperServices")
        if "elevation" in helper_services:
            analysis_service = helper_services.get("elevation")
            if "url" in analysis_service:
                self._analysis_url = analysis_service.get("url")
    self._gpService = GPService(url=self._analysis_url,
                                securityHandler=self._securityHandler,
                                proxy_url=self._proxy_url,
                                proxy_port=self._proxy_port,
                                initialize=False)
loads the information into the class
247
6
245,054
def get_argument_parser(name=None, **kwargs):
    """Return the global ArgumentParser registered under *name*.

    The first call for a given name (or any call that passes extra
    kwargs) creates/re-creates the parser via ``init_argument_parser``,
    forwarding the kwargs to the ArgumentParser constructor.  A name of
    None maps to "default".
    """
    if name is None:
        name = "default"
    needs_init = bool(kwargs) or name not in _parsers
    if needs_init:
        init_argument_parser(name, **kwargs)
    return _parsers[name]
Returns the global ArgumentParser instance with the given name . The 1st time this function is called a new ArgumentParser instance will be created for the given name and any args other than name will be passed on to the ArgumentParser constructor .
68
47
245,055
def parse(self, stream):
    """Parse key/value settings from a config-file stream.

    Blank lines, comments (# or ;), section headers ([...]) and
    document separators (---) are skipped.  A bare key is stored as
    "true"; a bracketed value becomes a list of stripped elements.

    :raises ConfigFileParserException: on a line that matches neither
        form
    """
    white_space = "\\s*"
    key_part = "(?P<key>[^:=;#\s]+?)"
    value_part = white_space + "[:=\s]" + white_space + "(?P<value>.+?)"
    comment_part = white_space + "(?P<comment>\\s[;#].*)?"
    flag_re = re.compile("^" + key_part + comment_part + "$")
    pair_re = re.compile("^" + key_part + value_part + comment_part + "$")
    items = OrderedDict()
    for lineno, raw_line in enumerate(stream):
        line = raw_line.strip()
        if not line or line[0] in "#;[" or line.startswith("---"):
            continue
        flag_match = flag_re.match(line)
        if flag_match:
            items[flag_match.group("key")] = "true"
            continue
        pair_match = pair_re.match(line)
        if pair_match:
            value = pair_match.group("value")
            if value.startswith("[") and value.endswith("]"):
                # handle special case of lists
                value = [elem.strip() for elem in value[1:-1].split(",")]
            items[pair_match.group("key")] = value
            continue
        raise ConfigFileParserException(
            "Unexpected line %s in %s: %s"
            % (lineno, getattr(stream, 'name', 'stream'), line))
    return items
Parses the keys + values from a config file .
355
12
245,056
def parse(self, stream):
    """Parse keys and values from a YAML config file.

    The top-level document must be a mapping; list values are kept as
    lists, everything else is stringified.

    :raises ConfigFileParserException: on a parse error or when the
        document is not a mapping
    """
    yaml = self._load_yaml()
    try:
        loaded = yaml.safe_load(stream)
    except Exception as e:
        raise ConfigFileParserException("Couldn't parse config file: %s" % e)
    if not isinstance(loaded, dict):
        raise ConfigFileParserException(
            "The config file doesn't appear to "
            "contain 'key: value' pairs (aka. a YAML mapping). "
            "yaml.load('%s') returned type '%s' instead of 'dict'."
            % (getattr(stream, 'name', 'stream'), type(loaded).__name__))
    result = OrderedDict()
    for key, value in loaded.items():
        result[key] = value if isinstance(value, list) else str(value)
    return result
Parses the keys and values from a config file .
203
12
245,057
def write_config_file(self, parsed_namespace, output_file_paths, exit_after=False):
    """Serialize the parsed settings and write them to each output file.

    All destination paths are validated (opened for writing) up front
    so a bad path fails before anything is serialized.

    :raises ValueError: when a destination cannot be opened
    """
    # validate the output file path
    for path in output_file_paths:
        try:
            with open(path, "w") as output_file:
                pass
        except IOError as e:
            raise ValueError("Couldn't open %s for writing: %s" % (path, e))
    if output_file_paths:
        # generate the config file contents
        config_items = self.get_items_for_config_file_output(
            self._source_to_settings, parsed_namespace)
        contents = self._config_file_parser.serialize(config_items)
        for path in output_file_paths:
            with open(path, "w") as output_file:
                output_file.write(contents)
        message = "Wrote config file to " + ", ".join(output_file_paths)
        if exit_after:
            self.exit(0, message)
        else:
            print(message)
Write the given settings to output files .
254
8
245,058
def convert_item_to_command_line_arg(self, action, key, value):
    """Convert one config-file/env-var key + value into a list of
    command-line args to append to the commandline.

    :param action: the argparse action matching *key*, or None for an
        unknown setting
    :param key: the config-file key
    :param value: the config-file value (str or list)
    :return: list of argument strings
    """
    args = []
    if action is None:
        # unknown setting: synthesize a command-line key for it
        command_line_key = \
            self.get_command_line_key_for_unknown_config_file_setting(key)
    else:
        command_line_key = action.option_strings[-1]
    # handle boolean value
    if action is not None and isinstance(action, ACTION_TYPES_THAT_DONT_NEED_A_VALUE):
        if value.lower() in ("true", "yes", "1"):
            args.append(command_line_key)
        elif value.lower() in ("false", "no", "0"):
            # don't append when set to "false" / "no"
            pass
        else:
            self.error("Unexpected value for %s: '%s'. Expecting 'true', "
                       "'false', 'yes', 'no', '1' or '0'" % (key, value))
    elif isinstance(value, list):
        # list values only make sense for append actions or
        # multi-value (nargs) store actions
        if action is None or isinstance(action, argparse._AppendAction):
            for list_elem in value:
                args.append(command_line_key)
                args.append(str(list_elem))
        elif (isinstance(action, argparse._StoreAction) and
              action.nargs in ('+', '*')) or (
                isinstance(action.nargs, int) and action.nargs > 1):
            args.append(command_line_key)
            for list_elem in value:
                args.append(str(list_elem))
        else:
            self.error(("%s can't be set to a list '%s' unless its action type is changed "
                        "to 'append' or nargs is set to '*', '+', or > 1") % (key, value))
    elif isinstance(value, str):
        args.append(command_line_key)
        args.append(value)
    else:
        raise ValueError("Unexpected value type %s for value: %s" % (
            type(value), value))
    return args
Converts a config file or env var key + value to a list of commandline args to append to the commandline .
480
25
245,059
def get_possible_config_keys(self, action):
    """Return the config-file keys that may set *action*'s value.

    Only actions with a long option (e.g. '--bla') are settable from a
    config file; each such option contributes both 'bla' and '--bla'.
    Write-out-config-file options are never settable.
    """
    # Do not write out the config options for writing out a config file
    if getattr(action, 'is_write_out_config_file_arg', None):
        return []
    keys = []
    for option in action.option_strings:
        if any(option.startswith(char * 2) for char in self.prefix_chars):
            # eg. for '--bla' return ['bla', '--bla']
            keys.extend([option[2:], option])
    return keys
This method decides which actions can be set in a config file and what their keys will be . It returns a list of 0 or more config keys that can be used to set the given action s value in a config file .
125
45
245,060
def eval(lisp):
    """Evaluate plash lisp — a one-dimensional lisp: a list of lists of
    strings, each inner list being a macro name followed by its string
    arguments.

    String results of all macros are joined with newlines; a macro may
    return None to contribute nothing.

    :raises EvalError: on malformed input or a non-string macro result
    :raises MacroNotFoundError: when a macro name is not registered
    :raises MacroError: wrapping any exception raised inside a macro
    """
    macro_values = []
    if not isinstance(lisp, list):
        raise EvalError('eval root element must be a list')
    for item in lisp:
        if not isinstance(item, list):
            raise EvalError('must evaluate list of list')
        if not all(isinstance(i, str) for i in item):
            raise EvalError(
                'must evaluate list of list of strings. not a list of strings: {}'.format(item))
        name = item[0]
        args = item[1:]
        try:
            macro = state['macros'][name]
        except KeyError:
            raise MacroNotFoundError("macro {} not found".format(repr(name)))
        try:
            res = macro(*args)
        except Exception as exc:
            # with PLASH_DEBUG set, surface the original traceback
            if os.getenv('PLASH_DEBUG', '').lower() in ('1', 'yes', 'true'):
                raise
            if isinstance(exc, MacroError):
                # only raise that one time and don't have multiple wrapped MacroError
                raise
            raise MacroError(macro, name, sys.exc_info())
        if not isinstance(res, str) and res is not None:
            raise EvalError(
                'eval macro must return string or None ({} returned {})'.format(name, type(res)))
        if res is not None:
            macro_values.append(res)
    return '\n'.join(macro_values)
plash lisp is one dimensional lisp .
322
10
245,061
def plash_map(*args):
    """Thin wrapper around ``plash map``.

    Returns the resolved value with its trailing newline stripped, or
    None when the map is empty/unset.

    Bug fix: ``check_output`` returns *bytes*, so the old comparison
    ``out == ''`` could never be true and an empty result leaked
    through as '' instead of None.  Decode first, then compare.
    """
    from subprocess import check_output
    out = check_output(['plash', 'map'] + list(args)).decode()
    if out == '':
        return None
    return out.strip('\n')
thin wrapper around plash map
60
6
245,062
def defpm(name, *lines):
    """Define and register a new package-manager macro called *name*.

    The macro formats each template line with the (shell-escaped,
    space-joined) package list and evaluates them as one ``run`` step.
    A call with no packages is a no-op.
    """
    @register_macro(name, group='package managers')
    @shell_escape_args
    def package_manager(*packages):
        if not packages:
            return
        joined = ' '.join(packages)
        return eval([['run'] + [line.format(joined) for line in lines]])
    package_manager.__doc__ = "install packages with {}".format(name)
define a new package manager
114
5
245,063
def layer(command=None, *args):
    """Hint the start of a new layer.

    With no command, emits a single layer hint (built-in behavior).
    Otherwise runs *command* once per argument, inserting a layer hint
    before the first call and after each one.
    """
    if not command:
        return eval([['hint', 'layer']])  # fall back to buildin layer macro
    calls = [['layer']]
    for arg in args:
        calls.append([command, arg])
        calls.append(['layer'])
    return eval(calls)
hints the start of a new layer
84
8
245,064
def import_env(*envs):
    """Import environment variables from the host.

    Each entry is "NAME" or "NAME:EXPORT_AS".  Unset variables are
    silently skipped; set ones are yielded as shell-quoted
    ``EXPORT_AS=value`` strings.
    """
    for spec in envs:
        name, sep, alias = spec.partition(':')
        if not sep:
            alias = spec
        current = os.environ.get(name)
        if current is not None:
            yield '{}={}'.format(alias, shlex.quote(current))
import environment variables from host
94
5
245,065
def write_file(fname, *lines):
    """Generate shell commands that write *lines* to *fname*.

    Yields a ``touch`` for the file followed by one ``echo ... >>``
    append per line.
    """
    yield 'touch {}'.format(fname)
    for content in lines:
        yield "echo {} >> {}".format(content, fname)
write lines to a file
43
5
245,066
def eval_file(file):
    """Evaluate the contents of *file* as plash expressions.

    Expands '~' and resolves symlinks before reading.  One trailing
    newline is stripped from the result so this macro does not add an
    extra one.
    """
    path = os.path.realpath(os.path.expanduser(file))
    with open(path) as handle:
        script = handle.read()
    sh = run_write_read(['plash', 'eval'], script.encode()).decode()
    return sh[:-1] if sh.endswith('\n') else sh
evaluate file content as expressions
113
5
245,067
def eval_string(stri):
    """Evaluate expressions passed as a single string.

    The string is shell-tokenized; tokens are fed to ``plash eval``
    one per line.
    """
    tokens = shlex.split(stri)
    payload = '\n'.join(tokens).encode()
    return run_write_read(['plash', 'eval'], payload).decode()
evaluate expressions passed as string
53
5
245,068
def eval_stdin():
    """Evaluate expressions read from stdin.

    Pipes this process's stdin/stdout straight through ``plash eval``.

    :raises subprocess.CalledProcessError: on a nonzero exit status
    """
    cmd = ['plash', 'eval']
    proc = subprocess.Popen(cmd, stdin=sys.stdin, stdout=sys.stdout)
    returncode = proc.wait()
    if returncode:
        raise subprocess.CalledProcessError(returncode, cmd)
evaluate expressions read from stdin
68
6
245,069
def from_map(map_key):
    """Use a resolved map as the base image.

    :raises MapDoesNotExist: when ``plash map`` returns nothing
    """
    resolved = subprocess.check_output(
        ['plash', 'map', map_key]).decode().strip('\n')
    if not resolved:
        raise MapDoesNotExist('map {} not found'.format(repr(map_key)))
    return hint('image', resolved)
use resolved map as image
89
5
245,070
def fields(self):
    """Filter the serializer's fields according to the request's
    ``fields`` and ``omit`` query parameters.

    Only the root serializer (or the direct child of a many=True list
    root) filters; nested serializers return the full field set.
    """
    fields = super(DynamicFieldsMixin, self).fields
    if not hasattr(self, '_context'):
        # We are being called before a request cycle
        return fields
    # Only filter if this is the root serializer, or if the parent is the
    # root serializer with many=True
    is_root = self.root == self
    parent_is_list_root = self.parent == self.root and getattr(self.parent, 'many', False)
    if not (is_root or parent_is_list_root):
        return fields
    try:
        request = self.context['request']
    except KeyError:
        # warn unless explicitly suppressed via settings
        conf = getattr(settings, 'DRF_DYNAMIC_FIELDS', {})
        if not conf.get('SUPPRESS_CONTEXT_WARNING', False) is True:
            warnings.warn('Context does not have access to request. '
                          'See README for more information.')
        return fields
    # NOTE: drf test framework builds a request object where the query
    # parameters are found under the GET attribute.
    params = getattr(request, 'query_params', getattr(request, 'GET', None))
    if params is None:
        warnings.warn('Request object does not contain query paramters')
    try:
        filter_fields = params.get('fields', None).split(',')
    except AttributeError:
        filter_fields = None
    try:
        omit_fields = params.get('omit', None).split(',')
    except AttributeError:
        omit_fields = []
    # Drop any fields that are not specified in the `fields` argument.
    existing = set(fields.keys())
    if filter_fields is None:
        # no fields param given, don't filter.
        allowed = existing
    else:
        allowed = set(filter(None, filter_fields))
    # omit fields in the `omit` argument.
    omitted = set(filter(None, omit_fields))
    for field in existing:
        if field not in allowed:
            fields.pop(field, None)
        if field in omitted:
            fields.pop(field, None)
    return fields
Filters the fields according to the fields query parameter .
463
11
245,071
def setup_admin_on_rest_handlers(admin, admin_handler):
    """Initialize the admin application's routes (index, token, static
    assets and logout)."""
    handler = admin_handler
    static_folder = str(PROJ_ROOT / 'static')
    router = admin.router
    router.add_route('GET', '', handler.index_page, name='admin.index')
    router.add_route('POST', '/token', handler.token, name='admin.token')
    router.add_static('/static', path=static_folder, name='admin.static')
    router.add_route('DELETE', '/logout', handler.logout, name='admin.logout')
Initialize routes .
161
4
245,072
async def index_page(self, request):
    """Render the admin index page seeded with the initial schema
    state."""
    initial_state = self.schema.to_json()
    return render_template(
        self.template,
        request,
        {"initial_state": initial_state},
        app_key=TEMPLATE_APP_KEY,
    )
Return index page with initial state for admin
58
8
245,073
async def logout(self, request):
    """Log the user out by destroying their auth token.

    :raises JsonValidaitonError: when no Authorization header is sent
    """
    if "Authorization" not in request.headers:
        raise JsonValidaitonError(
            "Auth header is not present, can not destroy token")
    response = json_response()
    await forget(request, response)
    return response
Simple handler for logout
62
5
245,074
def validate_query_structure(query):
    """Validate the query arguments of a list request.

    ``_filters`` (if present) must be JSON; the whole query must
    satisfy the ``ListQuery`` schema.

    :raises JsonValidaitonError: on bad JSON or a schema violation
    """
    query_dict = dict(query)
    raw_filters = query_dict.pop('_filters', None)
    if raw_filters:
        try:
            query_dict['_filters'] = json.loads(raw_filters)
        except ValueError:
            raise JsonValidaitonError('_filters field can not be serialized')
    try:
        validated = ListQuery(query_dict)
    except t.DataError as exc:
        raise JsonValidaitonError('_filters query invalid', **as_dict(exc))
    return validated
Validate query arguments in list request .
138
8
245,075
def to_json(self):
    """Serialize the admin schema as JSON for admin-on-rest's initial
    state: the title plus each endpoint's dict (with field types),
    sorted by endpoint name."""
    endpoints = []
    for endpoint in self.endpoints:
        meta = endpoint.Meta
        entry = endpoint.to_dict()
        entry['fields'] = meta.resource_type.get_type_of_fields(
            endpoint.fields, meta.table,
        )
        endpoints.append(entry)
    payload = {
        'title': self.title,
        'endpoints': sorted(endpoints, key=lambda item: item['name']),
    }
    return json.dumps(payload)
Prepare data for the initial state of the admin - on - rest
130
14
245,076
def resources(self):
    """Return (resource_type, info) pairs for every registered
    endpoint, where info carries the endpoint's table and url."""
    return [
        (
            endpoint.Meta.resource_type,
            {'table': endpoint.Meta.table, 'url': endpoint.name},
        )
        for endpoint in self.endpoints
    ]
Return list of all registered resources .
67
7
245,077
def get_type_of_fields(fields, table):
    """Map the given field names to admin data types.

    Falls back to the table's primary key when *fields* is empty, and
    to a text field for unknown column types.
    """
    if not fields:
        fields = table.primary_key
    return {
        name: FIELD_TYPES.get(type(column.type), rc.TEXT_FIELD.value)
        for name, column in table.c.items()
        if name in fields
    }
Return data types of fields that are in table . If a given parameter is empty return primary key .
101
20
245,078
def get_type_for_inputs(table):
    """Describe every column of *table* as an admin input-field dict
    (input type, name, primary-key flag, empty props)."""
    inputs = []
    for name, column in table.c.items():
        inputs.append(dict(
            type=INPUT_TYPES.get(type(column.type), rc.TEXT_INPUT.value),
            name=name,
            isPrimaryKey=(name in table.primary_key),
            props=None,
        ))
    return inputs
Return information about table s fields in dictionary type .
85
10
245,079
def _setup(app, *, schema, title=None, app_key=APP_KEY, db=None):
    """Initialize the admin-on-rest sub-application.

    Creates the admin app, wires up jinja2 templating, instantiates
    one resource per schema endpoint, registers the handler and its
    routes, and stores everything on the parent *app*.
    """
    admin = web.Application(loop=app.loop)
    app[app_key] = admin
    aiohttp_jinja2.setup(
        admin,
        loader=jinja2.FileSystemLoader([TEMPLATES_ROOT]),
        app_key=TEMPLATE_APP_KEY,
    )
    if title:
        schema.title = title
    resources = [
        init(db, info['table'], url=info['url'])
        for init, info in schema.resources
    ]
    admin_handler = AdminOnRestHandler(
        admin,
        resources=resources,
        loop=app.loop,
        schema=schema,
    )
    admin['admin_handler'] = admin_handler
    setup_admin_on_rest_handlers(admin, admin_handler)
    return admin
Initialize the admin - on - rest admin
195
9
245,080
def to_dict(self):
    """Return all base information about this resource: name,
    permissions, page size, and the show/edit/create page layouts."""
    return {
        "name": self.name,
        "canEdit": self.can_edit,
        "canCreate": self.can_create,
        "canDelete": self.can_delete,
        "perPage": self.per_page,
        "showPage": self.generate_data_for_show_page(),
        "editPage": self.generate_data_for_edit_page(),
        "createPage": self.generate_data_for_create_page(),
    }
Return dict with the all base information about the instance .
123
11
245,081
def generate_data_for_edit_page(self):
    """Describe the edit page: the custom edit form when one is
    configured, the default layout otherwise, and {} when editing is
    disabled."""
    if not self.can_edit:
        return {}
    form = self.edit_form
    return form.to_dict() if form else self.generate_simple_data_page()
Generate a custom representation of table s fields in dictionary type if exist edit form else use default representation .
56
21
245,082
def generate_data_for_create_page(self):
    """Describe the create page: the custom create form when one is
    configured, the default layout otherwise, and {} when creation is
    disabled."""
    if not self.can_create:
        return {}
    form = self.create_form
    return form.to_dict() if form else self.generate_simple_data_page()
Generate a custom representation of table s fields in dictionary type if exist create form else use default representation .
56
21
245,083
async def register(self, request):
    """Register a new user.

    GET renders the registration form (returns the template context);
    POST validates the submitted form and inserts the user, then
    redirects to the login page.  Already-authenticated users are sent
    to the timeline.
    """
    session = await get_session(request)
    user_id = session.get('user_id')
    if user_id:
        return redirect(request, 'timeline')
    error = None
    form = None
    if request.method == 'POST':
        form = await request.post()
        # look up the name first so the duplicate check below works
        user_id = await db.get_user_id(self.mongo.user, form['username'])
        if not form['username']:
            error = 'You have to enter a username'
        elif not form['email'] or '@' not in form['email']:
            error = 'You have to enter a valid email address'
        elif not form['password']:
            error = 'You have to enter a password'
        elif form['password'] != form['password2']:
            error = 'The two passwords do not match'
        elif user_id is not None:
            error = 'The username is already taken'
        else:
            await self.mongo.user.insert(
                {'username': form['username'],
                 'email': form['email'],
                 'pw_hash': generate_password_hash(form['password'])})
            return redirect(request, 'login')
    return {"error": error, "form": form}
Registers the user .
296
5
245,084
async def follow_user(self, request):
    """Add the current user as a follower of the given user.

    :raises web.HTTPNotAuthorized: when there is no logged-in session
    """
    username = request.match_info['username']
    session = await get_session(request)
    user_id = session.get('user_id')
    if not user_id:
        raise web.HTTPNotAuthorized()
    whom_id = await db.get_user_id(self.mongo.user, username)
    if whom_id is None:
        # NOTE(review): HTTPFound without a Location header looks
        # suspicious — possibly HTTPNotFound was intended; confirm.
        raise web.HTTPFound()
    await self.mongo.follower.update(
        {'who_id': ObjectId(user_id)},
        {'$push': {'whom_id': whom_id}},
        upsert=True)
    return redirect(request, 'user_timeline', parts={"username": username})
Adds the current user as follower of the given user .
171
11
245,085
async def add_message(self, request):
    """Register a new message for the logged-in user.

    Empty messages are silently ignored; on completion the client is
    redirected to the timeline.

    :raises web.HTTPNotAuthorized: when there is no logged-in session
    """
    session = await get_session(request)
    user_id = session.get('user_id')
    if not user_id:
        raise web.HTTPNotAuthorized()
    form = await request.post()
    if form.get('text'):
        # denormalize the author's email/username into the message doc
        user = await self.mongo.user.find_one(
            {'_id': ObjectId(session['user_id'])},
            {'email': 1, 'username': 1})
        await self.mongo.message.insert(
            {'author_id': ObjectId(user_id),
             'email': user['email'],
             'username': user['username'],
             'text': form['text'],
             'pub_date': datetime.datetime.utcnow()})
    return redirect(request, 'timeline')
Registers a new message for the user .
203
9
245,086
def robo_avatar_url(user_data, size=80):
    """Return a robohash.org avatar URL for *user_data*.

    The identifier is stringified, trimmed and lowercased before
    hashing so equivalent inputs map to the same avatar.
    """
    normalized = str(user_data).strip().lower()
    digest = md5(normalized.encode('utf-8')).hexdigest()
    return "https://robohash.org/{hash}.png?size={size}x{size}".format(
        hash=digest, size=size)
Return the gravatar image for the given email address .
90
11
245,087
def waitgrab(self, timeout=60, autocrop=True, cb_imgcheck=None):
    """Repeatedly take screenshots until a non-empty one is captured.

    :param timeout: give up after roughly this many seconds
    :param autocrop: passed through to :meth:`grab`
    :param cb_imgcheck: optional predicate; when given, keep retrying
        until it returns True for the current screenshot
    :return: the captured image
    :raises DisplayTimeoutError: when *timeout* elapses first

    Cleanup: removed an unreachable ``break`` after the ``raise`` and
    leftover commented-out code.
    """
    t = 0
    sleep_time = 0.3  # short first wait, for fast windows
    repeat_time = 1
    while 1:
        log.debug('sleeping %s secs' % str(sleep_time))
        time.sleep(sleep_time)
        t += sleep_time
        img = self.grab(autocrop=autocrop)
        if img:
            if not cb_imgcheck:
                break
            if cb_imgcheck(img):
                break
        # back off progressively between retries
        sleep_time = repeat_time
        repeat_time += 1
        if t > timeout:
            msg = 'Timeout! elapsed time:%s timeout:%s ' % (t, timeout)
            raise DisplayTimeoutError(msg)
        log.debug('screenshot is empty, next try..')
    assert img
    return img
start process and create screenshot . Repeat screenshot until it is not empty and cb_imgcheck callback function returns True for current screenshot .
200
27
245,088
def _setup_xauth(self):
    """Create a temporary Xauthority file, point AUTHFILE/XAUTHORITY
    at it, and register a fresh cookie for the new display."""
    handle, filename = tempfile.mkstemp(prefix='PyVirtualDisplay.',
                                        suffix='.Xauthority')
    self._xauth_filename = filename
    os.close(handle)
    # Save old environment so _clear_xauth can restore it
    self._old_xauth = {
        'AUTHFILE': os.getenv('AUTHFILE'),
        'XAUTHORITY': os.getenv('XAUTHORITY'),
    }
    os.environ['AUTHFILE'] = os.environ['XAUTHORITY'] = filename
    cookie = xauth.generate_mcookie()
    xauth.call('add', self.new_display_var, '.', cookie)
Set up the Xauthority file and the XAUTHORITY environment variable .
182
17
245,089
def _clear_xauth ( self ) : os . remove ( self . _xauth_filename ) for varname in [ 'AUTHFILE' , 'XAUTHORITY' ] : if self . _old_xauth [ varname ] is None : del os . environ [ varname ] else : os . environ [ varname ] = self . _old_xauth [ varname ] self . _old_xauth = None
Clear the Xauthority file and restore the environment variables .
97
12
245,090
def GetCookies(self):
    """Sign in to the SharePoint site and return its auth cookies,
    which the other calls use for authentication."""
    sectoken = self.GetSecurityToken(self.Username, self.Password)
    login_url = self.share_point_site + '/_forms/default.aspx?wa=wsignin1.0'
    response = requests.post(login_url, data=sectoken)
    return response.cookies
Grabs the cookies form your Office Sharepoint site and uses it as Authentication for the rest of the calls
69
21
245,091
def DeleteList(self, listName):
    """Delete the SharePoint List with the given name.

    :return: the response body text on HTTP 200, otherwise the raw
        response object

    Bug fix: the old code compared the Response *object* itself to 200
    (``if response == 200``), which is never true, so the success
    branch was unreachable.  Check ``response.status_code`` like the
    sibling methods do.
    """
    # Build Request
    soap_request = soap('DeleteList')
    soap_request.add_parameter('listName', listName)
    self.last_request = str(soap_request)
    # Send Request
    response = self._session.post(url=self._url('Lists'),
                                  headers=self._headers('DeleteList'),
                                  data=str(soap_request),
                                  verify=self._verify_ssl,
                                  timeout=self.timeout)
    # Parse Request
    if response.status_code == 200:
        return response.text
    else:
        return response
Delete a List with given name
133
6
245,092
def GetListCollection(self):
    """Return information about every List on the current Site.

    Sends a SiteData ``GetListCollection`` SOAP request and, on HTTP
    200, parses the envelope into a list of dicts — one per List, with
    the SharePoint SOAP namespace stripped from the keys.  Any other
    status returns the raw response object.
    """
    # Build Request
    soap_request = soap('GetListCollection')
    self.last_request = str(soap_request)
    # Send Request
    response = self._session.post(url=self._url('SiteData'),
                                  headers=self._headers('GetListCollection'),
                                  data=str(soap_request),
                                  verify=self._verify_ssl,
                                  timeout=self.timeout)
    # Parse Response
    if response.status_code == 200:
        envelope = etree.fromstring(response.text.encode('utf-8'),
                                    parser=etree.XMLParser(huge_tree=self.huge_tree))
        # positional access into the SOAP envelope:
        # [0][0][0] is the result code (unused), [0][0][1] the list nodes
        result = envelope[0][0][0].text
        lists = envelope[0][0][1]
        data = []
        for _list in lists:
            _list_data = {}
            for item in _list:
                # strip the SharePoint SOAP namespace from element tags
                key = item.tag.replace('{http://schemas.microsoft.com/sharepoint/soap/}', '')
                value = item.text
                _list_data[key] = value
            data.append(_list_data)
        return data
    else:
        return response
Returns List information for current Site
261
6
245,093
def _convert_to_internal ( self , data ) : for _dict in data : keys = list ( _dict . keys ( ) ) [ : ] for key in keys : if key not in self . _disp_cols : raise Exception ( key + ' not a column in current List.' ) _dict [ self . _disp_cols [ key ] [ 'name' ] ] = self . _sp_type ( key , _dict . pop ( key ) )
From Column Title to Column_x0020_Title
104
11
245,094
def _convert_to_display ( self , data ) : for _dict in data : keys = list ( _dict . keys ( ) ) [ : ] for key in keys : if key not in self . _sp_cols : raise Exception ( key + ' not a column in current List.' ) _dict [ self . _sp_cols [ key ] [ 'name' ] ] = self . _python_type ( key , _dict . pop ( key ) )
From Column_x0020_Title to Column Title
102
11
245,095
def GetView(self, viewname):
    """Return info on the named View of the current List.

    When *viewname* is None the List's default view is looked up
    first.  On HTTP 200 returns ``{'info': {...}, 'fields': [...]}``;
    otherwise raises with the status code and body.

    Idiom fix: ``viewname == None`` replaced with ``viewname is None``.
    """
    # Build Request
    soap_request = soap('GetView')
    soap_request.add_parameter('listName', self.listName)
    if viewname is None:
        # fall back to the List's default view
        views = self.GetViewCollection()
        for view in views:
            if 'DefaultView' in view:
                if views[view]['DefaultView'] == 'TRUE':
                    viewname = view
                    break
    if self.listName not in ['UserInfo', 'User Information List']:
        # strip the surrounding braces from the stored view GUID
        soap_request.add_parameter('viewName',
                                   self.views[viewname]['Name'][1:-1])
    else:
        soap_request.add_parameter('viewName', viewname)
    self.last_request = str(soap_request)
    # Send Request
    response = self._session.post(url=self._url('Views'),
                                  headers=self._headers('GetView'),
                                  data=str(soap_request),
                                  verify=self._verify_ssl,
                                  timeout=self.timeout)
    # Parse Response
    if response.status_code == 200:
        envelope = etree.fromstring(response.text.encode('utf-8'),
                                    parser=etree.XMLParser(huge_tree=self.huge_tree))
        view = envelope[0][0][0][0]
        info = {key: value for (key, value) in view.items()}
        fields = [x.items()[0][1] for x in view[1]]
        return {'info': info, 'fields': fields}
    else:
        raise Exception("ERROR:", response.status_code, response.text)
Get Info on View Name
382
5
245,096
def UpdateListItems(self, data, kind):
    """Update List items; *kind* is 'New', 'Update' or 'Delete'.

    *data* must be a list of dicts.  For non-delete operations the
    display column names are converted to internal names first.

    :return: dict of per-ID results on HTTP 200, else the raw response

    Idiom fix: ``type(data) != list`` replaced with the isinstance
    check (also accepts list subclasses).
    """
    if not isinstance(data, list):
        raise Exception('data must be a list of dictionaries')
    # Build Request
    soap_request = soap('UpdateListItems')
    soap_request.add_parameter('listName', self.listName)
    if kind != 'Delete':
        self._convert_to_internal(data)
    soap_request.add_actions(data, kind)
    self.last_request = str(soap_request)
    # Send Request
    response = self._session.post(url=self._url('Lists'),
                                  headers=self._headers('UpdateListItems'),
                                  data=str(soap_request),
                                  verify=self._verify_ssl,
                                  timeout=self.timeout)
    # Parse Response
    if response.status_code == 200:
        envelope = etree.fromstring(response.text.encode('utf-8'),
                                    parser=etree.XMLParser(huge_tree=self.huge_tree))
        results = envelope[0][0][0][0]
        data = {}
        for result in results:
            # '0x00000000' indicates success; other codes carry an
            # error message in the second child
            if result.text != '0x00000000' and result[0].text != '0x00000000':
                data[result.attrib['ID']] = (result[0].text, result[1].text)
            else:
                data[result.attrib['ID']] = result[0].text
        return data
    else:
        return response
Update List Items kind = New Update or Delete
331
9
245,097
def GetAttachmentCollection(self, _id):
    """Return attachment URLs for the List item with the given ID.

    :return: list of attachment URL strings on HTTP 200, else the raw
        response object

    Consistency/security fix: this method hard-coded ``verify=False``
    while every sibling request honors ``self._verify_ssl``; it now
    uses the configured SSL verification setting too.
    """
    # Build Request
    soap_request = soap('GetAttachmentCollection')
    soap_request.add_parameter('listName', self.listName)
    soap_request.add_parameter('listItemID', _id)
    self.last_request = str(soap_request)
    # Send Request
    response = self._session.post(url=self._url('Lists'),
                                  headers=self._headers('GetAttachmentCollection'),
                                  data=str(soap_request),
                                  verify=self._verify_ssl,
                                  timeout=self.timeout)
    # Parse Request
    if response.status_code == 200:
        envelope = etree.fromstring(response.text.encode('utf-8'),
                                    parser=etree.XMLParser(huge_tree=self.huge_tree))
        attaches = envelope[0][0][0][0]
        attachments = []
        for attachment in attaches.getchildren():
            attachments.append(attachment.text)
        return attachments
    else:
        return response
Get Attachments for given List Item ID
232
9
245,098
def changes(new_cmp_dict, old_cmp_dict, id_column, columns):
    """Return change records for rows present in both dictionaries.

    For each key shared by both inputs, compare the values of
    *columns*; when a value changed (and the new value is not 'NaN'),
    emit a dict containing the changed column(s) plus the row's
    *id_column* value taken from *old_cmp_dict*.

    :param new_cmp_dict: mapping row-key -> row dict (new data)
    :param old_cmp_dict: mapping row-key -> row dict (old data); each
        row must contain *id_column* when id_column is given
    :param id_column: identifier column copied into each change record
        (may be None)
    :param columns: column names to compare
    :return: list of change dicts (empty when nothing changed)

    Cleanup: removed an unused per-row ``dict_keys`` intersection that
    was computed on every iteration; replaced ``!= None`` with
    ``is not None``.
    """
    update_ldict = []
    same_keys = set(new_cmp_dict).intersection(set(old_cmp_dict))
    for same_key in same_keys:
        old_dict = old_cmp_dict[same_key]
        new_dict = new_cmp_dict[same_key]
        update_dict = {}
        for dict_key in columns:
            # missing keys default to 'NaN' so they never count as a
            # change on their own
            old_val = old_dict.get(dict_key, 'NaN')
            new_val = new_dict.get(dict_key, 'NaN')
            if old_val != new_val and new_val != 'NaN':
                if id_column is not None:
                    try:
                        update_dict[id_column] = old_dict[id_column]
                    except KeyError:
                        print("Input Dictionary 'old_cmp_dict' must have ID column")
                update_dict[dict_key] = new_val
        if update_dict:
            update_ldict.append(update_dict)
    return update_ldict
Return a list dict of the changes of the rows that exist in both dictionaries User must provide an ID column for old_cmp_dict
280
28
245,099
def unique(new_cmp_dict, old_cmp_dict):
    """Return the rows of *new_cmp_dict* whose keys do not appear in
    *old_cmp_dict*."""
    fresh_keys = set(new_cmp_dict) - set(old_cmp_dict)
    return [new_cmp_dict[key] for key in fresh_keys]
Return a list dict of the unique keys in new_cmp_dict
78
14