index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
40,175
|
onkelrow/First
|
refs/heads/master
|
/sd_files_vorbereiten.py
|
import json
import urllib, httplib
import sys
import getpass
import arcpy
import os
import codecs
def main(argv=None):
    """Interactively publish the ArcGIS services listed in a pipe-delimited file.

    Prompts for admin credentials, server name/port and the services file,
    creates an .ags connection file in the current directory, then delegates
    publishing to addServices().  Exits with status 1 on connection failure.
    """
    # Ask for admin user name and password
    username = raw_input("Enter user name: ")
    password = getpass.getpass("Enter password: ")
    # Ask for server name and the port
    serverName = raw_input("Enter server name: ")
    serverPort = raw_input("Enter server port: ")
    # Get a token and connect
    token = getToken(username, password, serverName, serverPort)
    if token == "":
        # NOTE(review): confirm getToken() returns "" (not None) on failure,
        # otherwise this sentinel check never fires.
        sys.exit(1)
    # Create a connection file to the server
    serverURL = "http://" + serverName + ":" + str(serverPort) + "/arcgis/admin"
    try:
        arcpy.mapping.CreateGISServerConnectionFile("PUBLISH_GIS_SERVICES", os.curdir, serverName + ".ags", serverURL,
                                                    "ARCGIS_SERVER", username=username, password=password)
    except Exception, e:
        print e.message
    agsConnection = os.path.join(os.curdir, serverName + ".ags")
    if not os.path.isfile(agsConnection):
        print("Unable to connect to ArcGIS Server -- exiting")
        sys.exit(1)
    # Input file that contains the services information
    servicesFile = raw_input("Path to pipe-delimited text file containing services: ")
    num = 0
    services = {}
    # Each row holds key=value fields (SD, serviceName, folderName, clusterName,
    # startupType); translate them into UploadServiceDefinition keyword args.
    for serviceRow in readlinesFromInputFile(servicesFile):
        serviceEntry = {}
        for index in range(len(serviceRow)):
            serviceProp = serviceRow[index].split("=")
            if serviceProp[0] == "SD":
                serviceEntry["in_sd_file"] = serviceProp[1]
            if serviceProp[0] == "serviceName":
                serviceEntry["in_service_name"] = serviceProp[1]
            if serviceProp[0] == "folderName":
                serviceEntry["in_folder"] = serviceProp[1]
                # Mark the folder EXISTING/NEW depending on whether it is
                # already present on the server.
                if isFolderPresent(serviceProp[1], serverName, serverPort, token):
                    serviceEntry["in_folder_type"] = "EXISTING"
                else:
                    serviceEntry["in_folder_type"] = "NEW"
            if serviceProp[0] == "clusterName":
                serviceEntry["in_cluster"] = serviceProp[1]
            if serviceProp[0] == "startupType":
                serviceEntry["in_startupType"] = serviceProp[1]
        # Add the services information to a dictionary
        services["service" + str(num)] = serviceEntry
        num += 1
    # Call helper functions to publish services
    addServices(services, serverName, serverPort, token, agsConnection)
# A function that reads lines from the input file
def readlinesFromInputFile(filename, delim='|'):
    """Yield each data row of *filename* split on *delim*.

    Lines that are empty (after stripping trailing whitespace/newlines) or that
    start with '#' are skipped.  The file is decoded as UTF-8 with any BOM
    discarded ('utf-8-sig').

    Bug fix: the original opened the file and called ``file.close()`` after the
    loop inside this generator — the close only ran if the caller fully
    exhausted the generator (and ``file`` shadowed the builtin).  The context
    manager now releases the handle deterministically.
    """
    with codecs.open(filename, 'r', 'utf-8-sig') as input_file:
        for line in input_file:
            # Remove the trailing whitespaces and the newline characters
            line = line.rstrip()
            # Skip comment lines (leading '#') and empty lines
            if line.startswith('#') or len(line) == 0:
                continue
            # Split the current line into a list of fields
            yield line.split(delim)
def addServices(serviceDict, serverName, serverPort, token, agsConnection):
    """Publish every service described in *serviceDict*.

    For each entry: verify the .sd file exists, delete any already-published
    service of the same name (optionally inside its folder), then upload the
    service definition through the .ags connection.  Failures are printed and
    the loop continues with the next service.
    """
    for serviceToAdd in serviceDict:
        # Check to see that SD is present and that it is reachable.
        # NOTE(review): assumes every entry carries 'in_sd_file'; a row without
        # an SD= field would raise KeyError here — confirm input format.
        if not os.path.isfile(serviceDict[serviceToAdd]['in_sd_file']):
            print("Unable to access '" + serviceDict[serviceToAdd]['in_sd_file'] + "'. Skipping to publish.")
        else:
            # Delete the service first (if it exists) and then re-publish it
            if serviceDict[serviceToAdd].has_key("in_service_name"):
                if serviceDict[serviceToAdd].has_key("in_folder"):
                    deleteServiceIfPresent(serverName, serverPort, token, serviceDict[serviceToAdd]["in_service_name"],
                                           serviceDict[serviceToAdd]["in_folder"])
                else:
                    deleteServiceIfPresent(serverName, serverPort, token, serviceDict[serviceToAdd]["in_service_name"])
            # The entry's keys match UploadServiceDefinition_server kwargs.
            serviceDict[serviceToAdd]["in_server"] = agsConnection
            print "Publishing the service: " + serviceDict[serviceToAdd]['in_sd_file']
            try:
                arcpy.UploadServiceDefinition_server(**serviceDict[serviceToAdd])
                print "Successfully published the service: " + serviceDict[serviceToAdd]['in_sd_file']
            except Exception, e:
                print "Publishing of " + serviceDict[serviceToAdd]['in_sd_file'] + " failed."
                print e.message
# A function that will post HTTP POST request to the server
def postToServer(serverName, serverPort, url, params):
    """POST *params* to *url* on serverName:serverPort; return (response, body)."""
    # Form-encoded POST headers expected by the ArcGIS admin endpoints.
    headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
    # Percent-encode the resource path (handles non-ASCII folder/service names).
    encoded_url = urllib.quote(url.encode('utf-8'))
    connection = httplib.HTTPConnection(serverName, serverPort)
    connection.request("POST", encoded_url, params, headers)
    response = connection.getresponse()
    body = response.read()
    connection.close()
    return (response, body)
# A function that checks that the JSON response received from the server does not contain an error
def assertJsonSuccess(data):
    """Return False when the JSON payload *data* reports status == "error"."""
    parsed = json.loads(data)
    return not ('status' in parsed and parsed['status'] == "error")
def getToken(username, password, serverName, serverPort):
tokenURL = "/arcgis/admin/generateToken"
params = urllib.urlencode({'username': username, 'password': password, 'client': 'requestip', 'f': 'json'})
response, data = postToServer(serverName, serverPort, tokenURL, params)
if (response.status != 200 or not assertJsonSuccess(data)):
print "Error while fetching tokens from admin URL. Please check if the server is running and ensure that the username/password provided are correct"
print str(data)
return
else:
# Extract the token from it
token = json.loads(data)
return token['token']
def isFolderPresent(folderName, serverName, serverPort, token):
params = urllib.urlencode({'token': token, 'f': 'json'})
folderURL = "/arcgis/admin/services"
response, data = postToServer(serverName, serverPort, folderURL, params)
if (response.status != 200 or not assertJsonSuccess(data)):
print "Error while fetching folders from the server."
print str(data)
return
servicesJSON = json.loads(data)
folders = servicesJSON['folders']
for folder in folders:
if folder == folderName:
return True
return False
def deleteServiceIfPresent(serverName, serverPort, token, serviceName, folderName='root'):
    """Delete *serviceName* from *folderName* on the server if it exists.

    Lists the folder's services and POSTs the admin /delete operation for each
    entry whose serviceName matches.  Errors are printed and the function
    returns early; nothing is returned on success.
    """
    # If the folder itself is not present, we do not need to check for the service's presence in this folder.
    if folderName != 'root' and not isFolderPresent(folderName, serverName, serverPort, token):
        return
    params = urllib.urlencode({'token': token, 'f': 'json'})
    # Root services live directly under /services; foldered ones one level down.
    if folderName == 'root':
        URL = "/arcgis/admin/services/"
    else:
        URL = "/arcgis/admin/services/" + folderName
    response, data = postToServer(serverName, serverPort, URL, params)
    if (response.status != 200 or not assertJsonSuccess(data)):
        print "Error while fetching the service information from the server."
        print str(data)
        return
    # extract the services from the JSON response
    servicesJSON = json.loads(data)
    services = servicesJSON['services']
    for service in services:
        if service['serviceName'] == serviceName:
            # delete the service: POST to <name>.<type>/delete under the folder
            params = urllib.urlencode({'token': token, 'f': 'json'})
            if folderName == 'root':
                URL = "/arcgis/admin/services/" + serviceName + "." + service['type'] + "/delete"
            else:
                URL = "/arcgis/admin/services/" + folderName + "/" + serviceName + "." + service['type'] + "/delete"
            print "Found the service '" + serviceName + "." + service[
                'type'] + " at '" + folderName + "' folder in the server, the service will be deleted and be re-published."
            response, data = postToServer(serverName, serverPort, URL, params)
            if (response.status != 200 or not assertJsonSuccess(data)):
                print "Failed to delete the service: '" + serviceName + "." + service['type']
                print str(data)
                return
            else:
                print "Deleted the service '" + serviceName + "." + service['type'] + " successfully."
# Script start: pass the CLI arguments through to main() and use its result
# as the process exit status.
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
|
{"/osgeo.py": ["/gdal.py"]}
|
40,176
|
onkelrow/First
|
refs/heads/master
|
/getstatistics.py
|
__author__ = '53_e_rv'
# NOTE(review): the assignment above is dead — it is immediately overwritten,
# so the effective module author is 'Administrator'. Keep only one.
__author__ = 'Administrator'
# Queries the logs and writes statistics on map service activity during the past 24 hours
# For Http calls
import httplib, urllib, json
# For system tools
import sys, time
# For reading passwords without echoing
import getpass
# Defines the entry point into the script
def main(argv=None):
    """Query the ArcGIS Server logs for the past 24 h of ExportMapImage calls
    and write per-service hit counts / average draw times as CSV to a file."""
    # Print some info
    print
    print "This tool is a sample script that queries the ArcGIS Server logs and writes a report"
    print " summarizing all map service draws within the past 24 hours."
    print
    # Ask for admin/publisher user name and password
    username = raw_input("Enter user name: ")
    #password = getpass.getpass("Enter password: ")
    # NOTE(review): the password is read with raw_input and echoed to the
    # console; restore the getpass call above before real use.
    password = raw_input("pw: ")
    # Ask for server name
    serverName = raw_input("Enter Server name: ")
    serverPort = 6080
    # Ask for text file path
    filePath = raw_input("Enter the path of the text file where the statistics should be written.")
    millisecondsToQuery = 86400000  # One day
    hitDict = {}
    # Get a token
    token = getToken(username, password, serverName, serverPort)
    if token == "":
        # NOTE(review): confirm getToken() returns "" (not None) on failure so
        # this check actually fires.
        print "Could not generate a token with the username and password provided."
        return
    # Construct URL to query the logs
    logQueryURL = "/arcgis/admin/logs/query"
    # startTime is "now" and endTime 24 h earlier — presumably the log query
    # walks newest-to-oldest; confirm against the Admin API documentation.
    startTime = int(round(time.time() * 1000))
    endTime = startTime - millisecondsToQuery
    logFilter = "{'services':'*','server':'*','machines':'*'}"
    params = urllib.urlencode({'level': 'FINE', 'startTime': startTime, 'endTime': endTime, 'filter': logFilter, 'token': token, 'f': 'json'})
    headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
    # Connect to URL and post parameters
    httpConn = httplib.HTTPConnection(serverName, serverPort)
    httpConn.request("POST", logQueryURL, params, headers)
    # Read response
    response = httpConn.getresponse()
    if (response.status != 200):
        httpConn.close()
        print "Error while querying logs."
        return
    else:
        data = response.read()
        # Check that data returned is not an error object
        if not assertJsonSuccess(data):
            print "Error returned by operation. " + data
        else:
            print "Operation completed successfully!"
        # Deserialize response into Python object
        dataObj = json.loads(data)
        httpConn.close()
        # Need these variables to calculate average draw time for an ExportMapImage call
        mapDraws = 0
        totalDrawTime = 0
        # Iterate over messages: per service ("source"), tally hits and total
        # elapsed seconds for completed ExportMapImage calls.
        for item in dataObj["logMessages"]:
            if item["message"] == "End ExportMapImage":
                elapsed = float(item["elapsed"])
                keyCheck = item["source"]
                if keyCheck in hitDict:
                    stats = hitDict[keyCheck]
                    # Add 1 to tally of hits
                    stats[0] += 1
                    # Add elapsed time to total elapsed time
                    stats[1] += elapsed
                else:
                    # Add key with one hit and total elapsed time
                    hitDict[keyCheck] = [1, elapsed]
        # Open text file and write header line
        summaryFile = open(filePath, "w")
        header = "Service,Number of hits,Average seconds per draw\n"
        summaryFile.write(header)
        # Read through dictionary and write totals into file
        for key in hitDict:
            # Calculate average elapsed time
            totalDraws = hitDict[key][0]
            totalElapsed = hitDict[key][1]
            avgElapsed = 0
            if totalDraws > 0:
                avgElapsed = (1.0 * (totalElapsed / totalDraws))  # Elapsed time divided by hits
            # Construct and write the comma-separated line
            line = key + "," + str(totalDraws) + "," + str(avgElapsed) + "\n"
            summaryFile.write(line)
        summaryFile.close()
        return
#A function to generate a token given username, password and the adminURL.
def getToken(username, password, serverName, serverPort):
# Token URL is typically http://server[:port]/arcgis/admin/generateToken
tokenURL = "/arcgis/admin/generateToken"
# URL-encode the token parameters
params = urllib.urlencode({'username': username, 'password': password, 'client': 'requestip', 'f': 'json'})
headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
# Connect to URL and post parameters
httpConn = httplib.HTTPConnection(serverName, serverPort)
httpConn.request("POST", tokenURL, params, headers)
# Read response
response = httpConn.getresponse()
if (response.status != 200):
httpConn.close()
print "Error while fetching tokens from admin URL. Please check the URL and try again."
return
else:
data = response.read()
httpConn.close()
# Check that data returned is not an error object
if not assertJsonSuccess(data):
return
# Extract the toke from it
token = json.loads(data)
return token['token']
# A function that checks that the input JSON object
# is not an error object.
def assertJsonSuccess(data):
    """Return False (after printing the payload) when JSON string *data*
    carries ``status == "error"``; True otherwise."""
    obj = json.loads(data)
    if 'status' in obj and obj['status'] == "error":
        print "Error: JSON object returns an error. " + str(obj)
        return False
    else:
        return True
# Script start: forward the CLI arguments and exit with main()'s result.
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
|
{"/osgeo.py": ["/gdal.py"]}
|
40,177
|
onkelrow/First
|
refs/heads/master
|
/find and replace.py
|
# #import arcpy
# import arcpy, os
#
# for root, dirs, files in os.walk(r"C:\Project"):
#
# for f in files:
# if f.endswith(".mxd"):
# mxd = root + '\\' + f
# #analysis = arcpy.mapping.AnalyzeForMSD(mxd)
# #mxd = arcpy.mapping.MapDocument(r"C:\Users\Administrator\Desktop\ripsgdi14\wrrl\automated\wrrl_k_bwp_2_2.mxd")
# mxd.findAndReplaceWorkspacePaths(r"\\Itzgs2\WRRL_BW", r"\\ripsagsdata\data\Internet\ripsgdi\wrrl_bw")
# mxd.saveACopy(r"C:\Users\Administrator\Desktop\ripsgdi14\wrrl\automated\wrrl_k_bwp_2_2_neu.mxd")
# del mxd
# import arcpy, os
# from arcpy import env
# arcpy.env.overwriteOutput = True
# ws = r"C:\Users\Administrator\Desktop\ripsgdi14\wrrl"
# #oldpath = r"\\Itzgs2\WRRL_BW"
# #oldbase = r"C:\Base Data"
# #newpath = r"\\ripsagsdata\data\Internet\ripsgdi\wrrl_bw"
# #newbase = r"C:\Users\mittcla\Documents\Base Data"
# for root, dirs, files in os.walk(ws):
# for f in files:
# if f.endswith('.mxd'):
# fullpath = os.path.join(root,f)
# mxd = arcpy.mapping.MapDocument(fullpath)
# print "Replacing path for " + f +"..."
# #mxd.findAndReplaceWorkspacePaths(oldpath, newpath)
# #mxd.findAndReplaceWorkspacePaths(oldbase, newbase)
# mxd.findAndReplaceWorkspacePaths(r"\\Itzgs2\WRRL_BW", r"\\ripsagsdata\data\Internet\ripsgdi\wrrl_bw")
# mxd.saveACopy(r"C:\Users\Administrator\Desktop\ripsgdi14\wrrl\automated")
# del mxd
__author__ = 'Roman'
import arcpy, os  # workspace to search for MXDs

# Folder whose map documents get their workspace paths rewritten.
Workspace = r"C:\Users\Administrator\Desktop\ripsgdi14\wrrl"
arcpy.env.workspace = Workspace
# list map documents in folder
mxdList = arcpy.ListFiles("*.mxd")
# For each MXD: swap the old UNC data paths for the new server, force
# absolute paths, and save a copy into the "automated" folder.
for mapdoc in mxdList:
    # set map document to change
    filePath = os.path.join(Workspace, mapdoc)
    mxd = arcpy.mapping.MapDocument(filePath)
    # Get the file name
    basename = mapdoc
    try:
        # Remap the two known UNC source locations to the new file server.
        mxd.findAndReplaceWorkspacePaths(r"\\Itzgs2\WRRL_BW", r"\\ripsagsdata\data\Internet\ripsgdi\WRRL_BW")
        mxd.findAndReplaceWorkspacePaths(r"\\Itzgs2\fix_data\WRRL_BW", r"\\ripsagsdata\data\fix_data\WRRL_BW")
    except Exception as e:
        print e.message
    # set relative paths property
    mxd.relativePaths = False
    output = os.path.join(r"C:\Users\Administrator\Desktop\ripsgdi14\wrrl\automated", basename)
    # save map document change
    mxd.saveACopy(output)
# Release the last COM references held by the loop.
del mxd, mapdoc
|
{"/osgeo.py": ["/gdal.py"]}
|
40,178
|
onkelrow/First
|
refs/heads/master
|
/ArcPy testdatasources.py
|
import arcpy, os, sys, string, codecs
# def encode(text):
# """
# For printing unicode characters to the console.
# """
# return text.encode('utf-8')
#Remove temporary connection file if it already exists
# sdeFile = r"C:\Project\Output\TempSDEConnectionFile.sde"
# if os.path.exists(sdeFile):
# os.remove(sdeFile)
#
# #Create temporary connection file in memory
# arcpy.CreateArcSDEConnectionFile_management(r"C:\Project\Output", "TempConnection", "myServerName", "5151", "myDatabase", "DATABASE_AUTH", "myUserName", "myPassword", "SAVE_USERNAME", "myUser.DEFAULT", "SAVE_VERSION")
#u'\xe4'.encode('ascii', 'ignore')
# Report service properties for layers in a map that support SERVICEPROPERTIES
outFile = open(r"E:\Data\check\checklayerdatasources.text", "w")
mxd = arcpy.mapping.MapDocument(r"E:\Data\check\120521_FIS_Deiche_event.mxd")
for lyr in arcpy.mapping.ListLayers(mxd):
    if lyr.supports("SERVICEPROPERTIES"):
        servProp = lyr.serviceProperties
        # NOTE(review): the other supports() call uses an upper-case capability
        # name ("SERVICEPROPERTIES") — confirm "dataSource" is accepted here.
        lyrdataProp = lyr.dataSource if lyr.supports("dataSource") else "N/A"
        # Layer name may contain non-ASCII characters; encode before writing.
        outFile.write("Layer name:" + lyr.name.encode('utf-8') + "\n")
        outFile.write("Service Type: " + servProp.get('ServiceType', 'N/A') + "\n")
        outFile.write(lyrdataProp + "\n")
        # (A more detailed per-service dump — URL, Connection, Server, Cache,
        # UserName, Password — existed here but was commented out.)
    else:
        # Layers without service properties just print a blank line.
        print ""
# NOTE(review): outFile is never closed and mxd never released — add
# outFile.close() / del mxd (or a with-statement) — confirm intended.
|
{"/osgeo.py": ["/gdal.py"]}
|
40,179
|
onkelrow/First
|
refs/heads/master
|
/Publish arcgis server service -internet.py
|
#import modules
import arcpy, sys, os, string
# specify folder containing MXDs
inFolder = raw_input("Please enter folder containing 10.2 MXDs to Publish to ArcServer: ")
# Admin .ags connection file used for publishing (machine-specific path).
connectionFilePath = r'C:\Users\53_e_rv\AppData\Roaming\ESRI\Desktop10.3\ArcCatalog\arcgis on 10.41.253.42 (Admin).ags'
# look in folder for mxds
# NOTE(review): MapPath starts as a list but is reassigned to a string inside
# the loop, and `file` shadows the builtin — rename both.
MapPath = []
MapFolder = os.listdir(inFolder)
for file in MapFolder:
    fileExt = os.path.splitext(file)[1]
    if fileExt == ".mxd":
        MapPath = os.path.join(inFolder, file)
        file = MapPath.strip('\'')
        mxd = arcpy.mapping.MapDocument(file)
        base = os.path.basename(file)
        # Service name = file name without the ".mxd" extension.
        serviceName = base[:-4]
        SDDraft = file[:-4] + ".sddraft"
        sd = file[:-4] + ".sd"
        # Create the service definition draft next to the MXD.
        # ("False" / "wrrl" are presumably copy_data_to_server and the target
        # folder — confirm against the CreateMapSDDraft documentation.)
        print "\n" + "Publishing: " + base
        analysis = arcpy.mapping.CreateMapSDDraft(mxd, SDDraft, serviceName, "FROM_CONNECTION_FILE", connectionFilePath, "False", "wrrl", "", "")
        # stage and upload the service if the sddraft analysis did not contain errors
        if analysis['errors'] == {}:
            # Execute StageService
            print "SD vorbereiten"
            arcpy.StageService_server(SDDraft, sd)
            # Execute UploadServiceDefinition
            print "Hochladen Service Definition"
            arcpy.UploadServiceDefinition_server(sd, connectionFilePath)
            print "Erstellen " + base + " geglueckt" + "\n"
        else:
            # if the sddraft analysis contained errors, display them
            print analysis['errors']
|
{"/osgeo.py": ["/gdal.py"]}
|
40,180
|
onkelrow/First
|
refs/heads/master
|
/raster reproject.py
|
import arcpy, sys

# Batch-reproject every raster in the input folder into the output folder.
in_folder = sys.argv[1]
out_folder = sys.argv[2]
# EPSG 32750 = "WGS_1984_UTM_Zone_50S"
target_sr = arcpy.SpatialReference(32750)
arcpy.env.workspace = in_folder
for raster_name in arcpy.ListRasters():
    arcpy.AddMessage("Projecting " + raster_name)
    arcpy.ProjectRaster_management(in_folder + "\\" + raster_name,
                                   out_folder + "\\" + raster_name, target_sr)
arcpy.AddMessage("Projecting complete")
|
{"/osgeo.py": ["/gdal.py"]}
|
40,181
|
onkelrow/First
|
refs/heads/master
|
/stageService.py
|
# ---------------------------------------------------------------------------
# stageService.py
# Created on: 2014-10-30
# Usage:
# Description:
# ---------------------------------------------------------------------------
# Set the necessary product code
# Import arcpy module
import arcpy, sys, os, string
def encode(text):
    """UTF-8 encode *text* so unicode strings print safely on the console."""
    utf8_text = text.encode('utf-8')
    return utf8_text
# ArcGIS Server admin connection file used for publishing.
connectionFilePath = r'C:\Users\administrator\AppData\Roaming\ESRI\Desktop10.2\ArcCatalog\arcgis on ripsproc2014_6080 (admin).ags'
# Check the project files.
# NOTE(review): MapPath is never used at module level — the functions build
# their own paths; consider removing.
MapPath = []
def stageService():
    """Prompt for a folder of 10.2 MXDs and publish each entry via processFile()."""
    try:
        inFolder = raw_input("10.2 Mxd's ")
        MapFolder = os.listdir(inFolder)
        for file in MapFolder:
            # NOTE(review): inFolder is local to this function, but
            # processFile() reads a name `inFolder` it does not define — that
            # lookup raises NameError for .mxd entries at runtime. Pass the
            # folder along (or make it module-level) — confirm intended scope.
            processFile(file)
    except arcpy.ExecuteError:
        # Geoprocessing errors: print and forward to the tool message stream.
        print arcpy.GetMessages(2)
        arcpy.AddError(arcpy.GetMessages(2))
    except Exception as e:
        print e.args[0]
        arcpy.AddError(e.args[0])
def processFile(file):
try:
fileExt = os.path.splitext(file)[1]
if fileExt == ".mxd":
MapPath = os.path.join(inFolder, file)
file = MapPath.strip('\'')
mxd = arcpy.mapping.MapDocument(file)
base = os.path.basename(file)
serviceName = base[:-4]
SDDraft = file[:-4] + ".sddraft"
sd = file[:-4] + ".sd"
#erstellen des Servicefiles als sd draft
print "\n" + "Publishing: " + base
analysis = arcpy.mapping.CreateMapSDDraft(mxd, SDDraft, serviceName, "FROM_CONNECTION_FILE",
connectionFilePath, "False", "", "None", "None")
arcpy.StageService_server(SDDraft, sd)
#sofern keine fehler beim Ueberpruefen zur bereitstellung erfolgt sind erstelle einen Dienst fuer den mapservice
arcpy.UploadServiceDefinition_server(sd, connectionFilePath)
except arcpy.ExecuteError:
print arcpy.GetMessages(2)
arcpy.AddError(arcpy.GetMessages(2))
except Exception as e:
print e.args[0]
arcpy.AddError(e.args[0])
# This test allows the script to be used from the operating
# system command prompt (stand-alone), in a Python IDE,
# as a geoprocessing script tool, or as a module imported in
# another script
if __name__ == '__main__':
    # Arguments are optional
    # NOTE(review): argv is collected but never passed to stageService() —
    # confirm whether the parameters are meant to be used.
    argv = tuple(arcpy.GetParameterAsText(i)
                 for i in range(arcpy.GetArgumentCount()))
    stageService()
|
{"/osgeo.py": ["/gdal.py"]}
|
40,182
|
onkelrow/First
|
refs/heads/master
|
/Monika upload.py
|
__author__ = 'Administrator'
import arcpy, os  # workspace to search for MXDs

# Folder scanned for map documents.
Workspace = r"C:\temp"
arcpy.env.workspace = Workspace
# list map documents in folder
mxdList = arcpy.ListFiles("*.mxd")
# For each MXD: re-point the old 10.0 SDE connection at the 10.2 one, force
# absolute paths, and save a copy into C:\temp\neue mxd.
for mapdoc in mxdList:
    # set map document to change
    filePath = os.path.join(Workspace, mapdoc)
    mxd = arcpy.mapping.MapDocument(filePath)
    # Get the file name
    basename = mapdoc
    try:
        # Swap the Desktop 10.0 connection file for the 10.2 one (third arg
        # False — presumably the validate flag; confirm against the docs).
        mxd.findAndReplaceWorkspacePaths(r"C:\Users\Administrator\AppData\Roaming\ESRI\Desktop10.0\ArcCatalog\Connection to Direct_lfu_webview_11g_sde_neu.sde",
                                         r"C:\Users\Administrator\AppData\Roaming\ESRI\Desktop10.2\ArcCatalog\Connection to direct_lfu_webview_geodbtemp.sde", False)
    except Exception as e:
        print e.message
    # set relative paths property
    mxd.relativePaths = False
    output = os.path.join(r"C:\temp\neue mxd", basename)
    # save map document change
    mxd.saveACopy(output)
# Release the last COM references held by the loop.
del mxd, mapdoc
# import arcpy
# mxd = arcpy.mapping.MapDocument(r"C:\Users\Administrator\Desktop\UIS_0100000005800001_neu.mxd")
# mxd.findAndReplaceWorkspacePaths(r"Database Connections\Connection to direct_geo_sde_db_rac.sde",
# r"Database Connections\Connection to direct_lfu_webview_geodbtemp.sde")
# mxd.saveACopy(r"C:\Users\Administrator\Desktop\UIS_0100000005800001_neu_3.mxd")
# del mxd
#
|
{"/osgeo.py": ["/gdal.py"]}
|
40,183
|
onkelrow/First
|
refs/heads/master
|
/dbtune export import.py
|
import arcpy

# Source enterprise geodatabase connection and the text file that will receive
# its configuration (DBTUNE) keywords for editing / later re-import.
ent_gdb = "C:\\gdbs\\enterprisegdb.sde"
output_file = "C:\\temp\\keyword.txt"
# Export the geodatabase configuration keywords to the text file.
arcpy.ExportGeodatabaseConfigurationKeywords_management(ent_gdb, output_file)
|
{"/osgeo.py": ["/gdal.py"]}
|
40,184
|
onkelrow/First
|
refs/heads/master
|
/Python script for reconsile and post ArcSDE 102.py
|
import arcpy, time, smtplib

# Define the workspace (geodatabase connection).
arcpy.env.workspace = 'database Connections/connection to geo.sde'
# Keep the workspace in a variable.
workspace = arcpy.env.workspace
# List of currently connected geodatabase users.
userList = arcpy.ListUsers("Database Connections/admin.sde")
# Build the users' e-mail addresses from the connection list.
# Bug fixes: arcpy.ListUser -> arcpy.ListUsers (the function used three lines
# above), and u.Name -> user.Name (the comprehension variable is `user`;
# `u` was undefined).
emailList = [user.Name + "@lubw.bwl.de" for user in arcpy.ListUsers("Database Connections/admin.sde")]
# Mail server intended for notifying users (sending not implemented here yet).
Server = "mailserver"
# Block new connections to the schema.
arcpy.AcceptConnections('Database Connections/admin.sde', False)
# Wait five minutes so users can log off.
time.sleep(300)
# Disconnect all remaining users.
# Bug fix: arcpy.disconnectUser() does not exist and was called without
# arguments; DisconnectUser(<admin connection>, "ALL") matches the sibling
# maintenance script in this repository.
arcpy.DisconnectUser('Database Connections/admin.sde', "ALL")
|
{"/osgeo.py": ["/gdal.py"]}
|
40,185
|
onkelrow/First
|
refs/heads/master
|
/osgeo.py
|
def setupgdal():
    """Add a local GDAL install to PATH / GDAL_* env vars, then try importing gdal.

    Returns False when the GDAL directory is missing; otherwise returns None
    after attempting the import (a failed import is only reported, preserving
    the original behavior).
    """
    # Bug fix: this module never imported `os`, so every os.* call below
    # raised NameError. Imported locally to keep the module surface unchanged.
    import os

    GDAL_PATH = "C:\\Program Files (x86)\\GDAL"
    if not os.path.isdir(GDAL_PATH):
        print("GDAL not found on system at {0}".format(GDAL_PATH))
        return False
    # Prepend GDAL to PATH and point GDAL_DATA / GDAL_PLUGINS at the install.
    _environ = dict(os.environ)
    _environ["PATH"] = "{0};{1}".format(GDAL_PATH, _environ["PATH"])
    _environ["GDAL_DATA"] = "{0}\\gdal-data".format(GDAL_PATH)
    _environ["GDAL_PLUGINS"] = "{0}\\gdalplugins".format(GDAL_PATH)
    os.environ.clear()
    os.environ.update(_environ)
    try:
        import gdal
    except ImportError:
        print("GDAL Import Failed")
|
{"/osgeo.py": ["/gdal.py"]}
|
40,186
|
onkelrow/First
|
refs/heads/master
|
/arcsde reconsile and post.py
|
import arcpy, time, smtplib

# set the workspace (admin SDE connection, reused throughout)
arcpy.env.workspace = 'Database Connections/admin.sde'
# set a variable for the workspace
workspace = arcpy.env.workspace
# get a list of connected users.
userList = arcpy.ListUsers("Database Connections/admin.sde")
# get a list of usernames of users currently connected and make email addresses
# Bug fix: the comprehension variable is `user`, but the original read
# `u.Name` (NameError at runtime); use the loop variable.
emailList = [user.Name + "@yourcompany.com" for user in arcpy.ListUsers("Database Connections/admin.sde")]
# take the email list and use it to send an email to connected users.
SERVER = "mailserver.yourcompany.com"
FROM = "SDE Admin <python@yourcompany.com>"
TO = emailList
SUBJECT = "Maintenance is about to be performed"
MSG = "Auto generated Message.\n\rServer maintenance will be performed in 15 minutes. Please log off."
# Prepare actual message
MESSAGE = """\
From: {0:s}
To: {1:s}
Subject: {2:s}
{3:s}
""".format(FROM, ", ".join(TO), SUBJECT, MSG)
# Send the mail
server = smtplib.SMTP(SERVER)
server.sendmail(FROM, TO, MESSAGE)
server.quit()
# block new connections to the database.
arcpy.AcceptConnections('Database Connections/admin.sde', False)
# wait 15 minutes
time.sleep(900)
# disconnect all users from the database.
arcpy.DisconnectUser('Database Connections/admin.sde', "ALL")
# Get a list of versions to pass into the ReconcileVersions tool.
versionList = arcpy.ListVersions('Database Connections/admin.sde')
# Execute the ReconcileVersions tool.
arcpy.ReconcileVersions_management('Database Connections/admin.sde', "ALL_VERSIONS", "sde.DEFAULT", versionList, "LOCK_ACQUIRED", "NO_ABORT", "BY_OBJECT", "FAVOR_TARGET_VERSION", "POST", "DELETE_VERSION", "c:/temp/reconcilelog.txt")
# Run the compress tool.
arcpy.Compress_management('Database Connections/admin.sde')
# Allow the database to begin accepting connections again
arcpy.AcceptConnections('Database Connections/admin.sde', True)
# Get the user name for the workspace
# (assumes database authentication — OS-auth connection files do not have
# a 'user' property).
userName = arcpy.Describe(arcpy.env.workspace).connectionProperties.user
# Gather the stand-alone tables, feature classes and rasters the user owns...
dataList = arcpy.ListTables('*.' + userName + '.*') + arcpy.ListFeatureClasses('*.' + userName + '.*') + arcpy.ListRasters('*.' + userName + '.*')
# ...then add the feature classes inside each of the user's feature datasets.
for dataset in arcpy.ListDatasets('*.' + userName + '.*'):
    dataList += arcpy.ListFeatureClasses(feature_dataset=dataset)
# Rebuild indexes and analyze the owned datasets.
# Note: the "SYSTEM" option requires administrator rights.
arcpy.RebuildIndexes_management(workspace, "SYSTEM", dataList, "ALL")
arcpy.AnalyzeDatasets_management(workspace, "SYSTEM", dataList, "ANALYZE_BASE", "ANALYZE_DELTA", "ANALYZE_ARCHIVE")
|
{"/osgeo.py": ["/gdal.py"]}
|
40,187
|
onkelrow/First
|
refs/heads/master
|
/gdal.py
|
from osgeo
|
{"/osgeo.py": ["/gdal.py"]}
|
40,188
|
onkelrow/First
|
refs/heads/master
|
/spatialreference.py
|
import arcpy

# Load a projected coordinate system from a Desktop 10.0 .prj file
# (NAD 1983 UTM Zone 10N).
sr = arcpy.SpatialReference(r"C:\Program Files (x86)\ArcGIS\Desktop10.0\Coordinate Systems\Projected Coordinate Systems\UTM\NAD 1983\NAD 1983 UTM Zone 10N.prj")
# "current" requires this script to run inside an open ArcMap session.
mxd = arcpy.mapping.MapDocument("current")
# Apply the spatial reference to the document's first data frame.
df = arcpy.mapping.ListDataFrames(mxd)[0]
df.spatialReference = sr
|
{"/osgeo.py": ["/gdal.py"]}
|
40,189
|
onkelrow/First
|
refs/heads/master
|
/update services in context_2.py
|
# NOTE(review): this fragment references sd, sddraft and analysis, which are
# not defined here — it appears to be the tail of a larger publishing script.
print "service definition draft created"
arcpy.AddMessage("service definition draft created")
# Set local variables for UploadServiceDefinition
inSdFile = sd
con = "GIS Servers/arcgis on HPWS32_6080 (admin)"
inServer = con
inServiceName = 'districtwise'
# NOTE(review): InSdFile (capital I) is undefined — this line raises NameError
# and would clobber the service name set just above; confirm which value was
# intended and delete the other assignment.
inServiceName = InSdFile
inCluster = "default"
inFolderType = "EXISTING"
inFolder = "weather"
inStartup = "STARTED"
inOverride = "OVERRIDE_DEFINITION"
inMyContents = "NO_SHARE_ONLINE"
inPublic = "PRIVATE"
inOrganization = "NO_SHARE_ORGANIZATION"
inGroups = ""
print "publishing local variables defined"
arcpy.AddMessage("publishing local variables defined")
# stage and upload the service if the sddraft analysis did not contain errors
if analysis['errors'] == {}:
    # Execute StageService
    arcpy.StageService_server(sddraft, sd)
    print "StageService_server created"
    arcpy.AddMessage("StageService_server created")
    # Execute UploadServiceDefinition
    arcpy.UploadServiceDefinition_server(inSdFile, inServer, inServiceName,
                                         inCluster, inFolderType, inFolder,
                                         inStartup, inOverride, inMyContents,
                                         inPublic, inOrganization, inGroups)
    print "Uploaded..."
    arcpy.AddMessage("Uploaded..")
else:
    # if the sddraft analysis contained errors, display them
    print analysis['errors']
|
{"/osgeo.py": ["/gdal.py"]}
|
40,190
|
SheLily/Spy-Project
|
refs/heads/master
|
/vk_spy.py
|
import requests
from settings import User, Token, V
# Parameters sent with every VK API call (access token + API version);
# get_request_params() overlays per-call kwargs on top of these.
BASE_PARAMS = {
    'access_token': Token,
    'v': V
}
def get_request_params(base_params=None, **kwargs):
    """Return a new dict: *base_params* (default BASE_PARAMS) overlaid with kwargs.

    Bug fix: the original used the shared BASE_PARAMS dict itself as a mutable
    default and called ``.update()`` on it, so every call permanently polluted
    BASE_PARAMS (and mutated any caller-supplied dict).  The merge now happens
    in a copy; passing no base_params still starts from BASE_PARAMS.
    """
    merged = dict(BASE_PARAMS if base_params is None else base_params)
    merged.update(kwargs)
    return merged
def get_request(method, params):
    """Call the VK API *method* with *params* and return the decoded JSON body."""
    url = f'https://api.vk.com/method/{method}'
    response = requests.get(url, params)
    return response.json()
def get_id_by_name():
    """Resolve the configured User screen name to its numeric VK user id."""
    params = get_request_params(user_ids=User)
    payload = get_request('users.get', params)
    first_user = payload['response'][0]
    return first_user['id']
def get_groups(user_id):
    """Return the user's group items (with members_count), or None on API error."""
    params = get_request_params(
        user_id=int(user_id),
        extended=1,
        fields='members_count',
    )
    payload = get_request('groups.get', params)
    if 'error' in payload:
        return None
    return payload['response']['items']
def get_friends(user_id):
    """Return the user's friend id list, or None on API error."""
    payload = get_request(
        'friends.get',
        get_request_params(user_id=int(user_id)),
    )
    if 'error' in payload:
        return None
    return payload['response']['items']
def get_groups_set(group_list):
    """Collect the 'id' of every entry in *group_list* into a set."""
    return set(group['id'] for group in group_list)
|
{"/vk_spy.py": ["/settings.py"], "/main.py": ["/vk_spy.py", "/file_ops.py", "/settings.py"]}
|
40,191
|
SheLily/Spy-Project
|
refs/heads/master
|
/main.py
|
import time
import tqdm
import vk_spy
import file_ops
from settings import User, Token, V, Request_time
def main():
    """Find the user's VK groups that none of their friends belong to, and
    save them (name, gid, members_count) to groups.json."""
    user = vk_spy.get_id_by_name()
    usr_groups = vk_spy.get_groups(user)
    set_usr_group = vk_spy.get_groups_set(usr_groups)
    usr_friends = vk_spy.get_friends(user)
    # Progress bar over the friend list.
    pbar = tqdm.tqdm(usr_friends)
    for i in pbar:
        start = time.time()
        # get_groups returns None on API error (e.g. private profile); skip those.
        frnd_groups = vk_spy.get_groups(i['id'])
        if frnd_groups:
            # Remove any group this friend is also in (first 1000 entries only
            # — presumably the API page size; confirm).
            set_usr_group -= vk_spy.get_groups_set(frnd_groups[:1000])
        # Throttle so at least Request_time seconds elapse between requests.
        sleep_time = Request_time - (time.time() - start)
        if sleep_time > 0:
            time.sleep(sleep_time)
    # Keep only the groups no friend belongs to.
    usr_groups = [i for i in usr_groups if i['id'] in set_usr_group]
    # Some groups omit members_count; default it to 0 before reporting.
    for i in usr_groups:
        if 'members_count' not in i:
            i['members_count'] = 0
    usr_groups = [
        {
            'name': i['name'],
            'gid': i['id'],
            'members_count': i['members_count']}
        for i in usr_groups
    ]
    file_ops.save_results(usr_groups, 'groups.json')
# Run only when executed directly (not when imported as a module).
if __name__ == '__main__':
    main()
|
{"/vk_spy.py": ["/settings.py"], "/main.py": ["/vk_spy.py", "/file_ops.py", "/settings.py"]}
|
40,192
|
SheLily/Spy-Project
|
refs/heads/master
|
/file_ops.py
|
import json
def save_results(data, fname):
    """Serialize *data* to *fname* as UTF-8 JSON without ASCII escaping."""
    with open(fname, 'w', encoding='utf-8') as handle:
        handle.write(json.dumps(data, ensure_ascii=False))
|
{"/vk_spy.py": ["/settings.py"], "/main.py": ["/vk_spy.py", "/file_ops.py", "/settings.py"]}
|
40,193
|
SheLily/Spy-Project
|
refs/heads/master
|
/settings.py
|
# VK screen name whose group subscriptions will be analysed.
User = 'eshmargunov'
# WARNING(review): API token committed to source control — rotate it and load
# it from an environment variable instead.
Token = ('958eb5d439726565e9333aa30e50e0f937ee432e927f0dbd541c541887d919a7c56'
         'f95c04217915c32008')
# VK API version string sent with each request.
V = '5.107'
# Minimum delay between requests, seconds — presumably matches VK's
# ~3-requests-per-second limit; confirm against current API policy.
Request_time = 1 / 3
|
{"/vk_spy.py": ["/settings.py"], "/main.py": ["/vk_spy.py", "/file_ops.py", "/settings.py"]}
|
40,210
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/exemple1.py
|
# Ask whether the user wants dessert and answer accordingly.
# Fixed: the condition used assignment (=) instead of comparison (==),
# which is a SyntaxError in Python.
value = input("voulez vous le dessert: (True/False)")
if value == "True":
    print("voici la carte")
else:
    print("Au revoir")
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,211
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/exception_finally.py
|
def calcul(a, b):
    """Read menu.txt, compute a/b, and return the file's lines.

    Returns None when b is zero (the ZeroDivisionError is swallowed).
    Fixed: the original's ``finally`` always called ``fichier.close()``,
    which raised NameError if ``open`` itself failed; the handle is now
    only closed when it was actually opened.
    """
    fichier = None
    try:
        fichier = open("menu.txt", 'r')
        lines = fichier.readlines()
        x = a / b
        if x == 0:
            print("zero")
        return lines
    except ZeroDivisionError:
        print("apres pass")
    finally:
        if fichier is not None:
            fichier.close()
        print("traitement continue")
print("traitment hors try")
# Demo call: divides by zero, so calcul() returns None after its handler runs.
print(calcul(1,0))
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,212
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/exceptions.py
|
# Lookup table used by the interactive loop below.
dict_ = {"cle":12,"cle2":13}


def ouvrirFichier():
    """Open menu.txt and return (True, lines), or (False, "erreur") if missing.

    Fixed: the original never closed the file handle; a context manager
    now guarantees it is released.
    """
    try:
        with open("menu.txt", 'r') as fichier:
            lines = fichier.readlines()
    except FileNotFoundError:
        return (False, "erreur")
    print("aucune exception n'est declanché")
    return (True, lines)
# Demo driver: abort if menu.txt could not be read, then loop reading keys.
success, result = ouvrirFichier()
if(success) :
    print(result)
else :
    print(result)
    exit()
while True:
    try:
        cle = input("lire clé : ")
        print(dict_[cle])
        # NOTE(review): this division always raises ZeroDivisionError, so
        # "division par 0" is printed after every successful lookup — likely
        # leftover demo code; confirm before removing.
        a = 10 / 0
    except KeyError:
        print("la cle n'existe pas")
    except ZeroDivisionError:
        print ("division par 0")
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,213
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/fizzbuzz2.py
|
# Classic FizzBuzz for a single number, written as an explicit chain
# instead of the nested tuple-indexing trick.
n = int(input("saisir n :\n"))
if n % 15 == 0:
    resultat = "FizzBuzz"
elif n % 5 == 0:
    resultat = "Buzz"
elif n % 3 == 0:
    resultat = "Fizz"
else:
    resultat = n
print(resultat)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,214
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/modules/pizza.py
|
class Pizza(object):
    """A pizza with a name, price, ingredient list and stock quantity."""

    def __init__(self, name):
        self.name = name          # display name
        self.prix = 0             # price in euros
        self.ingredients = []     # ingredient labels
        self.qte = 0              # units in stock

    def getName(self):
        """Return the pizza's name."""
        return self.name

    def addIngredient(self, ingredient):
        """Append one ingredient label."""
        self.ingredients.append(ingredient)

    def removeIngredient(self, ingredient):
        """Remove the first occurrence of *ingredient*."""
        self.ingredients.remove(ingredient)

    def fixerPrix(self, prix):
        """Set the price."""
        self.prix = prix

    def addQuantite(self, qte):
        """Add *qte* (possibly negative) to the stock."""
        self.qte += qte

    def getQuantite(self):
        """Return the current stock."""
        return self.qte

    def __repr__(self):
        return f"Pizza : {self.name} {self.ingredients} prix: {self.prix} euros"
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,215
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/pizzeria_python_poo.py
|
from modules.menu import Menu
from modules.pizza import Pizza
# Shared application state: the single Menu instance mutated by all handlers.
menu = Menu()
def saveMenu(file_path, menu):
    """Write *menu* (via str()) to *file_path*, preceded by a 'menu :' header.

    Improved: uses a ``with`` block so the file is closed even if writing
    fails, instead of manual open/close.
    """
    with open(file_path, 'w') as fichier:
        fichier.write("menu :\n")
        fichier.write(str(menu))
def getMenuFromFile(file_path):
    """Load the menu from *file_path* — unimplemented stub (only prints)."""
    ## TODO: actually read the menu back from file_path
    print("getMenu")
def helpAdmin():
    """Print the admin command reference (text shown verbatim to the user)."""
    print(""" Help :
    - addPizza pizzaName (ajouter une pizza au menu)
    - addIngredient pizzaName ingredient (ajouter un ingrédient à une pizza)
    - addPrix pizzaName prix (ajouter un prix à une pizza)
    - addQte pizzaName qte (ajouter une qte à une pizza)
    - exit (sortir du menu admin)
    """)
def addPizza(option):
    """Handle 'addPizza <name>': register a new Pizza on the global menu."""
    global menu
    _, pizza_name = option.split(" ")
    menu.addPizza(Pizza(pizza_name))


def addIngredient(option):
    """Handle 'addIngredient <pizza> <ingredient>'."""
    global menu
    _, pizza_name, ingredient = option.split(" ")
    menu.getPizza(pizza_name).addIngredient(ingredient)


def addPrix(option):
    """Handle 'addPrix <pizza> <price>'."""
    global menu
    _, pizza_name, prix = option.split(" ")
    menu.getPizza(pizza_name).fixerPrix(int(prix))


def addQte(option):
    """Handle 'addQte <pizza> <qty>'."""
    global menu
    _, pizza_name, qte = option.split(" ")
    menu.getPizza(pizza_name).addQuantite(int(qte))
def helpClient():
    """Print the client command reference (text shown verbatim to the user)."""
    print(""" Help :
    - menu (afficher le menu)
    - manger pizzaName (manger une pizza)
    - exit (sortir du restaurant)
    """)


def displayMenu():
    """Print the whole menu via Menu.__repr__."""
    global menu
    print(menu)
def manger(action):
    """Handle 'manger <pizza>': decrement stock if the pizza is available.

    Fixed: Menu.getPizza returns None for an unknown name, so the original
    crashed with AttributeError before it could print the error message.
    """
    global menu
    _, pizza_name = action.split(" ")
    pizza = menu.getPizza(pizza_name)
    if pizza is not None and pizza.getQuantite() > 0:
        print("bonap")
        pizza.addQuantite(-1)
    else:
        print("pizza non disponible")
# Tiny REPL: "admin" manages the menu, "client" orders pizzas, "exit" quits.
while True:
    login = input("login : ")
    if login == "exit":
        break
    if login == "admin":
        while True:
            option = input("choisir une option : ")
            if option == "exit":
                break
            if option == "help":
                helpAdmin()
            # Handle admin commands
            if option.startswith("addPizza"):
                addPizza(option)
            if option.startswith("addIngredient"):
                addIngredient(option)
            if option.startswith("addPrix"):
                addPrix(option)
            if option.startswith("addQte"):
                addQte(option)
            if option.startswith("saveMenu"):
                saveMenu("./menu.txt", menu)
    if login == "client":
        while True:
            action = input("que voulez vous faire : ")
            if action == "exit":
                print("Au revoir")
                break
            if action == "help":
                helpClient()
            if action == "menu":
                displayMenu()
            if action.startswith("manger"):
                manger(action)
            # Handle client commands
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,216
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/nbPremiers2.py
|
# Print every prime below n using trial division.
n = int(input("saisir un nombre"))
for candidat in range(2, n):
    if all(candidat % diviseur != 0 for diviseur in range(2, candidat)):
        print(candidat)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,217
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/fizzbuzz3.py
|
# Repeated FizzBuzz: read numbers until the user types "exit".
while True:
    saisie = input("saisir n :\n")
    if saisie == "exit":
        break
    n = int(saisie)
    if n % 15 == 0:
        print("FizzBuzz")
    elif n % 5 == 0:
        print("Buzz")
    elif n % 3 == 0:
        print("Fizz")
    else:
        print(n)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,218
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/nbPremiers.py
|
# Print every prime below n; a for/else replaces the manual while loops
# and divisor bookkeeping of the original.
n = int(input("saisir un nombre"))
for candidat in range(2, n):
    for diviseur in range(2, candidat):
        if candidat % diviseur == 0:
            break
    else:
        print(candidat)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,219
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/POO.py
|
import datetime
class Personne(object):
    """A person with a last name, first name and birth year."""

    def __init__(self, nom, prenom, dateNaissance):
        self.nom = nom
        self.prenom = prenom
        self.dateNaissance = dateNaissance  # birth year (int), not a date

    def saluer(self):
        """Print a greeting using the last name."""
        print(f"Salut {self.nom}")

    def displayPersonne(self):
        """Print an alternative greeting."""
        print(f"Hello {self.nom}")

    def calculAge(self):
        """Return the age in years based on the current calendar year."""
        return datetime.datetime.now().year - self.dateNaissance
class Etudiant(Personne):
    """A student: a Personne plus a card number."""

    def __init__(self, nom, prenom, date, numeroCarte):
        super().__init__(nom, prenom, date)
        self.numeroCarte = numeroCarte
class Employe(Personne):
    """An employee: a Personne plus a salary."""

    def __init__(self, nom, prenom, date, salaire):
        super().__init__(nom, prenom, date)
        self.salaire = salaire

    def saluer(self):
        """Greet like a Personne, then print the salary.

        Fixed: the original called self.saluer(), which recursed forever;
        the parent implementation must be invoked explicitly.
        """
        super().saluer()
        print("le salaire :", self.salaire)
# Demo instantiations.
etudiant = Etudiant("Felix","Lechat", 2005, "123NB22")
etudiant.saluer()
empl = Employe("Tom", "Cruz", 1965, 1200000)
# NOTE(review): Employe.saluer() calls itself (self.saluer()), so this line
# crashes with RecursionError until that method delegates to the parent.
empl.saluer()
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,220
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/pizzeria_python.py
|
# In-memory menu: name -> [ingredient list, price, quantity].
menu={"Napo":[["anchois"],12,2]}


def saveMenu(file_path, menu) :
    """Write the menu dict to *file_path*, preceded by a 'menu :' header."""
    ## save the menu to the file file_path
    fichier = open(file_path, 'w')
    fichier.write("menu :\n")
    fichier.write(str(menu))
    fichier.close()


def getMenuFromFile(file_path):
    """Load the menu from *file_path* — unimplemented stub (only prints)."""
    ## TODO: retrieve the menu from file_path
    print("getMenu")


def helpAdmin():
    """Print the admin command reference (text shown verbatim to the user)."""
    print(""" Help :
    - addPizza pizzaName (ajouter une pizza au menu)
    - addIngredient pizzaName ingredient (ajouter un ingrédient à une pizza)
    - addPrix pizzaName prix (ajouter un prix à une pizza)
    - addQte pizzaName qte (ajouter une qte à une pizza)
    - exit (sortir du menu admin)
    """)
def addPizza(option):
    """Handle 'addPizza <name>': create an empty menu entry."""
    global menu
    _, pizza = option.split(" ")
    menu[pizza] = [[], 0, 0]


def addIngredient(option):
    """Handle 'addIngredient <pizza> <ingredient>' (creates the pizza if new)."""
    global menu
    _, pizza, ingredient = option.split(" ")
    if pizza not in menu:
        menu[pizza] = [[], 0, 0]
    menu[pizza][0].append(ingredient)


def addPrix(option):
    """Handle 'addPrix <pizza> <price>' for an existing pizza."""
    global menu
    _, pizza, prix = option.split(" ")
    if pizza in menu:
        menu[pizza][1] = int(prix)


def addQte(option):
    """Handle 'addQte <pizza> <qty>' for an existing pizza."""
    global menu
    _, pizza, qte = option.split(" ")
    if pizza in menu:
        menu[pizza][2] += int(qte)
def helpClient():
    """Print the client command reference (text shown verbatim to the user)."""
    print(""" Help :
    - menu (afficher le menu)
    - manger pizzaName (manger une pizza)
    - exit (sortir du restaurant)
    """)


def displayMenu():
    """Print the raw menu dict."""
    global menu
    print(menu)
def manger(action):
    """Handle 'manger <pizza>': decrement stock if the pizza is available.

    Fixed: the function took no parameters but is called as manger(action)
    at the call site, which raised TypeError; it also silently relied on
    the global ``action``. The command string is now a proper parameter.
    """
    global menu
    _, pizza = action.split(" ")
    if pizza in menu and menu.get(pizza)[2] > 0:
        print("bonap")
        menu.get(pizza)[2] -= 1
    else:
        print("pizza non disponible")
# Tiny REPL: "admin" manages the menu, "client" orders pizzas, "exit" quits.
while True:
    login = input("login : ")
    if login == "exit":
        break
    if login=="admin":
        while True:
            option = input("choisir une option : ")
            if option == "exit":
                break
            if option == "help":
                helpAdmin()
            # Handle admin commands
            if option.startswith("addPizza"):
                addPizza(option)
            if option.startswith("addIngredient"):
                addIngredient(option)
            if option.startswith("addPrix"):
                addPrix(option)
            if option.startswith("addQte"):
                addQte(option)
            if option.startswith("saveMenu"):
                saveMenu("./menu.txt",menu)
    if login=="client":
        while True:
            action = input("que voulez vous faire : ")
            if action == "exit":
                print("Au revoir")
                break
            if action == "help":
                helpClient()
            if action =="menu":
                displayMenu()
            if action.startswith("manger"):
                manger(action)
            # Handle client commands
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,221
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/application1.py
|
# Read a name and greet the user.
nom = input("sasissez le nom : \n")
message = "Bonjour " + nom
print(message)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,222
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/application2.py
|
# Read a name and a gender code, then greet with the matching title.
nom = input("sasissez le nom : \n")
genre_val = input("saisir le genre :\n")
# NOTE(review): the original comment claimed "1" selects "Monsieur", but the
# code maps "0" -> "Madame" and everything else -> "Monsieur"; this keeps the
# code's actual behavior — confirm the intended mapping.
genre = "Madame" if genre_val == "0" else "Monsieur"
# Removed: an unused intermediate `msg` template variable.
print("Bonjour {} {}".format(genre, nom))
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,223
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/fonction1.py
|
def cap_(s):
    """Return *s* with its first character upper-cased, the rest lower-cased."""
    return s[0].upper() + s[1:].lower()


def cap(s):
    """Capitalize each dot-separated segment of *s*."""
    return ".".join(cap_(segment) for segment in s.split("."))
def operation(*args):
    """Return (sum, max, min, mean) over the plain-int arguments.

    Booleans and non-int values are ignored (type check is deliberately
    exact, so True/False do not count). Returns four "Liste vide" markers
    when no plain int is present.
    """
    entiers = [valeur for valeur in args if type(valeur) is int]
    if not entiers:
        return ("Liste vide", "Liste vide", "Liste vide", "Liste vide")
    total = sum(entiers)
    return (total, max(entiers), min(entiers), total / len(entiers))
# Demo run of operation().
# Fixed: the results are unpacked into names that no longer shadow the
# built-ins max() and min(); printed output is unchanged.
somme, maximum, minimum, moyenne = operation(1, 2, 3, 4, "chaine", True)
print("somme = ", somme)
print("max = ", maximum)
print("min = ", minimum)
print("moy = ", moyenne)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,224
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/fizzbuzz.py
|
# Single-shot FizzBuzz: build the message, then print once.
n = int(input("saisir n :\n"))
message = n
if n % 3 == 0:
    message = "Fizz"
if n % 5 == 0:
    message = "Buzz"
if n % 15 == 0:
    message = "FizzBuzz"
print(message)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,225
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/modules/menu.py
|
class Menu(object):
    """Collection of Pizza-like objects, printable as a formatted menu."""

    def __init__(self):
        self.pizzas = []

    def addPizza(self, pizza):
        """Append *pizza* to the menu."""
        self.pizzas.append(pizza)

    def removePizza(self, pizza_name):
        """Remove every pizza named *pizza_name*.

        Fixed: the original removed items from the list it was iterating,
        which skips elements; rebuilding the list is safe.
        """
        self.pizzas = [p for p in self.pizzas if p.getName() != pizza_name]

    def getPizza(self, pizzaname):
        """Return the first pizza named *pizzaname*, or None if absent."""
        for pizza in self.pizzas:
            if pizza.getName() == pizzaname:
                return pizza
        return None

    def __repr__(self):
        s = "___ Menu ___\n\n"
        for pizza in self.pizzas:
            s += "\t" + str(pizza) + "\n"
        s += "___________"
        return s
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,226
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/raiseException.py
|
class NegativeValueException(Exception):
    """Raised when the supplied year is zero or negative."""
    pass


def calculAge(annee):
    """Validate *annee*: print conversion errors, raise on non-positive years."""
    try:
        annee = int(annee)
    except ValueError as err:
        print(err)
        return
    print("valeur ok")
    if annee <= 0:
        print(annee)
        raise NegativeValueException("valeur negative")


calculAge(200)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,227
|
badre-Idrissi/tpPython
|
refs/heads/master
|
/STR.py
|
class SrtMultiplicationException(Exception):
    """Raised when a Str instance is multiplied by an int."""
    pass


class Str(str):
    """A str subclass that forbids repetition via the * operator."""

    def __mul__(self, other):
        # Exact type check on purpose: booleans and other types fall
        # through and implicitly return None, as in the original.
        if type(other) is int:
            raise SrtMultiplicationException("Multiplication Interdite")


premier = Str("toto2")
second = Str("toto2")
print(premier == second)
|
{"/pizzeria_python_poo.py": ["/modules/menu.py", "/modules/pizza.py"]}
|
40,239
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0010_auto_20200729_1132.py
|
# Generated by Django 3.0.8 on 2020-07-29 14:32
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('vincular', '0001_initial'),
        ('animals', '0009_auto_20200728_1115'),
    ]

    operations = [
        migrations.AlterField(
            model_name='caravana',
            name='codRFID',
            field=models.OneToOneField(help_text='Seleccione el codigo de RFID', null=True, on_delete=django.db.models.deletion.SET_NULL, to='vincular.RFID'),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,240
|
gasti10/finalProject
|
refs/heads/master
|
/vincular/admin.py
|
from django.contrib import admin
from django.http import HttpResponseRedirect
from .models import RFID
from django.contrib import messages
# Register your models here.
@admin.register(RFID)
class RFIDAdmin(admin.ModelAdmin):
    """Admin for RFID chips with an '_activarlector' (activate reader) button."""

    def response_change(self, request, obj):
        """On change: if the reader button was pressed, store the read code."""
        if "_activarlector" in request.POST:
            # TODO(review): hard-coded code; presumably should come from the
            # actual RFID reader — confirm.
            obj.codigo = 12
            obj.save()
            self.message_user(request, "Se ha efectuado la lectura")
            # Removed: a leftover debug messages.error(...) call that showed
            # a garbled error ("Three credits sa in your account.") to users.
            return HttpResponseRedirect(".")
        return super().response_change(request, obj)

    def response_add(self, request, obj, post_url_continue=None):
        """On add: same reader handling as response_change."""
        if "_activarlector" in request.POST:
            obj.codigo = 15
            obj.save()
            self.message_user(request, "Se ha efectuado la lectura")
        return super().response_add(request, obj)
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,241
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0001_initial.py
|
# Generated by Django 2.2.6 on 2019-11-21 22:36
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Animal',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('sexo', models.CharField(help_text='M --> Macho & H --> Hembra', max_length=5, verbose_name='Sexo')),
                ('raza', models.CharField(help_text='Ingrese la raza si lo considera necesario', max_length=100)),
                ('fecha_nac', models.DateField(help_text='Ingrese la fecha de nacimiento', verbose_name='Fecha de nacimiento')),
                ('edad', models.DateField(help_text='Se autocompleta a partir de la fecha indicada como nacimiento', verbose_name='Edad en dias')),
            ],
        ),
        migrations.CreateModel(
            name='Caravana',
            fields=[
                ('numero', models.IntegerField(primary_key=True, serialize=False)),
                ('CUIG', models.CharField(help_text='Buscar que es !!!!!!!', max_length=100)),
                ('RENSPA', models.CharField(help_text='Buscar que es !!!!!!!!!!!', max_length=100)),
                ('codRFID', models.IntegerField(help_text='Ingrese el codigo de RFID', unique=True, verbose_name='RFID')),
                ('color', models.CharField(help_text='Ingrese el color de la caravana', max_length=100, verbose_name='Color')),
            ],
        ),
        migrations.CreateModel(
            name='Vacuna',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('tipo', models.CharField(help_text='Ingrese el tipo de la vacuna', max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='AnimalVacunado',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('Vacuna', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='animals.Vacuna')),
                ('animal', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='animals.Animal')),
            ],
        ),
        migrations.AddField(
            model_name='animal',
            name='caravana_id',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='animals.Caravana'),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,242
|
gasti10/finalProject
|
refs/heads/master
|
/animals/admin.py
|
from django.contrib import admin
from .models import Animal, Caravana, Vacuna, AnimalVacunado
# Register your models here.
#admin.site.register(Animal)
# Default admin registrations for the remaining models.
admin.site.register(Caravana)
admin.site.register(Vacuna)
admin.site.register(AnimalVacunado)


@admin.register(Animal)
class AnimalAdmin(admin.ModelAdmin):
    """Admin for Animal with a bulk vaccination action."""

    actions = ['aplicarVacuna_GripeA']

    def aplicarVacuna_GripeA(self, request, queryset):
        """Create an AnimalVacunado record for every selected animal."""
        # NOTE(review): count()+1 assumes Vacuna ids are contiguous with no
        # deletions; Vacuna.objects.get(id=num) raises DoesNotExist otherwise.
        # Consider Vacuna.objects.latest('id') — confirm the intent.
        num = Vacuna.objects.count()+1
        for anim in queryset:
            AnimalVacunado.objects.create( animal=anim, vacuna=Vacuna.objects.get(id=num))

    aplicarVacuna_GripeA.short_description = 'Aplicar Vacuna contra COVID-19'
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,243
|
gasti10/finalProject
|
refs/heads/master
|
/animals/views.py
|
from django.shortcuts import render
# Create your views here.
from .models import *
def index(request):
    """Render the site home page with object counts for the dashboard."""
    context = {
        'num_animales': Animal.objects.all().count(),
        'num_caravanas': Caravana.objects.all().count(),
        'num_vacunas': Vacuna.objects.count(),
    }
    return render(request, 'index.html', context=context)
from django.views import generic
class BovinoListView(generic.ListView):
    """List view for animals whose tipo contains 'bovino'."""

    model = Animal
    queryset = Animal.objects.filter(tipo__icontains='bovino')
    # Template variable name for the animal list.
    context_object_name = 'bovinos'
    template_name = 'bovinos.html'

    def get_context_data(self, **kwargs):
        """Extend the base context with all vaccines and vaccination records."""
        context = super().get_context_data(**kwargs)
        context['vacunas'] = Vacuna.objects.all()
        context['animalesVacunados'] = AnimalVacunado.objects.all()
        context['tiene'] = 0
        return context
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,244
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0011_auto_20200729_1141.py
|
# Generated by Django 3.0.8 on 2020-07-29 14:41
from django.db import migrations
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('animals', '0010_auto_20200729_1132'),
    ]

    operations = [
        migrations.RenameField(
            model_name='animal',
            old_name='caravana_id',
            new_name='caravana',
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,245
|
gasti10/finalProject
|
refs/heads/master
|
/vincular/apps.py
|
from django.apps import AppConfig
class VincularConfig(AppConfig):
    """App config for the RFID-chip linking app (sets the admin label)."""
    name = 'vincular'
    verbose_name = "Vincular Chip-RFID"
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,246
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0005_auto_20191128_1907.py
|
# Generated by Django 2.2.6 on 2019-11-28 22:07
from django.db import migrations, models
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('animals', '0004_auto_20191128_1907'),
    ]

    operations = [
        migrations.AlterField(
            model_name='animal',
            name='tipo',
            field=models.CharField(default='Bovino', help_text='Bovino - Ovino - Porcino', max_length=100),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,247
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0003_auto_20191122_1844.py
|
# Generated by Django 2.2.6 on 2019-11-22 21:44
from django.db import migrations, models
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('animals', '0002_auto_20191122_1805'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='animal',
            options={'verbose_name': 'Animal', 'verbose_name_plural': 'Animales'},
        ),
        migrations.AlterModelOptions(
            name='animalvacunado',
            options={'verbose_name': 'Animal vacunado', 'verbose_name_plural': 'Animales vacunados'},
        ),
        migrations.AlterModelOptions(
            name='caravana',
            options={'verbose_name': 'Caravana', 'verbose_name_plural': 'Caravanas'},
        ),
        migrations.AlterModelOptions(
            name='vacuna',
            options={'verbose_name': 'Vacuna', 'verbose_name_plural': 'Vacunas'},
        ),
        migrations.AddField(
            model_name='animal',
            name='tipo',
            field=models.CharField(default='Bovino', help_text='Bovino - Vacuno - Ovino - Porcino', max_length=100),
        ),
        migrations.AlterField(
            model_name='animal',
            name='edad',
            field=models.IntegerField(help_text='Se autocompleta a partir de la fecha indicada como nacimiento', verbose_name='Edad en dias'),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,248
|
gasti10/finalProject
|
refs/heads/master
|
/createAnimals.py
|
# python manage.py shell
from animals.models import *
# Seed 100 caravanas and their animals (run inside `python manage.py shell`).
for i in range(1, 101):
    num = 1217 + i
    # NOTE(review): codRFID is a OneToOneField to vincular.RFID in the current
    # models — passing a bare int here predates that change; confirm.
    c = Caravana.objects.create(numero=num, CUIG="AA 420", RENSPA="8888", codRFID=num)
    # Fixed: the FK was renamed caravana_id -> caravana (migration 0011), and
    # a model instance must be passed via the field name, not <field>_id.
    # Also removed a leftover debug print ("hola").
    a = Animal.objects.create(sexo='M', raza="Vogelsberger Rund", caravana=c)
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,249
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0006_auto_20191210_1819.py
|
# Generated by Django 2.2.6 on 2019-12-10 21:19
import datetime
from django.db import migrations, models
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('animals', '0005_auto_20191128_1907'),
    ]

    operations = [
        migrations.AlterField(
            model_name='animal',
            name='edad',
            field=models.IntegerField(default=1, help_text='Se autocompleta a partir de la fecha indicada como nacimiento', verbose_name='Edad en dias'),
        ),
        migrations.AlterField(
            model_name='animal',
            name='fecha_nac',
            field=models.DateField(default=datetime.date.today, help_text='Ingrese la fecha de nacimiento', verbose_name='Fecha de nacimiento'),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,250
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0002_auto_20191122_1805.py
|
# Generated by Django 2.2.6 on 2019-11-22 21:05
from django.db import migrations, models
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('animals', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='animal',
            name='id',
            field=models.AutoField(primary_key=True, serialize=False, verbose_name='Animal'),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,251
|
gasti10/finalProject
|
refs/heads/master
|
/vincular/migrations/0001_initial.py
|
# Generated by Django 3.0.8 on 2020-07-29 14:32
from django.db import migrations, models
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='RFID',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('codigo', models.IntegerField(default=0, help_text='Codigo de Chip', verbose_name='Codigo de Chip')),
            ],
            options={
                'verbose_name': 'RFID',
                'verbose_name_plural': 'RFIDs',
                'ordering': ['-id'],
            },
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,252
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0008_auto_20191226_1803.py
|
# Generated by Django 2.2.6 on 2019-12-26 21:03
from django.db import migrations, models
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('animals', '0007_auto_20191210_1854'),
    ]

    operations = [
        migrations.AlterField(
            model_name='caravana',
            name='CUIG',
            field=models.CharField(help_text='La obtención de la Clave Única de Identificación Ganadera (CUIG) es un requisito indispensable para adquirir las nuevas caravanas, que identificarán a los animales en forma individual.', max_length=100),
        ),
        migrations.AlterField(
            model_name='caravana',
            name='RENSPA',
            field=models.CharField(help_text='El RENSPA es un registro obligatorio para todas las actividades de producción primaria del sector agrario. El responsable sanitario de la actividad debe aclarar sus datos personales, los del establecimiento y los datos de la explotación.', max_length=100),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,253
|
gasti10/finalProject
|
refs/heads/master
|
/animals/models.py
|
from django.db import models
# Create your models here.
from django.urls import reverse #Used to generate URLs by reversing the URL patterns
import datetime
class Animal(models.Model):
    """
    Model representing a farm animal.
    """
    id = models.AutoField('Animal',primary_key=True)
    sexo = models.CharField('Sexo', help_text="M --> Macho & H --> Hembra", null=False, max_length=5)
    raza = models.CharField(max_length=100, help_text="Ingrese la raza si lo considera necesario")
    tipo = models.CharField(default="Bovino",max_length=100, help_text="Bovino - Ovino - Porcino", null=False)
    fecha_nac = models.DateField('Fecha de nacimiento',default=datetime.date.today,help_text="Ingrese la fecha de nacimiento")
    edad = models.IntegerField('Edad en dias',default=1, help_text="Se autocompleta a partir de la fecha indicada como nacimiento")
    caravana = models.OneToOneField('Caravana',on_delete=models.SET_NULL, null=True)

    def __str__(self):
        """
        String representation of the Animal object.
        """
        # NOTE(review): caravana is nullable (SET_NULL), so this raises
        # AttributeError for an animal without a caravana — confirm intent.
        return self.tipo + " - Caravana Nro " + str(self.caravana.numero)

    class Meta:
        verbose_name = "Animal"
        verbose_name_plural = "Animales"
        ordering = ["-caravana"]
class Caravana(models.Model):
    """
    Model representing an ear-tag (Caravana).
    """
    numero = models.IntegerField(primary_key=True, null=False)
    CUIG = models.CharField(help_text="La obtención de la Clave Única de Identificación Ganadera (CUIG) es un requisito indispensable para adquirir las nuevas caravanas, que identificarán a los animales en forma individual.", max_length=100)
    RENSPA = models.CharField(max_length=100, help_text="El RENSPA es un registro obligatorio para todas las actividades de producción primaria del sector agrario. El responsable sanitario de la actividad debe aclarar sus datos personales, los del establecimiento y los datos de la explotación.")
    codRFID = models.OneToOneField('vincular.RFID',on_delete=models.SET_NULL,help_text="Seleccione el codigo de RFID", null=True)
    # Choice values are stored as the string keys '1'..'4'.
    COLOR_CHOICES = [('1', ('Azul')),
                     ('2', ('Amarillo')),
                     ('3', ('Rojo')),
                     ('4', ('Verde'))]
    # NOTE(review): default "Azul" is not one of the choice keys ('1'..'4');
    # confirm whether the default should be '1'.
    color = models.CharField('Color', default="Azul", max_length=100, help_text="Ingrese el color de la caravana",choices=COLOR_CHOICES)

    def __str__(self):
        """
        String representation of the Caravana object.
        """
        return "Nro "+ str(self.numero) + " - CUIG=" + self.CUIG

    def get_absolute_url(self):
        """
        Return the URL for a particular Caravana instance.
        """
        return reverse('Caravana-detail', args=[str(self.numero)])

    class Meta:
        verbose_name = "Caravana"
        verbose_name_plural = "Caravanas"
        ordering = ["-numero"]
class Vacuna(models.Model):
    """
    Model representing a vaccine.
    """
    id = models.AutoField(primary_key=True)
    tipo = models.CharField(help_text="Ingrese el tipo de la vacuna", max_length=100)

    def __str__(self):
        """
        String representation of the Vacuna object.
        """
        return self.tipo

    def get_absolute_url(self):
        """
        Return the URL for a particular Vacuna instance.
        """
        return reverse('Vacuna-detail', args=[str(self.id)])

    class Meta:
        verbose_name = "Vacuna"
        verbose_name_plural = "Vacunas"
        ordering = ["-id"]
class AnimalVacunado(models.Model):
    """
    Model linking an Animal to a Vacuna it received.
    """
    id = models.AutoField(primary_key=True)
    animal = models.ForeignKey('Animal',on_delete=models.SET_NULL, null=True)
    vacuna = models.ForeignKey('Vacuna',on_delete=models.SET_NULL, null=True)

    def __str__(self):
        """
        String representation of the AnimalVacunado object.
        """
        # Fixed: the Caravana is reached through `caravana` (the FK was
        # renamed from caravana_id in migration 0011); `caravana_id` is the
        # raw pk value, so `.numero` on it raised AttributeError.
        return self.animal.tipo + " Nro " + str(self.animal.caravana.numero) + " vacunado con " + self.vacuna.tipo

    def get_absolute_url(self):
        """
        Return the URL for a particular AnimalVacunado instance.
        """
        return reverse('Animal Vacunado -detail', args=[str(self.id)])

    class Meta:
        verbose_name = "Animal vacunado"
        verbose_name_plural = "Animales vacunados"
        ordering = ["-animal"]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,254
|
gasti10/finalProject
|
refs/heads/master
|
/vincular/models.py
|
from django.db import models
from django.urls import reverse #Used to generate URLs by reversing the URL patterns
# Create your models here.
class RFID(models.Model):
    """
    Model representing an RFID chip.
    """
    # NOTE(review): readonly_fields is a ModelAdmin option, not a model
    # attribute, and no 'created'/'updated' fields exist here — this line
    # appears misplaced; confirm and move it to the admin class.
    readonly_fields = ('created', 'updated')
    id = models.AutoField(primary_key=True)
    # 0 means "no chip number assigned yet" (see __str__).
    codigo = models.IntegerField('Codigo de Chip',help_text="Codigo de Chip", default=0)

    def __str__(self):
        """
        String representation of the RFID object.
        """
        if self.codigo == 0:
            rta = "Numero de Chip NO asignado"
        else:
            rta = str(self.codigo)
        return rta

    def get_absolute_url(self):
        """
        Return the URL to access a particular instance of the model.
        """
        return reverse('model-detail-view', args=[str(self.id)])

    class Meta:
        verbose_name = "RFID"
        verbose_name_plural = "RFIDs"
        ordering = ["-id"]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,255
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0009_auto_20200728_1115.py
|
# Generated by Django 3.0.8 on 2020-07-28 14:15
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated by Django makemigrations — do not edit by hand.
class Migration(migrations.Migration):

    dependencies = [
        ('animals', '0008_auto_20191226_1803'),
    ]

    operations = [
        migrations.AlterField(
            model_name='animal',
            name='caravana_id',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='animals.Caravana'),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,256
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0007_auto_20191210_1854.py
|
# Generated by Django 2.2.6 on 2019-12-10 21:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: give Caravana.color a default value
    ('Azul') plus help text and a verbose name."""

    dependencies = [
        ('animals', '0006_auto_20191210_1819'),
    ]

    operations = [
        migrations.AlterField(
            model_name='caravana',
            name='color',
            field=models.CharField(default='Azul', help_text='Ingrese el color de la caravana', max_length=100, verbose_name='Color'),
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,257
|
gasti10/finalProject
|
refs/heads/master
|
/animals/migrations/0004_auto_20191128_1907.py
|
# Generated by Django 2.2.6 on 2019-11-28 22:07
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: add Meta options (ordering and verbose
    names) to four models and rename AnimalVacunado.Vacuna to lowercase
    'vacuna' for naming consistency."""

    dependencies = [
        ('animals', '0003_auto_20191122_1844'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='animal',
            options={'ordering': ['-caravana_id'], 'verbose_name': 'Animal', 'verbose_name_plural': 'Animales'},
        ),
        migrations.AlterModelOptions(
            name='animalvacunado',
            options={'ordering': ['-animal'], 'verbose_name': 'Animal vacunado', 'verbose_name_plural': 'Animales vacunados'},
        ),
        migrations.AlterModelOptions(
            name='caravana',
            options={'ordering': ['-numero'], 'verbose_name': 'Caravana', 'verbose_name_plural': 'Caravanas'},
        ),
        migrations.AlterModelOptions(
            name='vacuna',
            options={'ordering': ['-id'], 'verbose_name': 'Vacuna', 'verbose_name_plural': 'Vacunas'},
        ),
        migrations.RenameField(
            model_name='animalvacunado',
            old_name='Vacuna',
            new_name='vacuna',
        ),
    ]
|
{"/vincular/admin.py": ["/vincular/models.py"], "/animals/admin.py": ["/animals/models.py"], "/animals/views.py": ["/animals/models.py"], "/createAnimals.py": ["/animals/models.py"]}
|
40,356
|
medyagh/seeing-is-beliebing
|
refs/heads/master
|
/data-strip.py
|
# convenience script to obfuscate, but not completely redact, *some*
# identifier information from a list of Instagram images, such as
# all likes and commenters. Image unique ID is left in so original data
# can be recovered
import json
from random import choice
from string import ascii_letters, digits
# Obfuscate, but do not fully redact, identifying fields in a list of
# Instagram images; each image's unique id is kept so originals can be
# recovered.  Fix: the file handle is now closed via a context manager
# (the original json.load(open(FNAME)) leaked it).
FNAME = './examples/images.json'

with open(FNAME) as fh:
    jdata = json.load(fh)

# The file may hold a full Instagram response ({'data': [...]}) or just
# the bare list from the response's 'data' attribute.
images = jdata if isinstance(jdata, list) else jdata['data']
print("Number of images: %s" % len(images))
for img in images:
    # Remove comments and likes entirely.
    img['likes']['data'] = []
    img['likes']['count'] = 0
    img['comments']['data'] = []
    img['comments']['count'] = 0
    # Replace user info with random tokens (fresh tokens per image).
    tk_username = "".join(choice(ascii_letters) for _ in range(10))
    tk_user_id = "".join(choice(digits) for _ in range(12))
    tk_user_fullname = "Doe " + "".join(choice(ascii_letters) for _ in range(5))
    img['user']['username'] = tk_username
    img['user']['full_name'] = tk_user_fullname
    img['user']['id'] = tk_user_id
    # Caption author must match the (fake) poster identity.
    if img.get('caption'):
        img['caption']['from']['username'] = tk_username
        img['caption']['from']['full_name'] = tk_user_fullname
        img['caption']['from']['id'] = tk_user_id
print(json.dumps(images, indent = 2))
|
{"/beliebe.py": ["/instagram_utils.py"]}
|
40,357
|
medyagh/seeing-is-beliebing
|
refs/heads/master
|
/instagram_utils.py
|
import requests
import json
from datetime import datetime
INSTAGRAM_DOMAIN = 'https://api.instagram.com'
MEDIA_PATH = '/v1/media/shortcode/%s'
MEDIA_SEARCH_PATH = '/v1/media/search'
def extract_shortcode(weburl):
    """
    Extract the bare media shortcode from an Instagram photo URL.

    weburl (str): either a full URL such as
        "https://instagram.com/p/6xXvJqwi-k/?taken-by=danwinny"
        or an already-bare shortcode like "6xXvJqwi-k".
    returns (str): the shortcode, e.g. "6xXvJqwi-k".
    """
    parts = weburl.split('instagram.com/p/')
    if len(parts) < 2:
        # No URL marker: assume the caller already passed a bare shortcode.
        return weburl
    # Keep everything up to the next path separator.
    return parts[1].split('/', 1)[0]
def get_image_from_shortcode(shortcode, access_token):
    """
    Fetch a single Instagram image object by its web shortcode.

    shortcode (str):
        A web shortcode for an instagram image, e.g.
        '6xXvJqwi-k' from "https://instagram.com/p/6xXvJqwi-k"
    access_token (str): Instagram API token.
    returns (dict):
        An object representing an Instagram image
    """
    endpoint = INSTAGRAM_DOMAIN + (MEDIA_PATH % shortcode)
    payload = requests.get(endpoint, params={"access_token": access_token}).json()
    return payload['data']
def get_images_near_coordinates(lat, lng, distance_in_meters,
        min_timestamp, max_timestamp, access_token):
    """
    Page backwards through Instagram media search results for a location.

    lat / lng: search center coordinates.
    distance_in_meters: search radius.
    min_timestamp / max_timestamp: UTC-seconds search window.
    access_token (str): Instagram API token.
    returns (list):
        All unique images found in the search (deduplicated by image id).
    more info:
        https://instagram.com/developer/endpoints/media/#get_media_search
    """
    images_dict = {}
    ix = 0
    atts = {
        'access_token': access_token,
        'distance': distance_in_meters,
        'lat': lat,
        'lng': lng,
        'min_timestamp': min_timestamp,
        'max_timestamp': max_timestamp
    }
    base_url = INSTAGRAM_DOMAIN + MEDIA_SEARCH_PATH
    # Repeatedly fetch, shrinking max_timestamp toward min_timestamp, until
    # a batch comes back empty or the window is exhausted.
    while True:
        print("%s loop; from %s to %s:\t%s images total" % (ix,
            datetime.fromtimestamp(atts['min_timestamp']),
            datetime.fromtimestamp(atts['max_timestamp']),
            len(images_dict)))
        try:
            ix += 1
            resp = requests.get(base_url, params = atts).json()
            new_images = resp['data']
        except Exception as e:
            # Any request/JSON/shape error ends paging; partial results
            # collected so far are still returned.
            print("There was an error while retrieving images on %s loop" % ix)
            print(e)
            break
        except KeyboardInterrupt:
            # Ctrl-C (not caught by `except Exception`): stop paging but
            # still return what was collected.
            break
        else:
            if new_images:
                # Dict keyed by image id deduplicates across pages.
                for img in new_images:
                    images_dict[img['id']] = img
                # get new max timestamp from oldest image in the latest batch
                # and subtract one second
                oldest_ts = int(new_images[-1]['created_time']) - 1
                # Instagram's search filter is lax so sometimes photos
                # that are newer than max_timestamp are let in...we need to do
                # our own check
                if oldest_ts >= atts['max_timestamp'] or atts['min_timestamp'] >= atts['max_timestamp']:
                    break
                else:
                    atts['max_timestamp'] = oldest_ts
            else:
                print("get out")
                break # out of while loop
        # end try/else
    # end while
    return list(images_dict.values())
def get_images_near_some_other_image_via_shortcode(shortcode, access_token,
        seconds_before, seconds_after, dist_m):
    """
    An omni-wrapper method: fetch the image for `shortcode`, then search
    for images taken near it in both space and time.

    shortcode (str):
        A web shortcode for an instagram image, e.g.
        '6xXvJqwi-k' from "https://instagram.com/p/6xXvJqwi-k"
    access_token (str): Instagram API token.
    seconds_before / seconds_after (int): time window around the origin
        image's created_time.
    dist_m (int): search radius in meters.
    returns (list):
        A list of images near `image` as extracted from Instagram media
        search; the origin image (if found again) is tagged with
        'is_origin': True.
    raises: KeyError/TypeError when the origin image has no location data.
    """
    origin_image = get_image_from_shortcode(shortcode, access_token)
    # attempt to extract location
    try:
        lat = origin_image['location']['latitude']
        lng = origin_image['location']['longitude']
    except (KeyError, TypeError) as e:
        # Dump the payload for debugging, then propagate the error.
        print("Target image lacks location information")
        print(json.dumps(origin_image, indent = 2))
        raise e
    # extract time as UTC seconds
    dts = int(origin_image['created_time'])
    nearby_images = get_images_near_coordinates(lat = lat, lng = lng,
        distance_in_meters = dist_m,
        min_timestamp = dts - seconds_before,
        max_timestamp = dts + seconds_after,
        access_token = access_token
    )
    # ad-hoc, add a label to the origin_image as found by the shortcode
    for img in nearby_images:
        if img['id'] == origin_image['id']:
            img['is_origin'] = True
            break
    return nearby_images
|
{"/beliebe.py": ["/instagram_utils.py"]}
|
40,358
|
medyagh/seeing-is-beliebing
|
refs/heads/master
|
/beliebe.py
|
from datetime import datetime
from instagram_utils import extract_shortcode, get_images_near_some_other_image_via_shortcode
from os import environ, makedirs
from os.path import join
from shutil import copytree
import argparse
import json
# Instagram API access token from the environment.  Fix: the original used
# environ['INSTAGRAM_TOKEN'], which raises KeyError when the variable is
# unset -- even though TOKENHELPMSG below explicitly handles the "no default
# set" case.  environ.get lets the CLI start and require --token instead.
DEFAULT_TOKEN = environ.get('INSTAGRAM_TOKEN', '')
TOKENHELPMSG = "Default is %s" % DEFAULT_TOKEN if DEFAULT_TOKEN else "(no default set)"
# Defaults for the time window (minutes) and radius (meters) of the search.
DEFAULT_BEFORE_MIN = 30
DEFAULT_AFTER_MIN = 240
DEFAULT_DISTANCE_M = 500
def beliebe(shortcode, args):
    """
    Fetch images taken near the photo identified by `shortcode`.

    shortcode (str): bare Instagram media shortcode.
    args: parsed CLI namespace providing token, minutes_before,
        minutes_after and distance_in_meters.
    returns (list): nearby image objects from the media search.
    """
    return get_images_near_some_other_image_via_shortcode(
        shortcode,
        access_token = args.token,
        seconds_before = args.minutes_before * 60,
        seconds_after = args.minutes_after * 60,
        dist_m = args.distance_in_meters,
    )
if __name__ == '__main__':
    # CLI: take a shortcode/URL plus search-window options, run the search,
    # and emit a static viewer page with the results as JSON.
    parser = argparse.ArgumentParser()
    parser.add_argument("shortcode", nargs = 1,
        help = "Instagram web URL/shortcode")
    parser.add_argument("--token", '-t', default = DEFAULT_TOKEN,
        help = "Instagram access token %s" % TOKENHELPMSG)
    parser.add_argument("--minutes-before", '-b', default = DEFAULT_BEFORE_MIN,
        type = int,
        help = "Limit search to photos X minutes-or-less before target photo's timestamp. Default is %s" % DEFAULT_BEFORE_MIN)
    parser.add_argument("--minutes-after", '-a', default = DEFAULT_AFTER_MIN,
        type = int,
        help = "Limit search to photos X minutes-or-less after target photo's timestamp. Default is %s" % DEFAULT_AFTER_MIN)
    parser.add_argument("--distance-in-meters", '-d', default = DEFAULT_DISTANCE_M,
        type = int,
        help = "Limit search to photos . Default is within X number of meters from target photo location. Default is %s" % DEFAULT_DISTANCE_M)
    args = parser.parse_args()
    shortcode = extract_shortcode(args.shortcode[0])
    print("Fetching images near %s" % shortcode)
    nearby_images = beliebe(shortcode, args)
    # One page directory per run, timestamped so reruns don't collide.
    pdir = "./pages/" + shortcode + '--' + datetime.now().strftime("%Y-%m-%d_%H%M%S")
    # save into directory: copy the static viewer template, then drop the
    # fetched data next to it.
    copytree('./template', pdir)
    datadir = join(pdir, 'data')
    makedirs(datadir)
    with open(join(datadir, 'images.json'), 'w') as fd:
        json.dump(nearby_images, fd, indent = 2)
    print("""
Run:
    python3 -m http.server
In your browser, visit:
    http://localhost:8000/{page_path}
""".format(page_path = pdir[2:]))
|
{"/beliebe.py": ["/instagram_utils.py"]}
|
40,381
|
parthvyas103/important_events
|
refs/heads/main
|
/services.py
|
from typing import Dict
import json as _json
import datetime as dt
def get_all_events() -> Dict:
with open("events.json", encoding='utf-8') as events_file:
data = _json.load(events_file)
return data
def get_month_events(month: str) -> Dict:
    """Return all events for `month` (case-insensitive).

    Returns the string "Month does not exist" for an unknown month,
    mirroring the API's existing error contract.
    """
    events = get_all_events()
    key = month.lower()
    if key not in events:
        return "Month does not exist"
    return events[key]
def get_day_events(month: str, day: int) -> Dict:
    """Return the events for a given month and day.

    Returns the string "Day does not exist" when either the month or the
    day key is missing, mirroring the API's existing error contract.
    """
    try:
        return get_all_events()[month.lower()][str(day)]
    except KeyError:
        return "Day does not exist"
def get_today_events():
    """Return the events for today's date (local time)."""
    now = dt.date.today()
    return get_day_events(now.strftime("%B"), now.day)
|
{"/main.py": ["/services.py"]}
|
40,382
|
parthvyas103/important_events
|
refs/heads/main
|
/scrape.py
|
import requests as _requests
import bs4 as _bs4
from typing import List
def _generate_url(month: str, day : int) -> str:
url = f"https://www.onthisday.com/events/{month}/{day}"
return url
def _get_page(url : str) -> _bs4.BeautifulSoup:
    """Fetch `url` over HTTP and return its parsed HTML soup."""
    response = _requests.get(url)
    return _bs4.BeautifulSoup(response.content, "html.parser")
def events_of_the_day(month: str, day: int) -> List[str]:
    """Scrape and return the event texts listed for `month`/`day`.

    Also prints the list (kept for parity with the original behavior).
    """
    soup = _get_page(_generate_url(month, day))
    events = [node.text for node in soup.find_all(class_="event")]
    print(events)
    return events
# Fix: the original bare call fired a network request whenever this module
# was imported; run the demo scrape only when executed as a script.
if __name__ == "__main__":
    events_of_the_day("december", 2)
|
{"/main.py": ["/services.py"]}
|
40,383
|
parthvyas103/important_events
|
refs/heads/main
|
/main.py
|
from fastapi import FastAPI
import services as _services
app = FastAPI()
@app.get("/")
async def root():
return {"message":"Welcome to Events API"}
@app.get("/events")
async def all_events():
return _services.get_all_events()
@app.get("/events/today")
async def get_today_events():
return _services.get_today_events()
@app.get("/events/{month}")
async def get_month_events(month: str):
return _services.get_month_events(month)
@app.get("/events/{month}/{day}")
async def get_day_events(month: str, day :int):
return _services.get_day_events(month, day)
|
{"/main.py": ["/services.py"]}
|
40,391
|
andrewlilley/office_intro_music
|
refs/heads/master
|
/predict_faces_live.py
|
# check out http://www.clipconverter.cc/ to get clips
import cv
import cv2
import datetime
import gevent
import json
import numpy as np
import os
# from read_from_web_cam import get_cv_img_from_ip_cam
from camera_input.read_from_canon_cam import get_cv_img_from_canon_cam
from utils.face_utils import face_detect_on_photo, normalize_face_for_save, OUTPUT_DIRECTORY, get_recognizer
from utils.sonos_utils import find_sonos_device, play_mp3s, currently_playing_intro
from engineer_list import ENGINEER_TO_MP3
# File extension for saved face crops.
EXTENSION = ".jpg"
# Lower as necessary
CONFIDENCE_THRESHOLD = 2000
music_dict = ENGINEER_TO_MP3
# Per-person flag: True once that person's intro has played today.
has_played_today = {k: False for k in music_dict.keys()}
# Lazy cache for the labels.txt mapping (populated on first lookup).
global_label_dict = {}
def reset_has_played_today_if_necessary():
    """Clear the per-person played flags at the end of the day.

    NOTE(review): the reset only happens if this is called during the
    exact 23:59 minute; if no frame is processed in that window the flags
    carry over into the next day -- confirm this is acceptable.
    """
    now = datetime.datetime.now()
    if now.hour == 23 and now.minute == 59:
        for key in has_played_today.keys():
            has_played_today[key] = False
def observe_faces(recognizer):
    """Main capture loop: show frames, run detection/recognition, play intros.

    recognizer: a trained OpenCV face recognizer, or None to only save
        detected faces without predicting identities.
    """
    cv2.namedWindow("Live View", cv2.CV_WINDOW_AUTOSIZE)
    cv2.namedWindow("Face", cv2.CV_WINDOW_AUTOSIZE)
    # Blocks until the target Sonos speaker is found.
    sonos_device = find_sonos_device()
    print "starting video capture..."
    while True:
        for cv_array in get_cv_img_from_canon_cam():
            reset_has_played_today_if_necessary()
            try:
                cv2.imshow("Live View", cv_array) # cv_array is a numpy array
                cv2.waitKey(1)
            except cv2.error:
                continue # cv_array was malformed, ignore and move to next frame
            try:
                # Legacy cv (OpenCV 1.x) image needed by the detection helper.
                img = cv.fromarray(cv_array)
            except TypeError:
                print "Warning...got malformed JPEG data"
                continue
            if currently_playing_intro():
                continue # dont do facial detection while music is playing
            detect_and_handle_faces(img, sonos_device, recognizer)
def detect_and_handle_faces(img, sonos_device=None, recognizer=None):
    """Detect faces in `img`; optionally predict identity, play music, save.

    img: legacy-cv image frame.
    sonos_device: speaker to play intros on; None disables playback.
    recognizer: trained recognizer; None disables prediction (faces are
        saved unlabeled).
    """
    faces = face_detect_on_photo(img)
    for face in faces:
        face = normalize_face_for_save(face)
        face = np.asarray(face)
        cv2.imshow("Face", face)
        if recognizer is not None:
            [label_id, confidence] = recognizer.predict(face)
            # Lower confidence value = better match for this recognizer.
            if confidence <= CONFIDENCE_THRESHOLD:
                person = get_person_from_label(label_id)
                print "Predicting %s with %s confidence" % (person, confidence)
                if sonos_device is not None:
                    try_to_play_music(label_id, sonos_device)
        else:
            label_id = None
        # Save every face crop (labeled when a recognizer was supplied).
        save_face(face, label_id)
def save_face(face, label_id):
    """Write a face crop to OUTPUT_DIRECTORY, tagged with a timestamp and
    (when label_id is given) the predicted person's name."""
    person = get_person_from_label(label_id) if label_id is not None else ""
    filename = datetime.datetime.now().strftime("%m%d%Y_%H%M%S_%f")
    # e.g. "scott_lobdell"; empty string when unlabeled.
    canonical_person = person.lower().replace(" ", "_")
    filename = "_" + filename + "_" + canonical_person
    full_path = "/".join((OUTPUT_DIRECTORY, filename,)) + EXTENSION
    print "saving %s" % full_path
    cv2.imwrite(full_path, face)
def try_to_play_music(label_id, sonos_device):
    """Play a person's intro playlist once per day, asynchronously.

    Spawns play_mp3s on a gevent greenlet so the capture loop is not
    blocked, then marks the person as played for today.
    """
    person = get_person_from_label(label_id)
    if person not in has_played_today or person not in music_dict:
        print "HEY! THERE'S NO MUSIC FOR %s" % person
        return
    if has_played_today[person]:
        return
    mp3_urls = music_dict.get(person)
    gevent.spawn(play_mp3s, mp3_urls, sonos_device)
    # Yield control so the spawned greenlet can start immediately.
    gevent.sleep(0)
    has_played_today[person] = True
def get_person_from_label(label_id):
    """Map a recognizer label id to a person's name.

    Lazily loads labels.txt (a JSON object mapping label-id strings to
    names) on first use and memoizes the parsed mapping in the module's
    global_label_dict cache.
    """
    key = "global_label_key"
    label_id = str(label_id)
    if key not in global_label_dict:
        with open("labels.txt", "r") as fh:
            global_label_dict[key] = json.loads(fh.read())
    return global_label_dict[key][label_id]
def recognize_faces():
    """Load the trained recognizer and start the observation loop."""
    recognizer = get_recognizer()
    observe_faces(recognizer)
if __name__ == "__main__":
    # Ensure the directory for saved face crops exists before capturing.
    if not os.path.exists(OUTPUT_DIRECTORY):
        os.makedirs(OUTPUT_DIRECTORY)
    recognize_faces()
|
{"/retrain_recognizer.py": ["/predict_faces_live.py"]}
|
40,392
|
andrewlilley/office_intro_music
|
refs/heads/master
|
/retrain_recognizer.py
|
from predict_faces_live import train_recognizer, RECOGNIZER_FILENAME
import cv2
if __name__ == "__main__":
    # Retrain the Fisherface recognizer and persist it to disk.
    # NOTE(review): predict_faces_live (as present in this repo) defines
    # neither train_recognizer nor RECOGNIZER_FILENAME, so this import flow
    # looks broken -- confirm where those names now live (utils.face_utils?).
    recognizer = cv2.createFisherFaceRecognizer()
    recognizer = train_recognizer(recognizer)
    recognizer.save(RECOGNIZER_FILENAME)
|
{"/retrain_recognizer.py": ["/predict_faces_live.py"]}
|
40,393
|
andrewlilley/office_intro_music
|
refs/heads/master
|
/voice_intro_creation.py
|
# THIS FILE WILL ONLY WORK ON MAC OS X
import pyttsx
import Foundation
import random
from engineer_list import ENGINEER_TO_MP3
# Touch the NSURL attribute so PyObjC resolves it up front (no other effect).
Foundation.NSURL
engine = pyttsx.Engine()
voices = engine.getProperty("voices")
# Select the "Alex" system voice when available.
for voice in voices:
    print voice.id
    if "Alex" in str(voice.id): # cannot arbitrarily create objc strings
        engine.setProperty("voice", voice.id)
# engine.setProperty("voice", voice.id)
# engine.say("this is a test")
# engine.runAndWait()
engine.setProperty("rate", 150)
engine.proxy._driver._proxy.setBusy(False) # must set to False in order for above properties to take effect
# Spoken greeting prefixes; one is chosen at random per engineer.
# Fix: a missing comma after the "blue corner" entry caused implicit string
# concatenation, silently merging it with "all rise for" into one greeting.
GREETING = [
    "hello",
    "good morning",
    "top of the morning",
    "welcome",
    "all hail",
    "behold",
    "in the red corner hailing from San Francisco",
    "in the blue corner hailing from far east in Canada",
    "all rise for",
]
# Random nicknames spoken between the first and last name.
NICKNAMES = [
    "Iceman",
    "Maverick",
    "Crazy Fingers",
    "Deepwoods",
    "The Blizzard",
    "Smoke",
    "Ice Box",
    "Spike",
    "Whack Attack",
    "Tee Bone"
]
# Random closing titles appended after the name.
# NOTE(review): "fortold" is misspelled ("foretold") but it is spoken
# runtime text; left unchanged here to avoid altering output.
RANDOM_OUTROS = [
    "destroyer of all the universe",
    "first of his name and protector of product",
    "lord commander of Hearsay Social",
    "senior compliance engineer and marketing blocker",
    "senior underling to the lord commander of Hearsay Social",
    "assistant to the chief technical officer",
    "heavy weight software engineering champion",
    "the one whom the prophecies fortold",
    "the one man wolf pack",
    "the legendary coder who allegedly doesn't even use vim"
]
def to_filename(name):
    """Convert a display name to a lowercase, underscore-separated .wav filename."""
    return name.lower().replace(" ", "_") + ".wav"
# Synthesize one intro .wav per engineer: "<greeting> <first> <nickname>
# <last> <outro>", written next to the script.
for engineer in ENGINEER_TO_MP3.keys():
    # NOTE(review): `file` shadows the builtin; harmless here but worth
    # renaming if this code is touched again.
    file = Foundation.NSURL.fileURLWithPath_(to_filename(engineer))
    intro = random.choice(GREETING)
    outro = random.choice(RANDOM_OUTROS)
    nickname = random.choice(NICKNAMES)
    # Assumes "First Last" names; split(" ")[1] takes the second token.
    first = engineer.split(" ")[0]
    last = engineer.split(" ")[1]
    thing_to_say = " ".join((intro, first, nickname, last, outro))
    # Private PyObjC driver API: speak directly into the target file.
    engine.proxy._driver._tts.startSpeakingString_toURL_(thing_to_say, file)
    print thing_to_say
# USE MP3-CONVERTER TO CONVERT TO MP3 AFTERWARD
|
{"/retrain_recognizer.py": ["/predict_faces_live.py"]}
|
40,394
|
andrewlilley/office_intro_music
|
refs/heads/master
|
/utils/sonos_utils.py
|
import datetime
import time
import soco
# Discover Sonos devices on the LAN once at import time.
sonos_devices = soco.SonosDiscovery()
# Zone name of the speaker to control.
TARGET_SPEAKER = "Engineering"
# Volume (0-100 scale) used while an intro is playing.
NEW_VOLUME = 75
# Once the last song has played this many seconds, start fading out.
SECONDS_TO_LOWER_VOLUME = 5
# Percent reduction applied per fade step.
VOLUME_REDUCTION_STEP_COUNT = 5
# Module-level flag: True while intro music is playing.
is_playing_music = False
def find_sonos_device():
    """Block until the speaker whose zone name matches TARGET_SPEAKER is
    found, then return its SoCo handle.  Loops (and warns) forever if the
    speaker never appears."""
    print "Starting search for Sonos speaker..."
    while True:
        for ip in sonos_devices.get_speaker_ips():
            device = soco.SoCo(ip)
            if device.get_speaker_info().get("zone_name") == TARGET_SPEAKER:
                print "Found %s" % TARGET_SPEAKER
                return device
        print "WARNING! Did not find a Sonos device! Retrying..."
def currently_playing_intro():
    """Return True while an intro is playing (reads the module-level flag)."""
    global is_playing_music
    return is_playing_music
def get_current_state(device):
    """Snapshot the device's playback state so it can be restored later.

    returns (tuple): (transport_state, playlist_position, position) where
        playlist_position is converted to 0-based indexing.
    """
    current_state = device.get_current_transport_info().get("current_transport_state")
    track_info = device.get_current_track_info()
    playlist_position = int(track_info["playlist_position"]) - 1
    position = track_info["position"]
    title = track_info["title"]
    print "Current state is %s playing %s at position %s at %s" % (current_state, title, playlist_position, position)
    return current_state, playlist_position, position
def reduce_volume(device, current_volume):
    """Lower the volume by VOLUME_REDUCTION_STEP_COUNT percent and return
    the new value so the caller can track it across fade steps."""
    current_volume *= 1.0 * (100 - VOLUME_REDUCTION_STEP_COUNT) / 100.0
    device.volume(current_volume)
    print "lowering volume to %s" % current_volume
    return current_volume
def return_to_original_state(device, initial_state, playlist_position, position, updated_playlist_queue, initial_volume, mp3_urls):
    """Undo the intro: remove the queued MP3s, restore volume, and resume
    (or stop) the previously playing track, then clear the playing flag."""
    global is_playing_music
    try:
        # Remove our appended items from the tail of the queue, last first.
        for num_added in xrange(len(mp3_urls)):
            id_to_remove = len(updated_playlist_queue) - num_added
            print "Removing %s from queue" % id_to_remove
            device.remove_from_queue(id_to_remove)
            print "successfully removed item from queue"
    except:
        # NOTE(review): bare except; on any failure the whole queue is wiped.
        print "ERROR! CLearing the entire queue!"
        device.clear_queue()
    print "Setting the volume back to its original state..."
    device.volume(initial_volume)
    try:
        print "Restarting the track that was playing before..."
        device.play_from_queue(playlist_position)
    except:
        # NOTE(review): this early return leaves is_playing_music True
        # forever, permanently disabling face detection in the capture
        # loop -- confirm whether the flag should be cleared here too.
        return
    if initial_state != "PLAYING":
        print "Sonos was not playing previously, stopping Sonos."
        device.stop()
    else:
        print "Seeking to the previous position of the song..."
        device.seek(position)
    print "DONE"
    is_playing_music = False
def wait_to_finish_playing(device, mp3_urls):
    """Poll until the intro playlist ends, fading out near the finish.

    Once the last URL in mp3_urls starts playing and has run for
    SECONDS_TO_LOWER_VOLUME seconds, the volume is stepped down each poll;
    the loop exits when the volume drops below 15 or playback stops.
    """
    state = device.get_current_transport_info().get("current_transport_state")
    time_last_song_start = None
    current_volume = NEW_VOLUME
    while current_volume >= 15 and (state == "PLAYING" or state == "TRANSITIONING"):
        print "waiting for theme music to stop..."
        if time_last_song_start is None:
            track_info = device.get_current_track_info()
            currently_playing = track_info.get("uri")
            if len(mp3_urls) > 1 and currently_playing == mp3_urls[-1]:
                time_last_song_start = datetime.datetime.now()
                print "Detected that the last song in the list is being played..."
        else:
            seconds_elapsed = (datetime.datetime.now() - time_last_song_start).total_seconds()
            if seconds_elapsed >= SECONDS_TO_LOWER_VOLUME:
                current_volume = reduce_volume(device, current_volume)
        time.sleep(0.1) # there's already a lot of latency going on
        state = device.get_current_transport_info().get("current_transport_state")
def add_items_to_queue(device, mp3_urls):
    """Append each MP3 URL to the device queue; return the updated queue."""
    for mp3_url in mp3_urls:
        print "Adding URL to queue: %s" % mp3_url
        device.add_to_queue(mp3_url)
    updated_playlist_queue = device.get_queue()
    return updated_playlist_queue
def play_mp3s(mp3_urls, device):
    """Play an intro playlist, then restore the speaker's previous state.

    Re-entrancy is guarded by the module-level is_playing_music flag:
    a second call while music plays is a no-op.
    """
    global is_playing_music
    if is_playing_music:
        return
    is_playing_music = True
    print "Trying to play an mp3..."
    # Capture state so return_to_original_state can undo everything.
    initial_state, playlist_position, position = get_current_state(device)
    initial_volume = device.volume()
    updated_playlist_queue = add_items_to_queue(device, mp3_urls)
    print "length of mp3 urls is %s" % len(mp3_urls)
    print "length of current playlist queue is %s" % len(updated_playlist_queue)
    print "playing %s" % (len(updated_playlist_queue) - len(mp3_urls))
    # Start from the first of the items we just appended.
    device.play_from_queue(len(updated_playlist_queue) - len(mp3_urls))
    device.volume(NEW_VOLUME)
    wait_to_finish_playing(device, mp3_urls)
    return_to_original_state(device, initial_state, playlist_position, position, updated_playlist_queue, initial_volume, mp3_urls)
|
{"/retrain_recognizer.py": ["/predict_faces_live.py"]}
|
40,395
|
andrewlilley/office_intro_music
|
refs/heads/master
|
/engineer_list.py
|
from utils.face_utils import OUTPUT_DIRECTORY
import os
# S3 URL template for each engineer's spoken-intro MP3.
INTRO_BASE = "https://s3.amazonaws.com/intro_voice/%s.mp3"
# Engineer display name -> list of theme-song MP3 URLs.
ENGINEER_TO_SONG = {
    "Scott Lobdell": ["https://s3.amazonaws.com/theme_music/Mike+Jones+My+64+Ft+Lil+Eazy+E%2C+Snoop+Dogg%2C+Bun+B.mp3"],
    # other engineer names go here
}
def _name_to_formatted(name):
    """Return the S3 intro-voice MP3 URL for an engineer's display name."""
    slug = name.lower().replace(" ", "_")
    return INTRO_BASE % slug
# TODO: clean up the names here, they're really sloppy simply because of
# evolving code
# Final mapping: engineer -> [spoken intro MP3] followed by theme songs.
ENGINEER_TO_MP3 = {k: [_name_to_formatted(k)] + v for k, v in ENGINEER_TO_SONG.items()}
if __name__ == "__main__":
    # Create one output subdirectory per engineer and warn about any
    # existing directories that have no matching music entry.
    if not os.path.exists(OUTPUT_DIRECTORY):
        os.makedirs(OUTPUT_DIRECTORY)
    for engineer_name in ENGINEER_TO_MP3.keys():
        full_path = "/".join((OUTPUT_DIRECTORY, engineer_name,))
        if not os.path.exists(full_path):
            print "Creating directory %s" % full_path
            os.makedirs(full_path)
    for directory in os.listdir(OUTPUT_DIRECTORY):
        if directory not in ENGINEER_TO_MP3:
            print "%s is not in the music list!" % directory
|
{"/retrain_recognizer.py": ["/predict_faces_live.py"]}
|
40,422
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/tasks.py
|
from celery import shared_task
from celery.utils.log import get_task_logger
import requests
import json
from . import models
logger = get_task_logger(__name__)
@shared_task
def post_model(data):
    """Upload a model's EPW/IDF files plus metadata to the FMU generator.

    data (dict): must contain 'model_name' and 'Authorization'; the whole
        dict is forwarded as the 'json' part of the multipart upload.
    returns (str): "<response body> : <status code>", or None when the
        model lookup yields nothing.

    Fix: the files are now opened with context managers so they are closed
    even when the POST raises (the original leaked both handles on error).
    """
    logger.info(f'post_model data {data}')
    model_name = data['model_name']
    auth_t = data['Authorization']
    logger.info(f'post_model data AUTH {auth_t}')
    # NOTE(review): objects.get raises DoesNotExist rather than returning
    # None, so this guard may never be False -- confirm intended semantics.
    model = models.FmuModel.objects.get(model_name=model_name)
    if model is not None:
        url = 'http://generator:8000/file_upload/{0}'.format(model_name)
        logger.info(f'post_model url {url}')
        with open(model.epw_file.path, 'rb') as epw_file, \
                open(model.idf_file.path, 'rb') as idf_file:
            file = {'epw': (model_name + '.epw', epw_file, 'application/octet-stream'),
                    'idf': (model_name + '.idf', idf_file, 'application/octet-stream'),
                    'json': (None, json.dumps(data), 'application/json')}
            r = requests.post(url, files=file)
        return "{0} : {1}".format(r.content, r.status_code)
@shared_task
def send_fmu(data):
    """Forward FMU metadata to the generator's fmu_to_simulator endpoint.

    returns (str): "<response body> | <status code>".
    """
    logger.info(f'send_fmu data {data}')
    target = 'http://generator:8000/fmu_to_simulator/{0}'.format(data['model_name'])
    response = requests.post(url=target, json=data)
    return "{0} | {1}".format(response.content, response.status_code)
@shared_task
def post_input(data):
    """Build (but currently never send) a model-input POST for a container.

    NOTE(review): this task constructs url/headers/data and then falls off
    the end without issuing any request or returning anything (compare
    post_router_input, which actually posts).  This looks like dead or
    deliberately disabled code -- confirm before "fixing" it.
    """
    logger.info(f'post_input data {data}')
    input_instance = models.Input.objects.last()
    if input_instance is not None:
        url = 'http://{0}:8000/model_input'.format(data['container_id'])
        logger.info(f'post_input url {url}')
        headers = {'Content-type': 'application/json'}
        data = {'time_step': data['time_step']}
@shared_task
def post_router_input(data):
    """Forward simulation input to the router service for a container.

    data (dict): must contain 'container_id'; the full dict is wrapped as
        {'data': data} and sent to the router.
    returns (int): HTTP status code, or None when no Input rows exist.
    """
    logger.info(f'post_input data {data}')
    # NOTE(review): input_instance only gates the request and is otherwise
    # unused -- confirm this existence check is intentional.
    input_instance = models.Input.objects.last()
    if input_instance is not None:
        url = 'http://router:8000/route_input/{0}'.format(data['container_id'])
        logger.info(f'post_input url {url}')
        headers = {'Content-type': 'application/json'}
        data = {'data': data}
        # json=json.dumps(...) double-encodes the payload (a JSON string is
        # sent as the JSON body); the router side appears to compensate with
        # json.loads, so this is left unchanged.
        r = requests.post(url=url, json=json.dumps(data), headers=headers)
        return r.status_code
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,423
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/input_output_router/router_api.py
|
import json
from bottle import request, route, run
from celery.utils.log import get_task_logger
from router_tasks import post_input, post_output
logger = get_task_logger(__name__)
@route('/route_input/<container_id>', method='POST')
def route_input(container_id):
    """Queue the posted input payload for the given simulator container."""
    post_input.apply_async((request.json, container_id), queue='router', routing_key='input')
@route('/route_output/', method='POST')
def route_output():
    """Queue simulator output for processing.

    The body arrives double-encoded (a JSON string as the JSON body), so it
    is decoded once more here before dispatching.
    """
    output_data = json.loads(request.json)
    post_output.apply_async((output_data,), queue='router', routing_key='output')

# Development server settings; debug/reloader should be off in production.
run(host='0.0.0.0', port=8000, debug=True, reloader=True)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,424
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_generator/run_eptf.py
|
import os
class RunEnergyPlusToFMU:
    """Wraps the EnergyPlusToFMU command-line tool to build an FMU from an
    EnergyPlus IDF model and an EPW weather file."""

    def __init__(self, idf='_fmu-export-variable.idf', epw='USA.epw', directory='/home/fmu/code/energy/test'):
        # idf: EnergyPlus model file name; epw: weather file name;
        # directory: working directory in which the tool runs and the FMU
        # artifacts are produced.
        self.idf = idf
        self.epw = epw
        self.directory = directory

    def run(self):
        """Shell out to EnergyPlusToFMU.py, then run the export-prep binary.

        NOTE(review): the command is built by string concatenation and run
        via os.system -- acceptable for trusted config, unsafe for
        untrusted file names (subprocess with an argument list would be
        more robust).  Returns 'success' regardless of the exit status.
        """
        eplus_command = "cd " + self.directory + " && \
python /home/fmu/code/energy/Scripts/EnergyPlusToFMU.py -d -i /home/fmu/code/Energy+.idd \
-w " + self.epw + " " + self.idf + " && ./idf-to-fmu-export-prep-linux \
/home/fmu/code/Energy+.idd " + self.idf
        os.system(eplus_command)
        return 'success'
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,425
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/tests/test_views.py
|
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from rest_api.models import User, FmuModel
# TODO write tests
class FmuModelViewTest(APITestCase):
    """
    Test FmuModelParameters View
    """

    def setUp(self) -> None:
        # NOTE(review): assigning password= directly stores it in plain
        # text; User.objects.create_user / set_password would hash it.
        # Works here because authentication uses force_login + token.
        self.test_user = User(name='test_user',
                              email='test@test.com',
                              password='test user 88')
        self.test_user.save()
        self.token = Token.objects.create(
            user=self.test_user
        )

    def test_Model_Initialization(self):
        """POSTing model parameters to /init_model/ should create (201)."""
        self.client.force_login(user=self.test_user)
        data = {
            "model_name": "test",
            "step_size": "800",
            "final_time": "72.0"
        }
        response = self.client.post(
            '/init_model/',
            data=data,
            format='json',
            HTTP_AUTHORIZATION='Token ' + self.token.key
        )
        self.assertEqual(response.status_code, 201)
class InputViewTest(APITestCase):
    """
    Test Input View
    """

    def setUp(self) -> None:
        # NOTE(review): plain-text password, see FmuModelViewTest.setUp.
        self.test_user = User(name='test_user',
                              email='test@test.com',
                              password='test user 88')
        self.test_user.save()
        self.token = Token.objects.create(
            user=self.test_user
        )
        # An input row must reference an existing model.
        self.model = FmuModel(
            model_name="test_model",
            user=self.test_user,
            step_size=600,
            final_time=72.0
        )
        self.model.save()

    def test_input_create(self):
        """POSTing a simulation input to /input/ should create (201)."""
        self.client.force_login(user=self.test_user)
        data = {
            'user': self.test_user.email,
            'fmu_model': self.model.model_name,
            'time_step': 600,
            'yshade': 1.0
        }
        response = self.client.post(
            '/input/',
            data=data,
            format='json',
            HTTP_AUTHORIZATION='Token ' + self.token.key
        )
        self.assertEqual(response.status_code, 201)
class OutputViewTest(APITestCase):
    """
    Test Output View
    """

    def setUp(self) -> None:
        # NOTE(review): plain-text password, see FmuModelViewTest.setUp.
        self.test_user = User(name='test_user',
                              email='test@test.com',
                              password='test user 88')
        self.test_user.save()
        self.token = Token.objects.create(
            user=self.test_user
        )
        # An output row must reference an existing model.
        self.model = FmuModel(
            model_name="test_model",
            user=self.test_user,
            step_size=600,
            final_time=72.0
        )
        self.model.save()

    def test_output_create(self):
        """POSTing a simulation output record to /output/ should create (201)."""
        self.client.force_login(user=self.test_user)
        data = {
            'user': self.test_user.email,
            'fmu_model': self.model.model_name,
            "time_step": "800",
            "yshade": "2.4",
            "dry_bulb": "5.0",
            "troo": "7.0",
            "isolext": "4.01",
            "sout": "6.89",
            "zonesens": "9.111",
            "cool_rate": "18.9"
        }
        response = self.client.post(
            '/output/',
            data=data,
            format='json',
            HTTP_AUTHORIZATION='Token ' + self.token.key
        )
        self.assertEqual(response.status_code, 201)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,426
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_simulator/simulator/test_simulation_obj.py
|
import os.path
import sys
from simulator.simulation_obj import SimulationObject
from pyfmi.fmi import load_fmu
# Make the parent directory importable.
# NOTE(review): this append runs AFTER the `from simulator.simulation_obj
# import ...` above, so it cannot help resolve that import -- confirm the
# intended statement order.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
""" Simple test script to. Tests functionality of the simulation_obj class"""
# model = load_fmu('brandon_working.fmu', log_level=4)
#
# final_time = 60*60*24
# print("INIT")
# model.initialize(0, final_time)
# print("After INIT")
# print(list(model.get_model_variables(causality=1)))
# print(list(model.get_model_variables(type=0)))
# print(list(model.get_model_variables(type=1)))
# instantiate simulation obj with default values
# 300 t-step, 8760 hours
sim_obj = SimulationObject(model_name='brandon_house.fmu', step_size=900, final_time=8760.0, path_to_fmu='brandon_house.fmu')
sim_obj.model_init() # initialize fmu model. Calls pyFMI model.init() and sets start and finish time
# new dictionary with inputs for fmu time step
i = 0
Tset = 23.0
# Drive the FMU in 900-second steps up to 31536600 s (~365 days).
while i < 31536600:
    input_dict = {'time_step': i, 'TSet': Tset}
    output = sim_obj.do_time_step(input_dict)
    print("output -> " + str(output['output_json']) + "\n")
    i += 900
print("FINISHED")
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,427
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_generator/generator_tasks.py
|
import celeryconfig
from celery import Celery
from run_eptf import RunEnergyPlusToFMU
# Celery application for the FMU generator worker.
app = Celery('generator_tasks')
app.config_from_object(celeryconfig)

# runs EnergyPlusToFMU command and generates FMU
@app.task
def gen_fmu(idf, epw, directory):
    """Celery task: build an FMU from the given IDF/EPW in `directory`.

    returns: the result of RunEnergyPlusToFMU.run().
    """
    energy_plus = RunEnergyPlusToFMU(idf=idf, epw=epw, directory=directory)
    result = energy_plus.run()
    return result
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,428
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/data_files/gen_csv.py
|
import csv
import random
import sys
def populate_test_csv():
    """Write test.csv containing a bounded random walk of two schedules.

    For each of 35040 iterations two +/-1 steps are drawn; a row is written
    only when both walked values would stay within (10, 55).
    Columns: time_step (900-second increments), PlantOnSched,
    HeatingSetpointSchedule.

    Fixes over the original: the file is opened with a `with` statement
    (the redundant close() after the with-block is gone), newline='' is
    passed as the csv module documents for writer targets, and the unused
    loop variable / duplicate temp_change lookups were cleaned up.
    """
    input_fields = ['time_step', 'PlantOnSched', 'HeatingSetpointSchedule']
    temp_change = [1, -1]
    plant_on_sched_last = 52
    heating_setpoint_schedule_last = 20
    with open('test.csv', 'w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=input_fields)
        writer.writeheader()
        j = 0
        for _ in range(35040):
            # Two independent +/-1 draws, one per schedule (same call order
            # as the original so seeded runs stay comparable).
            a = temp_change[random.randint(0, 1)]
            b = temp_change[random.randint(0, 1)]
            if (10 < (plant_on_sched_last + a) < 55) and (10 < (heating_setpoint_schedule_last + b) < 55):
                plant_on_sched_last += a
                heating_setpoint_schedule_last += b
                writer.writerow({'time_step': j,
                                 'PlantOnSched': plant_on_sched_last,
                                 'HeatingSetpointSchedule': heating_setpoint_schedule_last})
                # NOTE(review): time advances only when a row is written;
                # confirm against the consumer whether skipped iterations
                # should also advance time_step.
                j += 900
def populate_new_csv(index):
    """Write year{index}.csv: 35,040 rows of (time_step, Tset).

    Tset starts at 23 and performs a +/-1 random walk, moving only while
    the new value stays inside (-6, 53); time_step advances 900 s per row.

    Fixes vs. original: file opened directly via ``with`` (no redundant
    close()), and ``newline=''`` passed as the csv module requires.

    :param index: integer suffix used in the output file name.
    """
    input_fields = ['time_step', 'Tset']
    temp_change = [1, -1]
    q = 23  # current setpoint value of the random walk
    with open(f'year{index}.csv', 'w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=input_fields)
        writer.writeheader()
        j = 0  # simulated time in seconds, 900 s per row
        for _ in range(35040):
            step = temp_change[random.randint(0, 1)]
            if -6 < (q + step) < 53:
                q += step
            writer.writerow({'time_step': j, 'Tset': q})
            j += 900
# Generate 19 yearly input files: year1.csv .. year19.csv
# (range upper bound is exclusive, so year20.csv is NOT produced).
for i in range(1, 20):
    populate_new_csv(i)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,429
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_generator/EnergyPlusToFMU.py
|
#!/usr/bin/env python
#--- Purpose.
#
# Export an EnergyPlus model as a Functional Mockup Unit (FMU) for co-simulation.
#--- Note on directory location.
#
# This script uses relative paths to locate some of the files it needs.
# Therefore it should not be moved from its default directory.
# However, this script can be run from a different working directory.
#--- Running this script.
#
# To run this script from the command line:
# > python [python options] <this-file-name> <arguments>
#
# On unix-like systems, this command-line invocation should work as well:
# > ./<this-file-name> <arguments>
#
# To call this script from the Python interpreter, or from another Python script:
# >>> import <this-file-base-name>
# >>> <this-file-base-name>.exportEnergyPlusAsFMU(arguments)
#--- Runtime help.
#
def printCmdLineUsage():
    # Print the command-line usage summary for this script (Python 2 print statements).
    #
    print 'USAGE:', os.path.basename(__file__), \
        '-i <path-to-idd-file> [-w <path-to-weather-file>] [-a <fmi-version>] [-d] [-L] <path-to-idf-file>'
    #
    print '-- Export an EnergyPlus model as a Functional Mockup Unit (FMU) for co-simulation'
    print '-- Input -i, use the named Input Data Dictionary (required)'
    print '-- Option -w, use the named weather file'
    print '-- Option -a, specify the FMI version'
    print '-- Option -d, print diagnostics'
    print '-- Option -L, litter, that is, do not clean up intermediate files'
    # TODO: Add -V to set version number of FMI standard. Currently 1.0 is only one supported.
    #
    # End fcn printCmdLineUsage().
#--- Ensure access.
#
import os
import subprocess
import sys
import zipfile
# Map sys.platform onto a short tag ('win' / 'linux' / 'darwin') used to
# pick per-OS binaries; cygwin is treated as linux. Unknown platforms abort.
PLATFORM_NAME = sys.platform
#
if( PLATFORM_NAME.startswith('win') ):
    PLATFORM_SHORT_NAME = 'win'
elif( PLATFORM_NAME.startswith('linux')
    or PLATFORM_NAME.startswith('cygwin') ):
    PLATFORM_SHORT_NAME = 'linux'
elif( PLATFORM_NAME.startswith('darwin') ):
    PLATFORM_SHORT_NAME = 'darwin'
else:
    raise Exception('Unknown platform {' +PLATFORM_NAME +'}')
#--- Fcn to print diagnostics.
#
def printDiagnostic(messageStr):
    # Print a one-line diagnostic prefixed with '!' and this script's file name.
    #
    print '!', os.path.basename(__file__), '--', messageStr
    #
    # End fcn printDiagnostic().
#--- Fcn to quit due to an error.
#
def quitWithError(messageStr, showCmdLine):
    # Report an error and terminate the process with exit status 1.
    # messageStr: optional detail line (skipped when None).
    # showCmdLine: when true, also print the usage summary.
    #
    print 'ERROR from script file {' +os.path.basename(__file__) +'}'
    #
    if( messageStr is not None ):
        print messageStr
    #
    if( showCmdLine ):
        print
        printCmdLineUsage()
    #
    sys.exit(1)
    #
    # End fcn quitWithError().
#--- Fcn to verify a file exists.
#
# If file exists, return its absolute path. Otherwise, quit.
#
def findFileOrQuit(fileDesc, fileName):
    """Return the absolute path of *fileName* if it exists; otherwise abort.

    The error text distinguishes a missing parent directory from a missing
    file within an existing directory. *fileDesc* labels the file kind.
    """
    if not os.path.isfile(fileName):
        dirName, baseName = os.path.split(os.path.abspath(fileName))
        if not os.path.isdir(dirName):
            quitWithError('Missing directory {' +dirName +'} for ' +fileDesc +' file {' +baseName +'}', False)
        quitWithError('Missing ' +fileDesc +' file {' +baseName +'} in directory {' +dirName +'}', False)
    return os.path.abspath(fileName)
#--- Fcn to delete a file.
#
# OK if file does not exist.
#
def deleteFile(fileName):
    """Delete *fileName* if it is a regular file; a missing file is a no-op.

    Aborts via quitWithError() when the path is a directory or the delete
    fails. Fix vs. original: the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) is narrowed to OSError, the only
    exception os.remove raises for filesystem failures.
    """
    if( os.path.isfile(fileName) ):
        try:
            os.remove(fileName)
        except OSError:
            quitWithError('Unable to delete file {' +fileName +'}', False)
    elif( os.path.isdir(fileName) ):
        quitWithError('Expecting {' +fileName +'} to be a file; found a directory', False)
    #
    # End fcn deleteFile().
#--- Fcn to add a file to a zip file.
#
def addToZipFile(theZipFile, addFileName, toDir, addAsName):
    """Add *addFileName* to open zip *theZipFile*.

    addAsName -- archive member name; defaults to the file's basename.
    toDir -- optional archive sub-directory prefixed onto the member name.
    On failure this diagnoses the cause as far as possible, closes the zip
    file, and aborts the process.
    """
    # Decide the name the member will carry inside the archive.
    archiveName = os.path.basename(addFileName) if addAsName is None else addAsName
    if toDir is not None:
        archiveName = os.path.join(toDir, archiveName)
    #
    try:
        theZipFile.write(addFileName, archiveName)
        return  # success
    except:
        pass
    # Here, the write failed; diagnose the error if possible.
    if theZipFile.__class__ != zipfile.ZipFile:
        quitWithError('Expecting a zip file, got {' +theZipFile.__class__.__name__ +'}', False)
    # {theZipFile} really is a zip file, but it is no longer usable.
    theZipFile.close()
    # If the member file is missing, findFileOrQuit reports that and exits.
    addFileName = findFileOrQuit('zip member', addFileName)
    # Member file exists, so the cause is unknown.
    quitWithError('Failed to add file {' +addFileName +'} to zip file; reason unknown', False)
#--- Fcn to export an EnergyPlus IDF file as an FMU.
#
def exportEnergyPlusAsFMU(showDiagnostics, litter, iddFileName, wthFileName, fmiVersion, idfFileName):
    """Export the EnergyPlus model in *idfFileName* as a co-simulation FMU.

    showDiagnostics -- bool, print progress diagnostics.
    litter -- bool, keep intermediate files instead of deleting them.
    iddFileName -- path to the Input Data Dictionary (required).
    wthFileName -- optional weather file, packed into the FMU when given.
    fmiVersion -- FMI standard version (1 or 2).
    idfFileName -- path to the EnergyPlus IDF model.

    Writes <model-id>.fmu in the current directory. On any failure this
    exits the process via quitWithError() rather than raising.
    """
    if( showDiagnostics ):
        printDiagnostic('Begin exporting IDF file {' +idfFileName +'} as an FMU')
    #
    # Check file names passed as arguments, and get absolute paths.
    idfFileName = findFileOrQuit('IDF', idfFileName)
    iddFileName = findFileOrQuit('IDD', iddFileName)
    if( wthFileName is None ):
        if( showDiagnostics ):
            printDiagnostic('Note no WTH file given')
    else:
        wthFileName = findFileOrQuit('WTH', wthFileName)
    #
    # Get directory of this script file.
    scriptDirName = os.path.abspath(os.path.dirname(__file__))
    #
    # Load modules expect to find in same directory as this script file.
    if( scriptDirName not in sys.path ):
        sys.path.append(scriptDirName)
    #
    findFileOrQuit('utility script', os.path.join(scriptDirName, 'makeFMULib.py'))
    try:
        import makeFMULib
    except:
        quitWithError('Unable to import {makeFMULib.py}', False)
    #
    findFileOrQuit('utility script', os.path.join(scriptDirName, 'makeExportPrepApp.py'))
    try:
        import makeExportPrepApp
    except:
        quitWithError('Unable to import {makeExportPrepApp.py}', False)
    #
    # Get valid model identifier (IDF basename, extension stripped, sanitized).
    modelIdName = os.path.basename(idfFileName)
    if( modelIdName.endswith('.idf') or modelIdName.endswith('.IDF') ):
        modelIdName = modelIdName[:-4]
    modelIdName = makeFMULib.sanitizeIdentifier(modelIdName)
    if( showDiagnostics ):
        printDiagnostic('Using model identifier {' +modelIdName +'}')
    #
    # Delete expected outputs if they already exist.
    # To prevent confusion in case of an error.
    OUT_modelDescFileName = 'modelDescription.xml'
    deleteFile(OUT_modelDescFileName)
    #
    OUT_variablesFileName = 'variables.cfg'
    deleteFile(OUT_variablesFileName)
    #
    OUT_workZipFileName = modelIdName +'.zip'
    deleteFile(OUT_workZipFileName)
    #
    OUT_fmuFileName = modelIdName +'.fmu'
    deleteFile(OUT_fmuFileName)
    #
    # Create export-prep application.
    # The resulting executable will extract FMU-related information from an
    # EnergyPlus IDF file.
    # Do not force a rebuild.
    if( showDiagnostics ):
        printDiagnostic('Checking for export-prep application')
    exportPrepExeName = makeExportPrepApp.makeExportPrepApp(showDiagnostics, litter, True, fmiVersion)
    #
    # Run the export-prep application; it writes the two OUT_* files above.
    if( showDiagnostics ):
        printDiagnostic('Running export-prep application {' +exportPrepExeName +'}')
    runList = [os.path.join(os.path.curdir, exportPrepExeName)]
    if( wthFileName is not None ):
        runList.extend(['-w', wthFileName])
    runList.extend([iddFileName, idfFileName])
    subprocess.call(runList)
    if( (not os.path.isfile(OUT_modelDescFileName)) or (not os.path.isfile(OUT_variablesFileName)) ):
        quitWithError('Failed to extract FMU information from IDF file {' +idfFileName +'}', False)
    #
    # Create the shared library.
    (OUT_fmuSharedLibName, fmuBinDirName) = makeFMULib.makeFmuSharedLib(showDiagnostics, litter, modelIdName, fmiVersion)
    findFileOrQuit('shared library', OUT_fmuSharedLibName)
    #
    # Create zip file that will become the FMU.
    # Note to get compression, need zlib, but can proceed without it.
    try:
        import zlib
        if( showDiagnostics ):
            printDiagnostic('Creating zip file {' +OUT_workZipFileName +'}, with compression on')
        workZipFile = zipfile.ZipFile(OUT_workZipFileName, 'w', zipfile.ZIP_DEFLATED)
    except:
        # Here, either didn't find zlib, or couldn't create zip file.
        if( showDiagnostics ):
            printDiagnostic('Creating zip file {' +OUT_workZipFileName +'}, without compression')
        try:
            workZipFile = zipfile.ZipFile(OUT_workZipFileName, 'w', zipfile.ZIP_STORED)
        except:
            quitWithError('Failed to create zip file {' +OUT_workZipFileName +'}', False)
    #
    # Populate zip file with the standard FMU layout (modelDescription.xml at
    # root, inputs under resources/, shared library under binaries/<platform>).
    # Note fcn addToZipFile() closes the zip file if it encounters an error.
    addToZipFile(workZipFile, OUT_modelDescFileName, None, None)
    addToZipFile(workZipFile, idfFileName, 'resources', modelIdName+'.idf')
    addToZipFile(workZipFile, OUT_variablesFileName, 'resources', None)
    addToZipFile(workZipFile, iddFileName, 'resources', None)
    addToZipFile(workZipFile, exportPrepExeName, 'resources', None)
    if( wthFileName is not None ):
        addToZipFile(workZipFile, wthFileName, 'resources', None)
    addToZipFile(workZipFile, OUT_fmuSharedLibName, os.path.join('binaries',fmuBinDirName), None)
    #
    # Finish up zip file.
    if( showDiagnostics ):
        printDiagnostic('Renaming completed zip file {' +OUT_workZipFileName +'} to {' +OUT_fmuFileName +'}')
    workZipFile.close()
    findFileOrQuit('zip', OUT_workZipFileName)
    os.rename(OUT_workZipFileName, OUT_fmuFileName)
    #
    # Clean up intermediates.
    if( not litter ):
        if( showDiagnostics ):
            printDiagnostic('Cleaning up intermediate files')
        # deleteFile(exportPrepExeName) # Keep this executable, since it does not vary from run to run (i.e., not really intermediate).
        deleteFile(OUT_modelDescFileName)
        deleteFile(OUT_variablesFileName)
        deleteFile(OUT_fmuSharedLibName)
    #
    # End fcn exportEnergyPlusAsFMU().
#--- Run if called from command line.
#
# If called from command line, {__name__} is "__main__". Otherwise,
# {__name__} is base name of the script file, without ".py".
#
if __name__ == '__main__':
    #
    # Command-line entry point: parse options, validate, then export the FMU.
    #
    # Set defaults for command-line options.
    iddFileName = None
    wthFileName = None
    showDiagnostics = False
    litter = False
    fmiVersion = None
    #
    # Get command-line options.
    lastIdx = len(sys.argv) - 1
    currIdx = 1
    while( currIdx < lastIdx ):
        currArg = sys.argv[currIdx]
        if( currArg.startswith('-i') ):
            currIdx += 1
            iddFileName = sys.argv[currIdx]
            if( showDiagnostics ):
                printDiagnostic('Setting IDD file to {' +iddFileName +'}')
        elif( currArg.startswith('-w') ):
            currIdx += 1
            wthFileName = sys.argv[currIdx]
            if( showDiagnostics ):
                printDiagnostic('Setting WTH file to {' +wthFileName +'}')
        elif( currArg.startswith('-a') ):
            currIdx += 1
            fmiVersion = sys.argv[currIdx]
            if( showDiagnostics ):
                # Bug fix: this diagnostic formerly concatenated {fmiApiVersion},
                # a vestigial always-None variable, raising TypeError whenever
                # '-a' was combined with '-d'. Print the parsed value instead.
                printDiagnostic('Setting FMI API version (1 or 2) to {' +fmiVersion +'}')
        elif( currArg.startswith('-d') ):
            showDiagnostics = True
        elif( currArg.startswith('-L') ):
            litter = True
        else:
            quitWithError('Bad command-line option {' +currArg +'}', True)
        # Here, processed option at {currIdx}.
        currIdx += 1
    #
    # Get {idfFileName}.
    if( currIdx != lastIdx ):
        # Here, either an option like {-i} consumed the entry at {lastIdx}, or had
        # no options or arguments at all.
        quitWithError('Require exactly one command-line argument, <path-to-idf-file>', True)
    idfFileName = sys.argv[lastIdx]
    if( showDiagnostics ):
        printDiagnostic('Setting IDF file to {' +idfFileName +'}')
    if( idfFileName.startswith('-') and len(idfFileName)==2 ):
        quitWithError('Expecting IDF file name, got what looks like a command-line option {' +idfFileName +'}', True)
    #
    # Get {iddFileName}.
    if( iddFileName is None ):
        quitWithError('Missing required input, <path-to-idd-file>', True)
    # Get {FMI version}; defaults to "1.0" when '-a' was not given.
    if( fmiVersion is None ):
        fmiVersion = "1.0"
        printDiagnostic('FMI version is unspecified. It will be set to {' +fmiVersion +'}')
    if not (fmiVersion in [1, 2, "1", "2", "1.0", "2.0"]):
        quitWithError('FMI version "1" and "2" are supported, got FMI version {' +fmiVersion +'}', True)
    if (int(float(fmiVersion))==2):
        # FMI 2.0 has extra platform constraints: 32-bit linux is unsupported,
        # and 64-bit linux needs a specially built libxml2 for the fmuChecker.
        import struct
        nbits=8 * struct.calcsize("P")
        ops=PLATFORM_SHORT_NAME+str(nbits)
        if( PLATFORM_NAME.startswith('lin') and str(nbits)=='64'):
            dirname, filename = os.path.split(os.path.abspath(__file__))
            incLinkerLibs = os.path.join(dirname, "..", "SourceCode", "v20",
                "fmusdk-shared", "parser", ops, "libxml2.so.2")
            printDiagnostic('\nIMPORTANT NOTE: The FMU generated will run in the fmuChecker 2.0.4 only '
                'if libxml2.so.2 is symbollicaly link to {' +incLinkerLibs +'}.\n'
                ' This version of libxml2.so.2 has been compiled excluding zlib.'
                ' The official released version of libxml2.so.2 (version 2.9) '
                ' which includes zlib causes the FMU to fail in the fmuChecker.\n'
                ' However, the FMU will work fine with master algorithms'
                ' such as PyFMI even if the FMU links to the official version of libxml2.\n')
        if( PLATFORM_NAME.startswith('lin') and str(nbits)=='32'):
            quitWithError('FMI version 2.0 for Co-Simulation is not supported on {' +ops +'}', False)
        #if( PLATFORM_NAME.startswith('darwin')):
        #    quitWithError('FMI version 2.0 for Co-Simulation is not supported on {' +ops +'}', False)
    # Run.
    exportEnergyPlusAsFMU(showDiagnostics, litter, iddFileName, wthFileName, int(float(fmiVersion)), idfFileName)
#--- Copyright notice.
#
# Functional Mock-up Unit Export of EnergyPlus (C)2013, The Regents of
# the University of California, through Lawrence Berkeley National
# Laboratory (subject to receipt of any required approvals from
# the U.S. Department of Energy). All rights reserved.
#
# If you have questions about your rights to use or distribute this software,
# please contact Berkeley Lab's Technology Transfer Department at
# TTD@lbl.gov.referring to "Functional Mock-up Unit Export
# of EnergyPlus (LBNL Ref 2013-088)".
#
# NOTICE: This software was produced by The Regents of the
# University of California under Contract No. DE-AC02-05CH11231
# with the Department of Energy.
# For 5 years from November 1, 2012, the Government is granted for itself
# and others acting on its behalf a nonexclusive, paid-up, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# and perform publicly and display publicly, by or on behalf of the Government.
# There is provision for the possible extension of the term of this license.
# Subsequent to that period or any extension granted, the Government is granted
# for itself and others acting on its behalf a nonexclusive, paid-up, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display publicly,
# and to permit others to do so. The specific term of the license can be identified
# by inquiry made to Lawrence Berkeley National Laboratory or DOE. Neither
# the United States nor the United States Department of Energy, nor any of their employees,
# makes any warranty, express or implied, or assumes any legal liability or responsibility
# for the accuracy, completeness, or usefulness of any data, apparatus, product,
# or process disclosed, or represents that its use would not infringe privately owned rights.
#
#
# Copyright (c) 2013, The Regents of the University of California, Department
# of Energy contract-operators of the Lawrence Berkeley National Laboratory.
# All rights reserved.
#
# 1. Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# (1) Redistributions of source code must retain the copyright notice, this list
# of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# (3) Neither the name of the University of California, Lawrence Berkeley
# National Laboratory, U.S. Dept. of Energy nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# 2. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# 3. You are under no obligation whatsoever to provide any bug fixes, patches,
# or upgrades to the features, functionality or performance of the source code
# ("Enhancements") to anyone; however, if you choose to make your Enhancements
# available either publicly, or directly to Lawrence Berkeley National Laboratory,
# without imposing a separate written license agreement for such Enhancements,
# then you hereby grant the following license: a non-exclusive, royalty-free
# perpetual license to install, use, modify, prepare derivative works, incorporate
# into other computer software, distribute, and sublicense such enhancements or
# derivative works thereof, in binary and source code form.
#
# NOTE: This license corresponds to the "revised BSD" or "3-clause BSD"
# License and includes the following modification: Paragraph 3. has been added.
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,430
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/models.py
|
from django.db import models
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.models import BaseUserManager
from django.contrib.postgres.fields import JSONField
from django.db.models import FileField
class UserManager(BaseUserManager):
    """Manager that creates User rows keyed by email instead of username."""

    def create_user(self, email, name, password=None):
        """Create and persist a regular user; *email* is mandatory."""
        if not email:
            raise ValueError('User must have an email address.')
        new_user = self.model(email=self.normalize_email(email), name=name)
        new_user.set_password(password)
        new_user.save()
        return new_user

    def create_superuser(self, email, name, password):
        """Create a regular user, then promote it to staff + superuser."""
        admin = self.create_user(email, name, password)
        admin.is_superuser = True
        admin.is_staff = True
        admin.save()
        return admin
class User(AbstractBaseUser, PermissionsMixin):
    """represents Users in the system"""
    # Surrogate primary key; login identity is the email (USERNAME_FIELD below).
    user_id = models.AutoField(primary_key=True)
    email = models.EmailField(max_length=255, unique=True)
    name = models.CharField(max_length=255)
    is_active = models.BooleanField(default=True)
    is_staff = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)
    # Custom manager creating users keyed by email.
    objects = UserManager()
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['name']

    def get_full_name(self):
        # Single display name; there are no separate first/last fields.
        return self.name

    def get_short_name(self):
        return self.name

    def __str__(self):
        return self.email
class FmuModel(models.Model):
    """represents .fmu initialization parameters"""
    # The model name doubles as the primary key, so it must be globally unique.
    model_name = models.CharField(max_length=255, unique=True, primary_key=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    # Assigned after the simulation container exists, hence nullable.
    container_id = models.CharField(max_length=255, null=True)
    # Number of model instances to spawn for this FMU.
    model_count = models.IntegerField(default=1, null=True)
    idf_file = FileField(upload_to='./Media/', default='', null=True)
    epw_file = FileField(upload_to='./Media/', default='', null=True)
    # set as single json object
    # Simulation step size in seconds (e.g. 900 = 15 minutes).
    step_size = models.IntegerField(default=0)
    # Final simulation time; the client converts hours to seconds -- confirm units.
    final_time = models.DecimalField(max_digits=20, decimal_places=1)
    created_on = models.DateTimeField(auto_now_add=True)
    objects = models.Manager()

    def __str__(self):
        return self.model_name
class Input(models.Model):
    """represents inputs from web api going to an fmu model"""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    fmu_model = models.ForeignKey(FmuModel, on_delete=models.CASCADE)
    # Simulated time (in seconds, presumably) this input applies to -- confirm units.
    time_step = models.IntegerField(null=False)
    # set as single json object
    input_json = JSONField()
    objects = models.Manager()
class Output(models.Model):
    """represents output received from an fmu time step"""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    fmu_model = models.ForeignKey(FmuModel, on_delete=models.CASCADE)
    # Simulated time the output was produced at; mirrors Input.time_step.
    time_step = models.IntegerField(null=False)
    # set as single json object
    output_json = JSONField()
    objects = models.Manager()
class FileModel(models.Model):
    # Generic single-file upload container.
    file = FileField(upload_to='./Media/', default='')
class ContainerHostNames(models.Model):
    # Hostname of a simulation container known to the system.
    hostname = models.CharField(max_length=255)
    objects = models.Manager()
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,431
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_simulator/simulator/simulation_obj.py
|
import json
import os
import sys
import pyfmi
from pyfmi import load_fmu
from pyfmi.fmi import FMUModelCS2
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
""" SimulationObject class represents an FMU model and class methods to access and run the model """
class SimulationObject:
model: pyfmi.fmi.FMUModelCS1
def __init__(self, model_name, step_size=600, final_time=72., path_to_fmu='_fmu_export_variable.fmu'):
"""
fmu model initialize method. Parameters originate from API.
step_size: size of each step in seconds 600 = 10 minutes
final_time: must be multiple of 86400 (a day in seconds) min value is 24.0,
converted to seconds by 60*60*final_time
path_to_fmu: self explanatory
"""
self.model_name = model_name
self.step_size = step_size
self.final_time = 60*60*final_time
self.model = load_fmu(path_to_fmu, kind='CS')
# store dict of current model variables. Key is variable name, used in do_time_step()
self.model_output_vars = list(self.model.get_model_variables(causality=1))
self.model_real_vars = list(self.model.get_model_variables(type=0))
self.model_int_vars = list(self.model.get_model_variables(type=1))
def model_init(self):
""" Initialize model with start and finish time """
self.model.initialize(0, self.final_time)
def do_time_step(self, json_input):
"""
process current time_step.
json_input: input values for current time step. Originates from API.
json_input is converted to dict, values from dict are used to set relevant model inputs.
calls model.do_step,
creates new dict with output then returns output dict as json to pass back to API
"""
input_data = json.loads(json_input)
time_step = input_data['time_step']
print("input -> " + str(input_data))
# identify input variable type
for key in input_data:
if key in self.model_real_vars:
self.model.set(key, float(input_data[key]))
elif key in self.model_int_vars:
self.model.set(key, int(input_data[key]))
self.model.do_step(current_t=time_step, step_size=self.step_size, new_step=True)
do_step_outputs = {'fmu_model': self.model_name, 'time_step': time_step}
output = {}
for key in self.model_output_vars:
output[key] = self.model.get(key)[0]
do_step_outputs['output_json'] = output
return do_step_outputs
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,432
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/simapi.py
|
import csv
import json
import logging
import polling2
import requests
import pandas as pd
user_url = 'http://127.0.0.1:8000/user/'
login_url = 'http://127.0.0.1:8000/login/'
init_url = 'http://127.0.0.1:8000/init_model/'
input_url = 'http://127.0.0.1:8000/input/'
output_url = 'http://127.0.0.1:8000/output/'
graphql_url = 'http://127.0.0.1:8000/graphql/'
send_fmu = 'http://127.0.0.1:8000/send_fmu/'
# TODO add utility method to prepare user csv e.g. add time step column etc.
class SimApi:
    """Client-side programming interface to the SimApi REST/GraphQL services."""

    def __init__(self, model_name, model_count, step_size, final_time, idf_path, epw_path, csv):
        """
        Class represents the programming interface exposed to a user of the SimApi system.
        :param model_name: (string) name of model must be unique
        :param model_count: (int) number of models to instantiate
        :param step_size: (int) size of each step per hour, value in seconds e.g. 4 steps per hour = 900 step size
                          (15 minutes in seconds)
        :param final_time: (int) final runtime of model, value in hours. Will be changed to accommodate run times
                           over a few days
        :param idf_path: (string) absolute path to .idf
        :param epw_path: (string) absolute path to .epw
        :param csv: (list) absolute path(s) to csv file(s), number of files must equal model count
        """
        # File logger for client-side progress and diagnostics.
        self.logger = logging.getLogger('simapi')
        handler = logging.FileHandler('./simapi.log')
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        self.logger.setLevel(logging.INFO)
        self._header = None  # auth header dict; populated by login()
        self._model_name = model_name
        self._model_count = model_count
        self._step_size = step_size
        self._final_time = final_time
        self._idf_path = idf_path
        self._epw_path = epw_path
        self._csv = csv
        # model initialization parameters
        self._init_data = {
            'model_name': self._model_name,  # change name each time script is run!
            'container_id': None,  # TODO change container_id from hostname to src_simulator_*
            'model_count': self._model_count,
            'step_size': self._step_size,  # step size in seconds. 600 secs = 10 mins
            'final_time': self._final_time  # 24 hours = 86400 secs
        }
        self.sim_names = []  # model names returned by the server, filled by simulate_models()

    @staticmethod
    def create_user(user_email='user@user.com', user_name='user', user_password='user user88'):
        """
        Creates new user
        :param user_email: (string) user email
        :param user_name: (string) user name
        :param user_password: (string) user password
        :return: the requests.Response of the creation POST
        """
        # TODO add check for existing user
        json_data = {
            "name": user_name,
            "email": user_email,
            "password": user_password
        }
        return requests.post(user_url, data=json_data)

    def login(self, username="user@user.com", password="user user88"):
        """
        Login as current user and store user token as a header dictionary to be used in requests
        :param username: (string) user name (the account email)
        :param password: (string) user password
        :return: the HTTP status code of the login request
        """
        data = {"username": username,  # username = email
                "password": password}
        print(login_url)
        resp = requests.post(login_url, data=data)
        if resp.status_code == 200:
            json_resp = resp.json()
            token = json_resp['token']  # get validation token
            self._header = {'Authorization': 'Token ' + token}  # set request header
        return resp.status_code

    def send_and_generate(self):
        """
        Send files needed to generate an fmu. return when fmu has finished generating.
        :return: (int) status code of request, 201 if success
        """
        # Fix: context managers guarantee the handles close even if the POST raises
        # (the original opened/closed them manually and leaked on error).
        with open(self._idf_path, 'rb') as idf_file, open(self._epw_path, 'rb') as epw_file:
            file = {'idf_file': ('update.idf', idf_file),
                    'epw_file': ('update.epw', epw_file)}
            resp = requests.post(init_url, headers=self._header, data=self._init_data, files=file)
        return resp.status_code

    def send_and_init(self):
        """
        send data and initialize model as a simulation object, returns when simulation object has finished initializing
        :return: (int) status code of request, 200 if success
        """
        resp = requests.post(send_fmu, headers=self._header, json=self._init_data)
        # graphql query for all models in db related to initial_model_name.
        model_query = """
        {{
            fmuModels(modelN: "{0}"){{
                modelName
            }}
        }}
        """.format(self._model_name)
        r = requests.get(url=graphql_url, json={'query': model_query}).json()['data']['fmuModels']
        # TODO check if model count = initialized_model_count and relay to user,
        #  account for case when initialized_model_count < model count
        # initialized_model_count = len(r)
        # prints init_data on successful post
        return resp.status_code

    # TODO split into multiple methods giving the user more control over simulations
    def simulate_models(self):
        """
        Starts communication with simulation model and returns when model has reached its final time
        :return: (int) 200 for success
        """
        def test_method(query, url):
            # Count how many outputs the server has recorded for the current step.
            resp = requests.get(url=url, json={'query': query})
            json_data = resp.json()['data']['outputs']
            # self.logger.info("Output current length: {}".format(len(json_data)))
            return len(json_data)

        # TODO needs rework asap
        # query for all models in db related to initial_model_name.
        model_query = """
        {{
            fmuModels(modelN: "{0}"){{
                modelName
            }}
        }}
        """.format(self._model_name)
        r = requests.get(url=graphql_url, json={'query': model_query})
        i = 0
        while i < self._model_count:
            name = r.json()['data']['fmuModels'][i]['modelName']  # extract model name from graphql query response
            print(name)
            self.sim_names.append(name)  # store extracted model names.
            i += 1
        f_time = 60 * 60 * self._final_time  # total simulated seconds
        data_frames = []
        for file in self._csv:
            data_frames.append(pd.read_csv(file))
        i = 0  # first step
        while i < f_time:
            j = 0
            # TODO process models async client side!
            while j < self._model_count:
                # TODO store dataframe in generator method and call next each iter
                # One csv per model when several were given, otherwise share one.
                if len(data_frames) > 1:
                    df = data_frames[j]
                else:
                    df = data_frames[0]
                row = df.loc[df['time_step'] == i]
                input_dict = row.to_dict('records')
                input_dict = input_dict[0]
                input_data = {
                    'fmu_model': self.sim_names[j],
                    'time_step': i,
                    'input_json': json.dumps(input_dict)
                }
                r = requests.post(input_url, headers=self._header, data=input_data)
                print(r.text + ' ' + str(r.status_code))
                j += 1
            # Poll until every model has produced its output for this step.
            output_query = """
            {{
                outputs(modelN: "{0}", tStep: {1}) {{
                    outputJson
                }}
            }}
            """.format(self._model_name, i)
            try:
                polling2.poll(
                    lambda: test_method(query=output_query, url=graphql_url) == self._model_count,
                    step=0.1,
                    timeout=60)
            except polling2.TimeoutException:
                print("Timeout error occurred\nLength of results is: {}".format(
                    test_method(query=output_query, url=graphql_url)))
            i += self._step_size
        # send empty input to kill and restart process in sim container(s)
        k = 0
        while k < self._model_count:
            input_data = {
                'fmu_model': self.sim_names[k],
                'time_step': 0,
                'input_json': json.dumps({"end_proc": -1})
            }
            r = requests.post(input_url, headers=self._header, data=input_data)
            k += 1
        print("\nAll data sent to simulation\n")
        return 200

    def request_model_outputs(self, sim_name):
        """Poll until all outputs for *sim_name* exist, then save them as a CSV."""
        f_time = 60*60*self._final_time
        num_of_steps = f_time/self._step_size  # NOTE: float; compared numerically against len() below
        self.logger.info("Expected number of steps: {}".format(num_of_steps))

        def test_method(query, url):
            resp = requests.get(url=url, json={'query': query})
            json_data = resp.json()['data']['outputs']
            self.logger.info("Output current length: {}".format(len(json_data)))
            return len(json_data)

        output_query = """
        {{
            outputs(modelN: "{0}") {{
                timeStep
                outputJson
            }}
        }}
        """.format(sim_name)
        print("Processing remaining inputs...")
        try:
            polling2.poll(
                lambda: test_method(query=output_query, url=graphql_url) == num_of_steps,
                step=0.1,
                poll_forever=True)
        except polling2.TimeoutException:
            print("Timeout error occurred\nLength of results is: {}".format(test_method(query=output_query, url=graphql_url)))
        json_output = requests.get(url=graphql_url, json={'query': output_query}).json()['data']['outputs']
        # TODO store list of output names and use as csv column
        print("Retrieving outputs...")
        try:
            csv_columns = ['time_step', 'output']
            with open(f'output_csv/{sim_name}.csv', 'w') as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
                writer.writeheader()
                for out in json_output:
                    writer.writerow({'time_step': out['timeStep'], 'output': json.loads(out['outputJson'])})
        except IOError:
            print("I/O error")

    @staticmethod
    def multi_thread_client(self):
        """
        Let user make multi-threaded requests, simulations per thread = (number of sims / available threads).
        Avoid sequential processing of container requests client side.
        (Signature kept for compatibility; note the stray `self` on a staticmethod.)
        :return: never returns -- not implemented yet
        """
        # Bug fix: the original *returned* the NotImplementedError class
        # instead of raising an instance of it.
        raise NotImplementedError
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,433
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/admin.py
|
from django.contrib import admin
from . import models
# Register your models here.
admin.site.register(models.User)
admin.site.register(models.FmuModel)
admin.site.register(models.Input)
admin.site.register(models.Output)
admin.site.register(models.ContainerHostNames)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,434
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/serializers.py
|
from django.db import models
from rest_framework import serializers
from .models import Input, Output, User, FmuModel, ContainerHostNames, FileModel
class UserSerializer(serializers.ModelSerializer):
""""""
class Meta:
model = User
fields = ('user_id', 'name', 'email', 'password')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
""""""
user = User(
email=validated_data['email'],
name=validated_data['name']
)
user.set_password(validated_data['password'])
user.save()
return user
class FmuModelParametersSerializer(serializers.ModelSerializer):
""""""
class Meta:
model = FmuModel
fields = ('model_name', 'user', 'container_id', 'model_count', 'idf_file', 'epw_file', 'step_size', 'final_time', 'created_on')
"""read-only field user. Can only be created by authenticated user"""
extra_kwargs = {'user': {'read_only': True}}
class InputSerializer(serializers.ModelSerializer):
""""""
class Meta:
model = Input
fields = ('user', 'fmu_model', 'time_step', 'input_json')
constraints = [
models.UniqueConstraint(fields=['fmu_model', 'time_step'], name='unique time step')
]
"""read-only fields user and model_name. Can only be created by authenticated user"""
extra_kwargs = {'user': {'read_only': True},
'fmu_model': {'required': True}}
class OutputSerializer(serializers.ModelSerializer):
"""Outputs received from model time step"""
class Meta:
model = Output
fields = ('user',
'fmu_model',
'time_step',
'output_json')
"""read-only fields user and model_name. Can only be created by authenticated user"""
extra_kwargs = {'user': {'read_only': True},
'fmu_model': {'read_only': True}}
class UploadSerializer(serializers.ModelSerializer):
"""Test File upload model"""
class Meta:
model = FileModel
fields = ('file',)
class HostNameSerializer(serializers.ModelSerializer):
class Meta:
model = ContainerHostNames
fields = ('hostname',)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,435
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/views.py
|
import json
import polling2
from django.http import HttpResponse
from rest_framework import viewsets
from rest_framework.authentication import TokenAuthentication, SessionAuthentication
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.authtoken.views import ObtainAuthToken
from django.db import transaction
from django_celery_results.models import TaskResult
from rest_framework.response import Response
from rest_framework.views import APIView
from . import serializers, models, tasks
def check_result_backend(model_name):
try:
task_result = TaskResult.objects.first()
task_name = task_result.task_name
task_args = task_result.task_args
task_status = task_result.status
if (task_name.endswith('post_model')) and (model_name in task_args) and (task_status == 'SUCCESS'):
print("FMU Ready")
return True
except AttributeError:
return False
return False
def print_str(string):
print(str)
class UserViewSet(viewsets.ModelViewSet):
"""retrieve list of or create new user"""
serializer_class = serializers.UserSerializer
queryset = models.User.objects.all()
authentication_classes = (TokenAuthentication,)
class LoginViewSet(viewsets.ViewSet):
"""checks email and password and returns an auth token"""
serializer_class = AuthTokenSerializer
@staticmethod
def create(request):
return ObtainAuthToken().post(request)
class FmuModelViewSet(viewsets.ModelViewSet):
"""handles creating and reading model initialization parameters"""
authentication_classes = (TokenAuthentication, SessionAuthentication)
serializer_class = serializers.FmuModelParametersSerializer
queryset = models.FmuModel.objects.all()
def perform_create(self, serializer):
if self.request.POST.get('container_id') is None:
self.request.data['container_id'] = 'src_simulator_1'
serializer.save(user=self.request.user, container_id=self.request.data['container_id'])
data = {
'model_name': self.request.data['model_name'],
'step_size': self.request.data['step_size'],
'final_time': self.request.data['final_time'],
'container_id': self.request.data['container_id'],
'Authorization': 'Token ' + str(self.request.auth)
}
if 'model_count' in self.request.data:
data['model_count'] = self.request.data['model_count']
# TODO need to change poll_forever and perform check to see if FMU is created, also rework below
if self.request.data['container_id'] not in self.request.data['model_name']:
transaction.on_commit(lambda: tasks.post_model.apply_async((data,), queue='web', routing_key='web'))
polling2.poll(
lambda: check_result_backend(self.request.data['model_name']) is True,
step=10,
poll_forever=True)
return Response("FMU Ready", status=200)
class InputViewSet(viewsets.ModelViewSet):
"""handles creating and reading model input parameters"""
authentication_classes = (TokenAuthentication, SessionAuthentication)
serializer_class = serializers.InputSerializer
queryset = models.Input.objects.all()
"""
create new input instance. set user as current authenticated user,
fmu_model as current fmu_model related to user
"""
def perform_create(self, serializer, **kwargs):
model = models.FmuModel.objects.get(model_name=self.request.data['fmu_model'])
input_json_field = self.request.data['input_json']
time_step = self.request.data['time_step']
data = {
'time_step': time_step,
'container_id': model.container_id,
'input_data': input_json_field
}
serializer.save(user=self.request.user, fmu_model=model, time_step=time_step, input_json=input_json_field)
transaction.on_commit(lambda: tasks.post_router_input.apply_async((data,),
queue='web',
routing_key='web'))
class OutputViewSet(viewsets.ModelViewSet):
"""handles creating and reading model output parameters"""
authentication_classes = (TokenAuthentication, SessionAuthentication)
serializer_class = serializers.OutputSerializer
queryset = models.Output.objects.all()
"""
create new output instance. set user as current authenticated user,
fmu_model as current init_model related to user
"""
def perform_create(self, serializer, **kwargs):
output = self.request.data
model = models.FmuModel.objects.get(model_name=output['fmu_model'])
output_json_field = output['output_json']
time_step = output['time_step']
serializer.save(user=self.request.user, fmu_model=model, time_step=time_step,
output_json=json.dumps(output_json_field))
class SendFMUView(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
def post(self, request, *args, **kwargs):
data = request.data
data['Authorization'] = str(self.request.auth)
result = tasks.send_fmu.apply_async((data,), queue='web', routing_key='web')
return Response(result.get())
class FileUploadView(viewsets.ModelViewSet):
serializer_class = serializers.UploadSerializer
queryset = models.FileModel.objects.all()
@staticmethod
def post(request):
file_model = models.FileModel()
_, file = request.FILES.popitem() # get first element of the uploaded files
file = file[0] # get the file from MultiValueDict
file_model.file = file
file_model.save()
return HttpResponse(content_type='text/plain', content='File uploaded')
class HostNameViewSet(viewsets.ModelViewSet):
serializer_class = serializers.HostNameSerializer
queryset = models.ContainerHostNames.objects.all()
def perform_create(self, serializer):
serializer.save(hostname=self.request.data['hostname'])
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,436
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_simulator/__init__.py
|
from . import simulator
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,437
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/urls.py
|
from django.urls import path
# TODO write urls here and import simapi_web
urlpatterns = [
path('user/')
]
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,438
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/sim_timer.py
|
import csv
import time
class SimTimer:
# TODO add "static" list to store each test time then write to csv
_all_times = []
def __init__(self):
self._start_time = 0.0
self._end_time = 0.
def capture_start_time(self):
self._start_time = time.perf_counter()
def capture_end_time(self):
self._end_time = time.perf_counter()
def calc_runtime(self, stage_name):
self._all_times.append({"stage_name": stage_name, "stage_time": f"{(self._end_time - self._start_time):0.4f}"})
print(f"{stage_name} Time: {(self._end_time - self._start_time):0.4f}")
def write_times(self):
# TODO write times to csv
try:
csv_columns = ['stage_name', 'stage_time']
with open('simulation_time.csv', 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=csv_columns)
writer.writeheader()
for data in self._all_times:
writer.writerow(data)
except IOError:
print("I/O error")
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,439
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/input_output_router/router_tasks.py
|
from celery import Celery
import celeryconfig
import requests
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
app = Celery('router_tasks')
app.config_from_object(celeryconfig)
@app.task
def post_input(input_json, container_id):
logger.info(f'route_input data {str(input_json)}')
headers = {'Content-type': 'application/json'}
url = 'http://{0}:8000/model_input'.format(container_id)
r = requests.post(url, json=input_json, headers=headers)
logger.info(f'post_input -> request status {str(r.status_code)}')
return r.status_code
@app.task
def post_output(output_json):
output_url = 'http://web:8000/output/'
auth_t = output_json['Authorization']
logger.info(f'post_output -> auth token {auth_t}')
headers = {'Authorization': auth_t, 'Content-type': 'application/json'}
logger.info(f'post_output -> headers {str(headers)}')
r = requests.post(output_url, headers=headers, json=output_json)
logger.info(f'post_output -> request status {r.status_code}')
return r.text
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,440
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/example_simulate.py
|
from simapi import SimApi
from sim_timer import SimTimer
model_name = "test123"
model_count = 1
# TODO change to steps per hour parse on backend
step_size = 900 # in seconds
# TODO convert to dict {'days': , 'months': , 'year': } parse on backend
final_time = 24 # in hours
idf_path = "data_files/new.idf"
epw_path = "data_files/new.epw"
csv = ["data_files/new1.csv"]
timer = SimTimer()
sim = SimApi(
model_name=model_name,
model_count=model_count,
step_size=step_size,
final_time=final_time,
idf_path=idf_path,
epw_path=epw_path,
csv=csv
)
if not sim.login() == 200:
sim.create_user()
sim.login()
print("Generating FMU...")
timer.capture_start_time()
generate_resp = sim.send_and_generate()
print("Generate response: {}".format(generate_resp))
timer.capture_end_time()
timer.calc_runtime("gen_fmu")
if generate_resp == 201:
print("Initializing...")
timer.capture_start_time()
init_resp = sim.send_and_init()
timer.capture_end_time()
timer.calc_runtime("init_fmu")
else:
print("Something went wrong while generating the fmu!")
print(generate_resp)
exit(-1)
if init_resp == 200:
print("Simulating...")
timer.capture_start_time()
simulate_resp = sim.simulate_models()
timer.capture_end_time()
timer.calc_runtime("sim_fmu")
else:
print("Something went wrong while initializing the fmu!")
print(init_resp)
exit(-1)
timer.capture_start_time()
for name in sim.sim_names:
print("Sim name: {}".format(name))
sim.request_model_outputs(name)
print()
timer.capture_end_time()
timer.calc_runtime("req_outs")
timer.write_times()
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,441
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_generator/generator_api.py
|
import time
from pathlib import Path
import requests
from bottle import request, route, run, response
import json
import os.path
import generator_tasks
# receives idf and epw files, generates the FMU and stores.
@route('/file_upload/<model_name>', method='POST')
def file_upload(model_name):
upload = request.files
save_path = '/home/fmu/code/energy/test/' + model_name
try:
os.mkdir(save_path)
except OSError:
print("Creation of the directory %s failed" % save_path)
else:
print("Successfully created the directory %s " % save_path)
if len(upload) == 2:
for name, file in upload.iteritems():
print("Saving: " + name)
file.save(save_path)
else:
response.status = 400
return "Found {0} files. Expected 2".format(len(upload))
directory = os.listdir(save_path)
print(directory)
if model_name + '.idf' in directory and model_name + '.epw' in directory:
epw = '/home/fmu/code/energy/test/' + model_name + '/' + model_name + '.epw'
idf = '/home/fmu/code/energy/test/' + model_name + '/' + model_name + '.idf'
else:
response.status = 400
return 'Error files not saved!'
fmu_store_dir = '/home/fmu/code/fmu_test/' + model_name
try:
os.mkdir(fmu_store_dir)
except OSError:
print("Creation of the directory %s failed" % fmu_store_dir)
else:
print("Successfully created the directory %s " % fmu_store_dir)
result = generator_tasks.gen_fmu.apply_async((idf, epw, fmu_store_dir))
result.get()
fmu_check = Path('/home/fmu/code/fmu_test/{0}/{0}.fmu'.format(model_name))
fmu_zip_check = Path('/home/fmu/code/fmu_test/{0}/{0}.zip'.format(model_name))
if fmu_check.exists():
message = "FMU FILE EXISTS"
elif fmu_zip_check.exists():
message = "FMU ZIP EXISTS"
else:
message = "NO FMU OR ZIP"
return message
return message
# send stored FMU to the correct simulator container
@route('/fmu_to_simulator/<model_name>', method='POST')
def send_fmu(model_name):
json_data = request.json
model_count = json_data['model_count']
i = 1
while i <= int(model_count):
# TODO change isSimOne sim_id and store "src_simulator_{0}".format(i)
if i == 1:
json_data['isSimOne'] = True
else:
json_data['isSimOne'] = False
sim_data = {"initialize": True, "data": json_data}
fmu_file = open('/home/fmu/code/fmu_test/' + model_name + '/' + model_name + '.fmu', 'rb')
file = {'fmu': (model_name + '.fmu', fmu_file, 'application/zip'),
'json': (None, json.dumps(sim_data), 'application/json')}
url = 'http://src_simulator_{0}:8000/receive_fmu/{1}'.format(i, model_name)
r = requests.post(url, files=file)
print(r.status_code)
print(r.text)
fmu_file.close()
if i > 1:
time.sleep(2)
i += 1
response.status = 200
return 'File upload success in sim container for model_name = {0}'.format(model_name)
run(host='0.0.0.0', port=8000, debug=True, reloader=True)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,442
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_simulator/sim_proc.py
|
import json
import subprocess
import sys
import requests
from simulator.simulation_obj import SimulationObject
class SimProc:
def __init__(self, sim_data):
self.sim_obj = None
self.model_name = sim_data['model_name']
self.final_time = sim_data['final_time']
self.step_size = sim_data['step_size']
self.is_sim_one = sim_data['isSimOne']
self.header = {'Authorization': 'Token ' + sim_data['Authorization']}
self.prev_time_step = 0
self.kill_proc = False
# TODO add method that validates parameters
def initialize(self):
fmu_path = '/home/deb/code/fmu_data/{0}/{0}.fmu'.format(self.model_name)
# if the simulation container is not designated as the first container in a swarm
if not self.is_sim_one:
# create a new model instance in the django database for this container
init_url = 'http://web:8000/init_model/'
hostname = subprocess.getoutput("cat /etc/hostname")
self.model_name = self.model_name + '_' + hostname
initial_data = {
'model_name': self.model_name, # change name each time script is run!
'step_size': self.step_size, # step size in seconds. 600 secs = 10 mins
'final_time': self.final_time, # 24 hours = 86400 secs
'container_id': hostname
}
requests.post(init_url, headers=self.header, data=initial_data)
self.sim_obj = SimulationObject(model_name=self.model_name, step_size=int(self.step_size),
final_time=float(self.final_time),
path_to_fmu=fmu_path)
self.sim_obj.model_init()
def process_step(self, step_input):
print("\ninput: " + str(step_input))
# run do_step for current time step with current inputs
output_json = self.sim_obj.do_time_step(step_input)
output_url = 'http://router:8000/route_output/'
output_json['Authorization'] = self.header['Authorization']
r = requests.post(output_url, headers=self.header, json=json.dumps(output_json))
step_input = json.loads(step_input)
# when last time step has completed free and terminate instance
if int(step_input['time_step']) == self.sim_obj.final_time - int(self.step_size):
self.kill_proc = True
if __name__ == '__main__':
sim_proc = None
try:
while True:
data = None
for line in open('proc_pipe'):
str_json = line.rstrip('\n')
data = json.loads(str_json)
if data:
if not data['initialize']:
if sim_proc.kill_proc:
break
else:
sim_proc.process_step(data['input_data'])
elif data['initialize']:
sim_proc = SimProc(data['data'])
sim_proc.initialize()
except KeyboardInterrupt:
print("ending process!")
sys.exit(0)
print("ending process!")
sys.exit(1)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,443
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/input_output_router/celeryconfig.py
|
BROKER_URL = 'amqp://user:pass@broker:5672/vhost'
CELERY_RESULT_BACKEND = 'db+postgresql://postgres:backend@backend/backend_db'
CELERY_TASK_ROUTES = {'router_tasks.*': {'queue': 'router'}}
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,444
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_simulator/sim_api.py
|
from bottle import request, route, run, response
import os.path
import json
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ''))
# receive timestep value for new inputs. database queried using timestep to retrieve correct input
@route('/model_input', method='POST')
def get_input():
temp = json.loads(request.json)
req_data = json.dumps({"initialize": False, "input_data": temp['data']['input_data']})
os.system("python sim_worker.py '{}'".format(req_data))
# receive FMU file from generator. Saving the FMU triggers the simulation_process
@route('/receive_fmu/<model_name>', method='POST')
def receive_fmu(model_name):
upload = request.files
save_path = '/home/deb/code/fmu_data/' + model_name
try:
os.mkdir(save_path)
except (IOError, OSError) as error:
if error == IOError:
print("Error {0} encountered file already exists".format(error))
else:
print("Error {0} encountered problem saving file".format(error))
else:
print("Successfully created the directory %s " % save_path)
for name, file in upload.iteritems():
print("Saving: " + name)
try:
file.save(save_path)
except IOError as error: # need way to trigger sim process without creating new folder/file
print("Error saving FMU:\n{}".format(error))
json_data = request.forms.pop('json')
os.system("python sim_worker.py '{}'".format(json_data))
response.status = 200
return 'File upload success in sim container for model_name = {0}'.format(model_name)
run(host='0.0.0.0', port=8000, debug=True, reloader=True)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,445
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/tests/test_models.py
|
from django.test import TestCase
from rest_api.models import Input, Output, User, FmuModel
class UserTestCase(TestCase):
"""
Test user and super user creation
"""
def test_user(self):
self.test_user = User.objects.create(
name='test user',
email='testuser@test.com',
password='test user 88'
)
self.assertEquals(
User.objects.count(),
1
)
self.assertEquals(
self.test_user.get_full_name(),
self.test_user.name
)
self.assertEquals(
self.test_user.get_short_name(),
self.test_user.name
)
self.assertEquals(
self.test_user.__str__(),
self.test_user.email
)
self.super_user = User.objects.create_superuser(
name='super_user',
email='testsuper@test.com',
password='super user 88'
)
self.assertEquals(
User.objects.count(),
2
)
self.assertTrue(
self.super_user.is_superuser
)
self.assertTrue(
self.super_user.is_staff
)
class FmuModelTestCase(TestCase):
"""
Test case for FmuModelParameters model
"""
def test_model_initialization(self):
self.user = User.objects.create(
name='test_user',
email='testuser@test.com',
password='test user 88'
)
self.assertEquals(
FmuModel.objects.count(),
0
)
self.model = FmuModel.objects.create(
model_name="test_model",
user=self.user,
step_size=600,
final_time=72.0
)
self.assertEquals(
self.model.__str__(),
self.model.model_name
)
self.assertEquals(
FmuModel.objects.count(),
1
)
FmuModel.objects.create(
model_name="test_model1",
user=self.user,
step_size=800,
final_time=82.0
)
self.assertEquals(
FmuModel.objects.count(),
2
)
class InputTestCase(TestCase):
"""
Test case for Input model
"""
def test_input(self):
# input needs user
self.user = User.objects.create(
name='test_user',
email='testuser@test.com',
password='test user 88'
)
# and model
self.model = FmuModel.objects.create(
model_name="test_model",
user=self.user,
step_size=600,
final_time=72.0
)
self.assertEquals(
Input.objects.count(),
0
)
Input.objects.create(
user=self.user,
fmu_model=self.model,
time_step=600,
yshade=1.0
)
self.assertEquals(
Input.objects.count(),
1
)
class OutputTestCase(TestCase):
"""
Test case for Output model
"""
def test_output(self):
# input needs user
self.user = User.objects.create(
name='test_user',
email='testuser@test.com',
password='test user 88'
)
# and model
self.model = FmuModel.objects.create(
model_name="test_model",
user=self.user,
step_size=600,
final_time=72.0
)
self.assertEquals(
Output.objects.count(),
0
)
Output.objects.create(
user=self.user,
fmu_model=self.model,
time_step=600,
yshade=1.0,
dry_bulb=1.2,
troo=1.5,
isolext=1.6,
sout=1.7,
zonesens=1.8,
cool_rate=1.9
)
Output.objects.create(
user=self.user,
fmu_model=self.model,
time_step=800,
yshade=1.2,
dry_bulb=1.4,
troo=1.8,
isolext=1.9,
sout=1.2,
zonesens=1.3,
cool_rate=1.6
)
self.assertEquals(
Output.objects.count(),
2
)
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,446
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_generator/celeryconfig.py
|
BROKER_URL = 'amqp://user:pass@broker:5672/vhost'
CELERY_RESULT_BACKEND = 'db+postgresql://postgres:backend@backend/backend_db'
CELERY_ROUTES = {'generator_tasks.*': {'queue': 'gen'}}
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,447
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/schema.py
|
import graphene
from graphene_django import DjangoObjectType
from django.db.models import Q
from . import models
class UserType(DjangoObjectType):
class Meta:
model = models.User
class FmuModelType(DjangoObjectType):
class Meta:
model = models.FmuModel
class InputType(DjangoObjectType):
class Meta:
model = models.Input
filter_fields = {'fmu_model': ['exact'],
'time_step': ['exact']}
class OutputType(DjangoObjectType):
class Meta:
model = models.Output
filter_fields = {'fmu_model': ['exact'],
'time_step': ['exact']}
class Query(object):
all_users = graphene.List(UserType)
fmu_models = graphene.List(FmuModelType, model_n=graphene.String())
fmu_model = graphene.List(FmuModelType, model_n=graphene.String())
inputs = graphene.List(InputType, model_n=graphene.String(), t_step=graphene.Int())
outputs = graphene.List(OutputType, model_n=graphene.String(), t_step=graphene.Int())
def resolve_all_users(self, info, **kwargs):
return models.User.objects.all()
def resolve_fmu_models(self, info, model_n=None, **kwargs):
if model_n:
filter = (
Q(model_name__icontains=model_n)
)
return models.FmuModel.objects.filter(filter)
return models.FmuModel.objects.all()
def resolve_fmu_model(self, info, model_n=None, **kwargs):
if model_n:
filter = (
Q(model_name=model_n)
)
return models.FmuModel.objects.filter(filter)
return models.FmuModel.objects.all()
def resolve_inputs(self, info, model_n=None, t_step=None, **kwargs):
if model_n and t_step:
filter = (
Q(fmu_model=model_n) &
Q(time_step=t_step)
)
return models.Input.objects.filter(filter)
if model_n:
filter = (
Q(fmu_model=model_n)
)
return models.Input.objects.filter(filter)
if t_step:
filter = (
Q(time_step=t_step)
)
return models.Input.objects.filter(filter)
return models.Input.objects.all()
def resolve_outputs(self, info, model_n=None, t_step=None, **kwargs):
if model_n and t_step:
filter = (
Q(fmu_model=model_n) &
Q(time_step=t_step)
)
return models.Output.objects.filter(filter)
if model_n:
filter = (
Q(fmu_model=model_n)
)
return models.Output.objects.filter(filter)
if t_step:
filter = (
Q(time_step=t_step)
)
return models.Output.objects.filter(filter)
return models.Output.objects.all()
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,448
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/simapi_web/rest_api/migrations/0001_initial.py
|
# Generated by Django 3.0.3 on 2020-04-12 11:35
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('user_id', models.AutoField(primary_key=True, serialize=False)),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('is_superuser', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ContainerHostNames',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('hostname', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='FileModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('file', models.FileField(default='', upload_to='./Media/')),
],
),
migrations.CreateModel(
name='FmuModel',
fields=[
('model_name', models.CharField(max_length=255, primary_key=True, serialize=False, unique=True)),
('container_id', models.CharField(max_length=255, null=True)),
('model_count', models.IntegerField(default=1, null=True)),
('idf_file', models.FileField(default='', null=True, upload_to='./Media/')),
('epw_file', models.FileField(default='', null=True, upload_to='./Media/')),
('step_size', models.IntegerField(default=0)),
('final_time', models.DecimalField(decimal_places=1, max_digits=20)),
('created_on', models.DateTimeField(auto_now_add=True)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Output',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time_step', models.IntegerField()),
('output_json', django.contrib.postgres.fields.jsonb.JSONField()),
('fmu_model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rest_api.FmuModel')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Input',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time_step', models.IntegerField()),
('input_json', django.contrib.postgres.fields.jsonb.JSONField()),
('fmu_model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rest_api.FmuModel')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,449
|
RichieBrady/SimApi-Python
|
refs/heads/master
|
/src/fmu_simulator/sim_worker.py
|
import sys
with open('proc_pipe', 'wt') as f:
f.write(sys.argv[1])
|
{"/src/simapi_web/rest_api/serializers.py": ["/src/simapi_web/rest_api/models.py"], "/example_simulate.py": ["/simapi.py", "/sim_timer.py"]}
|
40,450
|
krishnakarthik9/networks-chatApp
|
refs/heads/master
|
/inputGUI.py
|
from Tkinter import *
import sys
import struct
import socket
from client import messagePacket
class mainWindow(object):
def __init__(self,master, server, userDetails):
self.master=master
master.minsize(width=640, height=480)
self.server=server
self.userDetails=userDetails
self.l=Label(master,text="Enter your command here")
self.l.pack()
self.e = Text(master, height=15, width=100)
self.e.configure(font=("Helvetica", 12))
self.e.pack()
self.b=Button(master,text='Ok',command=self.sendToMain)
self.b.pack()
self.master.title("CS425A Chat Application :\t Client \t{}(you)".format(userDetails['username']))
def sendToMain(self, event=None):
message = self.e.get(1.0,END)
if message == 'logout':
self.master.quit()
sys.stdout.write("<{}(you)>: {}".format(self.userDetails['username'], message))
self.e.delete(1.0, 'end')
packet = messagePacket(message.strip().split(), self.userDetails)
if not packet:
return
l, message = packet
# print l, message
l = str(struct.pack(">q", l))
message = l + str(message)
self.server.send(message)
# self.master.destroy()
|
{"/inputGUI.py": ["/client.py"]}
|
40,451
|
krishnakarthik9/networks-chatApp
|
refs/heads/master
|
/server.py
|
import time
import socket
import threading
from threading import Thread
import SocketServer
from DB import UserDB
from DB import MessageDB
import struct
# register constants
USER_ALREADY_EXISTS = '1'
YOU_ARE_REGISTERED = '2'
# login constants
SUCCESSFULLY_AUTHENTICATED = '0'
NO_SUCH_USER_EXISTS = '1'
PASSWORD_WRONG = '2'
USER_ALREADY_LOGGED_IN = '3'
PASSWORD_WRONG_YOU_ARE_BLOCKED = '4'
YOU_HAVE_BEEN_BLOCKED = '5'
FAILED = 0
SUCCESS = 1
MAX_ATTEMPTS = 3
BLOCK_TIME = 60
PRIVATE = '-p'
BROADCAST = '-b'
WHOELSE = 'whoelse'
WHOISTHERE = 'whoisthere'
LOGOUT = 'logout'
BLOCK = '-block'
UNBLOCK = '-unblock'
MQueue = {}
UsersOnline = {}
UsersRegistered = [False, {}]
class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler):
def handle(self):
# Receive special userPacket for login/register
userPacket = eval(self.request.recv(1024))
userSocket = self.request.getpeername()
print(userSocket)
# get the dict for the user
userDB = UserDB()
userDict = userDB.getUserData(userPacket['username'])
userBlocked = userDB.getUserLoginBlockList(userPacket['username'])
status = FAILED
if userPacket['cmd'] == 'register':
status = self.handleRegister(userPacket, userDB, userDict)
elif userPacket['cmd'] == 'login':
status = self.handleLogin(userPacket, userDB, userDict, userBlocked, userSocket)
if status == SUCCESS:
#TODO: what happens to loop after logging out
messageDB = MessageDB()
self.handleUnread(userDict, messageDB)
global MQueue, UsersOnline
MQueue[userPacket['username']] = False
UsersOnline[userPacket['username']] = True
self.getRegisteredUsers(userDB, userDict)
pingThread = Thread(target=self.pingClient, args=(messageDB, userDict))
pingThread.start()
while userDict['isLoggedIn']:
self.handleChat(userDB, userDict, messageDB)
UsersOnline[userPacket['username']] = False
time.sleep(0.2)
self.request.close()
def handleRegister(self, userPacket, userDB, userDict):
if userDict:
self.request.sendall(USER_ALREADY_EXISTS)
return FAILED
else:
# register
userDB.register(userPacket['username'], userPacket['password'])
self.request.sendall(YOU_ARE_REGISTERED)
# user is auto-logged-in if registration is success
return FAILED
def handleLogin(self, userPacket, userDB, userDict, userBlocked, userSocket):
if not userDict:
self.request.sendall(NO_SUCH_USER_EXISTS)
return FAILED
elif userBlocked:
# Block check: if next possible login time is in future
if userBlocked['initialFailedLoginTime'] > time.time():
self.request.sendall(YOU_HAVE_BEEN_BLOCKED
+ " " + str(userBlocked['initialFailedLoginTime']))
return FAILED
# check password match
if userDict['password'] == userPacket['password']:
# check already logged in
if userDict['isLoggedIn']:
self.request.sendall(USER_ALREADY_LOGGED_IN)
return FAILED
else:
self.request.sendall(SUCCESSFULLY_AUTHENTICATED)
userDict['isLoggedIn'] = True
userDict['socket'] = userSocket
userDB.updateUserData(userDict)
return SUCCESS
else:
if not userBlocked or (time.time() - userBlocked['initialFailedLoginTime'] > 60):
if not userBlocked:
userBlocked = {'username': userPacket['username']}
userBlocked['numAttempts'] = 1
userBlocked['initialFailedLoginTime'] = time.time()
else:
userBlocked['numAttempts'] += 1
if userBlocked['numAttempts'] > MAX_ATTEMPTS:
# block user for BLOCK_TIME
userBlocked['initialFailedLoginTime'] = time.time() + BLOCK_TIME
self.request.sendall(PASSWORD_WRONG_YOU_ARE_BLOCKED +
" " + str(userBlocked['initialFailedLoginTime']))
else:
self.request.sendall(PASSWORD_WRONG)
# Update userBlockList
userDB.updateUserLoginBlockList(userBlocked)
return FAILED
def handleUnread(self, userDict, messageDB):
print userDict['username'], "handleUnread"
msgs = messageDB.getUnreadMessages(userDict['username'])
for msg in msgs:
msg = str(msg)
lenmsg = len(msg)
lenmsg = str(struct.pack(">q", lenmsg))
msg = lenmsg + msg
self.request.send(msg)
messageDB.removeUnreadMessages(userDict['username'])
global MQueue
MQueue[userDict['username']] = False
def handleChat(self, userDB, userDict, messageDB):
print "in handleChat"
lenmsg = self.request.recv(8)
lenmsg = struct.unpack(">q", lenmsg)[0]
print lenmsg
msg = eval(self.request.recv(lenmsg))
if msg['cmd'] == 'send':
if msg['msgType'] == PRIVATE:
self.handlePrivateMessage(userDB, userDict, msg, messageDB, broadcast=False)
elif msg['msgType'] == BROADCAST:
self.handleBroadcastMessage(userDB, userDict, msg, messageDB)
elif msg['cmd'] == BLOCK:
# update userBlockList
userBlockSet = userDB.getUserBlockList(userDict['username'])
print 'enetered block <', userBlockSet, '>'
if userBlockSet is not None:
if msg['blockedUser'] not in userBlockSet['blockSet']:
userBlockSet['blockSet'].append(msg['blockedUser'])
userDB.updateUserBlockList(userBlockSet)
print 'userBlockSet is not None'
else:
userBlockSet = {'username': userDict['username'], 'blockSet': [msg['blockedUser']]}
userDB.updateUserBlockList(userBlockSet)
print 'userBlockSet is None, but added now'
print userDB.getUserBlockList(userDict['username'])
elif msg['cmd'] == UNBLOCK:
# update userBlockList
userBlockSet = userDB.getUserBlockList(userDict['username'])
if userBlockSet is not None:
if msg['unblockedUser'] in userBlockSet['blockSet']:
print userBlockSet['blockSet']
userBlockSet['blockSet'].remove(msg['blockedUser'])
userDB.updateUserBlockList(userBlockSet)
print userBlockSet['blockSet'], " after"
elif msg['cmd'] == LOGOUT:
self.handleLogout(userDB, userDict)
elif msg['cmd'] == WHOISTHERE:
self.handleWhoIsThere(userDB, userDict, messageDB)
elif msg['cmd'] == WHOELSE:
self.handleWhoElse(userDB, userDict, messageDB)
# elif msg['cmd'] == 'timeout' or msg['cmd'] == 'view':
# self.handleUnread(userDict, messageDB)
def handlePrivateMessage(self, userDB, userDict, msgPacket, messageDB, broadcast=False):
toUser = msgPacket['toUser']
receiver = userDB.getUserData(toUser)
if not receiver:
return
# if toUser is blocked then simply exit this method else continue to next step
uBlockList = userDB.getUserBlockList(toUser)
print '<', uBlockList, '>'
if uBlockList is None or userDict['username'] not in uBlockList['blockSet']:
messageDB.addUnreadMessage(toUser, userDict['username'], msgPacket)
global MQueue
if toUser in MQueue:
MQueue[toUser] = True
else:
print 'blocked user: ', userDict['username'], ' by: ', toUser
def handleBroadcastMessage(self, userDB, userDict, msgPacket, messageDB):
activeUsers = userDB.getAllUsersLoggedIn()
for receiver in activeUsers:
msgPacket['toUser'] = receiver
self.handlePrivateMessage(userDB, userDict, msgPacket, messageDB, broadcast=True)
def handleLogout(self, userDB, userDict):
print "logging Out", userDict['username']
userDict['isLoggedIn'] = False
userDict['lastActive'] = time.time()
userDB.updateUserData(userDict)
def handleWhoIsThere(self, userDB, userDict, messageDB):
global UsersOnline, MQueue
msgData = 'Online Users are\n'
for u in UsersOnline:
if UsersOnline[u]:
msgData += '\t\t' + u + '\n'
toUser = userDict['username']
created = time.time()
msgPacket = {'msgType': 'query', 'msgData': msgData,\
'created': created, 'fromUser': 'server'}
messageDB.addUnreadMessage(toUser, 'server', msgPacket)
MQueue[toUser] = True
def handleWhoElse(self, userDB, userDict, messageDB):
global UsersRegistered, MQueue
msgData = 'Registered Users are\n'
for u in UsersRegistered[1]:
msgData += '\t\t' + u + '\n'
toUser = userDict['username']
created = time.time()
msgPacket = {'msgType': 'query', 'msgData': msgData,\
'created': created, 'fromUser': 'server'}
messageDB.addUnreadMessage(toUser, 'server', msgPacket)
MQueue[toUser] = True
def getRegisteredUsers(self, userDB, userDict):
global UsersRegistered
UsersRegistered[1][userDict['username']] = True
if UsersRegistered[0]:
return
else:
allUsers = userDB.getAllUsers()
for u in allUsers:
UsersRegistered[1][u] = True
UsersRegistered[0] = True
def pingClient(self, messageDB, userDict):
global MQueue
start = time.time()
while True:
if time.time()-start >= 0.1:
if MQueue[userDict['username']]:
self.handleUnread(userDict, messageDB)
start = time.time()
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    # TCP server that dispatches each incoming connection to its own thread
    # (ThreadingMixIn); all behavior is inherited, nothing to override.
    pass
def main():
    """Start the threaded chat server on port 9998 and keep the process
    alive until interrupted."""
    global MQueue
    HOST = socket.gethostname()
    PORT = 9998
    server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
    # serve_forever runs in a daemon thread; the server then spawns one
    # more thread per client connection.
    server_thread = threading.Thread(target=server.serve_forever)
    # exit the server thread when the main thread terminates
    server_thread.daemon = True
    server_thread.start()  # equivalent to serversocket.listen()
    try:
        # FIX: the original busy-waited (`while True: count = 1`), pegging
        # a CPU core, and its server.shutdown() call was unreachable. Sleep
        # instead and shut down cleanly on Ctrl-C.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        server.shutdown()
# Script entry point: run the chat server.
if __name__ == '__main__':
    main()
|
{"/inputGUI.py": ["/client.py"]}
|
40,452
|
krishnakarthik9/networks-chatApp
|
refs/heads/master
|
/client.py
|
import socket
import sys
import getpass
import time
from threading import Thread
import pymongo
import select
import struct
from util import *
from Tkinter import *
import inputGUI
# Command tokens typed by the user; the wire values must match server.py.
PRIVATE = '-p'
BROADCAST = '-b'
WHOELSE = 'whoelse'
# NOTE(review): the name 'WOISTHERE' looks like a typo for WHOISTHERE, but
# only the wire value 'whoisthere' matters and it matches the server.
WOISTHERE = 'whoisthere'
LOGOUT = 'logout'
BLOCK = '-block'
UNBLOCK = '-unblock'
# Session state shared between the UI thread and the receive thread.
isLoggedIn = False
userDetails = {}
# NOTE(review): this placeholder is rebound by `def userInput(...)` later in
# the file; the None value is never used.
userInput = None
def help():
    """Print usage information and terminate the process."""
    # NOTE: shadows the builtin help(); name kept for caller compatibility.
    print("Usage: help")
    sys.exit()
def UserPacket(arguments):
    """Build the initial login/register packet from command-line arguments.

    arguments -- typically sys.argv; arguments[1] selects '-l' (login) or
    '-r' (register). Prompts interactively for username and password.
    Exits via help() on any other input.
    """
    if len(arguments) < 2:
        help()
    mode_by_flag = {'-l': 'login', '-r': 'register'}
    flag = arguments[1]
    if flag not in mode_by_flag:
        help()
    # Dict values evaluate left-to-right, so the username prompt appears
    # before the password prompt, exactly as before.
    return {
        'cmd': mode_by_flag[flag],
        'username': raw_input('Please enter your username:\n'),
        'password': getpass.getpass(prompt='Please enter your password:\n'),
    }
def messagePacket(userInput, userDetails):
    """Translate tokenized user input into a (length, packet) pair.

    userInput   -- the user's command line, already split on whitespace
    userDetails -- dict with at least 'username'
    Returns (len(str(packet)), packet), or None for empty/unknown input.
    """
    if not userInput:
        return None
    cmd = userInput[0]
    if cmd == PRIVATE:
        toUser = userInput[1]
        msgData = ' '.join(userInput[2:])
        msg = {'cmd': 'send', 'msgType': cmd, 'toUser': toUser,
               'msgData': msgData, 'created': time.time(),
               'fromUser': userDetails['username']}
    elif cmd == BROADCAST:
        msg = {'cmd': 'send', 'msgType': cmd,
               'msgData': ' '.join(userInput[1:]), 'created': time.time(),
               'fromUser': userDetails['username']}
    elif cmd == BLOCK:
        msg = {'cmd': cmd, 'blocker': userDetails['username'],
               'blockedUser': userInput[1]}
    elif cmd == UNBLOCK:
        # FIX: this branch was a duplicate `elif cmd == BLOCK:` and thus
        # unreachable -- '-unblock' fell through to the final else and
        # returned None, so unblock requests never reached the server.
        msg = {'cmd': cmd, 'unblocker': userDetails['username'],
               'unblockedUser': userInput[1]}
    elif cmd in (WHOELSE, WOISTHERE, LOGOUT):
        msg = {'cmd': cmd}
    else:
        return None
    return len(str(msg)), msg
def displayError(ack, cmd):
    """Print a human-readable explanation of a failed login/register ack.

    ack -- server reply; first token is the status code, and for code '5'
    the second token is the time the block expires.
    cmd -- 'login' or 'register' (anything else prints only the raw ack).
    """
    print(ack)
    if cmd == 'login':
        code = ack.split()[0]
        if code == '5':
            print("You have been blocked. Try after " + ack.split()[1])
        else:
            reasons = {
                '1': "No such User Exists",
                '2': "Incorrect username/password",
                '3': "User already logged in from a different system",
                '4': "Password wrong, exceeded number of incorrect attempts. Retry after 60 secs",
            }
            if code in reasons:
                print(reasons[code])
    elif cmd == 'register':
        code = ack.split()[0]
        if code == '1':
            print("Username is taken")
        elif code == '2':
            print("You are registered")
    return
def serverInput(server):
    """Receive length-prefixed messages from the server and display them.

    Runs in its own thread; loops until the shared isLoggedIn flag is
    cleared by the UI thread."""
    global isLoggedIn
    while isLoggedIn:
        # Block until the server socket has data.
        ready, _, _ = select.select([server], [], [])
        for sock in ready:
            if sock != server:
                continue
            # 8-byte big-endian length prefix, then the payload.
            header = sock.recv(8)
            if not header:
                continue
            size = struct.unpack(">q", header)[0]
            displayMessage(sock.recv(size))
def userInput(server, userDetails):
    """Run the Tk input window; clears isLoggedIn when the window closes so
    the receive thread can exit."""
    root = Tk()
    window = inputGUI.mainWindow(root, server, userDetails)
    root.bind('<Return>', window.sendToMain)
    root.mainloop()
    # mainloop() has returned: the window was closed or quit on logout.
    print('---exiting---')
    global isLoggedIn
    isLoggedIn = False
def startChat(server, userDetails):
    """Mark the session live and spin up the UI and receive threads."""
    global isLoggedIn
    isLoggedIn = True
    # UI thread first, then the socket reader -- same order as before.
    workers = (
        Thread(target=userInput, args=(server, userDetails)),
        Thread(target=serverInput, args=(server,)),
    )
    for worker in workers:
        worker.start()
def main():
    """Connect to the chat server, authenticate, and start the chat UI."""
    # Connect to the server on this machine, port 9998.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((socket.gethostname(), 9998))
    # Build the login/register packet from the command line and send it.
    credentials = UserPacket(sys.argv)
    sock.sendall(str(credentials))
    # The server replies with a one-token status code ('0' = authenticated).
    ack = sock.recv(1024)
    if ack == '0':
        print("Successfully Authenticated")
        startChat(sock, credentials)
    else:
        displayError(ack, credentials['cmd'])
    sock.close()
# Script entry point: run the chat client.
if __name__ == '__main__':
    main()
|
{"/inputGUI.py": ["/client.py"]}
|
40,453
|
krishnakarthik9/networks-chatApp
|
refs/heads/master
|
/DB.py
|
import pymongo
import time
from copy import deepcopy
class UserDB(object):
    """Mongo-backed store for user accounts, failed-login throttling state,
    and per-user block lists."""

    def __init__(self):
        # Fixed connection parameters: a local mongod instance.
        mongoServer = 'localhost'
        mongoPort = 27017
        dbName = 'userDB'
        userDataCollection = 'userData'
        userLoginBlockListCollection = 'userLoginBlockList-final'
        userBlockListCollection = 'userBlockList-final'
        connection = pymongo.MongoClient(mongoServer, mongoPort)
        db = connection[dbName]
        self.userData = db[userDataCollection]
        self.userLoginBlockList = db[userLoginBlockListCollection]
        self.userBlockList = db[userBlockListCollection]

    def getUserData(self, username):
        """Return the account document for username, or None."""
        return self.userData.find_one({'username': username})

    def getUserLoginBlockList(self, username):
        """Return the failed-login throttle document for username, or None."""
        return self.userLoginBlockList.find_one({'username': username})

    def updateUserData(self, updatedData):
        """Replace the stored account document with updatedData."""
        username = updatedData['username']
        oldData = self.getUserData(username)
        self.userData.replace_one(oldData, updatedData)

    def updateUserLoginBlockList(self, updatedData):
        """Insert or replace the failed-login throttle document."""
        username = updatedData['username']
        oldData = self.getUserLoginBlockList(username)
        if oldData is None:
            self.userLoginBlockList.insert(updatedData)
        else:
            self.userLoginBlockList.replace_one(oldData, updatedData)

    def register(self, username, password):
        """Create a new account record for username."""
        # NOTE(review): passwords are stored in plaintext; they should be
        # hashed before persisting.
        self.userData.insert({'username': username, 'password': password,
                              'isLoggedIn': False, 'socket': [-1, -1]})

    def getAllUsersLoggedIn(self):
        """Return the usernames of all currently logged-in users."""
        return [user['username'] for user in self.userData.find({'isLoggedIn': True})]

    def getAllUsers(self):
        """Return all registered usernames."""
        return [user['username'] for user in self.userData.find({})]

    def getUserBlockList(self, username):
        """Return the user-to-user block-list document, or None."""
        return self.userBlockList.find_one({'username': username})

    def updateUserBlockList(self, updatedData):
        """Insert or replace the user-to-user block-list document."""
        username = updatedData['username']
        # FIX: this previously called getUserLoginBlockList(), querying the
        # wrong collection, so the existing block-list document was never
        # found and every update inserted a duplicate into userBlockList.
        oldData = self.getUserBlockList(username)
        if oldData is None:
            self.userBlockList.insert(updatedData)
        else:
            self.userBlockList.replace_one(oldData, updatedData)
class MessageDB(object):
    """Mongo-backed queue of undelivered chat messages."""

    def __init__(self):
        # Fixed connection parameters: a local mongod instance.
        client = pymongo.MongoClient('localhost', 27017)
        self.messages = client['messageDB']['messageCollection']

    def getUnreadMessages(self, username):
        """Return all queued messages addressed to username, without the
        Mongo _id field."""
        return list(self.messages.find({'toUser': username}, {'_id': False}))

    def removeUnreadMessages(self, username):
        """Drop every queued message addressed to username."""
        self.messages.delete_many({'toUser': username})

    def addUnreadMessage(self, toUser, fromUser, message):
        """Queue a copy of message for toUser, stamped with the sender and
        the current time (the caller's dict is left untouched)."""
        record = deepcopy(message)
        record['toUser'] = toUser
        record['fromUser'] = fromUser
        record['time'] = time.time()
        self.messages.insert(record)
|
{"/inputGUI.py": ["/client.py"]}
|
40,454
|
krishnakarthik9/networks-chatApp
|
refs/heads/master
|
/util.py
|
import sys
import time
def getTime(epoch):
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(epoch)))
def getType(msgType):
    """Map a wire message-type flag to a human-readable label; anything
    other than '-p'/'-b' is a server query reply."""
    labels = {'-p': 'private', '-b': 'broadcast'}
    return labels.get(msgType, 'query')
def displayMessage(message):
    """Pretty-print one serialized message dict received from the server.

    message -- str(dict) produced by the peer, containing 'fromUser',
    'msgData', 'msgType' and 'created' keys.
    """
    # SECURITY FIX: the original used eval() on data read from the network,
    # letting a malicious peer execute arbitrary code. The payload only ever
    # contains literal values, so ast.literal_eval is a safe drop-in.
    import ast  # local import keeps the file's top-level imports untouched
    message = ast.literal_eval(message)
    m = '<{}>: {} \t\t(as {} on {})'.format(
        message['fromUser'], message['msgData'],
        getType(message['msgType']), getTime(str(message['created'])))
    print(m)
|
{"/inputGUI.py": ["/client.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.