index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
984,300 | 0aedaf487978da49e0731b252f73982bad7ce8a0 |
import pandas as pd
import csv
def EstaCiudad(ruta_archivo, Ciudad: str) -> dict:
    """Return {Ciudad: total inhabitants} for matching city-code rows.

    ruta_archivo: despite its name this is a pandas DataFrame (the caller
        passes the result of pd.read_csv, not a path) with at least the
        columns 'Codigo_Ciudad' and 'Habitantes'.  The wrong ``str``
        annotation was removed.
    Ciudad: city code matched against the 'Codigo_Ciudad' column.
    """
    # sort_values() before sum() was a no-op, and the unused locals that
    # re-read whole columns were dropped.
    suma = ruta_archivo[ruta_archivo["Codigo_Ciudad"] == Ciudad]['Habitantes'].sum()
    return {Ciudad: suma}
# Demo: load the cities CSV (no header row) straight from GitHub and report
# the population total for one hard-coded city code.  Requires network access.
ruta_archivo = pd.read_csv("https://raw.githubusercontent.com/ebustosc/ebustosc/main/Ciudades.csv",header=None, index_col=False,names=['Id_Ciudad', "Ciudad","Codigo_Ciudad","Habitantes"])
Ciudad="Bogota01"
print(EstaCiudad(ruta_archivo,Ciudad))
import pandas as pd
import csv
def EstaCiudad(ruta_archivo: str, Ciudad: str) -> dict:
    """Read the cities CSV at *ruta_archivo* and return {Ciudad: total Habitantes}.

    The CSV has no header row; columns are assigned explicitly.
    """
    dataFrame = pd.read_csv(ruta_archivo, header=None, index_col=False,
                            names=['Id_Ciudad', "Ciudad", "Codigo_Ciudad", "Habitantes"])
    # The original referenced undefined names ('nombres', 'dic') and returned
    # an undefined variable; this computes the filtered population sum the
    # commented-out line intended.
    suma = dataFrame[dataFrame['Codigo_Ciudad'] == Ciudad]['Habitantes'].sum()
    diccionario = dict()
    diccionario[Ciudad] = suma
    return diccionario
# pass
#nombreArchivo= 'titanic3.csv'
# Demo call: downloads the CSV from GitHub (network access required).
print(EstaCiudad('https://raw.githubusercontent.com/ebustosc/ebustosc/main/Ciudades.csv','Bogota01'))
def ejemploReto5(nombreArchivo: str) -> dict:
    """Summarise a Titanic-style CSV at *nombreArchivo*.

    Expects the columns 'name', 'age' and 'fare'; returns passenger names,
    the oldest and youngest ages, and the mean fare rounded to 2 decimals.
    """
    import pandas as pd
    df = pd.read_csv(nombreArchivo)
    edades = df['age']
    return {
        'nombres': list(df['name']),
        'edadMayor': max(edades),
        'edadMenor': min(edades),
        'tarifaPromedio': round(df['fare'].mean(), 2),
    }
# pass
# Demo call against a local Titanic CSV expected in the working directory.
nombre = 'titanic3.csv'
print (ejemploReto5(nombre))
import pandas as pd
import csv
def EstaCiudad(ruta_archivo: str, Ciudad: str):
    """Read the cities CSV at *ruta_archivo* (no header row) and return the
    total 'Habitantes' over rows whose 'Codigo_Ciudad' equals *Ciudad*.
    """
    dataFrame = pd.read_csv(ruta_archivo, header=None,
                            index_col=False, names=['Id_Ciudad',
                            "Ciudad", "Codigo_Ciudad", "Habitantes"])
    # The original had unbalanced brackets and compared the literal string
    # 'Codigo_Ciudad' to Ciudad instead of comparing the column's values.
    suma = dataFrame[dataFrame['Codigo_Ciudad'] == Ciudad]['Habitantes'].sum()
    return suma
# Demo call: downloads the CSV from GitHub (network access required).
print(EstaCiudad('https://raw.githubusercontent.com/ebustosc/ebustosc/main/Ciudades.csv','Bogota01'))
import pandas as pd
import csv
def EstaCiudad(ruta_archivo: str, Ciudad: str):
    """Read the cities CSV at *ruta_archivo* and return {Ciudad: total Habitantes}."""
    dataFrame = pd.read_csv(ruta_archivo, header=None, index_col=False,
                            names=['Id_Ciudad', 'Ciudad', 'Codigo_Ciudad', 'Habitantes'])
    # The original subscripted the *type* ``str`` (a TypeError), left a
    # bracket unbalanced, compared a string literal to Ciudad, and returned
    # the argument instead of the dictionary it had just built.
    suma = dataFrame[dataFrame['Codigo_Ciudad'] == Ciudad]['Habitantes'].sum()
    diccionario = dict()
    diccionario[Ciudad] = suma
    return diccionario
# pass
#nombreArchivo= 'titanic3.csv'
# Demo call: downloads the CSV from GitHub (network access required).
print(EstaCiudad('https://raw.githubusercontent.com/ebustosc/ebustosc/main/Ciudades.csv','Bogota01'))
import pandas as pd
import csv
def EstaCiudad(ruta_archivo: str, Ciudad: str):
    """Read the CSV (with header row) at *ruta_archivo* and return
    {Ciudad: total Habitantes} over matching 'Codigo_Ciudad' rows.
    """
    datos = pd.read_csv(ruta_archivo)
    # sort_values() before sum() was a no-op and is dropped.
    suma = datos[datos["Codigo_Ciudad"] == Ciudad]['Habitantes'].sum()
    # Removed two lines that indexed the *path string* with column names
    # (ruta_archivo['Codigo_Ciudad']), which raised TypeError at runtime.
    diccionario = dict()
    diccionario[Ciudad] = suma
    return diccionario
#ruta_archivo = pd.read_csv("https://raw.githubusercontent.com/ebustosc/ebustosc/main/Ciudades.csv",header=None, index_col=False,names=['Id_Ciudad', "Ciudad","Codigo_Ciudad","Habitantes"])
#Ciudad="Bogota01"
print(EstaCiudad('https://raw.githubusercontent.com/ebustosc/ebustosc/main/Ciudades.csv','Bogota01')) |
984,301 | 337c64eb50d17f7e6e5a252f66efe04642d81661 | ###########################################################
# Computer Project #6
#
# Algorithm
# Call a function to prompt for file names until they open properly
# Call a function to adds data from all files to total_dict
# Call a function to get wanted data from the full dictionary of data
# Call a function to get the years and averages
# Call a function to plot the graphs
# Print the averages for city/highway data
###########################################################
import csv
import pylab
import matplotlib.patches as patches
def open_files():
    """
    Prompt for decades until at least one matching CSV opens.

    Accepted decades (1980/1990/2000/2010) map to "<decade>s.csv".  Invalid
    entries are reported and skipped; if an accepted decade's file is
    missing, everything opened so far is closed and the user is re-prompted.
    Returns: list of open file pointers, one per valid decade entered.
    """
    valid_decades = ("1980", "1990", "2000", "2010")
    file_list = []
    while True:
        try:
            decades = input("Input multiple decades separated by commas," +
                            " e.g. 1980, 1990, 2000:").split(",")
            # Check each entry; the four-way if/elif chain collapsed into a
            # membership test since every branch did the same thing.
            for year in decades:
                decade = year.strip()
                if decade in valid_decades:
                    file_list.append(open(decade + "s.csv"))
                else:
                    print("Error in decade " + '"' + str(year) + '"')
            # If there are still no files loop again, otherwise return.
            if file_list == []:
                continue
            break
        except FileNotFoundError:
            # Fixed: also reset the list — the original closed the files but
            # kept the closed file objects in file_list, so a later success
            # could return unusable handles.
            for item in file_list:
                item.close()
            file_list = []
            print("File Not Found")
            continue
    return file_list
def read_file(file):
    """
    Build {manufacturer: {year: [[city mpgs], [highway mpgs]]}} from one
    decade's CSV file.

    Column layout: col 46 = make, col 63 = year, col 4 = city mileage,
    col 34 = highway mileage.  The header row (make == 'make') and model
    year 2017 are both skipped.
    Returns: nested dictionary of mileage data
    """
    mileage = {}
    for row in csv.reader(file):
        make, year = row[46], row[63]
        if make == 'make' or year == '2017':
            continue
        city, hwy = int(row[4]), int(row[34])
        years = mileage.setdefault(make, {})
        if year in years:
            years[year][0].append(city)
            years[year][1].append(hwy)
        else:
            years[year] = [[city], [hwy]]
    return mileage
def merge_dict(target, source):
    """
    Merge *source*'s manufacturer data into *target* and return it.

    When target is empty, the source dictionary itself is returned (no
    copy, matching the original behaviour); otherwise each manufacturer's
    year map in target is updated from source, adding new manufacturers
    as needed.
    """
    if not target:
        return source
    for maker, year_data in source.items():
        if maker in target:
            target[maker].update(year_data)
        else:
            target[maker] = year_data
    return target
def get_wanted_data(wanted_data_dict, total_dict):
    """
    Fold per-brand data from *total_dict* into the four parent companies
    pre-initialized in *wanted_data_dict* (Ford, GM, Toyota, Honda).
    Brands not owned by one of the four are ignored.
    Returns: wanted_data_dict, updated in place
    """
    # Brand -> parent-company lookup table (replaces the if/elif chain).
    parents = {}
    for brand in ('Ford', 'Mercury', 'Lincoln'):
        parents[brand] = 'Ford'
    for brand in ('GMC', 'Chevrolet', 'Pontiac', 'Buick', 'Cadillac',
                  'Oldsmobile', 'Saturn'):
        parents[brand] = 'GM'
    for brand in ('Toyota', 'Lexus', 'Scion'):
        parents[brand] = 'Toyota'
    for brand in ('Honda', 'Acura'):
        parents[brand] = 'Honda'
    for brand, year_map in total_dict.items():
        company = parents.get(brand)
        if company is None:
            # Not one of the tracked parent companies.
            continue
        target_years = wanted_data_dict[company]
        for year, (city_vals, hwy_vals) in year_map.items():
            if year in target_years:
                # Same year seen via another brand: concatenate samples.
                target_years[year][0] += city_vals
                target_years[year][1] += hwy_vals
            else:
                target_years[year] = [city_vals, hwy_vals]
    return wanted_data_dict
def get_averages(full_dict):
    """
    Compute per-year average city/highway mileage for every company.

    full_dict: {company: {year: [[city mpgs], [highway mpgs]]}}
    Returns: (years, city_dict, hwy_dict) — years is the list of year
    strings in order of first appearance over companies' sorted years;
    each dict maps company -> list of rounded averages aligned with that
    company's sorted years.
    """
    converter_dict = {}
    city_dict = {}
    hwy_dict = {}
    years = []
    # Per company: one sorted [year, city_avg, hwy_avg] triple per year.
    for company, year_map in full_dict.items():
        triples = []
        for year, (city_vals, hwy_vals) in year_map.items():
            triples.append([year,
                            round(sum(city_vals) / len(city_vals), 2),
                            round(sum(hwy_vals) / len(hwy_vals), 2)])
        triples.sort()
        converter_dict[company] = triples
    # Flatten the sorted triples into the parallel output structures.
    for company, triples in converter_dict.items():
        for year, city_avg, hwy_avg in triples:
            if year not in years:
                years.append(year)
            city_dict.setdefault(company, []).append(city_avg)
            hwy_dict.setdefault(company, []).append(hwy_avg)
    return years, city_dict, hwy_dict
def plot_mileage(years,city,highway):
    '''Plot the city and highway mileage data.
    Input: years, a list of years;
    city, a dictionary with manufacturer as key and list of annual
    mileage as value;
    highway, a similar dictionary with a list of highway mileage as
    values;
    Requirement: all lists must be the same length.
    Opens two blocking pylab windows (figure 1 = city, figure 2 = highway);
    returns nothing.'''
    pylab.figure(1)
    # One colored line per company; colors fixed red/blue/green/yellow.
    pylab.plot(years, city['Ford'], 'r-', years, city['GM'], 'b-', years,
               city['Honda'], 'g-', years, city['Toyota'], 'y-')
    red_patch = patches.Patch(color='red', label='Ford')
    blue_patch = patches.Patch(color='blue', label='GM')
    green_patch = patches.Patch(color='green', label='Honda')
    yellow_patch = patches.Patch(color='yellow', label='Toyota')
    pylab.legend(handles=[red_patch, blue_patch, green_patch, yellow_patch])
    pylab.xlabel('Years')
    pylab.ylabel('City Fuel Economy (MPG)')
    pylab.show()
    # Plot the highway mileage data, reusing the same legend patches.
    pylab.figure(2)
    pylab.plot(years, highway['Ford'], 'r-', years, highway['GM'], 'b-', years,
               highway['Honda'], 'g-', years, highway['Toyota'], 'y-')
    pylab.legend(handles=[red_patch, blue_patch, green_patch, yellow_patch])
    pylab.xlabel('Years')
    pylab.ylabel('Highway Fuel Economy (MPG)')
    pylab.show()
#Open files
files = open_files()
#Error checking to make sure there are files to open
# NOTE(review): open_files() always returns a list, never None, so only the
# else branch can run — confirm whether a None return was once possible.
if files == None:
    print("Program ending.")
else:
    #Adds data from all files to total_dict
    total_dict = {}
    for file_input in files:
        decade_dict = read_file(file_input)
        total_dict = merge_dict(total_dict, decade_dict)
    #Gets wanted data from the full dictionary of data
    manufacturer_dict = {'Ford':{}, 'GM':{}, 'Toyota':{}, 'Honda':{}}
    wanted_manufacturers_dict = get_wanted_data(manufacturer_dict, total_dict)
    #Get the years and averages
    years_dict, city_averages_dict, hwy_averages_dict = get_averages\
        (wanted_manufacturers_dict)
    #Plot the graphs (blocks until the windows are closed)
    plot_mileage(years_dict,city_averages_dict,hwy_averages_dict)
    #Print the averages for city data
    print("City")
    print("{:>11} {:5}".format("Company:", "Mileage"))
    for manufacturer in city_averages_dict:
        # Mean of the per-year averages (unweighted by sample counts).
        city_avg = sum(city_averages_dict[manufacturer])/\
            len(city_averages_dict[manufacturer])
        print("{:>10}: {:3.2f}".format(manufacturer, city_avg))
    #Print the averages for highway data
    print("Highway")
    print("{:>11} {:5}".format("Company:", "Mileage"))
    for manufacturer in hwy_averages_dict:
        hwy_avg = sum(hwy_averages_dict[manufacturer])/\
            len(hwy_averages_dict[manufacturer])
        print("{:>10}: {:3.2f}".format(manufacturer, hwy_avg))
# Questions
# Q1: 6
# Q2: 5
# Q3: 2
# Q4: 7 |
984,302 | 79ec2e51034da52dd2db5d98c350da8de246e4b3 | import cv2
import numpy as np
class Postprocess(object):
def __init__(self):
    # Fixed sample rows: lane x-coordinates are reported at these image
    # y-positions, every 10 px from 160 to 710.  Presumably the TuSimple
    # lane benchmark's h_samples (the record keys built in
    # postprocess_tensor match that format) — TODO confirm.
    self.h_samples = [160, 170, 180, 190, 200, 210, 220, 230, 240, 250, 260,
                      270, 280, 290, 300, 310, 320, 330, 340, 350, 360, 370, 380, 390, 400, 410, 420,
                      430, 440, 450, 460, 470, 480, 490, 500, 510, 520, 530, 540, 550, 560, 570, 580,
                      590, 600, 610, 620, 630, 640, 650, 660, 670, 680, 690, 700, 710]
@staticmethod
def _morphological_process(image, kernel_size=5):
    """
    Morphological closing to fill holes in a binary segmentation result.

    :param image: single-channel binary image
    :param kernel_size: side of the elliptical structuring element
    :return: closed image (uint8)
    :raises ValueError: if the image has three dimensions (multi-channel)
    """
    if len(image.shape) == 3:
        raise ValueError('Binary segmentation result image should be a single channel image')
    # Fixed: `image.dtype is not np.uint8` compared a dtype object to the
    # np.uint8 scalar type by identity, which is always True, so the
    # conversion ran unconditionally.  Compare dtypes with != instead.
    if image.dtype != np.uint8:
        image = np.array(image, np.uint8)
    kernel = cv2.getStructuringElement(shape=cv2.MORPH_ELLIPSE, ksize=(kernel_size, kernel_size))
    # Close operation fills holes.
    closing = cv2.morphologyEx(image, cv2.MORPH_CLOSE, kernel, iterations=1)
    return closing
def _convert_pts_to_json(self,lane_pts):
pty = lane_pts[:, 1]
pt = []
for h in self.h_samples:
idx = np.where(pty == h)
if idx[0].shape[0] == 0:
pt.append(-2)
else:
ptx = int(round(np.mean(lane_pts[idx,][0, :, 0]) - 0.1))
pt.append(ptx)
pt = np.squeeze(np.vstack(pt))
return pt
def postprocess_tensor(self, binary_image, img_name, lane_exist):
    """
    Convert a per-lane binary segmentation tensor into a result record.

    :param binary_image: H x W x C array, one channel per candidate lane
    :param img_name: raw image path as bytes (decoded into the record)
    :param lane_exist: per-channel existence scores; channels < 0.5 dropped
    :return: dict with keys 'lanes', 'h_samples', 'raw_file', 'run_time'
    """
    lane_json = []
    for lane_i in range(binary_image.shape[-1]):
        lane_image = binary_image[:, :, lane_i]
        lane_pts = self._get_one_lane_pts(lane_image)
        # Keep a lane only if it produced points AND its score passes.
        if lane_pts.size != 0 and lane_exist[lane_i] >= 0.5:
            lane_json.append(lane_pts)
    lane_json = [self._convert_pts_to_json(lane_pt).tolist() for lane_pt in lane_json]
    # Renamed the result variable: the original shadowed the builtin `dict`.
    result = {}
    result['lanes'] = lane_json
    result['h_samples'] = self.h_samples
    result['raw_file'] = bytes.decode(img_name)
    # run_time is a hard-coded placeholder, not a measured value.
    result['run_time'] = 10
    return result
def _get_one_lane_pts(self, binary_seg_result, min_area_threshold=100):
    """
    Extract the (x, y) pixel coordinates of one lane's mask.

    :param binary_seg_result: single-channel soft mask scaled into [0, 1]
    :param min_area_threshold: unused here — presumably intended for
        small connected-component filtering; TODO confirm and wire up
        or remove.
    :return: N x 2 array of (x, y) points where the closed mask is 255
    """
    # Scale the soft mask to 0-255 uint8 before morphology.
    binary_seg_result = np.array(binary_seg_result*255, dtype=np.uint8)
    # Apply morphology to fill holes and reduce small specks.
    morphological_ret = self._morphological_process(binary_seg_result, kernel_size=5)
    idx = np.where(morphological_ret == 255)
    # np.where yields (rows, cols) = (y, x); stack as (x, y) pairs.
    lane_pts = np.vstack((idx[1], idx[0])).transpose()
    return lane_pts
984,303 | 876e3a718544cba7397a243ef7a680df58ddaec3 | #importing modules
import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model
# Dummy data: 200 points, label 1 where the latent x is positive.
n_samples = 200
X = np.random.normal(size=n_samples)
# Fixed: np.float was deprecated in NumPy 1.20 and removed in 1.24;
# the builtin float is the documented replacement.
y = (X > 0).astype(float)
X[X > 0] *= 4
X += .3 * np.random.normal(size=n_samples)
X = X[:, np.newaxis]
# run the classifier (very large C => effectively unregularised fit)
clf = linear_model.LogisticRegression(C=1e5)
clf.fit(X, y)
# plotting the graph
plt.figure(1, figsize=(4, 3))
plt.clf()
plt.scatter(X.ravel(), y, color='g', zorder=20)
X_test = np.linspace(-10, 12, 100)
def logisticModel(x):
    """Standard logistic (sigmoid) function: maps x into (0, 1)."""
    return 1.0 / (1.0 + np.exp(-x))
#function call
# clf.coef_ / clf.intercept_ come from the LogisticRegression fitted above;
# this evaluates the fitted sigmoid over the X_test grid.
loss =logisticModel(X_test * clf.coef_ + clf.intercept_).ravel()
#plotting the output
plt.plot(X_test, loss, color='b', linewidth=3)
# Horizontal line at the 0.5 decision threshold.
plt.axhline(0.5, color='r')
plt.title("Sugar levels - diabetic or not")
plt.ylabel('Y')
plt.xlabel('X')
plt.show()
|
984,304 | 4280790e43528be0c4e68983ec9b0a12bc1c7d81 | # -*- coding: utf-8 -*-
"""
Created on Wed Nov 1 17:32:55 2017
@author: Atlas
"""
def genPrimes():
    """Generator yielding the primes 2, 3, 5, 7, ... indefinitely.

    Keeps every prime found so far and trial-divides each candidate
    against them.  The candidate counter was renamed: the original
    shadowed the builtin ``next``.
    """
    candidate = 2
    primes = []
    while True:
        # Prime iff no known prime divides it (iterate primes directly
        # instead of range(len(primes))).
        if all(candidate % p != 0 for p in primes):
            primes.append(candidate)
            yield candidate
        candidate += 1
prime = genPrimes() |
984,305 | 9ce272665b19cb7a732cf7359c3e59be6b2e48fb | /home/ayush/Desktop/auquan/anaconda2/lib/python2.7/sre.py |
984,306 | b891b53005187d2528bce1900e81694db2456ef4 | import unittest
import utils
class SignsTest(unittest.TestCase):
    """Checks utils.get_signs_count against a small fixed HTML document.

    NOTE(review): utils is a project-local module not visible here; the
    expected value 12345 matches the only text payload in the fixture.
    """

    def setUp(self):
        # Deeply nested markup whose only text content is the span "12345".
        self.html = """
<html><body>
<main>
<section></section>
<section></section>
<section>
<div><div><div>
<div></div>
<div>
<p><span>12345</span></p>
</div>
</div></div></div>
</section>
</main>
</body>
</html>
"""

    def test_signs_count(self):
        self.assertEqual(utils.get_signs_count(self.html), 12345)
|
984,307 | f75d5c4cea37f8a9a83d60f4b3931617c5f110cb | import csv
# Write two sample rows to out.csv.
data = [("One", 1, 1.5), ("Two", 2, 8.0)]
# Fixed: open with newline="" as the csv module docs require (prevents
# blank interleaved rows on Windows), and use a context manager so the
# file is closed even if writing raises.
with open("out.csv", "w", newline="") as f:
    wrtr = csv.writer(f)
    wrtr.writerows(data)
|
984,308 | 6b184528fd3a243577bbb0ba3ecbd34a95d85b79 | from application import spark_dataframe
from pyspark.sql import functions as f
import ast
class PySparkFilter:
    """Query helpers over the shared book DataFrame's average_rating column.

    NOTE(review): the class-level statements run a Spark job (collect())
    at import time to compute the global mean — confirm that is intended.
    """
    spark_df = spark_dataframe
    # Global mean of average_rating, computed once when the class is defined.
    average_rating = spark_df.select(f.avg("average_rating")).collect()[0][0]
    # Same mean rounded to two decimal places for display.
    average_rating_2dp = float(f"{average_rating:.2f}")

    @staticmethod
    def get_average_rating():
        # Rounded (2 dp) global mean, wrapped for the API response.
        return {"mean": PySparkFilter.average_rating_2dp}

    @staticmethod
    def get_book_ratings(query_strings):
        # Dispatch on the 'param' query string; unknown values return {}.
        ratings_functions = {
            "average": PySparkFilter.get_average_rating,
            "highly-rated": PySparkFilter.get_high_ratings,
            "less-rated": PySparkFilter.get_low_ratings
        }
        query_string_ratings = query_strings["param"]
        if query_string_ratings not in ratings_functions:
            return {}
        return ratings_functions[query_string_ratings]()

    @staticmethod
    def get_high_ratings():
        # Books rated at or above the (unrounded) global mean, parsed from
        # Spark's JSON rows into plain dicts.
        results = spark_dataframe.filter(
            spark_dataframe["average_rating"] >= PySparkFilter.average_rating
        ).toJSON().collect()
        return {"highly-rated": [ast.literal_eval(row) for row in results]}

    @staticmethod
    def get_low_ratings():
        # Books rated strictly below the global mean.
        results = spark_dataframe.filter(
            spark_dataframe["average_rating"] < PySparkFilter.average_rating
        ).toJSON().collect()
        return {"less-rated": [ast.literal_eval(row) for row in results]}
|
984,309 | 9f5465bc5bd15b1a734202dfea4ac819e02dbaf6 | from constraint_api import *
from test_problems import get_pokemon_problem
#### PART 1: WRITE A DEPTH-FIRST SEARCH CONSTRAINT SOLVER
def has_empty_domains(csp):
    "Returns True if the problem has one or more empty domains, otherwise False"
    return any(len(csp.domains[var]) == 0 for var in csp.variables)
def check_all_constraints(csp):
    """Return False if the problem's assigned values violate some constraint,
    otherwise True.  Constraints with an unassigned endpoint never fail."""
    for constraint in csp.get_all_constraints():
        first = csp.get_assigned_value(constraint.var1)
        second = csp.get_assigned_value(constraint.var2)
        both_assigned = first != None and second != None
        if both_assigned and constraint.check(first, second) == False:
            return False
    return True
def solve_constraint_dfs(problem) :
    """Solves the problem using depth-first search. Returns a tuple containing:
    1. the solution (a dictionary mapping variables to assigned values), and
    2. the number of extensions made (the number of problems popped off the agenda).
    If no solution was found, return None as the first element of the tuple."""
    q = [problem]
    extCount = 0
    while len(q)!=0:
        # Pop from the front; extensions are prepended below, so the agenda
        # behaves as a stack (depth-first order).
        removed = q[0]
        q = q[1:]
        extCount+=1
        # Prune states that are already inconsistent or have a dead domain.
        if has_empty_domains(removed) or check_all_constraints(removed)==False:
            continue
        # Every variable assigned and all constraints hold: solved.
        if len(removed.unassigned_vars)==0:
            return (removed.assigned_values,extCount)
        var = removed.pop_next_unassigned_var()
        extensions = []
        for val in removed.get_domain(var):
            # One child problem per candidate value of the chosen variable.
            csp_new = removed.copy()
            csp_new.set_assigned_value(var,val)
            extensions.append(csp_new)
        q = extensions + q
    return (None,extCount)
#### PART 2: DOMAIN REDUCTION BEFORE SEARCH
def eliminate_from_neighbors(csp, var) :
    """Eliminates incompatible values from var's neighbors' domains, modifying
    the original csp. Returns an alphabetically sorted list of the neighboring
    variables whose domains were reduced, with each variable appearing at most
    once. If no domains were reduced, returns empty list.
    If a domain is reduced to size 0, quits immediately and returns None."""
    reduced = []
    val = csp.get_assigned_value(var)
    # NOTE(review): `replacement` is never used — dead local.
    replacement = []
    for constraint in csp.constraints_between(var,None):
        var2 = constraint.var2
        # Work on a copy so eliminations don't disturb this iteration.
        domainCopy = csp.domains[var2][:]
        numLeft = len(domainCopy)
        # Only an assigned variable can rule out neighbor values.
        if (val!=None):
            # NOTE: xrange — this file targets Python 2.
            for i in xrange(len(domainCopy)):
                possibleVal2 = domainCopy[i]
                check = constraint.check(val,possibleVal2)
                if (check==False):
                    didEliminate = csp.eliminate(var2,possibleVal2)
                    if (didEliminate):
                        numLeft-=1
                        if var2 not in reduced:
                            reduced.append(var2)
        # A wiped-out neighbor domain means the assignment is infeasible.
        if numLeft==0:
            return None
    return sorted(reduced)
def domain_reduction(csp, queue=None) :
    """Uses constraints to reduce domains, modifying the original csp.
    If queue is None, initializes propagation queue by adding all variables in
    their default order. Returns a list of all variables that were dequeued,
    in the order they were removed from the queue. Variables may appear in the
    list multiple times.
    If a domain is reduced to size 0, quits immediately and returns None."""
    if (queue==None):
        queue = csp.get_all_variables()
    dequeued = []
    while len(queue)!=0:
        removedVar = queue[0]
        dequeued.append(removedVar)
        queue = queue[1:]
        # Iterate a snapshot of the constraints touching removedVar.
        for constraint in csp.constraints_between(removedVar,None)[:]:
            var2 = constraint.var2
            # NOTE(review): val2 is never used — dead local.
            val2 = csp.get_assigned_value(var2)
            var2Domain = csp.get_domain(var2)[:]
            removedDomain = csp.get_domain(removedVar)[:]
            if len(removedDomain)==0 or len(var2Domain)==0:
                return None
            for domainVal2 in var2Domain:
                # Keep domainVal2 only if SOME value of removedVar's domain
                # is compatible with it (arc-consistency check).
                anyNonViolators = False
                for domainVal in removedDomain:
                    check = constraint.check(domainVal,domainVal2)
                    if check==True:
                        anyNonViolators = True
                        # NOTE(review): `break` would be cheaper here; the
                        # continue just finishes the loop with no effect.
                        continue
                if anyNonViolators==False:
                    csp.eliminate(var2, domainVal2)
                    if len(csp.get_domain(var2))==0:
                        return None
                    # Re-propagate from any variable whose domain shrank.
                    if var2 not in queue:
                        queue.append(var2)
    return dequeued
# QUESTION 1: How many extensions does it take to solve the Pokemon problem
# with dfs if you DON'T use domain reduction before solving it?
# Hint: Use get_pokemon_problem() to get a new copy of the Pokemon problem
# each time you want to solve it with a different search method.
csp = get_pokemon_problem()
# Index [1] of the solver's return tuple is the extension count.
ANSWER_1 = solve_constraint_dfs(csp)[1]
# QUESTION 2: How many extensions does it take to solve the Pokemon problem
# with dfs if you DO use domain reduction before solving it?
csp = get_pokemon_problem()
# Prune domains up front, then count extensions on the reduced problem.
domain_reduction(csp,None)
ANSWER_2 = solve_constraint_dfs(csp)[1]
#### PART 3: PROPAGATION THROUGH REDUCED DOMAINS
def solve_constraint_propagate_reduced_domains(problem) :
    """Solves the problem using depth-first search with forward checking and
    propagation through all reduced domains. Same return type as
    solve_constraint_dfs."""
    q = [problem]
    extCount = 0
    while len(q)!=0:
        removed = q[0]
        q = q[1:]
        extCount+=1
        if has_empty_domains(removed) or check_all_constraints(removed)==False:
            continue
        if len(removed.unassigned_vars)==0:
            return (removed.assigned_values,extCount)
        var = removed.pop_next_unassigned_var()
        extensions = []
        for val in removed.get_domain(var):
            csp_new = removed.copy()
            csp_new.set_assigned_value(var,val)
            # Unlike plain DFS: propagate from the new assignment before
            # queueing, shrinking the child's domains eagerly.
            domain_reduction(csp_new,[var])
            extensions.append(csp_new)
        q = extensions + q
    return (None,extCount)
# QUESTION 3: How many extensions does it take to solve the Pokemon problem
# with propagation through reduced domains? (Don't use domain reduction
# before solving it.)
# Propagation happens inside the solver, so a fresh copy is enough.
csp = get_pokemon_problem()
ANSWER_3 = solve_constraint_propagate_reduced_domains(csp)[1]
#### PART 4: PROPAGATION THROUGH SINGLETON DOMAINS
def domain_reduction_singleton_domains(csp, queue=None) :
    """Uses constraints to reduce domains, modifying the original csp.
    Only propagates through singleton domains.
    Same return type as domain_reduction."""
    if (queue==None):
        queue = csp.get_all_variables()
    dequeued = []
    while len(queue)!=0:
        removedVar = queue[0]
        dequeued.append(removedVar)
        queue = queue[1:]
        for constraint in csp.constraints_between(removedVar,None)[:]:
            var2 = constraint.var2
            # NOTE(review): val2 is never used — dead local.
            val2 = csp.get_assigned_value(var2)
            var2Domain = csp.get_domain(var2)[:]
            removedDomain = csp.get_domain(removedVar)[:]
            if len(removedDomain)==0 or len(var2Domain)==0:
                return None
            for domainVal2 in var2Domain:
                anyNonViolators = False
                for domainVal in removedDomain:
                    check = constraint.check(domainVal,domainVal2)
                    if check==True:
                        anyNonViolators = True
                        continue
                if anyNonViolators==False:
                    csp.eliminate(var2, domainVal2)
                    if len(csp.get_domain(var2))==0:
                        return None
                    # The only difference from domain_reduction: re-enqueue
                    # a reduced variable only once its domain is a singleton.
                    if var2 not in queue and len(csp.get_domain(var2))==1:
                        queue.append(var2)
    return dequeued
def solve_constraint_propagate_singleton_domains(problem) :
    """Solves the problem using depth-first search with forward checking and
    propagation through singleton domains. Same return type as
    solve_constraint_dfs."""
    q = [problem]
    extCount = 0
    while len(q)!=0:
        removed = q[0]
        q = q[1:]
        extCount+=1
        if has_empty_domains(removed) or check_all_constraints(removed)==False:
            continue
        if len(removed.unassigned_vars)==0:
            return (removed.assigned_values,extCount)
        var = removed.pop_next_unassigned_var()
        extensions = []
        for val in removed.get_domain(var):
            csp_new = removed.copy()
            csp_new.set_assigned_value(var,val)
            # Propagate, but only chain through variables reduced to a
            # single value (cheaper than full domain reduction).
            domain_reduction_singleton_domains(csp_new,[var])
            extensions.append(csp_new)
        q = extensions + q
    return (None,extCount)
# QUESTION 4: How many extensions does it take to solve the Pokemon problem
# with propagation through singleton domains? (Don't use domain reduction
# before solving it.)
# Fresh problem copy; singleton propagation happens inside the solver.
csp = get_pokemon_problem()
ANSWER_4 = solve_constraint_propagate_singleton_domains(csp)[1]
#### PART 5: FORWARD CHECKING
def propagate(enqueue_condition_fn, csp, queue=None) :
    """Uses constraints to reduce domains, modifying the original csp.
    Uses enqueue_condition_fn to determine whether to enqueue a variable whose
    domain has been reduced. Same return type as domain_reduction."""
    if (queue==None):
        queue = csp.get_all_variables()
    dequeued = []
    while len(queue)!=0:
        removedVar = queue[0]
        dequeued.append(removedVar)
        queue = queue[1:]
        for constraint in csp.constraints_between(removedVar,None)[:]:
            var2 = constraint.var2
            # NOTE(review): val2 is never used — dead local.
            val2 = csp.get_assigned_value(var2)
            var2Domain = csp.get_domain(var2)[:]
            removedDomain = csp.get_domain(removedVar)[:]
            if len(removedDomain)==0 or len(var2Domain)==0:
                return None
            for domainVal2 in var2Domain:
                anyNonViolators = False
                for domainVal in removedDomain:
                    check = constraint.check(domainVal,domainVal2)
                    if check==True:
                        anyNonViolators = True
                        continue
                if anyNonViolators==False:
                    csp.eliminate(var2, domainVal2)
                    if len(csp.get_domain(var2))==0:
                        return None
                    # Generic version of the two domain_reduction variants:
                    # the supplied predicate decides whether to chain.
                    if var2 not in queue and enqueue_condition_fn(csp,var2):
                        queue.append(var2)
    return dequeued
def condition_domain_reduction(csp, var) :
    """Returns True if var should be enqueued under the all-reduced-domains
    condition, otherwise False"""
    # Any reduction at all re-enqueues the variable (full propagation).
    return True
def condition_singleton(csp, var):
    """Returns True if var should be enqueued under the singleton-domains
    condition (its domain has been narrowed to exactly one value)."""
    domain = csp.get_domain(var)
    return len(domain) == 1
def condition_forward_checking(csp, var) :
    """Returns True if var should be enqueued under the forward-checking
    condition, otherwise False"""
    # Forward checking never chains beyond the first reduction pass.
    return False
#### PART 6: GENERIC CSP SOLVER
def solve_constraint_generic(problem, enqueue_condition=None) :
    """Solves the problem, calling propagate with the specified enqueue
    condition (a function). If enqueue_condition is None, uses DFS only.
    Same return type as solve_constraint_dfs."""
    q = [problem]
    extCount = 0
    while len(q)!=0:
        removed = q[0]
        q = q[1:]
        extCount+=1
        if has_empty_domains(removed) or check_all_constraints(removed)==False:
            continue
        if len(removed.unassigned_vars)==0:
            return (removed.assigned_values,extCount)
        var = removed.pop_next_unassigned_var()
        extensions = []
        for val in removed.get_domain(var):
            csp_new = removed.copy()
            csp_new.set_assigned_value(var,val)
            # With no condition this degenerates to plain DFS; otherwise
            # the predicate controls how far propagation chains.
            if (enqueue_condition!=None):
                propagate(enqueue_condition,csp_new,[var])
            extensions.append(csp_new)
        q = extensions + q
    return (None,extCount)
# QUESTION 5: How many extensions does it take to solve the Pokemon problem
# with DFS and forward checking, but no propagation? (Don't use domain
# reduction before solving it.)
# Forward checking = propagate once from each assignment, never chain.
csp = get_pokemon_problem()
ANSWER_5 = solve_constraint_generic(csp, condition_forward_checking)[1]
#### PART 7: DEFINING CUSTOM CONSTRAINTS
def constraint_adjacent(m, n):
    """Returns True if ints m and n differ by exactly 1, otherwise False."""
    return m - n in (1, -1)
def constraint_not_adjacent(m, n):
    """Returns True if ints m and n do NOT differ by exactly 1.

    Inlines the adjacency test so the check is self-contained.
    """
    return abs(m - n) != 1
def all_different(variables) :
    """Returns a list of constraints, with one difference constraint between
    each pair of variables."""
    constraints = []
    # NOTE: xrange — this file targets Python 2.
    for i in xrange(len(variables)):
        var1 = variables[i]
        # j starts past i so each unordered pair appears exactly once.
        for j in xrange(i+1,len(variables)):
            var2 = variables[j]
            # Guards against duplicate entries in `variables`.
            if var1!=var2:
                constraints.append(Constraint(var1,var2,constraint_different))
    return constraints
#### PART 8: MOOSE PROBLEM (OPTIONAL)
# Optional exercise scaffold: the problem object is declared but no domains
# or constraints were added, and TEST_MOOSE_PROBLEM is left False.
moose_problem = ConstraintSatisfactionProblem(["You", "Moose", "McCain",
                                               "Palin", "Obama", "Biden"])
# Add domains and constraints to your moose_problem here:
# To test your moose_problem AFTER implementing all the solve_constraint
# methods above, change TEST_MOOSE_PROBLEM to True:
TEST_MOOSE_PROBLEM = False
#### SURVEY ###################################################
NAME = "Rebecca Corcillo"
COLLABORATORS = "Nobody"
HOW_MANY_HOURS_THIS_LAB_TOOK = "10"
WHAT_I_FOUND_INTERESTING = ""
WHAT_I_FOUND_BORING = ""
SUGGESTIONS = ""
###########################################################
### Ignore everything below this line; for testing only ###
###########################################################
if TEST_MOOSE_PROBLEM:
    # These lines are used in the local tester iff TEST_MOOSE_PROBLEM is True
    moose_answer_dfs = solve_constraint_dfs(moose_problem.copy())
    moose_answer_propany = solve_constraint_propagate_reduced_domains(moose_problem.copy())
    moose_answer_prop1 = solve_constraint_propagate_singleton_domains(moose_problem.copy())
    moose_answer_generic_dfs = solve_constraint_generic(moose_problem.copy(), None)
    moose_answer_generic_propany = solve_constraint_generic(moose_problem.copy(), condition_domain_reduction)
    moose_answer_generic_prop1 = solve_constraint_generic(moose_problem.copy(), condition_singleton)
    moose_answer_generic_fc = solve_constraint_generic(moose_problem.copy(), condition_forward_checking)
    moose_instance_for_domain_reduction = moose_problem.copy()
    moose_answer_domain_reduction = domain_reduction(moose_instance_for_domain_reduction)
    moose_instance_for_domain_reduction_singleton = moose_problem.copy()
    moose_answer_domain_reduction_singleton = domain_reduction_singleton_domains(moose_instance_for_domain_reduction_singleton)
|
984,310 | 4610d425787b7eff827726b8877c3a1ed6db4631 | from operators.facts_calculator import FactsCalculatorOperator
from operators.has_rows import HasRowsOperator
from operators.s3_to_redshift import S3ToRedshiftOperator
# Public operator exports for `from operators import *`.
__all__ = [
    'FactsCalculatorOperator',
    'HasRowsOperator',
    'S3ToRedshiftOperator'
]
|
984,311 | 6f4312771e7149e4c4a9bbe783c94b449f7ad729 | for i in range(int(raw_input())):
n1 = int(raw_input())
arr = map(int, raw_input().split())
n2 = int(raw_input())
arr1 = map(int, raw_input().split())
try:
n1 = arr.index(n1)
n2 = arr1.index(n2)
except ValueError:
n1 = ''
n2 = ''
if n1 != '' and n2 != '':
print 'Yes'
else:
print 'No'
|
984,312 | 52ac5d2c5f6c5b413a98a1187a3cc2e5d1204f5f | #encoding:utf-8
from django.db import models
import datetime
# Fixed: five different question types all shared the stored value 1, so
# rows could not distinguish them (choice keys must be unique).
# NOTE(review): "Pregunta Abiertas" is still listed twice (keys 1 and 5) as
# in the original — confirm whether one of them should be another type.
# NOTE(review): if rows already exist with tipo_pregunta == 1, a data
# migration is required before deploying this renumbering.
CHOICES_TIPO_PREGUNTA = ((0, "Seleccion Multiple"), (1, "Pregunta Abiertas"), (2, "Pregunta Reflexivas"), (3, "Pregunta Cerradas"), (4, "Pregunta Verdadero - Falso"), (5, "Pregunta Abiertas"))
CHOICES_TIPO_USUARIO = ((0, "Psicologo"), (1, "Estudiante"))
# NOTE(review): these two fields are declared at module level, outside any
# Model class, so Django never registers them — presumably leftovers from a
# removed Contacto/Comentario model; confirm before deleting.
Contacto = models.CharField(max_length=70)
Comentario = models.CharField(max_length=70)
class Persona(models.Model):
    """Profile linked one-to-one with a Django auth User."""
    # NOTE(review): age stored as free text (max 3 chars), not an integer.
    edad = models.CharField(max_length=3)
    nombre = models.CharField(max_length=50)
    apellido = models.CharField(max_length=50)
    # 0 = Psicologo, 1 = Estudiante (see CHOICES_TIPO_USUARIO).
    tipo_usuario = models.IntegerField(choices=CHOICES_TIPO_USUARIO)
    usuario = models.OneToOneField("auth.User")

    def __unicode__(self):
        # Python 2 Django: display the linked auth user.
        return "%s" % (self.usuario)
class Prueba(models.Model):
    """A test/assessment; composed of Modulo rows via Modulo.prueba."""
    nombre = models.CharField(max_length=50)
    descripcion = models.CharField(max_length=200)

    def __unicode__(self):
        return "%s" % (self.nombre)
class Modulo(models.Model):
    """A module of a Prueba; groups Seccion rows via Seccion.modulo."""
    nombre = models.CharField(max_length=50)
    descripcion = models.CharField(max_length=200)
    prueba = models.ForeignKey(Prueba)

    def secciones_por_contestar(self, persona):
        # Count sections with no *finished* answer record for this user.
        mr = self.seccion_set.exclude(seccioncontestada__usuario=persona, seccioncontestada__fecha_final__isnull=False)
        return mr.count()

    def secciones_contestadas(self, persona):
        # NOTE(review): filters fecha_final__isnull=True, i.e. *unfinished*
        # attempts, though the name suggests finished ones — confirm intent.
        sc = self.seccion_set.filter(seccioncontestada__usuario=persona, seccioncontestada__fecha_final__isnull=True)
        return sc.count()

    def __unicode__(self):
        return "%s en %s" % (self.nombre, self.prueba)
class competencia(models.Model):
    """An ordered, named competency evaluated by sections.

    NOTE(review): lower-case class name breaks PEP 8 / Django convention;
    renaming would touch the FKs in Seccion and competencia_seleccionada,
    so it is left as-is.
    """
    orden = models.IntegerField ()
    nombre = models.CharField(max_length=30)
    descripcion = models.CharField(max_length=30)

    class Meta:
        # No duplicate (orden, nombre) pairs.
        unique_together = (("orden", "nombre"))

    def __unicode__(self):
        return "%s la %s" % (self.orden, self.nombre)
class Seccion(models.Model):
    """A section inside a Modulo, tied to one competencia."""
    nombre = models.CharField(max_length=30)
    instruccion = models.TextField()
    modulo = models.ForeignKey(Modulo)
    competencia = models.ForeignKey(competencia)

    def preguntas_sin_contestar(self, persona):
        # Remaining questions for this user: if an attempt record exists,
        # delegate to it; otherwise every question is still pending.
        r = self.seccioncontestada_set.filter(usuario=persona)
        if r.count() > 0:
            return r[0].preguntas_a_contestar().count()
        else:
            return self.pregunta_set.count()

    def __unicode__(self):
        return "%s en %s" % (self.nombre, self.modulo)
class competencia_seleccionada(models.Model):
    """Per-user score/observation for a competencia within a Seccion."""
    usuario = models.ForeignKey(Persona)
    competencia = models.ForeignKey(competencia)
    seccion = models.ForeignKey(Seccion)
    puntaje = models.IntegerField(null=True, blank=True)
    observacion = models.CharField(null=True, blank=True, max_length=200)
    class Meta:
        # One record per (user, section) pair.
        unique_together = (("usuario", "seccion"))
    def sumar_puntaje(self, puntaje, rppuntaje):
        # Pure helper: returns the sum; persists nothing.
        return (puntaje+rppuntaje)
    def __unicode__(self):
        return "%s en %s" % (self.usuario, self.seccion)
class Pregunta(models.Model):
    """A question inside a Seccion, ordered and typed."""
    orden = models.IntegerField()
    descripcion_text = models.TextField()
    #descripcion_imag = models.ImageField(upload_to='imagen')
    # Optional time limit -- units not recorded here; presumably seconds (confirm).
    tiempo = models.IntegerField(null=True, blank=True)
    tipo_pregunta = models.IntegerField(choices=CHOICES_TIPO_PREGUNTA)
    seccion = models.ForeignKey(Seccion)
    class Meta:
        unique_together = (("orden", "seccion"))
    def __unicode__(self):
        return "%s la %s" % (self.orden, self.seccion)
class Respuesta(models.Model):
    """A candidate answer for a Pregunta, with its score weight."""
    pregunta = models.ForeignKey(Pregunta)
    descripcion = models.TextField()
    orden = models.IntegerField ()
    puntaje = models.IntegerField()
    @property
    def orden_letra(self):
        # Map 1-based orden to a letter label: 1 -> "A", 2 -> "B", ...
        return chr(ord("A") + self.orden-1)
    class Meta:
        unique_together = (("orden", "pregunta"))
    def __unicode__(self):
        return "%s en %s" % (self.orden, self.pregunta)
class SeccionContestada(models.Model):
    """One persona's attempt at a Seccion; fecha_final stays null while open."""
    seccion = models.ForeignKey(Seccion)
    # Callable default: evaluated per row at insert time, not at import time.
    fecha_inicio = models.DateTimeField(default=datetime.datetime.now)
    fecha_final = models.DateTimeField(blank=True, null=True)
    usuario = models.ForeignKey(Persona)
    class Meta:
        unique_together = (("seccion", "usuario"))
    def preguntas_a_contestar(self):
        # Questions of the section this user has not yet answered, in order.
        return self.seccion.pregunta_set.exclude(seleccion__seccion_contestada__usuario=self.usuario).order_by("orden")
    def __unicode__(self):
        return "%s la %s" % (self.seccion, self.fecha_inicio)
class Seleccion(models.Model):
    """The answer a user picked for a question within one attempt."""
    respuesta = models.ForeignKey(Respuesta)
    pregunta = models.ForeignKey(Pregunta)
    seccion_contestada = models.ForeignKey(SeccionContestada)
    class Meta:
        # At most one selection per question per attempt.
        unique_together = (("pregunta", "seccion_contestada"))
    def __unicode__(self):
        return "%s la %s" % (self.respuesta, self.seccion_contestada)
|
984,313 | 9771f08b07549eaae1ed47e400efad008fe10504 |
# l=("apple","mango","grapes","banana","kiwi")
# print(l)
# l.append("papaya")
# l.append("lichi")
# l1=[1,2,3,4]
# l.extend(l1)
# l.insert(0,"watermelon")
# print(l)
# l.remove(l[2])
# l.pop()
# print(l)
# print(l[2:5])
# print(len(l))
# l={"apple","mango","grapes","banana","kiwi"}
# Set algebra demo: union, intersection and difference (operator form).
l = {1, 3, 5, 6, 7, 3, 5}  # duplicate literals collapse on construction
print(l)
s = {1, 2, 4, 6, 8, 9}
l = l | s   # union of the two sets
print(l)
l = l & s   # intersection; l holds the union, so this yields s again
print(l)
l2 = l - s  # difference; empty, since l now equals s
print(l2)
# l1={1,2,3,6,8,9}
# l2=l.union(l1)
# print(l2)
# l2.remove(3)
# print(l2)
# print(len(l2))
# l2=l.intersection(l1)
# print(l2)
# l2=l.difference(l1)
# print(l2)
# l3={7,9,10,11}
# l2.update(l3)
# print(l2)
# dict = {
# "brand": "Ford",
# "electric": False,
# "year": 1964,
# "colors": ["red", "white", "blue"]
# }
# dict["speed"]=180
# print(dict)
# x=dict["year"]
# print(x)
# dict.pop("speed")
# print(dict)
# for x in dict.values():
# print(x)
|
984,314 | cf16fa70d0bde21691bdfb68229b3a6e3343b671 | from ui import UI
from ui.low.find import Find
__author__ = 'John Underwood'
class PhysicalAddress(UI):
    """
    Adds a new physical address for a provider.
    Values are preset using our VISTA physical address.
    May override the address description, addressType, address1, address2,
    city, state, zipCode, and country fields.
    """
    def __init__(self, override=None):
        super().__init__()
        # Locate the provider record first; Find drives the search UI.
        Find(override)
        # Default data values followed by executable UI steps.
        # '&name;' tokens are substituted by the UI framework from the data
        # values above (or their overrides) at execution time.
        runtime = {
            'description': 'QA Physical Address',
            'addressType': 'Other',
            'address1': '2800 E Cottonwood Pkwy',
            'address2': 'Suite 400',
            'city': 'Cottonwood Heights',
            'state': 'Utah',
            'zipCode': '84121',
            'country': 'United States',
            # Executable runtime elements follow.
            'home': (
                'Click',
                '//*[@id="ribbon_form"]/ul/li/div[3]/div[4]/div[1]/a[5]/i',),
            'add': ('Click', '//*[@id="addressGrid_form"]/a'),
            'addrDescription': ('Type', '#address_description', '&description;'),
            'addrType': ('Select', '#correspondence_method_type_id',
                         {'value': '&addressType;'}),
            'addr1': ('Type', '#address_1', '&address1;'),
            'addr2': ('Type', '#address_2', '&address2;'),
            'addrCity': ('Type', '#city', '&city;'),
            'addrState': ('Select', '#state', {'value': '&state;'}),
            'addrZip': ('Type', '#zip_code', '&zipCode;'),
            'addrCountry': ('Select', '#country_code', {'value': '&country;'}),
            'save': ('Click', '#save-n-check'),
            'saveAddressCheck': ('Click', 'css=.waves-effect.waves-light.btn.'
                                          'right-align.modal-action.'
                                          'modal-close'),
            'closeAddresses': ('Click', '#addressGridClose'),
            'closeWorkspace': ('Click', 'css=.waves-effect.waves-light.'
                                        'btn.right-align')
        }
        process = UI(override)
        process.update(runtime)
        # Open the addresses workspace from the ribbon, then wait for it.
        process.execute(('home',))
        process.wait()
        # Fill in the address form and save it.
        order = ('add', 'addrDescription', 'addrType', 'addr1', 'addr2',
                 'addrCity', 'addrState', 'addrZip', 'addrCountry', 'save',
                 'saveAddressCheck',)
        process.execute(order)
        process.wait()
        # Close the address grid and the workspace.
        process.execute(('closeAddresses', 'closeWorkspace'))
        process.wait()
|
984,315 | bdc3c536ef0413521b7326c02947cd8564dd32bd | from __future__ import print_function
import time
from panda import Panda
from nose.tools import assert_equal, assert_less, assert_greater
from helpers import time_many_sends, test_two_panda, panda_color_to_serial
@test_two_panda
@panda_color_to_serial
def test_send_recv(serial_sender=None, serial_reciever=None):
    """Two-panda saturation test: one panda floods a bus, the other receives.

    For every bus (0-2) and nominal speed, asserts achieved throughput is
    between 80% and 100% of the configured bus speed.
    """
    p_send = Panda(serial_sender)
    p_recv = Panda(serial_reciever)
    # Sender must be allowed to transmit arbitrary frames.
    p_send.set_safety_mode(Panda.SAFETY_ALLOUTPUT)
    p_send.set_can_loopback(False)
    p_recv.set_can_loopback(False)
    assert not p_send.legacy
    assert not p_recv.legacy
    # Prime the link, then drain both RX queues before measuring.
    p_send.can_send_many([(0x1ba, 0, "message", 0)]*2)
    time.sleep(0.05)
    p_recv.can_recv()
    p_send.can_recv()
    busses = [0,1,2]
    for bus in busses:
        for speed in [100, 250, 500, 750, 1000]:
            p_send.set_can_speed_kbps(bus, speed)
            p_recv.set_can_speed_kbps(bus, speed)
            # Let the transceivers settle at the new bitrate.
            time.sleep(0.05)
            comp_kbps = time_many_sends(p_send, bus, p_recv, two_pandas=True)
            saturation_pct = (comp_kbps/speed) * 100.0
            assert_greater(saturation_pct, 80)
            assert_less(saturation_pct, 100)
            print("two pandas bus {}, 100 messages at speed {:4d}, comp speed is {:7.2f}, percent {:6.2f}".format(bus, speed, comp_kbps, saturation_pct))
@test_two_panda
@panda_color_to_serial
def test_latency(serial_sender=None, serial_reciever=None):
    """Two-panda latency test.

    Sends single frames and measures round-trip delivery time; asserts
    per-message latency under 5 ms and average latency under 1 ms for every
    bus/speed combination.
    """
    p_send = Panda(serial_sender)
    p_recv = Panda(serial_reciever)
    p_send.set_safety_mode(Panda.SAFETY_ALLOUTPUT)
    p_send.set_can_loopback(False)
    p_recv.set_can_loopback(False)
    assert not p_send.legacy
    assert not p_recv.legacy
    p_send.set_can_speed_kbps(0, 100)
    p_recv.set_can_speed_kbps(0, 100)
    time.sleep(0.05)
    # Prime the link, then drain both RX queues.
    p_send.can_send_many([(0x1ba, 0, "testmsg", 0)]*10)
    time.sleep(0.05)
    p_recv.can_recv()
    p_send.can_recv()
    busses = [0,1,2]
    for bus in busses:
        for speed in [100, 250, 500, 750, 1000]:
            p_send.set_can_speed_kbps(bus, speed)
            p_recv.set_can_speed_kbps(bus, speed)
            time.sleep(0.1)
            # Clear CAN buffers on both sides (recv until empty).
            r = [1]
            while len(r) > 0:
                r = p_send.can_recv()
            r = [1]
            while len(r) > 0:
                r = p_recv.can_recv()
            time.sleep(0.05)
            latencies = []
            comp_kbps_list = []
            saturation_pcts = []
            num_messages = 100
            for i in range(num_messages):
                st = time.time()
                p_send.can_send(0x1ab, "message", bus)
                # Wait (max 5 s) for the receiver to see the frame.
                r = []
                while len(r) < 1 and (time.time() - st) < 5:
                    r = p_recv.can_recv()
                et = time.time()
                # Also wait for the sender's own TX echo (max 10 s total).
                r_echo = []
                while len(r_echo) < 1 and (time.time() - st) < 10:
                    r_echo = p_send.can_recv()
                if len(r) == 0 or len(r_echo) == 0:
                    print("r: {}, r_echo: {}".format(r, r_echo))
                assert_equal(len(r),1)
                assert_equal(len(r_echo),1)
                # et is now elapsed milliseconds for the round trip.
                et = (et - st)*1000.0
                # The parenthesised sum is the bit count of one CAN frame with
                # an 8-byte payload (SOF+ID+control+64 data+CRC+ACK+EOF+IFS).
                comp_kbps = (1+11+1+1+1+4+8*8+15+1+1+1+7) / et
                # Latency = measured time minus the ideal on-wire frame time.
                latency = et - ((1+11+1+1+1+4+8*8+15+1+1+1+7) / speed)
                assert_less(latency, 5.0)
                saturation_pct = (comp_kbps/speed) * 100.0
                latencies.append(latency)
                comp_kbps_list.append(comp_kbps)
                saturation_pcts.append(saturation_pct)
            average_latency = sum(latencies)/num_messages
            assert_less(average_latency, 1.0)
            average_comp_kbps = sum(comp_kbps_list)/num_messages
            average_saturation_pct = sum(saturation_pcts)/num_messages
            print("two pandas bus {}, {} message average at speed {:4d}, latency is {:5.3f}ms, comp speed is {:7.2f}, percent {:6.2f}"\
                  .format(bus, num_messages, speed, average_latency, average_comp_kbps, average_saturation_pct))
|
984,316 | 373fb00de9f54bdaa8e01d42c0b82c14dbcec912 | #!/usr/bin/env python3
import numpy as np
from imutils.video import WebcamVideoStream
import cv2, time, threading, math
from flask import Flask, render_template, Response
# Start a threaded webcam reader (device 0) and prime one frame.
cap = WebcamVideoStream(src=0).start()
frame = cap.read()  # NOTE(review): never used afterwards -- warm-up read? confirm
app = Flask(__name__)
def image2jpeg(image):
    """Encode an image array to JPEG bytes.

    Raises:
        ValueError: if OpenCV fails to encode the frame. Previously the
        success flag from imencode was ignored, so a failed encode crashed
        later with an unrelated error on .tobytes().
    """
    ret, jpeg = cv2.imencode('.jpg', image)
    if not ret:
        raise ValueError('cv2.imencode failed to encode frame as JPEG')
    return jpeg.tobytes()
@app.route('/')
def index():
    """Serve the control page template."""
    page = render_template('index_control.html')
    return page
def gen_cl():
    """Endlessly yield multipart/x-mixed-replace chunks of JPEG frames.

    Each chunk is a '--frame' boundary header followed by the JPEG bytes of
    the current camera frame cropped to rows 40:450.
    """
    header = b'--frame\r\nContent-Type: image/jpeg\r\n\r\n'
    while True:
        snapshot = cap.read()
        payload = image2jpeg(snapshot[40:450, :, :])
        yield header + payload + b'\r\n\r\n'
@app.route('/video_cl')
def video_cl():
    """Stream the cropped camera feed as an MJPEG response."""
    stream = gen_cl()
    return Response(stream,
                    mimetype='multipart/x-mixed-replace; boundary=frame')
# Listen on all interfaces; threaded so multiple clients can stream at once.
app.run(host='0.0.0.0', debug=False,threaded=True)
|
984,317 | af66f2d2a4b7474ccef0e8f62f779cdddfc371b6 | """
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import openapi_client
from openapi_client.model.net_corda_core_contracts_command_object import NetCordaCoreContractsCommandObject
from openapi_client.model.net_corda_core_contracts_privacy_salt import NetCordaCoreContractsPrivacySalt
from openapi_client.model.net_corda_core_contracts_state_ref import NetCordaCoreContractsStateRef
from openapi_client.model.net_corda_core_contracts_time_window import NetCordaCoreContractsTimeWindow
from openapi_client.model.net_corda_core_contracts_transaction_state_net_corda_core_contracts_contract_state import NetCordaCoreContractsTransactionStateNetCordaCoreContractsContractState
from openapi_client.model.net_corda_core_identity_party import NetCordaCoreIdentityParty
globals()['NetCordaCoreContractsCommandObject'] = NetCordaCoreContractsCommandObject
globals()['NetCordaCoreContractsPrivacySalt'] = NetCordaCoreContractsPrivacySalt
globals()['NetCordaCoreContractsStateRef'] = NetCordaCoreContractsStateRef
globals()['NetCordaCoreContractsTimeWindow'] = NetCordaCoreContractsTimeWindow
globals()['NetCordaCoreContractsTransactionStateNetCordaCoreContractsContractState'] = NetCordaCoreContractsTransactionStateNetCordaCoreContractsContractState
globals()['NetCordaCoreIdentityParty'] = NetCordaCoreIdentityParty
from openapi_client.model.net_corda_core_transactions_wire_transaction import NetCordaCoreTransactionsWireTransaction
class TestNetCordaCoreTransactionsWireTransaction(unittest.TestCase):
    """NetCordaCoreTransactionsWireTransaction unit test stubs (OpenAPI-generated)."""
    def setUp(self):
        # Generated stub: no fixtures required yet.
        pass
    def tearDown(self):
        pass
    def testNetCordaCoreTransactionsWireTransaction(self):
        """Test NetCordaCoreTransactionsWireTransaction"""
        # FIXME: construct object with mandatory attributes with example values
        # model = NetCordaCoreTransactionsWireTransaction()  # noqa: E501
        pass
# Allow running this stub file directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
|
984,318 | 5b1fa48e45c152e150cd89be0fcfb8eaa3c78e60 | # -*- coding: utf-8 -*-
# Author: yohannxu
# Email: yuhannxu@gmail.com
# CreateTime: 2020-03-05 15:01:30
# Description: COCOๆ ผๅผๆฐๆฎ้
import bisect
import itertools
import json
import math
import os
from glob import glob
import torch
from easydict import EasyDict
from PIL import Image
from pycocotools.coco import COCO
from torch.utils.data import BatchSampler, Dataset
from torchvision.transforms import transforms as T
from ..utils import type_check
class COCODataset(Dataset):
    """
    COCO-format dataset, used for both training and validation.
    """
    @type_check(object, EasyDict, T.Compose, bool)
    def __init__(self, cfg, transforms=None, is_train=True):
        """
        Args:
            cfg: config object (EasyDict)
            transforms: image preprocessing pipeline
            is_train: bool, training vs validation split
        """
        super(COCODataset, self).__init__()
        if is_train:
            self.root = cfg.DATASET.TRAIN_ROOT
            self.anno_file = cfg.DATASET.TRAIN_ANNO
        else:
            self.root = cfg.DATASET.VAL_ROOT
            self.anno_file = cfg.DATASET.VAL_ANNO
        self.transforms = transforms
        if 'coco' in self.root:
            # COCO category ids are not contiguous 1..80; this mapping
            # remaps them to 1..80.
            with open('faster_rcnn/data/classes.json') as f:
                self.classes = json.load(f)
        else:
            # Non-COCO (e.g. VOC-style) data: identity mapping for 20 classes.
            self.classes = {str(i): i for i in range(1, 21)}
        # Load the annotation index.
        self.coco = COCO(self.anno_file)
        # All image ids, sorted.
        ids = list(sorted(self.coco.imgs.keys()))
        # Drop images that carry no usable bbox annotations.
        self.ids = []
        for img_id in ids:
            anno_ids = self.coco.getAnnIds(img_id)
            if anno_ids:
                annos = self.coco.loadAnns(anno_ids)
                # Keep the image only if every annotation's bbox width and
                # height are > 1; if any is <= 1, the whole image is dropped.
                # TODO could drop just the offending annotation instead.
                if not all(any(scale <= 1 for scale in anno['bbox'][2:]) for anno in annos):
                    self.ids.append(img_id)
    def __len__(self):
        return len(self.ids)
    @type_check(object, int)
    def __getitem__(self, idx):
        img_id = self.ids[idx]
        image_name = self.coco.loadImgs(ids=img_id)[0]['file_name']
        image = Image.open(os.path.join(self.root, image_name)).convert('RGB')
        target = self.coco.imgToAnns[img_id]
        # Extract the detection-relevant parts of each annotation.
        bbox = []
        cat = []
        for ann in target:
            bbox.append(ann['bbox'])
            cat.append(self.classes[str(ann['category_id'])])
        bbox = torch.tensor(bbox)
        # Convert COCO [x, y, w, h] to corner form [x1, y1, x2, y2] (inclusive).
        bbox[:, 2:] = bbox[:, :2] + bbox[:, 2:] - 1
        cat = torch.tensor(cat)
        data = {
            'image': image,
            'bbox': bbox,
            'cat': cat,
            'name': image_name,
            'img_id': img_id
        }
        if self.transforms:
            data = self.transforms(data)
        return data
    def get_info(self, index):
        # Image metadata (height/width etc.) without decoding the image.
        img_id = self.ids[index]
        info = self.coco.imgs[img_id]
        return info
class InferenceDataset(Dataset):
    """
    Dataset used at inference time: iterates raw images from a directory.
    """
    @type_check(object, str, T.Compose)
    def __init__(self, image_dir, transforms=None):
        """
        Args:
            image_dir: str, directory holding the images to run inference on
            transforms: image preprocessing pipeline
        """
        super(InferenceDataset, self).__init__()
        self.image_names = glob('{}/*'.format(image_dir))
        self.transforms = transforms
    def __len__(self):
        return len(self.image_names)
    @type_check(object, int)
    def __getitem__(self, idx):
        image_name = self.image_names[idx]
        image = Image.open(image_name).convert('RGB')
        # Keep the unprocessed image alongside the transformed one for display.
        data = {
            'ori_image': image,
            'image': image,
            'name': image_name
        }
        if self.transforms:
            data = self.transforms(data)
        return data
class DataSampler(BatchSampler):
    """
    Batch sampler that puts images of similar aspect ratio into the same
    batch, reducing padding and therefore memory usage.
    """
    @type_check(object, Dataset, EasyDict, int, bool)
    def __init__(self, dataset, cfg, start_iter=0, is_train=True):
        """
        Args:
            dataset: the dataset to sample from
            cfg: config object
            start_iter: iteration to resume from
            is_train: training vs validation mode
        """
        # Aspect ratio (height/width) of every image in the dataset.
        aspect_ratios = self.compute_aspect_ratios(dataset)
        # Assign every image to a group according to the ratio thresholds.
        group_thresholds = cfg.DATASET.GROUP_THRESHOLD
        self.groups = torch.as_tensor(self.divide(aspect_ratios, group_thresholds))
        # Distinct group ids, sorted ascending.
        self.group_ids = torch.unique(self.groups).sort(0)[0]
        self.dataset = dataset
        self.start_iter = start_iter
        self.is_train = is_train
        if self.is_train:
            self.batch_size = cfg.DATASET.TRAIN_BATCH_SIZE
            self.num_iters = cfg.TRAIN.NUM_ITERS
        else:
            self.batch_size = cfg.DATASET.VAL_BATCH_SIZE
            self.num_iters = len(dataset)
    @type_check(object, Dataset)
    def compute_aspect_ratios(self, dataset):
        """
        Compute height/width for every image (metadata only, no decoding).
        """
        aspect_ratios = []
        for i in range(len(dataset)):
            info = dataset.get_info(i)
            aspect_ratio = info['height'] / info['width']
            aspect_ratios.append(aspect_ratio)
        return aspect_ratios
    @type_check(object, list, list)
    def divide(self, ratios, thresholds=[1]):
        """
        Partition images into groups by aspect-ratio thresholds.
        Default: two groups, ratio <= 1 and ratio > 1.
        Args:
            ratios: aspect ratios of all images
            thresholds: group boundary values
        """
        # NOTE(review): mutable default argument [1]; harmless because it is
        # only read, but a tuple default would be safer.
        thresholds = sorted(thresholds)
        groups = list(map(lambda ratio: bisect.bisect_right(thresholds, ratio), ratios))
        return groups
    def prepare_batches(self):
        """
        Build batches from the (already permuted) sample ids, grouped so each
        batch contains images from a single aspect-ratio group.
        """
        mask = self.sample_ids >= 0
        self.groups = self.groups[self.sample_ids]
        # Boolean index of each group's members.
        clusters = [(self.groups == i) & mask for i in self.group_ids]
        permuted_clusters = [self.sample_ids[idx] for idx in clusters]
        # Split each group into batch-sized chunks.
        splits = [c.split(self.batch_size) for c in permuted_clusters]
        # Flatten the per-group chunks into one sequence of batches.
        merged = tuple(itertools.chain.from_iterable(splits))
        # First image index of every batch.
        first_element_of_batch = [t[0].item() for t in merged]
        # Position of each image id inside sample_ids.
        inv_sampled_ids_map = {v: k for k, v in enumerate(self.sample_ids.tolist())}
        first_index_of_batch = torch.as_tensor(
            [inv_sampled_ids_map[s] for s in first_element_of_batch]
        )
        # Order batches by where their first image appears in the permutation.
        permutation_order = first_index_of_batch.sort(0)[1].tolist()
        # Batches in final order.
        batches = [merged[i].tolist() for i in permutation_order]
        return batches
    def __iter__(self):
        iteration = self.start_iter
        while iteration <= self.num_iters:
            # Training: reshuffle the image order on every pass (per epoch);
            # validation keeps the natural order.
            if self.is_train:
                self.sample_ids = torch.randperm(len(self.dataset))
            else:
                self.sample_ids = torch.arange(len(self.dataset))
            batches = self.prepare_batches()
            for batch in batches:
                yield batch
                iteration += 1
                if iteration > self.num_iters:
                    break
    def __len__(self):
        return self.num_iters
class Collater():
    """
    Collate function: pads the images of one batch to a common size and
    gathers the per-item fields into lists/tensors.
    """
    @type_check(object, EasyDict, bool)
    def __init__(self, cfg, is_train_or_val=True):
        # is_train_or_val: True for train/val batches (with targets),
        # False for inference batches (with the original images instead).
        self.cfg = cfg
        self.is_train_or_val = is_train_or_val
    @type_check(object, list)
    def __call__(self, batch):
        if self.is_train_or_val:
            origin_images = [item['image'] for item in batch]
            bboxes = [item['bbox'] for item in batch]
            cats = [item['cat'] for item in batch]
            ratios = [item['ratio'] for item in batch]
            names = [item['name'] for item in batch]
            img_ids = [item['img_id'] for item in batch]
        else:
            ori_images = [item['ori_image'] for item in batch]
            origin_images = [item['image'] for item in batch]
            ratios = [item['ratio'] for item in batch]
            names = [item['name'] for item in batch]
        # Pad the resized images up to the batch maximum.
        # First find the largest height/width in the batch.
        max_w, max_h = 0, 0
        for image in origin_images:
            h, w = image.shape[1:3]
            max_w = w if w > max_w else max_w
            max_h = h if h > max_h else max_h
        # Round the maximum size up to a multiple of the base size.
        base_size = self.cfg.DATASET.BASE
        max_w = base_size * math.ceil(max_w / base_size)
        max_h = base_size * math.ceil(max_h / base_size)
        # Zero-padded batch tensor; each image occupies its top-left corner.
        images = torch.zeros(len(origin_images), 3, max_h, max_w, dtype=torch.float32)
        for i, image in enumerate(origin_images):
            h, w = image.shape[1:3]
            images[i, :, :h, :w] = image
        # Record each image's unpadded (h, w) size.
        sizes = torch.zeros(len(origin_images), 2, dtype=torch.float32)
        for i, image in enumerate(origin_images):
            sizes[i, 0] = image.size(1)
            sizes[i, 1] = image.size(2)
        if self.is_train_or_val:
            data = {
                'images': images,
                'bboxes': bboxes,
                'cats': cats,
                'sizes': sizes,
                'ratios': ratios,
                'names': names,
                'img_ids': img_ids
            }
        else:
            data = {
                'ori_images': ori_images,
                'images': images,
                'sizes': sizes,
                'ratios': ratios,
                'names': names
            }
        return data
|
984,319 | 96fca447a6494efafdf3238eb9d074f8a1547cd5 | #์์ฃผ์ฐ๋ ๋น์ถ ๋ด์ฅํจ์
# Frequently used Python built-in functions (study notes).
# Before writing a program, check whether it already exists: the built-ins
# (especially in the standard distribution) have been through extensive
# testing, so reusing them beats reinventing them. You do not need to
# memorize each function -- knowing it exists is enough to look it up later.
num = -3
print(abs(num))
print(all([1,2,3,4])) # True: every element is truthy
print(all([1,2,3,4,0])) # False: 0 is falsy
print(any([1,2,3,0])) # True if at least one element is truthy
print(divmod(10,3))
print(chr(97)) # ASCII code -> character
# enumerate is mostly used with for loops: gives the current index easily.
for i,name in enumerate(["body","foo","bar"]):
    print(i,name)
# filter shortens code and is fast: keeps only items whose predicate is true.
def positive(x):
    return x>0
print(list(filter(positive,[1,3,-2,0,-5,6])))
"""
#์์ ์ฝ๋๋ ๊ฐ์๊ฑธ ํจ์๋ก ์ง๋ฉด
def positive(numberList):
    result =[]
    for num in numberList:
        if(num>0):
            result.append(num)
    return result
"""
# id returns the reference (address). Everything in Python is an object.
a=3
b=3
print(id(a))
print(id(b))
print(id(3))
"""
a=input("์ซ์๋ฅผ ๋ฃ์ผ์์ค. ")
print(a)
"""
print(int('3'))# convert to int
class Person:
    pass
a= Person()
print(isinstance(a,Person)) # is `a` an instance of this class? useful in long, complex code
# lambda: for functions too simple to deserve a def; concise and readable.
# Usable anywhere a def-defined function could be used.
# NOTE(review): `sum` shadows the built-in sum() from here on.
sum = lambda a,b:a+b
print(sum(10,7))
myList = [lambda a,b:a+b,lambda a,b:a*b] # a list can hold functions -- handy for loops
print(myList) # prints the function objects (addresses)
print(myList[0])
print(myList[0](3,4))
# length of a sequence
num = list(range(1,10))
print(len(num))
print(list("python"))
# map takes any iterable as input.
def two_times(x):return x*2
print(map(two_times,[1,2,3,4])) # without list() it prints the map object (address)
print(list(map(two_times,[1,2,3,4])))
# lambda combines with map like this; common for program speed/conciseness.
print(list(map(lambda a:a*2,[1,2,3,4])))
print(max([1,2,3,4,5,6]))
print(min([1,2,3,4,5,6]))
print(pow(3,4))
print(list(range(4,10)))
print(list(range(4,10,2)))# step of 2
num = list(range(1,10))
print(num)
print(sorted([3,1,2,10,20003,5,7]))
# list.sort() has no return value, unlike sorted()
a = [1,10,3,4,500,3]
result = a.sort()
print(result)
print(a) # a itself is sorted in place; nothing is returned
# When input data has the wrong type things often break -- check with type()
# and either supply matching data or convert it.
print(type(a))
# zip is useful for data preprocessing.
print(list(zip([1,2,3],[4,5,6]))) # pairs elements positionally across the lists
print(list(zip([1,2],[3,4],[5,6])))
|
984,320 | 8139e5fdf67c4f01508a2c07799a37a1f1d7154a | from sys import argv
import re
# second argument is the original promoter file
# Parse MEME output (argv[1]) against the original promoter FASTA (argv[2])
# and print significant motifs (E-value <= 0.05) with their instances.
#
# Fixed in review:
#  * converted Python-2 `print` statements to print() calls -- the script
#    previously could not run on Python 3 at all;
#  * the MEME output file is now read via a context manager instead of
#    leaking the handle from open(argv[1]).read();
#  * regex patterns use raw strings (avoids invalid-escape warnings);
#  * removed the unused `seq_number` local (it duplicated the names_dict
#    lookup already performed for the printed line).

# second argument is the original promoter file
with open(argv[2], 'r') as promoter_file:
    names = [line.strip().split('>')[1] for line in promoter_file if ">" in line]
    # Map the first 24 chars of each sequence name to its ordinal position.
    names_dict = dict((name[0:24], number) for number, name in enumerate(names))

with open(argv[1], 'r') as meme_file:
    input_file = meme_file.read()

for m in re.finditer(r'MOTIF.+\d{3}', input_file):
    motif_n = m.group(0).split('\t')[0]
    e_value = m.group(0).split('E-value = ')[-1]
    if float(e_value) <= 5e-2:
        # Instance lines follow the "sites sorted by position p-value" table
        # header; the consensus comes from the "Multilevel" line above it.
        motifs = input_file.split(motif_n)[1].split('sites sorted by position p-value')[1].split('--------------------------------------------------------------------------------')[1].split('\n')[3:]
        consensus = input_file.split(motif_n)[1].split('sites sorted by position p-value')[0].split('Multilevel')[1].split('\n')[0].strip()
        print('\n' + "MOTIF FINDER: MEME")
        print("MOTIF:", consensus)
        print("INSTANCES:")
        for motif in motifs:
            if motif:
                name = motif.split(' ')[0].split('|')[0]
                if name:
                    direction = re.search(r"\s+[+|-]\s+", motif).group(0).strip()
                    # Position relative to the TSS: minus strand counts back
                    # from it; plus strand is offset from -1000.
                    location = -(int(re.search(r"\s+\d+\s+", motif).group(0).strip())) if direction == '-' else (-1000 + int(re.search(r"\s+\d+\s+", motif).group(0).strip()) - 1)
                    to_print = str(names_dict[name]) + ',' + str(
                        location) + ',' + consensus + ',' + direction
                    print(to_print)
984,321 | 068146ba65b358e63037afe883ce1258875495b3 | import sys, time, RPi.GPIO as GPIO
# Half-step excitation sequence for a 4-coil stepper motor: consecutive rows
# differ by exactly one pin, energizing adjacent coils in turn.
seq0 = [ [1,0,0,0],
         [1,1,0,0],
         [0,1,0,0],
         [0,1,1,0],
         [0,0,1,0],
         [0,0,1,1],
         [0,0,0,1],
         [1,0,0,1] ]
# BUG FIX: the reverse direction was previously sorted(seq0), which orders the
# rows lexicographically and destroys the adjacent-coil progression (the motor
# would stutter instead of turning backwards). The correct opposite rotation
# is the same sequence traversed in reverse.
seq = [seq0, seq0[::-1]]
def rotate(num, ControlPin):
    """Spin the motor for roughly one second.

    Args:
        num: rotation direction selector, must be 0 or 1 (indexes `seq`).
        ControlPin: the four GPIO pin numbers driving the coils.
    """
    if num not in (0, 1):
        print("The number should be 0 or 1.")
        sys.exit(1)
    pattern = seq[num]
    for _ in range(128):  # 128 cycles of 8 half-steps ~= 1 sec
        for halfstep in range(8):
            state = pattern[halfstep]
            for pin in range(4):
                GPIO.output(ControlPin[pin], state[pin])
            time.sleep(0.001)
|
984,322 | 27edfb2ce6e594fb0e6484978733dff13c499a41 | from SPARQLWrapper import SPARQLWrapper
# SPARQL endpoint for the ERA data dictionary repository.
sparql = SPARQLWrapper("http://206.167.181.124:7200/repositories/era-dd")
# Namespace URIs excluded from data-dictionary output.
ignore = [
    "http://projecthydra.org/ns/auth/acl#",
    "http://fedora.info/definitions/v4/repository#",
    "http://www.iana.org/assignments/media-types/",
    "info:fedora/fedora-system:def/model#",
    "info:fedora/fedora-system:def/relations-external#",
    "http://www.loc.gov/premis/rdf/v1#",
    "http://www.w3.org/ns/auth/acl#"
]
# Prefix -> URI table for every namespace referenced in the repository.
namespaces = [
    {"prefix": "acl", "uri": "http://projecthydra.org/ns/auth/acl#"},
    {"prefix": "bibo", "uri": "http://purl.org/ontology/bibo/"},
    {"prefix": "cc", "uri": "http://creativecommons.org/ns#"},
    {"prefix": "dc", "uri": "http://purl.org/dc/elements/1.1/"},
    {"prefix": "dcterms", "uri": "http://purl.org/dc/terms/"},
    {"prefix": "ebu", "uri": "http://www.ebu.ch/metadata/ontologies/ebucore/ebucore#"},
    {"prefix": "etd_ms", "uri": "http://www.ndltd.org/standards/metadata/etdms/1.0/"},
    {"prefix": "fedora", "uri": "http://fedora.info/definitions/v4/repository#"},
    {"prefix": "iana", "uri": "http://www.iana.org/assignments/media-types/"},
    {"prefix": "info", "uri": "info:fedora/fedora-system:def/model#"},
    {"prefix": "lang", "uri": "http://id.loc.gov/vocabulary/iso639-2/"},
    {"prefix": "mrel", "uri": "http://id.loc.gov/vocabulary/relators/"},
    {"prefix": "lcn", "uri": "http://id.loc.gov/authorities/names/"},
    {"prefix": "obo", "uri": "http://purl.obolibrary.org/obo/"},
    {"prefix": "owl", "uri": "http://www.w3.org/2002/07/owl#"},
    {"prefix": "ore", "uri": "http://www.openarchives.org/ore/terms/"},
    {"prefix": "pcdm", "uri": "http://pcdm.org/models#"},
    {"prefix": "premis", "uri": "http://www.loc.gov/premis/rdf/v1#"},
    {"prefix": "prism", "uri": "http://prismstandard.org/namespaces/basic/3.0/"},
    {"prefix": "rels", "uri": "info:fedora/fedora-system:def/relations-external#"},
    {"prefix": "rdf", "uri": "http://www.w3.org/1999/02/22-rdf-syntax-ns#"},
    {"prefix": "rdfs", "uri": "http://www.w3.org/2000/01/rdf-schema#"},
    {"prefix": "schema", "uri": "http://schema.org/"},
    {"prefix": "scholar", "uri": "http://scholarsphere.psu.edu/ns#"},
    {"prefix": "skos", "uri": "http://www.w3.org/2004/02/skos/core#"},
    {"prefix": "status", "uri": "http://www.w3.org/2003/06/sw-vocab-status/ns#"},
    {"prefix": "swrc", "uri": "http://ontoware.org/swrc/ontology#"},
    {"prefix": "ual", "uri": "http://terms.library.ualberta.ca/"},
    {"prefix": "ualdate", "uri": "http://terms.library.ualberta.ca/date/"},
    {"prefix": "ualid", "uri": "http://terms.library.ualberta.ca/id/"},
    {"prefix": "ualids", "uri": "http://terms.library.ualberta.ca/identifiers/"},
    {"prefix": "ualrole", "uri": "http://terms.library.ualberta.ca/role/"},
    {"prefix": "ualthesis", "uri": "http://terms.library.ualberta.ca/thesis/"},
    {"prefix": "webacl", "uri": "http://www.w3.org/ns/auth/acl#"},
    {"prefix": "works", "uri": "http://pcdm.org/works#"},
    {"prefix": "vivo", "uri": "http://vivoweb.org/ontology/core#"}
]
# Glossary for the application-profile documents.
profileDefinitions = [
    {"term": "acceptedValue", "def": "values belonging to properties with restricted value parameters (only those displayed on form)"},
    {"term": "backwardCompatibleWith", "def": "crosswalk to previously used terms (in ERA) for migration mapping"},
    {"term": "comments", "def": "Jupiter specific instructions for using or questions about this property"},
    {"term": "definedBy", "def": "a link to the Jupiter ontology, including a general description of the property"},
    {"term": "dataType", "def": "the kinds of values permitted for use by the property: 'text', 'enumerated text' (i.e. non-URI drop-down), 'uri' (i.e. dropdown with URI), 'auto' (generated by application logic)"},
    {"term": "display", "def": "does this property appear when an object is displayed to the user? (boolean)"},
    {"term": "displayLabel", "def": "if this object is displayed to the user, what is the label used to describe the property in the display?"},
    {"term": "facet", "def": "is this property faceted in SOLR? (boolean)"},
    {"term": "indexAs", "def": "another property with which this property should be indexed in SOLR"},
    {"term": "onForm", "def": "does this property appear on the form when a user creates a new resource? (boolean)"},
    {"term": "propertyName", "def": "an informal name for describing the property"},
    {"term": "repeat", "def": "can this property occur more than once? (boolean)"},
    {"term": "required", "def": "is the property required to have a value? (boolean)"},
    {"term": "search", "def": "is this property searchable in Jupiter? (boolean)"},
    {"term": "sort", "def": "is this property sortable in SOLR? (boolean)"},
    {"term": "tokenize", "def": "is this property tokenized in SOLR? (boolean)"}
]
# Glossary for the ontology documents.
definitions = [
    {"term": "@type", "def": "the object class. Particulary important for determining scope for use of terms and values."},
    {"term": "rdfs:comment", "def": "defines the term or property"},
    {"term": "rdfs:domain", "def": "indicates terms (classes, values, datatypes, etc.) that may invoke a given property"},
    {"term": "rdfs:range", "def": "indicates terms (classes, values, datatypes, etc.) that must be used with this property"},
    {"term": "rdfs:label", "def": "the name of the term or property"},
    {"term": "rdfs:preflabel", "def": "the label preferred for display"},
    {"term": "owl:deprecated", "def": "indicates whether the property or term is active in the current deployment (default = false)"},
    {"term": "owl:backwardCompatible", "def": "mappings to previous vocabularies used in previous deployments"},
    {"term": "obo:IAO_0000112", "def": "usage example"},
    {"term": "obo:IAO_0000115", "def": "description"}
]
# Introductory blurbs rendered at the top of the generated documents.
ddWelcome = 'The Jupiter Data Dictionary is a collection of living documents. Below you will find the Jupiter ontology -- definitions for properties (predicates), terms (vocabulary or classes), and values (instances) used in the Jupiter project. Current deployment specifications in Jupiter are described by application profiles. Changes to any of these documents can be suggested by submitting a Github issue. The metadata team will update the document accordingly. FYI: markdown files are accompanied by json files that may also be consulted.'
profileWelcome = 'The Jupiter Data Dictionary is a collection of living documents. Below you will find an application profile for properties implemented in production Jupiter. Changes to these variables can be suggested by submitting a Github ticket. The metadata team will edit the document accordingly.'
|
984,323 | 55da58b9f812846bf02be263d4ece4fd04fcbecd | from pandas import read_csv, DataFrame, concat, Series
import attr
import os
from json import load
from numpy import log10, isfinite, any, all, quantile, nan, isnan
from numbers import Number
# Sample tissues and MS ionization polarities iterated by the loaders below.
tissues = ['Brain', 'Muscle', 'Liver']
polarities = ['positive','negative']
@attr.s(auto_attribs=True)
class AstyanaxLi:
    """Loader/merger for the Astyanax lipidomics CSV exports.

    Builds, per tissue, intensity tables keyed by InChIKey from the
    normalized (per-polarity) exports - and, when provided, the
    unnormalized ones - then maps compounds onto LIPID MAPS ids and
    assembles one merged wide table across tissues in ``self.lmdata``.

    NOTE(review): depends on module-level names defined elsewhere in this
    file (``tissues``, ``polarities``, ``read_csv``, ``DataFrame``,
    ``concat``, ``load``, ``nan``, ``isnan``, ``Number``, ``os``).
    """
    lipids_normalized: str    # directory with <polarity>/<tissue>.csv files
    lipidmaps_js: str    # path to the LIPID MAPS JSON dump
    lipids_unnormalized: str = None    # optional directory with <tissue>.csv files
    def __attrs_post_init__(self):
        if self.lipids_unnormalized is not None:
            # unnormalized
            # Each CSV is a composite sheet: the first 6 rows describe the
            # sample columns, the first 6 columns describe the compounds.
            self.unnormalized = {}
            self.unnormalized_polarity = {}
            for tissue in tissues:
                filename = os.path.join(self.lipids_unnormalized,f'{tissue.lower()}.csv')
                column_table = (
                    read_csv(filename, skiprows=0, nrows=6, index_col=6, header=0)
                    .iloc[:, 6:]
                    .transpose()
                )
                #print(column_table)
                row_table = read_csv(filename, skiprows=6, usecols=list(range(6)), index_col=0, header=0)
                #print(row_table)
                data = read_csv(filename, skiprows=7, header=None).iloc[:, 7:]
                data.insert(0,'InChIKey',list(row_table['InChI Key']))
                data = data.set_index('InChIKey')
                data.columns = list(column_table['Treatment'])
                print(f'data before {tissue} {data.shape}')
                # Record which rows were acquired in positive ESI mode.
                # NOTE(review): the Muscle sheet apparently stores the mode
                # under a differently named column ('ESI (+)') - confirm
                # against the raw export.
                if tissue != 'Muscle':
                    t = (row_table.loc[list(data.index.notna())]['ESI mode'] == 'ESI (+)').shape
                    #print(f't {t}')
                    self.unnormalized_polarity[tissue] = (row_table.loc[list(data.index.notna())]['ESI mode'] == 'ESI (+)')
                    #print(f'the {self.unnormalized_polarity[tissue].shape}')
                else:
                    self.unnormalized_polarity[tissue] = (row_table.loc[list(data.index.notna())]['ESI (+)'] == 'ESI (+)')
                print(f'un table {tissue} {self.unnormalized_polarity[tissue].shape}')
                # Drop rows/columns that lack an identifier.
                data = data.loc[data.index.notna()]
                #print(f'data 1 {tissue} {data.shape}')
                data = data.loc[:,data.columns.notna()]
                self.unnormalized[tissue] = data
                #print(f'unnormalized data {tissue} {self.unnormalized[tissue].shape}')
        # normalized
        # Same sheet layout, but one file per (polarity, tissue) pair and
        # one fewer compound-metadata column.
        self.normalized = {}
        for tissue in tissues:
            for polarity in polarities:
                filename = os.path.join(self.lipids_normalized,polarity,f'{tissue.lower()}.csv')
                column_table = (
                    read_csv(filename, skiprows=0, nrows=6, index_col=5, header=0)
                    .iloc[:, 5:]
                    .transpose()
                )
                row_table = read_csv(filename, skiprows=6, usecols=list(range(5)), index_col=0, header=0)
                #if tissue == 'Brain' and polarity == 'negative':
                    #print(print(row_table.loc[row_table['InChI Key'] == 'JBDGKEXQKCCQFK-JWQIMADESA-N',:]))
                    #stop
                data = read_csv(filename, skiprows=7, header=None).iloc[:, 6:]
                data.insert(0,'InChIKey',list(row_table['InChI Key']))
                data = data.set_index('InChIKey')
                data.columns = list(column_table['Treatment'])
                data = data.loc[data.index.notna()]
                data = data.loc[:,data.columns.notna()]
                #if tissue == 'Brain' and polarity == 'negative':
                    #print(print(data.loc['JBDGKEXQKCCQFK-JWQIMADESA-N',:]))
                    #stop
                #data = data.drop_duplicates()
                #https://stackoverflow.com/questions/13035764/remove-rows-with-duplicate-indices-pandas-dataframe-and-timeseries
                #data = data.loc[~data.index.duplicated(keep='first')]
                # Collapse duplicate InChIKeys by summing their intensities.
                data = data.groupby(data.index).sum()
                self.normalized[tissue,polarity] = data
        # LIPID MAPS lookup tables: full InChIKey, first two blocks
        # (connectivity + stereo), and first block only, used below as
        # progressively fuzzier matches.
        with open(self.lipidmaps_js) as f:
            self.lipidmaps = load(f)
        self.lipidmaps_inchikey = {v['INCHI_KEY']:u for u,v in self.lipidmaps.items()}
        self.lipidmaps_inchikey2 = {'-'.join(v['INCHI_KEY'].split('-')[:2]):u for u,v in self.lipidmaps.items()}
        self.lipidmaps_inchikey1 = {v['INCHI_KEY'].split('-')[0]:u for u,v in self.lipidmaps.items()}
        def get_lipidmap_id(inchikey):
            # Exact key first, then fall back to the truncated-key tables.
            if inchikey in self.lipidmaps_inchikey:
                return self.lipidmaps_inchikey[inchikey]
            elif '-'.join(inchikey.split('-')[:2]) in self.lipidmaps_inchikey2:
                return self.lipidmaps_inchikey2['-'.join(inchikey.split('-')[:2])]
            elif inchikey.split('-')[0] in self.lipidmaps_inchikey1:
                return self.lipidmaps_inchikey1[inchikey.split('-')[0]]
            return nan
        def fix_category(c):
            # Strip a trailing bracketed code, e.g. 'Sterol Lipids [ST]'.
            if '[' in c:
                return c.split('[')[0].strip()
            else:
                return c
        tissuedata = {}
        for tissue in tissues:
            d = []
            for polarity in polarities:
                # Per-compound annotation frame for this polarity.
                lmd = DataFrame(self.normalized[tissue,polarity].apply(lambda u: get_lipidmap_id(u.name),axis=1), columns=['LMID'])
                lmd['Category'] = lmd.apply(lambda u: fix_category(self.lipidmaps[u[0]]['CATEGORY']) if isinstance(u[0],str) else nan,axis=1)
                lmd['Class'] = lmd.apply(lambda u: fix_category(self.lipidmaps[u[0]]['MAIN_CLASS']) if isinstance(u[0],str) else nan,axis=1)
                lmd['Tissue'] = tissue
                lmd['Polarity'] = polarity
                lmd['InChIKey'] = self.normalized[tissue,polarity].index
                lmd = lmd.set_index("InChIKey")
                d.append(lmd)
            #lmids = (set(d[0]['LMID']) | set(d[1]['LMID'])) - set([nan])
            inchikeys = set(set(d[0].index) | set(d[1].index)) - set([nan])
            lmdata = []
            from itertools import repeat, chain
            n = 0
            for inchikey in inchikeys:
                try:
                    #print(self.normalized[tissue,polarities[0]].columns)
                    # Take the annotation row from whichever polarity saw
                    # this compound.
                    if inchikey in d[0].index:
                        k = 0
                    elif inchikey in d[1].index:
                        k = 1
                    else:
                        assert False
                    r = {'LMID':d[k]['LMID'].loc[inchikey], 'Category': d[k]['Category'].loc[inchikey], 'Class': d[k]['Class'].loc[inchikey], 'Tissue': d[k]['Tissue'].loc[inchikey], 'InChIKey': inchikey}
                    if isinstance(r['LMID'],Number) and isnan(r['LMID']):
                        continue
                    n += 1
                    self.non_numeric_cols= len(r)
                    # Sum the two polarities column-wise; a compound missing
                    # from one polarity contributes zeros.
                    # NOTE(review): assumes 9 treatments x 6 replicates
                    # (54 sample columns) per sheet - confirm for new data.
                    r.update({f'{c} {tissue} {k}': v1+v2 for k,c,v1,v2 in zip(
                        chain.from_iterable(repeat(range(1,1+6),9)),
                        self.normalized[tissue,polarities[0]].columns,
                        self.normalized[tissue,polarities[0]].loc[inchikey,:] if inchikey in self.normalized[tissue,polarities[0]].index else [0.]*6*9,
                        self.normalized[tissue,polarities[1]].loc[inchikey,:] if inchikey in self.normalized[tissue,polarities[1]].index else [0.]*6*9,
                        )})
                    lmdata.append(r)
                    #print(n,len(lmdata))
                    #print(n,lmdata)
                    assert n == len(lmdata)
                except KeyError:
                    pass
            #print(f'l {len(lmdata)}')
            #print(f'n {n}')
            lmdata = DataFrame(lmdata)
            #lmdata = lmdata.set_index('LMID')
            lmdata = lmdata.set_index('InChIKey')
            #lmdata = lmdata.groupby('LMID').sum()
            tissuedata[tissue] = lmdata
            #print(lmdata)
            #print(lmdata.iloc[:,self.non_numeric_cols-1:])
            #print(len(set(lmdata.index)))
        # One wide table: tissues side by side, duplicate columns dropped,
        # compounds required to be present in every tissue (dropna).
        lmdata = concat(tissuedata.values(), axis=1)
        #lmdata = lmdata.drop_duplicates(axis=1)
        lmdata = lmdata.loc[:,~lmdata.columns.duplicated()]
        lmdata = lmdata.dropna()
        #lmdata = lmdata.fillna(0.)
        self.lmdata = lmdata
        #print(lmdata)
        #print(len(set(tissuedata['Liver'].index) & set(tissuedata['Brain'].index)))
        #print(lmdata.loc['LMFA01030132'])
|
class Solution(object):
    def wiggleSort(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        # Single pass: at odd positions the adjacent pair must ascend,
        # at even positions it must descend; swap neighbours whenever the
        # required order is violated. Each swap keeps the prefix valid.
        for i in range(1, len(nums)):
            want_ascending = (i % 2 == 1)
            if want_ascending:
                out_of_order = nums[i - 1] > nums[i]
            else:
                out_of_order = nums[i - 1] < nums[i]
            if out_of_order:
                nums[i - 1], nums[i] = nums[i], nums[i - 1]
|
984,325 | 010b1314af56e802f140463046e3cd26b9fa6252 | #!/usr/bin/env python
# See Fig3-4.ipynb for details.
# Whyjay Zheng
# File created Oct 21, 2021
# Last modified Feb 22, 2022
#
# Usage: python <script> <glacier_flowlines.nc>
# Computes Pe/l and J0 along each primary flowline of one glacier basin and
# writes a multi-panel diagnostic figure to ../data/results/single_basins/.
import pejzero
import rasterio
from netCDF4 import Dataset
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from pathlib import Path
import sys
import os
# Input NetCDF with the basin's flowline geometry, plus fixed speed rasters:
# 1998 speed and the 2018-1998 speed difference.
glacier_file = sys.argv[1]
speed_file = '../data/GRE_G0240_1998_v.tif'
vdiff_file = '../data/GRE_G0240_diff-2018-1998_v.tif'
ds = Dataset(glacier_file, 'r')
flowline_groups, _ = pejzero.get_flowline_groups(ds)
# 'iter' paths are resampled/iterated variants; keep only the primaries.
primary_flowlines = [i for i in flowline_groups if 'iter' not in i.path]
results = {}
with rasterio.open(speed_file) as speed_data, rasterio.open(vdiff_file) as vdiff_data:
    for flowline_group in primary_flowlines:
        data_group = pejzero.cal_pej0_for_each_flowline(flowline_group, speed_data, vdiff_data)
        if data_group is not None:
            results[flowline_group.name] = data_group
# Basin-average curve, plotted on top of the per-flowline curves below.
results['avg'] = pejzero.cal_avg_for_each_basin(results)
#### plot results
# Number of samples (50 m spacing) of the Pe/l-vs-J0 curve to show.
pej0_plot_length = 200
matplotlib.rc('font', size=24)
matplotlib.rc('axes', linewidth=2)
# Left column: 5 stacked panels; right column: one big Pe/l-vs-J0 panel.
fig, ax3 = plt.subplots(5, 2, sharex=True, figsize=(26, 20))
gs = ax3[1, 1].get_gridspec()
for ax in ax3[:, 1]:
    ax.remove()
axbig = fig.add_subplot(gs[1:4, 1])
for key in results:
    if key != 'avg':
        # Individual flowlines in light colours.
        ax3[0, 0].plot(results[key]['d'], results[key]['s'], color='xkcd:aquamarine', linewidth=2)
        ax3[0, 0].plot(results[key]['d'], results[key]['b'], color='xkcd:brown', linewidth=2)
        ax3[1, 0].plot(results[key]['d'], results[key]['u'], color='xkcd:light green', linewidth=2)
        ax3[2, 0].plot(results[key]['d'], results[key]['pe_ignore_dslope'], color='xkcd:light red', linewidth=2)
        ax3[3, 0].plot(results[key]['d'], results[key]['j0_ignore_dslope'], color='xkcd:light blue', linewidth=2)
        ax3[4, 0].plot(results[key]['d'], results[key]['udiff_sm'], color='xkcd:light grey', linewidth=2)
        axbig.plot(results[key]['pe_ignore_dslope'][:pej0_plot_length], results[key]['j0_ignore_dslope'][:pej0_plot_length], '.-',
                   color='xkcd:light purple', linewidth=2)
        # plot first non-NaN value (the one closest to the terminus)
        axbig.plot(next(x for x in results[key]['pe_ignore_dslope'][:pej0_plot_length] if not np.isnan(x)),
                   next(x for x in results[key]['j0_ignore_dslope'][:pej0_plot_length] if not np.isnan(x)), '.', color='xkcd:light purple', markersize=25)
    else:
        # Basin average in dark colours with thicker lines.
        ax3[1, 0].plot(results[key]['d'], results[key]['u'], color='xkcd:dark green', linewidth=4)
        ax3[2, 0].plot(results[key]['d'], results[key]['pe_ignore_dslope'], color='xkcd:dark red', linewidth=4)
        ax3[3, 0].plot(results[key]['d'], results[key]['j0_ignore_dslope'], color='xkcd:dark blue', linewidth=4)
        ax3[4, 0].plot(results[key]['d'], results[key]['udiff_sm'], color='xkcd:dark grey', linewidth=4)
        axbig.plot(results[key]['pe_ignore_dslope'][:pej0_plot_length], results[key]['j0_ignore_dslope'][:pej0_plot_length], '.-',
                   color='xkcd:dark purple', linewidth=4, markersize=10)
        # plot first non-NaN value (the one closest to the terminus)
        axbig.plot(next(x for x in results[key]['pe_ignore_dslope'][:pej0_plot_length] if not np.isnan(x)),
                   next(x for x in results[key]['j0_ignore_dslope'][:pej0_plot_length] if not np.isnan(x)), '.', color='xkcd:dark purple', markersize=30)
letter_specs = {'fontsize': 30, 'fontweight': 'bold', 'va': 'top', 'ha': 'center'}
ax3[0, 0].set_title(Path(glacier_file).stem)
ax3[0, 0].set_ylabel('Elevantion (m): \n Surface (cyan) \n bed (brown)')
ax3[0, 0].text(0.04, 0.95, 'A', transform=ax3[0, 0].transAxes, **letter_specs)
ax3[1, 0].set_ylabel('Speed 1998 (m yr$^{-1}$)')
ax3[1, 0].text(0.96, 0.95, 'B', transform=ax3[1, 0].transAxes, **letter_specs)
ax3[2, 0].set_ylabel(r'$\frac{P_e}{\ell}$ (m$^{-1}$)')
ax3[2, 0].text(0.96, 0.95, 'C', transform=ax3[2, 0].transAxes, **letter_specs)
ax3[3, 0].set_ylabel(r'$J_0$ (m yr$^{-1}$)')
ax3[3, 0].text(0.96, 0.95, 'D', transform=ax3[3, 0].transAxes, **letter_specs)
ax3[4, 0].set_xlabel('Distance from terminus (km)')
# NOTE(review): 'โ' in the label below looks like a mis-encoded en dash
# ('1998–2018') - confirm the source file encoding before changing it.
ax3[4, 0].set_ylabel('Speed change \n 1998โ2018 (m yr$^{-1}$)')
ax3[4, 0].text(0.96, 0.95, 'E', transform=ax3[4, 0].transAxes, **letter_specs)
axbig.set_xlabel(r'$\frac{P_e}{\ell}$ (m$^{-1}$)')
axbig.set_ylabel(r'$J_0$ (m yr$^{-1}$)')
axbig.set_title('Dot spacing: 50 m; \n Big dot indicates the first non-NaN value \n (closest to the terminus)')
axbig.text(0.03, 0.985, 'F', transform=axbig.transAxes, **letter_specs)
# Pe/l values are tiny; show them with fixed 6-decimal tick labels.
pe_labels = ['{:.6f}'.format(x) for x in axbig.get_xticks()]
axbig.set_xticklabels(pe_labels, rotation=45)
outdir = '../data/results/single_basins/'
if not os.path.exists(outdir):
    os.makedirs(outdir)
plt.savefig(outdir + Path(glacier_file).stem + '.png') |
984,326 | ec7541f18335fa5fff8a0b19ffda4ab98ec7709b | import platform
import usb.core
import usb.util
import struct
from Monsoon import Operations as op
from copy import deepcopy
import numpy as np
import array
# Module-level USB state shared by the bootloader helpers below;
# populated by bootloaderMonsoon.setup_usb().
DEVICE = None
DEVICE_TYPE = None  # NOTE(review): never assigned in this module - appears unused
epBulkWriter = None  # bulk OUT endpoint (host -> device)
epBulkReader = None  # bulk IN endpoint (device -> host)
VID = '0x2ab9'  # LVPM vendor id (hex string); overwritten if an HVPM is found
PID = '0xffff'  # LVPM product id (hex string)
class bootloaderMonsoon(object):
    """Client for the Monsoon Power Monitor's USB bootloader.

    Finds the device over USB, parses Intel HEX / Monsoon FWM firmware
    images, and reflashes the PIC's Flash/IDLocs/Config regions via bulk
    transfers.

    NOTE(review): several helpers look Python-2 era (``np.int``, feeding
    ``str`` slices to ``array.array``) - flagged inline; verify on the
    target interpreter before relying on them.
    """
    def __init__(self,*args, **kwargs):
        # No state up front; call setup_usb() before any other method.
        pass
    def setup_usb(self):
        """Sets up the USB connection.

        Populates the module-level DEVICE and bulk endpoint handles,
        trying the LVPM VID/PID first and falling back to the HVPM.
        """
        global DEVICE
        global epBulkWriter
        global epBulkReader
        global VID
        global PID
        DEVICE = usb.core.find(idVendor=0x2AB9,idProduct=0xFFFF)
        if DEVICE is None:#If not a LVPM, look for an HVPM.
            DEVICE = usb.core.find(idVendor=0x04d8,idProduct=0x000b)
            VID = '0x4d8'
            PID = '0xb'
        if "Linux" == platform.system():
            try:
                DEVICE.detach_kernel_driver(0)
            except:
                pass # already unregistered
        DEVICE.set_configuration()
        cfg = DEVICE.get_active_configuration()
        intf = cfg[(0,0)]
        # Cache the bulk OUT (write) and IN (read) endpoints for transfers.
        epBulkWriter = usb.util.find_descriptor(
            intf,
            custom_match = \
            lambda e: \
            usb.util.endpoint_direction(e.bEndpointAddress) == \
            usb.util.ENDPOINT_OUT)
        epBulkReader = usb.util.find_descriptor(
            intf,
            custom_match = \
            lambda e: \
            usb.util.endpoint_direction(e.bEndpointAddress) == \
            usb.util.ENDPOINT_IN)
    def __bootCommand(self,Command,length,address,data):
        """Sends boot command.

        Wire format: [command, length, addr low byte, addr high byte,
        memory-region byte, payload zero-padded to ``length``]; returns the
        raw device response (``length`` + 5 bytes).
        """
        sendData = []
        sendData.append(Command)
        sendData.append(length)
        # Address goes out low byte first; address[0] is the region byte.
        sendData.append(address[2])
        sendData.append(address[1])
        sendData.append(address[0])
        for i in range(0,len(data)):
            sendData.append(data[i])
        # Zero-pad the payload out to the declared length.
        for i in range(len(data),length):
            sendData.append(0)
        test = epBulkWriter.write(sendData,timeout=10000)
        ret = epBulkReader.read(length+5,timeout=10000)
        return ret
    def writeFlash(self, hex_):
        """Writes a hex file to the Power Monitor's PIC. Uses Intel HEX file format.

        Erases and rewrites Flash (with read-back verification), then
        writes IDLocs and Config. EEPROM is deliberately left untouched to
        preserve calibration data.
        """
        Flash, EEPROM,IDlocs,Config = self.__formatHex(hex_)
        print("Erasing Flash...")
        self.__writeRegion(op.BootloaderMemoryRegions.Flash,op.BootloaderCommands.EraseFlash,0x0800,Flash,None)
        print("Writing Flash...")
        if(self.__writeRegion(op.BootloaderMemoryRegions.Flash,op.BootloaderCommands.WriteFlash,0x0800,Flash,op.BootloaderCommands.ReadFlash)):
            print("Flash written OK")
        #Don't actually erase the EEPROM, this would wipe out all of the calibration data.
        #if(self.writeRegion(op.BootloaderMemoryRegions.EEPROM,op.BootloaderCommands.WriteEEPROM,0x0000,EEPROM,op.BootloaderCommands.ReadEEPROM)):
        # print("EEPROM written OK")
        if(self.__writeChunk(op.BootloaderMemoryRegions.IDLocs,op.BootloaderCommands.WriteFlash,0x0000,IDlocs,op.BootloaderCommands.ReadFlash)):
            print("IDLocs written OK")
        if(self.__writeChunk(op.BootloaderMemoryRegions.Config,op.BootloaderCommands.WriteConfig,0x0000,Config,op.BootloaderCommands.ReadConfig)):
            print("Config written OK")
    def __writeRegion(self, memoryRegion,command,addressStart,regionData,errorCheckCommand):
        """Writes information to a memory region.

        Streams ``regionData`` in 16-byte chunks from ``addressStart``;
        when ``errorCheckCommand`` is given, each chunk is read back and
        compared. Returns False if any chunk fails verification.
        """
        address = [0 for _ in range(3)]
        data = [0 for _ in range(16)]
        result = True
        # 0%,10%,...,100% progress thresholds for console reporting.
        progressThresholds = [x*10 for x in range(11)]
        progressindex = 0
        len(regionData)  # NOTE(review): no-op statement; result unused
        for i in range(addressStart, len(regionData), 16):
            # Split the 32-bit offset into bytes; index 0 is the LSB.
            memoryIndex = struct.unpack("BBBB",struct.pack('I', i))
            address[0] = memoryRegion
            address[1] = memoryIndex[1]
            address[2] = memoryIndex[0]
            data = regionData[i:i+16]
            #self.bootCommand(op.BootloaderCommands.EraseFlash,16,address,[])
            self.__bootCommand(command,len(data),address,data)
            if(errorCheckCommand != None):
                # Read the chunk back (skipping the 5-byte header) and verify.
                dataout = self.__bootCommand(errorCheckCommand,16,address,[])
                dataout = dataout[5:len(dataout)]
                if not self.__compare(data,dataout):
                    result = False
                    print("Write error")
            percentComplete = (i*1.0 / len(regionData)) * 100
            if(progressThresholds[progressindex] < percentComplete):
                print('%.0f percent complete' % percentComplete)
                progressindex += 1
        return result
    def __writeChunk(self, memoryRegion,command,addressStart,regionData,errorCheckCommand):
        """Write ``regionData`` as a single chunk at offset 0 of ``memoryRegion``.

        Config is written without a preceding erase. ``addressStart`` and
        ``errorCheckCommand`` are currently unused (verification is
        commented out below).
        """
        result = True
        address = [0 for _ in range(3)]
        address[0] = memoryRegion
        address[1] = 0
        address[2] = 0
        data = regionData
        if(memoryRegion != op.BootloaderMemoryRegions.Config):
            self.__bootCommand(op.BootloaderCommands.EraseFlash,16,address,[])
        self.__bootCommand(command,len(data),address,data)
        #dataout = self.bootCommand(errorCheckCommand,16,address,[])
        #dataout = dataout[5:len(dataout)]
        #if not self.compare(data,dataout):
        # result = False
        # print("Reflash Write error")
        return result
    def __compare(self,data,dataout):
        """Compare read data to the data we think we wrote.

        Returns True only when both sequences are present, equal length,
        and element-wise identical.
        """
        if(data == None or dataout == None):
            return False
        if(len(data) != len(dataout)):
            return False
        for i in range(len(data)):
            if(data[i] != dataout[i]):
                return False
        return True
    def __byteLine(self, line):
        """Translate a HEX file line into address, linetype, data, and checksum"""
        output = []
        for offset in range(1,len(line)-1,2):
            # NOTE(review): np.int was removed in NumPy 1.20, and on
            # Python 3 indexing struct.pack() output yields an int that
            # struct.unpack cannot consume - this looks Python-2 era;
            # confirm it still runs on the target interpreter.
            output.append(struct.unpack("B",struct.pack('B',np.int(line[offset:offset+2],16))[0])[0])
        # Intel HEX record layout: [len, addr_hi, addr_lo, type, data..., checksum]
        address = []
        length = output[0]
        address.append(output[1])
        address.append(output[2])
        type_ = output[3]
        Data = output[4:4+length]
        checksum = output[len(output)-1]
        return address, type_, Data, checksum
    def getHeaderFromFWM(self, filename):
        """Strips the header from a Monsoon FWM file, returns the HEX file and the formatted header.
        Header format [VID,PID,Rev,Model]

        NOTE(review): array.array('B'/'H', ...) is fed str slices here,
        which only works on Python 2 byte-strings - open the file in binary
        mode or re-test under Python 3.
        """
        f = open(filename,'r')
        hex_ = f.read()
        f.close()
        # Everything before the first ':' is the FWM header blob.
        headerEnd = hex_.find(':')
        header = hex_[0:headerEnd]
        offset = 7
        count = array.array('B', header[offset])[0]
        offset += 1
        hex_ = hex_[headerEnd:len(hex_)]
        outHeader = [0 for _ in range(4)]
        headers = []
        i = 0
        for i in range(count):
            outHeader[0] = array.array('H', header[offset:offset+2])[0] #VID
            offset += 2
            outHeader[1] = array.array('H', header[offset:offset+2])[0] #PID
            offset += 2
            outHeader[2] = array.array('H', header[offset:offset+2])[0] #Rev
            offset += 2
            outHeader[3] = array.array('H', header[offset:offset+2])[0] #Model
            offset += 2
            # deepcopy so the reused outHeader buffer doesn't alias entries.
            test = deepcopy(outHeader)
            headers.append(test)
            i+= 1  # NOTE(review): redundant; the for-loop already advances i
        return headers, hex_
    def getHexFile(self, filename):
        """Reads an Intel HEX file."""
        f = open(filename,'r')
        hex_ = f.read()
        f.close()
        return hex_
    def __formatHex(self,hex_):
        """Takes raw hex_ input, and turns it into an array of hex_ lines."""
        output = []
        lineEnd = hex_.find('\n')
        while lineEnd > 0:
            output.append(hex_[0:lineEnd])
            hex_ = hex_[lineEnd+1:len(hex_)]
            lineEnd = hex_.find('\n')
        Flash, EEPROM,IDlocs,Config = self.__formatAsPICFlash(output)
        return Flash, EEPROM,IDlocs,Config
    def __formatAsPICFlash(self, hex_):
        """Formats an array of hex_ lines as PIC memory regions.

        Returns (flash, EEPROM, IDlocs, Config) images pre-filled with the
        erased-flash value 0xFF; only bytes present in the HEX records are
        overwritten.
        """
        flash = [0xff for _ in range(32768)]
        EEPROM = [0xff for _ in range(256)]
        IDlocs = [0xff for _ in range(16)]
        Config = [0xff for _ in range(14)]
        addressMSB = 0
        for line in hex_:
            address, type_, Data, _ = self.__byteLine(line)
            intAddress = struct.unpack("h",struct.pack("BB", address[1],address[0]))[0]
            # Extended-linear-address records select the memory region for
            # the data records that follow.
            if(type_ == op.hexLineType.ExtendedLinearAddress):
                addressMSB = Data[1]
            if(type_ == op.hexLineType.Data):
                if(addressMSB == op.BootloaderMemoryRegions.Flash):
                    for byte in Data:
                        flash[intAddress] = byte
                        intAddress += 1
                if(addressMSB == op.BootloaderMemoryRegions.EEPROM):
                    intAddress = address[1]
                    for byte in Data:
                        EEPROM[intAddress] = byte
                        intAddress += 1
                if(addressMSB == op.BootloaderMemoryRegions.IDLocs):
                    intAddress = address[1]
                    for byte in Data:
                        IDlocs[intAddress] = byte
                        intAddress += 1
                if(addressMSB == op.BootloaderMemoryRegions.Config):
                    intAddress = address[1]
                    for byte in Data:
                        Config[intAddress] = byte
                        intAddress += 1
        return flash, EEPROM, IDlocs, Config
    def verifyHeader(self, headers):
        """Verifies the header matches the physical hardware being reflashed."""
        for head in headers:
            if(hex(head[0]) == VID and hex(head[1]) == PID):
                return True
        return False
    def getSerialNumber(self):
        """The bootloader lacks a get command for the serial number, but we can just read the EEPROM value directly with the appropriate boot command"""
        address = [op.BootloaderMemoryRegions.EEPROM,0,8]#Memory address of the Serial number
        ret = self.__bootCommand(op.BootloaderCommands.ReadEEPROM,2,address,[])
        rawSerial = ret[5:7]
        # Serial number is stored little-endian as two bytes.
        serialno = struct.unpack('H', struct.pack('B'*2,rawSerial[0],rawSerial[1]))[0]
        return serialno
    def resetToMainSection(self):
        """
        Exits bootloader mode and returns to normal mode.
        This will disconnect the device, and you should reconnect with HVPM.py or LVPM.py, depending on your hardware.
        Most LVPM units have an older version of the bootloader, and this command may be nonfunctional on them.
        In that case, just manually power cycle the unit."""
        wValue = 0  # NOTE(review): wValue/wIndex/wLength are unused here
        wIndex = 0
        wLength = 0
        try:
            self.__bootCommand(op.BootloaderCommands.Reset,1,[0,0,0],[])
        except:
            #This will always throw an exception because it disconnects the device and re-enumerates as a normal Power Monitor
            print("Resetting to Main Section.")
|
984,327 | 1e4475319372784930d39641434e895f63760c93 | # encoding: utf-8
"""
@author: xingyu liao
@contact: sherlockliao01@gmail.com
"""
import json
import logging
import os
from fastreid.data.build import _root
from fastreid.data.build import build_reid_train_loader, build_reid_test_loader
from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.transforms import build_transforms
from fastreid.engine import DefaultTrainer
from fastreid.evaluation.clas_evaluator import ClasEvaluator
from fastreid.utils import comm
from fastreid.utils.checkpoint import PathManager
from .dataset import ClasDataset
class ClasTrainer(DefaultTrainer):
    """Trainer specialization for image classification on top of fastreid."""

    # index -> class-name mapping, populated when the training set is built
    idx2class = None

    @classmethod
    def build_train_loader(cls, cfg):
        """Build the training data loader.

        Returns:
            iterable
        It now calls :func:`fastreid.data.build_reid_train_loader`.
        Overwrite it if you'd like a different data loader.
        """
        logger = logging.getLogger("fastreid.clas_dataset")
        logger.info("Prepare training set")
        items = []
        for dataset_name in cfg.DATASETS.NAMES:
            dataset = DATASET_REGISTRY.get(dataset_name)(root=_root)
            if comm.is_main_process():
                dataset.show_train()
            items.extend(dataset.train)
        train_transforms = build_transforms(cfg, is_train=True)
        train_set = ClasDataset(items, train_transforms)
        # Remember the label mapping so test loaders and checkpoints agree.
        cls.idx2class = train_set.idx_to_class
        return build_reid_train_loader(cfg, train_set=train_set)

    @classmethod
    def build_test_loader(cls, cfg, dataset_name):
        """Build the test data loader for ``dataset_name``.

        Returns:
            iterable
        It now calls :func:`fastreid.data.build_reid_test_loader`.
        Overwrite it if you'd like a different data loader.
        """
        dataset = DATASET_REGISTRY.get(dataset_name)(root=_root)
        if comm.is_main_process():
            dataset.show_test()
        test_transforms = build_transforms(cfg, is_train=False)
        test_set = ClasDataset(dataset.query, test_transforms, cls.idx2class)
        loader, _ = build_reid_test_loader(cfg, test_set=test_set)
        return loader

    @classmethod
    def build_evaluator(cls, cfg, dataset_name, output_dir=None):
        """Return the (data_loader, evaluator) pair for one test dataset."""
        loader = cls.build_test_loader(cfg, dataset_name)
        return loader, ClasEvaluator(cfg, output_dir)

    @staticmethod
    def auto_scale_hyperparams(cfg, num_classes):
        """Scale hyper-parameters, then persist the idx2class mapping to disk."""
        cfg = DefaultTrainer.auto_scale_hyperparams(cfg, num_classes)
        # Save index to class dictionary
        output_dir = cfg.OUTPUT_DIR
        if comm.is_main_process() and output_dir:
            mapping_path = os.path.join(output_dir, "idx2class.json")
            with PathManager.open(mapping_path, "w") as f:
                json.dump(ClasTrainer.idx2class, f)
        return cfg
|
984,328 | 473ad6c5526cb0962f023b0a5cb13eb5308f8927 | from collections import deque
class GamePlan(object):
    """
    Round-robin tournament schedule generator (Berger tables).

    Input:
        a list of players
    Output (from ``generate``):
        a list (len = number of rounds) of lists of tuples
        with players' names in (white, black) order.

    GamePlans with an odd number of players have each person sitting out
    one round: that round contains a tuple pairing '_BYE' with the resting
    player, so templates need to check each tuple for '_BYE'.

    Thanks to @DRMacIver
    """
    def __init__(self, PLAYERS):
        # Keep a reference to the caller's list; it is never mutated here.
        self.players = PLAYERS

    def berger_robin(self, players):
        """Berger-table round robin for an even-length ``players`` list.

        Works on a private copy: the previous version popped from and
        implicitly rotated the caller's list, corrupting ``self.players``.
        ``//`` keeps the shift an int (deque.rotate rejects floats on
        Python 3) and ``range`` replaces the Python-2-only ``xrange``.
        """
        players = list(players)
        n = len(players)
        shift = n // 2
        # One player stays fixed; the rest rotate around the table.
        last = players.pop()
        pl_deque = deque(players)
        TOURNAMENT = []
        for rnd in range(n - 1):
            # The fixed player alternates colours each round.
            if rnd % 2 == 0:
                matches = [(last, pl_deque[0])]
            else:
                matches = [(pl_deque[0], last)]
            other_games = [(pl_deque[i], pl_deque[i + 1])
                           for i in range(1, len(pl_deque) - 1, 2)]
            pl_deque.rotate(shift)
            TOURNAMENT.append(matches + other_games)
        return TOURNAMENT

    def generate(self):
        """Return the full schedule; odd rosters are padded with '_BYE'.

        Fix: the sentinel used to be appended to ``self.players`` itself,
        so every additional ``generate()`` call grew the roster.
        """
        players = list(self.players)
        if len(players) % 2 != 0:
            players.append('_BYE')
        return self.berger_robin(players)
|
984,329 | ba8b71a55c689c54b9653ba62c5ced9ad8ee6e41 | import numpy as np
import math
import argparse
import scipy.ndimage
from imageio import imread
from numpy.ma.core import exp
from scipy.constants.constants import pi
from skimage.measure import compare_ssim #skimage version 0.16
from skimage.measure import compare_psnr #skimage version 0.16
def get_args():
    """Parse the command-line arguments.

    Returns an argparse.Namespace with image_1/image_2 (paths or URLs),
    metric (0 = PSNR, 1 = SSIM) and isColored (non-zero for RGB input,
    which is then compared on the luminance channel only).
    """
    parser = argparse.ArgumentParser(
        conflict_handler='resolve',
        description='eg: python3 -img1 file1 -img2 file1 -m 1 -c 0' )
    parser.add_argument('-img1','--image_1',required=True,
                        help='image file_1 URL')
    parser.add_argument('-img2','--image_2',required=True,
                        help='image file_2 URL')
    parser.add_argument('-m','--metric',required=True,type = int,
                        help='metric method\
                        0: PSNR ,1:SSIM ')
    parser.add_argument('-c','--isColored',required=True,type = int,
                        help= 'weather the input img is colored')
    return parser.parse_args()
def RGB2YUV( rgb ):
    """Convert an (H, W, 3) float RGB image to YUV (JPEG YCbCr convention).

    Luma (Y) keeps the [0, 255] range; both chroma channels are shifted
    by +128 so they are non-negative.
    """
    # Columns of this matrix are the Y, U and V weight vectors.
    weights = np.array([[ 0.29900, -0.16874, 0.50000],
                        [0.58700, -0.33126, -0.41869],
                        [ 0.11400, 0.50000, -0.08131]])
    converted = np.dot(rgb, weights)
    converted[:, :, 1:] += 128.0
    return converted
def YUV2RGB( yuv ):
    """Inverse of RGB2YUV; output is clipped to the displayable [0, 255] range."""
    inverse = np.array([[ 1.0, 1.0, 1.0],
                        [-0.000007154783816076815, -0.3441331386566162, 1.7720025777816772],
                        [ 1.4019975662231445, -0.7141380310058594 , 0.00001542569043522235] ])
    restored = np.dot(yuv, inverse)
    # Fold the fixed chroma offsets (+128 applied in RGB2YUV) back out of
    # each channel.
    channel_offsets = (-179.45477266423404, 135.45870971679688, -226.8183044444304)
    for channel, delta in enumerate(channel_offsets):
        restored[:, :, channel] += delta
    return restored.clip(0, 255)
def readimage(img1,img2,isColored):
    """Load the two images to compare as float64 arrays.

    Parameters:
        img1, img2: paths/URLs accepted by imageio.imread.
        isColored: non-zero if the inputs are RGB; they are then reduced
            to their luminance (Y) channel so the metrics run on one plane.

    Returns:
        [image1_data, image2_data]
    """
    def _load(path):
        # np.float was removed in NumPy 1.24; the builtin float (float64)
        # is the exact equivalent of the old alias.
        pixels = imread(path).astype(float)
        if isColored != 0:
            pixels = RGB2YUV(pixels)[:, :, 0]
        return pixels
    return [_load(img1), _load(img2)]
def psnr(img1, img2, isColored):
    """Peak signal-to-noise ratio between two images, in dB.

    Assumes an 8-bit dynamic range (peak value 255). Returns infinity for
    identical images; the previous version raised ZeroDivisionError there
    because the MSE denominator was zero.
    """
    [im1_data,im2_data] = readimage(img1,img2,isColored)
    diff = im1_data - im2_data
    mse = np.mean(diff ** 2)
    if mse == 0:
        return float('inf')
    return 10 * math.log10(255.0**2/mse)
def ssimnaive(img1, img2,isColored):
    """Hand-rolled SSIM between two images (educational reference).

    NOTE(review): this deviates from the reference SSIM of Wang et al.
    2004 in several places (flagged inline); prefer ``ssim`` below for
    trusted values.
    """
    [im1_data,im2_data] = readimage(img1,img2,isColored)
    #Variables for Gaussian kernel definition
    gaussian_kernel_sigma=1.5
    gaussian_kernel_width=11
    gaussian_kernel=np.zeros((gaussian_kernel_width,gaussian_kernel_width))
    #Fill Gaussian kernel
    # NOTE(review): the centre is hard-coded at (5, 5); correct for width
    # 11, but would silently break if gaussian_kernel_width changed.
    for i in range(gaussian_kernel_width):
        for j in range(gaussian_kernel_width):
            gaussian_kernel[i,j]=\
            (1/(2*pi*(gaussian_kernel_sigma**2)))*\
            exp(-(((i-5)**2)+((j-5)**2))/(2*(gaussian_kernel_sigma**2)))
    #squares of input img
    im1_sq = im1_data**2
    im2_sq = im2_data**2
    im1_im2 = im1_data * im2_data
    #Variances obtained by Gaussian filtering of inputs' squares
    im1_data_sigma = scipy.ndimage.filters.convolve(im1_sq,gaussian_kernel)
    im2_data_sigma = scipy.ndimage.filters.convolve(im2_sq,gaussian_kernel)
    #Covariance
    im1_im2_sigma = scipy.ndimage.filters.convolve(im1_im2,gaussian_kernel)
    #Centered squares of variances
    # NOTE(review): the reference formula centres with the *filtered*
    # means (E[x^2] - E[x]^2); subtracting the raw squares instead is not
    # the textbook SSIM - confirm against Wang et al. before trusting.
    im1_data_sigma = im1_data_sigma - im1_sq
    im2_data_sigma = im2_data_sigma - im2_sq
    im1_im2_sigma = im1_im2_sigma - im1_im2
    #c1/c2 constants
    #First use: manual fitting
    c_1=6.5025
    c_2=58.5225
    #Second use: change k1,k2 & c1,c2 depend on L (width of color map)
    l=255
    k_1=0.01
    c_1=(k_1*l)**2
    k_2=0.03
    # NOTE(review): k_2 is never used; c_2 keeps the manual value above,
    # which happens to equal (k_2*l)**2 = 58.5225, so the result is
    # unchanged.
    #Numerator of SSIM
    # NOTE(review): uses raw pixel products where the reference formula
    # uses local (Gaussian-weighted) means mu1*mu2 - flagged for review.
    num_ssim=(2*im1_im2+c_1)*(2*im1_im2_sigma+c_2)
    #Denominator of SSIM
    den_ssim=(im1_sq+im2_sq+c_1)*\
    (im1_data_sigma+im2_data_sigma+c_2)
    #SSIM
    ssim_map=num_ssim/den_ssim
    index=np.average(ssim_map)
    return index
def ssim(img1, img2,isColored):
    """Structural similarity via skimage's reference implementation."""
    first, second = readimage(img1, img2, isColored)
    return compare_ssim(first, second)
def main():
    """Run the metric selected on the command line and print the result."""
    args = get_args()
    # Dispatch on the metric flag; anything else is silently ignored,
    # matching the original if/elif chain.
    metric_fn = {0: psnr, 1: ssim}.get(args.metric)
    if metric_fn is not None:
        print(metric_fn(args.image_1, args.image_2, args.isColored))

if __name__ == '__main__':
    main()
|
984,330 | 9d4286f51f796ee07f9c9689d5e16a93c185dad9 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUVMResyncsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUAlarmsFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUVMInterfacesFetcher
from .fetchers import NUVRSsFetcher
from .fetchers import NUEventLogsFetcher
from bambou import NURESTObject
class NUVM(NURESTObject):
""" Represents a VM in the VSD
Notes:
API that can retrieve the VMs associated with a domain, zone or subnet for mediation created VM's for REST created VM's you need to set the additional proxy user header in http request : X-Nuage-ProxyUservalue of the header has to be either :1) enterpriseName@UserName (example : Alcatel Lucent@bob), or 2) external ID of user in VSD, typically is UUID generally decided by the CMS tool in questionUser needs to have CMS privileges to use proxy user header.
"""
__rest_name__ = "vm"
__resource_name__ = "vms"
## Constants
CONST_REASON_TYPE_SHUTDOWN_UNKNOWN = "SHUTDOWN_UNKNOWN"
CONST_REASON_TYPE_CRASHED_UNKNOWN = "CRASHED_UNKNOWN"
CONST_REASON_TYPE_PAUSED_IOERROR = "PAUSED_IOERROR"
CONST_STATUS_SHUTDOWN = "SHUTDOWN"
CONST_REASON_TYPE_SHUTDOWN_LAST = "SHUTDOWN_LAST"
CONST_STATUS_DELETE_PENDING = "DELETE_PENDING"
CONST_REASON_TYPE_RUNNING_UNKNOWN = "RUNNING_UNKNOWN"
CONST_STATUS_RUNNING = "RUNNING"
CONST_REASON_TYPE_RUNNING_LAST = "RUNNING_LAST"
CONST_REASON_TYPE_RUNNING_UNPAUSED = "RUNNING_UNPAUSED"
CONST_REASON_TYPE_PAUSED_FROM_SNAPSHOT = "PAUSED_FROM_SNAPSHOT"
CONST_REASON_TYPE_PAUSED_MIGRATION = "PAUSED_MIGRATION"
CONST_REASON_TYPE_RUNNING_BOOTED = "RUNNING_BOOTED"
CONST_REASON_TYPE_UNKNOWN = "UNKNOWN"
CONST_STATUS_UNREACHABLE = "UNREACHABLE"
CONST_STATUS_BLOCKED = "BLOCKED"
CONST_REASON_TYPE_SHUTOFF_DESTROYED = "SHUTOFF_DESTROYED"
CONST_REASON_TYPE_SHUTOFF_FROM_SNAPSHOT = "SHUTOFF_FROM_SNAPSHOT"
CONST_REASON_TYPE_SHUTOFF_UNKNOWN = "SHUTOFF_UNKNOWN"
CONST_STATUS_NOSTATE = "NOSTATE"
CONST_REASON_TYPE_PAUSED_DUMP = "PAUSED_DUMP"
CONST_REASON_TYPE_CRASHED_LAST = "CRASHED_LAST"
CONST_STATUS_CRASHED = "CRASHED"
CONST_REASON_TYPE_PAUSED_LAST = "PAUSED_LAST"
CONST_REASON_TYPE_BLOCKED_LAST = "BLOCKED_LAST"
CONST_REASON_TYPE_SHUTOFF_LAST = "SHUTOFF_LAST"
CONST_STATUS_SHUTOFF = "SHUTOFF"
CONST_REASON_TYPE_SHUTOFF_SHUTDOWN = "SHUTOFF_SHUTDOWN"
CONST_REASON_TYPE_NOSTATE_UNKNOWN = "NOSTATE_UNKNOWN"
CONST_REASON_TYPE_PAUSED_SAVE = "PAUSED_SAVE"
CONST_REASON_TYPE_RUNNING_FROM_SNAPSHOT = "RUNNING_FROM_SNAPSHOT"
CONST_STATUS_UNKNOWN = "UNKNOWN"
CONST_REASON_TYPE_PAUSED_UNKNOWN = "PAUSED_UNKNOWN"
CONST_REASON_TYPE_SHUTOFF_FAILED = "SHUTOFF_FAILED"
CONST_REASON_TYPE_SHUTOFF_SAVED = "SHUTOFF_SAVED"
CONST_REASON_TYPE_SHUTOFF_MIGRATED = "SHUTOFF_MIGRATED"
CONST_STATUS_LAST = "LAST"
CONST_REASON_TYPE_RUNNING_MIGRATED = "RUNNING_MIGRATED"
CONST_REASON_TYPE_RUNNING_SAVE_CANCELED = "RUNNING_SAVE_CANCELED"
CONST_REASON_TYPE_SHUTDOWN_USER = "SHUTDOWN_USER"
CONST_REASON_TYPE_RUNNING_MIGRATION_CANCELED = "RUNNING_MIGRATION_CANCELED"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
CONST_STATUS_PAUSED = "PAUSED"
CONST_STATUS_INIT = "INIT"
CONST_REASON_TYPE_BLOCKED_UNKNOWN = "BLOCKED_UNKNOWN"
CONST_REASON_TYPE_NOSTATE_LAST = "NOSTATE_LAST"
CONST_REASON_TYPE_RUNNING_RESTORED = "RUNNING_RESTORED"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_REASON_TYPE_SHUTOFF_CRASHED = "SHUTOFF_CRASHED"
CONST_REASON_TYPE_PAUSED_USER = "PAUSED_USER"
CONST_DELETE_MODE_TIMER = "TIMER"
CONST_REASON_TYPE_PAUSED_WATCHDOG = "PAUSED_WATCHDOG"
CONST_REASON_TYPE_PAUSED_SHUTTING_DOWN = "PAUSED_SHUTTING_DOWN"
def __init__(self, **kwargs):
""" Initializes a VM instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> vm = NUVM(id=u'xxxx-xxx-xxx-xxx', name=u'VM')
>>> vm = NUVM(data=my_dict)
"""
super(NUVM, self).__init__()
# Read/Write Attributes
self._l2_domain_ids = None
self._vrsid = None
self._uuid = None
self._name = None
self._last_updated_by = None
self._reason_type = None
self._delete_expiry = None
self._delete_mode = None
self._resync_info = None
self._site_identifier = None
self._interfaces = None
self._enterprise_id = None
self._enterprise_name = None
self._entity_scope = None
self._domain_ids = None
self._compute_provisioned = None
self._zone_ids = None
self._orchestration_id = None
self._user_id = None
self._user_name = None
self._status = None
self._subnet_ids = None
self._external_id = None
self._hypervisor_ip = None
self.expose_attribute(local_name="l2_domain_ids", remote_name="l2DomainIDs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="vrsid", remote_name="VRSID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="uuid", remote_name="UUID", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="reason_type", remote_name="reasonType", attribute_type=str, is_required=False, is_unique=False, choices=[u'BLOCKED_LAST', u'BLOCKED_UNKNOWN', u'CRASHED_LAST', u'CRASHED_UNKNOWN', u'NOSTATE_LAST', u'NOSTATE_UNKNOWN', u'PAUSED_DUMP', u'PAUSED_FROM_SNAPSHOT', u'PAUSED_IOERROR', u'PAUSED_LAST', u'PAUSED_MIGRATION', u'PAUSED_SAVE', u'PAUSED_SHUTTING_DOWN', u'PAUSED_UNKNOWN', u'PAUSED_USER', u'PAUSED_WATCHDOG', u'RUNNING_BOOTED', u'RUNNING_FROM_SNAPSHOT', u'RUNNING_LAST', u'RUNNING_MIGRATED', u'RUNNING_MIGRATION_CANCELED', u'RUNNING_RESTORED', u'RUNNING_SAVE_CANCELED', u'RUNNING_UNKNOWN', u'RUNNING_UNPAUSED', u'SHUTDOWN_LAST', u'SHUTDOWN_UNKNOWN', u'SHUTDOWN_USER', u'SHUTOFF_CRASHED', u'SHUTOFF_DESTROYED', u'SHUTOFF_FAILED', u'SHUTOFF_FROM_SNAPSHOT', u'SHUTOFF_LAST', u'SHUTOFF_MIGRATED', u'SHUTOFF_SAVED', u'SHUTOFF_SHUTDOWN', u'SHUTOFF_UNKNOWN', u'UNKNOWN'])
self.expose_attribute(local_name="delete_expiry", remote_name="deleteExpiry", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="delete_mode", remote_name="deleteMode", attribute_type=str, is_required=False, is_unique=False, choices=[u'TIMER'])
self.expose_attribute(local_name="resync_info", remote_name="resyncInfo", attribute_type=dict, is_required=False, is_unique=False)
self.expose_attribute(local_name="site_identifier", remote_name="siteIdentifier", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="interfaces", remote_name="interfaces", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="enterprise_id", remote_name="enterpriseID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="enterprise_name", remote_name="enterpriseName", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="domain_ids", remote_name="domainIDs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="compute_provisioned", remote_name="computeProvisioned", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="zone_ids", remote_name="zoneIDs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="orchestration_id", remote_name="orchestrationID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="user_id", remote_name="userID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="user_name", remote_name="userName", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'BLOCKED', u'CRASHED', u'DELETE_PENDING', u'INIT', u'LAST', u'NOSTATE', u'PAUSED', u'RUNNING', u'SHUTDOWN', u'SHUTOFF', u'UNKNOWN', u'UNREACHABLE'])
self.expose_attribute(local_name="subnet_ids", remote_name="subnetIDs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self.expose_attribute(local_name="hypervisor_ip", remote_name="hypervisorIP", attribute_type=str, is_required=False, is_unique=False)
# Fetchers
self.vm_resyncs = NUVMResyncsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.alarms = NUAlarmsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.vm_interfaces = NUVMInterfacesFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.vrss = NUVRSsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.event_logs = NUEventLogsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def l2_domain_ids(self):
""" Get l2_domain_ids value.
Notes:
Array of IDs of the l2 domain that the VM is connected to
This attribute is named `l2DomainIDs` in VSD API.
"""
return self._l2_domain_ids
@l2_domain_ids.setter
def l2_domain_ids(self, value):
""" Set l2_domain_ids value.
Notes:
Array of IDs of the l2 domain that the VM is connected to
This attribute is named `l2DomainIDs` in VSD API.
"""
self._l2_domain_ids = value
@property
def vrsid(self):
""" Get vrsid value.
Notes:
Id of the VRS that this VM is attached to.
This attribute is named `VRSID` in VSD API.
"""
return self._vrsid
@vrsid.setter
def vrsid(self, value):
""" Set vrsid value.
Notes:
Id of the VRS that this VM is attached to.
This attribute is named `VRSID` in VSD API.
"""
self._vrsid = value
@property
def uuid(self):
""" Get uuid value.
Notes:
UUID of the VM
This attribute is named `UUID` in VSD API.
"""
return self._uuid
@uuid.setter
def uuid(self, value):
""" Set uuid value.
Notes:
UUID of the VM
This attribute is named `UUID` in VSD API.
"""
self._uuid = value
@property
def name(self):
""" Get name value.
Notes:
Name of the VM
"""
return self._name
@name.setter
def name(self, value):
""" Set name value.
Notes:
Name of the VM
"""
self._name = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def reason_type(self):
""" Get reason_type value.
Notes:
Reason of the event associated with the VM.
This attribute is named `reasonType` in VSD API.
"""
return self._reason_type
@reason_type.setter
def reason_type(self, value):
""" Set reason_type value.
Notes:
Reason of the event associated with the VM.
This attribute is named `reasonType` in VSD API.
"""
self._reason_type = value
@property
def delete_expiry(self):
""" Get delete_expiry value.
Notes:
reflects the VM Deletion expiry timer in secs , deleteMode needs to be non-null value for deleteExpiry to be taken in to effect. CMS created VM's will always have deleteMode set to TIMER
This attribute is named `deleteExpiry` in VSD API.
"""
return self._delete_expiry
@delete_expiry.setter
def delete_expiry(self, value):
""" Set delete_expiry value.
Notes:
reflects the VM Deletion expiry timer in secs , deleteMode needs to be non-null value for deleteExpiry to be taken in to effect. CMS created VM's will always have deleteMode set to TIMER
This attribute is named `deleteExpiry` in VSD API.
"""
self._delete_expiry = value
@property
def delete_mode(self):
""" Get delete_mode value.
Notes:
reflects the mode of VM Deletion - TIMER Possible values are TIMER, .
This attribute is named `deleteMode` in VSD API.
"""
return self._delete_mode
@delete_mode.setter
def delete_mode(self, value):
""" Set delete_mode value.
Notes:
reflects the mode of VM Deletion - TIMER Possible values are TIMER, .
This attribute is named `deleteMode` in VSD API.
"""
self._delete_mode = value
@property
def resync_info(self):
""" Get resync_info value.
Notes:
Information of the status of the resync operation of a VM
This attribute is named `resyncInfo` in VSD API.
"""
return self._resync_info
@resync_info.setter
def resync_info(self, value):
""" Set resync_info value.
Notes:
Information of the status of the resync operation of a VM
This attribute is named `resyncInfo` in VSD API.
"""
self._resync_info = value
@property
def site_identifier(self):
""" Get site_identifier value.
Notes:
This property specifies the site the VM belongs to, for Geo-redundancy.
This attribute is named `siteIdentifier` in VSD API.
"""
return self._site_identifier
@site_identifier.setter
def site_identifier(self, value):
""" Set site_identifier value.
Notes:
This property specifies the site the VM belongs to, for Geo-redundancy.
This attribute is named `siteIdentifier` in VSD API.
"""
self._site_identifier = value
@property
def interfaces(self):
""" Get interfaces value.
Notes:
List of VM interfaces associated with the VM
"""
return self._interfaces
@interfaces.setter
def interfaces(self, value):
""" Set interfaces value.
Notes:
List of VM interfaces associated with the VM
"""
self._interfaces = value
@property
def enterprise_id(self):
""" Get enterprise_id value.
Notes:
ID of the enterprise that this VM belongs to
This attribute is named `enterpriseID` in VSD API.
"""
return self._enterprise_id
@enterprise_id.setter
def enterprise_id(self, value):
""" Set enterprise_id value.
Notes:
ID of the enterprise that this VM belongs to
This attribute is named `enterpriseID` in VSD API.
"""
self._enterprise_id = value
@property
def enterprise_name(self):
""" Get enterprise_name value.
Notes:
Name of the enterprise that this VM belongs to
This attribute is named `enterpriseName` in VSD API.
"""
return self._enterprise_name
@enterprise_name.setter
def enterprise_name(self, value):
""" Set enterprise_name value.
Notes:
Name of the enterprise that this VM belongs to
This attribute is named `enterpriseName` in VSD API.
"""
self._enterprise_name = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def domain_ids(self):
""" Get domain_ids value.
Notes:
Array of IDs of the domain that the VM is connected to
This attribute is named `domainIDs` in VSD API.
"""
return self._domain_ids
@domain_ids.setter
def domain_ids(self, value):
""" Set domain_ids value.
Notes:
Array of IDs of the domain that the VM is connected to
This attribute is named `domainIDs` in VSD API.
"""
self._domain_ids = value
@property
def compute_provisioned(self):
""" Get compute_provisioned value.
Notes:
computeProvisioned
This attribute is named `computeProvisioned` in VSD API.
"""
return self._compute_provisioned
@compute_provisioned.setter
def compute_provisioned(self, value):
""" Set compute_provisioned value.
Notes:
computeProvisioned
This attribute is named `computeProvisioned` in VSD API.
"""
self._compute_provisioned = value
@property
def zone_ids(self):
""" Get zone_ids value.
Notes:
Array of IDs of the zone that this VM is attached to
This attribute is named `zoneIDs` in VSD API.
"""
return self._zone_ids
@zone_ids.setter
def zone_ids(self, value):
""" Set zone_ids value.
Notes:
Array of IDs of the zone that this VM is attached to
This attribute is named `zoneIDs` in VSD API.
"""
self._zone_ids = value
@property
def orchestration_id(self):
""" Get orchestration_id value.
Notes:
Orchestration ID
This attribute is named `orchestrationID` in VSD API.
"""
return self._orchestration_id
@orchestration_id.setter
def orchestration_id(self, value):
""" Set orchestration_id value.
Notes:
Orchestration ID
This attribute is named `orchestrationID` in VSD API.
"""
self._orchestration_id = value
@property
def user_id(self):
""" Get user_id value.
Notes:
ID of the user that created this VM
This attribute is named `userID` in VSD API.
"""
return self._user_id
@user_id.setter
def user_id(self, value):
""" Set user_id value.
Notes:
ID of the user that created this VM
This attribute is named `userID` in VSD API.
"""
self._user_id = value
@property
def user_name(self):
""" Get user_name value.
Notes:
Username of the user that created this VM
This attribute is named `userName` in VSD API.
"""
return self._user_name
@user_name.setter
def user_name(self, value):
""" Set user_name value.
Notes:
Username of the user that created this VM
This attribute is named `userName` in VSD API.
"""
self._user_name = value
@property
def status(self):
""" Get status value.
Notes:
Status of the VM.
"""
return self._status
@status.setter
def status(self, value):
""" Set status value.
Notes:
Status of the VM.
"""
self._status = value
@property
def subnet_ids(self):
""" Get subnet_ids value.
Notes:
Array of IDs of the subnets that the VM is connected to
This attribute is named `subnetIDs` in VSD API.
"""
return self._subnet_ids
@subnet_ids.setter
def subnet_ids(self, value):
""" Set subnet_ids value.
Notes:
Array of IDs of the subnets that the VM is connected to
This attribute is named `subnetIDs` in VSD API.
"""
self._subnet_ids = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
@property
def hypervisor_ip(self):
""" Get hypervisor_ip value.
Notes:
IP address of the hypervisor that this VM is currently running in
This attribute is named `hypervisorIP` in VSD API.
"""
return self._hypervisor_ip
@hypervisor_ip.setter
def hypervisor_ip(self, value):
""" Set hypervisor_ip value.
Notes:
IP address of the hypervisor that this VM is currently running in
This attribute is named `hypervisorIP` in VSD API.
"""
self._hypervisor_ip = value
|
984,331 | f79856f601f3e5c151d58b19f299dd776fe7cc84 | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
# In[2]:
datos = ['1.1', 'Python', '0.5', 'pandas', '2.8']
serie = pd.Series(datos)
serie
# In[5]:
serie = pd.Series(serie).sort_values()
# In[6]:
serie
# In[ ]:
|
984,332 | c93e88878f50cb9e75033216b43fbb17eaa24ffb | import cv2
import numpy
from vidutils import _vid_capture
PRIMARY_CAMERA = 0
def processing(source=None):
# use the webcam if no source input.
if source is None:
source = PRIMARY_CAMERA
# background subtractor
fgbg = cv2.BackgroundSubtractorMOG2()
# pos_x = 0
# pos_y = 0
# fixed_position = false
with _vid_capture(source) as cap:
# getting the background in 5 frames
for i in xrange(5):
valid, frame = cap.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
fgbg.apply(gray)
print "I'm ready."
while (valid):
# read...
valid, frame = cap.read()
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# loading to background subtractor object
fgmask = fgbg.apply(frame)
# display
cv2.imshow('video', frame)
# Should I use a box that put the hand in there?
# cv2.rectangle(frame,(150,150),(350,350),(255,0,255),2)
# Reduce noise
ret, thres = cv2.threshold(fgmask, 0, 255, cv2.THRESH_BINARY)
# blur
blur = cv2.medianBlur(thres, 5)
# find contours
contours, hierarchy = cv2.findContours(
blur, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
if contours:
# get the largest contour
cnt = max(contours, key=cv2.contourArea)
# draw contours
cv2.drawContours(blur, cnt, -1, (255, 0, 255), 3)
# get the max values of the box around the contours
# x,y,w,h = cv2.boundingRect(cnt)
# cv2.rectangle(blur,(x,y),(x+w,y+w+100),(255,0,255),2)
# choose the position that's comfortable with the hand
# if cv2.waitKey(1) & 0xFF == ord('s'):
# pos_x = x
# pos_x = y
# fixed_position = True
cv2.imshow('subtracted', blur)
if cv2.waitKey(1) & 0xFF == ord('q'):
cv2.destroyAllWindows()
break
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
# user interaction
raw_input("Press enter when you're ready. \
\nThere shouldn't be anything moving around at this time")
print "Processing..."
processing()
|
984,333 | 9df51665e97afd874a077b04bd161a243d913dbc | # -*- coding: utf-8 -*-
L = [
['Apple', 'Google', 'Microsoft'],
['Java', 'Python', 'Ruby', 'PHP'],
['Adam', 'Bart', 'Lisa']
]
# ๆๅฐApple:
print(L[0][0])
# ๆๅฐPython:
print(L[1][1])
# ๆๅฐLisa:
print(L[2][2])
height = float(input('่ฏท่พๅ
ฅๆจ็่บซ้ซ:'))
weight = float(input('่ฏท่พๅ
ฅๆจ็ไฝ้:'))
bmi = weight / height**2
print(bmi)
if bmi < 18.5:
print('็ซฅ้ดไฝ ๅคช็ฆไบ')
elif bmi >=18.5 and bmi < 25:
print('็ซฅ้ดไฝ ่บซๆๅพๆฃๅฆ')
elif bmi >= 25 and bmi < 28:
print('็ซฅ้ดไฝ ๆ็น่ๅฆ')
elif bmi >= 28 and bmi < 32:
print('็ซฅ้ดไฝ ่ฏฅๅ่ฅไบ')
else:
print('็ซฅ้ดใใใ')
#ๅพช็ฏ
l = ['Bart','Lisa','Adam']
for x in l:
print('hello:',x)
a = len(l)
while a>0:
print('hello:',l[a-1])
a = a-1
#break่ทณๅบๅ
จ้จๅพช็ฏ
n = 1
while n<=100:
if n>10:
break
print(n)
n = n+1
print('END')
#continue่ทณๅบๆฌๆฌกๅพช็ฏ
s = 0
while s<10:
s = s+1
if s%2 == 0:
continue
print(s)
|
984,334 | 583b2e7d3c242200fe708ffc00a1908c24428953 | # Low Level DXF modules
# Copyright (c) 2011-2022, Manfred Moitzi
# License: MIT License
|
984,335 | 60a70b738fc172aa5698e5770559ae705305f1ee | import visa,time,string
import random
class device:
def __init__(self,add="GPIB0::14"):
self.device=visa.instrument(add)
self.tauset={
0 : "10mus",
1 : "30mus",
2 : "100mus",
3 : "300mus",
4 : "1ms",
5 : "3ms",
6 : "10ms",
7 : "30ms",
8 : "100ms",
9 : "300ms",
10 : "1s",
11 : "3s",
12 : "10s",
13 : "30s",
14 : "100s",
15 : "300s",
16 : "1ks",
17 : "3ks",
18 : "10ks",
19 : "30ks"}
self.sensset={
0 : "2nV",
1 : "5nV",
2 : "10nV",
3 : "20nV",
4 : "50 nV",
5 : "100nV",
6 : "200nV",
7 : "500nV",
8 : "1muV",
9 : "2muV",
10 : "5muV",
11 : "10muV",
12 : "20muV",
13 : "50muV",
14 : "100muV",
15 : "200muV",
16 : "500muV",
17 : "1mV",
18 : "2mV",
19 : "5mV",
20 : "10mV",
21 : "20mV",
22 : "50mV",
23 : "100mV",
24 : "200mV",
25 : "500mV",
26 : "1V"}
def reset(self):
self.device.write('*RST')
def clear(self):
self.device.write('*CLS')
def disable_front_panel(self):
self.device.write('OVRM 1')
def enable_front_panel(self):
self.device.write('OVRM 0')
def auto_phase(self):
self.device.write('APHS')
def auto_gain(self):
self.device.write('AGAN')
def auto_reserve(self):
self.device.write('ARSV')
def auto_offset(self,channel):
self.device.write('AOFF %i' % channel )
#get settings
def get_tau(self):
return self.device.ask('OFLT?')
def get_sens(self):
return self.device.ask('SENS?')
def get_trigsource(self):
return self.device.ask('FMOD?')
def get_trigshape(self):
return self.device.ask('RSLP?')
def get_harm(self):
return self.device.ask('HARM?')
def get_input(self):
return self.device.ask('ISRC?')
def get_ground(self):
return self.device.ask('IGND?')
def get_couple(self):
return self.device.ask('ICPL?')
def get_filter(self):
return self.device.ask('ILIN?')
def get_reserve(self):
return self.device.ask('RMOD?')
def get_slope(self):
return self.device.ask('OFSL?')
def get_sync(self):
return self.device.ask('SYNC?')
def get_disp_rat(self,channel):
return self.device.ask('DDEF? %i' % channel)
def get_exp_off(self,channel):
return self.device.ask('OEXP? %i' % channel)
#set settings
def set_freq(self,freq):
self.device.write('FREQ %f' % freq )
def set_ampl(self,ampl):
self.device.write('SLVL %f' % ampl)
def set_mode(self,mode):
self.device.write('FMOD %i' % mode)
def set_tau(self,tau):
self.device.write('OFLT %i' % tau)
def set_sens(self,sens):
self.device.write('SENS %i' % sens)
def set_phase(self,phase):
self.device.write('PHAS %f' % phase)
def set_aux(self,output,value):
self.device.write('AUXV %(out)i, %(val).3f' % {'out':output,'val':value})
def set_trigsource(self,ref):
self.device.write('FMOD %e' % ref)
def set_trigshape(self, trigshape):
self.device.write('RSLP %i' % trigshape)
def set_disp_rat(self,channel,disp,ratio):
self.device.write('DDEF %(channel)i, %(disp)i, %(ratio)i' % {'channel':channel,'disp':disp, 'ratio':ratio})
def set_exp_off(self,channel,offset,expand):
self.device.write('OEXP %(channel)i, %(offset)f, %(expand)i' % {'channel':channel,'offset':offset, 'expand':expand})
def set_reserve(self,reserve):
self.device.write('RMOD %i' % reserve)
def set_filter(self,filt):
self.device.write('ILIN %i' % filt)
def set_input(self, inp):
self.device.write('ISRC %i' % inp)
def set_ground(self,gnd):
self.device.write('IGND %i' % gnd)
def set_couple(self, coup):
self.device.write('ICPL %i' % coup)
def set_slope(self,slope):
self.device.write('OFSL %i' % slope)
def set_sync(self,sync):
self.device.write('SYNC %i' % sync)
#get data
def get_all(self):
return self.device.ask("SNAP?1,2,3,4")
def get_X(self):
return float(self.device.ask('OUTP? 1'))
def get_Y(self):
return float(self.device.ask('OUTP? 2'))
def get_R(self):
return float(self.device.ask('OUTP? 3'))
def get_Theta(self):
return float(self.device.ask('OUTP? 4'))
def get_freq(self):
return float(self.device.ask('FREQ?'))
def get_ampl(self):
return float(self.device.ask('SLVL?'))
def get_phase(self):
return float(self.device.ask('PHAS?'))
def get_harm(self):
return float(self.device.ask('HARM?'))
def get_oaux(self,value):
return float(self.device.ask('OAUX? %i' %value))
def read_aux(self,output):
return float(self.device.ask('AUXV? %i' %output))
if (__name__ == '__main__'):
add="GPIB0::14"
lockin=device(add)
#f = open('test.dat','wb');
data=lockin.get_all()
x=float(data.split(',')[0])
y=float(data.split(',')[1])
r=float(data.split(',')[2])
theta=float(data.split(',')[3]) |
984,336 | da9b001fbd0a7ccd71e25b7c9936a79ce4996f86 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Blueprint of the /countries route.
This route will be registered in `server.py`.
'''
import os
import flask
import app.utilities.load as Load
from rq import Queue
from redis import Redis
from app.classes.ckan import CKAN
from app.functions.manage_queue import getStatus
from app.functions.fetch_store import fetchAndStore
ckan = CKAN().init()
REDIS_HOST = os.environ.get('REDIS_PORT_6379_TCP_ADDR')
blueprint_countries = flask.Blueprint('countries', __name__)
@blueprint_countries.route('/countries')
def computeCountries():
'''
Computes information about all countries of a
CKAN instance.
'''
key = 'countries'
status = getStatus(key)
queue = Queue(connection=Redis(host=REDIS_HOST), name=key)
countries = ckan.action.group_list()
if status['empty']:
for country in countries:
job = queue.enqueue(fetchAndStore, key, country)
response = {
'success': True,
'message': 'Computing countries information. {n} before finished.'.format(n=status['count']),
'endpoint': key,
'time': None,
'ETA': None,
'computations': {
'total': len(countries),
'completed': len(countries) - status['count'],
'queued': status['count'],
'progress': round(((len(countries) - status['count']) / len(countries)) * 100, 2)
}
}
return flask.jsonify(**response)
|
984,337 | e9149656b423a3ea9650ad5ad8eb2df3ae8a41b6 | from pytools import testutil
import sys
import basecase
class calcphotCase1(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=True
self.etcid="ACS.MISC.1.IMAG.029"
self.setglobal(__file__)
self.runpy()
class calcphotCase2(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),5,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.032"
self.setglobal(__file__)
self.runpy()
class calcphotCase3(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=True
self.etcid="ACS.MISC.1.IMAG.029"
self.setglobal(__file__)
self.runpy()
class calcphotCase4(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),30.0,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.035"
self.setglobal(__file__)
self.runpy()
class calcphotCase5(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*1.0"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.037"
self.setglobal(__file__)
self.runpy()
class calcphotCase6(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*1.25"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.036"
self.setglobal(__file__)
self.runpy()
class calcphotCase7(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*2.0"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.038"
self.setglobal(__file__)
self.runpy()
class calcphotCase8(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*4.0"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.039"
self.setglobal(__file__)
self.runpy()
class calcphotCase9(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f220w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.001"
self.setglobal(__file__)
self.runpy()
class calcphotCase10(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f220w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=True
self.etcid="ACS.HRC.PT.IMAG.001"
self.setglobal(__file__)
self.runpy()
class calcphotCase11(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f250w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=True
self.etcid="ACS.HRC.PT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class calcphotCase12(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f250w"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.003"
self.setglobal(__file__)
self.runpy()
class calcphotCase13(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f250w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class calcphotCase14(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f330w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.0e-17,flam)"
self.subset=True
self.etcid="ACS.HRC.EXT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class calcphotCase15(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f330w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.004"
self.setglobal(__file__)
self.runpy()
class calcphotCase16(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f330w"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.005"
self.setglobal(__file__)
self.runpy()
class calcphotCase17(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f330w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.EXT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class calcphotCase18(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f344n"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.006"
self.setglobal(__file__)
self.runpy()
class calcphotCase19(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f344n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.006"
self.setglobal(__file__)
self.runpy()
class calcphotCase20(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f435w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=True
self.etcid="ACS.HRC.PT.IMAG.007"
self.setglobal(__file__)
self.runpy()
class calcphotCase21(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f435w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.007"
self.setglobal(__file__)
self.runpy()
class calcphotCase22(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f475w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.008"
self.setglobal(__file__)
self.runpy()
class calcphotCase23(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f475w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.008"
self.setglobal(__file__)
self.runpy()
class calcphotCase24(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f502n"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=True
self.etcid="ACS.HRC.PT.IMAG.009"
self.setglobal(__file__)
self.runpy()
class calcphotCase25(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f502n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.009"
self.setglobal(__file__)
self.runpy()
class calcphotCase26(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f550m"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=True
self.etcid="ACS.HRC.PT.IMAG.010"
self.setglobal(__file__)
self.runpy()
class calcphotCase27(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f550m"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.010"
self.setglobal(__file__)
self.runpy()
class calcphotCase28(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w,coron"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),0,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.027"
self.setglobal(__file__)
self.runpy()
class calcphotCase29(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w,coron"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),10,vegamag)"
self.subset=True
self.etcid="ACS.MISC.1.IMAG.025"
self.setglobal(__file__)
self.runpy()
class calcphotCase30(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w,coron"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.023"
self.setglobal(__file__)
self.runpy()
class calcphotCase31(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w,coron"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),5,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.026"
self.setglobal(__file__)
self.runpy()
class calcphotCase32(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w,coron"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.023"
self.setglobal(__file__)
self.runpy()
class calcphotCase33(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.019"
self.setglobal(__file__)
self.runpy()
class calcphotCase34(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.020"
self.setglobal(__file__)
self.runpy()
class calcphotCase35(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.018"
self.setglobal(__file__)
self.runpy()
class calcphotCase36(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.003"
self.setglobal(__file__)
self.runpy()
class calcphotCase37(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.011"
self.setglobal(__file__)
self.runpy()
class calcphotCase38(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.003"
self.setglobal(__file__)
self.runpy()
class calcphotCase39(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.011"
self.setglobal(__file__)
self.runpy()
class calcphotCase40(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f606w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.012"
self.setglobal(__file__)
self.runpy()
class calcphotCase41(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f606w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.012"
self.setglobal(__file__)
self.runpy()
class calcphotCase42(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,hrc,f625w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.013"
self.setglobal(__file__)
self.runpy()
# --- ACS/HRC broadband & narrowband imaging calcphot cases (auto-generated) ---
# Each setUp only records the test inputs -- the observation mode string, a
# synphot spectrum expression, the ETC test id this case mirrors, and whether
# the case belongs to the reduced "subset" sweep -- then calls
# setglobal(__file__)/runpy(), which are inherited from basecase.calcphotCase
# (defined elsewhere; presumably they execute the calcphot task and record
# results -- confirm in basecase).
class calcphotCase43(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f625w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.013"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase44(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f658n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.014"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase45(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f658n"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.014"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase46(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f775w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.015"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase47(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f775w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.015"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase48(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f850lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.HRC.PT.IMAG.016"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase49(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f850lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.016"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase50(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f892n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase51(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,f892n"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=True
        self.etcid="ACS.HRC.PT.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
# --- ACS/HRC fr388n#3880 ramp-filter calcphot cases (auto-generated) ---
# Same pattern as the imaging cases above: setUp stores obsmode / spectrum /
# subset / etcid and hands off to the inherited setglobal()/runpy() from
# basecase.calcphotCase (not visible in this file chunk).  The spectra sweep
# blackbodies, Kurucz (k93models) atmospheres, power laws, flat spectra and
# CALSPEC white-dwarf standards through the same ramp setting.
class calcphotCase52(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.013"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase53(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,15400,0.0,3.9),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.006"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase54(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,3500,0.0,4.6),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.008"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase55(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,44500,0.0,5.0),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.005"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase56(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,4850,0.0,1.1),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.009"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase57(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.007"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase58(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.012"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase59(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
        self.subset=True
        self.etcid="ACS.HRC.PT.RAMP.014"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase60(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.021"
        self.setglobal(__file__)
        self.runpy()
# NOTE(review): cases 61/62 differ only in the exponent spelling of the
# renormalization constant ("1.e-15" vs "1.E-15") -- apparently exercising
# parser case-insensitivity -- and map to different ETC ids.
class calcphotCase61(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase62(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.E-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.EXT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase63(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="spec(/grp/hst/cdbs//calspec/g191b2b_mod_004.fits)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.011"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase64(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.010"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase65(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.EXT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
# --- ACS/HRC remaining ramp settings plus grism/prism calcphot cases ---
# Auto-generated like the blocks above: setUp records obsmode / spectrum /
# subset / etcid, then delegates to setglobal()/runpy() inherited from
# basecase.calcphotCase (defined elsewhere).
class calcphotCase66(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4590"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.EXT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase67(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4590"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase68(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4590"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.EXT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase69(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4592"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.022"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase70(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4592"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.022"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase71(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr505n#5050"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase72(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr505n#5050"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase73(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr656n#6560"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase74(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr656n#6560"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase75(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,g800l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.A1.SPEC.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase76(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,g800l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.HRC.SPEC.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase77(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,pr200l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.A1.SPEC.005"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase78(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,hrc,pr200l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.HRC.SPEC.002"
        self.setglobal(__file__)
        self.runpy()
# --- ACS/SBC imaging and prism calcphot cases (auto-generated) ---
# setUp records obsmode / spectrum / subset / etcid and delegates to the
# inherited setglobal()/runpy() from basecase.calcphotCase (defined
# elsewhere).  subset=True marks the cases included in the reduced sweep.
class calcphotCase79(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase80(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
        self.subset=False
        self.etcid="ACS.SBC.EXT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase81(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase82(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase83(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.SBC.EXT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase84(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f122m"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.SBC.PT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase85(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f122m"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase86(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f125lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
        self.subset=False
        self.etcid="ACS.SBC.EXT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase87(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f125lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-17,flam)"
        self.subset=True
        self.etcid="ACS.SBC.PT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase88(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f125lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.SBC.EXT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase89(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f140lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.SBC.PT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase90(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f140lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
# f150lp sweeps the source-spectrum families (blackbody, Kurucz model,
# power law, CALSPEC standard, flat spectrum) at a fixed renormalization.
class calcphotCase91(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(bb(10000),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.008"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase92(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(icat(k93models,44500,0.0,5.0),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.010"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase93(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(pl(4000.0,-1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.007"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase94(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.009"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase95(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.005"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase96(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.005"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase97(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f165lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.006"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase98(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,f165lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.006"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase99(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,pr110l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.SPEC.006"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase100(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,pr110l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))*2.0"
        self.subset=False
        self.etcid="ACS.MISC.1.SPEC.008"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase101(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,pr110l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.MISC.1.SPEC.007"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase102(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,pr130l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))*2.0"
        self.subset=False
        self.etcid="ACS.MISC.1.SPEC.009"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase103(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,sbc,pr130l"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.SBC.SPEC.003"
        self.setglobal(__file__)
        self.runpy()
# --- ACS/WFC1 fixed-filter imaging calcphot cases (auto-generated) ---
# Each setUp records obsmode / spectrum / subset / etcid and delegates to
# setglobal()/runpy() inherited from basecase.calcphotCase (defined
# elsewhere).  Pairs of cases typically run the same filter with a point
# source (rn(unit...)) and with a sky/background composite spectrum.
class calcphotCase104(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f435w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase105(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f435w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase106(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f475w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.WFC.PT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase107(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f475w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase108(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f502n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase109(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f502n"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase110(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f550m"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase111(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f550m"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
# f555w with the visible-light polarizer (pol_v) in the beam.
class calcphotCase112(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w,pol_v"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=True
        self.etcid="ACS.WFC.EXT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase113(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w,pol_v"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase114(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.015"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase115(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase116(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.014"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase117(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.A1.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase118(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase119(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.005"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase120(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.016"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase121(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.A1.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase122(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.005"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase123(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f606w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.006"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase124(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f606w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.006"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase125(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f625w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.WFC.EXT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase126(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f625w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=True
        self.etcid="ACS.WFC.EXT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase127(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f625w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.007"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase128(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f658n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.008"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase129(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f658n"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.008"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase130(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f660n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.009"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase131(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f660n"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.009"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase132(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f775w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.010"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase133(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f775w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.010"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase134(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f814w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.WFC.PT.IMAG.011"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase135(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f814w"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.011"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase136(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f850lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase137(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f850lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=True
        self.etcid="ACS.WFC.EXT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase138(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f850lp"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.012"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase139(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f892n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.WFC.PT.IMAG.013"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase140(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f892n"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.013"
        self.setglobal(__file__)
        self.runpy()
# --- ACS/WFC1 ramp-filter calcphot cases (auto-generated) ---
# setUp records obsmode / spectrum / subset / etcid, then delegates to
# setglobal()/runpy() inherited from basecase.calcphotCase (defined
# elsewhere).  The fr388n#3880 group mirrors the HRC fr388n cases above,
# here through the WFC1 detector.
class calcphotCase141(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr1016n#10000"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.012"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase142(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr1016n#10000"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.012"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase143(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="em(3880.0,10.0,1.0E-16,flam)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase144(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.024"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase145(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,15400,0.0,3.9),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.017"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase146(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,3500,0.0,4.6),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.019"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase147(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,44500,0.0,5.0),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.016"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase148(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,4850,0.0,1.1),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.020"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase149(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),15,vegamag)"
        self.subset=True
        self.etcid="ACS.WFC.PT.RAMP.018"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase150(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.023"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase151(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.025"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase152(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase153(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
# NOTE(review): cases 154/155 differ only in the exponent spelling
# ("1.e-15" vs "1.E-15") -- apparently a parser case-sensitivity check.
class calcphotCase154(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase155(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.E-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase156(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="spec(/grp/hst/cdbs//calspec/g191b2b_mod_004.fits)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.022"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase157(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.021"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase158(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase159(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3881"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.018"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase160(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3881"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.018"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase161(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr423n#4230"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase162(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr423n#4230"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase163(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr459m#4590"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase164(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr459m#4590"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase165(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr459m#4620"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.013"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase166(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr459m#4620"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.013"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase167(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr462n#4620"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase168(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr462n#4620"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase169(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr505n#5000"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=True
        self.etcid="ACS.WFC.PT.RAMP.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase170(basecase.calcphotCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr505n#5000"
        self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.004"
        self.setglobal(__file__)
        self.runpy()
class calcphotCase171(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr551n#5500"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.005"
self.setglobal(__file__)
self.runpy()
class calcphotCase172(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr551n#5500"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.005"
self.setglobal(__file__)
self.runpy()
class calcphotCase173(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr601n#6000"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.006"
self.setglobal(__file__)
self.runpy()
class calcphotCase174(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr601n#6000"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.006"
self.setglobal(__file__)
self.runpy()
class calcphotCase175(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr647m#6470"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.014"
self.setglobal(__file__)
self.runpy()
class calcphotCase176(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr647m#6470"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.014"
self.setglobal(__file__)
self.runpy()
class calcphotCase177(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr656n#6500"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.007"
self.setglobal(__file__)
self.runpy()
class calcphotCase178(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr656n#6500"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.007"
self.setglobal(__file__)
self.runpy()
class calcphotCase179(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr716n#7100"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=True
self.etcid="ACS.WFC.PT.RAMP.008"
self.setglobal(__file__)
self.runpy()
class calcphotCase180(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr716n#7100"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.008"
self.setglobal(__file__)
self.runpy()
class calcphotCase181(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr782n#7900"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.009"
self.setglobal(__file__)
self.runpy()
class calcphotCase182(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr782n#7900"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.009"
self.setglobal(__file__)
self.runpy()
class calcphotCase183(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr853n#8500"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.010"
self.setglobal(__file__)
self.runpy()
class calcphotCase184(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr853n#8500"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.010"
self.setglobal(__file__)
self.runpy()
class calcphotCase185(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr914m#9000"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.015"
self.setglobal(__file__)
self.runpy()
class calcphotCase186(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr914m#9000"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.015"
self.setglobal(__file__)
self.runpy()
class calcphotCase187(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr931n#9300"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.011"
self.setglobal(__file__)
self.runpy()
class calcphotCase188(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,fr931n#9300"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.011"
self.setglobal(__file__)
self.runpy()
class calcphotCase189(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.A1.SPEC.001"
self.setglobal(__file__)
self.runpy()
class calcphotCase190(basecase.calcphotCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.SPEC.001"
self.setglobal(__file__)
self.runpy()
# ---------------------------------------------------------------------------
# calcspec commissioning cases (machine-generated).
# These exercise spectrum evaluation without a bandpass: obsmode is the
# literal string "None" (not the Python None object) and etcid may be the
# string "None" or a string-encoded list of ETC request ids that share the
# same source spectrum.  setUp() stores the parameters and hands off to
# setglobal()/runpy() from basecase.calcspecCase.
# ---------------------------------------------------------------------------
class calcspecCase1(basecase.calcspecCase):
    def setUp(self):
        self.obsmode="None"
        self.spectrum="bb(10000)"
        self.subset=False
        self.etcid="['ACS.SBC.PT.IMAG.008', 'ACS.SBC.SPEC.007', 'ACS.SBC.SPEC.008', 'ACS.WFC.PT.IMAG.015', 'ACS.WFC.PT.RAMP.024', 'ACS.WFC.SPEC.003', 'ACS.HRC.PT.IMAG.019', 'ACS.HRC.PT.RAMP.013', 'ACS.HRC.SPEC.007', 'ACS.HRC.SPEC.008']"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase11(basecase.calcspecCase):
    def setUp(self):
        self.obsmode="None"
        self.spectrum="icat(k93models,15400,0.0,3.9)"
        self.subset=False
        self.etcid="None"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase13(basecase.calcspecCase):
    def setUp(self):
        self.obsmode="None"
        self.spectrum="icat(k93models,3500,0.0,4.6)"
        self.subset=False
        self.etcid="None"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase15(basecase.calcspecCase):
    def setUp(self):
        self.obsmode="None"
        self.spectrum="icat(k93models,44500,0.0,5.0)"
        self.subset=True
        self.etcid="None"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase18(basecase.calcspecCase):
    def setUp(self):
        self.obsmode="None"
        self.spectrum="icat(k93models,4850,0.0,1.1)"
        self.subset=False
        self.etcid="None"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase20(basecase.calcspecCase):
    def setUp(self):
        self.obsmode="None"
        self.spectrum="icat(k93models,5770,0.0,4.5)"
        self.subset=False
        self.etcid="None"
        self.setglobal(__file__)
        self.runpy()
class calcspecCase31(basecase.calcspecCase):
    def setUp(self):
        self.obsmode="None"
        self.spectrum="pl(4000.0,-1.0,flam)"
        self.subset=False
        self.etcid="['ACS.SBC.PT.IMAG.007', 'ACS.SBC.SPEC.005', 'ACS.SBC.SPEC.006', 'ACS.WFC.PT.IMAG.014', 'ACS.WFC.PT.RAMP.025', 'ACS.WFC.SPEC.002', 'ACS.HRC.PT.IMAG.018', 'ACS.HRC.PT.RAMP.014', 'ACS.HRC.SPEC.005', 'ACS.HRC.SPEC.006']"
        self.setglobal(__file__)
        self.runpy()
# ---------------------------------------------------------------------------
# countrate commissioning cases (machine-generated).
# One class per (obsmode, spectrum) pair, cross-checked against the ETC
# request id stored in self.etcid.  setUp() records the parameters and then
# runs the comparison through setglobal()/runpy() inherited from
# basecase.countrateCase.  self.subset presumably selects a reduced
# quick-run set -- NOTE(review): confirm in basecase.  Edit the generator,
# not this file, when changing cases.
# ---------------------------------------------------------------------------
class countrateCase1(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="em(3880.0,10.0,1.0E-16,flam)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase2(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w"
        self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.019"
        self.setglobal(__file__)
        self.runpy()
class countrateCase3(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.013"
        self.setglobal(__file__)
        self.runpy()
class countrateCase4(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.015"
        self.setglobal(__file__)
        self.runpy()
class countrateCase5(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(bb(10000),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.024"
        self.setglobal(__file__)
        self.runpy()
class countrateCase6(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(bb(10000),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.008"
        self.setglobal(__file__)
        self.runpy()
class countrateCase7(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,15400,0.0,3.9),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.006"
        self.setglobal(__file__)
        self.runpy()
class countrateCase8(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,15400,0.0,3.9),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.017"
        self.setglobal(__file__)
        self.runpy()
class countrateCase9(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,3500,0.0,4.6),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.008"
        self.setglobal(__file__)
        self.runpy()
class countrateCase10(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,3500,0.0,4.6),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.019"
        self.setglobal(__file__)
        self.runpy()
class countrateCase11(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,44500,0.0,5.0),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.005"
        self.setglobal(__file__)
        self.runpy()
class countrateCase12(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,44500,0.0,5.0),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.016"
        self.setglobal(__file__)
        self.runpy()
class countrateCase13(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(icat(k93models,44500,0.0,5.0),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.010"
        self.setglobal(__file__)
        self.runpy()
class countrateCase14(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,4850,0.0,1.1),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.009"
        self.setglobal(__file__)
        self.runpy()
class countrateCase15(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,4850,0.0,1.1),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.020"
        self.setglobal(__file__)
        self.runpy()
class countrateCase16(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.007"
        self.setglobal(__file__)
        self.runpy()
class countrateCase17(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.018"
        self.setglobal(__file__)
        self.runpy()
class countrateCase18(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.020"
        self.setglobal(__file__)
        self.runpy()
class countrateCase19(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.012"
        self.setglobal(__file__)
        self.runpy()
class countrateCase20(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
class countrateCase21(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.023"
        self.setglobal(__file__)
        self.runpy()
class countrateCase22(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w"
        self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.018"
        self.setglobal(__file__)
        self.runpy()
class countrateCase23(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.014"
        self.setglobal(__file__)
        self.runpy()
class countrateCase24(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.014"
        self.setglobal(__file__)
        self.runpy()
class countrateCase25(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.025"
        self.setglobal(__file__)
        self.runpy()
class countrateCase26(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(pl(4000.0,-1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.007"
        self.setglobal(__file__)
        self.runpy()
class countrateCase27(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.009"
        self.setglobal(__file__)
        self.runpy()
class countrateCase28(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w,coron"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),0,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.027"
        self.setglobal(__file__)
        self.runpy()
class countrateCase29(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w,coron"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),10,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.025"
        self.setglobal(__file__)
        self.runpy()
class countrateCase30(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,coron,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.029"
        self.setglobal(__file__)
        self.runpy()
class countrateCase31(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class countrateCase32(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w,coron"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.023"
        self.setglobal(__file__)
        self.runpy()
class countrateCase33(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.021"
        self.setglobal(__file__)
        self.runpy()
class countrateCase34(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4592"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.022"
        self.setglobal(__file__)
        self.runpy()
class countrateCase35(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class countrateCase36(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.A1.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase37(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
class countrateCase38(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3881"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.018"
        self.setglobal(__file__)
        self.runpy()
class countrateCase39(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4590"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.HRC.EXT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase40(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase41(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w,pol_v"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase42(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase43(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr459m#4590"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),22,vegamag)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class countrateCase44(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,coron,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),5,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.032"
        self.setglobal(__file__)
        self.runpy()
class countrateCase45(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w,coron"
        self.spectrum="rn(unit(1.0,flam),band(johnson,v),5,vegamag)"
        self.subset=False
        self.etcid="ACS.MISC.1.IMAG.026"
        self.setglobal(__file__)
        self.runpy()
class countrateCase46(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f330w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.0e-17,flam)"
        self.subset=False
        self.etcid="ACS.HRC.EXT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase47(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
        self.subset=False
        self.etcid="ACS.SBC.EXT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase48(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f125lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
        self.subset=False
        self.etcid="ACS.SBC.EXT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase49(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase50(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.E-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.EXT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase51(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr459m#4590"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.004"
        self.setglobal(__file__)
        self.runpy()
class countrateCase52(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr505n#5050"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase53(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,fr656n#6560"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class countrateCase54(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr1016n#10000"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.012"
        self.setglobal(__file__)
        self.runpy()
class countrateCase55(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase56(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr388n#3880"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.E-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.RAMP.004"
        self.setglobal(__file__)
        self.runpy()
class countrateCase57(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr423n#4230"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase58(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr459m#4620"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.013"
        self.setglobal(__file__)
        self.runpy()
class countrateCase59(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr462n#4620"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.003"
        self.setglobal(__file__)
        self.runpy()
class countrateCase60(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr505n#5000"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.004"
        self.setglobal(__file__)
        self.runpy()
class countrateCase61(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr551n#5500"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.005"
        self.setglobal(__file__)
        self.runpy()
class countrateCase62(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr601n#6000"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.006"
        self.setglobal(__file__)
        self.runpy()
class countrateCase63(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr647m#6470"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.014"
        self.setglobal(__file__)
        self.runpy()
class countrateCase64(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr656n#6500"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.007"
        self.setglobal(__file__)
        self.runpy()
class countrateCase65(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr716n#7100"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.008"
        self.setglobal(__file__)
        self.runpy()
class countrateCase66(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr782n#7900"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.009"
        self.setglobal(__file__)
        self.runpy()
class countrateCase67(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr853n#8500"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.010"
        self.setglobal(__file__)
        self.runpy()
class countrateCase68(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr914m#9000"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.015"
        self.setglobal(__file__)
        self.runpy()
class countrateCase69(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,fr931n#9300"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.e-15,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.RAMP.011"
        self.setglobal(__file__)
        self.runpy()
class countrateCase70(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f125lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-17,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class countrateCase71(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f220w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase72(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f250w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase73(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f330w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class countrateCase74(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f344n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.006"
        self.setglobal(__file__)
        self.runpy()
class countrateCase75(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f435w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.007"
        self.setglobal(__file__)
        self.runpy()
class countrateCase76(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f475w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.008"
        self.setglobal(__file__)
        self.runpy()
class countrateCase77(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f502n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.009"
        self.setglobal(__file__)
        self.runpy()
class countrateCase78(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f550m"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.010"
        self.setglobal(__file__)
        self.runpy()
class countrateCase79(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f555w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.011"
        self.setglobal(__file__)
        self.runpy()
class countrateCase80(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f606w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.012"
        self.setglobal(__file__)
        self.runpy()
class countrateCase81(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f625w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.013"
        self.setglobal(__file__)
        self.runpy()
class countrateCase82(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f658n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.014"
        self.setglobal(__file__)
        self.runpy()
class countrateCase83(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f775w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.015"
        self.setglobal(__file__)
        self.runpy()
class countrateCase84(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f850lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.016"
        self.setglobal(__file__)
        self.runpy()
class countrateCase85(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,hrc,f892n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.HRC.PT.IMAG.017"
        self.setglobal(__file__)
        self.runpy()
class countrateCase86(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f115lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase87(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f122m"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.SBC.PT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase88(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f140lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class countrateCase89(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f150lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.005"
        self.setglobal(__file__)
        self.runpy()
class countrateCase90(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,sbc,f165lp"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.SBC.PT.IMAG.006"
        self.setglobal(__file__)
        self.runpy()
class countrateCase91(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f435w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.001"
        self.setglobal(__file__)
        self.runpy()
class countrateCase92(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f475w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.002"
        self.setglobal(__file__)
        self.runpy()
class countrateCase93(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f502n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class countrateCase94(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f550m"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.004"
        self.setglobal(__file__)
        self.runpy()
class countrateCase95(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f555w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.005"
        self.setglobal(__file__)
        self.runpy()
class countrateCase96(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f606w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.006"
        self.setglobal(__file__)
        self.runpy()
class countrateCase97(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f625w"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.EXT.IMAG.003"
        self.setglobal(__file__)
        self.runpy()
class countrateCase98(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f658n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=False
        self.etcid="ACS.WFC.PT.IMAG.008"
        self.setglobal(__file__)
        self.runpy()
class countrateCase99(basecase.countrateCase):
    def setUp(self):
        self.obsmode="acs,wfc1,f660n"
        self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
        self.subset=True
        self.etcid="ACS.WFC.PT.IMAG.009"
        self.setglobal(__file__)
        self.runpy()
class countrateCase100(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f775w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.010"
self.setglobal(__file__)
self.runpy()
class countrateCase101(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f814w"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.011"
self.setglobal(__file__)
self.runpy()
class countrateCase102(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f850lp"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.WFC.EXT.IMAG.004"
self.setglobal(__file__)
self.runpy()
class countrateCase103(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f892n"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1e-18,flam)"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.013"
self.setglobal(__file__)
self.runpy()
class countrateCase104(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,fr388n#3880"
self.spectrum="spec(/grp/hst/cdbs//calspec/g191b2b_mod_004.fits)"
self.subset=False
self.etcid="ACS.HRC.PT.RAMP.011"
self.setglobal(__file__)
self.runpy()
class countrateCase105(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr388n#3880"
self.spectrum="spec(/grp/hst/cdbs//calspec/g191b2b_mod_004.fits)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.022"
self.setglobal(__file__)
self.runpy()
class countrateCase106(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f250w"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.003"
self.setglobal(__file__)
self.runpy()
class countrateCase107(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f330w"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.005"
self.setglobal(__file__)
self.runpy()
class countrateCase108(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,fr388n#3880"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.HRC.PT.RAMP.010"
self.setglobal(__file__)
self.runpy()
class countrateCase109(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f555w"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.016"
self.setglobal(__file__)
self.runpy()
class countrateCase110(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr388n#3880"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.021"
self.setglobal(__file__)
self.runpy()
class countrateCase111(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.029"
self.setglobal(__file__)
self.runpy()
class countrateCase112(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.003"
self.setglobal(__file__)
self.runpy()
class countrateCase113(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f555w,coron"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.023"
self.setglobal(__file__)
self.runpy()
class countrateCase114(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.HRC.EXT.RAMP.002"
self.setglobal(__file__)
self.runpy()
class countrateCase115(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,fr459m#4590"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.HRC.EXT.RAMP.001"
self.setglobal(__file__)
self.runpy()
class countrateCase116(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,fr459m#4592"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.022"
self.setglobal(__file__)
self.runpy()
class countrateCase117(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,fr505n#5050"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.HRC.PT.RAMP.002"
self.setglobal(__file__)
self.runpy()
class countrateCase118(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,fr656n#6560"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.HRC.PT.RAMP.003"
self.setglobal(__file__)
self.runpy()
class countrateCase119(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,g800l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.A1.SPEC.004"
self.setglobal(__file__)
self.runpy()
class countrateCase120(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,pr200l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.A1.SPEC.005"
self.setglobal(__file__)
self.runpy()
class countrateCase121(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,f115lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.004"
self.setglobal(__file__)
self.runpy()
class countrateCase122(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,pr110l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.SPEC.006"
self.setglobal(__file__)
self.runpy()
class countrateCase123(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f555w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.A1.IMAG.001"
self.setglobal(__file__)
self.runpy()
class countrateCase124(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f555w,pol_v"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.EXT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class countrateCase125(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f625w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.EXT.IMAG.003"
self.setglobal(__file__)
self.runpy()
class countrateCase126(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f850lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.EXT.IMAG.004"
self.setglobal(__file__)
self.runpy()
class countrateCase127(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr1016n#10000"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.012"
self.setglobal(__file__)
self.runpy()
class countrateCase128(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.017"
self.setglobal(__file__)
self.runpy()
class countrateCase129(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr388n#3881"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.018"
self.setglobal(__file__)
self.runpy()
class countrateCase130(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr423n#4230"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.002"
self.setglobal(__file__)
self.runpy()
class countrateCase131(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr459m#4590"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.EXT.RAMP.003"
self.setglobal(__file__)
self.runpy()
class countrateCase132(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr459m#4620"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.013"
self.setglobal(__file__)
self.runpy()
class countrateCase133(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr462n#4620"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.003"
self.setglobal(__file__)
self.runpy()
class countrateCase134(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr505n#5000"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.004"
self.setglobal(__file__)
self.runpy()
class countrateCase135(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr551n#5500"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.005"
self.setglobal(__file__)
self.runpy()
class countrateCase136(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr601n#6000"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.006"
self.setglobal(__file__)
self.runpy()
class countrateCase137(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr647m#6470"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.014"
self.setglobal(__file__)
self.runpy()
class countrateCase138(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr656n#6500"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.007"
self.setglobal(__file__)
self.runpy()
class countrateCase139(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr716n#7100"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.008"
self.setglobal(__file__)
self.runpy()
class countrateCase140(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr782n#7900"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.009"
self.setglobal(__file__)
self.runpy()
class countrateCase141(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr853n#8500"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.010"
self.setglobal(__file__)
self.runpy()
class countrateCase142(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr914m#9000"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.015"
self.setglobal(__file__)
self.runpy()
class countrateCase143(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,fr931n#9300"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.WFC.PT.RAMP.011"
self.setglobal(__file__)
self.runpy()
class countrateCase144(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)"
self.subset=False
self.etcid="ACS.A1.SPEC.001"
self.setglobal(__file__)
self.runpy()
class countrateCase145(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,pr110l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))*2.0"
self.subset=False
self.etcid="ACS.MISC.1.SPEC.008"
self.setglobal(__file__)
self.runpy()
class countrateCase146(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,pr130l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))*2.0"
self.subset=False
self.etcid="ACS.MISC.1.SPEC.009"
self.setglobal(__file__)
self.runpy()
class countrateCase147(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f220w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.001"
self.setglobal(__file__)
self.runpy()
class countrateCase148(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f250w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class countrateCase149(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f330w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.EXT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class countrateCase150(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f344n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.006"
self.setglobal(__file__)
self.runpy()
class countrateCase151(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f435w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.007"
self.setglobal(__file__)
self.runpy()
class countrateCase152(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f475w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.008"
self.setglobal(__file__)
self.runpy()
class countrateCase153(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f502n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.009"
self.setglobal(__file__)
self.runpy()
class countrateCase154(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f550m"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.010"
self.setglobal(__file__)
self.runpy()
class countrateCase155(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f555w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.011"
self.setglobal(__file__)
self.runpy()
class countrateCase156(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f606w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.012"
self.setglobal(__file__)
self.runpy()
class countrateCase157(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f625w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.013"
self.setglobal(__file__)
self.runpy()
class countrateCase158(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f658n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.014"
self.setglobal(__file__)
self.runpy()
class countrateCase159(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f775w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.015"
self.setglobal(__file__)
self.runpy()
class countrateCase160(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f850lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.016"
self.setglobal(__file__)
self.runpy()
class countrateCase161(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,f892n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.PT.IMAG.017"
self.setglobal(__file__)
self.runpy()
class countrateCase162(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,g800l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.SPEC.001"
self.setglobal(__file__)
self.runpy()
class countrateCase163(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,pr200l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.HRC.SPEC.002"
self.setglobal(__file__)
self.runpy()
class countrateCase164(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,f115lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.SBC.EXT.IMAG.001"
self.setglobal(__file__)
self.runpy()
class countrateCase165(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,f122m"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.SBC.PT.IMAG.001"
self.setglobal(__file__)
self.runpy()
class countrateCase166(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,f125lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.SBC.EXT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class countrateCase167(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,f140lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.SBC.PT.IMAG.004"
self.setglobal(__file__)
self.runpy()
class countrateCase168(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,f150lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.SBC.PT.IMAG.005"
self.setglobal(__file__)
self.runpy()
class countrateCase169(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,f165lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.SBC.PT.IMAG.006"
self.setglobal(__file__)
self.runpy()
class countrateCase170(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,pr110l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.MISC.1.SPEC.007"
self.setglobal(__file__)
self.runpy()
class countrateCase171(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,sbc,pr130l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.SBC.SPEC.003"
self.setglobal(__file__)
self.runpy()
class countrateCase172(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f435w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.001"
self.setglobal(__file__)
self.runpy()
class countrateCase173(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f475w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.002"
self.setglobal(__file__)
self.runpy()
class countrateCase174(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f502n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.003"
self.setglobal(__file__)
self.runpy()
class countrateCase175(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f550m"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.004"
self.setglobal(__file__)
self.runpy()
class countrateCase176(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f555w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.005"
self.setglobal(__file__)
self.runpy()
class countrateCase177(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f606w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.006"
self.setglobal(__file__)
self.runpy()
class countrateCase178(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f625w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.007"
self.setglobal(__file__)
self.runpy()
class countrateCase179(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f658n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.008"
self.setglobal(__file__)
self.runpy()
class countrateCase180(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f660n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.009"
self.setglobal(__file__)
self.runpy()
class countrateCase181(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f775w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.010"
self.setglobal(__file__)
self.runpy()
class countrateCase182(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f814w"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.011"
self.setglobal(__file__)
self.runpy()
class countrateCase183(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f850lp"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.012"
self.setglobal(__file__)
self.runpy()
class countrateCase184(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,f892n"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.PT.IMAG.013"
self.setglobal(__file__)
self.runpy()
class countrateCase185(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),22.7,vegamag)+(spec(el1215a.fits)+spec(el1302a.fits)+spec(el1356a.fits)+spec(el2471a.fits))"
self.subset=False
self.etcid="ACS.WFC.SPEC.001"
self.setglobal(__file__)
self.runpy()
class countrateCase186(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+rn(spec(Zodi.fits),band(johnson,v),30.0,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.035"
self.setglobal(__file__)
self.runpy()
class countrateCase187(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*1.0"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.037"
self.setglobal(__file__)
self.runpy()
class countrateCase188(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*1.25"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.036"
self.setglobal(__file__)
self.runpy()
class countrateCase189(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*2.0"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.038"
self.setglobal(__file__)
self.runpy()
class countrateCase190(basecase.countrateCase):
def setUp(self):
self.obsmode="acs,hrc,coron,fr388n#3880"
self.spectrum="spec(earthshine.fits)*0.5+spec(Zodi.fits)*4.0"
self.subset=False
self.etcid="ACS.MISC.1.IMAG.039"
self.setglobal(__file__)
self.runpy()
# ---------------------------------------------------------------------------
# SpecSourcerateSpecCase1 .. SpecSourcerateSpecCase15
#
# As with the countrate cases above in this file, these classes differ only
# in four attribute values, so they are generated from a table.  The
# resulting module-level classes are identical in name, base class, and
# setUp behavior to the hand-expanded originals.
# ---------------------------------------------------------------------------

# Shared spectrum expressions (reproduced byte-for-byte from the originals).
_EM1400 = "em(1400.0,10.0,1.0E-16,flam)"
_EM6500 = "em(6500.0,10.0,1.0E-16,flam)"
_BB10K = "rn(bb(10000),band(johnson,v),20,vegamag)"
_K93SUN = "rn(icat(k93models,5770,0.0,4.5),band(johnson,v),20,vegamag)"

# (case number, obsmode, spectrum, etcid) — one row per generated class.
_SPEC_SOURCERATE_CASES = (
    (1, "acs,sbc,pr110l", _EM1400, "ACS.SBC.SPEC.011"),
    (2, "acs,sbc,pr130l", _EM1400, "ACS.SBC.SPEC.012"),
    (3, "acs,hrc,pr200l", "em(4000.0,10.0,1.0E-16,flam)", "ACS.HRC.SPEC.011"),
    (4, "acs,hrc,g800l", _EM6500, "ACS.HRC.SPEC.010"),
    (5, "acs,wfc1,g800l", _EM6500, "ACS.WFC.SPEC.005"),
    (6, "acs,hrc,g800l", _BB10K, "ACS.HRC.SPEC.008"),
    (7, "acs,hrc,pr200l", _BB10K, "ACS.HRC.SPEC.007"),
    (8, "acs,sbc,pr110l", _BB10K, "ACS.SBC.SPEC.007"),
    (9, "acs,sbc,pr130l", _BB10K, "ACS.SBC.SPEC.008"),
    (10, "acs,wfc1,g800l", _BB10K, "ACS.WFC.SPEC.003"),
    (11, "acs,hrc,g800l", _K93SUN, "ACS.HRC.SPEC.009"),
    (12, "acs,hrc,pr200l", _K93SUN, "ACS.HRC.SPEC.012"),
    (13, "acs,sbc,pr110l", _K93SUN, "ACS.SBC.SPEC.009"),
    (14, "acs,sbc,pr130l", _K93SUN, "ACS.SBC.SPEC.010"),
    (15, "acs,wfc1,g800l", _K93SUN, "ACS.WFC.SPEC.004"),
)

# Only this case had subset=True in the hand-expanded version.
_SPEC_SOURCERATE_SUBSET_CASES = frozenset((3,))


def _make_spec_sourcerate_case(num, obsmode, spectrum, etcid, subset):
    """Build SpecSourcerateSpecCase<num> and install it at module scope.

    The generated setUp assigns the same attributes in the same order, then
    calls setglobal(__file__) and runpy(), exactly as the original
    hand-written classes did.
    """
    def setUp(self):
        self.obsmode = obsmode
        self.spectrum = spectrum
        self.subset = subset
        self.etcid = etcid
        self.setglobal(__file__)
        self.runpy()

    name = "SpecSourcerateSpecCase%d" % num
    # type() at module level gives the class the correct __module__ and the
    # same name the original `class` statement produced.
    globals()[name] = type(name, (basecase.SpecSourcerateSpecCase,),
                           {"setUp": setUp})


for _snum, _sobsmode, _sspectrum, _setcid in _SPEC_SOURCERATE_CASES:
    _make_spec_sourcerate_case(_snum, _sobsmode, _sspectrum, _setcid,
                               _snum in _SPEC_SOURCERATE_SUBSET_CASES)
class SpecSourcerateSpecCase16(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,g800l"
self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
self.subset=True
self.etcid="ACS.HRC.SPEC.005"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase17(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,pr200l"
self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
self.subset=False
self.etcid="ACS.HRC.SPEC.006"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase18(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr110l"
self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
self.subset=False
self.etcid="ACS.SBC.SPEC.005"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase19(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr130l"
self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
self.subset=False
self.etcid="ACS.SBC.SPEC.006"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase20(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="rn(pl(4000.0,-1.0,flam),band(johnson,v),20,vegamag)"
self.subset=False
self.etcid="ACS.WFC.SPEC.002"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase21(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,g800l"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=False
self.etcid="ACS.A1.SPEC.004"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase22(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,pr200l"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=False
self.etcid="ACS.A1.SPEC.005"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase23(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr110l"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.SPEC.006"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase24(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr130l"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=False
self.etcid="ACS.MISC.1.SPEC.009"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase25(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="rn(unit(1.0,flam),band(johnson,v),15,vegamag)"
self.subset=False
self.etcid="ACS.A1.SPEC.001"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase26(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,g800l"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
self.subset=False
self.etcid="ACS.HRC.SPEC.001"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase27(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,pr200l"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
self.subset=False
self.etcid="ACS.HRC.SPEC.002"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase28(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr110l"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
self.subset=False
self.etcid="ACS.SBC.SPEC.002"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase29(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr130l"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
self.subset=False
self.etcid="ACS.SBC.SPEC.003"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase30(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="rn(unit(1.0,flam),box(5500.0,1.0),1.5e-16,flam)"
self.subset=False
self.etcid="ACS.WFC.SPEC.001"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase31(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,g800l"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.HRC.SPEC.003"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase32(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,hrc,pr200l"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.HRC.SPEC.004"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase33(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr110l"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.SBC.SPEC.001"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase34(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,sbc,pr130l"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.SBC.SPEC.004"
self.setglobal(__file__)
self.runpy()
class SpecSourcerateSpecCase35(basecase.SpecSourcerateSpecCase):
def setUp(self):
self.obsmode="acs,wfc1,g800l"
self.spectrum="spec(/grp/hst/cdbs//calspec/gd71_mod_005.fits)"
self.subset=False
self.etcid="ACS.WFC.SPEC.006"
self.setglobal(__file__)
self.runpy()
# Entry point: 'debug' on the command line runs the suite under the
# test utility's debug harness, otherwise run everything at verbosity 2.
if __name__ == '__main__':
    if 'debug' in sys.argv:
        testutil.debug(__name__)
    else:
        testutil.testall(__name__,2)
# Generation tally for this file (kept from the original generator run):
#calcspec:40 - 33 dup =7
#thermback:0 - 0 dup =0
#calcphot:190 - 0 dup =190
#countrate:190 - 0 dup =190
#SpecSourcerateSpec:35 - 0 dup =35
|
984,338 | 9fa46aca34208385241bf061501da38204eb5e15 |
log = logging.getLogger(__name__)
|
984,339 | 43ba5409a651a3d9ceffbe57978a72707dc29445 | # Madeleine Nightengale-Luhan
# CSCI 101 - Section A
# Python Lab 1B
# References: No One
# Time: 30 Minutes
F0 = 0
F1 = 1
F2 = F0 + F1
F3 = F1 + F2
F4 = F2 + F3
F5 = F3 + F4
F6 = F4 + F5
F7 = F5 + F6
F8 = F6 + F7
F9 = F7 + F8
print('F0 = ', F0)
print('F1 = ', F1)
print('F2 = ', F2)
print('F3 = ', F3)
print('F4 = ', F4)
print('F5 = ', F5)
print('F6 = ', F6)
print('F7 = ', F7)
print('F8 = ', F8)
print('F9 = ', F9)
|
984,340 | a8826aae88da4ae11fea85aec93570fc819f1e46 | # coding: utf-8
from django.db.migrations.autodetector import MigrationAutodetector
from operation import PartitionOperation
def generate_altered_db_table_new(self):
    """Replacement for MigrationAutodetector.generate_altered_db_table.

    Runs the original detection first (saved as
    ``generate_altered_db_table_orig`` by patch_autodetector), then the
    partition-option detection on top of it.
    """
    self.generate_altered_db_table_orig()
    self.generate_altered_partition()
def generate_altered_partition(self):
    """Emit a PartitionOperation for every kept model whose partition
    option changed between the old and new project state."""
    option_name = PartitionOperation.option_name
    for app_label, model_name in sorted(self.kept_model_keys):
        # Follow renames so old/new states describe the same model.
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        new_model_state = self.to_state.models[app_label, model_name]
        # A missing/falsy option is normalized to a (None, None, None)
        # triple so "unset" compares equal on both sides.
        old_value = old_model_state.options.get(option_name) or (None, None, None)
        new_value = new_model_state.options.get(option_name) or (None, None, None)
        if old_value != new_value:
            self.add_operation(app_label, PartitionOperation(model_name, *new_value))
def patch_autodetector():
    """Monkey-patch Django's MigrationAutodetector to also detect
    partition changes (see generate_altered_partition).

    Made idempotent: previously a second call re-saved the already
    patched method as ``generate_altered_db_table_orig``, after which
    generate_altered_db_table_new called itself and recursed forever.
    """
    if getattr(MigrationAutodetector, '_partition_patch_applied', False):
        return
    MigrationAutodetector._partition_patch_applied = True
    MigrationAutodetector.generate_altered_partition = generate_altered_partition
    # NOTE(review): _detect_changes_orig is saved but _detect_changes is
    # never replaced -- looks like a leftover; confirm before removing.
    MigrationAutodetector._detect_changes_orig = MigrationAutodetector._detect_changes
    MigrationAutodetector.generate_altered_db_table_orig = MigrationAutodetector.generate_altered_db_table
    MigrationAutodetector.generate_altered_db_table = generate_altered_db_table_new
984,341 | bf9705106dd7dca9b5359997a338c3ec49fc6fdb | from . import message_wizard
from . import wizard_assign_mail |
984,342 | 1b988b458f303c3d0ecc2edf59e0ec1e61304e4c |
class Space:
    """One cell of the battle field.

    ``entity`` encodes the occupant: -1 wall, 0 open, 1 goblin, 2 elf
    (codes as used by the map-parsing loop below).
    """
    def __init__(self, entity):
        # Bug fix: the original did ``self.type = type``, storing the
        # *builtin* ``type`` -- no such constructor argument exists and
        # nothing in this file ever reads ``.type``, so it is dropped.
        self.entity = entity
        # Occupying Fighter; attached later via setFighter().  Starting
        # at None avoids AttributeError on unpopulated cells.
        self.fighter = None
    def setSpace(self, entity):
        """Replace the occupant code of this cell."""
        self.entity = entity
    def setFighter(self, fighter):
        """Attach the Fighter object standing on this cell."""
        self.fighter = fighter
class Field:
    """Rectangular grid of Space cells, addressed as spaces[x][y]."""
    def __init__(self, xSize, ySize):
        self.xSize = xSize
        self.ySize = ySize
        # The placeholder 0 marks cells not yet populated via addSpace().
        self.spaces = [[0 for _ in range(ySize)] for _ in range(xSize)]
    def addSpace(self, x, y, space):
        """Install a Space object at (x, y)."""
        self.spaces[x][y] = space
    def changeSpace(self, x, y, entity):
        """Update the occupant code of the Space already at (x, y)."""
        self.spaces[x][y].setSpace(entity)
class Fighter:
    """A combat unit; ``entity`` is 1 for a goblin, 2 for an elf."""
    def __init__(self, xPos, yPos, entity):
        self.enemyAdjacent = False
        self.hp = 200
        self.xPos = xPos
        self.yPos = yPos
        self.entity = entity
        self.alive = True
    def __eq__(self, other):
        # Identity is positional only: fighters on the same cell compare
        # equal regardless of team or hit points.
        return (self.xPos, self.yPos) == (other.xPos, other.yPos)
    def __lt__(self, other):
        # "Reading order": top-to-bottom rows first, then left-to-right.
        return (self.yPos, self.xPos) < (other.yPos, other.xPos)
def dijkstra(start):
    """Shortest-path search from ``start`` over the open cells of the
    module-level ``field``.

    Returns a dict mapping each open cell (x, y) to a list whose first
    element is the distance from ``start`` and whose remaining elements
    are the cells of one shortest path, with ties broken toward
    "reading order" (smaller y, then smaller x) on the first step.

    NOTE(review): relies on the module-level globals ``field`` and
    ``adjacencyMap``; the caller rebuilds ``adjacencyMap`` each turn
    before calling this -- confirm that stays true for new call sites.
    """
    # 100000 acts as "infinity" for unreached open cells.
    distanceDict = {}
    for i in range(0, field.xSize):
        for j in range(0, field.ySize):
            if (field.spaces[i][j].entity == 0):
                distanceDict[(i,j)] = [100000,]
    workSet = dict()
    workSet[start] = 0
    # The start cell is occupied (not in adjacencyMap), so its open
    # neighbours are gathered by hand for the first expansion.
    neighbours = []
    i = start[0]
    j = start[1]
    if field.spaces[i][j-1].entity == 0:
        neighbours.append((i,j-1))
    if field.spaces[i+1][j].entity == 0:
        neighbours.append((i+1,j))
    if field.spaces[i-1][j].entity == 0:
        neighbours.append((i-1,j))
    if field.spaces[i][j+1].entity == 0:
        neighbours.append((i,j+1))
    n = 0
    while(len(workSet) > 0):
        # Pop the unsettled cell with the smallest tentative distance.
        coord = min(workSet, key=workSet.get)
        distance = workSet.pop(coord)
        if n > 0:
            for neighbour in adjacencyMap[coord]:
                if distance + 1 < distanceDict[neighbour][0]:
                    distanceDict[neighbour][0] = distance + 1
                    workSet[neighbour] = distance+1
                    distanceDict[neighbour][1:] = distanceDict[coord][1:]
                    distanceDict[neighbour].append(neighbour)
                elif distance + 1 == distanceDict[neighbour][0]:
                    # Equal-length path: keep whichever first step comes
                    # earlier in reading order.
                    newfirstStep = distanceDict[coord][1]
                    prevfirstStep = distanceDict[neighbour][1]
                    if ((newfirstStep[1] < prevfirstStep[1]) or ((newfirstStep[1] == prevfirstStep[1]) and (newfirstStep[0] < prevfirstStep[0]))):
                        distanceDict[neighbour][0] = distance + 1
                        workSet[neighbour] = distance+1
                        distanceDict[neighbour][1:] = distanceDict[coord][1:]
                        distanceDict[neighbour].append(neighbour)
        else:
            # First expansion uses the hand-built neighbour list above.
            for neighbour in neighbours:
                if distance + 1 < distanceDict[neighbour][0]:
                    distanceDict[neighbour][0] = distance + 1
                    workSet[neighbour] = distance+1
                    if coord != start:
                        distanceDict[neighbour][1:] = distanceDict[coord][1:]
                    distanceDict[neighbour].append(neighbour)
        n+=1
    return distanceDict
# Read the battle map.  Each row of `data` keeps its trailing newline,
# so xCount (taken from the last line read) includes that newline --
# the extra column becomes a border of unpopulated cells.
# Fix: use a context manager so the file handle is always closed
# (the original left it open for the lifetime of the script).
data = []
yCount = 0
with open("day15data.txt", "r") as file:
    for line in file:
        xCount = len(line)
        data.append(line)
        yCount += 1
field = Field(xCount, yCount)
# Part-2 style search: replay the whole battle with increasing elf
# attack power until the elves win without a single loss.
for att in range (4, 100):
    fighters = []
    elvesAlive = 0
    goblinsAlive = 0
    y = -1
    # Rebuild the field and fighter roster from the raw map text.
    for line in data:
        y+=1
        x= -1
        for ch in line:
            x+=1
            if ch == '#':
                spa = Space(-1)
                field.addSpace(x, y, spa)
            elif ch == ".":
                spa = Space(0)
                field.addSpace(x, y, spa)
            elif ch == "G":
                spa = Space(1)
                field.addSpace(x, y, spa)
                figh = Fighter(x, y, 1)
                fighters.append(figh)
                field.spaces[x][y].setFighter(figh)
                goblinsAlive+=1
            elif ch == "E":
                spa = Space(2)
                field.addSpace(x, y, spa)
                figh = Fighter(x, y, 2)
                fighters.append(figh)
                field.spaces[x][y].setFighter(figh)
                elvesAlive+=1
    distance = 10000
    route = []
    allDead = False
    elfDied = False
    counter = 0
    elfDead = False
    # One iteration of this while == one combat round.
    # NOTE(review): `allDead` and `elfDied` are never set True; the loop
    # exits via `break` only -- confirm the conditions are intentional.
    while allDead == False and elfDied == False:
        if elfDead:
            print("Elf died for " + str(att) + "attack")
            break
        elfAttack = att
        """
        for i in range(0,field.xSize):
            output = ""
            for j in range(0,field.ySize):
                sp = field.spaces[j][i]
                if sp.entity == -1:
                    output+='#'
                elif sp.entity == 0:
                    output+='.'
                elif sp.entity == 1:
                    output+='G'
                elif sp.entity == 2:
                    output+='E'
            print(output)
            print('\n')
        """
        if (counter > 90):
            breakpoint = 1
        # fighters = sorted(fighters)
        for j,fig in enumerate(fighters):
            #deleteIndices = []
            #for i,f in enumerate(fighters):
            #    if f.hp < 1:
            #        deleteIndices.append(i)
            #deleteIndices = reversed(deleteIndices)
            #for i in deleteIndices:
            #    fighters.pop(i)
            f = fighters[j]
            if f.entity == 2:
                breakpoints = 2
            if f.hp > 0:
                # Collect every open square adjacent to an enemy:
                # squares next to elves feed goblinDestinations and
                # squares next to goblins feed elfDestinations.
                goblinDestinations = []
                elfDestinations = []
                for a,g in enumerate(fighters):
                    if g.hp > 0:
                        pos = [g.xPos, g.yPos]
                        xPos = g.xPos
                        yPos = g.yPos
                        if g.entity == 2:
                            if field.spaces[xPos+1][yPos].entity == 0 and (((xPos+1, yPos) in goblinDestinations) == False):
                                goblinDestinations.append((xPos+1, yPos))
                            if field.spaces[xPos-1][yPos].entity == 0 and (((xPos-1, yPos) in goblinDestinations) == False):
                                goblinDestinations.append((xPos-1, yPos))
                            if field.spaces[xPos][yPos+1].entity == 0 and (((xPos, yPos+1) in goblinDestinations) == False):
                                goblinDestinations.append((xPos, yPos+1))
                            if field.spaces[xPos][yPos-1].entity == 0 and (((xPos, yPos-1) in goblinDestinations) == False):
                                goblinDestinations.append((xPos, yPos-1))
                        elif g.entity == 1:
                            if field.spaces[xPos+1][yPos].entity == 0 and (((xPos+1, yPos) in elfDestinations) == False):
                                elfDestinations.append((xPos+1, yPos))
                            if field.spaces[xPos-1][yPos].entity == 0 and (((xPos-1, yPos) in elfDestinations) == False):
                                elfDestinations.append((xPos-1, yPos))
                            if field.spaces[xPos][yPos+1].entity == 0 and (((xPos, yPos+1) in elfDestinations) == False):
                                elfDestinations.append((xPos, yPos+1))
                            if field.spaces[xPos][yPos-1].entity == 0 and (((xPos, yPos-1) in elfDestinations) == False):
                                elfDestinations.append((xPos, yPos-1))
                # Rebuild the open-cell adjacency map for dijkstra();
                # the (0,0) seed entry only exists to fix the dict shape.
                adjacencyMap = {(0,0):[(0,0),],}
                #Dijkstra, construction adjacency map for the first time:
                #key should be [x,y], values should be [[x,y],[x,y],etc]
                for i in range(0, field.xSize):
                    for j in range(0, field.ySize):
                        spa = field.spaces[i][j]
                        if spa.entity == 0:
                            adjacencyMap[(i,j)] = []
                            if field.spaces[i][j-1].entity == 0:
                                adjacencyMap[(i,j)].append((i,j-1))
                            if field.spaces[i+1][j].entity == 0:
                                adjacencyMap[(i,j)].append((i+1,j))
                            if field.spaces[i-1][j].entity == 0:
                                adjacencyMap[(i,j)].append((i-1,j))
                            if field.spaces[i][j+1].entity == 0:
                                adjacencyMap[(i,j)].append((i,j+1))
                            #if (len(adjacencyMap[(i,j)]) == 0):
                            #    adjacencyMap.pop((i,j))
                adjacencyMap.pop((0,0))
                if f.entity == 2:
                    breakpoint = 2
                minDistance = 10000000
                distances = dijkstra((f.xPos, f.yPos))
                entity = 0
                selected = 0
                fight = 0
                selectedHp = 1000
                # Goblins always hit for 3; elves use the attack power
                # being searched for in the outer `att` loop.
                if f.entity == 1:
                    attack = 3
                elif f.entity == 2:
                    attack = elfAttack
                # Attack phase: entity codes sum to 3 only for a
                # goblin/elf pair, i.e. an adjacent enemy.  Targets are
                # scanned in reading order, picking the lowest hp.
                if field.spaces[f.xPos][f.yPos-1].entity + f.entity == 3:
                    if field.spaces[f.xPos][f.yPos-1].fighter.hp > 0:
                        selected = 1
                        selectedHp = field.spaces[f.xPos][f.yPos-1].fighter.hp
                if field.spaces[f.xPos-1][f.yPos].entity + f.entity == 3:
                    if field.spaces[f.xPos-1][f.yPos].fighter.hp < selectedHp and field.spaces[f.xPos-1][f.yPos].fighter.hp > 0:
                        selected = 2
                        selectedHp = field.spaces[f.xPos-1][f.yPos].fighter.hp
                if field.spaces[f.xPos+1][f.yPos].entity + f.entity == 3:
                    if field.spaces[f.xPos+1][f.yPos].fighter.hp < selectedHp and field.spaces[f.xPos+1][f.yPos].fighter.hp > 0:
                        selected = 3
                        selectedHp = field.spaces[f.xPos+1][f.yPos].fighter.hp
                if field.spaces[f.xPos][f.yPos+1].entity + f.entity == 3:
                    if field.spaces[f.xPos][f.yPos+1].fighter.hp < selectedHp and field.spaces[f.xPos][f.yPos+1].fighter.hp > 0:
                        selected = 4
                        selectedHp = field.spaces[f.xPos][f.yPos+1].fighter.hp
                if selected == 1:
                    field.spaces[f.xPos][f.yPos-1].fighter.hp -= attack
                    if field.spaces[f.xPos][f.yPos-1].fighter.hp <= 0:
                        if field.spaces[f.xPos][f.yPos-1].fighter.entity == 2:
                            elfDead = True
                        field.spaces[f.xPos][f.yPos-1].entity = 0
                elif selected == 2:
                    field.spaces[f.xPos-1][f.yPos].fighter.hp -= attack
                    if field.spaces[f.xPos-1][f.yPos].fighter.hp <= 0:
                        if field.spaces[f.xPos-1][f.yPos].fighter.entity == 2:
                            elfDead = True
                        field.spaces[f.xPos-1][f.yPos].entity = 0
                elif selected == 3:
                    field.spaces[f.xPos+1][f.yPos].fighter.hp -= attack
                    if field.spaces[f.xPos+1][f.yPos].fighter.hp <= 0:
                        if field.spaces[f.xPos+1][f.yPos].fighter.entity == 2:
                            elfDead = True
                        field.spaces[f.xPos+1][f.yPos].entity = 0
                elif selected == 4:
                    field.spaces[f.xPos][f.yPos+1].fighter.hp -= attack
                    if field.spaces[f.xPos][f.yPos+1].fighter.hp <= 0:
                        if field.spaces[f.xPos][f.yPos+1].fighter.entity == 2:
                            elfDead = True
                        field.spaces[f.xPos][f.yPos+1].entity = 0
                else:
                    # No adjacent enemy: move one step along the
                    # shortest path to the nearest in-range square
                    # (reading-order tie-break), then try to attack.
                    route = []
                    finaldest = (0,0)
                    if f.entity == 2:
                        entity = 2
                        for dest in elfDestinations:
                            if dest in distances:
                                dist = distances[dest][0]
                                if dist < minDistance:
                                    minDistance = dist
                                    route = distances[dest][1:]
                                    finaldest = dest
                                elif(dist == minDistance and (dest[1] < finaldest[1] or (dest[1] == finaldest[1] and dest[0] < finaldest[0]))):
                                    minDistance = dist
                                    route = distances[dest][1:]
                                    finaldest = dest
                    elif f.entity == 1:
                        entity = 1
                        for dest in goblinDestinations:
                            if dest in distances:
                                dist = distances[dest][0]
                                if (dist < minDistance):
                                    minDistance = dist
                                    route = distances[dest][1:]
                                    finaldest = dest
                                elif(dist == minDistance and (dest[1] < finaldest[1] or (dest[1] == finaldest[1] and dest[0] < finaldest[0]))):
                                    minDistance = dist
                                    route = distances[dest][1:]
                                    finaldest = dest
                    if len(route) == 0:
                        continue
                    moveTo = route[0]
                    moveToX = moveTo[0]
                    moveToY = moveTo[1]
                    oldX = f.xPos
                    oldY = f.yPos
                    #updateAdj((f.xPos, f.yPos), (moveToX, moveToY), entity)
                    field.spaces[oldX][oldY].containsEntity = False
                    field.spaces[f.xPos][f.yPos].entity = 0
                    field.spaces[moveToX][moveToY].entity = entity
                    field.spaces[moveToX][moveToY].containsEntity = True
                    f.xPos = moveToX
                    f.yPos = moveToY
                    field.spaces[f.xPos][f.yPos].setFighter(f)
                    # Post-move attack: same target selection as above.
                    selected = 0
                    selectedHp = 1000
                    if f.entity == 1:
                        attack = 3
                    elif f.entity == 2:
                        attack = elfAttack
                    if field.spaces[f.xPos][f.yPos-1].entity + f.entity == 3:
                        if field.spaces[f.xPos][f.yPos-1].fighter.hp > 0:
                            selected = 1
                            selectedHp = field.spaces[f.xPos][f.yPos-1].fighter.hp
                    if field.spaces[f.xPos-1][f.yPos].entity + f.entity == 3:
                        if field.spaces[f.xPos-1][f.yPos].fighter.hp < selectedHp and field.spaces[f.xPos-1][f.yPos].fighter.hp > 0:
                            selected = 2
                            selectedHp = field.spaces[f.xPos-1][f.yPos].fighter.hp
                    if field.spaces[f.xPos+1][f.yPos].entity + f.entity == 3:
                        if field.spaces[f.xPos+1][f.yPos].fighter.hp < selectedHp and field.spaces[f.xPos+1][f.yPos].fighter.hp > 0:
                            selected = 3
                            selectedHp = field.spaces[f.xPos+1][f.yPos].fighter.hp
                    if field.spaces[f.xPos][f.yPos+1].entity + f.entity == 3:
                        if field.spaces[f.xPos][f.yPos+1].fighter.hp < selectedHp and field.spaces[f.xPos][f.yPos+1].fighter.hp > 0:
                            selected = 4
                            selectedHp = field.spaces[f.xPos][f.yPos+1].fighter.hp
                    if selected == 1:
                        field.spaces[f.xPos][f.yPos-1].fighter.hp -= attack
                        if field.spaces[f.xPos][f.yPos-1].fighter.hp <= 0:
                            if field.spaces[f.xPos][f.yPos-1].fighter.entity == 2:
                                elfDead = True
                            field.spaces[f.xPos][f.yPos-1].entity = 0
                    elif selected == 2:
                        field.spaces[f.xPos-1][f.yPos].fighter.hp -= attack
                        if field.spaces[f.xPos-1][f.yPos].fighter.hp <= 0:
                            if field.spaces[f.xPos-1][f.yPos].fighter.entity == 2:
                                elfDead = True
                            field.spaces[f.xPos-1][f.yPos].entity = 0
                    elif selected == 3:
                        field.spaces[f.xPos+1][f.yPos].fighter.hp -= attack
                        if field.spaces[f.xPos+1][f.yPos].fighter.hp <= 0:
                            if field.spaces[f.xPos+1][f.yPos].fighter.entity == 2:
                                elfDead = True
                            field.spaces[f.xPos+1][f.yPos].entity = 0
                    elif selected == 4:
                        field.spaces[f.xPos][f.yPos+1].fighter.hp -= attack
                        if field.spaces[f.xPos][f.yPos+1].fighter.hp <= 0:
                            if field.spaces[f.xPos][f.yPos+1].fighter.entity == 2:
                                elfDead = True
                            field.spaces[f.xPos][f.yPos+1].entity = 0
        # End-of-round bookkeeping: tally survivors and stop when one
        # side has been wiped out.
        elves = 0
        goblins = 0
        totalhp = 0
        fighters = sorted(fighters)
        for f in fighters:
            #print("Fighter at: " + str(f.xPos) + ", " + str(f.yPos))
            if f.entity==1 and f.hp>0:
                #print("goblin hp: " + str(f.hp))
                totalhp+=f.hp
                goblins+=1
            elif f.entity==2 and f.hp>0:
                #print("elf hp: " + str(f.hp))
                totalhp+=f.hp
                elves+=1
        if elves == 0 or goblins == 0:
            print("nr of rounds: " + str(counter))
            print("elves remaining: " + str(elves))
            print("goblins remaining: " + str(goblins))
            print("Hp remaining: " + str(totalhp))
            print("Multiplied: " + str(totalhp*counter))
            break
        counter+=1
|
984,343 | e15d977735c1cb758d61539acc428bbb4d23641f | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-04-12 19:42
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration: creates the ServicoEvento model.
# NOTE(review): the accented strings below are mojibake (e.g.
# 'Descriรงรฃo' for 'Descrição').  They are part of the recorded
# migration state, so they are deliberately left untouched here;
# fixing them would require a follow-up migration.
class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('servico', '0012_tipoevento_loaddata'),
    ]
    operations = [
        migrations.CreateModel(
            name='ServicoEvento',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(blank=True, null=True, verbose_name='Criado em')),
                ('descricao', models.CharField(blank=True, max_length=2000, null=True, verbose_name='Descriรงรฃo')),
                ('equipe', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='servico.EquipeAtendimento', verbose_name='Equipe de atendimento')),
                ('evento', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='servico.TipoEvento')),
                ('nivel', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='servico.NivelAtendimento', verbose_name='Nรญvel de etendimento')),
                ('numero', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='servico.NumeroDocumento', verbose_name='Nรบmero')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Usuรกrio')),
            ],
            options={
                'verbose_name': 'Evento relacionado a serviรงos',
                'verbose_name_plural': 'Eventos relacionados a serviรงos',
                'db_table': 'fo2_serv_servico_evento',
            },
        ),
    ]
|
984,344 | c57d31128bcd9699553af7007fb9b9ce59473119 | #!/usr/bin/env python3
import json
#write function readcurrency(filename)
# read file
#Create a dictionary
def readcurrency(filename):
    """Parse a space-separated currency file into a list of dicts.

    Each input line is expected to look like ``SYMBOL RATE``; the result
    is one ``{"symbole": ..., "rate": ...}`` dict per line, with both
    values kept as strings.
    """
    list_data = []
    with open(filename) as f:
        for line in f:
            fields = line.strip('\n').split(" ")
            # Bug fix: the original created ONE dict before the loop and
            # appended that same object for every line, so all entries
            # ended up aliasing the last line's values.  A fresh dict
            # per line gives one independent entry per line.
            list_data.append({"symbole": fields[0], "rate": fields[1]})
    return list_data
def save(filename, data):
    """Serialize *data* to *filename* as JSON under a top-level "data" key."""
    payload = {"data": data}
    with open(filename, "w") as js_file:
        json.dump(payload, js_file, indent=2)
# Entry point: parse currency.txt and persist it as currency.json.
# NOTE(review): the first readcurrency() result is discarded and the
# file is parsed twice -- presumably a debugging leftover.
readcurrency("currency.txt")
save("currency.json", readcurrency("currency.txt"))
|
984,345 | e938b0d5bd5aac636117de8f4f21f4bba16c5835 | # best test
# type "python hw2_best_test.py {X_test} {hw2_best.csv}" to execute
import sys
import time
import csv
import warnings
import numpy as np
import pandas as pd
from sklearn.externals import joblib
# =============================
warnings.filterwarnings('ignore')
# =============================
def feature_scaling(x_data, x_mean, x_std):
    '''
    Standardize x_data column-wise as (x - mean) / std.

    Returns: numpy array, same shape as x_data
    '''
    scaled = (x_data - x_mean) / x_std
    # A zero std yields nan (0/0) or +/-inf; those entries carry no
    # information, so every non-finite value is forced to zero
    # (matching the original nan/inf masking).
    return np.where(np.isfinite(scaled), scaled, 0)
def load_data(X_test):
    '''
    Load the test-feature CSV (header row expected) as a float64 matrix.

    Returns: x_test (numpy array)
    '''
    frame = pd.read_csv(X_test)
    return frame.to_numpy(dtype='float64')
def convert_probability_to_classes(result):
    '''
    Threshold probabilities at 0.5 into hard 0/1 class labels.

    Improvement: the original built a zeros array with len(result) and
    masked it, which only worked for 1-D input; the direct boolean cast
    is equivalent for 1-D and also handles any array shape.

    Returns: y (numpy int32 array, same shape as result)
    '''
    return (result >= 0.5).astype('int32')
def predict_result(model, x_test):
    """Run the model on x_test and map its probabilities to 0/1 labels."""
    raw = model.predict(x_test)
    # Some estimators return an (n, 1) column vector; flatten it first.
    flat = raw.ravel() if raw.ndim == 2 else raw
    return convert_probability_to_classes(flat)
def output_result(result, output):
    '''
    Write the predictions to *output* as "id,label" rows; ids start at 1.
    '''
    with open(output, 'w') as fout:
        writer = csv.writer(fout, delimiter = ',', lineterminator = '\n')
        writer.writerow(['id', 'label'])
        writer.writerows([idx, label] for idx, label in enumerate(result, start=1))
def main(script, X_test, output):
    """Load the persisted model and scaling statistics from
    ./hw2_best_model/, predict labels for X_test, and write the
    id/label CSV to *output*.  *script* is argv[0], unused."""
    # load model
    model = joblib.load('./hw2_best_model/model.pkl')
    # load testing data
    x_test = load_data(X_test)
    # feature scaling (must use the training-time mean/std, not the
    # test set's own statistics)
    x_mean = np.load('./hw2_best_model/x_mean.npy')
    x_std = np.load('./hw2_best_model/x_std.npy')
    x_test = feature_scaling(x_test, x_mean, x_std)
    # testing
    result = predict_result(model, x_test)
    # output the result
    output_result(result, output)
# =============================
if __name__ == '__main__':
    # Time the whole run; sys.argv is unpacked as (script, X_test, output).
    t = time.perf_counter()
    main(*sys.argv)
    t = time.perf_counter() - t
    print('Test time: %.3f seconds' %t)
984,346 | 867f799ad31bcdc40b1159fe6d014f8dbf0526ba | #!/usr/bin/env python
'''
Command line tool(s) for Strava.
'''
# 2017-07-29 - Shaun L. Cloherty <s.cloherty@ieee.org>
import os, getpass, sys, time, random, webbrowser;
from datetime import datetime;
from argparse import ArgumentParser;
import argparse;
from backend import client, app; # strava-tools backend
import logging;
def auth(client_id,client_secret,port):
    """Run the Strava OAuth flow: open the authorization URL in the
    default browser, then start the local flask backend on *port* to
    receive the redirect carrying the access token."""
    # perform Strava OAuth authorization...
    #
    # note: passing the client_id and client_secret in the url
    # isn't recommended!
    url = client.authorization_url(
        client_id = client_id,
        # redirect_uri = 'http://localhost:{0}/auth?client_id={1}&client_secret={2}'.format(port,client_id,client_secret), # nasty hack, bad idea!
        redirect_uri = 'http://localhost:{0}/auth'.format(port),
        scope = ['read_all','activity:write']);
    # open url in the default browser
    webbrowser.open_new_tab(url);
    # launch the flask backend to catch the OAuth redirect from strava.com
    app.run(port = port);
def gearCmd(client,args):
    """Tag activities with args.gearId; with no gearId, list the
    athlete's bikes instead.  Honors args.dryrun and the
    before/after filters; returns 0 on the tagging path."""
    # tag activities with specified gearId
    id = getattr(args,'gearId');
    if id is None:
        # list all bikes
        athlete = client.get_athlete();
        for id in athlete.bikes:
            logging.info("Bike: %s (%s) %ikm",id.name,id.id,id.distance/1e3);
        return
    # cnt = [total seen, updated]
    cnt = [0,0];
    for activity in client.get_activities(after = getattr(args,'after'),
                                          before = getattr(args,'before')):
        # only fill in activities that have no gear assigned yet
        if activity.gear_id is None:
            if not getattr(args,'dryrun'):
                client.update_activity(activity_id = activity.id,gear_id = id);
            # randomized sleep to stay under the Strava API rate limit
            dt = random.expovariate(1.0/1.5); # rate limiting...
            logging.info("Sleeping %fs", dt);
            time.sleep(dt);
            cnt[1] += 1;
        cnt[0] += 1;
    logging.info("Total activities: %i, Updated activities: %i.",cnt[0],cnt[1]);
    return 0;
def commuteCmd(client,args):
    """Flag rides between the configured home/work coordinates as
    commutes.  Endpoints come from args or config.py; a ride endpoint
    matches when it is within *tol* meters of orig/dest."""
    # tag activities as commutes
    # for our purposes, we define a commute as a sequence of rides starting at
    # orig (e.g., home) and ending at dest (e.g., work)
    import config; # see config.py
    orig = getattr(args,'orig');
    if orig is None:
        try:
            orig = config.orig[getattr(args,'user')]
        except AttributeError:
            raise;
    dest = getattr(args,'dest');
    if dest is None:
        try:
            dest = config.dest[getattr(args,'user')]
        except AttributeError:
            raise;
    tol = getattr(args,'tol');
    if tol is None:
        try:
            tol = config.tol;
        except AttributeError:
            tol = 1e3; # default: 1km
    ride = []; # empty list
    # loop over activities, getting id, date, start and end coords
    for activity in client.get_activities(after = getattr(args,'after'),
                                          before = getattr(args,'before')):
        # "start"/"end" hold orig/dest when within tol meters, else None
        info = {"id": None, "date": None, "start": None, "end": None}; info["id"] = activity.id;
        info["date"] = activity.start_date_local;
        info["name"] = activity.name;
        info["distance"] = activity.distance;
        for latlng in [orig, dest]:
            if distance(latlng,activity.start_latlng) <= tol:
                info["start"] = latlng;
            if distance(latlng,activity.end_latlng) <= tol:
                info["end"] = latlng;
        ride.append(info);
    commute = getCommutes(ride,orig,dest);
    if args.rtrn:
        # also count the return direction (dest -> orig)
        commute.extend(getCommutes(ride,dest,orig));
    # also flag dest-dest rides as commutes...?
    # TODO: this is a hack, fix this (shaun)
    for r in ride:
        if r["start"] == dest and r["end"] == dest:
            commute.append(r);
    # commute.sort(key = lambda x: x["date"]);
    logging.debug("Found %i/%i activities that look like commutes...",
                  len(commute),len(ride));
    for activity in commute:
        logging.debug("{0}: {1} {2} [{3}]".format(activity["date"],activity["name"],activity["distance"],activity["id"]));
        if not getattr(args,'dryrun'):
            client.update_activity(activity_id = activity["id"],commute = True);
            # randomized sleep to stay under the Strava API rate limit
            dt = random.expovariate(1.0/1.5); # rate limiting...
            logging.info("Sleeping %fs", dt);
            time.sleep(dt);
    logging.info("Total activities: %i, Updated activities: %i.",
                 len(ride),len(commute));
def getCommutes(ride,latlng0,latlng1):
    """Pick out rides (or same-day chains of rides) that travel from
    latlng0 to latlng1.

    ``ride`` is the list of dicts built in commuteCmd; rides are matched
    on their "start"/"end" fields.  Sorts ``ride`` in place, newest
    first.

    NOTE(review): the chain search walks backwards via ``ride[ii-jj]``;
    once ``ii-jj`` goes negative Python wraps to the END of the list,
    so it can scan unrelated rides -- confirm this is intended.
    """
    # sort by date/time... newest to oldest
    ride.sort(key = lambda x: x["date"], reverse = True);
    commute = []; # empty list
    for ii in range(len(ride)):
        # print "{0}: {1}".format(ii,ride[ii]);
        if ride[ii]["start"] != latlng0:
            continue
        # a candidate commute
        if ride[ii]["end"] == latlng0:
            continue # not a commute...
        if ride[ii]["end"] == latlng1:
            commute.append(ride[ii]);
            continue
        # ride could be part of a "multi-ride" commute
        commute_ = [ride[ii]];
        jj = 1;
        while True:
            if ride[ii-jj]["date"].date() != ride[ii]["date"].date():
                # not a commute
                # TODO: could be multi-day commute?
                commute_ = [];
                break
            if ride[ii-jj]["start"] == latlng0 or ride[ii-jj]["end"] == latlng0:
                # not a commute
                commute_ = [];
                break
            if ride[ii-jj]["end"] == latlng1:
                commute_.extend(ride[ii-jj:ii]);
                break
            jj += 1;
        commute.extend(commute_);
    return commute;
def main(args):
    """Dispatch the parsed command line: configure logging, handle the
    'auth' action, resolve the user's access token from config.py,
    normalize the before/after date filters, then hand off to the
    subcommand's cmdfn."""
    logging.basicConfig(stream = sys.stderr,
                        format='%(levelname)s:%(message)s',
                        level = args.loglevel or logging.INFO);
    logging.debug("args = %s", args);
    import config; # see config.py
    if getattr(args,'action') == "auth":
        # perform OAuth authorization...
        auth(config.CLIENT_ID,config.CLIENT_SECRET,port = getattr(args,'port'))
        return
    try:
        client.access_token = config.users[getattr(args,'user')];
    except KeyError:
        logging.error("Unknown user %s!", getattr(args,'user'));
        return;
    # parse filter arguments... (replace the YYYY-MM-DD strings on args
    # with datetime objects, bailing out on malformed input)
    before = getattr(args,'before')
    if before is not None:
        try:
            args.before = datetime.strptime(before,'%Y-%m-%d')
        except ValueError:
            logging.error("Invalid date format %s.",before);
            return
    after = getattr(args,'after')
    if after is not None:
        try:
            args.after = datetime.strptime(after,'%Y-%m-%d')
        except ValueError:
            logging.error("Invalid date format %s.",after);
            return
    # this is where we break out to handle different actions...
    return args.cmdfn(client,args);
# compute geodesic distance (in meters) from p0 to p1
def distance(latlng0,latlng1):
    """Geodesic distance in meters between two (lat, lng) pairs."""
    from geographiclib.geodesic import Geodesic;
    ellipsoid = Geodesic.WGS84  # use the WGS84 ellipsoid??
    solution = ellipsoid.Inverse(latlng0[0], latlng0[1], latlng1[0], latlng1[1])
    return solution['s12']  # 's12' is the geodesic length in meters
if __name__ == "__main__":
    # Command-line interface: build the argparse tree, then hand the
    # parsed namespace to main() and exit with its return value.
    prog = os.path.basename(sys.argv[0]);
    rev = 0.2; # increment this if modifying the script
    version = "%s v%s" % (prog, rev);
    p = ArgumentParser(
        description = __doc__,
        conflict_handler = "resolve");
    # common options/arguments, reused by every sub-command via parents=[...]
    pcommon = ArgumentParser(add_help = False);
    pcommon.add_argument("--version", action = "version", version = version);
    # control debugging output/verbosity
    group = pcommon.add_mutually_exclusive_group();
    group.add_argument("-v","--verbose",
        action = "store_const", const = logging.DEBUG,
        dest = "loglevel",
        help = "increase verbosity");
    group.add_argument("-q","--quiet",
        action = "store_const", const = logging.WARN,
        dest = "loglevel",
        help = "suppress non-error messages");
    # optional arguments
    pcommon.add_argument("-u","--user",
        action = 'store',
        default = getpass.getuser(),
        help = "Strava user/token defined in config.py");
    pcommon.add_argument("-n","--dry-run",
        action = "store_const", const = True,
        default = False,
        dest = "dryrun",
        help = "show what would have been modified");
    # date-range filter options shared by the data sub-commands
    pfilter = ArgumentParser(add_help = False);
    pfilter.add_argument("--before",
        action = 'store',
        default = None,
        metavar = "YYYY-MM-DD",
        help = "get activities before date");
    pfilter.add_argument("--after",
        action = 'store',
        default = None,
        metavar = "YYYY-MM-DD",
        help = "get activities after date");
    # pfilter.add_argument("-a","--activity",
    # action = 'store',
    # default = None,
    # metavar = "ID",
    # help = "activity identifier (e.g., ????????)");
    # actions... each sub-command binds its handler through set_defaults(cmdfn=...)
    subparsers = p.add_subparsers(title = "actions",
        dest = "action");
    # auth cmd
    poauth = subparsers.add_parser("auth",
        parents = [pcommon],
        help = "get Strava OAuth access token");
    poauth.add_argument("-p","--port",
        action = 'store',
        default = "8282", # can be anything >1024?
        help = "local port for OAuth callback");
    # gear cmd
    pgear = subparsers.add_parser("gear",
        parents = [pcommon, pfilter],
        help = "add gear to activities");
    pgear.set_defaults(cmdfn = gearCmd)
    pgear.add_argument("-i","--id",
        action = "store",
        default = None,
        dest = "gearId",
        metavar = "ID",
        help = "gear identifier (e.g., b4063944)");
    # commute cmd
    pcommute = subparsers.add_parser("commute",
        parents = [pcommon, pfilter],
        help = "automatically flag commutes");
    pcommute.set_defaults(cmdfn = commuteCmd)
    pcommute.add_argument("-o","--orig",
        action = "store",
        default = None,
        dest = "orig",
        metavar = "(LAT,LNG)",
        help = "origin for your commute");
    pcommute.add_argument("-d","--dest",
        action = "store",
        default = None,
        dest = "dest",
        metavar = "(LAT,LNG)",
        help = "destination for your commute");
    pcommute.add_argument("-t","--tolerance",
        action = "store",
        default = None,
        dest = "tol",
        help = "tolerance for matching orig/dest");
    # note: dest is "rtrn" because "return" is a Python keyword
    pcommute.add_argument("-r","--return",
        action = "store_const", const = True,
        default = False,
        dest = "rtrn",
        help = "tag return (dest --> orig) as a commute also");
    args = p.parse_args();
    exit(main(args));
|
984,347 | 5ffb4008147b2282c1df65ff5dc7113568d37369 | BENCHMARK_NAMES = [
"resnet",
"ssd",
"maskrcnn",
"transformer",
"gnmt",
"ncf",
"minigo"
]
# Metadata fields expected at the submission level.
SUBM_META_PROPS = [
    "org",
    "poc_email"
]
# Metadata fields expected for each entry.
ENTRY_META_PROPS = [
    "division",
    "status",
    "hardware",
    "framework",
    "power",
    "notes",
    "interconnect",
    "nodes",
    "os",
    "libraries",
    "compilers"
]
# Per-node hardware/system description fields.
NODE_META_PROPS = [
    "num_nodes",
    "cpu",
    "num_cores",
    "num_vcpus",
    "accelerator",
    "num_accelerators",
    "sys_mem_size",
    "sys_storage_type",
    "sys_storage_size",
    "cpu_accel_interconnect",
    "network_card",
    "num_network_cards",
    "notes"
]
# Number of result runs required per benchmark.
REQUIRED_RESULT_NUM = {
    "resnet": 5,
    "ssd": 5,
    "maskrcnn": 5,
    "transformer": 10,
    "gnmt": 10,
    "ncf": 100,
    "minigo": 20
}
# Reference results per benchmark.  All 1.0 here — presumably
# placeholders to be replaced with real reference timings; verify.
REFERENCE_RESULTS = {
    "resnet": 1.0,
    "ssd": 1.0,
    "maskrcnn": 1.0,
    "transformer": 1.0,
    "gnmt": 1.0,
    "ncf": 1.0,
    "minigo": 1.0
}
# Submission-level metadata columns shown in result tables.
RESULT_SUBM_META_COLUMNS = [
    "org",
]
# Entry-level metadata columns shown in result tables.
RESULT_ENTRY_META_COLUMNS = [
    "division",
    "status",
    "hardware",
    "framework",
    "power",
    "notes"
]
# Compliance-check level per division; semantics of the level values
# are defined by the checker that consumes this — verify there.
DIVISION_COMPLIANCE_CHECK_LEVEL = {
    "open": 1,
    "closed": 2
}
# check result status
SUCCESS = "success"
FAILURE = "failure"
ERROR = "error"
|
def countRange(lst, minimum, maximum):
    """Return how many values in lst fall within [minimum, maximum] (inclusive)."""
    return sum(1 for value in lst if minimum <= value <= maximum)
def main():
    """Read a whitespace-separated list of integers and an inclusive
    [minimum, maximum] range from stdin, then print how many of the
    numbers fall within the range (via countRange)."""
    lst = list(map(int, input('Enter the numbers(a b c...): ').split()))
    minimum = int(input('Enter the minimum value: '))
    maximum = int(input('Enter the maximum value: '))
    print(f'The number between {minimum} and {maximum} is {countRange(lst, minimum, maximum)}')


if __name__ == '__main__':
    # Guard the entry point so importing this module does not block on input().
    main()
984,349 | 7fecefa9eaead1b016e8ce83da2161853348fab3 | '''
Created on Aug 2, 2019
@author: Faizan-Uni
'''
import os
import h5py
import numpy as np
import matplotlib.pyplot as plt
from adjustText import adjust_text
from matplotlib.lines import Line2D
import matplotlib.colors as mpl_clrs
from scipy.interpolate import interp1d
from ..models import (
get_asymms_sample,
get_ns_cy,
get_ln_ns_cy,
get_kge_cy,
get_mean,
get_demr,
get_ln_mean,
get_ln_demr,
)
from ..misc import mkdir_hm, traceback_wrapper
plt.ioff()
@traceback_wrapper
def plot_cat_diags(plot_args):
    '''Produce all 1D diagnostic figures for one catchment database.

    plot_args is a 1-tuple holding the path to the catchment's HDF5
    database.  For every k-fold and run type ('calib'/'valid') that has
    both observed (qact_arr) and simulated (qsim_arr) discharge arrays,
    a PlotCatDiagnostics1D instance is built and each of its plotting
    methods is invoked.  Figures go to <main>/13_diagnostics_1D.
    '''
    cat_db, = plot_args

    with h5py.File(cat_db, 'r') as db:
        out_dir = db['data'].attrs['main']
        kfolds = db['data'].attrs['kfolds']
        cat = db.attrs['cat']
        off_idx = db['data'].attrs['off_idx']

        for kf in range(1, kfolds + 1):
            for run_type in ['calib', 'valid']:
                if run_type not in db:
                    continue

                # Skip folds that were not simulated to completion.
                if (('qact_arr' not in db[f'{run_type}/kf_{kf:02d}']) or
                    ('qsim_arr' not in db[f'{run_type}/kf_{kf:02d}'])):
                    continue

                plot_cat_diags_1d_cls = PlotCatDiagnostics1D(
                    db[f'{run_type}/kf_{kf:02d}/qact_arr'][...],
                    db[f'{run_type}/kf_{kf:02d}/qsim_arr'][...],
                    db[f'{run_type}/kf_{kf:02d}/ppt_arr'][...],
                    off_idx,
                    cat,
                    kf,
                    run_type,
                    os.path.join(out_dir, '13_diagnostics_1D'),
                )

                plot_cat_diags_1d_cls.plot_emp_cops()
                plot_cat_diags_1d_cls.plot_fts()
                plot_cat_diags_1d_cls.plot_lorenz_curves()

                # Quantile plots are treated as best-effort, but the
                # bare "except:" clauses are narrowed so that
                # KeyboardInterrupt/SystemExit are no longer swallowed.
                try:
                    plot_cat_diags_1d_cls.plot_quantile_effs()
                except Exception:
                    pass

                plot_cat_diags_1d_cls.plot_sorted_sq_diffs()
                plot_cat_diags_1d_cls.plot_peak_qevents()
                plot_cat_diags_1d_cls.plot_mw_discharge_ratios()
                plot_cat_diags_1d_cls.plot_hi_err_qevents()

                try:
                    plot_cat_diags_1d_cls.plot_quantile_stats()
                except Exception:
                    pass

                plot_cat_diags_1d_cls.plot_theoretical_error_reduction()
                plot_cat_diags_1d_cls.plot_mw_discharge_stds()
    return
class PlotCatsDiagnostics2D:

    '''For internal use only'''

    # Placeholder: 2D (multi-catchment) diagnostics not implemented yet.
    def __init__(self):
        return
class PlotCatDiagnostics1D:
'''For internal use only'''
    def __init__(
            self,
            qobs_arr,
            qsim_arr,
            ppt_arr,
            off_idx,
            cat,
            kf,
            run_type,
            # grid_rows,
            # grid_cols,
            out_dir,
            ):
        '''Prepare discharge/precipitation series for plotting.

        qobs_arr, qsim_arr : 1D observed/simulated discharge series.
        ppt_arr : 2D precipitation array, averaged over axis 0 here —
            assumes rows are spatial cells and columns are time steps;
            TODO confirm against caller.
        off_idx : number of leading (warm-up) steps dropped from all
            series.
        cat, kf, run_type : catchment id, k-fold number and
            'calib'/'valid' label, used only in figure titles/names.
        out_dir : output directory for figures (created if missing).
        '''
        # Drop the warm-up period from every series.
        self._qobs_arr = qobs_arr[off_idx:]
        self._qsim_arr = qsim_arr[off_idx:]
        self._ppt_arr = ppt_arr.mean(axis=0)[off_idx:]

        self._off_idx = off_idx
        self._cat = cat
        self._kf = kf
        self._run_type = run_type
        self._out_dir = out_dir

        self._n_steps = self._qobs_arr.shape[0]

        # Basic moments, computed once for reuse by the plot methods.
        self._qobs_mean = self._qobs_arr.mean()
        self._qobs_var = self._qobs_arr.var()

        self._qsim_mean = self._qsim_arr.mean()
        self._qsim_var = self._qsim_arr.var()

        # Efficiency functions (cython implementations) keyed by label.
        self._eff_ftns_dict = {
            'ns': get_ns_cy,
            'ln_ns': get_ln_ns_cy,
            'kge': get_kge_cy,
            }

        # Summary statistics keyed by label.
        self._stat_ftns_dict = {
            'mean': np.mean,
            'med': np.median
            }

        mkdir_hm(self._out_dir)

        # Empirical ranks (1-based) and Weibull plotting-position
        # probabilities rank / (n + 1) for each series.
        self._qobs_ranks = np.argsort(np.argsort(self._qobs_arr)) + 1
        self._qobs_probs = self._qobs_ranks / (self._n_steps + 1.0)

        self._qsim_ranks = np.argsort(np.argsort(self._qsim_arr)) + 1
        self._qsim_probs = self._qsim_ranks / (self._n_steps + 1.0)

        self._ppt_ranks = np.argsort(np.argsort(self._ppt_arr)) + 1

        # if ppt_arr.shape[0] > 1:
        # self._ppt_dist_arr = ppt_arr[:, off_idx:]
        # self._grid_rows = grid_rows
        # self._grid_cols = grid_cols
        # self._grid_shape = (
        # self._grid_rows.max() + 1, self._grid_cols.max() + 1)
        #
        # else:
        # self._ppt_dist_arr = None
        # self._grid_rows = None
        # self._grid_cols = None
        # self._grid_shape = None
        return
def plot_mw_discharge_stds(self):
out_dir = os.path.join(self._out_dir, 'mw_stds')
mkdir_hm(out_dir)
line_alpha = 0.7
line_lw = 0.9
ws = 365
(ws_x_crds,
qobs_mw_std_arr,
qsim_mw_std_arr,
qmw_diff_arr,
qmw_ratio_arr) = (
self._get_mw_stds_arrs(ws))
fig = plt.figure(figsize=(15, 7))
mwq_ax = plt.subplot2grid(
(4, 1), (0, 0), rowspan=1, colspan=1, fig=fig)
mwq_ax.plot(
ws_x_crds,
qobs_mw_std_arr,
alpha=line_alpha,
color='red',
label='std-obs',
lw=line_lw)
mwq_ax.plot(
ws_x_crds,
qsim_mw_std_arr,
alpha=line_alpha,
color='blue',
label='std-sim',
lw=line_lw)
mwq_ax.set_ylabel('Moving window\ndischarge std')
mwq_ax.set_xticklabels([])
mwq_ax.grid()
mwq_ax.legend(framealpha=0.3)
mwq_ax.locator_params('y', nbins=4)
diff_ratio_ax = plt.subplot2grid(
(4, 1), (1, 0), rowspan=3, colspan=1, fig=fig)
diff_ratio_ax.plot(
ws_x_crds,
qmw_diff_arr,
alpha=line_alpha,
color='blue',
lw=line_lw,
label='window diff.')
diff_ratio_ax.axhline(
qmw_diff_arr.mean(),
alpha=line_alpha,
color='blue',
lw=line_lw + 0.5,
ls='-.',
label='mean diff.')
max_abs_diff = max(abs(qmw_diff_arr.min()), abs(qmw_diff_arr.max()))
diff_ratio_ax.set_ylim(-max_abs_diff, +max_abs_diff)
diff_ratio_ax.grid()
diff_ratio_ax.legend(framealpha=0.3, loc=1)
diff_ratio_ax.set_xlabel('Time')
diff_ratio_ax.set_ylabel('Moving window discharge std difference')
ratio_ax = diff_ratio_ax.twinx()
ratio_ax.plot(
ws_x_crds,
qmw_ratio_arr,
alpha=line_alpha,
color='green',
lw=line_lw,
label='window ratio')
ratio_ax.axhline(
qmw_ratio_arr.mean(),
alpha=line_alpha,
color='green',
lw=line_lw + 0.5,
ls='-.',
label='mean ratio')
ratio_ax.set_ylabel('Moving window discharge std ratio')
ratio_ax.set_ylim(0, 2)
ratio_ax.legend(framealpha=0.3, loc=4)
plt.suptitle(
f'Moving window qsim by qobs std ratio for window size: '
f'{ws} steps\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
f'Min. ratio: {qmw_ratio_arr.min():0.3f}, '
f'Mean ratio: {qmw_ratio_arr.mean():0.3f}, '
f'Max. ratio: {qmw_ratio_arr.max():0.3f}\n'
f'Min. diff: {qmw_diff_arr.min():0.3f}, '
f'Mean diff: {qmw_diff_arr.mean():0.3f}, '
f'Max. diff: {qmw_diff_arr.max():0.3f}'
)
fig_name = (
f'mwq_std_ratio_ws_{ws}_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
def plot_theoretical_error_reduction(self,):
out_dir = os.path.join(self._out_dir, 'err_red')
mkdir_hm(out_dir)
line_alpha = 0.8
line_lw = 2.0
clrs = list(mpl_clrs.TABLEAU_COLORS)
qobs_sort_idxs_arr = np.argsort(self._qobs_arr)[::-1]
qobs_ranks_arr = np.argsort(np.argsort(self._qobs_arr))[::-1] + 1
qsim_ranks_arr = np.argsort(np.argsort(self._qsim_arr))[::-1] + 1
qobs_sort_arr = self._qobs_arr[qobs_sort_idxs_arr]
qsim_sort_arr = self._qsim_arr[qobs_sort_idxs_arr]
err_red_abs_arrs = self._get_err_red_arrs(qobs_sort_arr, qsim_sort_arr)
err_red_rnk_arrs = self._get_err_red_arrs(
qobs_ranks_arr.astype(float, order='c'),
qsim_ranks_arr.astype(float, order='c'))
qobs_pcnt_idx_vals = np.arange(
0, self._n_steps + 1, dtype=float) / self._n_steps
clrs_ctr = 0
plt.figure(figsize=(15, 7))
for eff_ftn in err_red_abs_arrs:
plt.plot(
qobs_pcnt_idx_vals,
err_red_abs_arrs[eff_ftn],
lw=line_lw,
alpha=line_alpha,
label='abs_' + eff_ftn,
c=clrs[clrs_ctr])
clrs_ctr += 1
for eff_ftn in err_red_rnk_arrs:
plt.plot(
qobs_pcnt_idx_vals,
err_red_rnk_arrs[eff_ftn],
lw=line_lw,
alpha=line_alpha,
label='rank_' + eff_ftn,
c=clrs[clrs_ctr])
clrs_ctr += 1
plt.xlabel('Percentage discharge value (descending) index')
plt.ylabel('Objective function value')
plt.ylim(0, 1)
plt.grid()
plt.legend()
plt.title(
f'Error reduction by rectifying successive simulated values\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}'
)
fig_name = (
f'err_red_abs_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
def plot_quantile_stats(self):
clrs = list(mpl_clrs.TABLEAU_COLORS)
out_dir = os.path.join(self._out_dir, 'quant_stats')
mkdir_hm(out_dir)
line_alpha = 0.7
n_quants = 10
qobs_quants_masks_dict = self._get_quant_masks_dict(n_quants)
q_quant_stats_dict = self._get_q_quant_stats_dict(
qobs_quants_masks_dict)
bar_x_crds = (
np.arange(1., n_quants + 1) / n_quants) - (0.5 / n_quants)
plt.figure(figsize=(15, 7))
plt_texts = []
add_legend_items = []
sim_labs = ['obs', 'sim']
clr_idx = 0
for k, stat_ftn_lab in enumerate(self._stat_ftns_dict):
ratios_arr = (
q_quant_stats_dict[stat_ftn_lab][:, 1] /
q_quant_stats_dict[stat_ftn_lab][:, 0])
plt.plot(
bar_x_crds,
ratios_arr,
marker='o',
label=f'{stat_ftn_lab}-ratio',
color=clrs[
(len(sim_labs) * len(q_quant_stats_dict)) + k],
alpha=line_alpha)
for j, sim_lab in enumerate(sim_labs):
for i in range(n_quants):
txt_obj = plt.text(
bar_x_crds[i],
ratios_arr[i],
f' {q_quant_stats_dict[stat_ftn_lab][i, j]:0.2f} ',
va='top',
ha='left',
color=clrs[clr_idx],
alpha=line_alpha,
size='small')
plt_texts.append(txt_obj)
legend_sym = Line2D(
[0],
[0],
color='w',
markerfacecolor=clrs[clr_idx],
label=f'{stat_ftn_lab}-{sim_lab}',
marker='o',
alpha=line_alpha)
add_legend_items.append(legend_sym)
clr_idx += 1
plt.axhline(1, color='k')
adjust_text(plt_texts)
bar_x_crds_labs = [
f'{bar_x_crds[i]:0.3f} - '
f'({int(qobs_quants_masks_dict[i].sum())})'
for i in range(n_quants)]
plt.xticks(bar_x_crds, bar_x_crds_labs, rotation=90)
plt.xlabel('Mean interval prob. - (N)')
plt.ylabel('Ratio')
plt.grid()
legend_handles, legend_labels = plt.gca().get_legend_handles_labels()
legend_handles.extend(add_legend_items)
legend_labels.extend(
[l_item.get_label() for l_item in add_legend_items])
plt.legend(handles=legend_handles, labels=legend_labels, loc=0)
plt.title(
f'Statisitics for {n_quants} quantiles of discharges '
f'(sim. by obs.)\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
f'Quantile indices using observed'
)
fig_name = (
f'quants_stat_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
def plot_hi_err_qevents(self):
time_freq = 'D'
if time_freq == 'D':
# Day
n_evts = 2
bef_steps = 10
aft_steps = 10
elif time_freq == 'H':
# 1H
n_evts = 2 * 4
bef_steps = 10 * 24
aft_steps = 10 * 24
else:
raise NotImplementedError
assert 0 < n_evts < self._n_steps
sq_diffs = (self._qobs_arr - self._qsim_arr) ** 2
qmax = max(self._qobs_arr.max(), self._qsim_arr.max())
sum_sq_diffs = sq_diffs.sum()
ppt_max = self._ppt_arr.max()
qerr_idxs = np.argsort(sq_diffs)[::-1]
hi_qerr_idxs = [qerr_idxs[0]]
i = 0
for qerr_idx in qerr_idxs[1:]:
take_idx = True
for hi_qerr_idx in hi_qerr_idxs:
if ((hi_qerr_idx - bef_steps) <
qerr_idx <
(hi_qerr_idx + aft_steps)):
take_idx = False
break
if take_idx:
hi_qerr_idxs.append(qerr_idx)
i += 1
if i == n_evts:
break
self._plot_hi_err_qevents(
hi_qerr_idxs,
ppt_max,
qmax,
bef_steps,
aft_steps,
sq_diffs,
sum_sq_diffs,
time_freq)
#
# if self._ppt_dist_arr is not None:
# self._plot_hi_err_qevents_2d_ppt(
# hi_qerr_idxs,
# ppt_max,
# bef_steps,
# aft_steps,
# sq_diffs,
# sum_sq_diffs)
return
def plot_mw_discharge_ratios(self):
out_dir = os.path.join(self._out_dir, 'mw_ratios')
mkdir_hm(out_dir)
line_alpha = 0.7
line_lw = 0.9
ws = 365
(ws_x_crds,
qobs_mw_mean_arr,
qsim_mw_mean_arr,
qobs_mw_med_arr,
qsim_mw_med_arr,
qmw_diff_arr,
qmw_ratio_arr) = (
self._get_mw_qdiff_ratio_arrs(ws))
fig = plt.figure(figsize=(15, 7))
mwq_ax = plt.subplot2grid(
(4, 1), (0, 0), rowspan=1, colspan=1, fig=fig)
mwq_ax.plot(
ws_x_crds,
qobs_mw_mean_arr,
alpha=line_alpha,
color='red',
label='mean-obs',
lw=line_lw)
mwq_ax.plot(
ws_x_crds,
qsim_mw_mean_arr,
alpha=line_alpha,
color='blue',
label='mean-sim',
lw=line_lw)
mwq_ax.plot(
ws_x_crds,
qobs_mw_med_arr,
alpha=line_alpha,
color='red',
label='med-obs',
ls='-.',
lw=line_lw)
mwq_ax.plot(
ws_x_crds,
qsim_mw_med_arr,
alpha=line_alpha,
color='blue',
label='med-sim',
ls='-.',
lw=line_lw)
mwq_ax.set_ylabel('Moving window\ndischarge')
mwq_ax.set_xticklabels([])
mwq_ax.grid()
mwq_ax.legend(framealpha=0.3)
mwq_ax.locator_params('y', nbins=4)
diff_ratio_ax = plt.subplot2grid(
(4, 1), (1, 0), rowspan=3, colspan=1, fig=fig)
diff_ratio_ax.plot(
ws_x_crds,
qmw_diff_arr,
alpha=line_alpha,
color='blue',
lw=line_lw,
label='window diff.')
diff_ratio_ax.axhline(
qmw_diff_arr.mean(),
alpha=line_alpha,
color='blue',
lw=line_lw + 0.5,
ls='-.',
label='mean diff.')
max_abs_diff = max(abs(qmw_diff_arr.min()), abs(qmw_diff_arr.max()))
diff_ratio_ax.set_ylim(-max_abs_diff, +max_abs_diff)
diff_ratio_ax.grid()
diff_ratio_ax.legend(framealpha=0.3, loc=1)
diff_ratio_ax.set_xlabel('Time')
diff_ratio_ax.set_ylabel('Moving window discharge difference')
ratio_ax = diff_ratio_ax.twinx()
ratio_ax.plot(
ws_x_crds,
qmw_ratio_arr,
alpha=line_alpha,
color='green',
lw=line_lw,
label='window ratio')
ratio_ax.axhline(
qmw_ratio_arr.mean(),
alpha=line_alpha,
color='green',
lw=line_lw + 0.5,
ls='-.',
label='mean ratio')
ratio_ax.set_ylabel('Moving window discharge ratio')
ratio_ax.set_ylim(0, 2)
ratio_ax.legend(framealpha=0.3, loc=4)
plt.suptitle(
f'Moving window qsim by qobs ratio for window size: {ws} steps\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
f'Min. ratio: {qmw_ratio_arr.min():0.3f}, '
f'Mean ratio: {qmw_ratio_arr.mean():0.3f}, '
f'Max. ratio: {qmw_ratio_arr.max():0.3f}\n'
f'Min. diff: {qmw_diff_arr.min():0.3f}, '
f'Mean diff: {qmw_diff_arr.mean():0.3f}, '
f'Max. diff: {qmw_diff_arr.max():0.3f}'
)
fig_name = (
f'mwq_ratio_ws_{ws}_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
def plot_peak_qevents(self):
out_dir = os.path.join(self._out_dir, 'peaks_cmp')
mkdir_hm(out_dir)
time_freq = 'D'
line_alpha = 0.7
line_lw = 1.3
if time_freq == 'D':
# 1D
bef_steps = 10
aft_steps = 10
ws = 30
steps_per_cycle = 365 # should be enough to have peaks_per_cycle peaks
peaks_per_cycle = 1
elif time_freq == 'H':
# 1H
bef_steps = 10 * 24
aft_steps = 10 * 24
ws = 30 * 24
steps_per_cycle = 365 * 24 # should be enough to have peaks_per_cycle peaks
peaks_per_cycle = 2
else:
raise NotImplementedError
peaks_mask = self._get_peaks_mask(
ws, steps_per_cycle, peaks_per_cycle)
ppt_max = self._ppt_arr.max()
qmax = max(self._qobs_arr.max(), self._qsim_arr.max())
evt_idxs = np.where(peaks_mask)[0]
for evt_idx in evt_idxs:
fig = plt.figure(figsize=(15, 7))
dis_ax = plt.subplot2grid(
(4, 1), (1, 0), rowspan=3, colspan=1, fig=fig)
ppt_ax = plt.subplot2grid(
(4, 1), (0, 0), rowspan=1, colspan=1, fig=fig)
bef_idx = max(0, evt_idx - bef_steps)
aft_idx = min(evt_idx + aft_steps + 1, self._qobs_arr.shape[0])
x_arr = np.arange(bef_idx, aft_idx)
dis_ax.plot(
x_arr,
self._qsim_arr[bef_idx:aft_idx],
alpha=line_alpha,
color='blue',
label='sim',
lw=line_lw)
dis_ax.plot(
x_arr,
self._qobs_arr[bef_idx:aft_idx],
label='obs',
color='red',
alpha=line_alpha,
lw=line_lw + 0.2)
dis_ax.axvline(
evt_idx,
alpha=line_alpha,
color='orange',
label='event_step',
lw=line_lw)
if time_freq == 'H':
pass
elif time_freq == 'D':
for x, y in zip(x_arr, self._qsim_arr[bef_idx:aft_idx]):
text = f'{self._qobs_ranks[x]}, {self._qsim_ranks[x]}'
if y < (0.5 * qmax):
va = 'bottom'
text = ' ' + text
else:
va = 'top'
text = text + ' '
dis_ax.text(
x,
y,
text,
rotation=90,
alpha=0.8,
va=va,
size='x-small')
else:
raise NotImplementedError
dis_ax.set_xlabel('Time')
dis_ax.set_ylabel('Discharge')
dis_ax.legend()
dis_ax.grid()
dis_ax.set_ylim(0, qmax)
ppt_ax.fill_between(
x_arr,
0,
self._ppt_arr[bef_idx:aft_idx],
label='ppt',
alpha=line_alpha * 0.7,
lw=line_lw + 0.2)
ppt_ax.axvline(
evt_idx,
alpha=line_alpha,
color='orange',
lw=line_lw)
if time_freq == 'H':
pass
elif time_freq == 'D':
for x, y in zip(x_arr, self._ppt_arr[bef_idx:aft_idx]):
text = f'{self._ppt_ranks[x]}'
if y < (0.5 * ppt_max):
va = 'bottom'
text = ' ' + text
else:
va = 'top'
text = text + ' '
ppt_ax.text(
x,
y,
text,
rotation=90,
alpha=0.8,
va=va,
size='x-small')
else:
raise NotImplementedError
ppt_ax.set_ylim(0, ppt_max)
ppt_ax.set_ylabel('Precipitation')
ppt_ax.legend()
ppt_ax.grid()
ppt_ax.set_xticklabels([])
ppt_ax.locator_params('y', nbins=4)
plt.suptitle(
f'Peak discharge comparison at index: {evt_idx}\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}'
)
fig_name = (
f'peak_cmp_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}_idx_{evt_idx}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
    def plot_sorted_sq_diffs(self):
        '''Plot signed squared discharge differences against the sorted
        observed discharge and save the figure as a PNG.'''

        out_dir = os.path.join(self._out_dir, 'sq_diffs')
        mkdir_hm(out_dir)

        line_alpha = 0.7

        sorted_q_idxs = np.argsort(self._qobs_arr)

        sorted_qobs = self._qobs_arr[sorted_q_idxs]
        sorted_qsim = self._qsim_arr[sorted_q_idxs]

        sorted_sq_diffs = (sorted_qobs - sorted_qsim) ** 2

        # NOTE(review): multiplying the squared difference by the raw
        # difference (not its sign) yields a *cubed* difference; if a
        # signed squared difference is intended, np.sign(...) should be
        # used here — verify.
        signs = (sorted_qobs - sorted_qsim)
        sorted_sq_diffs *= signs

        plt.figure(figsize=(20, 7))

        plt.plot(sorted_qobs, sorted_sq_diffs, alpha=line_alpha)

        # Symmetric y-limits so over- and under-estimation read equally.
        ylim_max = np.abs(plt.ylim()).max()

        plt.text(
            +0.5 * sorted_qobs[-1],
            +0.5 * ylim_max,
            'Observed discharge higher',
            horizontalalignment='center',
            verticalalignment='center')

        plt.text(
            +0.5 * sorted_qobs[-1],
            -0.5 * ylim_max,
            'Observed discharge lower',
            horizontalalignment='center',
            verticalalignment='center')

        plt.ylim(-ylim_max, +ylim_max)

        plt.xlabel('Observed discharge')
        plt.ylabel('Signed squared difference')

        plt.grid()

        plt.title(
            f'Sorted signed squared differences of discharge\n'
            f'Catchment: {self._cat}, Kf: {self._kf}, '
            f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}'
            )

        fig_name = (
            f'sq_diffs_kf_{self._kf:02d}_{self._run_type}_'
            f'cat_{self._cat}.png')

        plt.savefig(
            os.path.join(out_dir, fig_name), bbox_inches='tight')
        plt.close()
        return
def plot_quantile_effs(self):
out_dir = os.path.join(self._out_dir, 'quant_effs')
mkdir_hm(out_dir)
line_alpha = 0.7
n_quants = 10
qobs_quants_masks_dict = self._get_quant_masks_dict(n_quants)
q_quant_effs_dict = self._get_q_quant_effs_dict(qobs_quants_masks_dict)
bar_x_crds = (
np.arange(1., n_quants + 1) / n_quants) - (0.5 / n_quants)
plt.figure(figsize=(15, 7))
plt_texts = []
for eff_ftn_lab in self._eff_ftns_dict:
text_color = plt.plot(
bar_x_crds,
q_quant_effs_dict[eff_ftn_lab],
marker='o',
label=eff_ftn_lab,
alpha=line_alpha)[0].get_color()
for i in range(n_quants):
txt_obj = plt.text(
bar_x_crds[i],
q_quant_effs_dict[eff_ftn_lab][i],
f' {q_quant_effs_dict[eff_ftn_lab][i]:0.2f}',
va='top',
ha='left',
color=text_color,
alpha=line_alpha,
size='x-small')
plt_texts.append(txt_obj)
adjust_text(plt_texts)
bar_x_crds_labs = [
f'{bar_x_crds[i]:0.3f} - '
f'({int(qobs_quants_masks_dict[i].sum())})'
for i in range(n_quants)]
plt.xticks(bar_x_crds, bar_x_crds_labs, rotation=90)
plt.xlabel('Mean interval prob. - (N)')
plt.ylabel('Efficiency')
plt.grid()
plt.legend()
plt.title(
f'Efficiences for {n_quants} quantiles of discharges\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
f'Quantile indices using observed'
)
fig_name = (
f'quants_eff_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
def plot_lorenz_curves(self):
out_dir = os.path.join(self._out_dir, 'lorenz')
mkdir_hm(out_dir)
line_alpha = 0.7
sorted_abs_diffs = np.sort(
((self._qobs_arr - self._qsim_arr) ** 2)).cumsum()
cumm_sq_diff = sorted_abs_diffs[-1]
sorted_abs_diffs = sorted_abs_diffs / cumm_sq_diff
plt.figure(figsize=(15, 7))
lorenz_x_vals = np.linspace(
1., self._n_steps, self._n_steps) / (self._n_steps + 1.)
trans_lorenz_x_vals = 1 - lorenz_x_vals
plt.semilogx(
trans_lorenz_x_vals,
1 - trans_lorenz_x_vals,
color='red',
label=f'equal_contrib',
alpha=line_alpha)
plt.semilogx(
trans_lorenz_x_vals,
sorted_abs_diffs,
label=f'sim',
alpha=0.5)
x_ticks = np.array([1e-4, 1e-3, 1e-2, 1e-1, 1e0])
plt.xticks(x_ticks, 1 - x_ticks)
plt.gca().invert_xaxis()
plt.xlabel('Rel. cumm. steps')
plt.ylabel('Rel. cumm. sq. diff.')
plt.legend()
plt.grid()
plt.title(
f'Lorenz error contribution curves\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
f'Cummulative squared difference: {cumm_sq_diff:0.3f}'
)
fig_name = (
f'lorenz_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
def plot_fts(self):
out_dir = os.path.join(self._out_dir, 'fts')
mkdir_hm(out_dir)
n_ft_pts = self._qobs_arr.shape[0]
if n_ft_pts % 2:
n_ft_pts -= 1
ft_ofst_idx = 1
ft_obs = np.fft.rfft(self._qobs_arr[:n_ft_pts])[ft_ofst_idx:]
ft_obs_phas = np.angle(ft_obs)
ft_obs_amps = np.abs(ft_obs)
freq_cov_cntrb_obs_obs = np.cumsum(ft_obs_amps ** 2)
max_cov_obs = freq_cov_cntrb_obs_obs[-1]
freq_cov_cntrb_obs_obs /= max_cov_obs
_obs_indiv_cntrb = ft_obs_amps * ft_obs_amps
_obs_indiv_cntrb /= max_cov_obs
freq_cov_cntrb_grad_obs = (
_obs_indiv_cntrb[1:] - _obs_indiv_cntrb[:-1]) / (
_obs_indiv_cntrb[1:])
freq_cov_cntrb_grad_obs[np.abs(freq_cov_cntrb_grad_obs) > 20] = 20
ft_sim = np.fft.rfft(self._qsim_arr[:n_ft_pts])[ft_ofst_idx:]
ft_sim_phas = np.angle(ft_sim)
ft_sim_amps = np.abs(ft_sim)
max_cov_sim = (
(ft_obs_amps ** 2).sum() * (ft_sim_amps ** 2).sum()) ** 0.5
freq_cov_cntrb_sim_obs = np.cumsum(
(ft_obs_amps * ft_sim_amps) * np.cos(ft_obs_phas - ft_sim_phas))
freq_cov_cntrb_sim_obs /= max_cov_sim
_sim_indiv_cntrb = (
(ft_obs_amps * ft_sim_amps) * np.cos(ft_obs_phas - ft_sim_phas))
_sim_indiv_cntrb /= max_cov_sim
freq_cov_cntrb_grad_sim = (
_sim_indiv_cntrb[1:] - _sim_indiv_cntrb[:-1]) / (
_sim_indiv_cntrb[1:])
freq_cov_cntrb_grad_sim[np.abs(freq_cov_cntrb_grad_sim) > 20] = 20
freq_cov_cntrb_sim_sim = np.cumsum(ft_sim_amps ** 2)
max_cov_sim_sim = freq_cov_cntrb_sim_sim[-1]
freq_cov_cntrb_sim_sim /= max_cov_sim_sim
self._plot_fts_wvcbs(
freq_cov_cntrb_obs_obs,
freq_cov_cntrb_sim_obs,
freq_cov_cntrb_sim_sim,
out_dir)
self._plot_fts_wvcbs_grad(
freq_cov_cntrb_grad_obs,
freq_cov_cntrb_grad_sim,
out_dir)
self._plot_fts_phas_diff(ft_obs_phas, ft_sim_phas, out_dir)
self._plot_fts_amps_diff(ft_obs_amps, ft_sim_amps, out_dir)
self._plot_fts_amps(ft_obs_amps, ft_sim_amps, out_dir)
self._plot_fts_abs_diff(ft_obs, ft_sim, out_dir)
return
    def plot_emp_cops(self):
        '''Scatter the empirical copula (observed vs simulated
        non-exceedance probabilities) of discharge and save it as a PNG.'''

        out_dir = os.path.join(self._out_dir, 'ecops')
        mkdir_hm(out_dir)

        # NOTE(review): np.corrcoef on the raw value arrays gives the
        # Pearson correlation, but the title labels it "Spearman"; for
        # Spearman this should use the probs/ranks — verify intent.
        spcorr = np.corrcoef(self._qobs_arr, self._qsim_arr)[0, 1]

        # Sample asymmetry statistics of the empirical copula.
        asymm_1, asymm_2 = get_asymms_sample(
            self._qobs_probs,
            self._qsim_probs)

        plt.figure(figsize=(12, 10))

        plt.scatter(
            self._qobs_probs,
            self._qsim_probs,
            alpha=0.1,
            color='blue')

        plt.xlabel('Observed Discharge')
        plt.ylabel('Simulated Discharge')

        plt.grid()

        plt.title(
            f'Empirical copula between observed and simulated discharges\n'
            f'Catchment: {self._cat}, Kf: {self._kf}, '
            f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
            f'Spearman Correlation: {spcorr:0.4f}, '
            f'Asymm_1: {asymm_1:0.4E}, Asymm_2: {asymm_2:0.4E}')

        fig_name = (
            f'ecop_obs_sim_kf_{self._kf:02d}_{self._run_type}_'
            f'cat_{self._cat}.png')

        plt.savefig(
            os.path.join(out_dir, fig_name),
            bbox_inches='tight')
        plt.close()
        return
def _get_mw_stds_arrs(self, ws):
ref_mv_std_arr = np.zeros(self._n_steps - ws)
sim_mv_std_arr = ref_mv_std_arr.copy()
ws_xcrds = []
for i in range(self._n_steps - ws):
ref_mv_std_arr[i] = self._qobs_arr[i:i + ws].std()
sim_mv_std_arr[i] = self._qsim_arr[i:i + ws].std()
ws_xcrds.append(i + int(0.5 * ws))
ws_xcrds = np.array(ws_xcrds)
diff_arr = (sim_mv_std_arr - ref_mv_std_arr)
ratio_arr = (sim_mv_std_arr / ref_mv_std_arr)
return (
ws_xcrds,
ref_mv_std_arr,
sim_mv_std_arr,
diff_arr,
ratio_arr)
    def _get_err_red_arrs(self, qobs_sort_arr, qsim_sort_arr):
        '''For each efficiency function, compute the efficiency obtained
        after "rectifying" (removing the error contribution of)
        successively more leading values of the pre-sorted series.

        Index 0 is the efficiency without rectification; index i + 1 is
        the efficiency after zeroing the first i + 1 squared
        differences.  Returns {eff_ftn_label: array of n_steps + 1}.
        '''
        # first value is objective function without rectifying
        err_red_arrs = {}
        for eff_ftn in self._eff_ftns_dict:
            err_red_arr = np.full(
                self._n_steps + 1, np.nan)

            if eff_ftn == 'ns':
                # Nash-Sutcliffe: 1 - SSE / denominator (cython helpers).
                mean = get_mean(qobs_sort_arr, 0)
                demr = get_demr(qobs_sort_arr, mean, 0)

                sq_diffs_arr = (qobs_sort_arr - qsim_sort_arr) ** 2
                sq_diffs_sum = sq_diffs_arr.sum()

                err_red_arr[0] = 1.0 - (sq_diffs_sum / demr)
                for i in range(self._n_steps):
                    sq_diffs_sum -= sq_diffs_arr[i]
                    err_red_arr[i + 1] = 1.0 - (sq_diffs_sum / demr)

                # NOTE(review): this branch only warns while the ln_ns
                # branch asserts — presumably tolerating float
                # cancellation residue here; verify.
                if not np.isclose(sq_diffs_sum, 0.0):
                    print(
                        f'Square differences sum ({sq_diffs_sum:0.5e}) '
                        f'not close to zero:')

            elif eff_ftn == 'ln_ns':
                # Log-space Nash-Sutcliffe.
                mean = get_ln_mean(qobs_sort_arr, 0)
                demr = get_ln_demr(qobs_sort_arr, mean, 0)

                sq_diffs_arr = np.log((qobs_sort_arr / qsim_sort_arr)) ** 2
                sq_diffs_sum = sq_diffs_arr.sum()

                err_red_arr[0] = 1.0 - (sq_diffs_sum / demr)
                for i in range(self._n_steps):
                    sq_diffs_sum -= sq_diffs_arr[i]
                    err_red_arr[i + 1] = 1.0 - (sq_diffs_sum / demr)

                assert np.isclose(sq_diffs_sum, 0.0)

            elif eff_ftn == 'kge':
                # Error reduction for KGE is not implemented; skipped.
                continue

            else:
                raise NotImplementedError

            err_red_arrs[eff_ftn] = err_red_arr
        return err_red_arrs
def _get_q_quant_stats_dict(self, qobs_quants_masks_dict):
n_q_quants = len(qobs_quants_masks_dict)
quant_stats_dict = {}
for stat_ftn_key, stat_ftn in self._stat_ftns_dict.items():
quant_stats = []
for i in range(n_q_quants):
mask = qobs_quants_masks_dict[i]
assert mask.sum() > 0
quant_stats.append([
stat_ftn(self._qobs_arr[mask]),
stat_ftn(self._qsim_arr[mask])])
quant_stats_dict[stat_ftn_key] = np.array(quant_stats)
return quant_stats_dict
def _plot_hi_err_qevents(
self,
hi_qerr_idxs,
ppt_max,
qmax,
bef_steps,
aft_steps,
sq_diffs,
sum_sq_diffs,
time_freq):
out_dir = os.path.join(self._out_dir, 'hi_qerrs')
mkdir_hm(out_dir)
line_alpha = 0.7
line_lw = 1.3
for hi_qerr_idx in hi_qerr_idxs:
fig = plt.figure(figsize=(15, 7))
dis_ax = plt.subplot2grid(
(4, 1), (1, 0), rowspan=3, colspan=1, fig=fig)
ppt_ax = plt.subplot2grid(
(4, 1), (0, 0), rowspan=1, colspan=1, fig=fig)
bef_idx = max(0, hi_qerr_idx - bef_steps)
aft_idx = min(hi_qerr_idx + aft_steps + 1, self._qobs_arr.shape[0])
x_arr = np.arange(bef_idx, aft_idx)
dis_ax.plot(
x_arr,
self._qsim_arr[bef_idx:aft_idx],
alpha=line_alpha,
color='blue',
label='sim',
lw=line_lw)
dis_ax.plot(
x_arr,
self._qobs_arr[bef_idx:aft_idx],
label='obs',
color='red',
alpha=line_alpha,
lw=line_lw + 0.2)
dis_ax.axvline(
hi_qerr_idx,
alpha=line_alpha,
color='orange',
label='event_step',
lw=line_lw)
if time_freq == 'D':
for x, y in zip(x_arr, self._qsim_arr[bef_idx:aft_idx]):
text = (
f'{100 * (sq_diffs[x] / sum_sq_diffs):0.3f}%, '
f'{self._qobs_ranks[x]}, {self._qsim_ranks[x]}')
if y < (0.5 * qmax):
va = 'bottom'
text = ' ' + text
else:
va = 'top'
text = text + ' '
dis_ax.text(
x,
y,
text,
rotation=90,
alpha=0.8,
va=va,
size='x-small')
elif time_freq == 'H':
pass
else:
raise NotImplementedError
dis_ax.set_xlabel('Time')
dis_ax.set_ylabel('Discharge')
dis_ax.legend()
dis_ax.grid()
dis_ax.set_ylim(0, qmax)
ppt_ax.fill_between(
x_arr,
0,
self._ppt_arr[bef_idx:aft_idx],
label='ppt',
alpha=line_alpha * 0.7,
lw=line_lw + 0.2)
ppt_ax.axvline(
hi_qerr_idx,
alpha=line_alpha,
color='orange',
lw=line_lw)
if time_freq == 'D':
for x, y in zip(x_arr, self._ppt_arr[bef_idx:aft_idx]):
text = f'{self._ppt_ranks[x]}'
if y < (0.5 * ppt_max):
va = 'bottom'
text = ' ' + text
else:
va = 'top'
text = text + ' '
ppt_ax.text(
x,
y,
text,
rotation=90,
alpha=0.8,
va=va,
size='x-small')
elif time_freq == 'H':
pass
else:
raise NotImplementedError
ppt_ax.set_ylim(0, ppt_max)
ppt_ax.set_ylabel('Precipitation')
ppt_ax.legend()
ppt_ax.grid()
ppt_ax.set_xticklabels([])
ppt_ax.locator_params('y', nbins=4)
sq_diff = sq_diffs[hi_qerr_idx]
tot_pcnt = 100 * (sq_diff / sum_sq_diffs)
plt.suptitle(
f'Hi error discharge comparison at index: {hi_qerr_idx}\n'
f'Catchment: {self._cat}, Kf: {self._kf}, '
f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
f'Squared difference: {sq_diff:0.2f}, {tot_pcnt:0.3f}% '
f'of the total ({sum_sq_diffs:0.2f})'
)
fig_name = (
f'hi_qerr_kf_{self._kf:02d}_{self._run_type}_'
f'cat_{self._cat}_idx_{hi_qerr_idx}.png')
plt.savefig(
os.path.join(out_dir, fig_name), bbox_inches='tight')
plt.close()
return
# def _plot_hi_err_qevents_2d_ppt(
# self,
# hi_qerr_idxs,
# ppt_max,
# bef_steps,
# aft_steps,
# sq_diffs,
# sum_sq_diffs):
#
# out_dir = os.path.join(self._out_dir, 'hi_qerrs_ppt')
# mkdir_hm(out_dir)
#
# n_evt_steps = aft_steps + bef_steps
#
# loc_rows = max(1, int(0.25 * n_evt_steps))
# loc_cols = max(1, int(np.ceil(n_evt_steps / loc_rows)))
#
# sca_fac = 3
# loc_rows *= sca_fac
# loc_cols *= sca_fac
#
# legend_rows = 1
# legend_cols = loc_cols
#
# plot_shape = (loc_rows + legend_rows, loc_cols)
#
# cen_crds = (0.5 * np.array(self._grid_shape)).astype(int)
#
# for hi_qerr_idx in hi_qerr_idxs:
# bef_idx = max(0, hi_qerr_idx - bef_steps)
# aft_idx = min(hi_qerr_idx + aft_steps + 1, self._qobs_arr.shape[0])
#
# curr_row = 0
# curr_col = 0
#
# sq_diff = sq_diffs[hi_qerr_idx]
# tot_pcnt = 100 * (sq_diff / sum_sq_diffs)
#
# plt.figure(figsize=(12, 14))
#
# for step in range(bef_idx, aft_idx):
# ax = plt.subplot2grid(
# plot_shape,
# loc=(curr_row, curr_col),
# rowspan=sca_fac,
# colspan=sca_fac)
#
# plot_grid = np.full(self._grid_shape, np.nan)
# plot_grid[self._grid_rows, self._grid_cols] = (
# self._ppt_dist_arr[:, step])
#
# plot_grid = np.flipud(plot_grid)
#
# ps = ax.imshow(
# plot_grid,
# origin='lower',
# cmap=plt.get_cmap('gist_rainbow'),
# zorder=1,
# vmin=0,
# vmax=ppt_max)
#
# ax.text(
# *cen_crds,
# f' {step}\n({self._ppt_ranks[step]:0.2f})',
# size='x-small')
#
# ax.set_ylim(0, self._grid_shape[0])
# ax.set_xlim(0, self._grid_shape[1])
#
# ax.set_xticks([])
# ax.set_yticks([])
# ax.set_xticklabels([])
# ax.set_yticklabels([])
# ax.set_axis_off()
#
# curr_col += sca_fac
# if curr_col >= loc_cols:
# curr_col = 0
# curr_row += sca_fac
#
# cb_ax = plt.subplot2grid(
# plot_shape,
# loc=(plot_shape[0] - 1, 0),
# rowspan=1,
# colspan=legend_cols)
#
# cb_ax.set_axis_off()
# cb = plt.colorbar(
# ps,
# ax=cb_ax,
# fraction=0.9,
# aspect=20,
# orientation='horizontal')
#
# cb.set_label('Precipitation')
#
# plt.suptitle(
# f'Hi error discharge precipitation comparison at '
# f'index: {hi_qerr_idx}\n'
# f'Catchment: {self._cat}, Kf: {self._kf}, '
# f'Run Type: {self._run_type.upper()}, '
# f'Steps: {self._n_steps}\n'
# f'Squared difference: {sq_diff:0.2f}, {tot_pcnt:0.3f}% '
# f'of the total ({sum_sq_diffs:0.2f})'
# )
#
# fig_name = (
# f'hi_qerr_ppt_kf_{self._kf:02d}_{self._run_type}_'
# f'cat_{self._cat}_idx_{hi_qerr_idx}.png')
#
# plt.savefig(os.path.join(out_dir, fig_name), bbox_inches='tight')
# plt.close()
# return
def _get_mw_qdiff_ratio_arrs(self, ws):
ref_mv_mean_arr = np.zeros(self._n_steps - ws)
sim_mv_mean_arr = ref_mv_mean_arr.copy()
ref_mv_med_arr = ref_mv_mean_arr.copy()
sim_mv_med_arr = ref_mv_mean_arr.copy()
ws_xcrds = []
for i in range(self._n_steps - ws):
ref_mv_mean_arr[i] = self._qobs_arr[i:i + ws].mean()
sim_mv_mean_arr[i] = self._qsim_arr[i:i + ws].mean()
ref_mv_med_arr[i] = np.median(self._qobs_arr[i:i + ws])
sim_mv_med_arr[i] = np.median(self._qsim_arr[i:i + ws])
ws_xcrds.append(i + int(0.5 * ws))
ws_xcrds = np.array(ws_xcrds)
diff_arr = (sim_mv_mean_arr - ref_mv_mean_arr)
ratio_arr = (sim_mv_mean_arr / ref_mv_mean_arr)
return (
ws_xcrds,
ref_mv_mean_arr,
sim_mv_mean_arr,
ref_mv_med_arr,
sim_mv_med_arr,
diff_arr,
ratio_arr)
def _get_peaks_mask(self, ws, steps_per_cycle, peaks_per_cycle):
    """Select the ``peaks_per_cycle`` highest observed-discharge peaks
    within each cycle of ``steps_per_cycle`` steps.

    Cycle boundaries are aligned to the driest window (smallest moving
    sum of size ``ws``) so that peaks are not split across a boundary.
    Returns a boolean mask over the observed series, True at selected
    peak steps.
    """
    # A step is a local peak when discharge rises into it and recedes
    # after it; the first and last steps can never be peaks.
    rising = self._qobs_arr[1:] - self._qobs_arr[:-1] > 0
    recing = self._qobs_arr[1:-1] - self._qobs_arr[2:] > 0
    peaks_mask = np.concatenate(([False], rising[:-1] & recing, [False]))
    assert peaks_mask.sum(), 'No peaks?'
    n_steps = self._qobs_arr.shape[0]
    assert steps_per_cycle > peaks_per_cycle
    assert steps_per_cycle > ws
    assert steps_per_cycle < n_steps
    # Moving sums over the first cycle, used to locate the driest window.
    # NOTE(review): the loop fills indices 0..steps_per_cycle - 2 only,
    # so window_sums[-1] stays np.inf (it can never be the argmin) —
    # confirm that skipping the last candidate window is intended.
    window_sums = np.full(steps_per_cycle, np.inf)
    for i in range(ws, steps_per_cycle + ws - 1):
        window_sums[i - ws] = self._qobs_arr[i - ws:i].sum()
    assert np.all(window_sums > 0)
    # Center of the driest window becomes the cycle boundary anchor.
    min_idx = int(0.5 * ws) + np.argmin(window_sums)
    # Start the first cycle at 0 when the anchor lies in the second half
    # of a cycle; otherwise start it at the anchor itself.
    if min_idx > (0.5 * steps_per_cycle):
        beg_idx = 0
        end_idx = min_idx
    else:
        beg_idx = min_idx
        end_idx = min_idx + steps_per_cycle
    assert n_steps >= end_idx - beg_idx, 'Too few steps!'
    out_mask = np.zeros(n_steps, dtype=bool)
    # NOTE(review): the loop exits once end_idx reaches n_steps, so steps
    # after the last full cycle are never scanned for peaks — confirm
    # that the trailing partial cycle is meant to be dropped.
    while (end_idx - n_steps) < 0:
        # Restrict the peak mask to the current cycle.
        loop_mask = np.zeros(n_steps, dtype=bool)
        loop_mask[beg_idx:end_idx] = True
        loop_mask &= peaks_mask
        # Keep only the highest peaks_per_cycle peaks of this cycle.
        highest_idxs = np.argsort(
            self._qobs_arr[loop_mask])[-peaks_per_cycle:]
        out_mask[np.where(loop_mask)[0][highest_idxs]] = True
        beg_idx = end_idx
        end_idx += steps_per_cycle
    assert out_mask.sum(), 'No peaks selected!'
    return out_mask
def _get_q_quant_effs_dict(self, qobs_quants_masks_dict):
n_q_quants = len(qobs_quants_masks_dict)
quant_effs_dict = {}
for eff_ftn_key in self._eff_ftns_dict:
eff_ftn = self._eff_ftns_dict[eff_ftn_key]
quant_effs = []
for i in range(n_q_quants):
mask = qobs_quants_masks_dict[i]
assert mask.sum() > 0
quant_effs.append(
eff_ftn(self._qobs_arr[mask], self._qsim_arr[mask], 0))
quant_effs_dict[eff_ftn_key] = quant_effs
return quant_effs_dict
def _get_quant_masks_dict(self, n_quants):
interp_ftn = interp1d(
np.sort(self._qobs_probs),
np.sort(self._qobs_arr),
bounds_error=False,
fill_value=(self._qobs_arr.min(), self._qobs_arr.max()))
quant_probs = np.linspace(0., 1., n_quants + 1, endpoint=True)
quants = interp_ftn(quant_probs)
quants[-1] = quants[-1] * 1.1
masks_dict = {}
for i in range(n_quants):
masks_dict[i] = (
(self._qobs_arr >= quants[i]) &
(self._qobs_arr < quants[i + 1]))
return masks_dict
def _plot_fts_abs_diff(self, ft_obs, ft_sim, out_dir):
    """Save a semilog-x plot of |FT(obs) - FT(sim)| for discharge.

    ft_obs / ft_sim are the observed and simulated discharge Fourier
    transforms; the figure is written into out_dir.
    """
    plt.figure(figsize=(15, 7))

    plt.semilogx(
        np.abs(ft_obs - ft_sim),
        alpha=0.7 + 0.2,
        color='red',
        lw=0.9)

    plt.xlabel('Frequency')
    plt.ylabel('Absolute FT difference')
    plt.grid()

    fig_title = (
        f'Discharge Fourier transform absolute difference\n'
        f'Catchment: {self._cat}, Kf: {self._kf}, '
        f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
        f'Obs. and Sim. mean: {self._qobs_mean:0.3f}, '
        f'{self._qsim_mean:0.3f}, '
        f'Obs. and Sim. variance: {self._qobs_var:0.3f}, '
        f'{self._qsim_var:0.3f}')

    plt.title(fig_title)

    out_name = (
        f'ft_abs_diff_kf_{self._kf:02d}_{self._run_type}_'
        f'cat_{self._cat}.png')

    plt.savefig(os.path.join(out_dir, out_name), bbox_inches='tight')
    plt.close()
    return
def _plot_fts_amps(self, ft_obs_amps, ft_sim_amps, out_dir):
    """Save a semilog-x plot of the obs/sim discharge Fourier amplitudes."""
    plt.figure(figsize=(15, 7))

    # Observed series drawn slightly more opaque so it stays visible
    # under the simulated one.
    for series, lbl, clr, alp in (
            (ft_obs_amps, 'obs', 'red', 0.7 + 0.2),
            (ft_sim_amps, 'sim', 'blue', 0.7)):

        plt.semilogx(series, label=lbl, alpha=alp, color=clr, lw=0.9)

    plt.xlabel('Frequency')
    plt.ylabel('Amplitude')
    plt.grid()
    plt.legend()

    fig_title = (
        f'Discharge Fourier amplitudes\n'
        f'Catchment: {self._cat}, Kf: {self._kf}, '
        f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
        f'Obs. and Sim. mean: {self._qobs_mean:0.3f}, '
        f'{self._qsim_mean:0.3f}, '
        f'Obs. and Sim. variance: {self._qobs_var:0.3f}, '
        f'{self._qsim_var:0.3f}')

    plt.title(fig_title)

    out_name = (
        f'ft_amps_kf_{self._kf:02d}_{self._run_type}_'
        f'cat_{self._cat}.png')

    plt.savefig(os.path.join(out_dir, out_name), bbox_inches='tight')
    plt.close()
    return
def _plot_fts_phas_diff(self, ft_obs_phas, ft_sim_phas, out_dir):
    """Save a plot of the obs - sim discharge Fourier phase difference."""
    plt.figure(figsize=(15, 7))

    plt.plot(
        ft_obs_phas - ft_sim_phas,
        alpha=0.7 + 0.2,
        color='red',
        lw=0.9)

    plt.xlabel('Frequency')
    plt.ylabel('Phase difference')
    plt.grid()

    fig_title = (
        f'Discharge Fourier phase difference\n'
        f'Catchment: {self._cat}, Kf: {self._kf}, '
        f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}')

    plt.title(fig_title)

    out_name = (
        f'ft_phas_diff_kf_{self._kf:02d}_{self._run_type}_'
        f'cat_{self._cat}.png')

    plt.savefig(os.path.join(out_dir, out_name), bbox_inches='tight')
    plt.close()
    return
def _plot_fts_amps_diff(self, ft_obs_amps, ft_sim_amps, out_dir):
    """Save a plot of the normalized Fourier amplitude difference
    (obs - sim) / obs for discharge."""
    plt.figure(figsize=(15, 7))

    plt.plot(
        (ft_obs_amps - ft_sim_amps) / ft_obs_amps,
        alpha=0.7 + 0.2,
        color='red',
        lw=0.9)

    plt.xlabel('Frequency')
    plt.ylabel('Amplitude difference')
    plt.grid()

    fig_title = (
        f'Discharge Fourier normalized amplitude difference\n'
        f'Catchment: {self._cat}, Kf: {self._kf}, '
        f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
        f'Obs. and Sim. mean: {self._qobs_mean:0.3f}, '
        f'{self._qsim_mean:0.3f}, '
        f'Obs. and Sim. variance: {self._qobs_var:0.3f}, '
        f'{self._qsim_var:0.3f}')

    plt.title(fig_title)

    out_name = (
        f'ft_amps_diff_kf_{self._kf:02d}_{self._run_type}_'
        f'cat_{self._cat}.png')

    plt.savefig(os.path.join(out_dir, out_name), bbox_inches='tight')
    plt.close()
    return
def _plot_fts_wvcbs(
        self,
        freq_cov_cntrb_obs_obs,
        freq_cov_cntrb_sim_obs,
        freq_cov_cntrb_sim_sim,
        out_dir):
    """Save a semilog-x plot of the cumulative Fourier-frequency
    correlation contributions (obs-obs, sim-obs and sim-sim)."""
    plt.figure(figsize=(15, 7))

    series_specs = (
        (freq_cov_cntrb_obs_obs, 'obs_obs', 'red', 0.7 + 0.2),
        (freq_cov_cntrb_sim_obs, 'sim_obs', 'blue', 0.7),
        (freq_cov_cntrb_sim_sim, 'sim_sim', 'green', 0.7))

    for series, lbl, clr, alp in series_specs:
        plt.semilogx(series, label=lbl, alpha=alp, color=clr, lw=0.9)

    plt.xlabel('Frequency')
    plt.ylabel('Cumulative correlation contribution')
    plt.legend()
    plt.grid()

    fig_title = (
        f'Discharge Fourier frequency correlation contribution\n'
        f'Catchment: {self._cat}, Kf: {self._kf}, '
        f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
        f'Sim-to-Obs fourier correlation: '
        f'{freq_cov_cntrb_sim_obs[-1]:0.4f}\n'
        f'Obs. and Sim. mean: {self._qobs_mean:0.3f}, '
        f'{self._qsim_mean:0.3f}, '
        f'Obs. and Sim. variance: {self._qobs_var:0.3f}, '
        f'{self._qsim_var:0.3f}')

    plt.title(fig_title)

    out_name = (
        f'ft_full_wvcb_kf_{self._kf:02d}_{self._run_type}_'
        f'cat_{self._cat}.png')

    plt.savefig(os.path.join(out_dir, out_name), bbox_inches='tight')
    plt.close()
    return
def _plot_fts_wvcbs_grad(
        self,
        freq_cov_cntrb_grad_obs,
        freq_cov_cntrb_grad_sim,
        out_dir):
    """Save a plot of the gradient of the cumulative Fourier-frequency
    correlation contribution for obs and sim discharge."""
    plt.figure(figsize=(15, 7))

    for series, lbl, clr, alp in (
            (freq_cov_cntrb_grad_obs, 'obs', 'red', 0.7 + 0.2),
            (freq_cov_cntrb_grad_sim, 'sim', 'blue', 0.7)):

        plt.plot(series, label=lbl, alpha=alp, color=clr, lw=0.9)

    plt.xlabel('Frequency')
    plt.ylabel('Cumulative correlation contribution gradient')
    plt.legend()
    plt.grid()

    fig_title = (
        f'Discharge Fourier frequency correlation contribution normalized'
        f' gradient\n'
        f'Catchment: {self._cat}, Kf: {self._kf}, '
        f'Run Type: {self._run_type.upper()}, Steps: {self._n_steps}\n'
        f'Obs. and Sim. mean: {self._qobs_mean:0.3f}, '
        f'{self._qsim_mean:0.3f}, '
        f'Obs. and Sim. variance: {self._qobs_var:0.3f}, '
        f'{self._qsim_var:0.3f}')

    plt.title(fig_title)

    out_name = (
        f'ft_full_wvcb_grad_kf_{self._kf:02d}_{self._run_type}_'
        f'cat_{self._cat}.png')

    plt.savefig(os.path.join(out_dir, out_name), bbox_inches='tight')
    plt.close()
    return
|
984,350 | 197c612125b71f6f2c87f20af571c6431409b8d4 | from unittest.mock import Mock
from . import responses
from buycoins import Query
from buycoins.utils import allowed_currencies
class TestQueries:
    """Tests for the read-only (query) endpoints of the buycoins wrapper.

    NOTE(review): ``Query`` below is a ``Mock`` that shadows the imported
    ``buycoins.Query``; each test sets the mock's return value from the
    canned ``responses`` module and then asserts on that same canned
    payload.  These tests therefore pin the expected response *shapes*
    but never exercise the real client code — confirm this is intended.
    """

    # Mock stand-in shadowing the imported buycoins.Query class.
    Query = Mock()

    def test_get_balances(self):
        """All six wallet balances are returned with allowed currencies."""
        self.Query.get_balances.return_value = responses.get_balances
        balances = self.Query.get_balances()
        assert "getBalances" in balances["data"]
        assert len(balances["data"]["getBalances"]) == 6
        for currency in balances["data"]["getBalances"]:
            assert currency["cryptocurrency"] in allowed_currencies

    def test_get_specific_balance(self):
        """Filtering by cryptocurrency yields only the bitcoin balance."""
        self.Query.get_balances.return_value = responses.get_balance
        balance = self.Query.get_balances(cryptocurrency="bitcoin")
        assert "getBalances" in balance["data"]
        assert balance["data"]["getBalances"][0]["id"] == "QWNjb3VudC0="
        assert balance["data"]["getBalances"][0]["confirmedBalance"] == "0.0"
        assert balance["data"]["getBalances"][0]["cryptocurrency"] == "bitcoin"

    def test_get_bank_accounts(self):
        """The linked bank-account record has the expected fields."""
        self.Query.get_bank_accounts.return_value = responses.get_bank_accounts
        bank_accounts = self.Query.get_bank_accounts()
        assert "getBankAccounts" in bank_accounts["data"]
        assert (
            bank_accounts["data"]["getBankAccounts"][0]["id"]
            == "QmFua0FjY291bnQtNjlkZGM2MjEtYzM0My00Mzg1LTlkMDYtY2VkNTM2MWY1Yjkz"
        )
        assert (
            bank_accounts["data"]["getBankAccounts"][0]["bankName"]
            == "ALAT by WEMA"
        )
        assert (
            bank_accounts["data"]["getBankAccounts"][0]["accountName"]
            == "kolapo ayesebotan"
        )
        assert (
            bank_accounts["data"]["getBankAccounts"][0]["accountNumber"]
            == "0235959654"
        )
        assert (
            bank_accounts["data"]["getBankAccounts"][0]["accountType"]
            == "withdrawal"
        )

    def test_get_specific_bank_account(self):
        """Filtering by account number returns the same canned record."""
        self.Query.get_bank_accounts.return_value = responses.get_bank_accounts
        bank_account = self.Query.get_bank_accounts(account_number="0235959654")
        assert "getBankAccounts" in bank_account["data"]
        assert (
            bank_account["data"]["getBankAccounts"][0]["id"]
            == "QmFua0FjY291bnQtNjlkZGM2MjEtYzM0My00Mzg1LTlkMDYtY2VkNTM2MWY1Yjkz"
        )
        assert (
            bank_account["data"]["getBankAccounts"][0]["bankName"]
            == "ALAT by WEMA"
        )
        assert (
            bank_account["data"]["getBankAccounts"][0]["accountName"]
            == "kolapo ayesebotan"
        )
        assert (
            bank_account["data"]["getBankAccounts"][0]["accountNumber"]
            == "0235959654"
        )
        assert (
            bank_account["data"]["getBankAccounts"][0]["accountType"]
            == "withdrawal"
        )

    def test_get_estimated_network_fee_default(self):
        """Fee estimate defaults to bitcoin when no currency is given."""
        self.Query.get_estimated_network_fee.return_value = (
            responses.get_estimated_network_fee_default
        )
        network_fee = self.Query.get_estimated_network_fee(
            0.03
        )  # Bitcoin is implied by default
        assert "getEstimatedNetworkFee" in network_fee["data"]
        assert (
            network_fee["data"]["getEstimatedNetworkFee"]["total"] == "0.03036"
        )
        assert (
            network_fee["data"]["getEstimatedNetworkFee"]["estimatedFee"]
            == "0.00036"
        )

    def test_get_estimated_network_fee_ethereum(self):
        """Fee estimate for an explicit ethereum request."""
        self.Query.get_estimated_network_fee.return_value = (
            responses.get_estimated_network_fee_ethereum
        )
        network_fee = self.Query.get_estimated_network_fee(
            0.03, cryptocurrency="ethereum"
        )
        assert "getEstimatedNetworkFee" in network_fee["data"]
        assert network_fee["data"]["getEstimatedNetworkFee"]["total"] == "0.04"
        assert (
            network_fee["data"]["getEstimatedNetworkFee"]["estimatedFee"]
            == "0.01"
        )

    def test_get_market_book_default(self):
        """Default market book returns one buy and one sell bitcoin order."""
        self.Query.get_market_book.return_value = responses.get_market_book_default
        market_book = self.Query.get_market_book()  # Bitcoin is implied by default
        buy_node = market_book["data"]["getMarketBook"]["orders"]["edges"][0]["node"]
        sell_node = market_book["data"]["getMarketBook"]["orders"]["edges"][1]["node"]
        assert len(market_book["data"]["getMarketBook"]["orders"]["edges"]) == 2
        assert buy_node["id"] == "UG9zdE9yZGVyLTcxY2JmZjAxLTk2NTEtNGQzOC1hMGIyLWE2YzRkMDUzNWVkMA=="
        assert buy_node["cryptocurrency"] == "bitcoin"
        assert buy_node["coinAmount"] == "0.013797"
        assert buy_node["side"] == "buy"
        assert buy_node["status"] == "active"
        assert buy_node["createdAt"] == 1612808624
        assert buy_node["pricePerCoin"] == "19501000.0"
        assert buy_node["priceType"] == "static"
        assert buy_node["staticPrice"] == "1950100000"
        assert sell_node["id"] == "UG9zdE9yZGVyLTM5ODg2ZTNlLTJmZDQtNDgxNy05ODRjLWNlMTFlYmIwMzhlMw=="
        assert sell_node["cryptocurrency"] == "bitcoin"
        assert sell_node["coinAmount"] == "0.00653659"
        assert sell_node["side"] == "sell"
        assert sell_node["status"] == "active"
        assert sell_node["createdAt"] == 1612800454
        assert sell_node["pricePerCoin"] == "20500000.0"
        assert sell_node["priceType"] == "static"
        assert sell_node["staticPrice"] == "2050000000"

    def test_get_market_book_usd_tether(self):
        """Market book for usd_tether returns matching buy/sell orders."""
        self.Query.get_market_book.return_value = responses.get_market_book_usd_tether
        market_book = self.Query.get_market_book(cryptocurrency="usd_tether")
        buy_node = market_book["data"]["getMarketBook"]["orders"]["edges"][0]["node"]
        sell_node = market_book["data"]["getMarketBook"]["orders"]["edges"][1]["node"]
        assert len(market_book["data"]["getMarketBook"]["orders"]["edges"]) == 2
        assert buy_node["id"] == "UG9zdE9yZGVyLWZmYTliOTdiLThmZjUtNDE4Mi05ZDJjLWM4ZWM5MzNlMTliZg=="
        assert buy_node["cryptocurrency"] == "usd_tether"
        assert buy_node["coinAmount"] == "100.0"
        assert buy_node["side"] == "buy"
        assert buy_node["status"] == "active"
        assert buy_node["createdAt"] == 1611770385
        assert buy_node["pricePerCoin"] == "460.0"
        assert buy_node["priceType"] == "static"
        assert buy_node["staticPrice"] == "46000"
        assert sell_node["id"] == "UG9zdE9yZGVyLWI5ZWJkYWNmLTQ1MjYtNDYxYS1hYzFlLTljZTZlNTRmOWFkOA=="
        assert sell_node["cryptocurrency"] == "usd_tether"
        assert sell_node["coinAmount"] == "234.0"
        assert sell_node["side"] == "sell"
        assert sell_node["status"] == "active"
        assert sell_node["createdAt"] == 1612413166
        assert sell_node["pricePerCoin"] == "499.0"
        assert sell_node["priceType"] == "static"
        assert sell_node["staticPrice"] == "49900"

    def test_get_orders(self):
        """Open-orders query returns the canned active buy order."""
        self.Query.get_orders.return_value = responses.get_orders
        orders = self.Query.get_orders("open")
        node = orders["data"]["getOrders"]["orders"]["edges"][0]["node"]
        assert node["id"] == "UG9zdE9yZGVyLWEzYTAwNzQxLTJhMWUtNGJkMi1iZWFkLWE2ZWU0MzQ1ZmI2Yw=="
        assert node["cryptocurrency"] == "bitcoin"
        assert node["coinAmount"] == "0.005"
        assert node["side"] == "buy"
        assert node["status"] == "active"
        assert node["createdAt"] == 1605000847
        assert node["pricePerCoin"] == "10900.09"
        assert node["priceType"] == "static"
        assert node["staticPrice"] == "1090009"

    def test_get_payments(self):
        """Payments query returns the canned successful deposit."""
        self.Query.get_payments.return_value = responses.get_payments
        payments = self.Query.get_payments()
        node = payments["data"]["getPayments"]["edges"][0]["node"]
        assert node["id"] == "UG9zdE9yZGVyLTg5MDM4MzI4LTc5MzItNGUxMS1hZWZjLTkwYjg4ZTFhY2JjOA=="
        assert node["fee"] == "0.0046"
        assert node["amount"] == "10000.00"
        assert node["createdAt"] == 1605000847
        assert node["reference"] == "38d5d9018bde98e88058746788d72e936d158f5ad753073f4763dc1d4aa5a48e"
        assert node["status"] == "success"
        assert node["totalAmount"] == "10000.004600"
        assert node["type"] == "deposit"

    def test_get_prices(self):
        """Prices query returns four allowed currencies; bitcoin is first."""
        self.Query.get_prices.return_value = responses.get_prices
        prices = self.Query.get_prices()
        assert len(prices["data"]["getPrices"]) == 4
        for price in prices["data"]["getPrices"]:
            assert price["cryptocurrency"] in allowed_currencies
        bitcoin_price = prices["data"]["getPrices"][0]
        assert bitcoin_price["id"] == "QnV5Y29pbnNQcmljZS01OTkwYTQ0NC1hYjY4LTQxM2MtODUzZC04OWJhYzRhMWNjZjE="
        assert bitcoin_price["status"] == "active"
        assert bitcoin_price["cryptocurrency"] == "bitcoin"
        assert bitcoin_price["minBuy"] == "0.001"
        assert bitcoin_price["minSell"] == "0.001"
        assert bitcoin_price["maxBuy"] == "1.78700697"
        assert bitcoin_price["maxSell"] == "1.20119207"
        assert bitcoin_price["minCoinAmount"] == "0.001"
        assert bitcoin_price["expiresAt"] == 1612847212
        assert bitcoin_price["buyPricePerCoin"] == "21956210.523"
        assert bitcoin_price["sellPricePerCoin"] == "21521388.24"

    def test_get_specific_price(self):
        """Price query filtered to ethereum returns its canned quote."""
        self.Query.get_prices.return_value = responses.get_price
        prices = self.Query.get_prices(cryptocurrency="ethereum")
        eth_price = prices["data"]["getPrices"][0]
        assert eth_price["id"] == "QnV5Y29pbnNQcmljZS0yOWFmZWY4MS1mZjI5LTQwYTQtYmQ3Zi1iOTgzMTA3NmU5NDg="
        assert eth_price["status"] == "active"
        assert eth_price["cryptocurrency"] == "ethereum"
        assert eth_price["minBuy"] == "0.02"
        assert eth_price["minSell"] == "0.02"
        assert eth_price["maxBuy"] == "48.07685652"
        assert eth_price["maxSell"] == "0"
        assert eth_price["minCoinAmount"] == "0.02"
        assert eth_price["expiresAt"] == 1612847332
        assert eth_price["buyPricePerCoin"] == "816107.8759"
        assert eth_price["sellPricePerCoin"] == "799786.8945"
|
984,351 | c82d3d9774b14ab45521da9a2e4262bcea094fb4 | import numpy as np
import math
def Lstability(npstar1x, npstar2x, mass0):
    """Quantify how well star 1's orbital angular momentum is conserved.

    Parameters
    ----------
    npstar1x, npstar2x : ndarray, shape (n_steps, 6)
        State vectors of the two stars over time; columns 0:3 are
        positions and columns 3:6 are velocities (assumed layout —
        matches the slicing below).
    mass0 : float
        Mass used for the angular-momentum computation.

    Returns
    -------
    tuple
        (deltaL, mean_L): relative peak-to-peak spread of the
        z-component of star 1's angular momentum, and the mean of the
        angular-momentum array.
    """
    pos1 = npstar1x[:, 0:3]
    vel1 = npstar1x[:, 3:]
    pos2 = npstar2x[:, 0:3]

    # Angular momentum of star 1 about star 2: L = m * v x (r1 - r2).
    npstar1L = mass0 * np.cross(vel1, pos1 - pos2)

    # Relative peak-to-peak variation of the z-component; small values
    # mean L_z is well conserved over the run.
    deltaL = np.abs(
        (np.max(npstar1L[:, 2]) - np.min(npstar1L[:, 2]))
        / np.mean(npstar1L[:, 2]))

    # NOTE(review): np.mean(npstar1L) averages over *all* components
    # flattened; if the mean z-component was intended, it should be
    # np.mean(npstar1L[:, 2]).  Preserved as-is for compatibility.
    return deltaL, np.mean(npstar1L)
|
984,352 | 2a301de90bba2f07086494d89bba478d56352ea0 | import os
import math
import numpy as np
import tensorflow as tf
from vgg16 import Vgg16
from concept import Concept
import pdb
class Teacher:
def __init__(self, sess, rl_gamma, boltzman_beta,
             belief_var_1d, num_distractors, attributes_size,
             message_space_size, img_length):
    """Build the teacher agent's TensorFlow 1.x graph.

    Constructs three sub-graphs: input placeholders, a belief-update
    module (VGG16 features + message-conditioned belief posterior) and a
    Q-value network, plus the savers/loaders for their variables.

    Args:
        sess: tf.Session used for every graph execution.
        rl_gamma: RL discount factor (stored; not used in this ctor).
        boltzman_beta: initial value of the Boltzmann temperature
            variable (replaced by a tf.Variable below).
        belief_var_1d: initial value of the belief "leak" variable
            (replaced by exp(tf.Variable) below).
        num_distractors: number of candidate images per round.
        attributes_size: size of the concept attribute space (stored only).
        message_space_size: number of discrete messages.
        img_length: side length in pixels of the square input images.
    """
    self.sess = sess
    self.num_distractors_ = num_distractors
    self.attributes_size_ = attributes_size
    self.message_space_size_ = message_space_size
    self.rl_gamma_ = rl_gamma
    self.boltzman_beta_ = boltzman_beta
    self.belief_var_1d_ = belief_var_1d
    self.img_length = img_length
    ################
    # Placeholders #
    ################
    with tf.variable_scope('Teacher'):
        self.distractors_ = tf.placeholder(tf.float32, name = 'distractors',
            shape = [None, self.num_distractors_, self.img_length, self.img_length, 3])
        # Flatten the distractor axis into the batch axis for the CNN.
        self.distractors_tensor_ = tf.reshape(self.distractors_, [-1, self.img_length, self.img_length, 3])
        self.message_ = tf.placeholder(tf.float32, shape = [None, self.message_space_size_], name = 'message')
        self.teacher_belief_ = tf.placeholder(tf.float32, shape = [None, self.num_distractors_], name = 'teacher_belief')
        self.student_belief_ = tf.placeholder(tf.float32, shape = [None, self.num_distractors_], name = 'student_belief')
        self.student_belief_spvs_ = tf.placeholder(tf.float32, shape = [None, self.num_distractors_], name = 'student_belief_spvs')
        self.q_net_spvs_ = tf.placeholder(tf.float32, shape = [None])
        # self.is_train = tf.placeholder(tf.bool, shape = [])
    ########################
    # Belief Update Module #
    ########################
    self.global_step = tf.Variable(0, trainable=False)
    self.starter_learning_rate = 1e-3
    # Halve the learning rate every 20k global steps.
    self.learning_rate = tf.train.exponential_decay(self.starter_learning_rate, self.global_step, 20000, 0.5, staircase=True)
    self.feature_update_opt_ = tf.train.AdamOptimizer(learning_rate = self.learning_rate)
    with tf.variable_scope('Teacher_Feature_Extract'):
        self.perceptor_ = Vgg16()
        self.image_features_ = self.perceptor_.build(self.distractors_tensor_)
        #self.feature_extracted_pre_1_ = tf.contrib.layers.fully_connected(self.image_features_, 1024, activation_fn=tf.nn.leaky_relu)
        # Reshape back to one feature row per distractor:
        # [batch, num_distractors, 1, feat_dim].
        self.feature_extracted_ = tf.reshape(self.image_features_, [-1, self.num_distractors_, 1, np.product(self.image_features_.get_shape()[1:])])
        print(self.feature_extracted_.shape)
    self.feature_train_varlist_ = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES) if v.name.startswith('Teacher_Feature_Extract')]
    self.feature_extract_varlist_ = [v for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) if v.name.startswith('Teacher_Feature_Extract')]
    self.belief_update_opt_ = tf.train.AdamOptimizer(learning_rate = self.learning_rate)
    with tf.variable_scope('Belief_Update'):
        self.df2_ = self.feature_extracted_
        # self.df1_ = tf.layers.conv2d(self.feature_extracted_, 3 * self.message_space_size_, kernel_size = [1, 1],
        #     kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
        #     activation = tf.nn.leaky_relu)
        # self.df2_ = tf.layers.conv2d(self.df1_, 2 * self.message_space_size_, kernel_size = [1, 1],
        #     kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
        #     activation = tf.nn.leaky_relu)
        # self.df3_ = tf.layers.conv2d(self.df2_, 1 * self.message_space_size_, kernel_size = [1, 1],
        #     kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
        #     activation = None)
        # Cross-distractor convolutions: each output row mixes all
        # distractors' features (kernel spans the distractor axis).
        self.msg_from_df_1_ = []
        for _ in range(self.num_distractors_):
            self.msg_from_df_1_.append(tf.layers.conv2d(self.df2_, 32, kernel_size = [self.num_distractors_, 1],
                kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
                padding = 'valid', activation = tf.nn.leaky_relu))
        self.msg_est_tensor_1_ = tf.concat(self.msg_from_df_1_, axis = 1)
        self.msg_from_df_2_ = []
        for _ in range(self.num_distractors_):
            self.msg_from_df_2_.append(tf.layers.conv2d(self.msg_est_tensor_1_, 10, kernel_size = [self.num_distractors_, 1],
                kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
                padding = 'valid', activation = tf.nn.leaky_relu))
        self.msg_est_tensor_2_ = tf.concat(self.msg_from_df_2_, axis = 1)
        self.reg_varlist_ = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES) if v.name.startswith('Belief') or v.name.startswith('Teacher_Feature_Extract')]
        #######################
        #network belief update#
        #######################
        self.msg_est_tensor_2d_ = tf.squeeze(self.msg_est_tensor_2_, axis = 2, name = "pre_softmax")
        # NOTE(review): these two assignments replace the float ctor
        # arguments stored above with tf.Variables of the same name —
        # after this point self.belief_var_1d_/self.boltzman_beta_ are
        # tensors, not floats.
        self.belief_var_1d_ = tf.exp(tf.Variable(initial_value = self.belief_var_1d_, trainable = True, dtype = tf.float32))
        self.boltzman_beta_ = tf.Variable(initial_value = self.boltzman_beta_, trainable = False, dtype = tf.float32, name = 'boltzman_beta')
        # Indices of the non-zero (one-hot) entries of the message batch.
        self.msg_indices_ = tf.where(tf.not_equal(self.message_, 0))
        self.word_embedding_ = tf.get_variable(shape = [10, self.message_space_size_], name = 'word_embedding',
            initializer = tf.initializers.random_normal(mean = 0, stddev = 1e-2))
        self.msg_embeddings_ = tf.expand_dims(tf.transpose(tf.gather(self.word_embedding_, self.msg_indices_[:, 1], axis = 1)), 1)
        # Likelihood of the message per distractor (sigmoid of the dot
        # product between message embedding and distractor features).
        self.df_msg_2_norm_ = tf.nn.sigmoid(tf.reduce_sum(tf.multiply(self.msg_embeddings_, self.msg_est_tensor_2d_), axis = 2))
        # Bayesian-style update: prior (student belief) times likelihood,
        # with an extra "leak" column (belief_var_1d_) before normalizing.
        self.belief_pred_1_ = tf.multiply(self.df_msg_2_norm_, self.student_belief_)
        self.belief_pred_full_ = tf.concat([self.belief_pred_1_, self.belief_var_1d_ * tf.slice(tf.ones_like(self.belief_pred_1_), [0, 0], [-1, 1])], axis = 1)
        self.belief_pred_full_norm_ = tf.div_no_nan(self.belief_pred_full_, tf.reduce_sum(self.belief_pred_full_, axis = 1, keepdims = True))
        self.belief_pred_ = tf.slice(self.belief_pred_full_norm_, [0, 0], [-1, self.num_distractors_])
        self.regularization_ = 1e-3 * tf.add_n([ tf.nn.l2_loss(v) for v in self.reg_varlist_ if 'bias' not in v.name ])
        # Symmetric cross entropy between supervision and prediction.
        self.cross_entropy_1_ = -1 * tf.reduce_mean(tf.reduce_sum(tf.multiply(self.student_belief_spvs_, tf.math.log(self.belief_pred_ + 1e-9)), axis = 1))
        self.cross_entropy_2_ = -1 * tf.reduce_mean(tf.reduce_sum(tf.multiply(self.belief_pred_, tf.math.log(self.student_belief_spvs_ + 1e-9)), axis = 1))
        self.cross_entropy_ = self.cross_entropy_1_ + self.cross_entropy_2_
        # self.cross_entropy_1_ = -1 * tf.reduce_sum(tf.multiply(self.student_belief_spvs_, tf.math.log(self.belief_pred_ + 1e-9)), axis = 1)
        # self.cross_entropy_2_ = -1 * tf.reduce_sum(tf.multiply(self.belief_pred_, tf.math.log(self.student_belief_spvs_ + 1e-9)), axis = 1)
        # self.cross_entropy_ = tf.reduce_mean(tf.cast(tf.count_nonzero(self.student_belief_spvs_, axis = 1) / 4 + 1, tf.float32) * (self.cross_entropy_1_ + self.cross_entropy_2_))
        self.belief_train_varlist_ = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES) if v.name.startswith('Belief_Update')]
        self.belief_update_varlist_ = [v for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) if v.name.startswith('Belief_Update')]
        self.belief_update_train_op_ = self.belief_update_opt_.minimize(self.cross_entropy_, var_list = self.belief_train_varlist_, global_step=self.global_step)
        self.feature_update_train_op_ = self.feature_update_opt_.minimize(self.cross_entropy_, var_list = self.feature_train_varlist_)
        # self.feature_belief_update_train_op_ = self.belief_update_opt_.minimize(self.cross_entropy_)
    # Savers/loaders for the feature extractor and belief-update modules.
    self.feature_extract_saver_ = tf.train.Saver()
    self.feature_extract_loader_ = tf.train.Saver(self.feature_extract_varlist_)
    self.belief_update_saver_ = tf.train.Saver()
    self.belief_update_loader_ = tf.train.Saver(self.belief_update_varlist_)
    self.feature_belief_saver_ = tf.train.Saver()
    self.feature_belief_loader_ = tf.train.Saver(self.feature_extract_varlist_ + self.belief_update_varlist_)
    # print(self.feature_train_varlist_)
    # print(self.belief_train_varlist_)
    ####################
    # Q-network Module #
    ####################
    self.q_net_opt_ = tf.train.AdamOptimizer(learning_rate = 1e-5)
    with tf.variable_scope('q_net'):
        self.distct_feat_1_ = tf.layers.conv2d(self.feature_extracted_, 3 * self.message_space_size_, kernel_size = [1, 1],
            kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
            activation = tf.nn.leaky_relu)
        self.distct_feat_2_ = tf.layers.conv2d(self.distct_feat_1_, 2 * self.message_space_size_, kernel_size = [1, 1],
            kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
            activation = tf.nn.leaky_relu)
        # Weight each distractor's features by the predicted belief.
        self.distct_feat_2_weighted_ = tf.multiply(self.distct_feat_2_, tf.expand_dims(tf.expand_dims(self.belief_pred_, -1), -1))
        self.distcts_feat_1_ = []
        for _ in range(self.num_distractors_):
            self.distcts_feat_1_.append(tf.layers.conv2d(self.distct_feat_2_weighted_, 1 * self.message_space_size_, kernel_size = [self.num_distractors_, 1],
                kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
                padding = 'valid', activation = tf.nn.leaky_relu))
        self.distcts_feat_tensor_1_ = tf.concat(self.distcts_feat_1_, axis = 1)
        self.distcts_feat_2_ = []
        for _ in range(self.num_distractors_):
            self.distcts_feat_2_.append(tf.layers.conv2d(self.distcts_feat_tensor_1_, 1 * self.message_space_size_, kernel_size = [self.num_distractors_, 1],
                kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
                padding = 'valid', activation = None))
        self.distcts_feat_tensor_2_ = tf.concat(self.distcts_feat_2_, axis = 1)
        # Signed-exponential activation: exp(x) - 1 for x > 0, else 1 - exp(-x).
        self.custome_activaiton_ = lambda x: tf.where(tf.math.greater(x, 0), (tf.exp(x) - 1), (-1 * tf.exp(-x) + 1))
        self.distcts_feat_3_ = []
        for _ in range(self.num_distractors_):
            self.distcts_feat_3_.append(tf.layers.conv2d(self.distcts_feat_tensor_2_, 1, kernel_size = [self.num_distractors_, 1],
                kernel_initializer = tf.random_normal_initializer(mean = 0.0, stddev = 1e-1),
                padding = 'valid', activation = self.custome_activaiton_))
        self.distcts_feat_tensor_3_ = tf.concat(self.distcts_feat_3_, axis = 1)
        # Penalty weight applied to the belief mass leaked out of the
        # distractor set.
        self.value_param_1_ = tf.Variable(initial_value = -1, trainable = False, dtype = tf.float32)
        self.value_ = tf.reduce_sum(tf.multiply(tf.squeeze(self.distcts_feat_tensor_3_), self.teacher_belief_), axis = 1) +\
            (1 - tf.reduce_sum(self.belief_pred_, axis = 1)) * self.value_param_1_
        self.reg_varlist_q_ = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES) if v.name.startswith('q_net')]
        self.regularization_q_ = 1e-4 * tf.add_n([ tf.nn.l2_loss(v) for v in self.reg_varlist_q_ if 'bias' not in v.name ])
        self.q_net_loss_pre_ = tf.square(self.value_ - self.q_net_spvs_)
        # Re-weight failures so successes/failures contribute equally.
        self.success_mask_ = tf.to_float(tf.math.greater(self.q_net_spvs_, 0.0))
        self.fail_mask_ = tf.to_float(tf.math.greater(0.0, self.q_net_spvs_))
        self.imbalance_penalty_ = self.success_mask_ + self.fail_mask_ * tf.div_no_nan(tf.reduce_sum(self.success_mask_), tf.reduce_sum(self.fail_mask_))
        # self.q_net_loss_ = tf.reduce_mean(self.q_net_loss_pre_ * tf.to_float(self.q_net_loss_pre_ > 0.05) * self.imbalance_penalty_) + self.regularization_q_
        self.q_net_loss_ = tf.reduce_mean(self.q_net_loss_pre_ * self.imbalance_penalty_) + self.regularization_q_
        self.q_net_varlist_ = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES) if v.name.startswith('q_net')]
        self.q_net_train_op_ = self.q_net_opt_.minimize(self.q_net_loss_, var_list = self.q_net_varlist_)
    # Whole-graph savers; the loader skips optimizer (Adam) slots.
    self.total_loader_ = tf.train.Saver([v for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) if 'Adam' not in v.name])
    self.total_saver_ = tf.train.Saver()
def train_belief_update(self, data_batch, fix_feature):
    """Run one belief-update training step.

    Args:
        data_batch: dict with 'prev_belief', 'message', 'distractors'
            and 'new_belief' arrays.
        fix_feature: when True, only the belief-update variables are
            trained and (cross_entropy, belief_pred) is returned; when
            False the same train op runs and (cross_entropy,
            belief_pred, learning_rate) is returned.
    """
    feed = {
        self.student_belief_: data_batch['prev_belief'],
        self.message_: data_batch['message'],
        self.distractors_: data_batch['distractors'],
        self.student_belief_spvs_: data_batch['new_belief']}

    if fix_feature:
        _, ce_val, pred = self.sess.run(
            [self.belief_update_train_op_,
             self.cross_entropy_,
             self.belief_pred_],
            feed_dict=feed)
        return ce_val, pred

    _, ce_val, pred, lr_val = self.sess.run(
        [self.belief_update_train_op_,
         self.cross_entropy_,
         self.belief_pred_,
         self.learning_rate],
        feed_dict=feed)
    return ce_val, pred, lr_val
def pretrain_bayesian_belief_update(self, concept_generator, teacher_pretraining_steps, teacher_pretrain_batch_size,
                                    teacher_pretrain_ckpt_dir, teacher_pretrain_ckpt_name, continue_steps = 0, silent = False):
    """Pretrain the belief-update network on generated concept batches.

    Restores an existing checkpoint from ``teacher_pretrain_ckpt_dir``
    when one exists (then trains only ``continue_steps`` more steps);
    otherwise trains for ``teacher_pretraining_steps``.  Progress is
    printed every 100 steps unless ``silent``; checkpoints are written
    every 1000 steps and once at the end.
    """
    if not os.path.exists(teacher_pretrain_ckpt_dir):
        os.makedirs(teacher_pretrain_ckpt_dir)
    ckpt = tf.train.get_checkpoint_state(teacher_pretrain_ckpt_dir)
    train_steps = teacher_pretraining_steps
    if ckpt:
        self.feature_extract_loader_.restore(self.sess, ckpt.model_checkpoint_path)
        print('Loaded teacher belief update ckpt from %s' % teacher_pretrain_ckpt_dir)
        train_steps = continue_steps
    else:
        print('Cannot loaded teacher belief update ckpt from %s' % teacher_pretrain_ckpt_dir)
    accuracies = []
    l1_diffs = []
    bayesian_wrongs = []
    for ts in range(train_steps):
        data_batch = concept_generator.generate_batch(teacher_pretrain_batch_size)
        cross_entropy, belief_pred, lr = self.train_belief_update(data_batch, fix_feature = False)
        # Per-sample L1 distance between predicted and target beliefs.
        l1_diff = np.sum(abs(belief_pred - data_batch['new_belief']), axis = 1)
        correct = (l1_diff <= 5e-2)
        # Fraction of samples putting mass on candidates the Bayesian
        # target rules out entirely.
        bayesian_wrong = np.mean(np.sum((data_batch['new_belief'] == 0) * (belief_pred > 1e-5), axis = 1) > 0)
        accuracies.append(np.mean(correct))
        l1_diffs.append(np.mean(l1_diff))
        bayesian_wrongs.append(bayesian_wrong)
        # Drop into the debugger as soon as the prediction goes NaN.
        if np.sum(np.isnan(belief_pred)) != 0:
            print(belief_pred)
            pdb.set_trace()
        if ts % 100 == 0 and not silent:
            print('[T%d] batch mean cross entropy: %f, mean accuracies: %f, mean l1: %f, bayesian wrong: %f'\
                % (ts + 1, cross_entropy, np.mean(accuracies), np.mean(l1_diffs), np.mean(bayesian_wrongs)))
            print('new_belief \\ predict_belief: ')
            for n, p in zip(data_batch['new_belief'][:5], belief_pred[:5]):
                print(n)
                print(p)
                print()
            # for n, p, f in zip(data_batch['new_belief'][:5], belief_pred[:5], feature[:5]):
            #     print(n)
            #     print(p)
            #     print(np.transpose(f))
            #     print()
            # print('prior: ')
            # print(data_batch['prev_belief'][:10])
            # print('likelihood: ')
            # print(likelihood)
            # print('posterior: ')
            # print(posterior)
            boltzman_beta, belief_var_1d = self.sess.run([self.boltzman_beta_, self.belief_var_1d_])
            print('boltzman_beta: %f, belief_var_1d: %f' % (boltzman_beta, belief_var_1d))
            if np.mean(accuracies) > 0.0:
                #idx = np.random.randint(teacher_pretrain_batch_size)
                idx = 3
                for i in range(idx):
                    print('\t target:', data_batch['new_belief'][i, :])
                    print('\t predict', belief_pred[i, :])
            # Reset the running diagnostics after each report.
            accuracies = []
            l1_diffs = []
            bayesian_wrongs = []
        if (ts + 1) % 1000 == 0:
            self.feature_belief_saver_.save(self.sess, os.path.join(teacher_pretrain_ckpt_dir,
                                                                    teacher_pretrain_ckpt_name),
                                            global_step = teacher_pretraining_steps)
            print('Saved teacher belief update ckpt to %s after %d training'\
                % (teacher_pretrain_ckpt_dir, ts))
    if train_steps != 0:
        self.feature_belief_saver_.save(self.sess, os.path.join(teacher_pretrain_ckpt_dir,
                                                                teacher_pretrain_ckpt_name),
                                        global_step = teacher_pretraining_steps)
        print('Saved teacher belief update ckpt to %s after %d training'\
            % (teacher_pretrain_ckpt_dir, train_steps))
def train_q_net(self, data_batch):
    """Run one optimization step of the teacher's Q-network.

    data_batch: dict of batch-first numpy arrays with keys 'target_q',
        'student_belief', 'message', 'distractors', 'teacher_belief'.
    Returns the scalar Q-learning loss for this batch.
    Side effects: prints the loss, the mean predicted value over targets
    equal to 0.8 and -0.2, and a random 10-wide window of predictions.
    """
    _, q_net_loss, value = self.sess.run([self.q_net_train_op_, self.q_net_loss_, self.value_],
        feed_dict = {self.q_net_spvs_: data_batch['target_q'],
                     self.student_belief_: data_batch['student_belief'],
                     self.message_: data_batch['message'],
                     self.distractors_: data_batch['distractors'],
                     self.teacher_belief_: data_batch['teacher_belief']})
    print('Q learning loss: %f' % q_net_loss)
    # Random starting index for the eyeball-check window printed below.
    ridx = np.random.randint(value.shape[0])
    #print(value[ridx], data_batch['target_q'][ridx])
    # Mean predicted value among examples whose target is exactly 0.8
    # resp. -0.2 (divides by zero if a level is absent from the batch).
    print('0.8: %f, 0.2: %f' % (np.sum(value * (data_batch['target_q'] == 0.8)) / np.sum(data_batch['target_q'] == 0.8),
                        np.sum(value * (data_batch['target_q'] == -0.2)) / np.sum(data_batch['target_q'] == -0.2)))
    print('Teacher value est:', value[ridx: ridx + 10], data_batch['target_q'][ridx: ridx + 10])
    #print(distcts_feat_tensor_3[ridx, :])
    return q_net_loss
def get_q_value_for_all_msg(self, teacher_belief, student_belief, embeded_concepts):
    """Evaluate the Q-network for one state under every possible message.

    Tiles the single teacher belief, student belief and concept
    embeddings message_space_size_ times and feeds the identity matrix
    as the batch of one-hot messages, so row i of the result corresponds
    to sending message i.

    Returns (q_values, belief_pred, distcts_feat_tensor_3, belief_dst,
    msg_est) where msg_est is the estimate tensor for the first row only.
    NOTE(review): the fetch list requests self.value_ twice, so q_values
    and belief_dst are the same values -- confirm whether belief_dst was
    meant to fetch a different tensor (e.g. a belief-distance op).
    """
    all_msg_embeddings = np.identity(self.message_space_size_)
    teacher_belief_tile = np.tile(teacher_belief, (self.message_space_size_, 1))
    student_belief_tile = np.tile(student_belief, (self.message_space_size_, 1))
    embeded_concepts_tile = np.tile(embeded_concepts, (self.message_space_size_, 1, 1, 1, 1))
    q_values, belief_pred, distcts_feat_tensor_3, belief_dst, msg_est_tensor = self.sess.run([self.value_, self.belief_pred_, self.distcts_feat_tensor_3_, self.value_, self.msg_est_tensor_2_],
        feed_dict = {self.distractors_: embeded_concepts_tile,
                     self.message_: all_msg_embeddings,
                     self.teacher_belief_: teacher_belief_tile,
                     self.student_belief_: student_belief_tile})
    return q_values, belief_pred, distcts_feat_tensor_3, belief_dst, msg_est_tensor[0]
def update_net(self, belief_update_tuples, q_learning_tuples, update_term = 'Both'):
    """Train the belief-update head and/or the Q-network from replay tuples.

    belief_update_tuples: iterable of
        (distractors, prev_belief, message, new_belief) tuples.
    q_learning_tuples: iterable of
        (distractors, student_belief, teacher_belief, message, target_q).
    update_term: 'Both', 'Belief' or 'Q-Net' -- selects which heads train.
    Returns debug_structure: holds the teacher belief predictions when
    the belief head was trained, otherwise empty.

    NOTE(review): train_belief_update is unpacked into two values here
    but into three (..., lr) elsewhere in this class -- one call site is
    out of date; confirm the method's return arity.
    """
    debug_structure = {}
    # Re-pack the belief tuples into a dict of stacked numpy arrays.
    belief_update_batch = {}
    belief_update_batch['prev_belief'] = []
    belief_update_batch['new_belief'] = []
    belief_update_batch['message'] = []
    belief_update_batch['distractors'] = []
    for belief_tuple in belief_update_tuples:
        belief_update_batch['distractors'].append(belief_tuple[0])
        belief_update_batch['prev_belief'].append(belief_tuple[1])
        belief_update_batch['message'].append(belief_tuple[2])
        belief_update_batch['new_belief'].append(belief_tuple[3])
    for k in belief_update_batch:
        belief_update_batch[k] = np.array(belief_update_batch[k])
    if update_term == 'Both' or update_term == 'Belief':
        # fix_feature = True: the shared feature extractor is held fixed
        # here (pretraining passes fix_feature = False instead).
        cross_entropy, belief_pred = self.train_belief_update(belief_update_batch, fix_feature = True)
        print('Teacher\'s belief esimate cross_entropy: %f' % cross_entropy)
        debug_structure['teacher_belief_prediction'] = belief_pred
    # Same re-packing for the Q-learning tuples.
    q_learning_batch = {}
    q_learning_batch['student_belief'] = []
    q_learning_batch['teacher_belief'] = []
    q_learning_batch['message'] = []
    q_learning_batch['distractors'] = []
    q_learning_batch['target_q'] = []
    for q_learning_tuple in q_learning_tuples:
        q_learning_batch['distractors'].append(q_learning_tuple[0])
        q_learning_batch['student_belief'].append(q_learning_tuple[1])
        q_learning_batch['teacher_belief'].append(q_learning_tuple[2])
        q_learning_batch['message'].append(q_learning_tuple[3])
        q_learning_batch['target_q'].append(q_learning_tuple[4])
    for k in q_learning_batch:
        q_learning_batch[k] = np.array(q_learning_batch[k])
    if update_term == 'Both' or update_term == 'Q-Net':
        q_net_loss = self.train_q_net(q_learning_batch)
    return debug_structure
if __name__ == '__main__':
main()
|
984,353 | abd5b9d46f8644b2e4669c7df1d6765b049c76b0 | N,M = list(map(int, input().split()))
if M == 0:
print(N);
exit()
xy = [map(int, input().split())
for _ in range(M)]
A, B = [list(i) for i in zip(*xy)]
dic = {}
for i in range(len(A)):
A[i] -= 1
B[i] -= 1
if A[i] not in dic:
dic[A[i]] = [B[i]]
else:
dic[A[i]].append(B[i])
def dfs(stt: int, dic):
    """Count the towns reachable from ``stt`` (including ``stt`` itself).

    dic maps a town index to the list of towns reachable by one road.
    Despite the name, the original code was a breadth-first traversal
    (pop from the front of the list); traversal order is irrelevant for
    counting, so a deque is used for O(1) front pops instead of the
    original O(n) ``list.pop(0)``.

    Bug fix: the visited set was seeded with the *global* loop variable
    ``n`` instead of the parameter ``stt``; it only worked by accident
    because the caller always passed stt == n.
    """
    from collections import deque
    frontier = deque([stt])
    seen = {stt}
    while frontier:
        cur_town = frontier.popleft()
        # Towns with no outgoing roads are simply absent from dic.
        for next_town in dic.get(cur_town, ()):
            if next_town not in seen:
                seen.add(next_town)
                frontier.append(next_town)
    return len(seen)
ans = 0
for n in range(N):
stt = n #ๅบ็บ
# DFSใใ
ans += dfs(stt, dic)
print(ans) |
984,354 | 7f8521f919415c45971e1a4fa51f2131f9ffa220 | from django.shortcuts import render, HttpResponse, redirect
from .models import Team, User_Team
from django.contrib import messages
def success(request):
    """Render the success page with all teams and the user's teams.

    Bug fix: the original context literal was a SyntaxError (missing ':'
    after "your_team") and referenced the undefined name ``Teams``.
    """
    context = {
        "teams": Team.objects.all(),
        # NOTE(review): filter() with no arguments returns every team;
        # presumably this should be narrowed to the logged-in user's
        # teams (e.g. via User_Team) -- confirm the intended query.
        "your_team": Team.objects.filter(),
    }
    return render(request, 'team_app/success.html', context)
def create_team(request):
    """Create a team from the POSTed form data, then show the success page.

    NOTE(review): Team.objects.create_team appears to be a custom manager
    method returning True on success -- confirm.  On failure this view
    falls through and returns None, which Django rejects with
    "didn't return an HttpResponse"; the else branch needs a redirect or
    an error render.
    """
    create_team = Team.objects.create_team(request.POST)
    if create_team == True:
        return redirect('/success')
    else:
        pass
def remove(request, id):
    """Show the confirmation page for removing the team with this id."""
    team = Team.objects.get(id=id)
    return render(request, 'team_app/remove.html', {'teams': team})
def delete(request, id):
    """Delete the team with this id and return to the success page."""
    doomed = Team.objects.filter(id=id)
    doomed.delete()
    return redirect('/success')
# def current_team(request, id):
# context = {
# 'current_team': Teams.objects.get(id = id)
# }
# return render(request, 'team_app/teams.html')
# def login(request):
# login_user = User.objects.login(request.POST)
# if login_user == True:
# return render(request, 'login_app/success.html')
# else:
# for i in login_user[1]:
# messages.error(request, i)
# return redirect("/")
#
# def register(request):
# register_user = User.objects.register(request.POST)
# if register_user == True:
# return render(request, 'login_app/success.html')
# else:
# for i in register_user[1]:
# messages.error(request, i)
# return redirect("/")
|
984,355 | 2c6f2d46b0ac0c0c312bed4c048075f2f3d9e188 | #!/usr/bin/env python
import math
import psycopg2
import random
import sys
from tournament import connect, \
playerStandings, \
registerPlayer, \
reportMatch, \
swissPairings
from util.logger import logger
def create_db():
    """
    Create tournament database.
    """
    # The tournament DB cannot be created from inside itself, so connect
    # to the default 'postgres' database and issue the DDL from there.
    connection = psycopg2.connect(dbname='postgres')
    # CREATE/DROP DATABASE refuse to run inside a transaction block,
    # hence the session is switched to autocommit first (which also
    # makes an explicit commit unnecessary).
    connection.set_session(autocommit=True)
    cursor = connection.cursor()
    cursor.execute("DROP DATABASE IF EXISTS tournament;")
    cursor.execute("CREATE DATABASE tournament;")
    cursor.close()
    connection.close()
def create_tables():
    """
    Create players and matches tables.

    players: auto-incrementing serial id plus display name.
    matches: one row per played match; both columns reference players
    and the (winner, loser) pair is the primary key.
    """
    conn = connect()
    c = conn.cursor()
    # Create players table
    c.execute(
        """
        CREATE TABLE players (
            name text NOT NULL,
            id serial PRIMARY KEY
        );
        """)
    # Create matches table
    c.execute(
        """
        CREATE TABLE matches (
            winner int REFERENCES players (id),
            loser int REFERENCES players (id),
            PRIMARY KEY (winner, loser)
        );
        """)
    conn.commit()
    conn.close()
def create_indices():
    """
    Create indices for tables.
    """
    conn = connect()
    c = conn.cursor()
    # To prevent rematch btw players: a unique functional index on the
    # unordered pair, so (A beats B) and (B beats A) cannot both exist.
    c.execute(
        """
        CREATE UNIQUE INDEX matches_uniq_idx ON matches
            (greatest(winner, loser), least(winner, loser));
        """)
    conn.commit()
    conn.close()
def create_views():
    """
    Create the views for the following:
        v_numMatches: The number of matches each player has played
        v_numWins: The number of wins for each player
        v_playerStandings: (id, name, wins, matches), best record first
    """
    conn = connect()
    c = conn.cursor()
    # Create v_numMatches view
    # LEFT JOIN keeps players who have not played yet (count 0).
    c.execute(
        """
        CREATE VIEW v_numMatches AS
            SELECT id, COUNT(winner) AS matchesPlayed
            FROM players LEFT JOIN matches
            ON (winner = id OR loser = id)
            GROUP BY players.id
            ORDER BY players.id;
        """)
    # Create v_numWins view
    c.execute(
        """
        CREATE VIEW v_numWins AS
            SELECT players.id, COUNT(winner) AS wins
            FROM players LEFT JOIN matches
            ON players.id = matches.winner
            GROUP BY players.id
            ORDER BY wins DESC;
        """)
    # Create v_playerStandings view
    # Joins the two views above into the standings table consumed by
    # playerStandings()/swissPairings().
    c.execute(
        """
        CREATE VIEW v_playerStandings AS
            SELECT players.id, players.name, v_numWins.wins,
                v_numMatches.matchesPlayed AS matches
            FROM players
                LEFT JOIN v_numWins ON
                (players.id = v_numWins.id)
                JOIN v_numMatches ON (players.id = v_numMatches.id)
            ORDER BY wins DESC;
        """)
    conn.commit()
    conn.close()
if __name__ == '__main__':
# start logging
logger.info('Started')
# create the tournament DB
create_db()
logger.info('Created DB')
# create tables and views
create_tables()
logger.info('Created tables')
create_indices()
logger.info('Created indices')
create_views()
logger.info('Created views')
# Register players
PLAYERS = ['Player 1', 'Player 2', 'Player 3', 'Player 4', 'Player 5', 'Player 6', 'Player 7', 'Player 8', 'Player 9', 'Player 10', 'Player 11', 'Player 12', 'Player 13', 'Player 14', 'Player 15', 'Player 16', 'Player 17', 'Player 18', 'Player 19', 'Player 20', 'Player 21', 'Player 22', 'Player 23', 'Player 24', 'Player 25', 'Player 26', 'Player 27', 'Player 28', 'Player 29', 'Player 30', 'Player 31', 'Player 32', 'Player 33', 'Player 34', 'Player 35', 'Player 36', 'Player 37', 'Player 38', 'Player 39', 'Player 40', 'Player 41', 'Player 42', 'Player 43', 'Player 44', 'Player 45', 'Player 46', 'Player 47', 'Player 48', 'Player 49', 'Player 50', 'Player 51', 'Player 52', 'Player 53', 'Player 54', 'Player 55', 'Player 56', 'Player 57', 'Player 58', 'Player 59', 'Player 60', 'Player 61', 'Player 62', 'Player 63', 'Player 64',]
# Shuffle PLAYERS in order to have a random list
random.shuffle(PLAYERS)
# Register all players
for player in PLAYERS:
registerPlayer(player)
logger.info('Registered all players')
game_rounds = int(math.log(len(PLAYERS), 2))
# Allow the app to try 5 times before gracefully quiting with an error
# message.
tries = 1
for game_round in xrange(game_rounds):
logger.info('%s Round: %s %s', '=' * 10, game_round, '=' * 10)
try:
logger.info("\t'populate.py' Try: %s", tries)
sp = swissPairings()
for pair in sp:
winner_id = pair[0]
loser_id = pair[2]
reportMatch(winner_id, loser_id)
except psycopg2.IntegrityError as e:
logger.error(e)
tries += 1
if tries > 5:
msg = """
The app exceeded number of allowed tries (5). Please try again
later.
"""
logger.info(msg)
print msg
sys.exit(1)
msg = "All players matched successfully in %s attempts!" % tries
logger.info(msg)
print msg
sys.exit(0)
|
984,356 | 4a2d676fd93064a70aa5b58449270664b36ed164 | from RVObject import RVObject
import xml.etree.ElementTree as xmltree
class NSNumber(RVObject):
    """An <NSNumber> XML element: a numeric value plus a type-hint string."""

    def __init__(self, xmlelement=None):
        # Defaults used when building a number from scratch.
        self.hint = "float"
        self.value = 0
        if xmlelement is None:
            return
        self.deserializexml(xmlelement)

    def deserializexml(self, xmlelement):
        """Populate hint/value from an existing <NSNumber> element.

        Note: value is kept as the raw text string, not parsed.
        """
        self.hint = xmlelement.get('hint')
        self.value = xmlelement.text

    def serializexml(self):
        """Build and return an <NSNumber> element for this value."""
        xmlelement = xmltree.Element('NSNumber')
        xmlelement.set('hint', self.hint)
        # Bug fix: ElementTree requires .text to be a string, but the
        # default value is the int 0, which made serializing a fresh
        # NSNumber fail.  Deserialized values are already strings, so
        # str() is a no-op for them.
        xmlelement.text = str(self.value)
        return xmlelement
class RVEffectFloatVariable(RVObject):
    """A float-valued effect parameter: name, range, default and current value."""

    def __init__(self, xmlelement=None):
        # Fresh defaults; replaced below when an element is supplied.
        self.type = 1
        self.name = ""
        self.min = -1
        self.max = 1
        self.defValue = 0
        self.value = 0
        if xmlelement is not None:
            self.deserializexml(xmlelement)

    def deserializexml(self, xmlelement):
        """Read every attribute of an <RVEffectFloatVariable> element."""
        self.name = xmlelement.get('name')
        for attr in ('type', 'min', 'max', 'defValue', 'value'):
            setattr(self, attr, float(xmlelement.get(attr)))

    def serializexml(self):
        """Emit an <RVEffectFloatVariable> element mirroring deserializexml."""
        element = xmltree.Element('RVEffectFloatVariable')
        element.set('type', "{:.0f}".format(self.type))
        element.set('name', self.name)
        element.set('min', "{:.6f}".format(self.min))
        element.set('max', "{:.6f}".format(self.max))
        element.set('defValue', "{:.6f}".format(self.defValue))
        element.set('value', "{:.6f}".format(self.value))
        return element
984,357 | 27b9d02be31131ad6f46893e04440263f85c3cdf | import smbus
import RPi.GPIO as GPIO
class I2cRelay(object):
    """A relay channel driven over the I2C bus."""
    def __init__(self, dev_addr, slave_addr):
        # dev_addr: I2C address of the relay board;
        # slave_addr: register selecting the individual relay channel.
        self._dev_addr = dev_addr
        self._slave_addr = slave_addr
        # BUS -- bus 1 is the user-facing I2C bus on Raspberry Pi.
        self._bus = smbus.SMBus(1)
    def open(self):
        # Writes 0xff to the channel register -- presumably energizes the
        # relay ("open" in this board's terminology); confirm polarity
        # against the board's datasheet.
        self._bus.write_byte_data(self._dev_addr, self._slave_addr, 0xff)
    def close(self):
        self._bus.write_byte_data(self._dev_addr, self._slave_addr, 0x00)
class GPIORelay(object):
    """A relay driven directly from a GPIO pin."""
    def __init__(self, pin):
        # pin: BCM-numbered GPIO pin wired to the relay input.
        self._pin = pin
        # GPIO -- global mode setup; warnings off so re-instantiating
        # after a previous run does not spam the console.
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        GPIO.setup(self._pin, GPIO.OUT)
    def open(self):
        # Drives the pin low for "open" -- presumably an active-low relay
        # module; confirm against the wiring.
        GPIO.output(self._pin, 0)
    def close(self):
        GPIO.output(self._pin, 1)
if __name__ == '__main__':
import time
_i2c = I2cRelay(0x11, 1) # slave=1, 2, 3, 4
_i2c.open()
time.sleep(1)
_i2c.close()
_gpio = GPIORelay(19) # pin=19, 13, 6, 5, 11, 9, 10, 22, 27
_gpio.open()
time.sleep(1)
_gpio.close()
|
984,358 | 1ac974ff6619b3f0e4ec41972ed7fe7cdf9658cc | import os;
'''
ๆดๆขๆๅญ
'''
# with open('''/Users/fuzhipeng/blog/source/_posts/uptest.md''',"r") as f:
# content=f.readlines()
#
# with open('''/Users/fuzhipeng/blog/source/_posts/uptest.md''',"w") as f:
# for line in content:
# f.writelines(line.replace("test","test_"))
cdClean='''hexo c'''
cdGenerate='''hexo g'''
cdDeploy='''hexo d'''
cds=[
cdClean,
cdGenerate,
cdDeploy,
]
os.chdir("/Users/fuzhipeng/blog")
# os.popenๆฏ้ปๅก็๏ผไธๆง่กๅฎไธไผๆง่กไธๆก่ฏญๅฅ็
for cd in cds:
print(os.popen(cd).readlines()) |
984,359 | 2ffec30f96104d3b327bf039bcec7768f8ce754e | from aw import build_app
debug = True
app = build_app()
app.run(debug=debug)
|
984,360 | 6de6eb9043f6288c912b1121cd0cccce71b36b4e |
def nextPrime(n) :
while(True):
n +=1
for j in range(2,n,1) :
if n%j==0:
break
else :
return n
x = int(input(('Enter number for next prime\n')))
print(nextPrime(x))
|
984,361 | 8d94997e803a7f0077b17de94208c3f55579c0d7 | class Solution:
"""
@param A: an integer sorted array
@param target: an integer to be inserted
@return: An integer
"""
def searchInsert(self, A, target):
if len(A) == 0:
return 0
elif len(A) > 0:
leftIndex = 0
rightIndex = len(A) - 1
if A[leftIndex] >= target:
return 0
elif A[rightIndex] < target:
return rightIndex + 1
else:
while leftIndex <= rightIndex:
midIndex = (leftIndex + rightIndex) / 2
if A[midIndex] == target:
return midIndex
elif A[midIndex] < target:
leftIndex = midIndex + 1
else:
rightIndex = midIndex - 1
if A[leftIndex] >= target:
return leftIndex
elif A[rightIndex] < target:
return rightIndex + 1 |
984,362 | f970baeb1b67e54e34b9e9dc33af3e6129a6d64d | from Ship import Ship
class Flota():
def __init__(self):
self.ship_list = []
self.backup_list = []
def load_flota_file(self, file_name):
"""
:param file_name: file where flota is
:return: ship list with all ships from fille
"""
everything_from_file = open(file_name).readlines()
ships_in_flota = []
ship_list =[]
for i in everything_from_file:
ships_in_flota.append(i.rsplit())
for i in ships_in_flota:
for j in range(0,int(i[1])):
ship_list.append(Ship(i[0]))
self.ship_list = ship_list
self.backup_list = ship_list
def load_flota_list(self, data):
"""
create flota from ship list
:param data: list ships to create for example [("mt",100),("dt",1)]
:return: list with object class Ship
"""
ship_list =[]
for i in data:
for j in range(0,i[1]):
ship_list.append(Ship(i[0]))
self.ship_list = ship_list
self.backup_list = ship_list
def reset(self):
"""
reset ship list for example when old one is changed
"""
self.ship_list = self.backup_list
def __str__(self):
return str(self.ship_list)
def __len__(self):
return len(self.ship_list)
def __iter__(self):
for i in range(0,len(self.ship_list)):
yield self.ship_list[i]
def __setitem__(self, key, value):
self.ship_list = value
|
984,363 | 12c500873882e3659680dec5e6e991d72f33a805 | import numpy as np
import matplotlib.pyplot as plt
#Ejercicio 1
nu=np.random.uniform(-10,10,1000)
plt.hist(nu, label="datos uniformes")
plt.title("Valores uniformes")
plt.ylabel("")
plt.xlabel("")
plt.legend()
plt.savefig("uniforme.pdf")
centro=17
sigma=5
ng=np.random.normal(centro,sigma,1000)
plt.hist(ng, label="datos normales")
plt.title("Valores normales")
plt.legend()
plt.savefig("gausiana.pdf")
#Ejercicio 2
datosR=np.random.uniform(0,30.5,1000)
cuadrado=np.linspace(0,30.5,1000)
plt.scatter(cuadrado,datosR)
plt.title("datos dentro del cuadrado")
plt.savefig("cuadrado.pdf")
r=23
datosC=np.random.uniform(-23,23,1000)
areaC=np.pi*(r**2)
circulo=[]
for i in range(1000):
if (datosC[i]<=areaC):
circulo.append(datosC[i])
plt.scatter(circulo,datosC)
plt.title("datos dentro del circulo")
plt.savefig("circulo.pdf")
#Ejercicio 4
npasos=100
N=1000
sigma=0.25
pasosx=np.empty((0))
pasosy=np.empty((0))
pasosx=np.append(pasosx, np.random.random())
pasosy=np.append(pasosy, np.random.random())
for i in range(N):
for j in range(npasos):
pasosx = np.random.normal(pasosx[i][j], sigma)
pasosy = np.random.normal(pasosy[i][j], sigma)
if (pasosx[i][j] >30.5):
pasosx[i][j]=0+pasosx[i][j]
if (pasosy[i][j]>30.5):
pasosy[i][j]=0+pasosy[i][j]
|
984,364 | 962025df2abf3deea6e50e105e28bc7a58b44765 | #libraries
from Xception_Model import Xception_Model
from PreProcessing import PreProcessing
from keras.models import model_from_json
if __name__ == '__main__':
# Directory path for images
Base_directory = '/kaggle/input/flame-dataset-fire-classification'
test_path = 'Test/Test'
Training_path = 'Training/Training'
input_shape = (254, 254, 3)
image_size = (254,254)
batch = 16
labels = ['Fire', 'No_Fire']
# defining the full path for the files
Full_Training_path = '{0}/{1}'.format(Base_directory, Training_path)
Full_Test_path = '{0}/{1}'.format(Base_directory, test_path)
#object of PreProcessing class
pp = PreProcessing()
#image generators
train_generator,validation_generator,test_generator = pp.image_generators(Full_Training_path=Full_Training_path,Full_Test_path=Full_Test_path,batch=batch,img_size=image_size)
#object of Xception_Model Class
Xception_mdl = Xception_Model()
# If user wants to start training then press 1 else input 2
mode = input("Please Enter 1 for Training, 2 for loading the saved Model for Evaluation")
if int(mode)==1:
# create the Xception Model
model = Xception_mdl.create_Model(input_shape)
#Train the model
model,history = Xception_mdl.train_model(model,train_generator,validation_generator)
elif int(mode)==2:
# load json and create model
json_file = open('{0}/{1}'.format(Base_directory, 'Xception_saved_model.json'), 'r')
loaded_model_json = json_file.read()
json_file.close()
model = model_from_json(loaded_model_json)
# Load the saved weights
model.load_weights('{0}/{1}'.format(Base_directory, 'Xception_saved_weights.h5'))
else:
print("wrong option please start again")
if int(mode) in [1,2]:
# evaluate the model
Xception_mdl.model_evaluation(model, test_generator)
|
984,365 | c164c1a3668071d80c688be93409b6c9712d4a68 | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AdiEmsWebApiV2DtoUploadUploadProcessingStatus(Model):
"""Represents the status of an upload processing through EMS.
:param download_record: The upload's download record, if known
:type download_record: int
:param download_state: A description of the download record's processing
state. This should be checked before assuming
that the download is correct and complete. Possible values include:
'notProcessed', 'processed', 'failure'
:type download_state: str or ~emsapi.models.enum
:param flights: The upload's flight information, if known
:type flights:
list[~emsapi.models.AdiEmsWebApiV2DtoUploadUploadProcessingFlightStatus]
:param error_message: Any error message associated with the processing of
the upload, if any
:type error_message: str
"""
_attribute_map = {
'download_record': {'key': 'downloadRecord', 'type': 'int'},
'download_state': {'key': 'downloadState', 'type': 'str'},
'flights': {'key': 'flights', 'type': '[AdiEmsWebApiV2DtoUploadUploadProcessingFlightStatus]'},
'error_message': {'key': 'errorMessage', 'type': 'str'},
}
def __init__(self, download_record=None, download_state=None, flights=None, error_message=None):
super(AdiEmsWebApiV2DtoUploadUploadProcessingStatus, self).__init__()
self.download_record = download_record
self.download_state = download_state
self.flights = flights
self.error_message = error_message
|
984,366 | 42d40635e4ddbb4fbc5ad51aac0611173c8568e4 | from django.db.models import Avg
from api.v1.match_player_stat.models import MatchPlayerStat
from api.v1.player.models import Player
class PlayerUtil:
    @staticmethod
    def filter_from_percentile(player_queryset, percentile):
        """Return the players whose average match score is strictly above
        the given percentile of the queryset's average scores.

        :param player_queryset: queryset of Player rows to rank
        :param percentile: e.g. 90 keeps roughly the top 10% of players
        Players with no MatchPlayerStat rows count as average 0.
        NOTE(review): raises IndexError on an empty queryset, and when
        round(...) yields 0 the index becomes -1, so the *highest*
        average is used as the threshold -- confirm both edge cases.
        """
        # get all team player average values
        pl_avg_list = []
        avg_scores = []
        for player in player_queryset:
            avg_score = MatchPlayerStat.objects.filter(player=player).aggregate(Avg('score')).get('score__avg')
            if not avg_score:
                avg_score = 0
            pl_avg = {
                'id': player.id,
                'avg': avg_score
            }
            pl_avg_list.append(pl_avg)
            avg_scores.append(avg_score)
        # sort lowest to highest
        avg_scores.sort()
        # calculate 90th percentile value
        index = round(percentile * len(avg_scores) / 100)
        percentile_threshold = avg_scores[index - 1]
        # filter players according to percentile threshold value
        # (strictly greater: players tied with the threshold drop out)
        eligible_players = []
        for pl_avg in pl_avg_list:
            score = pl_avg.get('avg')
            if score > percentile_threshold:
                player_id = pl_avg.get('id')
                eligible_players.append(player_id)
        eligible_quertset = Player.objects.filter(id__in=eligible_players)
        return eligible_quertset
|
984,367 | a464b5236d18c8477b193a3f1aeddf949e72227d | # Accessing the list using index
# Demo: positional access to a list, printed with ordinal labels.
books = ["Learn Python the hard way", "Web Application Pentesting", "The Art of Exploitation"]
for ordinal, title in zip(("First", "Second", "Third"), books):
    print("{} Book is : {}".format(ordinal, title))
984,368 | b26461e980ba6f99a8c0963b3345a69ddee07ed8 | # encoding: utf-8
import os
import shutil
FPATH = os.path.dirname(os.path.realpath(__file__))
MOCKUPS_PATH = FPATH + '/mockups'
def clear_mockups_out():
    """Delete the generated mockups output directory and all its contents."""
    out_dir = MOCKUPS_PATH + '/out'
    shutil.rmtree(out_dir)
def remove_if_exists(fpath):
    """Delete the file at ``fpath``; do nothing if it does not exist.

    Robustness fix: uses EAFP instead of the original exists()-then-
    remove() pair, which had a TOCTOU race (the file could vanish
    between the check and the removal) and skipped dangling symlinks.
    """
    try:
        os.remove(fpath)
    except FileNotFoundError:
        pass
984,369 | 8b629bbecd06e90e684aa156b9cfa31a18ce8635 | class Solution(object):
def maxCoins(self, piles):
"""
:type piles: List[int]
:rtype: int
"""
piles=sorted(piles)
piles=piles[::-1]
tot=0
c=(len(piles)/3)
i=1
while (i<len(piles)) and (c>0):
tot=tot+piles[i]
i=i+2
c=c-1
return(tot)
|
984,370 | a40775df6a91647c4829e16a7c19008e0ca5fb1a | n = int(input())
l = []
while n != 1:
l.append(n)
if n%2 == 0: n = n // 2
else: n = 3*n + 1
l.append(n)
print('->'.join([str(i) for i in l[-15:]])) |
984,371 | e2ed61d420e5d30e5597f86604e95ebd96ca6cbf | """En tu programa pide al usuario ingresar 3 nรบmeros: un lรญmite inferior,
un lรญmite superior y uno de comparaciรณn.
Si tu nรบmero de comparaciรณn se encuentra en el rango de los dos lรญmites, imprรญmelo en pantalla.
En caso de estar por debajo del inferior o arriba del superior, tambiรฉn muรฉstralo en pantalla y
pide ingresar otro nรบmero para repetir el proceso.
"""
import random
def numbers():
    """One round of a guessing game: draw a hidden number in [0, 100) and
    compare it against user-supplied bounds and a guess.

    NOTE(review): the module docstring asks to compare the *user's*
    number against the limits, but this code checks whether the hidden
    random number lies in the range; the user's number is only used for
    the exact-match case.  Confirm which behaviour is intended.
    Retries recurse, so a long unlucky streak could in principle hit the
    recursion limit.
    """
    number_comp = random.choice(range(0, 100))
    number_min = int(input("insert an inferior limit: "))
    number_max = int(input("insert a superior limit: "))
    number_user = int(input("insert a number to compare: "))
    if number_comp == number_user:
        print(f"Congratulations, you guessed it!")
    elif number_comp >= number_min and number_comp <= number_max:
        print(f"My number was {number_comp}")
    else:
        print(f"My number was {number_comp} and is not in the range. Don't worry, let's go again!")
        numbers()
if __name__ == '__main__':
numbers() |
984,372 | 31b364368b428294dfd94a7b0f2c22d028e1d17e | import numpy as np
################################# Task 2.1: Convolution --- basic forward pass
from conv_layers import conv_layer_forward
batch_size = 1
num_filters = 2
channels_x, height_x, width_x = 3, 4, 4
height_w, width_w = 3, 3
stride = 1
pad_size = 1
x_shape = (batch_size, channels_x, height_x, width_x)
w_shape = (num_filters, channels_x, height_w, width_w)
input_layer = np.linspace(-0.4, 0.3, num=np.prod(x_shape)).reshape(x_shape)
weight = np.linspace(-0.2, 0.3, num=np.prod(w_shape)).reshape(w_shape)
bias = np.linspace(-0.1, 0.2, num=num_filters)
output_layer = conv_layer_forward(input_layer, weight, bias, pad_size, stride)
correct_out = np.array(
[[[[ 0.15470494, 0.28520674, 0.26826174, 0.14451626], # y[0, 0, 0, :]
[ 0.28745885, 0.47927338, 0.44816540, 0.25953031], # y[0, 0, 1, :]
[ 0.20956242, 0.35484143, 0.32373344, 0.17151746], # y[0, 0, 2, :]
[ 0.07288238, 0.14856283, 0.12403051, 0.03908872]], # y[0, 0, 3, :]
[[ 0.07425532, 0.04867523, 0.10001606, 0.15511441], # y[0, 1, 0, :]
[ 0.15335608, 0.17933360, 0.25065436, 0.26199920], # y[0, 1, 1, :]
[ 0.34860297, 0.46461662, 0.53593737, 0.44712967], # y[0, 1, 2, :]
[ 0.35662385, 0.45831794, 0.50207146, 0.41387796]]]] # y[0, 1, 3, :]
)
print('Output_layer valid?:',np.array_equal(np.round(output_layer,decimals=8), np.round(correct_out,decimals=8)))
################################# Task 2.1: Convolution --- basic forward pass [MULTI]
from conv_layers import conv_layer_forward
batch_size = 2
num_filters = 2
channels_x, height_x, width_x = 3, 5, 5
height_w, width_w = 3, 3
stride = 2
pad_size = 1
x_shape = (batch_size, channels_x, height_x, width_x)
w_shape = (num_filters, channels_x, height_w, width_w)
input_layer = np.linspace(-0.4, 0.3, num=np.prod(x_shape)).reshape(x_shape)
weight = np.linspace(-0.2, 0.3, num=np.prod(w_shape)).reshape(w_shape)
bias = np.linspace(-0.1, 0.2, num=num_filters)
output_layer = conv_layer_forward(input_layer, weight, bias, pad_size, stride)
correct_out = np.array(
[[[[ 0.17033051, 0.32060403, 0.18923389], # y[0, 0, 0, :]
[ 0.33279093, 0.56466886, 0.35157275], # y[0, 0, 1, :]
[ 0.18810941, 0.33769913, 0.19424845]], # y[0, 0, 2, :]
[[-0.35023427, -0.57793339, -0.28825123], # y[0, 1, 0, :]
[-0.43650753, -0.69081423, -0.35310624], # y[0, 1, 1, :]
[-0.11705711, -0.23774091, -0.06783842]]], # y[0, 1, 2, :]
[[[-0.07697860, -0.08027605, -0.09796378], # y[1, 0, 0, :]
[-0.12792200, -0.17127517, -0.16897303], # y[1, 0, 1, :]
[-0.17886539, -0.24267950, -0.21261492]], # y[1, 0, 2, :]
[[ 0.47944789, 0.63667342, 0.50154236], # y[1, 1, 0, :]
[ 0.71826643, 0.99647208, 0.74183487], # y[1, 1, 1, :]
[ 0.59295935, 0.79736735, 0.60228948]]]] # y[1, 1, 2, :]
)
# Compare your output to ours
print('Output_layer valid?:',np.array_equal(np.round(output_layer,decimals=8), np.round(correct_out,decimals=8)))
################################# Task 2.2: Convolution --- basic backward pass
from conv_layers import conv_layer_forward, conv_layer_backward, eval_numerical_gradient_array
np.random.seed(231)
batch_size = 1
num_filters = 2
channels_x, height_x, width_x = 3, 7, 7
height_w, width_w = 3, 3
stride = 1
pad_size = 1
input_layer = np.random.randn(batch_size, channels_x, height_x, width_x)
weight = np.random.randn(num_filters, channels_x, height_w, width_w)
bias = np.random.randn(num_filters,)
output_layer_gradient = np.random.randn(batch_size, num_filters, height_x, width_x)
numeric_input_layer_gradient = eval_numerical_gradient_array(
lambda x: conv_layer_forward(x, weight, bias, pad_size, stride), input_layer, output_layer_gradient)
numeric_weight_gradient = eval_numerical_gradient_array(
lambda w: conv_layer_forward(input_layer, w, bias, pad_size, stride), weight, output_layer_gradient)
numeric_bias_gradient = eval_numerical_gradient_array(
lambda b: conv_layer_forward(input_layer, weight, b, pad_size, stride), bias, output_layer_gradient)
input_layer_gradient, weight_gradient, bias_gradient = conv_layer_backward(
output_layer_gradient, input_layer, weight, bias, pad_size, stride)
# Compare your output to ours
print('gradient of L wrt w, valid?:',np.array_equal(np.round(weight_gradient,decimals=6), np.round(numeric_weight_gradient,decimals=6)))
print('gradient of L wrt x, valid?:',np.array_equal(np.round(input_layer_gradient,decimals=6), np.round(numeric_input_layer_gradient,decimals=6)))
print('gradient of L wrt b, valid?:',np.array_equal(np.round(bias_gradient,decimals=6), np.round(numeric_bias_gradient,decimals=6)))
################################# Task 2.2: Convolution --- basic backward pass [MULTI]
from conv_layers import conv_layer_forward, conv_layer_backward, eval_numerical_gradient_array
np.random.seed(231)
batch_size = 2
num_filters = 2
channels_x, height_x, width_x = 3, 7, 7
height_w, width_w = 3, 3
stride = 1
pad_size = 1
input_layer = np.random.randn(batch_size, channels_x, height_x, width_x)
weight = np.random.randn(num_filters, channels_x, height_w, width_w)
bias = np.random.randn(num_filters,)
output_layer_gradient = np.random.randn(batch_size, num_filters, height_x, width_x)
numeric_input_layer_gradient = eval_numerical_gradient_array(
lambda x: conv_layer_forward(x, weight, bias, pad_size, stride), input_layer, output_layer_gradient)
numeric_weight_gradient = eval_numerical_gradient_array(
lambda w: conv_layer_forward(input_layer, w, bias, pad_size, stride), weight, output_layer_gradient)
numeric_bias_gradient = eval_numerical_gradient_array(
lambda b: conv_layer_forward(input_layer, weight, b, pad_size, stride), bias, output_layer_gradient)
input_layer_gradient, weight_gradient, bias_gradient = conv_layer_backward(
output_layer_gradient, input_layer, weight, bias, pad_size, stride)
# Compare your output to ours
print('gradient of L wrt w multi, valid?:',np.array_equal(np.round(weight_gradient,decimals=6), np.round(numeric_weight_gradient,decimals=6)))
print('gradient of L wrt x multi, valid?:',np.array_equal(np.round(input_layer_gradient,decimals=6), np.round(numeric_input_layer_gradient,decimals=6)))
print('gradient of L wrt b multi, valid?:',np.array_equal(np.round(bias_gradient,decimals=6), np.round(numeric_bias_gradient,decimals=6))) |
984,373 | fbdada9c0f28746539ddb3ea1a8a3a1b37111695 | from rest_framework import serializers
from project_api import models
class HelloSerializer(serializers.Serializer):
    """Serializes a name field for testing our APIView"""
    name = serializers.CharField(max_length=10)
class UserProfileSerializer(serializers.ModelSerializer):
    """Serializes a user profile object"""
    class Meta:
        model = models.UserProfile
        fields = ('id','email','name','password')
        # Extra per-field configuration beyond the defaults.
        extra_kwargs ={
            'password': {
                # Never expose the password in API responses, and render
                # it as a password input in the browsable API.
                'write_only':True,
                'style':{'input_type':'password'}
            }
        }
984,374 | fefdff72704ad88e54b8f4ebbeabf8f58ca4d11f | # Generated by Django 3.2.3 on 2021-05-23 14:18
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('productname', models.CharField(default='laku', max_length=20)),
('packagesize', models.CharField(default=3, max_length=20)),
('unitprice', models.IntegerField(default=3)),
('unitsinstock', models.IntegerField(default=3)),
('companyname', models.CharField(default='lakufirma', max_length=50)),
],
),
migrations.CreateModel(
name='Supplier',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('companyname', models.CharField(default='lakufirma', max_length=50)),
('contactname', models.CharField(default='tommi', max_length=50)),
('address', models.CharField(default='tie 3', max_length=100)),
('phone', models.CharField(default='47563956', max_length=20)),
('email', models.CharField(default='simo.silli@silli.com', max_length=50)),
('country', models.CharField(default='Finland', max_length=20)),
],
),
]
|
def factorial(x):
    """Return x! (the product 1 * 2 * ... * x; factorial(0) == 1) and
    print it.

    Bug fixes: the original def header was missing its ':' and the
    str.format call had a doubled comma -- both SyntaxErrors.
    """
    result = 1
    for factor in range(2, x + 1):
        result *= factor
    print(str.format("The factorial of {0} number is: {1}", x, result))
    return result
|
984,376 | aabeb411edcf99e4b3252dffd2ac2586511ca4ae | import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import random
from dataset import *
from model import *
import pandas as pd
from matplotlib import pyplot as plt
from torch.utils.tensorboard import SummaryWriter
import torch.optim as optim
from torch.optim import lr_scheduler
import torchvision
from torchvision import datasets, models, transforms
import os, sys, shutil, copy, time
from torch.utils.data import Dataset, DataLoader
import seaborn as sns
import gc
import os
import argparse
# Command-line configuration for the conditional-expectation predictor run.
parser = argparse.ArgumentParser()
parser.add_argument('--log_root', type=str, default='/data/hdim-forecast/log')
parser.add_argument('--feat_size', type=int, default=128)
parser.add_argument('--n_sample', type=int, default=256)
parser.add_argument('--n_past', type=int, default=2)
parser.add_argument('--n_future', type=int, default=10)
# Modeling parameters
parser.add_argument('--predictor_model', type=str, default='big')
parser.add_argument('--learning_rate', type=float, default=1e-4)
parser.add_argument('--batch_size', type=int, default=8)
# Run related parameters
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--run_label', type=int, default=0)
args = parser.parse_args()
device = torch.device('cuda:%d' % args.gpu)
args.device = device
# Bump run_label until an unused log directory is found, then claim it.
while True:
    args.name = 'model=%s-seq=%d/%d-ns=%d-feat_size=%d-bs=%d-lr=%.5f-run=%d' % \
        (args.predictor_model, args.n_past, args.n_future, args.n_sample, args.feat_size,
         args.batch_size, args.learning_rate, args.run_label)
    args.log_dir = os.path.join(args.log_root, 'pred', args.name)
    if not os.path.isdir(args.log_dir):
        os.makedirs(args.log_dir)
        break
    args.run_label += 1
print("Run number = %d" % args.run_label)
writer = SummaryWriter(args.log_dir)
# NOTE(review): results.txt is opened for the process lifetime and never
# explicitly closed.
log_writer = open(os.path.join(args.log_dir, 'results.txt'), 'w')
start_time = time.time()
global_iteration = 0
random.seed(args.run_label)  # Set a different random seed for different run labels
torch.manual_seed(args.run_label)
def log_scalar(name, value, epoch):
    """Mirror a scalar to TensorBoard and to the plain-text results file.

    NOTE(review): the text log records only space-separated values (no name
    or newline), so its column order depends on call order.
    """
    writer.add_scalar(name, value, epoch)
    log_writer.write('%f ' % value)
def message(epoch):
    """Print a per-epoch progress heartbeat with wall-clock time elapsed."""
    print("Finished epoch %d, time elapsed %.1f" % (epoch, time.time() - start_time))
# Frozen pretrained feature extractor; only the predictor below is trained.
feat_model = FeatureNetC(args.feat_size)
feat_model.load_state_dict(torch.load('pretrained/representation-c-%d.pt' % args.feat_size), strict=False)
feat_model = feat_model.to(device)
feat_model.eval()
multi_dataset = MovingMNISTMulti(train=True, n_past=args.n_past, n_future=args.n_future,
                                 n_sample=args.n_sample, deterministic=False, last_only=True)
multi_loader = DataLoader(multi_dataset, batch_size=args.batch_size, shuffle=True, num_workers=args.batch_size)
# `predictors` is a name->constructor table defined in model.py.
predictor = predictors[args.predictor_model](args.feat_size).to(device)
exp_optim = optim.Adam(predictor.parameters(), lr=args.learning_rate)
scheduler = optim.lr_scheduler.StepLR(exp_optim, 20, 0.9)
# Learn the conditional expectation: train the predictor to regress the mean
# feature of sampled futures, then evaluate against a split-half baseline.
for epoch in range(2000):
    multi_dataset.set_nsample(4)
    for idx, data in enumerate(multi_loader):
        exp_optim.zero_grad()
        bx, by, bl = data
        bx = bx.to(device)
        # NOTE(review): the .view(args.batch_size, ...) assumes every batch
        # is full; a smaller final batch would fail here (no drop_last set).
        actual_feat = feat_model(bx[:, :, -1].view(-1, 1, 64, 64)).view(args.batch_size, 4, args.feat_size).detach()
        actual_exp = actual_feat.mean(dim=1)
        pred_exp = predictor(bx[:, 0, 0:2])
        loss_l2 = (actual_exp - pred_exp).pow(2).mean()
        loss_l2.backward()
        writer.add_scalar('loss_l2', loss_l2, global_iteration)
        exp_optim.step()
        global_iteration += 1
    # Evaluation pass: compare predictor error to the sampling-noise baseline
    # (difference between two half-sample means) on ~1000 examples.
    errors = []
    baseline_error = []
    num_elem = 0
    multi_dataset.set_nsample(args.n_sample)
    plt.figure(figsize=(20, 20))
    palette = sns.color_palette('hls', 4)
    with torch.no_grad():
        for idx, data in enumerate(multi_loader):
            bx, by, bl = data
            bx = bx.to(device)
            actual_feat = feat_model(bx[:, :, -1].view(-1, 1, 64, 64)).view(args.batch_size, args.n_sample, args.feat_size)
            actual_exp = actual_feat.mean(dim=1)
            pred_exp = predictor(bx[:, 0, 0:2])
            errors.append(actual_exp - pred_exp)
            baseline_error.append(actual_feat[:, :args.n_sample//2, :].mean(dim=1) - actual_feat[:, args.n_sample//2:, :].mean(dim=1))
            num_elem += args.batch_size
            if num_elem > 1000:
                break
            # First four batches: overlay feature histograms for inspection.
            if idx < 4:
                for i in range(36):
                    plt.subplot(6, 6, i+1)
                    plt.hist(actual_feat[0, :, i].cpu().numpy(), bins=20, color=palette[idx], alpha=0.5)
                    plt.axvline(pred_exp[0, i], color=palette[idx])
                    plt.axvline(actual_exp[0, i], color=palette[idx], linestyle=':')
            elif idx == 4:
                os.makedirs(os.path.join(args.log_dir, 'plot'), exist_ok=True)
                plt.savefig(os.path.join(args.log_dir, 'plot', 'hist-%d.png' % (epoch // 10)))
    plt.close()
    errors = torch.cat(errors)
    baseline_error = torch.cat(baseline_error)
    writer.add_scalar('loss_exp_l1', errors.abs().mean(), global_iteration)
    writer.add_scalar('loss_exp_l1_base', baseline_error.abs().mean(), global_iteration)
    scheduler.step()
    message(epoch)
    # Checkpoint every 10 epochs (overwrites the same file).
    if (epoch+1) % 10 == 0:
        torch.save(predictor.state_dict(), 'pretrained/predictor2_%d-%d-%s.pt' % (args.feat_size, args.n_future, args.predictor_model))
984,377 | 781dbd4d93f6312e86b3b803c170e7fc22a5bdef | import responder
import requests
from prometheus_client import Counter, Summary, start_http_server
import time
import asyncio
import os
import json
import data
import dinghy_dns
import dns.rdatatype
import socket
import logging
from urllib.parse import urlparse
from kubernetes import client, config
from kubernetes.client.rest import ApiException
# Prometheus metrics
COMPLETED_REQUEST_COUNTER = Counter('dingy_pings_completed', 'Count of completed dinghy ping requests')
FAILED_REQUEST_COUNTER = Counter('dingy_pings_failed', 'Count of failed dinghy ping requests')
REQUEST_TIME = Summary('dinghy_request_processing_seconds', 'Time spent processing request')
TAIL_LINES_DEFAULT = 100
LOGS_PREVIEW_LENGTH = 1000
# Configure kubernetes client
if not "IN_TRAVIS" in os.environ:
config.load_incluster_config()
k8s_client = client.CoreV1Api()
def to_pretty_json(value):
    """Render *value* as an indented, key-sorted JSON string (Jinja filter)."""
    return json.dumps(
        value,
        sort_keys=True,
        indent=4,
        separators=(',', ': '),
    )
api = responder.API(title="Dinghy Ping", version="1.0", openapi="3.0.0", docs_route="/docs")
api.jinja_env.filters['tojson_pretty'] = to_pretty_json
# For local mac docker image creation and testing, switch to host.docker.internal
redis_host = os.getenv("REDIS_HOST", default="127.0.0.1")
@api.route("/")
def dinghy_html(req, resp):
    """Index route to Dinghy-ping input html form."""
    # NOTE(review): debug print of the working directory left in place.
    print(os.getcwd())
    resp.content = api.template(
        '../views/templates/index.html',
        get_all_pinged_urls=_get_all_pinged_urls()
    )
@api.route("/ping/domains")
async def ping_multiple_domains(req, resp):
    """
    Async process to test multiple domains and return JSON with results.

    Post request data example:
    {
      "domains": [
        {"protocol": "https", "domain": "google.com", "headers": {"header1": "value"}},
        {"protocol": "https", "domain": "microsoft.com"}
      ]
    }

    Returns, per domain: protocol, domain, domain_response_code,
    domain_response_headers and domain_response_time_ms.
    """
    results = []

    def build_domain_results(protocol, request_domain, results, headers):
        # Ping one domain and append its outcome to the shared results list.
        domain_response_code, domain_response_text, domain_response_time_ms, domain_response_headers = _process_request(protocol, request_domain, req.params, headers)
        results.append({
            "protocol": protocol,
            "domain": request_domain,
            "domain_response_code": domain_response_code,
            "domain_response_headers": domain_response_headers,
            "domain_response_time_ms": domain_response_time_ms
        })

    def gather_results(data):
        # NOTE(review): assumes every entry carries a 'headers' key; a
        # missing key raises KeyError, unlike the docstring's second example.
        for domain in data['domains']:
            protocol = domain['protocol']
            request_domain = domain['domain']
            headers = domain['headers']
            build_domain_results(protocol, request_domain, results, headers)

    # gather_results() returns None; it is invoked inline so the awaited
    # request body is fully processed before `results` is serialized.
    resp.media = {"domains_response_results": results, "wait": gather_results(await req.media())}
@api.route("/ping/{protocol}/{domain}")
def domain_response_html(req, resp, *, protocol, domain):
    """
    API endpoint for sending a request to a domain via user specified protocol.

    Response contains status_code, body text and response_time_ms, rendered
    through the ping_response.html template.
    """
    headers = {}
    domain_response_code, domain_response_text, domain_response_time_ms, domain_response_headers = (
        _process_request(protocol, domain, req.params, headers)
    )
    resp.content = api.template(
        'ping_response.html',
        domain=domain,
        domain_response_code=domain_response_code,
        domain_response_text=domain_response_text,
        domain_response_headers=domain_response_headers,
        domain_response_time_ms=domain_response_time_ms
    )
@api.route("/form-input")
def form_input(req, resp):
    """Dinghy-ping html input form handler for an http(s) connection test."""
    url = urlparse(req.params['url'])
    # Optional JSON-encoded headers from the form.
    if 'headers' in req.params.keys():
        headers = json.loads(req.params['headers'])
    else:
        headers = {}
    if url.scheme == "":
        # _process_request treats an empty scheme as https.
        scheme_notes = "Scheme not given, defaulting to https"
    else:
        scheme_notes = f'Scheme {url.scheme} provided'
    domain_response_code, domain_response_text, domain_response_time_ms, domain_response_headers = (
        _process_request(url.scheme, url.netloc + url.path, url.query, headers)
    )
    resp.content = api.template(
        'ping_response.html',
        request=f'{req.params["url"]}',
        scheme_notes=scheme_notes,
        domain_response_code=domain_response_code,
        domain_response_text=domain_response_text,
        domain_response_headers=domain_response_headers,
        domain_response_time_ms=domain_response_time_ms
    )
@api.route("/form-input-tcp-connection-test")
async def form_input_tcp_connection_test(req, resp):
    """Form input endpoint for a raw TCP connection test."""
    logging.basicConfig(level=logging.DEBUG)
    tcp_endpoint = req.params['tcp-endpoint']
    tcp_port = req.params['tcp-port']
    # NOTE(review): `loop` is fetched but never used.
    loop = asyncio.get_running_loop()
    try:
        # NOTE(review): the reader/writer pair is never closed after the
        # successful probe.
        reader, writer = await asyncio.open_connection(host=tcp_endpoint, port=tcp_port)
        connection_info = f'Connection created to {tcp_endpoint} on port {tcp_port}'
        # Record the successful probe (no HTTP status/time for raw TCP).
        d = data.DinghyData(redis_host,
            domain_response_code=None,
            domain_response_time_ms=None,
            request_url=f'{tcp_endpoint}:{tcp_port}'
        )
        d.save_ping()
        resp.content = api.template(
            'ping_response_tcp_conn.html',
            request=tcp_endpoint,
            port=tcp_port,
            connection_results = connection_info
        )
    except (asyncio.TimeoutError, ConnectionRefusedError):
        print("Network port not responding")
        connection_info = f'Failed to connect to {tcp_endpoint} on port {tcp_port}'
        # NOTE(review): 402 Payment Required is an odd choice for a failed
        # probe — confirm intent.
        resp.status_code = api.status_codes.HTTP_402
        resp.content = api.template(
            'ping_response_tcp_conn.html',
            request=tcp_endpoint,
            port=tcp_port,
            connection_results = connection_info
        )
@api.route("/form-input-dns-info")
async def form_input_dns_info(req, resp):
    """Form input endpoint for DNS info: A, NS and MX lookups for a domain."""
    domain = req.params['domain']
    # Optional custom nameserver; None lets the resolver pick the default.
    if 'nameserver' in req.params.keys():
        nameserver = req.params['nameserver']
    else:
        nameserver = None
    dns_info_A=_gather_dns_A_info(domain, nameserver)
    dns_info_NS=_gather_dns_NS_info(domain, nameserver)
    dns_info_MX=_gather_dns_MX_info(domain, nameserver)
    resp.content = api.template(
        'dns_info.html',
        domain = domain,
        dns_info_A=dns_info_A,
        dns_info_NS=dns_info_NS,
        dns_info_MX=dns_info_MX
    )
@api.route("/list-pods")
def list_pods(req, resp):
    """Route to list pods in the requested namespace.

    NOTE(review): returns the dict instead of assigning resp.media — unlike
    every other route here; confirm responder handles the return value.
    """
    namespace = req.params['namespace']
    return _get_all_pods(namespace)
@api.route("/get/pod-logs")
def dinghy_get_pod_logs(req, resp):
    """Form input page for pod logs; the user supplies a namespace."""
    resp.content = api.template(
        'pod_logs.html'
    )
@api.route("/post/pod-logs")
def dinghy_post_pod_logs(req, resp, namespace="default", tail_lines=TAIL_LINES_DEFAULT):
    """Landing page for Dinghy-ping pod logs input html form.

    Query params override the `namespace` and `tail_lines` defaults.
    """
    if 'namespace' in req.params.keys():
        namespace = req.params['namespace']
    if 'tail_lines' in req.params.keys():
        tail_lines = req.params['tail_lines']
    resp.content = api.template(
        'pod_logs_input.html',
        all_pods=_get_all_pods(namespace=namespace),
        tail_lines=tail_lines
    )
@api.route("/input-pod-logs")
def form_input_pod_logs(req, resp, *, tail_lines=TAIL_LINES_DEFAULT):
    """Render logs for the pod the user selected on the input page."""
    pod = req.params['pod']
    namespace = req.params['namespace']
    # NOTE(review): unconditional read shadows the tail_lines default; a
    # request without the param raises KeyError.
    tail_lines = req.params['tail_lines']
    logs = _get_pod_logs(pod, namespace, tail_lines)
    resp.content = api.template(
        'pod_logs_output.html',
        logs=logs
    )
@api.route("/deployment-logs/{namespace}/{name}")
def dinghy_deployment_logs(req, resp, *,
                           namespace, name,
                           tail_lines=TAIL_LINES_DEFAULT,
                           preview=LOGS_PREVIEW_LENGTH):
    """Get concatenated pod logs for a given deployment.

    With ?json returns JSON (optionally truncated via ?preview); otherwise
    renders the HTML logs template.
    """
    if 'tail_lines' in req.params.keys():
        tail_lines = req.params['tail_lines']
    logs = _get_deployment_logs(namespace, name, tail_lines)
    logs_preview = logs[0:preview]
    if 'json' in req.params.keys():
        if 'preview' in req.params.keys():
            resp.media = {"logs": logs_preview}
        else:
            resp.media = {"logs": logs}
    else:
        resp.content = api.template(
            'pod_logs_output.html',
            logs=logs
        )
def _get_deployment_logs(namespace, name, tail_lines=TAIL_LINES_DEFAULT):
    """Concatenate logs for all pods matching label release=<name>.

    Pods are found via a K8s label selector; API failures are logged and
    yield a partial (possibly empty) result rather than raising.
    """
    pods = []
    try:
        api_response = k8s_client.list_namespaced_pod(namespace, label_selector='release={}'.format(name))
        for api_items in api_response.items:
            pods.append(api_items.metadata.name)
    except ApiException as e:
        print("Exception when calling CoreV1Api->list_namespaced_pod: %s\n" % e)
    # Iterate over list of pods and concatenate logs, each prefixed by its
    # pod name on its own line.
    logs = ""
    try:
        for pod in pods:
            logs += pod + "\n"
            logs += k8s_client.read_namespaced_pod_log(pod, namespace, tail_lines=tail_lines)
    except ApiException as e:
        logging.error("Exception when calling CoreV1Api->read_namespaced_pod_log: %s\n" % e)
    return logs
def _get_pod_logs(pod, namespace, tail_lines=TAIL_LINES_DEFAULT):
    """Read the last *tail_lines* lines of logs for *pod* in *namespace*.

    Returns an empty string when the API call fails.
    BUG FIX: on ApiException the original fell through to ``return ret``
    with ``ret`` unbound, raising UnboundLocalError instead of returning.
    """
    ret = ""
    try:
        ret = k8s_client.read_namespaced_pod_log(pod, namespace, tail_lines=tail_lines)
    except ApiException as e:
        logging.error("Exception when calling CoreV1Api->read_namespaced_pod_log: %s\n" % e)
    return ret
def _get_all_namespaces():
    """Return the names of all namespaces visible to the service account."""
    namespaces = []
    ret = k8s_client.list_namespace(watch=False)
    for i in ret.items:
        namespaces.append(i.metadata.name)
    return namespaces
def _get_all_pods(namespace=None):
    """Return {pod_name: namespace} for one namespace, or all when None."""
    pods = {}
    if namespace:
        ret = k8s_client.list_namespaced_pod(namespace, watch=False)
    else:
        ret = k8s_client.list_pod_for_all_namespaces(watch=False)
    for i in ret.items:
        pod = i.metadata.name
        # NOTE(review): this rebinds the `namespace` parameter inside the
        # loop; harmless here but confusing to readers.
        namespace = i.metadata.namespace
        pods.update({ pod: i.metadata.namespace} )
    return pods
def _gather_dns_A_info(domain, nameserver):
    """Query A records for *domain*, optionally via a specific nameserver."""
    dns_info_A = dinghy_dns.DinghyDns(domain, rdata_type=dns.rdatatype.A, nameserver=nameserver)
    return dns_info_A.dns_query()
def _gather_dns_NS_info(domain, nameserver):
    """Query NS records for *domain*, optionally via a specific nameserver."""
    dns_info_NS = dinghy_dns.DinghyDns(domain, rdata_type=dns.rdatatype.NS, nameserver=nameserver)
    return dns_info_NS.dns_query()
def _gather_dns_MX_info(domain, nameserver):
    """Query MX records for *domain*, optionally via a specific nameserver."""
    dns_info_MX = dinghy_dns.DinghyDns(domain, rdata_type=dns.rdatatype.MX, nameserver=nameserver)
    return dns_info_MX.dns_query()
@REQUEST_TIME.time()
def _process_request(protocol, domain, params, headers):
    """
    Internal method to run request process; takes protocol and domain as input.

    Returns (status_code, body_text, elapsed_ms, headers_dict). On failure
    status code and timing stay empty strings and the body carries the error
    text; successful pings are persisted to Redis via data.DinghyData.
    """
    if protocol == "":
        protocol = "https"
    domain_response_code = ""
    domain_response_text = ""
    domain_response_time_ms = ""
    domain_response_headers = {}
    try:
        r = requests.get(f'{protocol}://{domain}', params=params, timeout=5, headers=headers)
        COMPLETED_REQUEST_COUNTER.inc()
    except requests.exceptions.Timeout as err:
        domain_response_text = f'Timeout: {err}'
        FAILED_REQUEST_COUNTER.inc()
        return domain_response_code, domain_response_text, domain_response_time_ms, domain_response_headers
    except requests.exceptions.TooManyRedirects as err:
        domain_response_text = f'TooManyRedirects: {err}'
        FAILED_REQUEST_COUNTER.inc()
        return domain_response_code, domain_response_text, domain_response_time_ms, domain_response_headers
    except requests.exceptions.RequestException as err:
        domain_response_text = f'RequestException: {err}'
        FAILED_REQUEST_COUNTER.inc()
        return domain_response_code, domain_response_text, domain_response_time_ms, domain_response_headers
    domain_response_code = r.status_code
    domain_response_text = r.text
    domain_response_headers = dict(r.headers)
    # BUG FIX: `.elapsed.microseconds` is only the sub-second component of the
    # timedelta, so any response slower than one second reported a wildly
    # wrong duration; total_seconds() covers the full elapsed time.
    domain_response_time_ms = r.elapsed.total_seconds() * 1000
    # (removed a stray debug print of the response headers)
    d = data.DinghyData(redis_host, domain_response_code, domain_response_time_ms, r.url)
    d.save_ping()
    return domain_response_code, domain_response_text, domain_response_time_ms, domain_response_headers
def _get_all_pinged_urls():
    """Get previously pinged URLs from the Dinghy-ping data module (Redis)."""
    p = data.DinghyData(redis_host)
    return p.get_all_pinged_urls()
if __name__ == '__main__':
    # Prometheus metrics on :8000; the web app itself on :80.
    start_http_server(8000)
    api.run(address="0.0.0.0", port=80, debug=True)
|
984,378 | dbd68cf10ff7361286eaa7a1259a956ea04f3341 | def get_longest_subsequence_with_property(lst, list_property_predicate):
result = []
length = len(lst)
width = 1
while width <= length:
for start in range(0, length - width + 1):
sub_sequence = lst[start:start + width]
if list_property_predicate(sub_sequence):
result = sub_sequence
break
width += 1
return result
"""
Determines the longest sub-sequence with a given property for a list.
:param lst: The input list of numbers.
:param property_predicate: The list predicate representing the given property. Should be a function (list[]) -> bool type.
:return: The longest sub-sequence with that property. If multiple longest sub-sequences with the same length exist only the first from left to right is returned.
"""
def is_even(number):
    """Return True when *number* is divisible by two."""
    return (number & 1) == 0
def is_prime(number):
    """Trial-division primality test over the odd candidates up to n // 2."""
    if number < 2:
        return False
    if number == 2:
        return True
    if number % 2 == 0:
        return False
    return all(number % divisor != 0 for divisor in range(3, number // 2 + 1, 2))
def is_list_of_primes(lst):
    """Return True when every element of *lst* is prime (vacuously True for [])."""
    return all(is_prime(el) for el in lst)
def get_longest_all_primes(lst):
    """
    Determines the longest sub-sequence of primes for 'lst' list.
    :param lst: The input list of numbers.
    :return: The longest sub-sequence of primes if exists, [] otherwise.
    """
    return get_longest_subsequence_with_property(lst, is_list_of_primes)
def test_get_longest_all_primes():
    """Self-checks for get_longest_all_primes (run via test_all())."""
    assert get_longest_all_primes([2]) == [2]
    assert get_longest_all_primes([2, 3]) == [2, 3]
    assert get_longest_all_primes([1]) == []
    assert get_longest_all_primes([]) == []
    assert get_longest_all_primes([1, 6, 8]) == []
    assert get_longest_all_primes([2, 4, 6, 5, 7, 1, 6, 12]) == [5, 7]
    assert get_longest_all_primes([1, 2, 3, 5, 6, 7, 8, 9, 11, 13, 19, 23, 17]) == [11, 13, 19, 23, 17]
def is_below_average(lst, average):
    """Return True when mean(lst) <= average.

    NOTE: assumes *lst* is non-empty (an empty list raises ZeroDivisionError,
    matching the original behavior).
    """
    return float(sum(lst) / len(lst)) <= average
def get_longest_average_below(lst, average):
    """
    Determines the longest sub-sequence of lst whose numbers have their
    average not above 'average'.

    BUG FIX: this docstring sat *after* the function in the original file,
    so it was never attached to the function.

    :param lst: The input list of numbers.
    :param average: The average threshold.
    :return: The longest sub-sequence with average not above 'average'
        threshold if exists, [] otherwise.
    """
    return get_longest_subsequence_with_property(lst, (lambda l_lst: is_below_average(l_lst, average)))
def test_get_longest_average_below():
    """Self-checks for get_longest_average_below (run via test_all())."""
    assert get_longest_average_below([], 4.0) == []
    assert get_longest_average_below([4], 4.0) == [4]
    assert get_longest_average_below([3, 6], 4.0) == [3]
    assert get_longest_average_below([5], 4.0) == []
    assert get_longest_average_below([1, 2, 3, 5, 6, 7, 8, 9, 11, 13, 19, 23, 17], 4.0) == [1, 2, 3, 5, 6, 7]
    assert get_longest_average_below([8, 9, 11, 1, 2, 3, 5, 6, 7, 13, 19, 23, 17], 4.0) == [1, 2, 3, 5, 6, 7]
    assert get_longest_average_below([8, 9, 11, 13, 19, 23, 17, 1, 2, 3, 5, 6, 7], 4.0) == [1, 2, 3, 5, 6, 7]
    assert get_longest_average_below([5, 6, 7, 8, 3, 12, 2, 3, 88], 4.0) == [2, 3]
    assert get_longest_average_below([1, 9, 2, 8, 3, 7, 4, 6, 5, 5], 5.0) == [1, 9, 2, 8, 3, 7, 4, 6, 5, 5]
def all_elements_divisible_with_factor(lst, k):
    """Return True when every element of *lst* is divisible by *k*
    (vacuously True for an empty list)."""
    return all(element % k == 0 for element in lst)
def test_all_elements_divisible_with_factor():
    """Self-checks for all_elements_divisible_with_factor (run via test_all())."""
    assert all_elements_divisible_with_factor([2, 4, 6], 2) is True
    assert all_elements_divisible_with_factor([], 2) is True
    assert all_elements_divisible_with_factor([2, 4, 6], 3) is False
def get_longest_div_k(lst, factor):
    '''
    Finds the longest subsequence where all elements are divisible with factor 'factor'.
    :param lst: Input lst of integers.
    :param factor: The factor to test divisibility against.
    :return: The longest subsequence where all elements are divisible with factor 'factor' if exists, [] otherwise.
    '''
    return get_longest_subsequence_with_property(lst,
                                                 (lambda l_list: all_elements_divisible_with_factor(l_list, factor)))
def test_get_longest_div_k():
    """Self-checks for get_longest_div_k (run via test_all())."""
    assert get_longest_div_k([], 2) == []
    assert get_longest_div_k([2], 2) == [2]
    assert get_longest_div_k([2, 4, 6, 8, 12, 18], 2) == [2, 4, 6, 8, 12, 18]
    assert get_longest_div_k([2, 4, 6, 8, 12, 18], 3) == [12, 18]
    assert get_longest_div_k([2, 4, 6, 8, 12, 18], 4) == [8, 12]
    assert get_longest_div_k([2, 4, 6, 8, 12, 18], 5) == []
def test_all():
    """Run the whole self-check suite (invoked at import time below)."""
    test_all_elements_divisible_with_factor()
    test_get_longest_all_primes()
    test_get_longest_average_below()
    test_get_longest_div_k()
test_all()
def show_options():
    """Display the interactive menu choices on stdout."""
    menu_text = '''
    1.Read input list elements.
    2.Find longest sub-sequence of primes.
    3.Find longest sub-sequence of elements with average below threshold(inclusive).
    4.Find longest sub-sequence of elements divisible with a given factor.
    5.Exit the interactive menu.
    '''
    print(menu_text)
def read_input_elements():
    """Interactively read an integer count and then that many integers.

    :return: the list of integers entered by the user.
    """
    elements = []
    no_elements = int(input('Number of elements='))
    for index in range(0, no_elements):
        el = int(input(f'el[{index + 1}]='))
        elements.append(el)
    return elements
def show_longest_of_primes(lst):
    """Print the longest all-prime sub-sequence of *lst*."""
    print(f"Longest subsequence of primes is:{get_longest_all_primes(lst)}.")
def show_longest_below_average(lst):
    """Prompt for an average threshold and print the matching sub-sequence."""
    avg_threshold = float(input("Average threshold is:"))
    print(f"Longest subsequence of numbers below average {avg_threshold} "
          f"is:{get_longest_average_below(lst, avg_threshold)}.")
def show_longest_of_divisible_with_factor(lst):
    """Prompt for a factor and print the longest all-divisible sub-sequence."""
    factor = int(input("Divisibility factor is:"))
    print(f"Longest subsequence of numbers divisible with factor {factor} is: {get_longest_div_k(lst, factor)}")
def interactive_menu():
    """Console REPL: dispatch menu options until the user picks 5 (exit)."""
    lst_data = []
    while True:
        show_options()
        option = input("Your option is:")
        if option == '1':
            # Copy so later reads don't alias the helper's internal list.
            lst_data = read_input_elements()[:]
        elif option == '2':
            show_longest_of_primes(lst_data)
        elif option == '3':
            show_longest_below_average(lst_data)
        elif option == "4":
            show_longest_of_divisible_with_factor(lst_data)
        elif option == "5":
            break
        else:
            print("Unknown option, try again.")
    print("Exiting the menu.")
interactive_menu()
|
984,379 | 1fe67e0ed7439a30a0f2ea219ab356e196007061 | from .settings import *
# Production overrides of the base settings (imported above via *).
DEBUG = False
ADMIN_URL = env.str("DJANGO_ADMIN_URL")
# Use S3 for static content
AWS_ACCESS_KEY_ID = env.str('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env.str('AWS_SECRET_ACCESS_KEY')
AWS_S3_BUCKET_NAME = "golf-api-static-21sd3asfa"
AWS_S3_BUCKET_NAME_STATIC = AWS_S3_BUCKET_NAME
# Static and media assets share the bucket under distinct key prefixes.
AWS_S3_KEY_PREFIX = "media"
AWS_S3_KEY_PREFIX_STATIC = "static"
AWS_REGION = env.str('AWS_REGION')
AWS_S3_CUSTOM_DOMAIN = f'{AWS_S3_BUCKET_NAME}.s3.amazonaws.com'
STATIC_URL = f'https://{AWS_S3_CUSTOM_DOMAIN}/static/'
STATICFILES_STORAGE = 'django_s3_storage.storage.StaticS3Storage'
PUBLIC_MEDIA_LOCATION = 'media'
MEDIA_URL = f'https://{AWS_S3_CUSTOM_DOMAIN}/{PUBLIC_MEDIA_LOCATION}/'
DEFAULT_FILE_STORAGE = 'django_s3_storage.storage.S3Storage'
# Former MySQL-backed Aurora configuration, kept for reference.
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.mysql',
#         'NAME': env.str('AURORA_DB'),  # dbname
#         'USER': env.str('AURORA_ADMIN'),  # master username
#         'PASSWORD': env.str('AURORA_PASSWORD'),  # master password
#         'HOST': env.str('AURORA_ENDPOINT'),  # Endpoint
#         'PORT': '3306',
#     }
# }
# Current PostgreSQL-backed Aurora configuration.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': env.str('AURORA_DB'),
        'USER': env.str('AURORA_ADMIN'),
        'PASSWORD': env.str('AURORA_PASSWORD'),
        'HOST': env.str('AURORA_ENDPOINT'),
        'PORT': 5432,
    },
}
984,380 | 34394c3753d593bb267d7434e84234bbb354f9ff | import pandas as pd
# BUG FIX: train_test_split and LinearRegression were used below while their
# imports were commented out, so the script crashed with NameError. They are
# restored from the modern sklearn locations (sklearn.cross_validation was
# removed in scikit-learn 0.20 in favor of sklearn.model_selection).
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt

# Column 0 is the single feature, column 1 the target.
dataset = pd.read_csv('sample_submission.csv')
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, 1].values

plt.scatter(X, y)
plt.show()

# Hold out a third of the data, then fit a simple linear regression.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=1/3, random_state=0)

regressor = LinearRegression()
regressor.fit(X_train, y_train)

#y_pred = regressor.predict(X_test)
#plt.scatter(X_train, y_train, color = 'red')
#plt.plot(X_train, regressor.predict(X_train), color = 'blue')
#plt.show()
984,381 | 9a550a421a90d0175eae4ad1b30a968eaf7add25 | from message.send import Send
from message.receive import Receive
from time import sleep
if __name__ == "__main__":
    receive = Receive()
    send = Send()
    color = 'blue'
    robot_id = 0
    # Command the robot at full power, wait, then read back its velocity.
    send.send_msg(robot_id, 100, 100, 0, power=1.0, d = 300000)
    sleep(3)
    receive.get_info(color, robot_id)
    print("vx = :", receive.robot_info['vx'])
    print("vy = :", receive.robot_info['vy'])
    # Second command at zero power; wait longer and sample velocity again.
    send.send_msg(robot_id, 10, 20, 0, power=0.0, d = 1000000)
    sleep(10)
    receive.get_info(color, robot_id)
    print("vx = :", receive.robot_info['vx'])
    print("vy = :", receive.robot_info['vy'])
|
984,382 | 883564560a2a5a999bffad947fc2c31c77c11722 | import ast
import codecs
import pkgutil
import re
from os import path
def escape(string):
    """Unicode-escape *string* and wrap the result in triple double-quotes,
    yielding a literal safe to embed in generated Python source."""
    escaped, _length = codecs.getencoder('unicode_escape')(string)
    return '"""{0}"""'.format(escaped.decode('ascii'))
class ImportTarget:
    """A module file on disk, addressed both by absolute filesystem path and
    by its normalized path relative to the bundle root."""
    def __init__(self, absolute_path, module_path):
        self.absolute_path = absolute_path
        self.module_path = path.normpath(module_path)
    def read(self):
        """Return the module's source text."""
        with open(self.absolute_path) as f:
            return f.read()
    def imports(self):
        """Yield an ImportLine for every import statement in this module."""
        tree = ast.parse(self.read(), self.absolute_path)
        for node in ast.walk(tree):
            if not isinstance(node, (ast.Import, ast.ImportFrom)):
                continue
            names = [a.name for a in node.names]
            if isinstance(node, ast.Import):
                yield from map(ImportLine.with_name, names)
            if isinstance(node, ast.ImportFrom):
                # `from . import x` parses with module=None; treat it as the
                # current package ('.').
                yield ImportLine(node.module or '.', names)
class ImportLine:
    """One import statement: the dotted path rewritten as a file path, plus
    the imported names (empty for a plain `import x`)."""
    # Modules importable in the current environment (stdlib + installed);
    # computed once at class-definition time.
    builtins = [m.name for m in pkgutil.iter_modules()]
    def __init__(self, import_path, items):
        # Convert dotted module syntax to a relative path: 'a.b' -> 'a/b',
        # and a leading '.' (relative import) to './'.
        import_path = import_path.replace('.', '/')
        import_path = re.sub('^/', './', import_path)
        self.import_path = import_path
        self.items = items
    @property
    def is_builtin(self):
        # True when the import resolves to an installed module rather than a
        # project-local file (those are skipped during bundling).
        return self.import_path in self.builtins
    @staticmethod
    def with_name(name):
        """Build an ImportLine for a bare `import name` statement."""
        return ImportLine(name, [])
class ModuleWriterGenerator:
    """Walks a script's import graph and collects the source of every
    project-local module, to emit `__pyndler__.write_module(...)` calls."""
    def __init__(self, sys_path):
        self._sys_path = sys_path
        # module_path -> source text, deduplicated across the graph walk.
        self.modules = {}
    def build(self):
        """Render the collected modules as generated bundler source."""
        return ''.join([
            f'__pyndler__.write_module({escape(module_path)}, {escape(module_source)})\n'
            for module_path, module_source in self.modules.items()
        ])
    def generate_for_file(self, python_file_path):
        """Entry point: walk imports starting from the given script."""
        self._generate_for_module(ImportTarget(python_file_path, '.'))
    def _generate_for_module(self, python_module):
        # Recurse into every non-builtin import of this module.
        for import_line in python_module.imports():
            if not import_line.is_builtin:
                self._generate_for_import(python_module, import_line)
    def _generate_for_import(self, python_module, import_line):
        # Record each resolved target once, then walk its own imports.
        import_targets = self._read_possible_import_targets(python_module, import_line)
        for import_target in import_targets:
            if import_target.module_path not in self.modules:
                self.modules[import_target.module_path] = import_target.read()
                self._generate_for_module(import_target)
    def _read_possible_import_targets(self, python_module, import_line):
        """Enumerate candidate files an import could refer to (module .py,
        package __init__.py at each level, and per-item submodules), keeping
        only those that exist on disk."""
        import_path_parts = import_line.import_path.split('/')
        possible_init_module_paths = [
            path.join(path.join(*import_path_parts[0:index + 1]), '__init__.py')
            for index in range(len(import_path_parts))
        ]
        possible_module_paths = [import_line.import_path + '.py'] + possible_init_module_paths
        for item in import_line.items:
            # `from pkg import item` may mean pkg/item.py or pkg/item/__init__.py.
            possible_module_paths += [
                path.join(import_line.import_path, item + '.py'),
                path.join(import_line.import_path, item, '__init__.py')
            ]
        import_targets = [
            self._find_module(python_module, module_path)
            for module_path in possible_module_paths
        ]
        valid_import_targets = [target for target in import_targets if target is not None]
        return valid_import_targets
    def _find_module(self, importing_python_module, module_path):
        """Resolve a candidate path relative to the importer first, then
        against the configured sys_path; None if neither exists."""
        relative_module_path = path.join(path.dirname(importing_python_module.absolute_path), module_path)
        if path.exists(relative_module_path):
            return ImportTarget(relative_module_path,
                                path.join(path.dirname(importing_python_module.module_path), module_path))
        full_module_path = path.join(self._sys_path, module_path)
        if path.exists(full_module_path):
            return ImportTarget(full_module_path, module_path)
|
984,383 | 6fa0a724f104e22e21a81398bf7856c9112ccc71 | import sys
import io
import requests
# Force UTF-8 on stdout/stderr so Korean output is not garbled on Windows.
sys.stdout = io.TextIOWrapper(sys.stdout.detach(), encoding = 'utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.detach(), encoding = 'utf-8')
# Response status codes (comments below translated from Korean)
s = requests.Session()
r = s.get('http://httpbin.org/get')
#print(r.status_code)
#print(r.ok)
#https://jsonplaceholder.typicode.com
r = s.get('https://jsonplaceholder.typicode.com/posts/1')
#print(r.text)
print(r.json())
print(r.json().keys())
print(r.json().values()) # print only the values, excluding the keys
print(r.encoding) # really important: prevents broken Hangul (encoding issues)
print(r.content) # fetch the body as binary data
print(r.raw) # fetch the raw response stream
984,384 | c29db71e7f35f8f892bdcd590a1d6092efa4ea86 | import numpy as np
import pandas as pd
import util
import csv
# Predict via the median number of plays.
train_file = 'train.csv'
test_file = 'test.csv'
soln_file = 'global_median.csv'
# Load the training data.
train_data = {}
with open(train_file, 'r') as train_fh:
train_csv = csv.reader(train_fh, delimiter=',', quotechar='"')
next(train_csv, None)
for row in train_csv:
user = row[0]
artist = row[1]
plays = int(row[2])
if not user in train_data:
train_data[user] = {}
train_data[user][artist] = plays
# Compute the global median.
# NOTE(review): this is Python 2 code (dict.iteritems(), print statements).
sol_dic = {}
for user, user_data in train_data.iteritems():
    plays_array = []
    for artist, plays in user_data.iteritems():
        plays_array.append(plays)
    train_data[user]["median"] = np.median(np.array(plays_array))
    # For users with >=5 artists and low relative spread, smooth the median
    # by averaging medians of the full list and two truncated copies.
    # NOTE(review): plays_array is in dict iteration order, not sorted, so
    # the [1:] / [:-2] truncations drop arbitrary entries — confirm intent.
    if len(plays_array) >= 5:
        if np.std(np.array(plays_array)) / np.median(np.array(plays_array)) < 1.0:
            train_data[user]["median"] = (np.median(np.array(plays_array)) + np.median(np.array(plays_array[1:])) + np.median(np.array(plays_array[:-2])))/3.0
    #print train_data[user]["median"]
print "done this part"
#global_median = np.median(np.array(plays_array))
#print "global median:", global_median
df = pd.read_pickle("newtrain.pd")
df_all = pd.read_pickle("newtrain_0.pd")
dic_df = df.set_index("ID")["ratio"].to_dict()
dic_df_all = df_all.set_index("ID")["ratio"].to_dict()
# Write out test solutions.
with open(test_file, 'r') as test_fh:
test_csv = csv.reader(test_fh, delimiter=',', quotechar='"')
next(test_csv, None)
with open(soln_file, 'w') as soln_fh:
soln_csv = csv.writer(soln_fh,
delimiter=',',
quotechar='"',
quoting=csv.QUOTE_MINIMAL)
soln_csv.writerow(['Id', 'plays'])
counter = 0
for row in test_csv:
counter += 1
if (counter%1000 == 0):
util.drawProgressBar(counter/4154805.0)
id = row[0]
user = row[1]
artist = row[2]
#print df[df['ID'] == str(id)]["ratio"]
weight = float(dic_df[user])
weight_all = float(dic_df_all[user])
#print weight, weight_all
scale = weight_all
if weight == 0:
weight_all = 1 #go back to median
soln_csv.writerow([id, train_data[user]["median"]])
|
984,385 | e8eb5fa371998f254c96f4cdf32b2d4a6f863a5a | from func import *
FILE_OPEN = False
location = ""
def open_file():
global FILE_OPEN, location
location = input("ํ์ผ ๊ฒฝ๋ก ์
๋ ฅ > ")
# ํ์ผ ๊ฒฝ๋ก ์ง์ ํ๋ ํจ์์ location ํ๋ผ๋ฏธํฐ ๊ฐ์ผ๋ก ๋ฃ๊ธฐ
FILE_OPEN = True
return location
while True:
print("1. ํ์ผ ์ด๊ธฐ")
print("2. ์นํฐ ์ ๋ณด")
print("3. ํํฐ์
์ ๋ณด")
print("4. FAT32 ์ ๋ณด")
print("5. ํ์ผ ์ ๋ณด")
print("0. ์ข
๋ฃ")
print()
select = int(input("๋ฉ๋ด ์ ํ : "))
if select == 1:
location = open_file()
elif select == 2:
# ์นํฐ ์ ๋ณด๋ฅผ ์กฐํํ๋ ํจ์
if not FILE_OPEN: # ํ์ผ ๊ฒฝ๋ก๊ฐ ์ง์ ๋์ง ์์ ์ํ์ผ ๊ฒฝ์ฐ ํ์ผ ์ด๊ธฐ๋ฅผ ์ ํ์ผ๋ก ์ํ
ShowMbrSector(open_file()).show()
else:
ShowMbrSector(location).show()
elif select == 3:
# ํํฐ์
์ ๋ณด๋ฅผ ์กฐํํ๋ ํจ์
if not FILE_OPEN:
ShowPartition(open_file()).show()
else:
ShowPartition(location).show()
elif select == 4:
# FAT32 ์ ๋ณด๋ฅผ ์กฐํํ๋ ํจ์
if not FILE_OPEN:
ShowFat32Info(open_file()).show()
else:
ShowFat32Info(location).show()
elif select == 5:
# ๋ฃจํธ ๋๋ ํ ๋ฆฌ์ ํ์ผ ์ ๋ณด๋ฅผ ์กฐํํ๋ ํจ์
if not FILE_OPEN:
ShowFilesInfo(open_file()).show()
else:
ShowFilesInfo(location).show()
elif select == 0:
exit()
|
984,386 | 5e4931e6fdca2393b1d64c4f6066333f556f6459 | '''
A generalization of Bézier surfaces, called the S-patch, uses an interesting scheme for indexing its control points.
In the case of an n-sided surface of degree d, each index has n non-negative integers that sum to d, and all possible configurations are used.
For example, for a 3-sided quadratic (degree 2) surface the control points are:
indices 3 2 => [[0,0,2],[0,1,1],[0,2,0],[1,0,1],[1,1,0],[2,0,0]]
Given the degree and the number of sides, generate all control point indices. The order of the indices in the list can be arbitrary, so for the above example
[[1,1,0],[2,0,0],[0,0,2],[0,2,0],[0,1,1],[1,0,1]]
is also a good solution.
'''
def indices(n, d):
    """Return all S-patch control-point indices for an n-sided surface of degree d.

    Each index is a list of n non-negative integers that sum to d, and every
    such combination appears exactly once.  The order of the returned indices
    is unspecified (any order is a valid answer).

    Args:
        n: number of sides, i.e. the length of each index list (n >= 1).
        d: degree, i.e. the sum of each index list (d >= 0).

    Returns:
        A list of lists of ints, each of length n and summing to d.
    """
    if d == 0:
        # Only the all-zero index sums to 0.
        return [[0] * n]
    elif n == 1:
        # Bug fix: must return a list of index lists ([[d]]), not a bare [d],
        # so direct calls with n == 1 have the same shape as every other case.
        return [[d]]
    elif d == 1:
        # Exactly one 1 in each possible position.
        result = []
        for i in range(0, n):
            element = [0] * n
            element[i] = 1
            result.append(element)
        return result
    elif n == 2:
        return [[i, d - i] for i in range(0, d + 1)]
    elif n > 2:
        # Fix the last coordinate to i and recurse on the remaining n-1 slots
        # with the remaining degree d-i.
        result = []
        for i in range(0, d + 1):
            lower_dim = indices(n - 1, d - i)
            for element in lower_dim:
                element.append(i)
                result.append(element)
        return result
def main():
    # Demo: print all control-point indices of a 3-sided, degree-4 S-patch.
    print(indices(3, 4))
main()
984,387 | 376e7f9fe8681a89a6629054a1c672165c9a08e4 | # -*- coding: utf-8 -*-
from teachablerobots.src.Communicate import SocketComm
from teachablerobots.src.GridSpace import *
import math
from time import sleep
#import threading
import ast
from multiprocessing import Process, Queue, Event, Value, Lock, Manager
from ctypes import c_char_p
class Robot(object):
    '''
    Vision-based controller for a color-tagged robot in a GridSpace view.

    Segments the robot by HSV color in the camera frame, overlays goals and
    tracking boxes on the display, and exchanges dict-formatted messages with
    the physical robot through a socket server serviced by a background
    process.

    Attributes:
        low, high: minimum/maximum HSV values used to segment the robot.
        robot: ((x, y), (w, h), angle) rotated rect from cv2.minAreaRect.
        contour: contour of the robot from the last successful detection.
        heading: the robot's relative angle.
        dir: the direction the robot is moving, "fwd" or "bk".
        gs: the GridSpace supplying camera frames and display helpers.
        location, direction, range, distanceTravelled: manager-backed values
            written by the background communication process.

    Functions:
        SetGoal(self, goal)
        Run(self)
        FindRobot(self)
        FrameOverlay(self)
        LocToCoord(self, location)
        CoordToLoc(self, coordinates)
        GetHeading(self, frame)
        DrawGoal(self, goal, showXY)
        DrawLine(self, point1, point2)
        DrawPolygon(self, startPoint, sideLength, numberOfSides)
    '''

    def __init__(self, gridSpace, color):
        """Set up tracking bounds for `color` and the comm process (not started)."""
        # HSV segmentation bounds for the supported tag colors.
        if(color == "green"):
            self.low = (48, 52, 149)
            self.high = (89, 325, 340)
        if(color == "pink"):
            self.low = (56, 82, 170)
            self.high = (180,271,258)
        if(color == "blue"):
            self.low = (55,132,142)
            self.high = (114,273,273)

        self.robot = ((0,0),(0,0), 0)
        self.contour = []
        self.heading = 0
        self.dir = "fwd"
        self.rLoc = (0,0)

        self.goal = (0,0)
        self.goalFound = False
        self.displayGoals = False
        self.displayGoalLoc = False
        self._finished = False
        self.mazeFinished = False
        self.gs = gridSpace

        # Shared state: written by the comm process, read by the UI loop.
        self.m = Manager()
        self.lock = Lock()
        self.location = self.m.Value(c_char_p, b"(4,1)")
        self.direction = self.m.Value(c_char_p, b"Up")
        self.range = self.m.Value("i", 0)
        self.distanceTravelled = self.m.Value('i', 0)

        self.robotServer = SocketComm(5580)
        self.robotComm = Process(target=self.GetRobotResponse, args=(self.location,self.direction,self.distanceTravelled,self.range,))
        self.robotComm.e = Event()

    def GetRobotResponse(self, loc, _dir, dist, r):
        """Background process body: drain the socket inbox and mirror robot
        status ("location" / "direction" / "range" messages) into the shared
        manager values, guarded by the lock."""
        while(not self.robotServer.finished.value):
            if(not self.robotServer.inbox.empty()):
                temp = ast.literal_eval(self.robotServer.inbox.get())
                try:
                    if("location" in temp):
                        self.lock.acquire()
                        loc.value = temp["location"].rstrip().encode('ascii')
                        self.lock.release()
                        # Each location report counts as one step travelled.
                        dist.value = dist.value + 1
                    elif("direction" in temp):
                        self.lock.acquire()
                        _dir.value = temp["direction"].rstrip().encode('ascii')
                        self.lock.release()
                    elif("range" in temp):
                        self.lock.acquire()
                        r.value = temp["range"]
                        print("range: " + str(temp["range"]))
                        self.lock.release()
                    else:
                        print("unknown: " + str(temp))
                finally:
                    # NOTE(review): no except clause, so parse errors in the
                    # branches above still propagate - confirm intended.
                    pass
        return

    def SendCommandSequence(self, seq):
        """Send a movement sequence; the bare string "0" is a stop command."""
        if(len(seq) == 1 and seq == "0"):
            self.robotServer.sendMessage("0")
            return
        else:
            d = dict()
            d["sequence"] = seq
            self.robotServer.sendMessage(str(d))
            return

    def SendObjective(self, objective):
        """Send a high-level objective string (e.g. drive to a quadrant)."""
        d = dict()
        d["objective"] = objective
        self.robotServer.sendMessage(str(d))
        print("sent: " + objective)
        return

    def SetGoal(self, goal):
        """Record the current goal position."""
        self.goal = goal
        return

    def Run(self):
        """Main UI loop: refresh the overlaid camera view until 'q' is pressed,
        then shut the comm process and connection down."""
        i = 0
        if(self.robotServer.connected):
            # Bug fix: the process attribute created in __init__ is
            # `robotComm`; the original referenced nonexistent `robotCommThread`.
            self.robotComm.start()
            print("starting comm thread")
        print("starting...")
        while(not self._finished):
            self.gs.Update(self.FrameOverlay)
            key = cv2.waitKey(1) & 0xFF
            if(key == ord("q")):
                # Bug fix: set the flag the loop actually tests; the original
                # assigned `self.finished`, so 'q' never exited the loop.
                self._finished = True
            elif(key == ord("c")):
                # Bug fix: the original wrote undefined name `window`; save
                # the current GridSpace frame instead.
                cv2.imwrite("picture%i.jpg" % i, self.gs.frame)
                i += 1
        # NOTE(review): assumes SocketComm exposes its own Event `e`
        # (distinct from robotComm.e) - confirm against Communicate.py.
        self.robotServer.e.set()
        self.robotServer.finished.value = True
        print("closing connection")
        self.robotServer.closeConnection()

    def FindRobot(self):
        """Detect the robot as the largest contour of plausible area in the
        processed frame, updating self.robot/self.contour when it has moved."""
        contours = cv2.findContours(self.gs.processedFrame, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]
        if(len(contours) > 0):
            cont = max(contours, key=cv2.contourArea)
            # Area gate rejects noise blobs and oversized regions.
            if(cv2.contourArea(cont) > 200 and cv2.contourArea(cont) < 700):
                temp = cv2.minAreaRect(cont)
                # Only accept the detection if the center actually moved.
                if(abs(temp[0][0] - self.robot[0][0]) > .02 and abs(temp[0][1] - self.robot[0][1]) > .02):
                    self.contour = cont
                    self.robot = temp
        return

    def FrameOverlay(self): #TODO draw point, student name in text area
        """Draw the goal marker and the robot's bounding box onto the frame;
        returns the annotated frame for GridSpace.Update."""
        if(self.displayGoals):
            self.DrawGoal(self.LocToCoord(self.goal), self.displayGoalLoc)
        if(len(self.contour) > 0):
            box = cv2.boxPoints(self.robot)
            box = np.int0(box)
            cv2.drawContours(self.gs.frame, [box], 0, (0, 255, 0), 2)
            (x,y) = self.LocToCoord(self.robot[0])
            # Celebrate when the robot reaches grid cell (5, 0).
            if(not self.mazeFinished and abs(5-x) < .5 and abs(0-y) < .5):
                self.goalFound = True
                cv2.putText(self.gs.frameCopy, "Good Job!", (100, 240), 2, 1, (0, 255, 0), 3)
        return self.gs.frame

    def LocToCoord(self, location):
        """Convert a pixel location to grid coordinates relative to the frame
        center (38 appears to be pixels per grid unit - confirm)."""
        return (location[0] - self.gs.frameCenter[0]) / 38, (self.gs.frameCenter[1] - location[1]) / 38

    def CoordToLoc(self, coordinates):
        """Convert grid coordinates back to integer pixel locations."""
        return (int(coordinates[0] *38 + self.gs.frameCenter[0])), (int(-coordinates[1]*38 + self.gs.frameCenter[1]))

    def DrawGoal(self, goal, showXY):
        """Draw a three-ring bullseye at `goal` (pixel coords); optionally
        label it with its pixel location."""
        # Bug fix: the class has no `self.frame`; draw on the GridSpace frame
        # as every other drawing method does.
        cv2.circle(self.gs.frame,(goal[0], goal[1]), 2, (220,80,80), 2)
        cv2.circle(self.gs.frame,(goal[0], goal[1]), 7, (220,80,80), 2)
        cv2.circle(self.gs.frame,(goal[0], goal[1]), 12, (220,80,80), 2)
        if(showXY):
            cv2.putText(self.gs.frame, str(self.CoordToLoc(goal)), (goal[0]+10, goal[1]+10), cv2.FONT_HERSHEY_PLAIN, .95, (50,100,200), 2)

    def DrawLine(self, point1, point2):
        """Draw a line segment between two pixel locations."""
        # Bug fix: `self.frame` -> `self.gs.frame` (see DrawGoal).
        cv2.line(self.gs.frame, point1, point2, (255,50,155), 4)

    def DrawPolygon(self, startPoint, sideLength, numberOfSides):
        """Not implemented yet."""
        pass

    def GetHeading(self, frame):
        """Not implemented yet."""
        pass
#r = Robot(GridSpace(mode=""), "green")
#r.Run()
|
984,388 | 463d1b602a4127ebd65a12a942d35e9361463ee7 | #!/usr/bin/env python3
import base64
import hashlib
import hmac
import os
import random

import flask

from gen_db import DATABASE
app = flask.Flask(__name__)
app.secret_key = "dljsaklqk24e21cjn!Ew@@dsa5"
# SRP public parameters: large prime modulus N, generator g, and the SRP-6a
# multiplier k = 3.
N = int("00ab76f585834c3c2b7b7b2c8a04c66571539fa660d39762e338cd8160589f08e3d223744cb7894ea6b424ebab899983ff61136c8315d9d03aef12bd7c0486184945998ff80c8d3d59dcb0196fb2c37c43d9cbff751a0745b9d796bcc155cfd186a3bb4ff6c43be833ff1322693d8f76418a48a51f43d598d78a642072e9fff533", 16)
g = 2
k = 3
# Server-side ephemeral secret `b` and a per-process salt (fixed until restart).
b = random.randint(0, N - 1)
salt = str(random.randint(0, 2**32 - 1))
def gen_seed():
    """Return a fresh random value drawn uniformly from [0, N)."""
    return random.randrange(N)
def xor_data(binary_data_1, binary_data_2):
    """XOR two byte strings element-wise, truncated to the shorter input."""
    return bytes(a ^ b for a, b in zip(binary_data_1, binary_data_2))
def modular_pow(base, exponent, modulus):
    """Compute (base ** exponent) % modulus by binary exponentiation.

    A modulus of -1 is treated as a sentinel and yields 0.
    """
    if modulus == -1:
        return 0
    acc = 1
    factor = base % modulus
    remaining = exponent
    # Square-and-multiply: consume one exponent bit per iteration.
    while remaining > 0:
        if remaining % 2:
            acc = (acc * factor) % modulus
        remaining >>= 1
        factor = (factor * factor) % modulus
    return acc
def hmac_sha256(key, message):
    """Compute HMAC-SHA256(key, message) and return the hex digest.

    Bug fix: the original hand-rolled version called the unqualified name
    `sha256`, raising NameError for any key longer than the 64-byte block.
    The stdlib `hmac` module implements the identical RFC 2104 construction
    (keys > 64 bytes hashed first, shorter keys zero-padded to 64 bytes,
    then the usual opad/ipad double hash), so it is used directly.
    """
    return hmac.new(key, message, hashlib.sha256).hexdigest()
def hasher(data):
    """Hash a string with SHA-256 and return the digest as a big integer."""
    digest = hashlib.sha256(data.encode()).digest()
    return int.from_bytes(digest, "big")
# Expose the SRP helpers and modulus to Jinja templates so the client-side
# page can perform its half of the handshake.
app.jinja_env.globals.update(
    gen_seed=gen_seed,
    modular_pow=modular_pow,
    N=N,
)
@app.route("/", methods=["GET", "POST"])
def home():
if flask.request.method == "POST":
username = flask.request.form.get("username")
if username is None:
flask.flash("Error encountered on server-side.")
return flask.redirect(flask.url_for("home"))
hmac = flask.request.form.get("computed")
if (hmac is not None):
return flask.redirect(flask.url_for("dashboard", user=username, hmac=hmac))
try:
pwd = DATABASE[username]
except KeyError:
flask.flash("Cannot find password for username in database")
return flask.redirect(flask.url_for("home"))
try:
A = int(flask.request.form.get("token1"))
except Exception as e:
flask.flash("Error encountered on server-side")
return flask.redirect(flask.url_for("home"))
if A is None:
flask.flash("Error encountered on server-side.")
return flask.redirect(flask.url_for("home"))
if A in [0, N]:
flask.flash("Error encountered on server-side. >:)")
return flask.redirect(flask.url_for("home"))
xH = hasher(salt + str(pwd))
v = modular_pow(g, xH, N)
B = (k * v + modular_pow(g, b, N)) % N
u = hasher(str(A) + str(B))
S = modular_pow(A * modular_pow(v, u, N), b, N)
K = hashlib.sha256(str(S).encode()).digest()
flask.session["server_hmac"] = hmac_sha256(K, salt.encode())
return flask.jsonify(nacl=salt, token2=B)
else:
return flask.render_template("home.html")
@app.route("/dash/<user>", methods=["POST", "GET"])
def dashboard(user):
if "hmac" not in flask.request.args:
flask.flash("Error encountered on server-side.")
return flask.redirect(flask.url_for("home"))
hmac = flask.request.args["hmac"]
servermac = flask.session.get("server_hmac", None)
print(hmac, servermac, not (hmac != servermac))
if hmac != servermac:
flask.flash("Incorrect password.")
return flask.redirect(flask.url_for("home"))
print("IT WORKS !!!")
pwd = DATABASE[user]
return flask.render_template("dashboard.html", username=user, pwd=pwd)
if __name__ == "__main__":
app.run()
|
984,389 | ef9aad95d3ea333ccb87f788d9004e63a1610ee9 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Main of the instance validator. This script takes a YAML building
configuration file as an argument and validates it for coherence with the
Digital Buildings ontology.
This is done by first ensuring the file syntax is valid YAML, then by
parsing the ontology and comparing it with the file contents.
This tool allows clients to independently validate their configuration files.
It saves time and provides more accuracy than manual error checks."""
from __future__ import print_function
from validate import generate_universe
from validate import entity_instance
from validate import instance_parser
from validate import subscriber
from validate import telemetry
import argparse
import sys
# TODO(nkilmer): update as you see good
def message_handler(message):
  """Print every telemetry point in a pubsub message, then ack it.

  Args:
    message: a pubsub message containing telemetry payload.
  """
  payload = telemetry.Telemetry(message)
  for point_key, point in payload.points.items():
    print()
    print('-point: ', point_key)
    print('-- point_name: ', point.point_name)
    print('-- present_value: ', point.present_value)
  message.ack()
# TODO add input and return type checks in all functions
if __name__ == '__main__':
  parser = argparse.ArgumentParser(
      description='Validate a YAML building configuration file')
  parser.add_argument('-i', '--input',
                      dest='filename',
                      required=True,
                      help='Filepath to YAML building configuration',
                      metavar='FILE')
  parser.add_argument('-m', '--modified-ontology-types',
                      dest='modified_types_filepath',
                      required=False,
                      help='Filepath to modified type filepaths',
                      metavar='MODIFIED_TYPE_FILEPATHS')
  parser.add_argument('-s', '--subscription',
                      dest='subscription',
                      required=False,
                      help='pubsub subscription',
                      metavar='subscription')
  parser.add_argument('-a', '--service-account',
                      dest='service_account',
                      required=False,
                      help='service account',
                      metavar='service-account')
  arg = parser.parse_args()

  # SYNTAX VALIDATION
  print('\nValidator starting ...\n')
  filename = arg.filename

  # Telemetry validation requires BOTH the subscription and the credentials.
  pubsub_validation_set = False
  if arg.subscription is not None and arg.service_account is not None:
    pubsub_validation_set = True
  elif arg.subscription is None and arg.service_account is None:
    pubsub_validation_set = False
  else:
    print('Subscription and a service account file are both '
          'needed for the telemetry validation!')
    sys.exit(0)

  # prints for syntax errors and exits gracefully
  raw_parse = instance_parser.parse_yaml(filename)
  print('Passed syntax checks!')

  modified_types_filepath = arg.modified_types_filepath
  print('Generating universe ...')
  universe = generate_universe.BuildUniverse(modified_types_filepath)
  if universe is None:
    print('\nError generating universe')
    sys.exit(0)
  print('Universe generated successfully')

  parsed = dict(raw_parse)
  entity_instances = {}
  entity_names = list(parsed.keys())
  # First build all the entity instances so cross-references can be resolved.
  for entity_name in entity_names:
    entity = dict(parsed[entity_name])
    instance = entity_instance.EntityInstance(entity,
                                              universe,
                                              set(entity_names))
    entity_instances[entity_name] = instance

  # Bug fix: the loop variable was named `entity_instance`, shadowing the
  # imported `entity_instance` module above.
  for entity_name, built_instance in entity_instances.items():
    if not built_instance.IsValidEntityInstance(entity_instances):
      print(entity_name, 'is not a valid instance')
      sys.exit(0)

  print('File passes all checks!')

  if pubsub_validation_set:
    print('Connecting to pubsub subscription: ', arg.subscription)
    sub = subscriber.Subscriber(arg.subscription, arg.service_account)
    sub.Listen(message_handler)
|
984,390 | 69132b88a9e8a74536284de828c4a688b2fe193a | from gridworld.GridEnv import *
def get_state_values_td(pi, env, gamma=0.9, alpha=0.2, alpha_decay_rate=.0003, min_alpha=0, episodes=30000):
    """Estimate state values V(s) of policy `pi` by TD(0) sampling.

    Args:
        pi: list mapping state index -> action.
        env: gym-style environment exposing nS, reset(), and step(a).
        gamma: discount factor.
        alpha: initial learning rate, decayed exponentially per episode.
        alpha_decay_rate: decay constant for the learning rate.
        min_alpha: floor for the decayed learning rate.
        episodes: number of sampled episodes.

    Returns:
        numpy array of length env.nS with the estimated state values.
    """
    V = np.zeros(env.nS)
    for episode in range(episodes):
        # Exponentially decayed learning rate, floored at min_alpha.
        alpha = max(min_alpha, alpha * np.exp(-alpha_decay_rate * episode))
        state = env.reset()
        done = False
        while not done:
            next_state, reward, done, _ = env.step(pi[state])
            # One-step TD target r + gamma * V(s') bootstraps the update.
            V[state] += alpha * (reward + gamma * V[next_state] - V[state])
            state = next_state
    return V
# Evaluate a fixed policy on the static grid world with TD(0).
game = GridEnv.steppable_static()
LEFT, DOWN, RIGHT, UP = range(4)
# Arbitrary cyclic policy: column 0 -> LEFT, 1 -> DOWN, 2 -> RIGHT, 3 -> UP.
pi = [0, 1, 2, 3,
      0, 1, 2, 3,
      0, 1, 2, 3]
V = get_state_values_td(pi, game, alpha=1)
print(V)
# less variance, and close to the true state values, the bias isn't that bad either...
# [ 0.10325534 0.12150854 0.16389367 0. 0.08338727 0.
# -0.65685648 0. -0.10264621 -0.30644452 -0.41139743 -0.65096012]
|
984,391 | 8c2d86a1a4d507b80fb8597c014a2d8575036b59 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """Auto-generated (pyuic5) two-tab dialog: tab 'RP' renames by replacing
    an old name with a new one, tab 'RN' renames outright.  Do not edit by
    hand - regenerate from mainwindow.ui instead (see file header)."""

    def setupUi(self, Dialog):
        """Build all widgets and geometry for the dialog."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(540, 404)
        self.tabWidget = QtWidgets.QTabWidget(Dialog)
        self.tabWidget.setGeometry(QtCore.QRect(20, 20, 501, 371))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.tabWidget.setFont(font)
        self.tabWidget.setObjectName("tabWidget")
        # --- Tab 3 ("RP"): path, old name, new name, result browser ---
        self.tab_3 = QtWidgets.QWidget()
        self.tab_3.setObjectName("tab_3")
        self.browserRP = QtWidgets.QTextBrowser(self.tab_3)
        self.browserRP.setGeometry(QtCore.QRect(20, 140, 451, 181))
        self.browserRP.setObjectName("browserRP")
        self.label_3 = QtWidgets.QLabel(self.tab_3)
        self.label_3.setGeometry(QtCore.QRect(20, 100, 71, 31))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_3.setFont(font)
        self.label_3.setObjectName("label_3")
        self.label_2 = QtWidgets.QLabel(self.tab_3)
        self.label_2.setGeometry(QtCore.QRect(20, 60, 71, 31))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_2.setFont(font)
        self.label_2.setObjectName("label_2")
        self.label = QtWidgets.QLabel(self.tab_3)
        self.label.setGeometry(QtCore.QRect(20, 20, 71, 31))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label.setFont(font)
        self.label.setObjectName("label")
        self.okButtonRP = QtWidgets.QPushButton(self.tab_3)
        self.okButtonRP.setGeometry(QtCore.QRect(390, 100, 81, 31))
        self.okButtonRP.setObjectName("okButtonRP")
        self.lineEditNewRP = QtWidgets.QLineEdit(self.tab_3)
        self.lineEditNewRP.setGeometry(QtCore.QRect(90, 100, 291, 31))
        self.lineEditNewRP.setObjectName("lineEditNewRP")
        self.chooseButtonRP = QtWidgets.QToolButton(self.tab_3)
        self.chooseButtonRP.setGeometry(QtCore.QRect(390, 20, 81, 31))
        self.chooseButtonRP.setObjectName("chooseButtonRP")
        self.lineEditPathRP = QtWidgets.QLineEdit(self.tab_3)
        self.lineEditPathRP.setGeometry(QtCore.QRect(90, 20, 291, 31))
        self.lineEditPathRP.setObjectName("lineEditPathRP")
        self.lineEditOldRP = QtWidgets.QLineEdit(self.tab_3)
        self.lineEditOldRP.setGeometry(QtCore.QRect(90, 60, 291, 31))
        self.lineEditOldRP.setObjectName("lineEditOldRP")
        self.tabWidget.addTab(self.tab_3, "")
        # --- Tab 4 ("RN"): path, new name, result browser ---
        self.tab_4 = QtWidgets.QWidget()
        self.tab_4.setObjectName("tab_4")
        self.label_4 = QtWidgets.QLabel(self.tab_4)
        self.label_4.setGeometry(QtCore.QRect(20, 60, 71, 31))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_4.setFont(font)
        self.label_4.setObjectName("label_4")
        self.label_6 = QtWidgets.QLabel(self.tab_4)
        self.label_6.setGeometry(QtCore.QRect(20, 20, 71, 31))
        font = QtGui.QFont()
        font.setPointSize(12)
        self.label_6.setFont(font)
        self.label_6.setObjectName("label_6")
        self.okButtonRN = QtWidgets.QPushButton(self.tab_4)
        self.okButtonRN.setGeometry(QtCore.QRect(390, 60, 81, 31))
        self.okButtonRN.setObjectName("okButtonRN")
        self.lineEditNewRN = QtWidgets.QLineEdit(self.tab_4)
        self.lineEditNewRN.setGeometry(QtCore.QRect(90, 60, 291, 31))
        self.lineEditNewRN.setObjectName("lineEditNewRN")
        self.lineEditPathRN = QtWidgets.QLineEdit(self.tab_4)
        self.lineEditPathRN.setGeometry(QtCore.QRect(90, 20, 291, 31))
        self.lineEditPathRN.setObjectName("lineEditPathRN")
        self.chooseButtonRN = QtWidgets.QToolButton(self.tab_4)
        self.chooseButtonRN.setGeometry(QtCore.QRect(390, 20, 81, 31))
        self.chooseButtonRN.setObjectName("chooseButtonRN")
        self.browserRN = QtWidgets.QTextBrowser(self.tab_4)
        self.browserRN.setGeometry(QtCore.QRect(20, 140, 451, 181))
        self.browserRN.setObjectName("browserRN")
        self.tabWidget.addTab(self.tab_4, "")

        self.retranslateUi(Dialog)
        self.tabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Apply all translatable display strings (labels, buttons, tab titles)."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.label_3.setText(_translate("Dialog", "ๆฐๅ็จฑ :"))
        self.label_2.setText(_translate("Dialog", "่ๅ็จฑ :"))
        self.label.setText(_translate("Dialog", "่ทฏๅพ :"))
        self.okButtonRP.setText(_translate("Dialog", "ๆดๆนๅ็จฑ"))
        self.chooseButtonRP.setText(_translate("Dialog", "้ธๆ่ทฏๅพ"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_3), _translate("Dialog", "RP"))
        self.label_4.setText(_translate("Dialog", "ๆฐๅ็จฑ :"))
        self.label_6.setText(_translate("Dialog", "่ทฏๅพ :"))
        self.okButtonRN.setText(_translate("Dialog", "ๆดๆนๅ็จฑ"))
        self.chooseButtonRN.setText(_translate("Dialog", "้ธๆ่ทฏๅพ"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_4), _translate("Dialog", "RN"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Dialog = QtWidgets.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
|
984,392 | 438462dda2cb91227d1bf745a708ceef50c7dbcb | """
generate original data file for hierarchy
Format:
[[[class_index for level i], ... ], [..], ...]
"""
import json
import numpy as np
import os
import argparse
import re
def generate_hierarchy(args):
    """Copy the scene hierarchy JSON from args.scene_file to args.output.

    Reads and re-serializes the JSON (also validating it in the process).
    Uses context managers so both file handles are closed even on error;
    the original leaked handles if json.load/dump raised.

    Args:
        args: object with `scene_file` (input path) and `output` (output path).
    """
    with open(args.scene_file, 'r') as src:
        scene = json.load(src)
    with open(args.output, 'w') as dst:
        json.dump(scene, dst)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Defaults point at the ADE dataset layout used by this repository.
    parser.add_argument('--scene_file', default='../../data/ADE/ADE_Origin/scene.json')
    parser.add_argument('--output', default='../../data/ADE/ADE_Supervision/scene.json')
    args = parser.parse_args()
    generate_hierarchy(args)
984,393 | 95644e444ce2ed5e1d4a94c9fd0a31d7a68c706c | from libdw import sm
from time import time, sleep
import lightSensorInput
from lightSensorInput import readLight
from FlushButtonInput import buttonState
from valveControl import setValve
from lightControl import changeLightState
# inp includes
# 0: the state of button press
# 1: the output of shitPresence Model
# output includes:
# flushing, waiting, flushDone
class flushController(sm.SM):
    """State machine for the flush valve.

    States: 'waiting' (idle) and 'flushing'.
    Input tuple: (button_pressed, presence_detected).
    Outputs: 'startFlush', 'waiting', 'flushing', 'endFlush'.
    """
    startState = 'waiting'

    def getNextValues(self, state, inp):
        if state == 'waiting':
            # A button press starts a flush; otherwise stay idle.
            if inp[0] == True:
                return ('flushing', 'startFlush')
            if inp[0] == False:
                return ('waiting', 'waiting')
        elif state == 'flushing':
            # Keep flushing while presence is detected; stop once it clears.
            if inp[1] == True:
                return ('flushing', 'flushing')
            if inp[1] == False:
                return ('waiting', 'endFlush')
if __name__ == '__main__':
    # NOTE(review): Python 2 print statements below; also the state-machine
    # instance shadows the imported `sm` module name - confirm intended.
    sm = flushController()
    sm.start()
    while True:
        print 'readLight: ', readLight()
        print 'buttonState: ', buttonState()
        print sm.step()
        sleep(0.5)
|
984,394 | e5c58c15b7d3e82ff7b96a937001035f20b06eec | # open a link in browser using python
import webbrowser
url = 'https://pythonexamples.org'
# Register Chrome explicitly by its Windows install path so .get() can find it.
webbrowser.register('chrome',
	None,
	webbrowser.BackgroundBrowser("C://Program Files//Google//Chrome//Application//chrome.exe"))
webbrowser.get('chrome').open(url)

# Google search using python (requires the third-party 'google' package).
#pip install google
try:
    from googlesearch import search
except ImportError:
    print("No module named 'google' found")

# to search
query = "images for scenery"
# tld: Google domain; num/stop: result count; pause: delay between requests.
for j in search(query, tld="co.in", num=10, stop=10, pause=2):
    print(j)
984,395 | 74c5f34ab8aad01377a092b6604a9206d54be86e | #from parse import parse
import re
# Shared parse state: raw grid rows plus cached dimensions.
lines = list()
maxCharLines = 0
numberOfLines = 0

def initializeList():
    """Read input.txt into `lines` and cache the grid dimensions.

    `maxCharLines` excludes the trailing newline of each row; rows look
    like ".....#......#....#........#.#..".
    """
    global numberOfLines
    global maxCharLines
    # Context manager closes the handle (the original leaked it), and the
    # stray no-op sample-row string literal has been folded into the docstring.
    with open("input.txt", "r") as f:
        for line in f:
            lines.append(line)
            numberOfLines = numberOfLines + 1
    maxCharLines = len(lines[0]) - 1
def pattern(right, down):
    """Count trees ('#') hit while sledding at slope (right, down).

    Starts at step (right, down) - the origin itself is never checked.
    Wraps horizontally because the grid pattern repeats to the right.
    Returns the number of trees encountered before running off the bottom.
    """
    global numberOfLines
    global maxCharLines
    treesFound = 0
    print("Following {0:3} right and {1:3} down ({2})".format(right, down, numberOfLines))
    rCount = 0
    dCount = 0
    end = 0
    while end == 0:
        rCount = rCount + right
        dCount = dCount + down
        if rCount >= maxCharLines:
            # Wrap around the right edge (maxCharLines excludes the newline).
            print("Pos = now {0}, max = {1}, becomes {2}".format(rCount, maxCharLines, rCount - maxCharLines))
            rCount = rCount - maxCharLines
        if dCount >= numberOfLines:
            end = 1
        else:
            print("Position {0:3} right and {1:3} down: Char = {2}".format(rCount, dCount, lines[dCount][rCount]))
            if '#' == lines[dCount][rCount]:
                #print ("Tree")
                treesFound += 1
    print("Found {0} trees".format(treesFound))
    return treesFound
initializeList()
# Multiply the tree counts over the five required slopes (AoC 2020 day 3, part 2).
trees = 0
trees = pattern(1, 1)
trees *= pattern(3, 1)
trees *= pattern(5, 1)
trees *= pattern(7, 1)
trees *= pattern(1, 2)
print("Total trees: {0}".format(trees))
984,396 | 0015314b33b969ba837cdf36c4d0a10103703143 | List=[2,7,8,5]
target=9
newlist=[]
for i in range(len(List)):
for p in range(i+1,len(List)):
if List[i]+List[p] == target:
print(List[i],List[p]) |
984,397 | 8d10f463f04a6d90fa22ca13363b3ce91101589e | # Lint as: python3
#
# Copyright 2020 The XLS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper utilities for asserting DSLX interpreter/LLVM IR JIT equivalence."""
from typing import Iterable
from xls.dslx import bit_helpers
from xls.dslx.python import interp_value as dslx_value
from xls.dslx.python.cpp_concrete_type import ArrayType
from xls.dslx.python.cpp_concrete_type import BitsType
from xls.dslx.python.cpp_concrete_type import ConcreteType
from xls.dslx.python.cpp_concrete_type import TupleType
from xls.ir.python import bits as ir_bits
from xls.ir.python import value as ir_value
class UnsupportedJitConversionError(Exception):
  """Raised when a value has no JIT-supported equivalent representation."""


class JitMiscompareError(Exception):
  """Raised when the JIT and DSLX interpreter give inconsistent results."""
def convert_interpreter_value_to_ir(
    interpreter_value: dslx_value.Value) -> ir_value.Value:
  """Recursively translates a DSLX Value into an IR Value."""
  # Bits and enums both carry their payload as raw bits.
  if interpreter_value.is_bits() or interpreter_value.is_enum():
    return ir_value.Value(interpreter_value.get_bits())
  if interpreter_value.is_array():
    converted = [
        convert_interpreter_value_to_ir(e)
        for e in interpreter_value.get_elements()
    ]
    return ir_value.Value.make_array(converted)
  if interpreter_value.is_tuple():
    converted = [
        convert_interpreter_value_to_ir(e)
        for e in interpreter_value.get_elements()
    ]
    return ir_value.Value.make_tuple(converted)
  raise UnsupportedJitConversionError(
      "Can't convert to JIT value: {}".format(interpreter_value))
def convert_args_to_ir(
    args: Iterable[dslx_value.Value]) -> Iterable[ir_value.Value]:
  """Converts each DSLX interpreter value in `args` to its IR equivalent."""
  # Comprehension replaces the original manual append loop.
  return [convert_interpreter_value_to_ir(arg) for arg in args]
def bits_to_int(jit_bits: ir_bits.Bits, signed: bool) -> int:
  """Converts an IR Bits value to a Python int (two's complement if signed)."""
  assert isinstance(jit_bits, ir_bits.Bits), jit_bits
  unsigned = jit_bits.to_uint()
  if signed:
    return bit_helpers.from_twos_complement(unsigned, jit_bits.bit_count())
  return unsigned
def compare_values(interpreter_value: dslx_value.Value,
                   jit_value: ir_value.Value) -> None:
  """Asserts equality between a DSLX Value and an IR Value.

  Recursively traverses the values (for arrays/tuples) and makes assertions
  about value and length properties.

  Args:
    interpreter_value: Value that resulted from DSL interpretation.
    jit_value: Value that resulted from JIT-compiled execution.

  Raises:
    JitMiscompareError: If the dslx_value and jit_value are not equivalent.
    UnsupportedJitConversionError: If there is not JIT-supported type equivalent
      for the interpreter value.
  """
  if interpreter_value.is_bits() or interpreter_value.is_enum():
    assert jit_value.is_bits(), f'Expected bits value: {jit_value!r}'
    jit_bits_value = jit_value.get_bits()
    assert isinstance(jit_bits_value, ir_bits.Bits), jit_bits_value
    bit_count = interpreter_value.get_bit_count()
    if bit_count != jit_bits_value.bit_count():
      raise JitMiscompareError(f'Inconsistent bit counts for value -- '
                               f'interp: {bit_count}, '
                               f'jit: {jit_bits_value.bit_count()}')
    interpreter_bits_value = interpreter_value.get_bits()
    if interpreter_bits_value != jit_bits_value:
      raise JitMiscompareError('Inconsistent bit values in return value -- '
                               'interp: {!r}, jit: {!r}'.format(
                                   interpreter_bits_value, jit_bits_value))
  elif interpreter_value.is_array():
    assert jit_value.is_array(), f'Expected array value: {jit_value!r}'
    interpreter_values = interpreter_value.get_elements()
    jit_values = jit_value.get_elements()
    interp_len = len(interpreter_values)
    jit_len = len(jit_values)
    if interp_len != jit_len:
      raise JitMiscompareError(f'Inconsistent array lengths in return value -- '
                               f'interp: {interp_len}, jit: {jit_len}')
    for interpreter_element, jit_element in zip(interpreter_values, jit_values):
      compare_values(interpreter_element, jit_element)
  elif interpreter_value.is_tuple():
    # Bug fix: this assert message was a plain string, so '{jit_value!r}' was
    # never interpolated; it is now an f-string like the other branches.
    assert jit_value.is_tuple(), f'Expected tuple value: {jit_value!r}'
    interpreter_values = interpreter_value.get_elements()
    jit_values = jit_value.get_elements()
    interp_len = len(interpreter_values)
    jit_len = len(jit_values)
    if interp_len != jit_len:
      raise JitMiscompareError(f'Inconsistent tuple lengths in return value -- '
                               f'interp: {interp_len}, jit: {jit_len}')
    for interpreter_element, jit_element in zip(interpreter_values, jit_values):
      compare_values(interpreter_element, jit_element)
  else:
    raise UnsupportedJitConversionError(
        'No JIT-supported type equivalent: {}'.format(interpreter_value))
def ir_value_to_interpreter_value(value: ir_value.Value,
                                  dslx_type: ConcreteType) -> dslx_value.Value:
  """Converts an IR Value to an interpreter Value."""
  if value.is_bits():
    assert isinstance(dslx_type, BitsType), dslx_type
    ir_bits_val = value.get_bits()
    # Signedness is not carried by the IR bits themselves; take it from the
    # DSLX type annotation.
    if dslx_type.get_signedness():
      return dslx_value.Value.make_sbits(ir_bits_val)
    return dslx_value.Value.make_ubits(ir_bits_val)
  elif value.is_array():
    assert isinstance(dslx_type, ArrayType), dslx_type
    return dslx_value.Value.make_array(
        tuple(
            ir_value_to_interpreter_value(e, dslx_type.element_type)
            for e in value.get_elements()))
  else:
    # Only bits, array, and tuple IR values are expected here.
    assert value.is_tuple()
    assert isinstance(dslx_type, TupleType), dslx_type
    return dslx_value.Value.make_tuple(
        tuple(
            ir_value_to_interpreter_value(e, t) for e, t in zip(
                value.get_elements(), dslx_type.get_unnamed_members())))
|
984,398 | 47439e3bb5789de916269539ced928d6dadc8f06 | """training
Revision ID: ffdfe694adfd
Revises: c81ae78ea1bd
Create Date: 2020-11-03 18:59:56.503126
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ffdfe694adfd'
down_revision = 'c81ae78ea1bd'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add the nullable integer `exNum` (exercise number) column to `training`.
    op.add_column('training', sa.Column('exNum', sa.Integer(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): drop the `exNum` column again.
    op.drop_column('training', 'exNum')
    # ### end Alembic commands ###
|
984,399 | 177c82514e3aaf8f9abdef7c4f903bfd5e639afd | # Generated by Django 2.0.6 on 2018-06-29 08:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: adds rated feedback via a through-model linking
    Evaluation and Criteria with an integer rate."""

    dependencies = [
        ('evaluations', '0003_auto_20180627_0913'),
    ]

    operations = [
        # Through-model holding the rate for each (evaluation, criteria) pair.
        migrations.CreateModel(
            name='EvaluationCriteriaRelationship',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rate', models.IntegerField()),
                ('criteria', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='evaluations.Criteria')),
                ('evaluation', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='evaluations.Evaluation')),
            ],
        ),
        # Expose the relationship as a ManyToMany through the model above.
        migrations.AddField(
            model_name='evaluation',
            name='feedback',
            field=models.ManyToManyField(through='evaluations.EvaluationCriteriaRelationship', to='evaluations.Criteria'),
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.