id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
132762 | #!/usr/bin/env python3
# Copyright (c) 2019 The Stakework Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import StakeWorkTestFramework
from test_framework.cfund_util import *
import urllib.parse
class CommunityFundProposalReorg(StakeWorkTestFramework):
    """Functional test: community-fund proposals and payment requests must
    survive a chain reorganisation and stay consistent across nodes."""

    def __init__(self):
        super().__init__()
        # Fresh chain per run, two nodes so we can fork and reorg between them.
        self.setup_clean_chain = True
        self.num_nodes = 2

    def setup_network(self, split=False):
        # Start both nodes with community-fund debug logging and connect them.
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, ["-debug=cfund"]))
        self.nodes.append(start_node(1, self.options.tmpdir, ["-debug=cfund"]))
        connect_nodes_bi(self.nodes, 0, 1)
        self.is_network_split = split

    def run_test(self):
        self.nodes[0].staking(False)
        # NOTE(review): duplicate call on node 0 — presumably the second call
        # was meant to be self.nodes[1].staking(False); confirm intent.
        self.nodes[0].staking(False)
        activate_cfund(self.nodes[0])
        sync_blocks(self.nodes)
        self.nodes[0].donatefund(10)
        slow_gen(self.nodes[0], 1)
        rawproposal = self.nodes[0].createproposal(self.nodes[0].getnewaddress(), 10, 36000, "test", 50, True)["raw"]
        # disconnect the nodes and generate the proposal on each node
        url = urllib.parse.urlparse(self.nodes[1].url)
        self.nodes[0].disconnectnode(url.hostname+":"+str(p2p_port(1)))
        time.sleep(2) # wait for disconnect
        hash = self.nodes[0].sendrawtransaction(rawproposal)
        self.nodes[1].sendrawtransaction(rawproposal)
        # Node 1 mines the longer branch; after reconnect its chain should win.
        self.nodes[0].generate(1)
        self.nodes[1].generate(2)
        connect_nodes_bi(self.nodes, 0, 1)
        sync_blocks(self.nodes)
        # Both nodes must agree on the proposal after the reorg.
        assert_equal(self.nodes[0].getproposal(hash), self.nodes[1].getproposal(hash))
        end_cycle(self.nodes[0])
        self.nodes[0].proposalvote(hash, "yes")
        slow_gen(self.nodes[0], 1)
        end_cycle(self.nodes[0])
        sync_blocks(self.nodes)
        assert_equal(self.nodes[0].getproposal(hash)['status'], 'accepted')
        rawpaymentrequest = self.nodes[0].createpaymentrequest(hash, 10, "paymentReq1", 1, True)["raw"]
        # disconnect the nodes and generate the proposal on each node
        url = urllib.parse.urlparse(self.nodes[1].url)
        self.nodes[0].disconnectnode(url.hostname+":"+str(p2p_port(1)))
        time.sleep(2) # wait for disconnect
        hash = self.nodes[0].sendrawtransaction(rawpaymentrequest)
        assert_equal(self.nodes[1].sendrawtransaction(rawpaymentrequest), hash)
        self.nodes[0].generate(1)
        self.nodes[1].generate(2)
        blockhash0 = self.nodes[0].getpaymentrequest(hash)["blockHash"]
        blockhash1 = self.nodes[1].getpaymentrequest(hash)["blockHash"]
        longestChain = self.nodes[1].getbestblockhash()
        preq1 = self.nodes[0].getpaymentrequest(hash)
        # I would have assumed reorg to node 1 should reorg the payment request and probably include it in the next block?
        connect_nodes_bi(self.nodes, 0, 1)
        sync_blocks(self.nodes)
        # check the nodes reorged to the longest chain (node 1)
        assert_equal(self.nodes[0].getbestblockhash(), longestChain)
        assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash())
        # Payment request must be identical on both nodes and anchored in the
        # block from node 1's (winning) branch.
        assert_equal(self.nodes[0].getpaymentrequest(hash), self.nodes[1].getpaymentrequest(hash))
        assert_equal(self.nodes[0].getpaymentrequest(hash)['hash'], hash)
        assert_equal(self.nodes[0].getpaymentrequest(hash)["blockHash"], blockhash1)
        assert_equal(self.nodes[1].getpaymentrequest(hash)["blockHash"], blockhash1)

if __name__ == '__main__':
    CommunityFundProposalReorg().main()
| StarcoderdataPython |
148381 | from __future__ import print_function
import sys
import numpy
import pytest
import struct
from stl import mesh
# Minimal single-facet ASCII STL document, shared as a fixture by the tests
# below.  NOTE(review): the original literal presumably carried indentation
# inside the solid/facet body; ASCII STL parsers are whitespace-tolerant, so
# the flattened content is kept verbatim here.
_STL_FILE = '''
solid test.stl
facet normal -0.014565 0.073223 -0.002897
outer loop
vertex 0.399344 0.461940 1.044090
vertex 0.500000 0.500000 1.500000
vertex 0.576120 0.500000 1.117320
endloop
endfacet
endsolid test.stl
'''.lstrip()
def test_valid_ascii(tmpdir, speedups):
    """A well-formed ASCII STL file must load without error."""
    tmp_file = tmpdir.join('tmp.stl')
    with tmp_file.open('w+') as fh:
        fh.write(_STL_FILE)
        fh.seek(0)
        mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
def test_ascii_with_missing_name(tmpdir, speedups):
    """An ASCII STL whose 'solid' header line has no name must still load."""
    tmp_file = tmpdir.join('tmp.stl')
    with tmp_file.open('w+') as fh:
        # Split the file into lines
        lines = _STL_FILE.splitlines()
        # Remove everything except solid
        lines[0] = lines[0].split()[0]
        # Join the lines to test files that start with solid without space
        fh.write('\n'.join(lines))
        fh.seek(0)
        mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
def test_ascii_with_blank_lines(tmpdir, speedups):
    """ASCII STL containing irregular whitespace must still parse."""
    # NOTE(review): the fixture presumably contained blank/indented lines in
    # the original test source; that whitespace is not recoverable here, so
    # the literal is kept exactly as stored.
    _stl_file = '''
solid test.stl
facet normal -0.014565 0.073223 -0.002897
outer loop
vertex 0.399344 0.461940 1.044090
vertex 0.500000 0.500000 1.500000
vertex 0.576120 0.500000 1.117320
endloop
endfacet
endsolid test.stl
'''.lstrip()
    tmp_file = tmpdir.join('tmp.stl')
    with tmp_file.open('w+') as fh:
        fh.write(_stl_file)
        fh.seek(0)
        mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
def test_incomplete_ascii_file(tmpdir, speedups):
    """Truncated ASCII STL content must raise (AssertionError) on load."""
    tmp_file = tmpdir.join('tmp.stl')
    # Header only, no facets at all.
    with tmp_file.open('w+') as fh:
        fh.write('solid some_file.stl')
        fh.seek(0)
        with pytest.raises(AssertionError):
            mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
    # Various truncation points mid-document (negative offset keeps a longer,
    # but still broken, prefix).
    for offset in (-20, 82, 100):
        with tmp_file.open('w+') as fh:
            fh.write(_STL_FILE[:-offset])
            fh.seek(0)
            with pytest.raises(AssertionError):
                mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
def test_corrupt_ascii_file(tmpdir, speedups):
    """Garbage injected into an ASCII STL body must be rejected."""
    # Overwrite the middle of a valid file with junk lines.
    tmp_file = tmpdir.join('tmp.stl')
    with tmp_file.open('w+') as fh:
        fh.write(_STL_FILE)
        fh.seek(40)
        print('####\n' * 100, file=fh)
        fh.seek(0)
        # NOTE(review): only asserted for the speedups path on Python 3 —
        # presumably the pure-Python reader tolerates this input; confirm.
        if speedups and sys.version_info.major != 2:
            with pytest.raises(AssertionError):
                mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
    # Blank out part of the body and splice in a little-endian int.
    with tmp_file.open('w+') as fh:
        fh.write(_STL_FILE)
        fh.seek(40)
        print(' ' * 100, file=fh)
        fh.seek(80)
        fh.write(struct.pack('<i', 10).decode('utf-8'))
        fh.seek(0)
        with pytest.raises(AssertionError):
            mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
def test_corrupt_binary_file(tmpdir, speedups):
    """Binary-looking junk: accepted when sized like a (facet-count 0-ish)
    binary STL, rejected when the size is inconsistent."""
    tmp_file = tmpdir.join('tmp.stl')
    # 84 bytes total: 80-byte header + 4 trailing bytes — loadable as binary.
    with tmp_file.open('w+') as fh:
        fh.write('#########\n' * 8)
        fh.write('#\0\0\0')
        fh.seek(0)
        mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
    # 90 bytes of junk with no valid count field — must fail.
    with tmp_file.open('w+') as fh:
        fh.write('#########\n' * 9)
        fh.seek(0)
        with pytest.raises(AssertionError):
            mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
    # Same 84-byte shape, but the header starts with 'solid' — still loadable.
    with tmp_file.open('w+') as fh:
        fh.write('#########\n' * 8)
        fh.write('#\0\0\0')
        fh.seek(0)
        fh.write('solid test.stl')
        fh.seek(0)
        mesh.Mesh.from_file(str(tmp_file), fh=fh, speedups=speedups)
def test_duplicate_polygons():
    """check() must report a problem for a mesh whose facets all share the
    vertex (0, 0, 0).

    BUG FIX: the stored copy assigned all three facets to
    ``data['vectors'][0]``, leaving facets 1 and 2 as all-zero triangles and
    never building the intended three-facet mesh.  Each facet is now written
    to its own index (0, 1, 2), matching the upstream numpy-stl test.
    """
    data = numpy.zeros(3, dtype=mesh.Mesh.dtype)
    data['vectors'][0] = numpy.array([[0, 0, 0],
                                      [1, 0, 0],
                                      [0, 1, 1.]])
    data['vectors'][1] = numpy.array([[0, 0, 0],
                                      [2, 0, 0],
                                      [0, 2, 1.]])
    data['vectors'][2] = numpy.array([[0, 0, 0],
                                      [3, 0, 0],
                                      [0, 3, 1.]])
    assert not mesh.Mesh(data, remove_empty_areas=False).check()
| StarcoderdataPython |
3220722 | <filename>Monsoon/reflash.py
import platform
import usb.core
import usb.util
import struct
from Monsoon import Operations as op
from copy import deepcopy
import numpy as np
import array
# Module-level USB state, populated by bootloaderMonsoon.setup_usb().
DEVICE = None         # usb.core device handle once found
DEVICE_TYPE = None
epBulkWriter = None   # bulk OUT endpoint (host -> device)
epBulkReader = None   # bulk IN endpoint (device -> host)
VID = '0x2ab9'        # default vendor id string (bootloader-mode Monsoon)
PID = '0xffff'
class bootloaderMonsoon(object):
    """Driver for a Monsoon Power Monitor in bootloader mode.

    Talks to the device over USB bulk endpoints to reflash the PIC firmware
    from Intel HEX / Monsoon FWM files.  The write sequence (erase, write,
    verify) is order-sensitive; do not reorder the boot commands.
    """

    def __init__(self, *args, **kwargs):
        pass

    def setup_usb(self):
        """Sets up the USB connection."""
        global DEVICE
        global epBulkWriter
        global epBulkReader
        global VID
        global PID
        DEVICE = usb.core.find(idVendor=0x2AB9, idProduct=0xFFFF)
        if DEVICE is None:  # If not a LVPM, look for an HVPM.
            DEVICE = usb.core.find(idVendor=0x04d8, idProduct=0x000b)
            VID = '0x4d8'
            PID = '0xb'
        if "Linux" == platform.system():
            try:
                DEVICE.detach_kernel_driver(0)
            except:
                pass  # already unregistered
        DEVICE.set_configuration()
        cfg = DEVICE.get_active_configuration()
        intf = cfg[(0, 0)]
        # Locate the bulk OUT and bulk IN endpoints on the first interface.
        epBulkWriter = usb.util.find_descriptor(
            intf,
            custom_match = \
            lambda e: \
            usb.util.endpoint_direction(e.bEndpointAddress) == \
            usb.util.ENDPOINT_OUT)
        epBulkReader = usb.util.find_descriptor(
            intf,
            custom_match = \
            lambda e: \
            usb.util.endpoint_direction(e.bEndpointAddress) == \
            usb.util.ENDPOINT_IN)

    def __bootCommand(self, Command, length, address, data):
        """Sends boot command.

        Packet layout: [command, length, address bytes (reversed), payload,
        zero padding up to `length`].  Returns the raw response bytes.
        """
        sendData = []
        sendData.append(Command)
        sendData.append(length)
        sendData.append(address[2])
        sendData.append(address[1])
        sendData.append(address[0])
        for i in range(0, len(data)):
            sendData.append(data[i])
        for i in range(len(data), length):
            sendData.append(0)
        test = epBulkWriter.write(sendData, timeout=10000)
        # Response echoes a 5-byte header before the payload.
        ret = epBulkReader.read(length+5, timeout=10000)
        return ret

    def writeFlash(self, hex_):
        """Writes a hex file to the Power Monitor's PIC. Uses Intel HEX file format."""
        Flash, EEPROM, IDlocs, Config = self.__formatHex(hex_)
        print("Erasing Flash...")
        self.__writeRegion(op.BootloaderMemoryRegions.Flash, op.BootloaderCommands.EraseFlash, 0x0800, Flash, None)
        print("Writing Flash...")
        if(self.__writeRegion(op.BootloaderMemoryRegions.Flash, op.BootloaderCommands.WriteFlash, 0x0800, Flash, op.BootloaderCommands.ReadFlash)):
            print("Flash written OK")
        # Don't actually erase the EEPROM, this would wipe out all of the calibration data.
        #if(self.writeRegion(op.BootloaderMemoryRegions.EEPROM,op.BootloaderCommands.WriteEEPROM,0x0000,EEPROM,op.BootloaderCommands.ReadEEPROM)):
        #    print("EEPROM written OK")
        if(self.__writeChunk(op.BootloaderMemoryRegions.IDLocs, op.BootloaderCommands.WriteFlash, 0x0000, IDlocs, op.BootloaderCommands.ReadFlash)):
            print("IDLocs written OK")
        if(self.__writeChunk(op.BootloaderMemoryRegions.Config, op.BootloaderCommands.WriteConfig, 0x0000, Config, op.BootloaderCommands.ReadConfig)):
            print("Config written OK")

    def __writeRegion(self, memoryRegion, command, addressStart, regionData, errorCheckCommand):
        """Writes information to a memory region, 16 bytes at a time.

        If errorCheckCommand is given, each chunk is read back and compared;
        returns False if any chunk fails verification.
        """
        address = [0 for _ in range(3)]
        data = [0 for _ in range(16)]
        result = True
        progressThresholds = [x*10 for x in range(11)]
        progressindex = 0
        len(regionData)  # NOTE(review): no-op expression, result unused
        for i in range(addressStart, len(regionData), 16):
            # Split the linear offset into address bytes (little-endian pack).
            memoryIndex = struct.unpack("BBBB", struct.pack('I', i))
            address[0] = memoryRegion
            address[1] = memoryIndex[1]
            address[2] = memoryIndex[0]
            data = regionData[i:i+16]
            #self.bootCommand(op.BootloaderCommands.EraseFlash,16,address,[])
            self.__bootCommand(command, len(data), address, data)
            if(errorCheckCommand != None):
                # Read back and verify; response payload starts after the
                # 5-byte header.
                dataout = self.__bootCommand(errorCheckCommand, 16, address, [])
                dataout = dataout[5:len(dataout)]
                if not self.__compare(data, dataout):
                    result = False
                    print("Write error")
            percentComplete = (i*1.0 / len(regionData)) * 100
            if(progressThresholds[progressindex] < percentComplete):
                print('%.0f percent complete' % percentComplete)
                progressindex += 1
        return result

    def __writeChunk(self, memoryRegion, command, addressStart, regionData, errorCheckCommand):
        """Writes a single small region (IDLocs/Config) in one command."""
        result = True
        address = [0 for _ in range(3)]
        address[0] = memoryRegion
        address[1] = 0
        address[2] = 0
        data = regionData
        # Config words are not erased before writing.
        if(memoryRegion != op.BootloaderMemoryRegions.Config):
            self.__bootCommand(op.BootloaderCommands.EraseFlash, 16, address, [])
        self.__bootCommand(command, len(data), address, data)
        #dataout = self.bootCommand(errorCheckCommand,16,address,[])
        #dataout = dataout[5:len(dataout)]
        #if not self.compare(data,dataout):
        #    result = False
        #    print("Reflash Write error")
        return result

    def __compare(self, data, dataout):
        """Compare read data to the data we think we wrote."""
        if(data == None or dataout == None):
            return False
        if(len(data) != len(dataout)):
            return False
        for i in range(len(data)):
            if(data[i] != dataout[i]):
                return False
        return True

    def __byteLine(self, line):
        """Translate a HEX file line into address, linetype, data, and checksum"""
        output = []
        # Each record byte is two hex characters; skip the leading ':'.
        # NOTE(review): np.int was removed in NumPy 1.24 — int(...) is the
        # modern spelling; confirm pinned NumPy version before upgrading.
        for offset in range(1, len(line)-1, 2):
            output.append(struct.unpack("B", struct.pack('B', np.int(line[offset:offset+2], 16))[0])[0])
        address = []
        length = output[0]
        address.append(output[1])
        address.append(output[2])
        type_ = output[3]
        Data = output[4:4+length]
        checksum = output[len(output)-1]
        return address, type_, Data, checksum

    def getHeaderFromFWM(self, filename):
        """Strips the header from a Monsoon FWM file, returns the HEX file and the formatted header.
        Header format [VID,PID,Rev,Model]"""
        f = open(filename, 'r')
        hex_ = f.read()
        f.close()
        # The Intel HEX body starts at the first ':'.
        headerEnd = hex_.find(':')
        header = hex_[0:headerEnd]
        offset = 7
        count = array.array('B', header[offset])[0]
        offset += 1
        hex_ = hex_[headerEnd:len(hex_)]
        outHeader = [0 for _ in range(4)]
        headers = []
        i = 0
        for i in range(count):
            outHeader[0] = array.array('H', header[offset:offset+2])[0] #VID
            offset += 2
            outHeader[1] = array.array('H', header[offset:offset+2])[0] #PID
            offset += 2
            outHeader[2] = array.array('H', header[offset:offset+2])[0] #Rev
            offset += 2
            outHeader[3] = array.array('H', header[offset:offset+2])[0] #Model
            offset += 2
            test = deepcopy(outHeader)
            headers.append(test)
            i += 1
        return headers, hex_

    def getHexFile(self, filename):
        """Reads an Intel HEX file."""
        f = open(filename, 'r')
        hex_ = f.read()
        f.close()
        return hex_

    def __formatHex(self, hex_):
        """Takes raw hex_ input, and turns it into an array of hex_ lines."""
        output = []
        lineEnd = hex_.find('\n')
        while lineEnd > 0:
            output.append(hex_[0:lineEnd])
            hex_ = hex_[lineEnd+1:len(hex_)]
            lineEnd = hex_.find('\n')
        Flash, EEPROM, IDlocs, Config = self.__formatAsPICFlash(output)
        return Flash, EEPROM, IDlocs, Config

    def __formatAsPICFlash(self, hex_):
        """Formats an array of hex_ lines as PIC memory regions."""
        # Regions pre-filled with 0xFF (erased-flash value).
        flash = [0xff for _ in range(32768)]
        EEPROM = [0xff for _ in range(256)]
        IDlocs = [0xff for _ in range(16)]
        Config = [0xff for _ in range(14)]
        addressMSB = 0
        for line in hex_:
            address, type_, Data, _ = self.__byteLine(line)
            intAddress = struct.unpack("h", struct.pack("BB", address[1], address[0]))[0]
            # Extended-linear-address records select the target region.
            if(type_ == op.hexLineType.ExtendedLinearAddress):
                addressMSB = Data[1]
            if(type_ == op.hexLineType.Data):
                if(addressMSB == op.BootloaderMemoryRegions.Flash):
                    for byte in Data:
                        flash[intAddress] = byte
                        intAddress += 1
                if(addressMSB == op.BootloaderMemoryRegions.EEPROM):
                    intAddress = address[1]
                    for byte in Data:
                        EEPROM[intAddress] = byte
                        intAddress += 1
                if(addressMSB == op.BootloaderMemoryRegions.IDLocs):
                    intAddress = address[1]
                    for byte in Data:
                        IDlocs[intAddress] = byte
                        intAddress += 1
                if(addressMSB == op.BootloaderMemoryRegions.Config):
                    intAddress = address[1]
                    for byte in Data:
                        Config[intAddress] = byte
                        intAddress += 1
        return flash, EEPROM, IDlocs, Config

    def verifyHeader(self, headers):
        """Verifies the header matches the physical hardware being reflashed."""
        for head in headers:
            if(hex(head[0]) == VID and hex(head[1]) == PID):
                return True
        return False

    def getSerialNumber(self):
        """The bootloader lacks a get command for the serial number, but we can just read the EEPROM value directly with the appropriate boot command"""
        address = [op.BootloaderMemoryRegions.EEPROM, 0, 8]  # Memory address of the Serial number
        ret = self.__bootCommand(op.BootloaderCommands.ReadEEPROM, 2, address, [])
        rawSerial = ret[5:7]
        serialno = struct.unpack('H', struct.pack('B'*2, rawSerial[0], rawSerial[1]))[0]
        return serialno

    def resetToMainSection(self):
        """
        Exits bootloader mode and returns to normal mode.
        This will disconnect the device, and you should reconnect with HVPM.py or LVPM.py, depending on your hardware.
        Most LVPM units have an older version of the bootloader, and this command may be nonfunctional on them.
        In that case, just manually power cycle the unit."""
        wValue = 0
        wIndex = 0
        wLength = 0
        try:
            self.__bootCommand(op.BootloaderCommands.Reset, 1, [0, 0, 0], [])
        except:
            # This will always throw an exception because it disconnects the device and re-enumerates as a normal Power Monitor
            print("Resetting to Main Section.")
| StarcoderdataPython |
3367357 | #!/usr/bin/python
import sys
import pickle
import numpy as np
import matplotlib.pyplot as plt
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
### DD Here we selected features based on:
# A. Intuition (assuming a correlation between POI status and financial data + email communication with other POIs)
# B. feature_importances_ attributes
# C. visualization
# NOTE: this is Python 2 code (print statements, dict.iteritems,
# sklearn.cross_validation) — do not run under Python 3 without porting.
features_list = ['poi', 'deferred_income', 'total_stock_value', 'expenses', 'poi_mail_ratio']

### Load the dictionary containing the dataset
with open("final_project_dataset.pkl", "r") as data_file:
    data_dict = pickle.load(data_file)

# DD Checking how many samples we'll be working with
num_samples = len(data_dict)
print "data set length:", num_samples

### Task 2: Remove outliers
# DD First remove non-people entries
data_dict.pop("TOTAL")
data_dict.pop("THE TRAVEL AGENCY IN THE PARK")

# DD Iterate over dataset and use ad hoc filters to remove other outliers, if necessary
salaryList = []  # NOTE(review): populated nowhere and never read
clean_dict = {}
for name, pdata in data_dict.iteritems():
    #print name
    # Remove email freaks
    if pdata["from_messages"] > 10000:
        continue
    clean_dict[name] = pdata

### Task 3: Create new feature(s)
# DD Here we'll create a new feature poi_mail_ratio which represents
# the share of emails coming from POI
#Lists used for visualization
from_all=[]
from_poi=[]
m_ratio_list=[]
for name, pdata in clean_dict.iteritems():
    pdata["poi_mail_ratio"] =0
    if pdata['from_messages'] != "NaN":
        from_all.append(float(pdata['from_messages']))
        from_poi.append(float(pdata['from_poi_to_this_person']))
        m_ratio = float(pdata['from_poi_to_this_person']) / float(pdata['from_messages'])
        pdata["poi_mail_ratio"] = m_ratio
        m_ratio_list.append(m_ratio)
print "m ratios", m_ratio_list
plt.scatter(from_all, from_poi)
plt.xlabel("all emails")
plt.ylabel("from POI")
#plt.show()

### Store to my_dataset for easy export below.
my_dataset = clean_dict
### Extract features and labels from dataset for local testing
data = featureFormat(my_dataset, features_list, sort_keys = True)
labels, features = targetFeatureSplit(data)

### Task 4: Try a varity of classifiers
### Please name your classifier clf for easy export below.
### Note that if you want to do PCA or other multi-stage operations,
### you'll need to use Pipelines. For more info:
### http://scikit-learn.org/stable/modules/pipeline.html
# Provided to give you a starting point. Try a variety of classifiers.
from sklearn.tree import DecisionTreeClassifier
#using rbf vs linear
clf = DecisionTreeClassifier(min_samples_split=5) #min_samples_split=40

### Task 5: Tune your classifier to achieve better than .3 precision and recall
### using our testing script. Check the tester.py script in the final project
### folder for details on the evaluation method, especially the test_classifier
### function. Because of the small size of the dataset, the script uses
### stratified shuffle split cross validation. For more info:
### http://scikit-learn.org/stable/modules/generated/sklearn.cross_validation.StratifiedShuffleSplit.html
# Example starting point. Try investigating other evaluation techniques!
from sklearn.cross_validation import train_test_split
features_train, features_test, labels_train, labels_test = \
    train_test_split(features, labels, test_size=0.3, random_state=42)

from time import time
t0 = time()
clf.fit(features_train, labels_train)
print "training time:", round(time()-t0, 3), "s"
predicted = clf.predict(features_test)

## looking for most important feature / word
feat_imp = clf.feature_importances_
print "feat_imp", feat_imp

#Evaluation metrics
from sklearn.metrics import accuracy_score
acc = accuracy_score(labels_test, predicted)
print "accuracy:", acc
from sklearn.metrics import precision_score
precis = precision_score(labels_test, predicted)
print "precision:", precis
from sklearn.metrics import recall_score
recall = recall_score(labels_test, predicted)
print "recall:", recall

### Task 6: Dump your classifier, dataset, and features_list so anyone can
### check your results. You do not need to change anything below, but make sure
### that the version of poi_id.py that you submit can be run on its own and
### generates the necessary .pkl files for validating your results.
dump_classifier_and_data(clf, my_dataset, features_list)
| StarcoderdataPython |
1763694 | from flask import Blueprint, g
from .v0 import get_routes as get_v0_routes
from .content import main
def register_routes(app):
    """Register routes with the blueprint API
    Arguments:
        app {FlaskApp} -- The flask app
    """
    # Content (non-API) routes live at the root; versioned API under /api/v0.
    app.register_blueprint(main, url_prefix='')
    app.register_blueprint(get_v0_routes(), url_prefix='/api/v0')
    # app.register_blueprint(get_v1_routes(), url_prefix='/api/v1') # for when we add new version of routes, this is how itll be done.
| StarcoderdataPython |
3245327 | import argparse
import json
from argparse import RawTextHelpFormatter
# ---
# CLI definition: RawTextHelpFormatter keeps the multi-line description
# verbatim in --help output.
parser = argparse.ArgumentParser(description='''
Provide me with the movies JSON and I will remove each movie without IMDb data. I expect the movies of this JSON to have acquired their IMDb data by the 'get_imdb.py' script.\n
Movies with the 'imdb_failed' field will be automatically discarded, though this behavior can be disabled.
''', formatter_class=RawTextHelpFormatter)
# Fields a movie must carry to count as "has IMDb data".
default_fields = [ 'movie_director', 'movie_cast', 'movie_plot' ]

def list_to_str(strings):
    """Return the strings single-quoted and space-separated, e.g.
    ``'a' 'b' 'c'``; returns None for an empty input (kept for backward
    compatibility with the original accumulator implementation)."""
    if not strings:
        return None
    # str.join replaces the original quadratic manual concatenation.
    return ' '.join("'{}'".format(s) for s in strings)
# Argument declarations (continuation lines sit inside the call parentheses).
parser.add_argument('input',
                    help="JSON input file with the movies")
parser.add_argument('-o', '--output',
                    help="Place the output into a file. Defaults to \"<input>.out\"")
parser.add_argument('-t', "--trash-output",
                    help="Place the discarded movies into a separate file")
parser.add_argument('-f', '--obligatory-fields',
                    help="Fields which the movie must have for it to be considered with IMDb data. If even one of them is not found, the entry will be discarded. Default fields: {}".format(list_to_str(default_fields)),
                    nargs='+', default=default_fields)
parser.add_argument('--disable-imdb-failed',
                    help="Set it so that the 'imdb_failed' field won't be used to automatically discard movies",
                    action="store_true")
args = parser.parse_args()
# ---
# Resolve effective settings from the parsed options.
input_filename = args.input
output_filename = args.output if args.output else input_filename + ".out"
obligatory_fields = args.obligatory_fields
use_imdb_failed = not args.disable_imdb_failed
trash_filename = args.trash_output
# ---
print("Input filename : \"{}\"".format(input_filename))
print("Output filename : \"{}\"".format(output_filename))
print("Fields that will be obligatory:", list_to_str(obligatory_fields))
if use_imdb_failed: print("Movies with 'imdb_failed' will be promptly discarded.")
print("---")

# Load the movie list produced by get_imdb.py.
data = None
with open(input_filename) as input_file:
    data = json.load(input_file)

kept_movies = []
trash = []
for movie in data:
    # Automatic discard: the IMDb scrape explicitly failed for this movie.
    if use_imdb_failed and 'imdb_failed' in movie:
        print("Discarded movie with 'imdb_failed': \"{}\"".format(movie["movie_title"]))
        if trash_filename: trash.append(movie)
        continue
    # BUG FIX: the original `continue` inside the field loop only skipped to
    # the next *field*, so movies missing obligatory fields were printed and
    # trashed once per missing field yet still appended to kept_movies.
    # Discard the movie as soon as any obligatory field is absent.
    if any(field not in movie for field in obligatory_fields):
        print("Discarded movie without obligatory fields: \"{}\"".format(movie["movie_title"]))
        if trash_filename: trash.append(movie)
        continue
    kept_movies.append(movie)

print("Saving to \"{}\"...".format(output_filename))
with open(output_filename, 'w') as output_file:
    json.dump(kept_movies, output_file)
if trash_filename:
    print("Saving discarded files to \"{}\"...".format(trash_filename))
    with open(trash_filename, 'w') as trash_file:
        json.dump(trash, trash_file)

kept_percent = "{0:.2f}".format((len(kept_movies) * 100) / len(data))
print("---")
print("{} movies ({}%) kept after pruning.".format(len(kept_movies), kept_percent))
| StarcoderdataPython |
4835257 | <gh_stars>1-10
import logging
from gym.envs.doom import doom_env
logger = logging.getLogger(__name__)
class DoomTakeCoverEnv(doom_env.DoomEnv):
    """
    ------------ Training Mission 8 - Take Cover ------------
    This map is to train you on the damage of incoming missiles.
    It is a rectangular map with monsters firing missiles and fireballs
    at you. You need to survive as long as possible.

    Allowed actions:
        [10] - MOVE_RIGHT - Move to the right - Values 0 or 1
        [11] - MOVE_LEFT  - Move to the left  - Values 0 or 1
        Note: see controls.md for details

    Rewards:
        + 1 - 35 times per second - Survive as long as possible
    Goal: 750 points
        Survive for ~ 20 seconds

    Mode:
        - env.mode can be 'fast', 'normal' or 'human' (e.g. env.mode = 'fast')
        - 'fast' (default) will run as fast as possible (~75 fps) (best for simulation)
        - 'normal' will run at roughly 35 fps (easier for human to watch)
        - 'human' will let you play the game (keyboard only: Arrow Keys, '<', '>' and Ctrl)

    Ends when:
        - Player is dead (one or two fireballs should be enough to kill you)
        - Timeout (60 seconds - 2,100 frames)

    Actions:
        actions = [0] * 43
        actions[10] = 0 # MOVE_RIGHT
        actions[11] = 1 # MOVE_LEFT
    -----------------------------------------------------
    """

    def __init__(self):
        # 7 is this mission's scenario index in the DoomEnv level table.
        super(DoomTakeCoverEnv, self).__init__(7)
| StarcoderdataPython |
3355379 | import wx
# Minimal wxPython "Hello World": create the application object, show one
# top-level frame, then hand control to the GUI event loop (blocks until the
# window is closed).
app = wx.App()
frm = wx.Frame(None, title="Hello World")
frm.Show()
app.MainLoop()
| StarcoderdataPython |
1665681 | <reponame>kenoseni/Flight-Booking
"""Base schema module"""
from marshmallow import Schema, fields
from ..utilities.error_handler.handle_error import ValidationError
class BaseSchema(Schema):
    """Base schema for all models"""
    # Shared audit fields: dump-only (never accepted on load) and exposed to
    # API clients in camelCase via dump_to (marshmallow 2.x keyword).
    id = fields.String(dump_only=True)
    created_at = fields.String(dump_only=True, dump_to='createdAt')
    updated_at = fields.String(dump_only=True, dump_to='updatedAt')
    created_by = fields.String(dump_only=True, dump_to='createdBy')
    updated_by = fields.String(dump_only=True, dump_to='updatedBy')

    def load_object_into_schema(self, data, partial=False):
        """Helper function to load python objects into schema.

        Raises the project's ValidationError (HTTP 400 payload) when
        deserialization reports any field errors.
        """
        data, errors = self.load(data, partial=partial)
        if errors:
            raise ValidationError(
                dict(errors=errors, message='An error occurred'), 400)
        return data
| StarcoderdataPython |
1740518 | <reponame>uct-cbio/galaxy-tools<gh_stars>0
#!/usr/bin/python
#to remove duplicate sequences and renames identical seqnames:
#reads a tab_del alignment and outputs a fasta file with one copy of each sequence
#and prints deleted copies of duplicate seqs
#also prints number of sequences in the output file
#TO RUN: python remove_duplicates.py duplicate_alignment.txt output.fasta log.txt
import sys
# Command-line arguments (positional, taken from the end of argv):
# tab-delimited alignment in, FASTA out, log file.
infile = sys.argv[-3]  # aafile, nucfile
outfile = sys.argv[-2]
logfile = sys.argv[-1]

seen_seqs = set()  # sequences already kept (set: O(1) membership vs the original O(n) list scan)
data = {}          # name -> sequence, one entry per unique sequence
duplicates = []    # names of discarded duplicate copies

# `with` replaces the original open/close pair (and avoids shadowing the
# `file` builtin).
with open(infile, "r") as aln_file:
    lines = aln_file.readlines()

for idx, line in enumerate(lines):
    name = line.strip().split("\t")[0]
    seq = line.strip().split("\t")[-1]
    if seq not in seen_seqs:
        seen_seqs.add(seq)
        if name not in data:  # avoid identical names for diff seqs
            data[name] = seq
        else:
            # Disambiguate a reused name by suffixing the 0-based line index.
            data[name + "_r" + str(idx)] = seq
    else:
        duplicates.append(name)

with open(outfile, "w") as outf, open(logfile, "w") as dfile:
    dfile.write("Duplicate sequences removed are:\n")
    for d in data:
        outf.write(">" + d + "\n" + data[d] + "\n")
    for dup in duplicates:
        dfile.write(dup + "\n")
    dfile.write("\nNumber of remaining sequences in edited alignment is " + str(len(data)))
196439 | #!/usr/bin/env python
__author__ = '<NAME>'
import os
import argparse
from Bio import SeqIO
# Matplotlib must be configured for headless use BEFORE pyplot is imported:
# writable config dir plus the non-interactive Agg backend.
os.environ['MPLCONFIGDIR'] = '/tmp/'
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.ioff()  # disable interactive mode; figures are only saved to disk
from RouToolPa.GeneralRoutines.File import make_list_of_path_to_files
from RouToolPa.Tools.Kmers import Jellyfish
from RouToolPa.Routines.Sequence import rev_com_generator
# CLI for counting k-mers with Jellyfish and optionally plotting their
# multiplicity distribution.
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input_file", action="store", dest="input", type=lambda s: s.split(","),
                    help="Comma-separated list of fasta or fastq files or directories containing them.")
parser.add_argument("-m", "--kmer_length", action="store", dest="kmer_length", type=int, default=23,
                    help="Length of kmers")
parser.add_argument("-s", "--hash_size", action="store", dest="hash_size", type=str, default="10G",
                    help="Size of hash. Estimation of hash size: for short reads S=(G + k*n)/0.8, "
                         "G - genome size, k - kmer length, n - number of reads, for assembled sequences "
                         "S=Sum(L)")
parser.add_argument("-a", "--base_prefix", action="store", dest="base_prefix", default="jellyfish_db",
                    help="Name of kmer database. Default: jellyfish_db")
parser.add_argument("-t", "--threads", action="store", dest="threads", type=int, default=1,
                    help="Number of threads")
parser.add_argument("-b", "--count_both_strands", action="store_true", dest="count_both_strands",
                    help="Count kmers in both strands. NOTICE: only mer or its reverse-complement, whichever "
                         "comes first lexicographically, is stored and the count value is the number of "
                         "occurrences of both. So this option is not suitable for generating sets of forward "
                         "and reverse-complement kmers. For this case use -r/--add_reverse_complement option. "
                         "Not compatible with -r/--add_reverse_complement option.")
parser.add_argument("-r", "--add_reverse_complement", action="store_true", dest="add_rev_com",
                    help="Add reverse-complement sequences before counting kmers. "
                         "Works only for fasta sequences. "
                         "Not compatible with -b/--count_both_strands option")
parser.add_argument("-d", "--draw_distribution", action="store_true", dest="draw_distribution",
                    help="Draw distribution of kmers")
parser.add_argument("-j", "--jellyfish_path", action="store", dest="jellyfish_path",
                    help="Path to jellyfish")
parser.add_argument("-n", "--dont_extract_kmer_list", action="store_true", dest="dont_extract_kmer_list",
                    help="Don't extract kmer list")
args = parser.parse_args()
# Expand directories/comma lists into concrete file paths.
args.input = make_list_of_path_to_files(args.input)

# The two strand-handling modes are mutually exclusive (see --help text).
if args.count_both_strands and args.add_rev_com:
    raise ValueError("Options -b/--count_both_strands and -r/--add_reverse_complement are not compatible")

if args.add_rev_com:
    # Materialize original + reverse-complement records into one FASTA so
    # Jellyfish sees both orientations explicitly.
    file_with_rev_com = args.base_prefix + "_with_rev_com.fasta"
    record_dict = SeqIO.index_db("temp_index.idx", args.input, format="fasta")
    SeqIO.write(rev_com_generator(record_dict, yield_original_record=True), file_with_rev_com, "fasta")
    args.base_prefix += "_with_rev_com"

base_file = "%s_%i_mer.jf" % (args.base_prefix, args.kmer_length)
kmer_table_file = "%s_%i_mer.counts" % (args.base_prefix, args.kmer_length)
kmer_file = "%s_%i_mer.kmer" % (args.base_prefix, args.kmer_length)

Jellyfish.threads = args.threads
Jellyfish.path = args.jellyfish_path if args.jellyfish_path else ""
Jellyfish.count(args.input if not args.add_rev_com else file_with_rev_com, base_file,
                kmer_length=args.kmer_length, hash_size=args.hash_size,
                count_both_strands=args.count_both_strands)

if not args.dont_extract_kmer_list:
    # Dump "<kmer>\t<count>" pairs, then strip counts with sed to get the
    # bare kmer list.
    Jellyfish.dump(base_file, kmer_table_file)
    sed_string = 'sed -e "s/\t.*//" %s > %s' % (kmer_table_file, kmer_file)
    os.system(sed_string)

if args.draw_distribution:
    histo_file = "%s_%i_mer.histo" % (args.base_prefix, args.kmer_length)
    picture_prefix = "%s_%i_mer_histogram" % (args.base_prefix, args.kmer_length)
    Jellyfish.histo(base_file, histo_file, upper_count=10000000)
    counts = []
    bins = []
    # Histogram file format: "<multiplicity> <number of distinct kmers>".
    with open(histo_file, "r") as histo_fd:
        for line in histo_fd:
            entry = line.strip().split()
            counts.append(entry[1])
            bins.append(entry[0])
    figure = plt.figure(1, figsize=(8, 8), dpi=300)
    subplot = plt.subplot(1, 1, 1)
    plt.suptitle("Distribution of %i-mers" % args.kmer_length,
                 fontweight='bold')
    plt.plot(bins, counts)
    plt.xlabel("Multiplicity")
    plt.ylabel("Number of distinct kmers")
    subplot.set_yscale('log', basey=10)
    subplot.set_xscale('log', basex=10)
    for extension in ["png", "svg"]:
        plt.savefig("%s.%s" % (picture_prefix, extension))

if args.add_rev_com:
    # Clean up the temporary Biopython sequence index.
    os.remove("temp_index.idx")
| StarcoderdataPython |
53498 | <filename>src/compute_results.py
import argparse
import os
import json
import shutil
import numpy as np
from distutils.util import strtobool as boolean
import torch
import torch.optim
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.utils.data
import torch.utils.data.distributed
import torchvision.datasets as datasets
import torchvision.models as models
from config import load_config
from softmax_cascade import SoftmaxCascade
from transforms import train_transforms, val_transforms
from trees import load_hierarchy, get_weighting, load_distances, get_classes
from helper import guo_ECE,MCE
import heapq
import math
torch.backends.cudnn.benchmark = True
MODEL_NAMES = sorted(name for name in models.__dict__ if name.islower() and not name.startswith("__") and callable(models.__dict__[name]))
LOSS_NAMES = ["cross-entropy", "soft-labels", "hierarchical-cross-entropy", "cosine-distance", "ranking-loss", "cosine-plus-xent", "yolo-v2"]
OPTIMIZER_NAMES = ["adagrad", "adam", "adam_amsgrad", "rmsprop", "SGD"]
DATASET_NAMES = ["tiered-imagenet-84", "inaturalist19-84", "tiered-imagenet-224", "inaturalist19-224"]
def init_model_on_gpu(gpus_per_node, opts):
    """Build the torchvision backbone, replace its FC head, and move it to GPU(s).

    Args:
        gpus_per_node: number of GPUs on this node; used to split batch size
            and worker count when running distributed.
        opts: namespace with at least arch, dropout, num_classes, gpu,
            batch_size, workers (and optionally pretrained, distributed).

    Returns:
        The model wrapped for the requested execution mode (DistributedDataParallel,
        single GPU, or DataParallel).

    Raises:
        ValueError: if opts.arch is not a supported architecture.
    """
    arch_dict = models.__dict__
    pretrained = False if not hasattr(opts, "pretrained") else opts.pretrained
    distributed = False if not hasattr(opts, "distributed") else opts.distributed
    print("=> using model '{}', pretrained={}".format(opts.arch, pretrained))
    model = arch_dict[opts.arch](pretrained=pretrained)
    if opts.arch == "resnet18":
        feature_dim = 512
    elif opts.arch == "resnet50":
        feature_dim = 2048
    else:
        # BUG FIX: the exception was previously constructed but never raised,
        # which led to a confusing NameError on feature_dim further down.
        raise ValueError("Unknown architecture ", opts.arch)
    # Replace the classification head with dropout + a linear layer sized for
    # the target number of classes.
    model.fc = torch.nn.Sequential(
        torch.nn.Dropout(opts.dropout),
        torch.nn.Linear(in_features=feature_dim, out_features=opts.num_classes, bias=True),
    )
    if distributed:
        # For multiprocessing distributed, DistributedDataParallel constructor
        # should always set the single device scope, otherwise,
        # DistributedDataParallel will use all available devices.
        if opts.gpu is not None:
            torch.cuda.set_device(opts.gpu)
            model.cuda(opts.gpu)
            # When using a single GPU per process and per
            # DistributedDataParallel, we need to divide the batch size
            # ourselves based on the total number of GPUs we have
            opts.batch_size = int(opts.batch_size / gpus_per_node)
            opts.workers = int(opts.workers / gpus_per_node)
            model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[opts.gpu])
        else:
            model.cuda()
            # DistributedDataParallel will divide and allocate batch_size to all
            # available GPUs if device_ids are not set
            model = torch.nn.parallel.DistributedDataParallel(model)
    elif opts.gpu is not None:
        torch.cuda.set_device(opts.gpu)
        model = model.cuda(opts.gpu)
    else:
        # DataParallel will divide and allocate batch_size to all available GPUs
        model = torch.nn.DataParallel(model).cuda()
    return model
def _load_checkpoint(opts, model, optimizer,model_path):
if os.path.isfile(model_path):
print("=> loading checkpoint '{}'".format(model_path))
checkpoint = torch.load(model_path,map_location='cuda:0')
opts.start_epoch = checkpoint["epoch"]
model.load_state_dict(checkpoint["state_dict"])
optimizer.load_state_dict(checkpoint["optimizer"])
steps = checkpoint["steps"]
print("=> loaded checkpoint '{}' (epoch {})".format(model_path, checkpoint["epoch"]))
elif opts.pretrained_folder is not None:
if os.path.exists(opts.pretrained_folder):
print("=> loading pretrained checkpoint '{}'".format(opts.pretrained_folder))
if os.path.isdir(opts.pretrained_folder):
checkpoint = torch.load(os.path.join(opts.pretrained_folder, "checkpoint.pth.tar"))
else:
checkpoint = torch.load(opts.pretrained_folder)
model.load_state_dict(checkpoint["state_dict"], strict=False)
steps = 0
print("=> loaded pretrained checkpoint '{}' (epoch {})".format(opts.pretrained_folder, checkpoint["epoch"]))
else:
raise FileNotFoundError("Can not find {}".format(opts.pretrained_folder))
else:
steps = 0
print("=> no checkpoint found at '{}'".format(opts.out_folder))
return steps
def _select_optimizer(model, opts):
if opts.optimizer == "adagrad":
return torch.optim.Adagrad(model.parameters(), opts.lr, weight_decay=opts.weight_decay)
elif opts.optimizer == "adam":
return torch.optim.Adam(model.parameters(), opts.lr, weight_decay=opts.weight_decay, amsgrad=False)
elif opts.optimizer == "adam_amsgrad":
return torch.optim.Adam(model.parameters(), opts.lr, weight_decay=opts.weight_decay, amsgrad=True, )
elif opts.optimizer == "rmsprop":
return torch.optim.RMSprop(model.parameters(), opts.lr, weight_decay=opts.weight_decay, momentum=0)
elif opts.optimizer == "SGD":
return torch.optim.SGD(model.parameters(), opts.lr, weight_decay=opts.weight_decay, momentum=0, nesterov=False, )
else:
raise ValueError("Unknown optimizer", opts.loss)
def softmax(x):
    '''Compute softmax values for a single logit vector.

    Shifts by the max logit before exponentiating so large values cannot
    overflow exp(); the result is mathematically unchanged.
    '''
    shifted = np.exp(x - np.max(x))
    return shifted / np.sum(shifted)
def row_softmax(output):
    '''Compute row-wise softmax of a 2-D array of logits.

    Vectorized over rows (one exp/sum pass instead of a Python-level call per
    row) and max-shifted per row for numerical stability; the values are
    mathematically identical to applying softmax to each row.
    '''
    logits = np.asarray(output, dtype=float)
    shifted = np.exp(logits - logits.max(axis=1, keepdims=True))
    return shifted / shifted.sum(axis=1, keepdims=True)
def get_all_cost_sensitive(output, distances, classes):
    '''Re-rank every prediction in the dataset with CRM (expected-cost decoding).'''
    n = len(classes)
    # Pairwise hierarchical-cost matrix: cost[row][col] is the distance between
    # class `row` and class `col`.
    cost = [[distances[(classes[row], classes[col])] for col in range(n)]
            for row in range(n)]
    expected_cost = np.dot(output, cost)
    # Negate so that argmax over the result picks the minimum expected cost.
    return -1 * expected_cost
def get_topk(prediction, target, distances, classes, k=1):
    '''Hierarchical distances between the target class and the top-k predictions.

    Args:
        prediction: 1-D score array over classes.
        target: index of the ground-truth class.
        distances: dict mapping (class_name, class_name) -> hierarchical distance.
        classes: list of class names indexing `prediction`.
        k: number of top-scoring predictions to evaluate.

    Returns:
        List of k distances, ordered from the highest-scored prediction down.
    '''
    # nlargest with prediction.take as key yields the indices of the k largest
    # scores in descending score order.
    top_indices = heapq.nlargest(k, range(len(prediction)), prediction.take)
    # (removed the unused s1/s2 accumulators from the original implementation)
    return [distances[(classes[i], classes[target])] for i in top_indices]
def get_metrics(opts, output, target, distances, classes):
    """Compute top-1 accuracy, mistake severity and hierarchical distance@k.

    Args:
        opts: namespace with integer flags shuffle_classes and rerank.
        output: (N, C) array of class probabilities (or CRM scores).
        target: (N,) array of ground-truth class indices.
        distances: dict mapping (class_name, class_name) -> hierarchical distance.
        classes: list of class names indexing the columns of `output`.

    Returns:
        [top1, hdist@1, hdist@5, hdist@20, mistake_severity] as floats.
    """
    # Random shuffling if required (seed 42 matches the one used to train
    # HXE/Soft-Labels; however, it can be changed).
    if opts.shuffle_classes == 1:
        np.random.seed(42)
        np.random.shuffle(classes)
    # Apply CRM re-ranking when requested.
    if opts.rerank == 1:
        output = get_all_cost_sensitive(output, distances, classes)
    top1 = []
    mistake_severity = []
    hdist_1 = []
    hdist_5 = []
    hdist_20 = []
    for i in range(len(output)):
        if output[i].argmax() == target[i]:
            top1.append(1)
        else:
            top1.append(0)
            # Severity is only accumulated over misclassified samples.
            mistake_severity.append(distances[(classes[target[i]], classes[output[i].argmax()])])
        # CONSISTENCY FIX: use extend for every @k list. The original mixed
        # extend (k=1) with append (k=5/20); the means were unaffected because
        # every row has the same length, but the flat layout is the intended one.
        hdist_1.extend(get_topk(output[i], target[i], distances, classes, 1))
        hdist_5.extend(get_topk(output[i], target[i], distances, classes, 5))
        hdist_20.extend(get_topk(output[i], target[i], distances, classes, 20))
    print("Top-1 Accuracy", np.array(top1).mean())
    print("Mistake Severity", np.array(mistake_severity).mean())
    print("Hierarchical Distance@1", np.array(hdist_1).mean())
    print("Hierarchical Distance@5", np.array(hdist_5).mean())
    print("Hierarchical Distance@20", np.array(hdist_20).mean())
    result = [np.array(top1).mean(), np.array(hdist_1).mean(), np.array(hdist_5).mean(),
              np.array(hdist_20).mean(), np.array(mistake_severity).mean()]
    return result
def main(opts, model_path):
    """Evaluate one checkpoint on the test split and return its metrics.

    Returns:
        [top1, hdist@1, hdist@5, hdist@20, mistake_severity, ECE, MCE].
    """
    # --- dataset ---
    test_dir = os.path.join(opts.data_path, "test")
    test_dataset = datasets.ImageFolder(test_dir, val_transforms(opts.data, resize=(224, 224), normalize=True))
    test_loader = torch.utils.data.DataLoader(
        test_dataset, batch_size=opts.batch_size, shuffle=False,
        num_workers=opts.workers, pin_memory=True, drop_last=False)
    gpus_per_node = 1
    # --- hierarchy information ---
    distances = load_distances(opts.data, 'ilsvrc', opts.data_dir)
    hierarchy = load_hierarchy(opts.data, opts.data_dir)
    if opts.loss == "yolo-v2":
        classes, _ = get_classes(hierarchy, output_all_nodes=True)
    else:
        classes = test_dataset.classes
    opts.num_classes = len(classes)
    if opts.loss == "yolo-v2":
        cascade = SoftmaxCascade(hierarchy, classes).cuda(opts.gpu)
        num_leaf_classes = len(hierarchy.treepositions("leaves"))

        def yolo2_corrector(output):
            # Keep only the leaf-class probabilities of the cascade output.
            return cascade.final_probabilities(output)[:, :num_leaf_classes]
    model = init_model_on_gpu(gpus_per_node, opts)
    # The optimizer is only needed so the checkpoint's optimizer state can be
    # restored; it is not stepped here.
    optimizer = _select_optimizer(model, opts)
    _load_checkpoint(opts, model, optimizer, model_path)
    corrector = yolo2_corrector if opts.loss == "yolo-v2" else lambda x: x
    model.eval()
    # Iterate over the dataset and collect logits and labels.
    # BUG FIX: the original called torch.no_grad() as a bare statement, which
    # is a no-op — gradients were still being tracked during inference. The
    # context manager must wrap the forward passes.
    test_output = []
    test_target = []
    with torch.no_grad():
        for embeddings, target in test_loader:
            if opts.gpu is not None:
                embeddings = embeddings.cuda(opts.gpu, non_blocking=True)
            output = corrector(model(embeddings))
            test_output.extend(output.cpu().tolist())
            test_target.extend(target.tolist())
    test_output = np.array(test_output)
    test_target = np.array(test_target)
    # The corrector already applies the softmax cascade for YOLOv2.
    if opts.loss != 'yolo-v2':
        softmax_output = row_softmax(test_output)
    else:
        softmax_output = test_output
    # --- compute metrics and return logs ---
    model_ece = guo_ECE(softmax_output, test_target)
    model_mce = MCE(softmax_output, test_target)
    print("ECE:", model_ece)
    print("MCE:", model_mce)
    result = get_metrics(opts, softmax_output, test_target, distances, classes)
    result.append(model_ece)
    result.append(model_mce)
    return result
if __name__=="__main__":
    # CLI entry point: parse evaluation options, evaluate five saved
    # checkpoints of the same run, and write their metric rows to a CSV file.
    parser = argparse.ArgumentParser()
    parser.add_argument("--arch", default="resnet18", choices=MODEL_NAMES, help="model architecture: | ".join(MODEL_NAMES))
    parser.add_argument("--loss", default="cross-entropy", choices=LOSS_NAMES, help="loss type: | ".join(LOSS_NAMES))
    parser.add_argument("--optimizer", default="adam_amsgrad", choices=OPTIMIZER_NAMES, help="loss type: | ".join(OPTIMIZER_NAMES))
    parser.add_argument("--lr", default=1e-5, type=float, help="initial learning rate of optimizer")
    parser.add_argument("--weight_decay", default=0.0, type=float, help="weight decay of optimizer")
    parser.add_argument("--pretrained", type=boolean, default=True, help="start from ilsvrc12/imagenet model weights")
    parser.add_argument("--pretrained_folder", type=str, default=None, help="folder or file from which to load the network weights")
    parser.add_argument("--dropout", default=0.0, type=float, help="Prob of dropout for network FC layer")
    parser.add_argument("--data_augmentation", type=boolean, default=True, help="Train with basic data augmentation")
    parser.add_argument("--num_training_steps", default=200000, type=int, help="number of total steps to train for (num_batches*num_epochs)")
    parser.add_argument("--start-epoch", default=0, type=int, help="manual epoch number (useful on restarts)")
    parser.add_argument("--batch-size", default=256, type=int, help="total batch size")
    parser.add_argument("--shuffle_classes", default=False, type=boolean, help="Shuffle classes in the hierarchy")
    parser.add_argument("--beta", default=0, type=float, help="Softness parameter: the higher, the closer to one-hot encoding")
    parser.add_argument("--alpha", type=float, default=0, help="Decay parameter for hierarchical cross entropy.")
    # Devise/B&D ----------------------------------------------------------------------------------------------------------------------------------------------
    parser.add_argument("--devise", type=boolean, default=False, help="Use DeViSe label embeddings")
    parser.add_argument("--devise_single_negative", type=boolean, default=False, help="Use one negative per samples instead of all")
    parser.add_argument("--barzdenzler", type=boolean, default=False, help="Use Barz&Denzler label embeddings")
    parser.add_argument("--train_backbone_after", default=float("inf"), type=float, help="Start training backbone too after this many steps")
    parser.add_argument("--use_2fc", default=False, type=boolean, help="Use two FC layers for Devise")
    parser.add_argument("--fc_inner_dim", default=1024, type=int, help="If use_2fc is True, their inner dimension.")
    parser.add_argument("--lr_fc", default=1e-3, type=float, help="learning rate for FC layers")
    parser.add_argument("--weight_decay_fc", default=0.0, type=float, help="weight decay of FC layers")
    parser.add_argument("--use_fc_batchnorm", default=False, type=boolean, help="Batchnorm layer in network head")
    # Data/paths ----------------------------------------------------------------------------------------------------------------------------------------------
    parser.add_argument("--data", default="tiered-imagenet-224", help="id of the dataset to use: | ".join(DATASET_NAMES))
    parser.add_argument("--target_size", default=224, type=int, help="Size of image input to the network (target resize after data augmentation)")
    parser.add_argument("--data-paths-config", help="Path to data paths yaml file", default="../data_paths.yml")
    parser.add_argument("--data-path", default=None, help="explicit location of the data folder, if None use config file.")
    parser.add_argument("--data_dir", default="../data/", help="Folder containing the supplementary data")
    parser.add_argument("--output", default=None, help="path to the model folder")
    parser.add_argument("--expm_id", default="", type=str, help="Name log folder as: out/<scriptname>/<date>_<expm_id>. If empty, expm_id=time")
    # Log/val -------------------------------------------------------------------------------------------------------------------------------------------------
    parser.add_argument("--log_freq", default=100, type=int, help="Log every log_freq batches")
    parser.add_argument("--val_freq", default=5, type=int, help="Validate every val_freq epochs (except the first 10 and last 10)")
    # Execution -----------------------------------------------------------------------------------------------------------------------------------------------
    parser.add_argument("--workers", default=2, type=int, help="number of data loading workers")
    parser.add_argument("--seed", default=None, type=int, help="seed for initializing training. ")
    parser.add_argument("--gpu", default=0, type=int, help="GPU id to use.")
    ## CRM ----------------------------------------------------------------------------------
    parser.add_argument("--rerank",default=0,type=int,help='whether to use CRM or not')
    ### Logs ---------------------------------------------------------------------------------
    parser.add_argument("--expname",default='cross-entropy',type=str,help="Name of model")
    parser.add_argument("--epoch1",default=10,type=int,help="First epoch to evaluate")
    parser.add_argument("--out_folder1",default=None,type=str,help="Path to model checkpoint")
    parser.add_argument("--out_folder2",default=None,type=str,help="Path to model checkpoint")
    parser.add_argument("--out_folder3",default=None,type=str,help="Path to model checkpoint")
    parser.add_argument("--out_folder4",default=None,type=str,help="Path to model checkpoint")
    parser.add_argument("--out_folder5",default=None,type=str,help="Path to model checkpoint")
    opts=parser.parse_args()
    # Resolve the dataset location from the YAML config when not given explicitly.
    if opts.data_path is None:
        opts.data_paths = load_config(opts.data_paths_config)
        opts.data_path = opts.data_paths[opts.data]
    logs=[]
    ##Evaluating Results on 5 checkpoints
    logs.append(main(opts,opts.out_folder1))
    logs.append(main(opts,opts.out_folder2))
    logs.append(main(opts,opts.out_folder3))
    logs.append(main(opts,opts.out_folder4))
    logs.append(main(opts,opts.out_folder5))
    # One CSV row per checkpoint: the metric list returned by main().
    logs=np.array(logs,dtype='float64')
    savename=opts.expname
    np.savetxt(savename,logs, fmt="%.5f", delimiter=",")
| StarcoderdataPython |
3221428 | <gh_stars>0
# %matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
from six.moves import cPickle
# Y' = 0.2989 R + 0.5870 G + 0.1140 B
def rgb2gray(rgb):
    """Collapse the trailing RGB channels of an (..., 3+) image array to luminance."""
    luma_weights = np.array([0.2989, 0.5870, 0.1140])
    return rgb[..., :3] @ luma_weights
def readData():
    """Load all five CIFAR-10 training batches from disk.

    Returns:
        (images, labels): images as a (50000, 32, 32, 3) uint8 array in
        HWC channel order, labels as a flat numpy array.
    """
    image_data = np.array([])
    image_labels = np.array([])
    for fileindex in range(1, 6):
        filename = "cifar-10-batches-py/data_batch_{}".format(fileindex)
        print(filename)
        with open(filename, 'rb') as f:
            datadict = cPickle.load(f, encoding='latin1')
        X = datadict["data"]
        Y = datadict['labels']
        if fileindex == 1:
            image_data = np.array(X)
            image_labels = np.array(Y)
        else:
            image_data = np.vstack((image_data, X))
            image_labels = np.append(image_labels, Y)
    print(image_data.shape)
    # Rows are flat CHW pixel vectors; reshape and transpose to NHWC.
    image_data = image_data.reshape(50000, 3, 32, 32).transpose(0, 2, 3, 1).astype("uint8")
    return image_data, image_labels
def convertAllIntoGrayScale(image_data):
    """Convert a batch of RGB images to grayscale via rgb2gray.

    Removed the dead `grayscaleImageData = image_data` assignment from the
    original, which was immediately overwritten with an empty list.
    """
    print(len(image_data))
    return np.array([rgb2gray(image) for image in image_data])
def calculateMean(data, labels, isGrayScale=False):
    """Per-class mean image over class labels 0..9.

    Returns an array of shape (10, 32, 32) when isGrayScale, else (10, 32, 32, 3).
    """
    shape = [10, 32, 32] if isGrayScale else [10, 32, 32, 3]
    mean = np.zeros(shape)
    for cls in range(10):
        indexes = np.where(labels == cls)[0]
        # Accumulate, then divide by the number of images in this class.
        for i in indexes:
            mean[cls] += data[i]
        mean[cls] = mean[cls] / len(indexes)
    return mean
# --- script body: build the 10x10 matrix of distances between class means ---
image_data,image_labels = readData()
print('Done reading')
gray_scale_image_data = convertAllIntoGrayScale(image_data)
print('Done Converting')
gray_scale_mean = calculateMean(gray_scale_image_data,image_labels,True)
# Flatten each 32x32 mean image into a 1024-d vector for distance computation.
gray_scale_mean = gray_scale_mean.reshape(10,1024)
meandifferencematrix = np.zeros([10,10])
print('Mean Differemce Matrix ')
print( )
print()
# Euclidean distance between every pair of class-mean vectors.
for i in range(10):
    for j in range(10):
        meandifferencematrix[i][j] = np.linalg.norm(gray_scale_mean[i]-gray_scale_mean[j])
print(meandifferencematrix)
1625209 | <reponame>Wizard-Toast/CT-Prehistoric-Trader
"Chrono Trigger, Prehistoric Trade Handler" # Script assumes player is standing in front of trader with no other menu / dialogue open. It will attempt to bring chrono trigger to the foreground to begin trading.
import pyautogui
import win32gui
from modules.tradehandler import *
from modules.inputparameters import quantity_inp
from time import sleep
# Allow aborting the automation by slamming the mouse into a screen corner
# (pyautogui failsafe); run simulated inputs with no inter-call pause.
pyautogui.FAILSAFE = True
pyautogui.PAUSE = 0
def windowEnumerationHandler(hwnd, top_windows):
    # win32gui.EnumWindows callback: collect (handle, title) for every top-level window.
    top_windows.append((hwnd, win32gui.GetWindowText(hwnd)))
# --- script body: prompt the user, then drive the in-game trade menu --------
start_str = pyautogui.confirm('Begin trading?', title="Trader", buttons=['Yes','No'])
# BUG FIX: the original compared strings with `is`, which tests object
# identity, not equality, and is not guaranteed to hold for strings.
if start_str != 'Yes':
    print("Closing trader...")
    exit()
item_var = pyautogui.confirm('Which item to trade for?', title="Trade List", buttons=['Ruby Gun',"Shaman's Bow",'Stone Arm','Mammoth Tusk','Ruby Vest','Stone Helm'])
if item_var is None:  # dialog dismissed without choosing an item
    print("Error: item_var is None. Closing trader...")
    exit()
# Normalize the item name into an identifier usable by trade_handler
# (e.g. "Shaman's Bow" -> "shamans_bow").
item_var = item_var.replace(" ", "_").lower()
item_var = item_var.replace("'", "")
# Take user input, prompt again if answer has invalid chars or is higher than
# ('thisamount', "in a pyautogui.prompt with this title")
quantity_int = quantity_inp('33', "Trade Quantity")
if __name__ == "__main__":
    # Brings window named "chrono trigger" to the front. Could potentially
    # misfire if other windows containing the name are open.
    results = []
    top_windows = []
    win32gui.EnumWindows(windowEnumerationHandler, top_windows)
    for i in top_windows:
        if "chrono trigger" in i[1].lower():
            print(i)
            win32gui.ShowWindow(i[0], 5)
            win32gui.SetForegroundWindow(i[0])
            break
    # Simulate keypresses to navigate pre-history trade menu; assumes you are
    # standing in front of the trader with no dialogue or menu open.
    trade_handler(item_var, quantity_int)
    # trade_handler sleep timings can be improved
print(start_str, item_var, quantity_int, sep='\n')
1743516 | from tkinter import *
import tkinter
from random import randint
# --- window setup -----------------------------------------------------------
root = Tk()
root.title("剪刀石頭布")  # "Rock, Paper, Scissors"
Button_1 = Button()
# Text shown on the wide display button at the top of the window.
contentVar = tkinter.StringVar(root, '')
buttonList = list()
displayButton = Button(root, fg='white', bg='#3E4149', textvariable=contentVar,
                       width=30, height=2)
displayButton.grid(row=0, column=0, columnspan=3)
displayButton["font"] = ("arial", 20, "bold")
# Player choices; from the win rules in click(): O beats Y, W beats O, Y beats W
# (apparently Y=scissors, O=rock, W=paper — TODO confirm the intended mapping).
buttonList = ['Y','O','W']
# Running score counters, updated by click().
computerWin = 0
userWin = 0
tie = 0
def click(user):
    """Play one round: draw a random computer move, update the score counters,
    and refresh the display text."""
    global computerWin, userWin, tie
    computer = buttonList[randint(0, 2)]
    # For each move: the choice it defeats and the choice it loses to.
    defeats = {'O': 'Y', 'W': 'O', 'Y': 'W'}
    loses_to = {'O': 'W', 'W': 'Y', 'Y': 'O'}
    if user == computer:
        tie += 1
    elif user == defeats[computer]:
        computerWin += 1
    elif user == loses_to[computer]:
        userWin += 1
    contentVar.set('You:' + user + ', Computer:' + computer +'\nYou Win:' + str(userWin))
# Create the three choice buttons in a row below the display.
for i in range(3):
    col = i
    get_str = buttonList[i]
    width = 7
    height = 3
    # Bind the button's own letter as a lambda default argument so each button
    # passes its choice to click() (avoids the late-binding closure pitfall).
    Button_1 = Button(root, text=buttonList[i], bg='#3E4149', fg='white',
                      width=width, height=height,
                      command=lambda x=get_str: click(x))
    Button_1.grid(row=1,column=col)
    Button_1["font"] = ("arial", 12, "bold")
root.mainloop()
101142 | from flask import Blueprint
from flask_restful import Api
from api.resources.demo import DemoResource
from api.resources.login import LoginResource
# Version-1 API blueprint: all resources are registered under the /v1 prefix.
api_bp_v1 = Blueprint('bp_v1', __name__)
api_v1 = Api(api_bp_v1, '/v1')
api_v1.add_resource(DemoResource, '/demo')
api_v1.add_resource(LoginResource, '/login')
# Blueprints exported for registration on the Flask application.
BLUEPRINTS = [api_bp_v1]
__all__ = ['BLUEPRINTS']
| StarcoderdataPython |
3303453 | #!/bin/env python
import os,sys
from datetime import datetime, timedelta
import numpy as np
from scipy import interpolate
from scipy.io import netcdf
import pygrib
def read_grib(fnl_path, ndays, u10, v10):
    """Read 10-m wind components from 6-hourly GFS GRIB2 files and
    interpolate them onto the FVCOM element points.

    Args:
        fnl_path: directory containing gfs.pgrb2.0p25.fXXX.grib2 files.
        ndays: forecast length in days (one file every 6 h, inclusive of +6 h).
        u10, v10: placeholders kept for interface compatibility; their values
            are overwritten before use.

    Returns:
        (u10_r, v10_r): arrays of shape (ntimes, npoints).
    """
    # Element lon/lat of the unstructured grid (columns appear to be lon, lat,
    # given the [:, ::-1] swap to (lat, lon) below — TODO confirm file layout).
    lon_lat = np.genfromtxt('/fvcom-exec/input/nele_lon_lat.txt', dtype='f')
    file_list = [fnl_path + "gfs.pgrb2.0p25.f" + "{0:03d}".format(i) + ".grib2"
                 for i in range(0, ndays * 24 + 6, 6)]
    for i in range(len(file_list)):
        grb = pygrib.open(file_list[i])
        print("read " + file_list[i])
        U10 = grb.select(name='U component of wind')[0]  # (721, 1440)
        V10 = grb.select(name='V component of wind')[0]
        lat, lon = U10.latlons()
        # Interpolate from the regular (lat, lon) grid onto the points;
        # latitudes and field rows are flipped to ascending order as interpn
        # requires strictly increasing grid coordinates.
        u10 = interpolate.interpn((lat[::-1, 0], lon[0]), U10.values[::-1, :],
                                  lon_lat[:, ::-1], method='linear')
        # BUG FIX: the original interpolated U10.values here as well, so the
        # returned V component was just a copy of the U component.
        v10 = interpolate.interpn((lat[::-1, 0], lon[0]), V10.values[::-1, :],
                                  lon_lat[:, ::-1], method='linear')
        if i == 0:
            u10_r = u10
            v10_r = v10
        else:
            u10_r = np.vstack((u10_r, u10))
            v10_r = np.vstack((v10_r, v10))
    print(u10_r.shape)
    return u10_r, v10_r
# -- MAIN
if __name__ == '__main__':
    global nele_path,nnele,nnode
    # Grid description: element lon/lat file path, node and element counts.
    nele_path = "/fvcom-exec/input/nele_lon_lat.txt"
    nnode = 2361
    nnele = 4448
    if len(sys.argv) < 4:
        print('Usage: fnl2fvcom_uv.py <fnl_path> <ndays> <start_ymdh>')
        sys.exit(0)
    fnl_path = sys.argv[1]
    ndays = int(sys.argv[2])
    ymdh = sys.argv[3]
    starttime = datetime.strptime(ymdh,"%Y%m%d%H")
    # FVCOM times are modified Julian days, counted from 1858-11-17.
    timeref = datetime(1858,11,17)
    times = [ starttime + timedelta(hours=i) for i in range(0, ndays*24+6, 6 ) ]
    times = [ (i - timeref ).total_seconds()/(60*60*24) for i in times ]
    time_org = np.array(times, dtype='f4')
    Itime_org = np.array(times, dtype='i4')
    # Itime2 = milliseconds past midnight, rounded to the nearest whole hour.
    Itime2_org= np.round(( (time_org%1)*24*3600*1000)/(3600*1000))*(3600*1000)
    # Placeholders; read_grib returns the real (ntimes, nele) arrays.
    u10_r = np.zeros(time_org.shape[0],dtype='f')
    v10_r = np.zeros(time_org.shape[0],dtype='f')
    u10_r, v10_r = read_grib(fnl_path, ndays, u10_r, v10_r)
    # Write the FVCOM surface wind forcing NetCDF file.
    f = netcdf.netcdf_file('uv_force.nc', 'w')
    f.source = "fvcom grid (unstructured) surface forcing"
    f.createDimension('time', None)
    f.createDimension('node', nnode )
    f.createDimension('nele', nnele )
    time = f.createVariable('time', 'f', ('time',))
    time[:] = time_org
    time.long_name = "time"
    time.units = 'days since 1858-11-17 00:00:00'
    time.format = "modified julian day (MJD)"
    time.time_zone = "UTC"
    Itime = f.createVariable('Itime', 'i', ('time',))
    Itime[:] = Itime_org
    Itime.units = 'days since 1858-11-17 00:00:00'
    Itime2 = f.createVariable('Itime2', 'i', ('time',))
    Itime2[:] = Itime2_org.astype('i4')
    Itime2.units = 'msec since 00:00:00'
    u10 = f.createVariable('U10', 'f', ('time','nele',))
    u10[:] = u10_r[:]
    u10.long_name = "Eastward 10-m Velocity"
    u10.units = "m/s"
    v10 = f.createVariable('V10', 'f', ('time','nele',))
    v10[:] = v10_r[:]
    v10.long_name = "Northward 10-m Velocity"
    v10.units = "m/s"
    f.close()
3244615 | <gh_stars>0
from api.models import Share
def create(**kwargs):
    """Create and persist a new Share with the given field values."""
    return Share.objects.create(**kwargs)
def get_share(**kwargs):
    """Return the first Share matching the filters, or None when nothing matches."""
    return Share.objects.filter(**kwargs).first()
def get_shares(**kwargs):
    """Return a queryset of all Shares matching the filters."""
    return Share.objects.filter(**kwargs)
1606766 | import datetime
from django.test import TestCase
from model_mommy import mommy
from wagtailregulations.models.django import (
EffectiveVersion,
Part,
Section,
Subpart,
)
class RegulationsTestData(object):
    """Mixin that builds a small regulations fixture tree for tests.

    Creates two Parts (1002, 1030); three EffectiveVersions of Part 1002
    (current 2014, old 2011, and a 2020 draft); the current version's subparts
    (body, appendices, interpretations, plus an intentionally section-less
    "orphan" subpart) with their sections; and one subpart/section pair on the
    old version.
    """
    def setUp_regulations(self):
        """Populate self.* with the Part/EffectiveVersion/Subpart/Section fixtures."""
        # --- Parts -------------------------------------------------------
        self.part_1002 = mommy.make(
            Part,
            cfr_title_number="12",
            part_number="1002",
            title="Equal Credit Opportunity Act",
            short_name="Regulation B",
            chapter="X",
        )
        self.part_1030 = mommy.make(
            Part,
            part_number="1030",
            title="Truth In Savings",
            short_name="Regulation DD",
            chapter="X",
        )
        # --- Effective versions of Part 1002 -----------------------------
        self.effective_version = mommy.make(
            EffectiveVersion,
            effective_date=datetime.date(2014, 1, 18),
            part=self.part_1002,
        )
        self.old_effective_version = mommy.make(
            EffectiveVersion,
            effective_date=datetime.date(2011, 1, 1),
            part=self.part_1002,
        )
        self.draft_effective_version = mommy.make(
            EffectiveVersion,
            effective_date=datetime.date(2020, 1, 1),
            part=self.part_1002,
            draft=True,
        )
        # --- Subparts of the current version -----------------------------
        self.subpart = mommy.make(
            Subpart,
            label="Subpart General",
            title="Subpart A - General",
            subpart_type=Subpart.BODY,
            version=self.effective_version,
        )
        self.subpart_appendices = mommy.make(
            Subpart,
            label="Appendices",
            title="Appendices",
            subpart_type=Subpart.APPENDIX,
            version=self.effective_version,
        )
        self.subpart_interps = mommy.make(
            Subpart,
            label="Official Interpretations",
            title="Supplement I to Part 1002",
            subpart_type=Subpart.INTERPRETATION,
            version=self.effective_version,
        )
        # Deliberately has no sections attached.
        self.subpart_orphan = mommy.make(
            Subpart,
            label="General Mistake",
            title="An orphan subpart with no sections for testing",
            version=self.effective_version,
        )
        self.old_subpart = mommy.make(
            Subpart,
            label="Subpart General",
            title="General",
            subpart_type=Subpart.BODY,
            version=self.old_effective_version,
        )
        # --- Body sections ("\xa7" is the section sign) -------------------
        self.section_num2 = mommy.make(
            Section,
            label="2",
            title="\xa7 1002.2 Definitions.",
            contents="{c}\nAdverse action.\n\nsee(2-c-Interp)\n",
            subpart=self.subpart,
        )
        self.section_num3 = mommy.make(
            Section,
            label="3",
            title="\xa7 1002.3 Limited exceptions.",
            contents="{b}\nSecurities credit.\n\nsee(3-b-Interp)\n",
            subpart=self.subpart,
        )
        self.section_num4 = mommy.make(
            Section,
            label="4",
            title="\xa7\xa01002.4 General rules.",
            contents=(
                "{a}\n(a) Regdown paragraph a.\n"
                "{b}\n(b) Paragraph b\n"
                "\nsee(4-b-Interp)\n"
                "{c}\n(c) Paragraph c.\n"
                "{c-1}\n \n"
                "{d}\n(1) General rule. A creditor that provides in writing.\n"
            ),
            subpart=self.subpart,
        )
        self.section_num15 = mommy.make(
            Section,
            label="15",
            title="\xa7\xa01002.15 Rules concerning requests for information.",
            contents="regdown content.",
            subpart=self.subpart,
        )
        # --- Appendix sections -------------------------------------------
        self.section_alpha = mommy.make(
            Section,
            label="A",
            title=(
                "Appendix A to Part 1002-Federal Agencies "
                "To Be Listed in Adverse Action Notices"
            ),
            contents="regdown content.",
            subpart=self.subpart_appendices,
        )
        self.section_beta = mommy.make(
            Section,
            label="B",
            title=("Appendix B to Part 1002-Errata"),
            contents="regdown content.",
            subpart=self.subpart_appendices,
        )
        # --- Interpretation sections -------------------------------------
        self.section_interps = mommy.make(
            Section,
            label="Interp-A",
            title=("Official interpretations for Appendix A to Part 1002"),
            contents="interp content.",
            subpart=self.subpart_interps,
        )
        # --- Section on the old effective version ------------------------
        self.old_section_num4 = mommy.make(
            Section,
            label="4",
            title="\xa7\xa01002.4 General rules.",
            contents="regdown contents",
            subpart=self.old_subpart,
        )
        self.section_interp2 = mommy.make(
            Section,
            label="Interp-2",
            title="Section 1002.2—Definitions",
            contents="{c-Interp}\nInterpreting adverse action\n\n",
            subpart=self.subpart_interps,
        )
class RegulationsTestCase(TestCase, RegulationsTestData):
    """TestCase base class that builds the shared regulations fixture tree."""
    def setUp(self):
        # Populate the Part/EffectiveVersion/Subpart/Section fixtures.
        self.setUp_regulations()
1615088 | # """Pytorch Dataset object that loads 27x27 patches that contain single cells."""
import os
import random
import scipy.io
import numpy as np
from PIL import Image
import torch
import torch.utils.data as data_utils
import torchvision.transforms as transforms
from torch.nn.functional import pad
import dataloaders.additional_transforms as AT
class ColonCancerBagsCross(data_utils.Dataset):
def __init__(self, path,
train_val_idxs=None,
test_idxs=None,
train=True,
shuffle_bag=False,
data_augmentation=False,
padding=True,
base_att=False):
self.path = path
self.train_val_idxs = train_val_idxs
self.test_idxs = test_idxs
self.train = train
self.shuffle_bag = shuffle_bag
self.data_augmentation = data_augmentation
self.padding = padding
self.base_att = base_att
if self.base_att:
# Trace
# print('Normalization enabled on the Colon Cancer dataset.')
self.data_augmentation_img_transform = transforms.Compose(
[
AT.RandomHEStain(),
AT.HistoNormalize(),
AT.RandomRotate(),
AT.RandomVerticalFlip(),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5),
(0.5, 0.5, 0.5))
])
self.normalize_to_tensor_transform = transforms.Compose(
[
AT.HistoNormalize(),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5),
(0.5, 0.5, 0.5))
])
else:
# Trace
# print('Normalization disabled on the Colon Cancer dataset.')
self.data_augmentation_img_transform = transforms.Compose(
[
AT.RandomHEStain(),
AT.HistoNormalize(),
AT.RandomRotate(),
AT.RandomVerticalFlip(),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
])
self.normalize_to_tensor_transform = transforms.Compose(
[
AT.HistoNormalize(),
transforms.ToTensor(),
])
self.dir_list_train, self.dir_list_test = self.split_dir_list(
self.path, self.train_val_idxs, self.test_idxs)
if self.train:
self.bag_list_train, self.labels_list_train = self.create_bags(self.dir_list_train)
else:
self.bag_list_test, self.labels_list_test = self.create_bags(self.dir_list_test)
@staticmethod
def split_dir_list(path, train_val_idxs, test_idxs):
dirs = [x[0] for x in os.walk(path)]
dirs.pop(0)
dirs.sort()
dir_list_train = [dirs[i] for i in train_val_idxs]
dir_list_test = [dirs[i] for i in test_idxs]
return dir_list_train, dir_list_test
def create_bags(self, dir_list):
bag_list = []
labels_list = []
for dir in dir_list:
# Get image name
img_name = dir.split('/')[-1]
# bmp to pillow
img_dir = dir + '/' + img_name + '.bmp'
with open(img_dir, 'rb') as f:
with Image.open(f) as img:
img = img.convert('RGB')
# crop malignant cells
dir_epithelial = dir + '/' + img_name + '_epithelial.mat'
with open(dir_epithelial, 'rb') as f:
mat_epithelial = scipy.io.loadmat(f)
cropped_cells_epithelial = []
for (x, y) in mat_epithelial['detection']:
x = np.round(x)
y = np.round(y)
if self.data_augmentation:
x = x + np.round(np.random.normal(0, 3, 1))
y = y + np.round(np.random.normal(0, 3, 1))
# If it is a numpy array
if type(x) == np.ndarray:
x = x[0]
if x < 13:
x_start = 0
x_end = 27
elif x > 500 - 13:
x_start = 500 - 27
x_end = 500
else:
x_start = x - 13
x_end = x + 14
# If it is a numpy array
if type(y) == np.ndarray:
y = y[0]
if y < 13:
y_start = 0
y_end = 27
elif y > 500 - 13:
y_start = 500 - 27
y_end = 500
else:
y_start = y - 13
y_end = y + 14
cropped_cells_epithelial.append(img.crop((x_start, y_start, x_end, y_end)))
# crop all other cells
dir_inflammatory = dir + '/' + img_name + '_inflammatory.mat'
dir_fibroblast = dir + '/' + img_name + '_fibroblast.mat'
dir_others = dir + '/' + img_name + '_others.mat'
with open(dir_inflammatory, 'rb') as f:
mat_inflammatory = scipy.io.loadmat(f)
with open(dir_fibroblast, 'rb') as f:
mat_fibroblast = scipy.io.loadmat(f)
with open(dir_others, 'rb') as f:
mat_others = scipy.io.loadmat(f)
all_coordinates = np.concatenate(
(mat_inflammatory['detection'], mat_fibroblast['detection'], mat_others['detection']), axis=0)
cropped_cells_others = []
for (x, y) in all_coordinates:
x = np.round(x)
y = np.round(y)
if self.data_augmentation:
x = x + np.round(np.random.normal(0, 3, 1))
y = y + np.round(np.random.normal(0, 3, 1))
# If it is a numpy array
if type(x) == np.ndarray:
x = x[0]
if x < 13:
x_start = 0
x_end = 27
elif x > 500 - 13:
x_start = 500 - 27
x_end = 500
else:
x_start = x - 13
x_end = x + 14
# If it is a numpy array
if type(y) == np.ndarray:
y = y[0]
if y < 13:
y_start = 0
y_end = 27
elif y > 500 - 13:
y_start = 500 - 27
y_end = 500
else:
y_start = y - 13
y_end = y + 14
cropped_cells_others.append(img.crop((x_start, y_start, x_end, y_end)))
# generate bag
bag = cropped_cells_epithelial + cropped_cells_others
# store single cell labels
labels = np.concatenate((np.ones(len(cropped_cells_epithelial)),
np.zeros(len(cropped_cells_others))), axis=0)
# shuffle
if self.shuffle_bag:
zip_bag_labels = list(zip(bag, labels))
random.shuffle(zip_bag_labels)
bag, labels = zip(*zip_bag_labels)
# append every bag two times if training
if self.train:
for _ in [0, 1]:
bag_list.append(bag)
labels_list.append(labels)
else:
bag_list.append(bag)
labels_list.append(labels)
# bag_list.append(bag)
# labels_list.append(labels)
return bag_list, labels_list
def transform_and_data_augmentation(self, bag):
    """Convert every image in *bag* to a tensor and stack them.

    Chooses the augmenting transform when data augmentation is enabled,
    otherwise the plain normalize-to-tensor transform. When padding is
    enabled, each tensor is padded by one pixel on the right and bottom.
    """
    img_transform = (self.data_augmentation_img_transform
                     if self.data_augmentation
                     else self.normalize_to_tensor_transform)
    tensors = []
    for image in bag:
        tensor = img_transform(image)
        if self.padding:
            # one-pixel constant pad on the right/bottom edges
            tensor = pad(tensor, (0, 1, 0, 1), mode='constant')
        tensors.append(tensor)
    return torch.stack(tensors)
def __len__(self):
    """Return the number of bags in the active (train or test) split."""
    active_labels = self.labels_list_train if self.train else self.labels_list_test
    return len(active_labels)
def __getitem__(self, index):
    """Return ``(transformed_bag, [bag_label, instance_labels])`` for *index*.

    The bag-level label is the max over the per-instance labels (standard
    multiple-instance convention: positive if any instance is positive).
    """
    if self.train:
        bag = self.bag_list_train[index]
        instance_labels = self.labels_list_train[index]
    else:
        bag = self.bag_list_test[index]
        instance_labels = self.labels_list_test[index]
    label = [max(instance_labels), instance_labels]
    return self.transform_and_data_augmentation(bag), label
| StarcoderdataPython |
3305690 | '''
Util to recalculate persistable hashes
'''
__author__ = '<NAME>'
import logging
from typing import Optional, List
from queue import SimpleQueue
from simpleml.registries import SIMPLEML_REGISTRY
from simpleml.persistables.hashing import CustomHasherMixin
from simpleml.persistables.base_persistable import Persistable
from simpleml.datasets.base_dataset import Dataset
from simpleml.pipelines.base_pipeline import Pipeline
from simpleml.models.base_model import Model
from simpleml.metrics.base_metric import Metric
LOGGER = logging.getLogger(__name__)
class HashRecalculator(object):
    '''
    Utility class to recalculate hashes for persistables

    Useful for backfilling changes to hash logic and for database migrations
    that impact fields included in the hash (e.g. config metadata)

    Expects to be called as part of an active session

    ```
    HashRecalculator(
        fail_on_error=False,
        recursively_recalculate_dependent_hashes=True
    ).run()
    ```
    '''
    def __init__(self,
                 fail_on_error: bool,
                 recursively_recalculate_dependent_hashes: bool,
                 dataset_ids: Optional[List[str]] = None,
                 pipeline_ids: Optional[List[str]] = None,
                 model_ids: Optional[List[str]] = None,
                 metric_ids: Optional[List[str]] = None):
        '''
        :param fail_on_error: if True, re-raise hash computation failures;
            otherwise log the error and keep the record's existing hash
        :param recursively_recalculate_dependent_hashes: if True, whenever a
            hash actually changes, queue the downstream persistables too
        :param dataset_ids/pipeline_ids/model_ids/metric_ids: ids of the
            persistables to initially load into the work queues
        '''
        self.fail_on_error = fail_on_error
        self.recursively_recalculate_dependent_hashes = recursively_recalculate_dependent_hashes
        # persistable queues
        self.dataset_queue = self.ids_to_records(Dataset, dataset_ids)
        self.pipeline_queue = self.ids_to_records(Pipeline, pipeline_ids)
        self.model_queue = self.ids_to_records(Model, model_ids)
        self.metric_queue = self.ids_to_records(Metric, metric_ids)

    # NOTE(review): persistable_cls is really the class object
    # (Type[Persistable]); return annotation corrected -- a SimpleQueue of
    # records is returned, not a list
    def ids_to_records(self, persistable_cls: Persistable,
                       ids: Optional[List[str]]) -> SimpleQueue:
        '''Look up each id via the persistable class and enqueue the record.'''
        records = SimpleQueue()
        if ids is not None:
            for id in ids:
                records.put(persistable_cls.find(id))
        return records

    def run(self) -> None:
        '''
        Drain all four queues inside a single transaction, looping because
        dependents may be re-queued while processing.
        '''
        _iterations = 1
        session = Persistable._session
        with session.begin():  # automatic rollback if error raised
            while not self.is_finished:
                LOGGER.debug(f'Processing iteration {_iterations}')
                _iterations += 1
                # order mirrors the dependency chain: datasets feed pipelines,
                # pipelines feed models, models feed metrics
                self.process_queue(self.dataset_queue)
                self.process_queue(self.pipeline_queue)
                self.process_queue(self.model_queue)
                self.process_queue(self.metric_queue)

    @property
    def is_finished(self) -> bool:
        # done only when every persistable queue has been drained
        return self.dataset_queue.empty() and self.pipeline_queue.empty() and self.model_queue.empty() and self.metric_queue.empty()

    def process_queue(self, queue: SimpleQueue) -> None:
        '''
        Loop one iteration through a queue -- adds items back to queues if
        recursive parameter set
        '''
        LOGGER.debug(f'Processing {queue.qsize()} items in queue')
        while not queue.empty():
            record = queue.get()
            existing_hash = record.hash_
            new_hash = self.recalculate_hash(record)
            if existing_hash == new_hash:
                # unchanged hash means downstream hashes are unchanged too
                LOGGER.debug('No hash modification, skipping update')
                continue
            LOGGER.debug(f'Updating persistable {record.id} hash {existing_hash} -> {new_hash}')
            record.update(hash_=new_hash)
            if self.recursively_recalculate_dependent_hashes:
                self.queue_dependent_persistables(record)

    def recalculate_hash(self, record):
        '''
        Load the record as a full persistable and recompute its hash.

        On failure: re-raises when fail_on_error is set, otherwise logs and
        returns the existing hash (no-op for the caller).
        '''
        try:
            # turn record into a persistable with a hash method
            record.load(load_externals=False)
            return record._hash()
        except Exception as e:
            if self.fail_on_error:
                raise
            else:
                LOGGER.error(f"Failed to generate a new hash for record, skipping modification; {e}")
                return record.hash_

    def queue_dependent_persistables(self, persistable: Persistable) -> None:
        '''
        Queries for dependent persistables and queues them into the respective
        queues
        '''
        persistable_type = persistable.object_type
        # downstream dependencies: (dependent class, FK column pointing back
        # at this persistable, queue that class is processed from)
        dependency_map = {
            'DATASET': ((Pipeline, 'dataset_id', self.pipeline_queue), (Metric, 'dataset_id', self.metric_queue)),
            'PIPELINE': ((Dataset, 'pipeline_id', self.dataset_queue), (Model, 'pipeline_id', self.model_queue)),
            'MODEL': ((Metric, 'model_id', self.metric_queue),),
            'METRIC': ()  # metrics are terminal -- nothing depends on them
        }
        for (immediate_dependency, foreign_key, queue) in dependency_map[persistable_type]:
            dependents = immediate_dependency.where(**{foreign_key: persistable.id}).all()
            LOGGER.debug(f'Found {len(dependents)} dependent persistables. Adding to queues')
            for dependent in dependents:
                queue.put(dependent)
def recalculate_dataset_hashes(fail_on_error: bool = False,
                               recursively_recalculate_dependent_hashes: bool = False) -> None:
    '''
    Convenience helper to recompute the hash of every dataset persistable.

    :param fail_on_error: re-raise hash computation failures instead of skipping
    :param recursively_recalculate_dependent_hashes: also recalculate hashes
        for downstream persistables whose parent hash changed
    '''
    dataset_ids = [record.id for record in Dataset.all()]
    recalculator = HashRecalculator(
        fail_on_error=fail_on_error,
        recursively_recalculate_dependent_hashes=recursively_recalculate_dependent_hashes,
        dataset_ids=dataset_ids
    )
    recalculator.run()
def recalculate_pipeline_hashes(fail_on_error: bool = False,
                                recursively_recalculate_dependent_hashes: bool = False) -> None:
    '''
    Convenience helper to recompute pipeline hashes. Optionally recalculates hashes
    for downstream persistables

    :param fail_on_error: re-raise hash computation failures instead of skipping
    :param recursively_recalculate_dependent_hashes: also queue downstream persistables
    '''
    records = Pipeline.all()
    recalculator = HashRecalculator(
        fail_on_error=fail_on_error,
        recursively_recalculate_dependent_hashes=recursively_recalculate_dependent_hashes,
        # BUG FIX: these are pipeline ids -- the original passed them as
        # dataset_ids, so the recalculator loaded them as Dataset records
        pipeline_ids=[i.id for i in records]
    )
    recalculator.run()
def recalculate_model_hashes(fail_on_error: bool = False,
                             recursively_recalculate_dependent_hashes: bool = False) -> None:
    '''
    Convenience helper to recompute model hashes. Optionally recalculates hashes
    for downstream persistables

    :param fail_on_error: re-raise hash computation failures instead of skipping
    :param recursively_recalculate_dependent_hashes: also queue downstream persistables
    '''
    records = Model.all()
    recalculator = HashRecalculator(
        fail_on_error=fail_on_error,
        recursively_recalculate_dependent_hashes=recursively_recalculate_dependent_hashes,
        # BUG FIX: these are model ids -- the original passed them as
        # dataset_ids, so the recalculator loaded them as Dataset records
        model_ids=[i.id for i in records]
    )
    recalculator.run()
def recalculate_metric_hashes(fail_on_error: bool = False,
                              recursively_recalculate_dependent_hashes: bool = False) -> None:
    '''
    Convenience helper to recompute metric hashes. Optionally recalculates hashes
    for downstream persistables

    :param fail_on_error: re-raise hash computation failures instead of skipping
    :param recursively_recalculate_dependent_hashes: also queue downstream persistables
    '''
    records = Metric.all()
    recalculator = HashRecalculator(
        fail_on_error=fail_on_error,
        recursively_recalculate_dependent_hashes=recursively_recalculate_dependent_hashes,
        # BUG FIX: these are metric ids -- the original passed them as
        # dataset_ids, so the recalculator loaded them as Dataset records
        metric_ids=[i.id for i in records]
    )
    recalculator.run()
| StarcoderdataPython |
1659219 | '''
Custom Crowdstrike library
--------------------------
Base Crowdstrike Class
Base API Requests Call
'''
import requests, json, datetime
from time import sleep
from base64 import b64encode
class CrowdStrike(object):
    """Base CrowdStrike API client.

    Manages the OAuth2 bearer token lifecycle and exposes thin HTTP helpers
    (GET/POST/PATCH/DELETE/file upload) that transparently retry once the
    rate-limit window advertised in X-RateLimit-RetryAfter has passed.
    """

    def __init__(self, endpoint, clientid, secret, oauth_endpoint="https://api.crowdstrike.com/"):
        """
        :param endpoint: base URL used for API requests
        :param clientid: OAuth2 client id
        :param secret: OAuth2 client secret
        :param oauth_endpoint: base URL used for token creation/revocation
        """
        self.clientid = clientid
        self.secret = secret
        self.endpoint = endpoint  # For API Requests
        self.oauth_endpoint = oauth_endpoint
        self.__jwt = self.__createtoken()
        self.headers = {
            "accept": "application/json",
            "Authorization": "Bearer " + self.__jwt
        }

    # The class is split across modules: importing functions at class-body
    # level binds them as methods of CrowdStrike.
    # HOST
    from ._CrowdstrikeHost import DevicesQueriesDevices
    from ._CrowdstrikeHost import DevicesEntitiesDevices
    from ._CrowdstrikeHost import DevicesCombinedDevicesLoginhistory
    from ._CrowdstrikeHost import DevicesCombinedDevicesNetworkAddressHistory
    from ._CrowdstrikeHost import DevicesEntitiesDevicesActions
    # SENSOR
    from ._CrowdstrikeSensor import SensorsQueriesInstallersCcid
    # MALQUERY
    from ._CrowdstrikeIntel import MalqueryEntitiesMetadata
    # IOC
    from ._CrowdstrikeIoC import IndicatorsQueriesDevices
    # Detects
    from ._CrowdstrikeDetects import DetectsQueriesDetects
    from ._CrowdstrikeDetects import DetectsEntitiesSummariesGET
    from ._CrowdstrikeDetects import DetectsEntitiesDetects
    # User Manegement
    from ._CrowdstrikeUserMenagement import UsersEntitiesUsers
    from ._CrowdstrikeUserMenagement import UsersQueriesUser_uuids_by_cid
    # RTR
    from ._CrowdstrikeRTR import RealTimeResponseBatchInitSession
    from ._CrowdstrikeRTR import RealTimeResponseCombinedBatchRefreshSession
    from ._CrowdstrikeRTR import RealTimeResponseCombinedBatchAdminCommand
    from ._CrowdstrikeRTR import RealTimeResponseQueriesSessions
    from ._CrowdstrikeRTR import RealTimeResponseEntitiesScripts
    from ._CrowdstrikeRTR import RealTimeResponseQueriesScripts
    from ._CrowdstrikeRTR import RealTimeResponseEntitiesScriptsUpload
    from ._CrowdstrikeRTR import RealTimeResponseEntitiesScriptsDelete
    from ._CrowdstrikeRTR import RealTimeResponseQueriesPutFiles
    from ._CrowdstrikeRTR import RealTimeResponseEntitiesPutFiles
    from ._CrowdstrikeRTR import RealTimeResponseEntitiesPutFilesUpload
    from ._CrowdstrikeRTR import RealTimeResponseEntitiesPutFilesDelete

    def GetToken(self):
        """Return the current OAuth2 bearer token."""
        return self.__jwt

    def Close(self):
        """Revoke the current token; call when finished with the client."""
        self.__revoketoken()

    def GetAPI(self, path):
        '''Is public for custom requests'''
        req = requests.get(self.endpoint + path, headers=self.headers)
        if req.status_code == 403:
            print ("Forbidden, maybe Token or API have problem")
            exit(1)
        if req.status_code == 429 and "X-RateLimit-RetryAfter" in req.headers:
            self.__ratelimit(req.headers["X-RateLimit-RetryAfter"])
            # BUG FIX: propagate the retried response (the original called
            # GetAPI then `return`-ed nothing, discarding the result)
            return self.GetAPI(path)
        return req.text

    def DeleteAPI(self, path):
        '''Is public for custom requests'''
        req = requests.delete(self.endpoint + path, headers=self.headers)
        if req.status_code == 403:
            print ("Forbidden, maybe Token or API have problem")
            return
        if req.status_code == 429 and "X-RateLimit-RetryAfter" in req.headers:
            self.__ratelimit(req.headers["X-RateLimit-RetryAfter"])
            # BUG FIX: retry the DELETE itself (the original retried GetAPI)
            # and return its result
            return self.DeleteAPI(path)
        return req.text

    def PostAPI(self, path, payload):
        """POST *payload* as JSON to *path* and return the response body."""
        # BUG FIX: copy the shared header dict -- the original mutated
        # self.headers, leaking Content-type into every subsequent request
        headers = dict(self.headers)
        headers.update({"Content-type": "application/json"})
        req = requests.post(self.endpoint + path, json=payload, headers=headers)
        if req.status_code == 403:
            print ("Forbidden, maybe Token or API have problem")
            exit(1)
        if req.status_code == 429 and "X-RateLimit-RetryAfter" in req.headers:
            self.__ratelimit(req.headers["X-RateLimit-RetryAfter"])
            # BUG FIX: the original retried `self.PostAPI(payload)` -- missing
            # the `path` argument (TypeError) -- and dropped the result
            return self.PostAPI(path, payload)
        return req.text

    def PostFileAPI(self, path, payload):
        """POST a multipart file upload (when 'file' in payload) or form data."""
        headers = dict(self.headers)  # see PostAPI: never mutate self.headers
        if "file" in payload:
            req = requests.post(self.endpoint + path, files=payload, headers=headers)
        else:
            headers.update({"Content-type": "multipart/form-data"})
            req = requests.post(self.endpoint + path, data=payload, headers=headers)
        if req.status_code == 403:
            print ("Forbidden, maybe Token or API have problem")
            return
        if req.status_code == 429 and "X-RateLimit-RetryAfter" in req.headers:
            self.__ratelimit(req.headers["X-RateLimit-RetryAfter"])
            # BUG FIX: retry this upload (the original called PostAPI with a
            # single argument) and return its result
            return self.PostFileAPI(path, payload)
        return req.text

    def PatchAPI(self, path, payload):
        """PATCH *payload* as JSON to *path* and return the response body."""
        headers = dict(self.headers)  # see PostAPI: never mutate self.headers
        headers.update({"Content-type": "application/json"})
        req = requests.patch(self.endpoint + path, json=payload, headers=headers)
        if req.status_code == 403:
            print ("Forbidden, maybe Token or API have problem")
            exit(1)
        if req.status_code == 429 and "X-RateLimit-RetryAfter" in req.headers:
            self.__ratelimit(req.headers["X-RateLimit-RetryAfter"])
            # BUG FIX: retry the PATCH itself (the original called PostAPI
            # with a single argument) and return its result
            return self.PatchAPI(path, payload)
        return req.text

    def __ratelimit(self, RetryAfter):
        """Sleep until the epoch second given by the X-RateLimit-RetryAfter header."""
        import time
        RetryAfterTimestamp = int(RetryAfter)
        print ("Reached the X-RateLimit-RetryAfter:\t" + str(RetryAfter))
        # BUG FIX: compare against the real epoch clock; the original used
        # utcnow().strftime("%s"), which applies the local timezone to a UTC
        # time and is not portable (%s is a glibc extension)
        sleep(max(RetryAfterTimestamp - int(time.time()), 0))

    def __createtoken(self):
        """Request an OAuth2 bearer token; return it, or None on failure."""
        headers = {
            "accept": "application/json",
            "Content-Type": "application/x-www-form-urlencoded"
        }
        payloads = {
            "client_id": self.clientid,
            "client_secret": self.secret
        }
        req = requests.post(self.oauth_endpoint + "oauth2/token", headers=headers, data=payloads)
        if req.status_code > 201:
            # BUG FIX: status_code is an int -- concatenating it to a str
            # raised TypeError instead of printing the error
            print ("HTTP ERROR:\t" + str(req.status_code) + "\nMESSAGE:\t" + req.text)
            return
        resp_json = json.loads(req.text)
        if "access_token" in resp_json:
            return resp_json["access_token"]

    def __revoketoken(self):
        """Revoke the current bearer token and clear it on success."""
        headers = {
            "Authorization": "Basic " + b64encode(str.encode(self.clientid+":"+self.secret)).decode(),
            "accept": "application/json",
            "Content-Type": "application/x-www-form-urlencoded"
        }
        payloads = {
            "token": self.__jwt
        }
        # BUG FIX: revoke against the OAuth endpoint, matching __createtoken
        # (the original posted to the API endpoint)
        req = requests.post(self.oauth_endpoint + "oauth2/revoke", headers=headers, data=payloads)
        if req.status_code == 200:
            self.__jwt = None

    def __custom_error(self, msg):
        """Build a uniform error payload for library-level failures."""
        return {
            "crowdstrikeclient" : {
                "error": msg
            }
        }
| StarcoderdataPython |
3287573 | <gh_stars>10-100
import pytest
from pactman import Like, SomethingLike
from pactman.mock.matchers import Matcher, Term
def test_is_something_like():
    """SomethingLike must remain an exact alias of Like."""
    assert Like is SomethingLike
def test_valid_types():
    """Every supported example type is accepted without raising."""
    for example in (None, [], {}, 1, 1.0, "string", "unicode", Matcher()):
        SomethingLike(example)
def test_invalid_types():
    """An unsupported type (set) is rejected with an informative message."""
    with pytest.raises(AssertionError) as excinfo:
        SomethingLike(set())
    assert "matcher must be one of " in str(excinfo.value)
def test_basic_type():
    """A scalar serializes into the Ruby-protocol SomethingLike wrapper."""
    expected = {"json_class": "Pact::SomethingLike", "contents": 123}
    assert SomethingLike(123).ruby_protocol() == expected
def test_complex_type():
    """Nested matchers serialize recursively into the Ruby protocol."""
    expected = {
        "json_class": "Pact::SomethingLike",
        "contents": {
            "name": {
                "json_class": "Pact::Term",
                "data": {
                    "matcher": {"json_class": "Regexp", "s": ".+", "o": 0},
                    "generate": "admin",
                },
            }
        },
    }
    assert SomethingLike({"name": Term(".+", "admin")}).ruby_protocol() == expected
| StarcoderdataPython |
1740619 | <reponame>mardix/bufferapp
from bufferapp.response import ResponseObject
# Relative API path templates keyed by operation; the %s placeholder is
# filled with the target URL.
# NOTE(review): the URL is interpolated without encoding -- presumably
# callers pass an already URL-encoded value; confirm before relying on it.
PATHS = {
    'GET_SHARES': 'links/shares.json?url=%s'
}
class Link(ResponseObject):
    '''
    A link represents a unique URL that has been shared through Buffer
    '''

    def __init__(self, api, url):
        # fetch the current share count up front so the response object is
        # initialized with both the url and its shares
        share_count = api.get(url=PATHS['GET_SHARES'] % url)['shares']
        super(Link, self).__init__({'shares': share_count, 'url': url})
        self.api = api

    def get_shares(self):
        '''
        Refresh and return the number of times this link was shared using
        Buffer.

        www will be stripped, but other subdomains will not.
        '''
        response = self.api.get(url=PATHS['GET_SHARES'] % self.url)
        self.shares = response['shares']
        return self.shares
| StarcoderdataPython |
1765825 | '''
brt.py
Created by <NAME>
<EMAIL>
version 1.1 -- 7.15.2017
Buffalo Ray Trace (BRT) is an interactive GUI for plotting image
predictions for a lens model. BRT is written in Python, utilizing the
tkinter GUI library, the matplotlib plotting library, the astropy
library of tools for astrophysical data analysis. All are available
through Anaconda.
The only required inputs for BRT are the x and y deflection files (FITS),
in units of arcseconds, and a PNG color image or FITS image of the field of view.
These two sets of inputs need to have the same field of view. The program provides
helper functions to create these files.
VERSION HISTORY:
1.1 -- 7.15.2017: Fixed minor bugs. Fixed bug with computing dls/ds using proper
cosmology. Added postage stamp feature. Added feature that inserts
the redshift of the selected arcs from the arc list into the boxes
for ray tracing and plotting the critical curve.
'''
import matplotlib
matplotlib.use('TkAgg')
import numpy as np
import os
import sys
if sys.version_info[0] < 3:
from Tkinter import *
import tkMessageBox as tkMB
else:
from tkinter import *
from tkinter import messagebox as tkMB
import pickle
from astropy.io import fits
from astropy.wcs import WCS
from astropy.cosmology import FlatLambdaCDM
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
from matplotlib.backend_bases import key_press_handler
from matplotlib.figure import Figure
from itertools import cycle
import warnings
import time
import datetime
import platform
from PIL import Image
def dlsds(zl, zs, Om0=0.3, H0=70):
    """Return the lensing distance ratio D_ls/D_s for each source redshift.

    Parameters
    ----------
    zl : float
        Lens (cluster) redshift.
    zs : sequence of float
        Source redshifts.
    Om0, H0 : float
        Flat-LCDM matter density and Hubble constant (km/s/Mpc).

    Returns
    -------
    numpy.ndarray
        Float array with one D_ls/D_s ratio per entry of ``zs``.
    """
    cosmo = FlatLambdaCDM(Om0=Om0, H0=H0)
    # BUG FIX: allocate an explicit float array; np.zeros_like(zs) inherits
    # the input dtype, so an integer redshift list silently truncated every
    # ratio (which is always < 1) to 0
    ratio = np.zeros(len(zs), dtype=float)
    for i, z in enumerate(zs):
        dls = cosmo.angular_diameter_distance_z1z2(zl, z).value
        ds = cosmo.angular_diameter_distance(z).value
        ratio[i] = dls / ds
    return ratio
def predict_images(xim, yim, deflectx, deflecty, dlsds=1, maxdist=0.5):
    """Predict all image-plane positions of the source seen at (xim, yim).

    Each image-plane pixel is mapped to the source plane via the lens
    equation ``beta = theta - (dls/ds) * alpha``; pixels whose source-plane
    position lies within ``maxdist`` pixels of the clicked pixel's source
    position are returned as predicted counter-images.

    Parameters
    ----------
    xim, yim : float
        1-indexed pixel coordinates of the clicked image.
    deflectx, deflecty : numpy.ndarray
        Deflection maps in pixel units (already divided by the pixel scale).
    dlsds : float
        Distance ratio scaling the deflections (1 for infinite redshift maps).
    maxdist : float
        Source-plane matching radius in pixels.

    Returns
    -------
    (numpy.ndarray, numpy.ndarray)
        1-indexed x and y pixel positions of the predicted images.
    """
    rows, cols = deflectx.shape
    # BUG FIX: for non-square maps the original looped columns with the row
    # count and rows with the column count, corrupting the mapping; the
    # broadcasted form below handles any shape (and matches the square case).
    source_x = np.arange(1, cols + 1)[None, :] - deflectx * dlsds
    source_y = np.arange(1, rows + 1)[:, None] - deflecty * dlsds
    row = int(np.round(yim)) - 1
    col = int(np.round(xim)) - 1
    xs = source_x[row, col]
    ys = source_y[row, col]
    # distance of every pixel's source position from the clicked source position
    d = np.sqrt((source_x - xs) ** 2 + (source_y - ys) ** 2)
    yidx, xidx = np.where(d < maxdist)
    return xidx + 1, yidx + 1
def update(f):
    """Shift the WCS reference pixel of FITS file *f* by +0.5 in x and y.

    Rewrites the file in place; used to align pixel-center vs pixel-corner
    conventions between the deflection maps and the display image.
    """
    data = fits.getdata(f)
    h = fits.getheader(f)
    h['CRPIX1'] += 0.5
    h['CRPIX2'] += 0.5
    # BUG FIX: the 'clobber' keyword was deprecated in astropy 1.3 and
    # removed in astropy 2.0; 'overwrite' is the supported name
    fits.writeto(f, data, header=h, overwrite=True)
class StartWindow:
    """Start-up dialog: collects the lens-model inputs (image, deflection maps,
    redshifts, cosmology), validates them and loads everything into module
    globals for the main window."""

    def __init__(self):
        self.root = Tk()
        self.root.wm_title("Start up")
        self.root.geometry("380x380")
        titleFrame = Frame(self.root)
        titleFrame.pack()
        title = Label(titleFrame, text="Buffalo Ray Trace", fg='blue')
        title.config(font=("Helvetica", 24))
        title.pack()
        Label(titleFrame, text='Enter model parameters', fg='red').pack()
        entryFrame = Frame(self.root)
        entryFrame.pack()
        Label(entryFrame, text="Cluster redshift: ").grid(row=0, column=0, sticky=E)
        self.entry_zl = Entry(entryFrame, width=15)
        self.entry_zl.grid(row=0, column=1)
        Label(entryFrame, text="Image file: ").grid(row=1, column=0, sticky=E)
        self.entry_imagefile = Entry(entryFrame, width=15)
        self.entry_imagefile.grid(row=1, column=1)
        Button(entryFrame, text='Create', command=self.tiff_window, padx=5, pady=5).grid(row=1, column=2)
        Label(entryFrame, text="X deflection file: ").grid(row=2, column=0, sticky=E)
        self.entry_dxfile = Entry(entryFrame, width=15)
        self.entry_dxfile.grid(row=2, column=1)
        Label(entryFrame, text="Y deflection file: ").grid(row=3, column=0, sticky=E)
        self.entry_dyfile = Entry(entryFrame, width=15)
        self.entry_dyfile.grid(row=3, column=1)
        Label(entryFrame, text="Deflection file redshift: ").grid(row=4, column=0, sticky=E)
        self.entry_dz = Entry(entryFrame, width=15)
        self.entry_dz.grid(row=4, column=1)
        self.check = IntVar()
        self.check.set(0)
        Checkbutton(entryFrame, variable=self.check, onvalue=1, offvalue=0,
                    text='Deflection files are dls/ds = 1').grid(row=5, columnspan=2)
        Label(entryFrame, text='Enter cosmological parameters', fg='red').grid(row=6, columnspan=2)
        Label(entryFrame, text="Omega M (1 - Omega L): ").grid(row=7, column=0, sticky=E)
        self.entry_Om0 = Entry(entryFrame, width=15)
        self.entry_Om0.grid(row=7, column=1)
        self.entry_Om0.insert(0, '0.3')
        Label(entryFrame, text="Hubble constant (km/s/Mpc): ").grid(row=8, column=0, sticky=E)
        self.entry_H0 = Entry(entryFrame, width=15)
        self.entry_H0.grid(row=8, column=1)
        self.entry_H0.insert(0, '70.0')
        submitFrame = Frame(self.root)
        submitFrame.pack()
        Button(submitFrame, text="Enter", command=self.getParams, padx=5, pady=5).pack()
        Label(submitFrame, text='Or').pack()
        Button(submitFrame, text="Load previous model", command=self.loadPrevious, padx=5, pady=5).pack()
        self.root.mainloop()

    def tiff_window(self):
        """Open a helper dialog for exporting the current ds9 view as a TIFF."""
        self.toplevel = Toplevel(self.root)
        Label(self.toplevel, text='Open a fits file in ds9 (or three if RGB) and\nscale to the desired output.',
              fg='blue').grid(row=0, columnspan=2)
        Label(self.toplevel, text='TIFF file name: ').grid(row=1, column=0, sticky=E)
        self.file_entry = Entry(self.toplevel, width=10)
        self.file_entry.grid(row=1, column=1)
        Button(self.toplevel, text='Write TIFF', command=self.createTiff, padx=5, pady=5).grid(row=2, columnspan=2)

    def createTiff(self):
        """Export the ds9 view to TIFF (via XPA) and pickle its WCS alongside."""
        tiffname = self.file_entry.get()
        os.system('xpaset -p ds9 export tiff ' + os.getcwd() + '/' + tiffname + ' none')
        fitsname = os.popen('xpaget ds9 file').readlines()[0].rsplit()[0]
        htiff = fits.getheader(fitsname)
        wtiff = WCS(htiff)
        # close the pickle file deterministically (was an unclosed open())
        with open(tiffname + '.wcs', 'wb') as f:
            pickle.dump(wtiff, f)
        self.entry_imagefile.delete(0, END)
        self.entry_imagefile.insert(0, tiffname)
        self.toplevel.destroy()

    def getParams(self):
        """Validate all form fields; on success persist them and start the model."""
        self.zl = self.entry_zl.get()
        self.imagefile = self.entry_imagefile.get()
        self.dxfile = self.entry_dxfile.get()
        self.dyfile = self.entry_dyfile.get()
        self.dz = self.entry_dz.get()
        self.isInf = self.check.get()
        self.Om0 = self.entry_Om0.get()
        self.H0 = self.entry_H0.get()
        errors = []
        # BUG FIX: the original used `except ValueError or <cond>:` clauses,
        # which Python evaluates as `except ValueError:` -- none of the range
        # checks were ever performed. Validate the ranges explicitly instead.
        try:
            self.zl = float(self.zl)
            if self.zl < 0:
                raise ValueError
        except ValueError:
            errors.append('Cluster redshift must be a number > 0.')
        for file in [self.imagefile, self.dxfile, self.dyfile]:
            if not os.path.isfile(file):
                errors.append('File "' + file + '" does not exist.')
        if self.isInf == 0:
            try:
                self.dz = float(self.dz)
                # only compare when the cluster redshift parsed successfully
                if isinstance(self.zl, float) and self.dz < self.zl:
                    raise ValueError
            except ValueError:
                errors.append('Deflect file redshift must be a number > cluster redshift.')
        try:
            self.Om0 = float(self.Om0)
            if not 0 <= self.Om0 <= 1:
                raise ValueError
        except ValueError:
            errors.append('Omega M must be a number between 0 and 1')
        try:
            self.H0 = float(self.H0)
            if not 0 < self.H0 < 100:
                raise ValueError
        except ValueError:
            errors.append('H0 must be a number between 0 and 100.')
        if len(errors) > 0:
            tkMB.showinfo('Error', '\n\n'.join(errors))
        else:
            # remember the settings for "Load previous model"
            with open('last.brt', 'wb') as f:
                pickle.dump((self.zl, self.imagefile, self.dxfile, self.dyfile,
                             self.dz, self.isInf, self.Om0, self.H0), f)
            self.startUp()
            self.root.destroy()

    def loadPrevious(self):
        """Refill the form from the settings saved by the last successful run."""
        if os.path.isfile('last.brt'):
            with open('last.brt', 'rb') as f:
                (self.zl, self.imagefile, self.dxfile, self.dyfile,
                 self.dz, self.isInf, self.Om0, self.H0) = pickle.load(f)
            self.entry_zl.delete(0, END)
            self.entry_zl.insert(0, str(self.zl))
            self.entry_imagefile.delete(0, END)
            self.entry_imagefile.insert(0, self.imagefile)
            self.entry_dxfile.delete(0, END)
            self.entry_dxfile.insert(0, self.dxfile)
            self.entry_dyfile.delete(0, END)
            self.entry_dyfile.insert(0, self.dyfile)
            self.entry_dz.delete(0, END)
            self.entry_dz.insert(0, str(self.dz))
            self.check.set(self.isInf)
            self.entry_Om0.delete(0, END)
            self.entry_Om0.insert(0, str(self.Om0))
            self.entry_H0.delete(0, END)
            self.entry_H0.insert(0, str(self.H0))
        else:
            tkMB.showinfo('Error', 'Could not locate previous model. Enter new parameters.')

    def startUp(self):
        """Load image + deflection maps, align the WCS frames and fill the
        module globals consumed by MainWindow."""
        global zl, image, deflectx, deflecty, dDXdx, dDXdy, dDYdx, dDYdy, Om0, H0, wcs, scalefactor, xoff, yoff
        zl = self.zl
        Om0 = self.Om0
        H0 = self.H0
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            im = Image.open(self.imagefile)
        with open(self.imagefile + '.wcs', 'rb') as f:
            wtiff = pickle.load(f)
        deflectx = fits.getdata(self.dxfile)
        deflecty = fits.getdata(self.dyfile)
        h = fits.getheader(self.dxfile)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            wcs = WCS(h)
        # corners of the deflection grid expressed in the TIFF pixel frame
        ra, dec = wcs.wcs_pix2world([1, h['NAXIS1'] + 1], [1, h['NAXIS2'] + 1], 1)
        x, y = wtiff.wcs_world2pix(ra, dec, 1)
        # sub-pixel offsets between the cropped image and the deflection grid
        xoff = 0.5 - (x[0] - int(x[0]) + x[1] - int(x[1])) / 2
        yoff = 0.5 - (y[0] - int(y[0]) + y[1] - int(y[1])) / 2
        image = im.crop((int(x[0]), im.height - int(y[1]) + 1, int(x[1]) + 1, im.height - int(y[0])))
        scalefactor = image.width / deflectx.shape[0]
        ps = h['CDELT2'] * 3600  # arcsec per deflection-map pixel
        # convert deflections from arcsec to pixels
        deflectx /= ps
        deflecty /= ps
        if self.isInf == 0:
            # maps were scaled to a finite source redshift; rescale to dls/ds = 1
            ratio = dlsds(zl, [self.dz], Om0=Om0, H0=H0)
            deflectx /= ratio[0]
            deflecty /= ratio[0]
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            # deflection gradients, used for magnification / critical curves
            dDXdy, dDXdx = np.gradient(deflectx)
            dDYdy, dDYdx = np.gradient(deflecty)
class MainWindow:
def __init__(self):
self.root = Tk()
self.root.wm_title("BRT")
self.root.geometry("1000x750")
self.points = []
self.labels = []
self.curves = []
self.arcs = []
self.arc_annotate = []
self.arc_labels = np.array([])
self.arc_x = np.array([])
self.arc_y = np.array([])
self.stamps = []
self.cid = None
self.X,self.Y = np.meshgrid(np.arange(deflectx.shape[1])*scalefactor,np.arange(deflecty.shape[0])*scalefactor)
plotFrame = Frame(self.root,width=800,height=750)
plotFrame.grid(rowspan=10,column=0)
self.fig = Figure(figsize=(7, 7), dpi=100)
self.fig.subplots_adjust(left=0.05,right=0.85,top=0.95,bottom=0.05)
self.ax = self.fig.add_subplot(111)
self.ax.imshow(image,extent=(-xoff,image.width-xoff,-yoff,image.height-yoff),interpolation='none')
self.ax.set_xlim(-xoff,image.width-xoff)
self.ax.set_ylim(-yoff,image.height-yoff)
self.ax.axes.get_xaxis().set_visible(False)
self.ax.axes.get_yaxis().set_visible(False)
self.ax.set_title('cluster z = '+str(zl),size=20)
canvas = FigureCanvasTkAgg(self.fig, master=plotFrame)
canvas.show()
canvas.get_tk_widget().pack(side=TOP,fill=BOTH,expand=1)
self.toolbar = NavigationToolbar2TkAgg(canvas, plotFrame)
self.toolbar.update()
canvas._tkcanvas.pack(side=TOP,fill=BOTH,expand=1)
dropDownFrame = Frame(self.root,padx=10,pady=15)
dropDownFrame.grid(row=0,column=1,sticky=S)
self.plot_var = StringVar(dropDownFrame)
self.plot_var.set("Ray Trace")
self.connect()
Label(dropDownFrame,text='Click Mode: ').grid(row=0,column=0,sticky=E)
option = OptionMenu(dropDownFrame, self.plot_var, "Ray Trace", "Postage Stamp",
command= lambda x: self.connect() if self.plot_var.get() == 'Ray Trace' else self.connect_postage_stamp())
option.grid(row=0,column=1)
Button(dropDownFrame,text='Kill all windows',padx=5,pady=5,command=self.kill_all).grid(row=1,columnspan=2)
Button(dropDownFrame,text='Clear all points',padx=5,pady=5,command=self.clear_all).grid(row=2,columnspan=2)
clickFrame = Frame(self.root)
clickFrame.grid(row=1,column=1)
Label(clickFrame,text='Click to add images',fg='blue').grid(row=0,columnspan=2)
Label(clickFrame,text = 'source z: ').grid(row=1,column=0,sticky=E)
self.zsource_entry = Entry(clickFrame,width=10)
self.zsource_entry.grid(row=1,column=1)
self.zsource_entry.insert(0,'2.0')
Label(clickFrame,text = 'z step: ').grid(row=2,column=0,sticky=E)
self.zstep_entry = Entry(clickFrame,width=10)
self.zstep_entry.grid(row=2,column=1)
self.zstep_entry.insert(0,'0.5')
Label(clickFrame,text = '# iterations: ').grid(row=3,column=0,sticky=E)
self.nz_entry = Entry(clickFrame,width=10)
self.nz_entry.grid(row=3,column=1)
self.nz_entry.insert(0,'6')
Button(clickFrame,text="Clear points",padx=5,pady=5,command=self.clear_points).grid(row=4,columnspan=2)
critFrame = Frame(self.root,padx=10)
critFrame.grid(row=2,column=1)
Label(critFrame,text="Plot critical curve",fg='blue').grid(row=0,columnspan=4)
Label(critFrame,text='z crit: ').grid(row=1,column=0,sticky=E)
self.z_crit_entry = Entry(critFrame,width=10)
self.z_crit_entry.insert(0,'2.0')
self.z_crit_entry.grid(row=1,column=1)
Button(critFrame,text="Plot",padx=5,pady=5,command=self.plot_crit).grid(row=1,column=2)
Button(critFrame,text="Clear",padx=5,pady=5,command=self.clear_curve).grid(row=1,column=3)
saveFrame = Frame(self.root,padx=10)
saveFrame.grid(row=3,column=1)
Label(saveFrame,text='Save points to region file',fg='blue').grid(row=0,columnspan=3)
Label(saveFrame,text='file name: ').grid(row=1,column=0,sticky=E)
self.reg_entry = Entry(saveFrame,width=10)
self.reg_entry.grid(row=1,column=1)
self.reg_entry.insert(0,'points.reg')
self.status_var = StringVar(self.root)
self.status_var.set(' ')
Label(saveFrame,textvariable=self.status_var).grid(row=2,columnspan=3)
Button(saveFrame,text='Save',padx=5,pady=5,command=self.plot_ds9).grid(row=1,column=2)
loadFrame = Frame(self.root,padx=10)
loadFrame.grid(row=4,column=1)
Label(loadFrame,text='Load arcs from file',fg='blue').grid(row=0,columnspan=3)
Label(loadFrame,text='file name: ').grid(row=1,column=0,sticky=E)
self.file_entry = Entry(loadFrame,width=10)
self.file_entry.grid(row=1,column=1)
self.file_entry.insert(0,'arcs.dat')
Button(loadFrame,text='Load list',padx=5,pady=5,command=self.load_arcs).grid(row=1,column=2)
Button(loadFrame,text='Clear list',command=self.clear_list,padx=5,pady=5).grid(row=1,column=3)
scrollFrame = Frame(self.root,padx=10)
scrollFrame.grid(row=5,column=1)
Label(scrollFrame,text='arc list').pack()
scrollbar = Scrollbar(scrollFrame)
scrollbar.pack(side=RIGHT, fill=Y)
self.listbox = Listbox(scrollFrame, width=10, height=5, selectmode='multiple',yscrollcommand=scrollbar.set)
self.listbox.pack(side=LEFT, fill=BOTH)
scrollbar.config(command=self.listbox.yview)
arcFrame = Frame(self.root,padx=10)
arcFrame.grid(row=6,column=1)
Button(arcFrame,text='Use selected arc redshift',command=self.insert_redshift,padx=5,pady=5).grid(row=0,columnspan=2)
Button(arcFrame,text='Plot',command=self.plot_arcs,padx=5,pady=5).grid(row=1,column=0)
Button(arcFrame,text='Clear',command=self.clear_arcs,padx=5,pady=5).grid(row=1,column=1)
Button(arcFrame,text='Select all',command=self.select_all,padx=5,pady=5).grid(row=2,column=0)
Button(arcFrame,text='Deselect all',command=self.deselect_all,padx=5,pady=5).grid(row=2,column=1)
self.root.mainloop()
def connect(self):
if self.cid is not None:
self.disconnect()
self.cid = self.fig.canvas.callbacks.connect('button_press_event', self.on_click)
def on_click(self,event):
if event.xdata is None or event.ydata is None: return
zsource = self.zsource_entry.get()
zstep = self.zstep_entry.get()
nz = self.nz_entry.get()
if self.toolbar._active is not None: return
for l in self.labels:
l.remove()
self.labels = []
errors = []
try:
zsource = float(zsource)
except ValueError or zsource < zl:
errors.append('Please enter a number greater than z='+str(zl)+' for source redshift.')
try:
zstep = float(zstep)
except ValueError:
errors.append('Please enter a number z step.')
try:
nz = int(nz)
except ValueError:
errors.append('Please enter a number greater than 0 for # iterations.')
if len(errors) > 0:
tkMB.showinfo('Error','\n\n'.join(errors))
else:
zvalues = zsource + np.arange(nz)*zstep
ratios = dlsds(zl,zvalues,Om0=Om0,H0=H0)
colors = ['red','green','magenta','cyan','yellow','blue']
colorcycle = cycle(colors)
for i in range(len(ratios)):
c = colorcycle.next()
ximp,yimp = predict_images(event.xdata/scalefactor,event.ydata/scalefactor,deflectx,deflecty,dlsds=ratios[i],maxdist=1)
p, = self.ax.plot(ximp*scalefactor,yimp*scalefactor,'o',color=c,markeredgewidth=0,markersize=3)
self.points.append(p)
l = self.fig.text(0.92,0.8-i*0.05,'z='+str(zvalues[i]),color=c,size=14,ha='center',va='center')
self.labels.append(l)
self.fig.canvas.draw()
def connect_postage_stamp(self):
if self.cid is not None:
self.disconnect()
self.cid = self.fig.canvas.callbacks.connect('button_press_event', self.postage_stamp)
def postage_stamp(self,event):
xcent = event.xdata
ycent = event.ydata
if xcent is None or ycent is None: return
stamp = Toplevel(self.root)
self.stamps.append(stamp)
fig = Figure(figsize=(2, 2), dpi=100)
fig.subplots_adjust(left=0.05,right=0.95,top=0.95,bottom=0.05)
ax = fig.add_subplot(111)
ax.imshow(image,extent=(-xoff,image.width-xoff,-yoff,image.height-yoff),interpolation='none')
ax.set_xlim(xcent-200,xcent+200)
ax.set_ylim(ycent-200,ycent+200)
ax.axes.get_xaxis().set_visible(False)
ax.axes.get_yaxis().set_visible(False)
canvas = FigureCanvasTkAgg(fig, master=stamp)
canvas.show()
canvas.get_tk_widget().pack(side=TOP,fill=BOTH,expand=1)
stamp.attributes("-topmost", True)
def kill_all(self):
for s in self.stamps:
s.destroy()
self.stamps = []
def disconnect(self):
self.fig.canvas.callbacks.disconnect(self.cid)
self.cid = None
def clear_points(self):
for p in self.points:
p.remove()
self.points = []
for l in self.labels:
l.remove()
self.labels = []
self.fig.canvas.draw()
def plot_crit(self):
if self.cid is None: return
z = self.z_crit_entry.get()
try:
z = float(z)
except ValueError or z < zl:
errors.append('Please enter a number greater than z='+str(zl)+' for source redshift.')
return
ratio = dlsds(zl,[z])
with warnings.catch_warnings():
warnings.simplefilter('ignore')
A = (1-dDXdx*ratio)*(1-dDYdy*ratio)-(dDXdy*ratio)*(dDYdx*ratio)
mag = 1.0/np.abs(A)
m = self.ax.contour(self.X,self.Y,mag,levels=[100],colors='white')
self.curves.append(m)
self.fig.canvas.draw()
def clear_curve(self):
for m in self.curves:
for coll in m.collections:
coll.remove()
self.curves = []
self.fig.canvas.draw()
def plot_ds9(self):
if self.cid is None: return
f = open(self.reg_entry.get(),'w')
for p in self.points:
xy = p.get_xydata()
ra,dec = wcs.wcs_pix2world(xy[:,0]/scalefactor,xy[:,1]/scalefactor,1)
c = p.get_markerfacecolor()
for i in range(xy.shape[0]):
f.write('fk5;circle({0:0.6f},{1:0.6f},0.1") # color={2} width=2\n'.format(ra[i],dec[i],c))
f.close()
ts = time.time()
self.status_var.set('file saved at: '+datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'))
self.root.update_idletasks()
def load_arcs(self):
    """Load arcs from the file named in the file entry box.

    Expects whitespace-delimited columns; label, RA, dec and redshift are
    read from columns 0, 1, 2 and 6.  Positions are converted from world
    to pixel coordinates, scaled for display, and each arc is added to
    the listbox (with its redshift shown when non-zero).
    """
    arcFile = self.file_entry.get()
    if os.path.isfile(arcFile):
        # NOTE(review): dtype 'S8' yields byte strings under Python 3 —
        # presumably this tool targets Python 2; verify before porting.
        self.arc_labels,ra,dec,self.arc_z = np.loadtxt(arcFile,usecols=(0,1,2,6),dtype='S8,<f8,<f8,S8',unpack=True)
        # World -> pixel, then apply the display scale factor.
        self.arc_x, self.arc_y = wcs.wcs_world2pix(ra,dec,1)
        self.arc_x *= scalefactor
        self.arc_y *= scalefactor
        for a,z in zip(self.arc_labels,self.arc_z):
            # A redshift of 0 is treated as "unknown" and not displayed.
            if float(z) == 0:
                s = ''
            else:
                s = ' (z=%s)' % z
            self.listbox.insert(END,a+s)
    else:
        tkMB.showinfo('Error','File "'+arcFile+ '" does not exist.')
def plot_arcs(self):
    """Plot the arcs selected in the listbox as white square markers,
    label each one, and warn about arcs outside the deflection matrices.
    """
    if self.cid is None: return
    # Drop stale labels before drawing the new selection.
    for l in self.labels:
        l.remove()
    self.labels = []
    select = self.listbox.curselection()
    if len(select) == 0: return
    errors = []
    # NOTE(review): the double-bracket form arc_x[[select]] relies on
    # NumPy accepting a nested sequence as a fancy index, which is
    # deprecated in newer NumPy — confirm against the version in use.
    a, = self.ax.plot(self.arc_x[[select]],self.arc_y[[select]],'sw')
    self.arcs.append(a)
    for i in range(len(select)):
        x = self.arc_x[select[i]]
        y = self.arc_y[select[i]]
        t = self.arc_labels[select[i]]
        # Label slightly above the marker so it stays readable.
        l = self.ax.text(x,y+50,t,color='white',size=12,ha='center',va='center')
        if x < 0 or y < 0 or x > image.width or y > image.height:
            errors.append(t+' is out of range of deflection matrices.')
        self.arc_annotate.append(l)
    self.fig.canvas.draw()
    if len(errors) > 0:
        tkMB.showinfo('Error','\n\n'.join(errors))
def clear_arcs(self):
    """Remove all arc markers and their annotations, then redraw."""
    for artist in self.arcs + self.arc_annotate:
        artist.remove()
    self.arcs = []
    self.arc_annotate = []
    self.fig.canvas.draw()
def clear_all(self):
    """Clear points, critical curves and arcs in one call."""
    for clear in (self.clear_points, self.clear_curve, self.clear_arcs):
        clear()
def select_all(self):
    """Select every entry in the arc listbox."""
    self.listbox.select_set(0, END)
def deselect_all(self):
    """Clear the selection in the arc listbox."""
    self.listbox.selection_clear(0, END)
def clear_list(self):
    """Empty the arc listbox and reset the cached arc arrays."""
    # Tk's delete accepts a (first, last) range; one call replaces the
    # original per-item ``delete(0)`` loop.
    self.listbox.delete(0, END)
    self.arc_labels = np.array([])
    self.arc_x = np.array([])
    self.arc_y = np.array([])
    # Reset the redshifts too, for consistency with the other arc arrays
    # (the original left stale arc_z data behind).
    self.arc_z = np.array([])
def insert_redshift(self):
    """Copy the first selected arc's redshift into the source-redshift
    entry boxes and reset the source count to 1."""
    selection = self.listbox.curselection()
    if not selection:
        return
    z = self.arc_z[selection[0]]
    for entry, value in ((self.zsource_entry, z),
                         (self.nz_entry, '1'),
                         (self.z_crit_entry, z)):
        entry.delete(0, END)
        entry.insert(0, value)
if __name__ == '__main__':
    # Build the start dialog first, then the main application window.
    StartWindow()
    MainWindow()
| StarcoderdataPython |
181637 | #!/usr/bin/env python
###
# Conway's game of life for Unicorn Hat
# (C) 2014 <NAME>
# Shared under the MIT permissive license.
###
import random
import time
import unicornhat as unicorn
class LifeCell:
"""
Central class defining both the cells and the matrix of cells.
"""
matrix = {} # the matrix of cells
def __init__(self, initial_x, initial_y, initial_state='alive'):
"""
Create a new cell
:param initial_x:
:param initial_y:
:param initial_state:
"""
self.x = initial_x
self.y = initial_y
self.current_state = initial_state
self.next_state = 'unknown'
coordinate_tuple = (x, y)
if coordinate_tuple not in LifeCell.matrix:
LifeCell.matrix[coordinate_tuple] = self
else:
print "Cell already exists at %d,%d" % (x, y)
@staticmethod
def wipe_matrix():
"""
Wipe the matrix
"""
LifeCell.matrix = {}
@staticmethod
def matrix_value():
"""
Calculate the current state of the matrix (== the sum of all 'alive' cells)
:return: the 'value' of the matrix
"""
count = 0
for iter_cell in LifeCell.matrix.itervalues():
if iter_cell.current_state == 'alive':
count += 1
return count
@staticmethod
def get_neighbour_surrent_state(x, y):
"""
Get the 'state' of a neighbour
:param x:
:param y:
:return: 1 if alive, 0 if not.
"""
coordinate_tuple = (x, y)
if coordinate_tuple in LifeCell.matrix:
if LifeCell.matrix[coordinate_tuple].current_state == 'alive':
return 1
return 0
@staticmethod
def display_matrix(max_x, max_y, text=False, r=255, g=255, b=255):
"""
Display the matrix, either on the unicorn or on the stdout
:param max_x:
:param max_y:
:param text: If True, display on stdout instead of unicornhat. For debugging
"""
if text:
for x in range(max_x):
for y in range(max_y):
coordinate_tuple = (x, y)
if LifeCell.matrix[coordinate_tuple].current_state == 'alive':
print '*',
else:
print '.',
print
print
else:
for x in range(max_x):
for y in range(max_y):
coordinate_tuple = (x, y)
if LifeCell.matrix[coordinate_tuple].current_state == 'alive':
unicorn.set_pixel(x, y, r, g, b)
else:
unicorn.set_pixel(x, y, 0, 0, 0)
unicorn.show()
@staticmethod
def progress_generation():
"""
Step to the next generation
"""
for iter_cell in LifeCell.matrix.itervalues():
alive_neightbours = iter_cell.get_alive_neighbours()
if iter_cell.current_state == 'alive':
if alive_neightbours < 2:
iter_cell.next_state = 'dead'
elif alive_neightbours > 3:
iter_cell.next_state = 'dead'
else:
iter_cell.next_state = 'alive'
else:
if alive_neightbours == 3:
iter_cell.next_state = 'alive'
else:
iter_cell.next_state = 'dead'
for iter_cell in LifeCell.matrix.itervalues():
iter_cell.progress_state()
def get_alive_neighbours(self):
"""
calculate the number of neighbours that are alive
:return: the number of neighbors currently alive
"""
alive_count = 0
alive_count += LifeCell.get_neighbour_surrent_state(self.x-1, self.y-1)
alive_count += LifeCell.get_neighbour_surrent_state(self.x, self.y-1)
alive_count += LifeCell.get_neighbour_surrent_state(self.x+1, self.y-1)
alive_count += LifeCell.get_neighbour_surrent_state(self.x-1, self.y)
alive_count += LifeCell.get_neighbour_surrent_state(self.x+1, self.y)
alive_count += LifeCell.get_neighbour_surrent_state(self.x-1, self.y+1)
alive_count += LifeCell.get_neighbour_surrent_state(self.x, self.y+1)
alive_count += LifeCell.get_neighbour_surrent_state(self.x+1, self.y+1)
return alive_count
def progress_state(self):
"""
Progress to the next generation for one cell.
"""
self.current_state = self.next_state
self.next_state = 'unknown'
# Main program loop
if __name__ == "__main__":
    # set comfortable brightness
    unicorn.brightness(0.2)
    # unicorn hat size
    # NOTE(review): the Unicorn Hat is an 8x8 grid; with range(max_x)
    # below, a value of 7 only populates a 7x7 area — confirm intended.
    max_x = 7
    max_y = 7
    # Will forever show loops of 50 generations.
    while True:
        cells = []
        LifeCell.wipe_matrix()
        # create a random colour for this run
        random_r = random.randint(0, 255)
        random_g = random.randint(0, 255)
        random_b = random.randint(0, 255)
        for x in range(max_x):
            for y in range(max_y):
                cell = LifeCell(x, y, random.choice(('alive', 'dead')))  # randomly populate the matrix
                cells.append(cell)
        for count in range(50):
            LifeCell.progress_generation()
            LifeCell.display_matrix(max_x, max_y, False, random_r, random_g, random_b)  # use the unicorn hat
            time.sleep(0.1)
            # Restart early once every cell has died.
            if LifeCell.matrix_value() == 0:
                break
| StarcoderdataPython |
1679141 | <reponame>wlanslovenija/django-guardian<filename>guardian/conf/settings.py<gh_stars>0
from __future__ import unicode_literals
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Name used for the anonymous-user object that guardian grants object
# permissions to; honours the deprecated setting name as a fallback.
ANONYMOUS_USER_NAME = getattr(settings, 'ANONYMOUS_USER_NAME', None)
if ANONYMOUS_USER_NAME is None:
    ANONYMOUS_USER_NAME = getattr(settings, 'ANONYMOUS_DEFAULT_USERNAME_VALUE', None)
    if ANONYMOUS_USER_NAME is not None:
        warnings.warn("The ANONYMOUS_DEFAULT_USERNAME_VALUE setting has been renamed to ANONYMOUS_USER_NAME.", DeprecationWarning)
if ANONYMOUS_USER_NAME is None:
    ANONYMOUS_USER_NAME = 'AnonymousUser'

# How 403s are handled: render a template, or raise PermissionDenied.
# These two are mutually exclusive (enforced by check_configuration()).
RENDER_403 = getattr(settings, 'GUARDIAN_RENDER_403', False)
TEMPLATE_403 = getattr(settings, 'GUARDIAN_TEMPLATE_403', '403.html')
RAISE_403 = getattr(settings, 'GUARDIAN_RAISE_403', False)
# Dotted path of the callable used to create the initial anonymous user.
GET_INIT_ANONYMOUS_USER = getattr(settings, 'GUARDIAN_GET_INIT_ANONYMOUS_USER',
    'guardian.management.get_init_anonymous_user')
MONKEY_PATCH = getattr(settings, 'GUARDIAN_MONKEY_PATCH', True)


def check_configuration():
    """Raise ImproperlyConfigured when both 403 modes are enabled."""
    if RENDER_403 and RAISE_403:
        raise ImproperlyConfigured("Cannot use both GUARDIAN_RENDER_403 AND "
            "GUARDIAN_RAISE_403 - only one of this config may be True")

# Validate eagerly at import time so misconfiguration fails fast.
check_configuration()
| StarcoderdataPython |
1775105 | #!/usr/bin/env python
# Copyright 2019, FZI Forschungszentrum Informatik
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import actionlib
import rospy
from control_msgs.msg import (
FollowJointTrajectoryAction,
FollowJointTrajectoryGoal,
FollowJointTrajectoryResult,
)
from std_srvs.srv import Trigger
from trajectory_msgs.msg import JointTrajectoryPoint
from ur_dashboard_msgs.msg import RobotMode, SetModeAction, SetModeGoal
PKG = "ur_robot_driver"   # package under test (used by rostest)
NAME = "trajectory_test"  # test node name


class TrajectoryTest(unittest.TestCase):
    """rostest suite exercising the driver's follow_joint_trajectory action.

    Requires a running ur_robot_driver (real robot or simulator); each
    test sends a joint trajectory and checks the action result code.
    """

    def __init__(self, *args):
        super().__init__(*args)
        rospy.init_node("trajectory_testing_client")
        self.client = actionlib.SimpleActionClient(
            "follow_joint_trajectory", FollowJointTrajectoryAction
        )
        # Fail fast (30 s) if the driver's action server is not up.
        timeout = rospy.Duration(30)
        try:
            self.client.wait_for_server(timeout)
        except rospy.exceptions.ROSException as err:
            self.fail(
                "Could not reach controller action. Make sure that the driver is actually running."
                " Msg: {}".format(err)
            )
        self.init_robot()

    def init_robot(self):
        """Make sure the robot is booted and ready to receive commands."""
        mode_client = actionlib.SimpleActionClient("/ur_hardware_interface/set_mode", SetModeAction)
        timeout = rospy.Duration(30)
        try:
            mode_client.wait_for_server(timeout)
        except rospy.exceptions.ROSException as err:
            self.fail(
                "Could not reach set_mode action. Make sure that the driver is actually running."
                " Msg: {}".format(err)
            )
        goal = SetModeGoal()
        goal.target_robot_mode = RobotMode.RUNNING
        goal.play_program = False  # we use headless mode during tests
        mode_client.send_goal(goal)
        mode_client.wait_for_result()
        self.assertTrue(mode_client.get_result().success)
        send_program_srv = rospy.ServiceProxy(
            "/ur_hardware_interface/resend_robot_program", Trigger
        )
        send_program_srv.call()
        # Give the controller time to start the external-control program.
        rospy.sleep(5)

    def test_trajectory(self):
        """Test robot movement: a three-point trajectory must SUCCEED."""
        goal = FollowJointTrajectoryGoal()
        goal.trajectory.joint_names = [
            "elbow_joint",
            "shoulder_lift_joint",
            "shoulder_pan_joint",
            "wrist_1_joint",
            "wrist_2_joint",
            "wrist_3_joint",
        ]
        position_list = [[0.0 for i in range(6)]]
        position_list.append([-0.5 for i in range(6)])
        position_list.append([-1.0 for i in range(6)])
        # Monotonically increasing waypoint times => a valid trajectory.
        duration_list = [6.0, 9.0, 12.0]
        for i, position in enumerate(position_list):
            point = JointTrajectoryPoint()
            point.positions = position
            point.time_from_start = rospy.Duration(duration_list[i])
            goal.trajectory.points.append(point)
        rospy.loginfo("Sending simple goal")
        self.client.send_goal(goal)
        self.client.wait_for_result()
        self.assertEqual(
            self.client.get_result().error_code, FollowJointTrajectoryResult.SUCCESSFUL
        )
        rospy.loginfo("Received result SUCCESSFUL")

    def test_illegal_trajectory(self):
        """Test trajectory server."""
        """This is more of a validation test that the testing suite does the right thing."""
        goal = FollowJointTrajectoryGoal()
        goal.trajectory.joint_names = [
            "elbow_joint",
            "shoulder_lift_joint",
            "shoulder_pan_joint",
            "wrist_1_joint",
            "wrist_2_joint",
            "wrist_3_joint",
        ]
        position_list = [[0.0 for i in range(6)]]
        position_list.append([-0.5 for i in range(6)])
        # Create illegal goal by making the second point come earlier than the first
        duration_list = [6.0, 3.0]
        for i, position in enumerate(position_list):
            point = JointTrajectoryPoint()
            point.positions = position
            point.time_from_start = rospy.Duration(duration_list[i])
            goal.trajectory.points.append(point)
        rospy.loginfo("Sending illegal goal")
        self.client.send_goal(goal)
        self.client.wait_for_result()
        # As timings are illegal, we expect the result to be INVALID_GOAL
        self.assertEqual(
            self.client.get_result().error_code, FollowJointTrajectoryResult.INVALID_GOAL
        )
        rospy.loginfo("Received result INVALID_GOAL")

    def test_scaled_trajectory(self):
        """Test robot movement under the speed-scaled controller."""
        goal = FollowJointTrajectoryGoal()
        goal.trajectory.joint_names = [
            "elbow_joint",
            "shoulder_lift_joint",
            "shoulder_pan_joint",
            "wrist_1_joint",
            "wrist_2_joint",
            "wrist_3_joint",
        ]
        position_list = [[0.0 for i in range(6)]]
        position_list.append([-1.0 for i in range(6)])
        # Deliberately tight second segment (0.5 s for a 1 rad move).
        duration_list = [6.0, 6.5]
        for i, position in enumerate(position_list):
            point = JointTrajectoryPoint()
            point.positions = position
            point.time_from_start = rospy.Duration(duration_list[i])
            goal.trajectory.points.append(point)
        rospy.loginfo("Sending scaled goal without time restrictions")
        self.client.send_goal(goal)
        self.client.wait_for_result()
        self.assertEqual(
            self.client.get_result().error_code, FollowJointTrajectoryResult.SUCCESSFUL
        )
        rospy.loginfo("Received result SUCCESSFUL")
        # Now do the same again, but with a goal time constraint: the scaled
        # execution cannot meet the timing, so tolerance must be violated.
        rospy.loginfo("Sending scaled goal with time restrictions")
        goal.goal_time_tolerance = rospy.Duration(0.01)
        self.client.send_goal(goal)
        self.client.wait_for_result()
        self.assertEqual(
            self.client.get_result().error_code, FollowJointTrajectoryResult.GOAL_TOLERANCE_VIOLATED
        )
        rospy.loginfo("Received result GOAL_TOLERANCE_VIOLATED")
if __name__ == "__main__":
    import rostest

    # Run this TestCase under rostest so results integrate with ROS CI.
    rostest.run(PKG, NAME, TrajectoryTest, sys.argv)
| StarcoderdataPython |
68879 | #!/usr/bin/python2.7
import os, sys, socket, json, argparse, threading, csv, getpass,time, ConfigParser
from base64 import b64encode
from Queue import Queue
from Utility.crypto import *
# Queue used to hand packets from the listener thread to the command thread.
q = Queue()
# Per-peer state (address, shared key, session id, ...) keyed by username.
clientdict = {}
# Reverse map: (ip, port) address tuple -> username.
clientusernamedict = {}
def rsa_decrypt(cipher, private_key):
    """Decrypt *cipher* with the RSA *private_key* using OAEP padding.

    Ciphertexts longer than one RSA block (256 bytes, i.e. a 2048-bit
    key) are decrypted 256 bytes at a time and the plaintexts joined.

    :return: the plaintext string, or -1 on any decryption failure —
             callers (e.g. set_session_with_client, receivingdata)
             already compare the result against -1.
    """
    try:
        if len(cipher) > 256:
            x = len(cipher)/256
            plain = ''
            for i in range(1, x+1):
                ciphertext = cipher[(i-1)*256:i*256]
                plaintext = private_key.decrypt(ciphertext, padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA512()), algorithm=hashes.SHA256(), label=None))
                plain += str(plaintext)
            return plain
        plaintext = private_key.decrypt(cipher, padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA512()), algorithm=hashes.SHA256(), label=None))
        return plaintext
    except Exception:
        # Bug fix: the original could never return the -1 its callers
        # test for; a bad ciphertext raised instead of signalling failure.
        return -1
def authenticate_with_server(udpSocket,server,username,W,server_public_key):
#print "sending HI request!!"
data_to_send = {'Request': 'Hi'}
udpSocket.sendto(json.dumps(data_to_send).encode(), server)
session_id = -1
while True:
receivedData,server_address = udpSocket.recvfrom(4096)
data = json.loads(receivedData.rstrip().decode())
if data.get('ResponseTo') == 'Hi' :
bhash = data.get('Body')[0:90]
part_puzzle = str(data.get('Body')[90:])
for i in range(0,999):
puzzle = part_puzzle + str(i)
if bhash == generate_hash(puzzle):
break
else :
puzzle = -1
if puzzle == -1:
return 0,0,0,0
aeskey = os.urandom(32)
body = {'aeskey:'}
data_to_send = {'Request':'PuzzleResponse','puzzle':puzzle,'Header':'1',
'Body':str(rsa_encrypt(str(aeskey+username),server_public_key).encode('base-64'))}
udpSocket.sendto(json.dumps(data_to_send).encode(), server)
elif data.get('ResponseTo') == 'PuzzleResponse' :
cipher = data.get('Body').decode('base-64')
iv = cipher[0:16]
tag = cipher[16:32]
associated_data = cipher[32:48]
ciphertext = cipher[48:]
response = aes_decrypt(aeskey, associated_data, iv, ciphertext, tag)
if response == -1:
print style[1]+"Problem Detected.\nExiting!!!"+ style[0]
os._exit(0)
response = json.loads(response.decode('base-64'))
g = int(response.get('g'))
p = int(response.get('p'))
session_id = int(response.get('session_id'))
a = int(binascii.hexlify(os.urandom(40)),16)
gPowerA = pow(g,a,p)
plaintext = json.dumps({'gPowerA': gPowerA}).encode('base-64')
associated_data = os.urandom(16)
data_to_send = {'Request':'Session','session_id':generate_hash(str(session_id)),'Header':'1',
'Body':str(aes_encrypt(aeskey, str(plaintext), associated_data).encode('base-64'))}
udpSocket.sendto(json.dumps(data_to_send).encode(), server)
elif data.get('ResponseTo') == 'Session' :
if int(session_id):
if data.get('Header') == '1':
cipher = data.get('Body').decode('base-64')
iv = cipher[0:16]
tag = cipher[16:32]
associated_data = cipher[32:48]
ciphertext = cipher[48:]
response = aes_decrypt(aeskey, associated_data, iv, ciphertext, tag)
if response == -1:
print style[1]+"Problem Detected.\nExiting!!!"+ style[0]
os._exit(0)
response = json.loads(response.decode('base-64'))
if plaintext != -1:
if session_id == response.get('session_id'):
U = int(response.get('U'))
C1 = int(response.get('C1'))
SRP = int(response.get('SRP'))
W = int(binascii.hexlify(W),16)
gPowerB = SRP - pow(g,U*W,p)
C2 = int(binascii.hexlify(os.urandom(16)),16)
client_server_key = pow(gPowerB,a,p)*pow(gPowerB,U*W,p)
client_server_session_key = generate_hash(str(client_server_key))[0:32]
own_private_key,own_public_key = generate_rsa_keys()
own_public_key_pem = own_public_key.public_bytes(encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo)
plaintext = json.dumps({'response_to_C1': C1-1,'C2':C2,'client_public_key_pem':own_public_key_pem}).encode('base-64')
associated_data = os.urandom(16)
data_to_send = {'Request':'Session','session_id':generate_hash(str(session_id)),'Header':'2',
'Body':str(aes_encrypt(client_server_session_key[0:32], str(plaintext), associated_data).encode('base-64'))}
udpSocket.sendto(json.dumps(data_to_send).encode(), server)
else:
return 0,0,0,0
else :
return 0,0,0,0
elif data.get('Header') == '2':
cipher = data.get('Body').decode('base-64')
iv = cipher[0:16]
tag = cipher[16:32]
associated_data = cipher[32:48]
ciphertext = cipher[48:]
response = aes_decrypt(client_server_session_key[0:32], associated_data, iv, ciphertext, tag)
if response == -1:
print style[1]+"Problem Detected.\nExiting!!!"+ style[0]
os._exit(0)
response = json.loads(response.decode('base-64'))
if plaintext!= -1:
if session_id == response.get('session_id'):
associated_data = os.urandom(16)
response_to_C2 = response.get('response_to_C2')
if response_to_C2 != C2-1:
print style[1] + "Error While Authetication" + style_default
plaintext = "Authentication Failure!"
associated_data = os.urandom(16)
data_to_send = {'ResponseTo':'Error','session_id':session_id,'Header':'1',
'Body':str(aes_encrypt(aeskey, plaintext, associated_data).encode('base-64'))}
return 0,0,0,0,0,0,0,0
return session_id,client_server_session_key[0:32],own_private_key,own_public_key
else :
return 0,0,0,0
elif data.get('ResponseTo') == 'Error':
cipher = data.get('Body').decode('base-64')
iv = cipher[0:16]
tag = cipher[16:32]
associated_data = cipher[32:48]
ciphertext = cipher[48:]
plaintext = aes_decrypt(aeskey, associated_data, iv, ciphertext, tag)
if plaintext == -1:
print style[1]+"Problem Detected.\nExiting!!!"+ style[0]
os._exit(0)
return 0,0,0,0
def set_session_with_client(client_username, username, udpSocket, own_private_key):
    """Establish a peer-to-peer session key with another client.

    Sends a DH offer (g^a) plus challenge C1, RSA-encrypted under the
    peer's public key (previously obtained from the server), then waits
    for the peer's reply on the shared queue ``q``.

    :return: 1 on success (shared key stored in ``clientdict``), -1 on
             timeout or authentication failure.
    """
    client_address = clientdict.get(client_username).get('client_address')
    client_public_key_pem = clientdict.get(client_username).get('client_public_key_pem')
    client_public_key = serialization.load_pem_public_key(str(client_public_key_pem), backend=default_backend())
    C1 = int(binascii.hexlify(os.urandom(16)), 16)
    a = int(binascii.hexlify(os.urandom(40)), 16)
    g = clientdict.get(client_username).get('g')
    p = clientdict.get(client_username).get('p')
    gPowerA = pow(g, a, p)
    plaintext = json.dumps({'username': username, 'C1': C1, 'gPowerA': gPowerA})
    data_to_send = {'Request': 'Client', 'Header': '1',
        'Body': str(rsa_encrypt(str(plaintext), client_public_key).encode('base-64'))}
    udpSocket.sendto(json.dumps(data_to_send).encode(), client_address)
    # Poll the queue with exponential backoff: 0.125 s doubling to 1 s.
    # NOTE(review): wait_time reaching exactly 1 is treated as a timeout
    # even if a reply arrived on the final wait — confirm intended.
    wait_time = 0.125
    time.sleep(wait_time)
    while (q.empty() and wait_time < 1):
        wait_time = wait_time*2
        time.sleep(wait_time)
        pass
    if wait_time == 1:
        return -1
    recievedData = q.get()
    cipher = rsa_decrypt(str(recievedData), own_private_key)
    if cipher != -1:
        data = json.loads(cipher)
        if client_username == data.get('username'):
            # Peer's reply: C1 answered under the new shared key, plus
            # its DH value g^b, its challenge C2 and a session id.
            response_to_C1 = data.get('C1').decode('base-64')
            iv = response_to_C1[0:16]
            tag = response_to_C1[16:32]
            associated_data = response_to_C1[32:48]
            ciphertext = str(response_to_C1[48:])
            C2 = data.get('C2')
            gPowerB = data.get('gPowerB')
            # Shared key = first 32 decimal digits of g^(ab) mod p.
            sharedKey = str(pow(gPowerB, a, p))[0:32]
            if (C1-1) == int(aes_decrypt(sharedKey, associated_data, iv, ciphertext, tag)):
                clientdict.update({client_username: {'sharedKey': sharedKey, 'client_address': client_address,
                    'client_session_id': data.get('client_session_id')}})
                # Answer the peer's challenge C2 to complete the handshake.
                data_to_send = {'Request': 'Client', 'Header': '3', 'client_session_id': generate_hash(str(data.get('client_session_id'))),
                    'Body': aes_encrypt(sharedKey, str(C2-1), associated_data).encode('base-64')}
                udpSocket.sendto(json.dumps(data_to_send).encode(), client_address)
                return 1
            else:
                print style[1] + "Authetication failed with client" + style_default
                return -1
        else:
            print style[1] + "Authetication failed with client" + style_default
            return -1
    else:
        return -1
def command_interface(udpSocket, server, session_id, client_server_session_key, own_private_key, username):
    """Interactive command loop: 'list', 'send <user> <msg>' and 'quit'.

    Runs on its own thread; server replies arrive via the listener thread,
    which pushes them onto the module-level queue ``q``.  Each wait loop
    below polls ``q`` with exponential backoff (0.125 s doubling to 1 s)
    and treats a full second of silence as a server timeout.
    """
    print style[0] + "Ready to chat :)" + style_default
    while True:
        try:
            message = raw_input(style[0])
            # take user input
            message = message.split()
            # If the user requests LIST then request to the server
            if message[0] == 'list':
                plaintext = 'list'
                associated_data = os.urandom(16)
                data_to_send = {'Request': 'Info', 'session_id': generate_hash(str(session_id)), 'Header': '1',
                    'Body': str(aes_encrypt(client_server_session_key, str(plaintext), associated_data).encode('base-64'))}
                udpSocket.sendto(json.dumps(data_to_send).encode(), server)
                # Wait for the listener thread to queue the reply.
                wait_time = 0.125
                time.sleep(wait_time)
                while (q.empty() and wait_time < 1):
                    wait_time = wait_time*2
                    time.sleep(wait_time)
                    pass
                if wait_time == 1:
                    print style[1]+"Unable to connect to the server.\nPlease quit and try again later!"
                    continue
                recievedData = q.get()
                server_data = json.loads(recievedData)
                # print the list of Users signed in the server
                # Encrypted-body layout: iv | tag | aad | ciphertext.
                cipher = server_data.get('Body').decode('base-64')
                iv = cipher[0:16]
                tag = cipher[16:32]
                associated_data = cipher[32:48]
                ciphertext = cipher[48:]
                response = aes_decrypt(client_server_session_key[0:32], associated_data, iv, ciphertext, tag)
                if response == -1:
                    print style[1]+"Problem Detected.\nExiting!!!"+ style[0]
                    os._exit(0)
                response = json.loads(response)
                if session_id == response.get('session_id'):
                    print style[4]+"List of online users -"
                    count = 0
                    for i in response.get('list'):
                        count += 1
                        if i == username:
                            # Highlight our own entry in a different style.
                            print style[0]+str(count)+ ". "+ i+style[4]
                            pass
                        else:
                            print str(count)+". "+ i
                else:
                    print style[1] + "Error" + style_default
            elif message[0] == 'send':
                try:
                    if len(message) > 2:
                        plaintext = 'send' + ' ' + str(message[1])
                        if message[1] == username:
                            # Message to ourselves: just echo it locally.
                            print style[0] + "Me" + " : " + ''.join(message[2:]) + style_default
                        elif message[1] in clientdict:
                            if clientdict.get(message[1]).has_key('sharedKey'):
                                # Session already established: encrypt and send directly.
                                sharedKey = clientdict.get(message[1]).get('sharedKey')
                                plaintext = " ".join(message[2:])
                                client_session_id = clientdict.get(message[1]).get('client_session_id')
                                associated_data = os.urandom(16)
                                data_to_send = {'Request': 'Data', 'Header': '1', 'client_session_id': generate_hash(str(client_session_id)),
                                    'Body': str(aes_encrypt(sharedKey, str(plaintext), associated_data).encode('base-64'))}
                                udpSocket.sendto(json.dumps(data_to_send).encode(), tuple(clientdict.get(message[1]).get('client_address')))
                            else:
                                # Known peer but no session yet: handshake first.
                                value = set_session_with_client(message[1], username, udpSocket, own_private_key)
                                if value == 1:
                                    client_session_id = clientdict.get(message[1]).get('client_session_id')
                                    sharedKey = clientdict.get(message[1]).get('sharedKey')
                                    plaintext = " ".join(message[2:])
                                    associated_data = os.urandom(16)
                                    data_to_send = {'Request': 'Data', 'Header': '1', 'client_session_id': generate_hash(str(client_session_id)),
                                        'Body': str(aes_encrypt(sharedKey, str(plaintext), associated_data).encode('base-64'))}
                                    udpSocket.sendto(json.dumps(data_to_send).encode(), tuple(clientdict.get(message[1]).get('client_address')))
                        else:
                            # Unknown peer: ask the server for their address/key.
                            associated_data = os.urandom(16)
                            data_to_send = {'Request': 'Info', 'session_id': generate_hash(str(session_id)), 'Header': '2',
                                'Body': str(aes_encrypt(client_server_session_key, str(plaintext), associated_data).encode('base-64'))}
                            udpSocket.sendto(json.dumps(data_to_send).encode(), server)
                            # wait as the other thread recieves the data and sets the recivedData variable
                            wait_time = 0.125
                            time.sleep(wait_time)
                            while (q.empty() and wait_time < 1):
                                wait_time = wait_time*2
                                time.sleep(wait_time)
                                pass
                            if wait_time == 1:
                                print style[1]+"Unable to connect to the server.\nPlease quit and try again later!"
                                continue
                            recievedData = q.get()
                            if str(recievedData) == "User Doesn't Exists":
                                print style[1] + recievedData + style_default
                            elif str(recievedData) == "User is not Online":
                                print style[1] + recievedData + style_default
                            else:
                                # Server sent the peer's details; cache them,
                                # handshake, then send the message.
                                clientdict.update(recievedData)
                                value = set_session_with_client(message[1], username, udpSocket, own_private_key)
                                if value == 1:
                                    client_session_id = clientdict.get(message[1]).get('client_session_id')
                                    sharedKey = clientdict.get(message[1]).get('sharedKey')
                                    plaintext = " ".join(message[2:])
                                    associated_data = os.urandom(16)
                                    data_to_send = {'Request': 'Data', 'Header': '1', 'client_session_id': generate_hash(str(client_session_id)),
                                        'Body': str(aes_encrypt(sharedKey, str(plaintext), associated_data).encode('base-64'))}
                                    udpSocket.sendto(json.dumps(data_to_send).encode(), tuple(clientdict.get(message[1]).get('client_address')))
                                else:
                                    print style[1] + "Unable to send data to the user" + style_default
                    else:
                        print style[1] + "usage of send: <send username message>" + style[0]
                except:
                    print style[1] + "usage of send: <send username message>" + style[0]
            elif message[0] == 'quit':
                # Tell the server we are logging out, then wait for its ack.
                plaintext = 'logout'
                associated_data = os.urandom(16)
                data_to_send = {'Request': 'Info', 'session_id': generate_hash(str(session_id)), 'Header': '3',
                    'Body': str(aes_encrypt(client_server_session_key, str(plaintext), associated_data).encode('base-64'))}
                udpSocket.sendto(json.dumps(data_to_send).encode(), server)
                wait_time = 0.125
                time.sleep(wait_time)
                while (q.empty() and wait_time < 1):
                    wait_time = wait_time*2
                    time.sleep(wait_time)
                    pass
                if wait_time == 1:
                    print style[1]+"Unable to connect to the server.\nSo logging you out of the system!"
                    os._exit(0)
                response = q.get()
                if response == 'OK':
                    print "Logging out from the system"
                    os._exit(0)
            else:
                # when unknown command is received
                print "Sorry no such command found"
                print "Commands available - list, send, quit "
        except:
            # Swallow input errors (e.g. empty line) and keep the loop alive.
            pass
    # Unreachable: the loop above has no break; exit is via os._exit().
    print "BYE!!!"
    udpSocket.close()
#This function receives the data from other clients and server
#Also the passed the data to the main thread
def receivingdata(udpSocket, server, session_id, client_server_session_key, own_private_key, username):
    """Listener thread: receive and dispatch all incoming UDP packets.

    Packets from the server ('ResponseTo' == 'Info') carry numbered
    headers: 1 = user list, 2/3 = peer details, 4/6 = generic replies,
    5 = peer-logout notification, 7 = duplicate-session kick.  Packets
    from peers ('Request' == 'Client'/'Data') drive the peer handshake
    and chat messages.  Replies the command thread is waiting on are
    forwarded through the module-level queue ``q``.
    """
    while True:
        # receive the data
        server_data, server_address = udpSocket.recvfrom(4096)
        # if the data was sent by the main server then hand it to the main thread
        if server_address == server:
            data = json.loads(server_data.rstrip().decode())
            if data.get('ResponseTo') == 'Info':
                if data.get('Header') == '1':
                    # User-list reply: forward raw for the command thread.
                    q.put(server_data.rstrip().decode())
                elif data.get('Header') == '2':
                    # Peer details requested by a 'send': decrypt and queue.
                    # Encrypted-body layout: iv | tag | aad | ciphertext.
                    cipher = data.get('Body').decode('base-64')
                    iv = cipher[0:16]
                    tag = cipher[16:32]
                    associated_data = cipher[32:48]
                    ciphertext = cipher[48:]
                    # NOTE(review): on decrypt failure aes_decrypt returns -1,
                    # and json.loads(-1) would raise before the == -1 test —
                    # confirm aes_decrypt's failure contract.
                    response = json.loads(aes_decrypt(client_server_session_key[0:16], associated_data, iv, ciphertext, tag)) if False else json.loads(aes_decrypt(client_server_session_key[0:32], associated_data, iv, ciphertext, tag))
                    if response == -1:
                        continue
                    client_address = tuple(response.get('client_address'))
                    client_username = response.get('client_username')
                    client_public_key_pem = response.get('client_public_key_pem')
                    g = response.get('g')
                    p = response.get('p')
                    data = {client_username: {'client_address': client_address, 'g': g, 'p': p, 'client_public_key_pem': client_public_key_pem}}
                    clientusernamedict.update({client_address: client_username})
                    q.put(data)
                elif data.get('Header') == '3':
                    # Unsolicited peer details: cache directly (no queue).
                    cipher = data.get('Body').decode('base-64')
                    iv = cipher[0:16]
                    tag = cipher[16:32]
                    associated_data = cipher[32:48]
                    ciphertext = cipher[48:]
                    response = json.loads(aes_decrypt(client_server_session_key[0:32], associated_data, iv, ciphertext, tag))
                    if response == -1:
                        continue
                    client_address = tuple(response.get('client_address'))
                    client_username = response.get('client_username')
                    client_public_key_pem = response.get('client_public_key_pem')
                    g = response.get('g')
                    p = response.get('p')
                    data = {client_username: {'client_address': client_address, 'g': g, 'p': p, 'client_public_key_pem': client_public_key_pem}}
                    clientdict.update(data)
                    clientusernamedict.update({client_address: client_username})
                elif data.get('Header') == '4':
                    cipher = data.get('Body').decode('base-64')
                    iv = cipher[0:16]
                    tag = cipher[16:32]
                    associated_data = cipher[32:48]
                    ciphertext = cipher[48:]
                    response = aes_decrypt(client_server_session_key, associated_data, iv, ciphertext, tag)
                    # NOTE(review): compares against the STRING '-1' here,
                    # while every other branch compares against int -1.
                    if response == '-1':
                        continue
                    q.put(response)
                elif data.get('Header') == '5':
                    # Logout broadcast: drop the peer's cached session.
                    cipher = data.get('Body').decode('base-64')
                    iv = cipher[0:16]
                    tag = cipher[16:32]
                    associated_data = cipher[32:48]
                    ciphertext = cipher[48:]
                    response = aes_decrypt(client_server_session_key, associated_data, iv, ciphertext, tag)
                    if response == -1:
                        continue
                    response = json.loads(response)
                    if response.get('message') == 'logout':
                        if clientdict.has_key(response.get('username')):
                            del clientdict[response.get('username')]
                        print style[1] + "User "+ response.get('username') + " is now offline" + style[0]
                    else:
                        pass
                elif data.get('Header') == '6':
                    cipher = data.get('Body').decode('base-64')
                    iv = cipher[0:16]
                    tag = cipher[16:32]
                    associated_data = cipher[32:48]
                    ciphertext = cipher[48:]
                    response = aes_decrypt(client_server_session_key, associated_data, iv, ciphertext, tag).rstrip()
                    if response == -1:
                        continue
                    q.put(response)
                elif data.get('Header') == '7':
                    # Server detected this account logged in elsewhere.
                    cipher = data.get('Body').decode('base-64')
                    iv = cipher[0:16]
                    tag = cipher[16:32]
                    associated_data = cipher[32:48]
                    ciphertext = cipher[48:]
                    response = aes_decrypt(client_server_session_key, associated_data, iv, ciphertext, tag)
                    if response == -1:
                        continue
                    if response == "duplicate session":
                        print "Logging you out as duplicate session exists!"
                        os._exit(0)
        else:
            # Packet from a peer client.
            client_address = server_address
            data = json.loads(server_data.rstrip().decode())
            if data.get('Request') == 'Client':
                if data.get('Header') == '1':
                    # Peer initiates a session: answer C1, send g^b and our
                    # own challenge C2 plus a fresh session id.
                    decrypt = rsa_decrypt(str(data.get('Body').decode('base-64')), own_private_key)
                    if decrypt == -1:
                        continue
                    data = json.loads(decrypt)
                    if clientdict.has_key(data.get('username')):
                        client_username = data.get('username')
                        C1 = int(data.get('C1'))
                        g = clientdict.get(client_username).get('g')
                        p = clientdict.get(client_username).get('p')
                        gPowerA = data.get('gPowerA')
                        b = int(binascii.hexlify(os.urandom(40)), 16)
                        gPowerB = pow(g, b, p)
                        # Shared key = first 32 decimal digits of g^(ab) mod p.
                        sharedKey = str(pow(gPowerA, b, p))[0:32]
                        associated_data = os.urandom(16)
                        C2 = int(binascii.hexlify(os.urandom(16)), 16)
                        client_session_id = binascii.hexlify(os.urandom(16))
                        encrypted_C1 = aes_encrypt(sharedKey, str(C1-1), associated_data).encode('base-64')
                        message = json.dumps({'username': username, 'C1': encrypted_C1, 'gPowerB': gPowerB, 'C2': C2, 'client_session_id': client_session_id})
                        client_public_key_pem = clientdict.get(client_username).get('client_public_key_pem')
                        client_public_key = serialization.load_pem_public_key(str(client_public_key_pem), backend=default_backend())
                        data_to_send = {'Request': 'Client', 'Header': '2',
                            'Body': str(rsa_encrypt(message, client_public_key).encode('base-64'))}
                        client_address = clientdict.get(client_username).get('client_address')
                        udpSocket.sendto(json.dumps(data_to_send).encode(), client_address)
                        clientdict.update({client_username: {'client_public_key': client_public_key,
                            'sharedKey': sharedKey, 'client_address': client_address, 'client_session_id': client_session_id}})
                        clientusernamedict.update({client_address: client_username})
                elif data.get('Header') == '2':
                    # Peer's handshake reply: forward to the thread running
                    # set_session_with_client().
                    cipher = data.get('Body').decode('base-64')
                    q.put(cipher)
                elif data.get('Header') == '3':
                    # Peer answered OUR challenge C2 (set when we handled
                    # its Header-'1' packet in an earlier loop iteration).
                    response_client_session_id = data.get('client_session_id')
                    client_username = clientusernamedict.get(client_address)
                    if clientdict.get(client_username).has_key('client_session_id'):
                        if generate_hash(clientdict.get(client_username).get('client_session_id').rstrip()) == response_client_session_id:
                            cipher = data.get('Body').decode('base-64')
                            iv = cipher[0:16]
                            tag = cipher[16:32]
                            associated_data = cipher[32:48]
                            ciphertext = cipher[48:]
                            sharedKey = clientdict.get(client_username).get('sharedKey')
                            client_session_id = clientdict.get(client_username).get('client_session_id')
                            response_to_C2 = aes_decrypt(sharedKey, associated_data, iv, ciphertext, tag)
                            if response_to_C2 == -1:
                                continue
                            if int(C2-1) == int(response_to_C2):
                                clientdict.update({client_username: {'sharedKey': sharedKey, 'client_address': client_address,
                                    'client_session_id': client_session_id}})
                            else:
                                print "Authentication Failed!"
                                del clientdict[client_username]
                        else:
                            print "Authentication Failed!"
                            del clientdict[client_username]
            elif data.get('Request') == 'Data':
                # Chat message from an established peer session.
                client_username = clientusernamedict.get(client_address)
                response_client_session_id = data.get('client_session_id')
                if generate_hash(str(clientdict.get(client_username).get('client_session_id'))) == response_client_session_id:
                    cipher = data.get('Body').decode('base-64')
                    iv = cipher[0:16]
                    tag = cipher[16:32]
                    associated_data = cipher[32:48]
                    ciphertext = cipher[48:]
                    sharedKey = clientdict.get(client_username).get('sharedKey')
                    data = aes_decrypt(sharedKey, associated_data, iv, ciphertext, tag)
                    if data == -1:
                        continue
                    print style[5] + client_username + " : " + str(data) + style[0]
                else:
                    pass
def create_socket():
    """Create and return an IPv4 UDP socket; abort the process on failure."""
    #Handle any exception generated while creating a socket
    try:
        udpSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        #Allow quick re-use of a recently used local address.
        #(Note: SO_REUSEADDR does NOT make the socket non-blocking.)
        udpSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        #No bind() is performed here: the OS assigns an ephemeral local
        #port on the first send.
        return udpSocket
    except socket.error, msg:
        #Display the error and abort hard; os._exit skips any cleanup handlers.
        print "Error Code : " + str(msg[0]) + " " + str(msg[1])
        os._exit(0)
#This is the main function: load config, authenticate, then chat via two threads.
def main():
    """Read client.cfg, authenticate with the server, then start the
    receive and command-input daemon threads until both finish."""
    config = ConfigParser.RawConfigParser()
    config.read('Client/client.cfg')
    server_public_key_file = config.get('server_keys', 'public_key')
    server = config.get('server_address','ip_address'), config.getint('server_address', 'port')
    os.system('cls' if os.name == 'nt' else 'clear')
    #create a udp socket
    udpSocket = create_socket()
    server_public_key = load_public_key(server_public_key_file)
    username = raw_input('Username:')
    #only a hash of the password is kept in memory / sent onward
    password = generate_hash(getpass.getpass('Password:'))
    session_id,client_server_session_key,own_private_key,own_public_key = authenticate_with_server(udpSocket,
                                server,username,password,server_public_key)
    #a session_id of 0 is treated as an authentication failure
    if int(session_id) == 0:
        print "Error while Authentication!!"
        os._exit(0)
    os.system('cls' if os.name == 'nt' else 'clear')
    #daemon thread handling incoming datagrams (see receivingdata above)
    manage_data = threading.Thread(target=receivingdata, args=(udpSocket,server,
                                session_id,client_server_session_key,own_private_key,username))
    manage_data.setDaemon(True)
    manage_data.start()
    #daemon thread reading user commands and sending them to the server
    manage_input = threading.Thread(target=command_interface, args=(udpSocket,server,session_id,
                                client_server_session_key,own_private_key,username))
    manage_input.setDaemon(True)
    manage_input.start()
    manage_input.join()
    manage_data.join()
#the main boilerplate: run the client until the user interrupts it
if __name__=='__main__':
    #to handle keyboard exceptions (Ctrl-C) with a clean goodbye message
    try:
        main()
    except KeyboardInterrupt:
        print "Exiting the program...\nBYE!!!" | StarcoderdataPython |
1767708 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created by pat on 4/4/18
"""
.. currentmodule:: base
.. moduleauthor:: <NAME> <<EMAIL>>
The GeoAlchemy declarative base for the data model is defined in this module
along with some other helpful classes.
"""
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import String, DateTime
from .geometry import GeometryTypes
from .meta import column, ColumnMeta, Requirement
from .types import GUID
Base = declarative_base() #: This is the model's declarative base. pylint: disable=invalid-name
class ModelMixin(object):
    """
    This mixin includes the columns and methods common to objects within
    the data model.  It is the parent class for all entity classes in the
    model and defines their common fields.  Subclasses are expected to
    provide a geometry column whose attribute name matches ``__geoattr__``.
    """
    __geoattr__ = 'geometry'  #: the name of the geometry column attribute
    # NOTE: a second, misplaced bare-string class docstring used to sit
    # here; its text has been folded into the class docstring above.

    # Primary key assigned by GeoComm.
    gcUnqId = column(
        GUID,
        meta=ColumnMeta(
            label='GeoComm ID',
            guaranteed=True,
            calculated=True
        ),
        primary_key=True
    )
    # Free-form description of where the record's data came from.
    srcOfData = column(
        String,
        ColumnMeta(
            label='Data Source'
        )
    )
    # Timestamp of the last edit in the source system.
    srcLastEd = column(
        DateTime,
        ColumnMeta(
            label='Source of Last Update'
        )
    )
    uploadAuth = column(
        String,
        ColumnMeta(
            label='Upload Authority'
        )
    )
    updateDate = column(
        DateTime,
        ColumnMeta(
            label='Last Update'
        )
    )
    effective = column(
        DateTime,
        ColumnMeta(
            label='Effective Date',
            requirement=Requirement.REQUESTED
        )
    )
    expire = column(
        DateTime,
        ColumnMeta(
            label='Expiration Date',
            requirement=Requirement.REQUESTED
        )
    )
    # NENA-style unique identifier carried over from the source.
    srcUnqId = column(
        String,
        ColumnMeta(
            label='NENA ID',
            nena='RCL_NGUID',
            requirement=Requirement.REQUESTED
        )
    )

    @classmethod
    def geometry_type(cls) -> GeometryTypes:
        """
        Get the geometry type defined for the model class.

        :return: the geometry type, or ``GeometryTypes.NONE`` when the
            geometry column is missing or its type string is not one of
            the supported :class:`GeometryTypes` members
        """
        try:
            # Get the string that identifies the geometry type.
            gt_str = cls.__table__.c[cls.__geoattr__].type.geometry_type
            # The string should correspond to one of the supported types.
            gtyp = GeometryTypes[gt_str]
            # Return that value.
            return gtyp
        except KeyError:
            # Raised either by the column lookup or by the enum lookup.
            return GeometryTypes.NONE
| StarcoderdataPython |
1602309 | <filename>segundoModulo/PYTHON - DODO/0.3_list-f-c-retorno-s-parametros(TERMINADO)/6.py
def parImpar2(N=None):
    """Return the string 'True' if N is even, 'False' if it is odd.

    When *N* is omitted the value is read interactively (the original
    behaviour); passing *N* explicitly makes the function testable and
    reusable without console input.
    """
    if N is None:
        N = int(input('Informe um valor: '))
    if N % 2:
        # Odd remainder -> not even.
        return 'False'
    else:
        return 'True'
def main():
    """Print whether the user's number is even ('True') or odd ('False')."""
    verdict = parImpar2()
    print(verdict)


main()
| StarcoderdataPython |
1679533 | <filename>main.py
#!user/bin/env python3.7
# coding:utf-8
# author:wanghongzhang
# email:<EMAIL>
# time: 2018/10/29
import sys
import datetime
from PyQt5.QtWidgets import QApplication,QDialog
from PyQt5.uic.properties import QtCore
from PyQt5 import QtCore, QtGui, QtWidgets
from register_dia import Ui_Regist
from login_dia import Ui_Login
from forget_dia import Ui_Forget_Password
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow, QDialog
from user_ui_main import Ui_User_UI
from personal_dia import Ui_Personal_information_dialog
from history_dia import Ui_History_stop
from modify_username_dia import Ui_Modify_username_dia
from modify_password_dia import Ui_Modify_password_dialog
from xialatest import Ui_Dialog
class Login(Ui_Login):
    """Login dialog built on the generated Ui_Login form.

    The main window, registration dialog and forgot-password dialog are
    injected so this class only has to wire its buttons to them.
    """

    def __init__(self, main, regist, forget_password):
        super(Login, self).__init__()
        self.main = main
        self.regist = regist
        self.forget_password = forget_password
        self.dialog = QDialog()
        self.setupUi(self.dialog)
        self.dialog.show()
        self.display()
        self.click_btn()

    def login_function(self):
        """Read the credentials and switch to the main window.

        NOTE(review): the server-side check (car_client.login) is still
        commented out, so any input currently "logs in" -- confirm.
        """
        user_name = self.login_username_lineEdit.text()
        pass_word = self.login_password_lineEdit.text()
        # Fixed: the old debug print echoed the raw password to stdout;
        # never log credentials.
        # result = self.car_client.login(account=user_name, password=pass_word)
        # self.login_tooltip_label.setText(result)
        self.main.show()
        self.dialog.close()

    def display(self):
        # Wire the login and "register" buttons.
        self.login_login_btn.clicked.connect(self.login_function)
        self.login_regist_btn.clicked.connect(self.regist.show)

    def click_btn(self):
        # Wire the "forgot password" button.
        self.login_forget_password_btn.clicked.connect(self.forget_password.show)
class Regist(Ui_Regist):
    """Registration dialog wrapper around the generated Ui_Regist form."""

    def __init__(self):
        super(Regist, self).__init__()
        self.dialog = QDialog()
        self.setupUi(self.dialog)
        self.click_button()

    def show(self):
        self.dialog.show()

    def click_button(self):
        ## Close the registration dialog.
        self.register_cancel_button.clicked.connect(self.register_close)
        ## Submit the registration form.
        self.register_enter_button.clicked.connect(self.regist_function)
        ## Request an e-mail verification code.
        self.register_get_verification_code_btn.clicked.connect(self.send_email)

    def regist_function(self):
        """Collect all form fields and submit them for registration.

        NOTE(review): ``self.car_client`` is never assigned in this class,
        so this will raise AttributeError when triggered -- confirm how
        the client object is meant to be injected.
        """
        self.register_tooltip_label.setText('')
        name = self.register_username_edit.text()
        password = self.register_password_edit.text()
        password_agin = self.register_password_agin_edit.text()
        phone_number = self.register_phonenumber_edit.text()
        car_factory = self.register_car_factory_edit.text()
        car_model = self.register_car_model_edit.text()
        car_color = self.register_car_color_edit.text()
        car_plate_number = self.register_plate_number_edit.text()
        email = self.register_email_edit.text()
        verification_code = self.register_verification_code_edit.text()
        L = [name, password, password_agin, phone_number, car_factory,
             car_model, car_color, car_plate_number, email,
             verification_code]
        # Fixed: the old debug print echoed both password fields to
        # stdout; trace only the non-secret fields.
        print([name, phone_number, email])
        login_result = self.car_client.regist(L)
        self.register_tooltip_label.setText(login_result)

    def register_close(self):
        self.dialog.close()

    def send_email(self):
        """Ask the backend to mail a verification code to the given address."""
        my_email = self.register_email_edit.text()
        self.car_client.send_email(my_email)
class Forget_Password(Ui_Forget_Password):
    """Forgot-password dialog: e-mails a verification code to the user.

    NOTE(review): ``self.car_client`` is never assigned in this class, so
    get_verification_code() will raise AttributeError when the button is
    clicked -- confirm how the client object is meant to be injected.
    """
    def __init__(self):
        super(Forget_Password, self).__init__()
        self.dialog = QDialog()
        self.setupUi(self.dialog)
        self.click_button()
    def show(self):
        self.dialog.show()
    def click_button(self):
        ## Wire the cancel and "get verification code" buttons.
        self.forget_cancle_btn.clicked.connect(self.close)
        self.forget_get_verification_btn.clicked.connect(self.get_verification_code)
    def close(self):
        self.dialog.close()
    def get_verification_code(self):
        """Validate the e-mail field and request a verification code."""
        login_account = self.forget_username_edit.text()
        login_email = self.forget_email_edit.text()
        # Check that an e-mail was entered and looks well formed.
        if login_email:
            self.forget_tooltip_label.setText("")
            judge_result = self.judge_email(login_email)
            if judge_result:
                self.forget_tooltip_label.setText("验证码已发送,请耐心等待...")
            else:
                self.forget_tooltip_label.setText("请输入合法邮箱")
        else:
            self.forget_tooltip_label.setText("邮箱不能为空") # prompt when no registered e-mail was entered
        # Hand the account to the client object to trigger the e-mail.
        # NOTE(review): this runs even when the validation above failed.
        self.forget_verification_code = self.car_client.forget(login_account, login_email)
        print(self.forget_verification_code)
        try:
            # A reply that is not a number is an error message for the user.
            if type(int(self.forget_verification_code)) is not int:
                self.forget_tooltip_label.setText("您输入的账号不存在,请检查后再输入")
        except ValueError:
            self.forget_tooltip_label.setText(self.forget_verification_code)
    def judge_email(self, login_email):
        """Return True only when *login_email* entirely matches a simple e-mail regex."""
        import re
        pettern = r"\w+@\w+\.[a-z]+"
        L_result = re.findall(pettern, login_email)
        print(L_result)
        if L_result:
            if len(login_email) == len(L_result[0]):
                return True
            return False
        # NOTE(review): implicitly returns None (falsy) when nothing matched.
#=======================================================================
class Main(Ui_User_UI):
    """Main window: hosts the calendar and opens the injected child dialogs."""
    def __init__(self, personal, history, modify_uname, modify_password):
        super(Main, self).__init__()
        # Child dialogs are injected, not constructed here.
        self.personal = personal
        self.history = history
        self.modify_uname = modify_uname
        self.modify_password = modify_password
        self.mainwindow = QMainWindow()
        self.setupUi(self.mainwindow)
        self.mainwindow.show()
        self.click()
        self.update_time()
    def show(self):
        self.mainwindow.show()
    def update_time(self):
        """Select today's date in the calendar widget."""
        date = datetime.date.today()
        self.calendar_widget.setSelectedDate(QtCore.QDate(int(date.year),
                                                          int(date.month), int(date.day)))
    def click(self):
        # Each toolbar button opens its corresponding dialog.
        self.personal_information_btn.clicked.connect(
            self.personal_function)
        self.stop_car_history_btn.clicked.connect(
            self.history_function)
        self.modify_username_btn.clicked.connect(
            self.modify_username_function)
        self.modify_password_btn.clicked.connect(
            self.modify_password_function)
    def personal_function(self):
        self.personal.show()
    def history_function(self):
        self.history.show()
    def modify_username_function(self):
        self.modify_uname.show()
    def modify_password_function(self):
        self.modify_password.show()
class Personal(Ui_Personal_information_dialog):
    """Personal-information dialog wrapper."""
    def __init__(self):
        super(Personal, self).__init__()
        self.dialog = QDialog()
        self.setupUi(self.dialog)
        self.click()
    def show(self):
        self.dialog.show()
    def click(self):
        # Cancel button simply dismisses the dialog.
        self.personal_info_cancle_btn.clicked.connect(self.close)
    def close(self):
        self.dialog.close()
class History(Ui_History_stop):
    """Parking-history dialog wrapper; display only, no button wiring yet."""
    def __init__(self):
        super(History, self).__init__()
        self.dialog = QDialog()
        self.setupUi(self.dialog)
    def show(self):
        self.dialog.show()
class Modify_username(Ui_Modify_username_dia):
    """Change-username dialog wrapper."""
    def __init__(self):
        super(Modify_username, self).__init__()
        self.dialog = QDialog()
        self.setupUi(self.dialog)
        self.click()
    def show(self):
        self.dialog.show()
    def click(self):
        # Cancel button simply dismisses the dialog.
        self.modify_cancle_btn.clicked.connect(self.close)
    def close(self):
        self.dialog.close()
class Modify_password(Ui_Modify_password_dialog):
    """Change-password dialog wrapper."""

    def __init__(self):
        # Fixed: call the base initializer, for consistency with every
        # other dialog wrapper in this module (Login, Regist, Personal,
        # History, Modify_username), which all do.
        super(Modify_password, self).__init__()
        self.dialog = QDialog()
        self.setupUi(self.dialog)
        self.click()

    def show(self):
        self.dialog.show()

    def click(self):
        # Cancel button simply dismisses the dialog.
        self.modify_password_cancle_btn.clicked.connect(self.close)

    def close(self):
        self.dialog.close()
class Xiala(Ui_Dialog):
    """Drop-down test dialog; shows itself immediately on construction."""

    def __init__(self):
        # Fixed: call the base initializer, for consistency with the
        # other dialog wrappers in this module.
        super(Xiala, self).__init__()
        self.dialog = QDialog()
        self.setupUi(self.dialog)
        self.dialog.show()
if __name__ == "__main__":
    app = QApplication(sys.argv)
    # Create the personal-information dialog
    personal = Personal()
    # Create the parking-history dialog
    history = History()
    # Create the change-username dialog
    modify_uname = Modify_username()
    # Create the change-password dialog
    modify_password = Modify_password()
    # Create the main window, handing it the child dialogs
    main = Main(personal, history, modify_uname, modify_password)
    regist = Regist()
    # Fixed: this line had been mangled to "<PASSWORD>get_Password()" by an
    # anonymizer; it must construct the Forget_Password dialog defined above.
    forget_password = Forget_Password()
    login = Login(main, regist, forget_password)
    sys.exit(app.exec_())
| StarcoderdataPython |
114854 | <gh_stars>1-10
# Load one BraTS 2020 T1 slice, preview it, then sample random patches.
path = "E:\\Datasets\\BraTs\\ToCrop\\MICCAI_BraTS2020_TrainingData\\Training_001\\Training_001_t1.png"
from PIL import Image
import numpy as np
# Load the slice and convert it to a numpy array for indexing.
img = Image.open(path)
img = np.array(img)
import matplotlib.pyplot as plt
plt.imshow(img,cmap="gray")
plt.show()
randbuf=[]
import random
# Image centre via integer division (239//2 == 119; assumes a 240-pixel
# slice -- TODO confirm against the actual image size).
x = 239//2
y = 239//2
# Jitter ranges for the patch centre: columns x-7 .. x+9 and rows
# y-33 .. y+35.  NOTE(review): the asymmetric offsets look hand-tuned;
# confirm the intended sampling window.
x_l =x-7
x_r = x+8+1
y_l = y-(68//2 - 1)
y_r = y+(68//2 + 1)
for i in range(15):
    # Pick a random centre (a = row, b = column) and cut a 128x128 patch
    # around it (indices a-63 .. a+64 inclusive).
    a = random.randint(x_l,x_r)
    b = random.randint(y_l,y_r)
    tmp_img = img[a-63:a+64+1,b-63:b+64+1]
    randbuf.append(tmp_img)
# Display every sampled patch.
for item in randbuf:
    plt.imshow(item,cmap="gray")
    plt.show()
| StarcoderdataPython |
1641713 | # -*- coding: utf-8 -*-
"""
Plot of the Datasaurus Dozen
@author: <NAME>
"""
import numpy as np
import matplotlib.pyplot as plt
plt.style.use("ggplot")
plt.rcParams["mathtext.fontset"]='cm'
# Column 0 of the TSV names the dataset each (x, y) point belongs to.
labels = np.genfromtxt("../data/DatasaurusDozen.tsv", delimiter="\t",
                       usecols=(0,), skip_header=1, dtype=str)
X = np.loadtxt("../data/DatasaurusDozen.tsv", delimiter="\t",
               usecols=(1,), skiprows=1)
Y = np.loadtxt("../data/DatasaurusDozen.tsv", delimiter="\t",
               usecols=(2,), skiprows=1)
# Plot order for the 3x4 grid; 'dino' is last and plotted separately below.
list_labels = ['wide_lines', 'star', 'h_lines', 'high_lines', 'v_lines',
               'circle', 'bullseye', 'slant_up', 'slant_down', 'x_shape',
               'dots', 'away', 'dino']
#%% Plot of the dozen (everything except the dino) in a 3x4 grid
plt.figure(figsize=(10, 6))
for k, label in enumerate(list_labels[:-1]):
    plt.subplot(3, 4, k + 1)
    plt.plot(X[labels == label],Y[labels == label], 'ok',
             markersize=3)
    plt.axis("image")
    plt.axis([0, 100, 0, 100])
    # Show x tick labels only on the bottom row (k >= 8) ...
    if k >= 8:
        plt.xticks(np.linspace(0, 100, 5))
        plt.xlabel(r"$x$")
    else:
        plt.xticks(np.linspace(0, 100, 5), [])
    # ... and y tick labels only on the first column (k % 4 == 0).
    if k % 4 == 0:
        plt.yticks(np.linspace(0, 100, 5))
        plt.ylabel(r"$y$")
    else:
        plt.yticks(np.linspace(0, 100, 5), [])
plt.tight_layout()
plt.savefig("datasaurus-dozen.svg", bbox_inches="tight")
#%% Plot of the datasaurus alone, as a small stand-alone figure
plt.figure(figsize=(2.5, 1.5))
plt.plot(X[labels == 'dino'], Y[labels == 'dino'], 'ok',
         markersize=3)
plt.axis("image")
plt.axis([0, 100, 0, 100])
plt.xticks(np.linspace(0, 100, 5))
plt.yticks(np.linspace(0, 100, 5))
plt.xlabel(r"$x$")
plt.ylabel(r"$y$")
plt.savefig("datasaurus.svg", bbox_inches="tight")
plt.show() | StarcoderdataPython |
196026 | import json
import os
import time
from datetime import datetime
import pytz
import req_model
def main():
    """Run the daily clock-in for every account listed in the DATA env var.

    DATA is a JSON object mapping an account label to its config dict
    (must contain "username" and "password"; the whole dict is forwarded
    to req_model.push_msg for notifications).  Each account is attempted
    up to three times, backing off 0s / 5s / 10s between attempts, and
    stops early on success or "already filed".
    """
    tz = pytz.timezone('Asia/Shanghai')
    data = json.loads(os.environ['DATA'])

    def _now():
        # Current wall-clock time in Shanghai, formatted HH:MM.
        return datetime.fromtimestamp(int(time.time()), tz).strftime('%H:%M')

    def _report(text, account_cfg):
        # Echo the status locally and forward it through the push channel.
        print(text)
        req_model.push_msg(text, account_cfg)

    for item in data:
        for i in range(3):
            time.sleep(i * 5)  # back off before each retry
            print("now {} clock in {}:".format(item, i))
            cfg = data[item]
            if cfg["username"] == "" or cfg["password"] == "":
                # Misconfigured account: nothing to retry.
                print("{}'s username or password is null".format(item))
                break
            msg = req_model.upload(cfg["username"], cfg["password"])
            if msg == "":
                # Empty response: the upload failed outright; retry.
                _report("{} 打卡失败!!".format(_now()), cfg)
                continue
            m = json.loads(msg)["m"]
            _report("{} {}".format(_now(), m), cfg)
            if m == "今天已经填报了" or m == "操作成功":
                # Already filed today, or filed successfully: done.
                break
if __name__ == '__main__':
    # Entry point: run the clock-in for every configured account.
    main()
| StarcoderdataPython |
11243 | <gh_stars>1-10
"""
Extension to the logging package to support buildlogger.
"""
# Alias the built-in logging.Logger class for type checking arguments. Those interested in
# constructing a new Logger instance should use the loggers.new_logger() function instead.
from logging import Logger
from . import config
from . import buildlogger
from . import flush
from . import loggers
| StarcoderdataPython |
3240388 | import pytest
from edera import Condition
from edera import Task
from edera.exceptions import TargetVerificationError
from edera.workflow import WorkflowBuilder
from edera.workflow.processors import TargetChecker
def test_target_checker_skips_task_execution_if_possible():
    """A task whose target already holds must not actually be executed."""

    class AlwaysTrue(Condition):
        def check(self):
            return True

    class Guarded(Task):
        target = AlwaysTrue()

        def execute(self):
            # Would blow up if the checker let execution through.
            raise RuntimeError

    workflow = WorkflowBuilder().build(Guarded())
    TargetChecker().process(workflow)
    # Must return without raising: the satisfied target short-circuits.
    workflow[Guarded()].item.execute()
def test_target_checker_verifies_target_after_task_execution():
    """If the target still fails after execute(), the checker must raise."""

    class NeverTrue(Condition):
        def check(self):
            return False

    class Hollow(Task):
        target = NeverTrue()

        def execute(self):
            pass  # does nothing, so the target can never become satisfied

    workflow = WorkflowBuilder().build(Hollow())
    TargetChecker().process(workflow)
    with pytest.raises(TargetVerificationError):
        workflow[Hollow()].item.execute()
| StarcoderdataPython |
154427 | <filename>whatsappEnvioDeArquivos/botImgSms.py
import os
import time
import emoji
from os import close
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.options import Options
from webdriver_manager.chrome import ChromeDriverManager
#Instanciando as options para o webdriver
options = webdriver.ChromeOptions()
#options para manter conta do google logada com usuario principal
options.add_argument("--user-data-dir=C:/Users/MV002/AppData/Local/Google/Chrome/User Data")
options.add_argument("--profile-directory=Default")
#comando para executar o Chromedrive
driver = webdriver.Chrome(options=options,executable_path='X:/DEV/bots/whatsappEnvioDeArquivos/chromedriver.exe')
#abre o site Whatsapp Web - Não pode haver janelas do chrome abertas
driver.get('https://web.whatsapp.com/')
#da um sleep de 15 segundos, tempo para scannear o QRCODE
time.sleep(15)
#Comando para buscar contatos e grupos do wpp
contatosProducao = ['Rudimar repDgb','<NAME>','<NAME> repDgb','Alessandro repDgb','Tufic repDgb',
'Tiago repDgb','Eduardo DGB','Ronaldo repDgb','Abrantes repDgb','Carlos repDgb','Igor repDgb',
'<NAME> repDgb','Paganini repDgb','<NAME> repD gb','Mara repDgb','Renato Spama repDgb',
'Monica repDgb','Rodrigo Porto repDgb', 'DEV HOMOLOGAÇÃO']
contatosHomologacao = ['DEV HOMOLOGAÇÃO']
contatosEx = ['DEV EX']
#Mensagem - Mensagem que sera enviada
mensagem = 'Bom dia'
mensagem2 = ' '
mensagem3 = ' Segue estoque atualizado. Boas vendas! ;-) '
mensagem4 = ' erro bot DGB estoque pronta entrega representantes '
#Midia = imagem, pdf, documento, video (caminho do arquivo, lembrando que mesmo no windows o caminho deve ser passado com barra invertida */* )
Arquivo1 = "X:/capturaDeTelas/Arquivo_1.png"
Arquivo2 = "X:/capturaDeTelas/Arquivo_2.png"
Arquivo3 = "X:/capturaDeTelas/Arquivo_3.png"
Arquivo4 = "X:/capturaDeTelas/Arquivo_4.png"
Arquivo5 = "X:/capturaDeTelas/Arquivo_5.png"
Arquivo6 = "X:/capturaDeTelas/Arquivo_6.png"
verificarSuccess = ""
#Funcao que pesquisa o Contato/Grupo
def buscar_contato(contato):
campo_pesquisa = driver.find_element_by_xpath('//div[contains(@class,"copyable-text selectable-text")]')
time.sleep(1)
campo_pesquisa.click()
campo_pesquisa.send_keys(contato)
campo_pesquisa.send_keys(Keys.ENTER)
#Funcao que envia a mensagem
def enviar_mensagem(texto1,texto2):
campo_mensagem = driver.find_elements_by_xpath('//div[contains(@class,"copyable-text selectable-text")]')
campo_mensagem[1].click()
time.sleep(3)
campo_mensagem[1].send_keys(str(texto1) + str(''))
campo_mensagem[1].send_keys(Keys.ENTER)
campo_mensagem[1].send_keys(str('') + str(texto2))
campo_mensagem[1].send_keys(Keys.ENTER)
#Funcao que envia midia como mensagem
def enviar_midia(Arquivo_1):
driver.find_element_by_css_selector("span[data-icon='clip']").click()
attach = driver.find_element_by_css_selector("input[type='file']")
attach.send_keys(Arquivo_1)
time.sleep(3)
send = driver.find_element_by_css_selector("span[data-icon='send']")
send.click()
def enviar_midia2(Arquivo_2):
driver.find_element_by_css_selector("span[data-icon='clip']").click()
attach = driver.find_element_by_css_selector("input[type='file']")
attach.send_keys(Arquivo_2)
time.sleep(3)
send = driver.find_element_by_css_selector("span[data-icon='send']")
send.click()
def enviar_midia3(Arquivo_3):
driver.find_element_by_css_selector("span[data-icon='clip']").click()
attach = driver.find_element_by_css_selector("input[type='file']")
attach.send_keys(Arquivo_3)
time.sleep(3)
send = driver.find_element_by_css_selector("span[data-icon='send']")
send.click()
def enviar_midia4(Arquivo_4):
driver.find_element_by_css_selector("span[data-icon='clip']").click()
attach = driver.find_element_by_css_selector("input[type='file']")
attach.send_keys(Arquivo_4)
time.sleep(3)
send = driver.find_element_by_css_selector("span[data-icon='send']")
send.click()
def enviar_midia5(Arquivo_5):
driver.find_element_by_css_selector("span[data-icon='clip']").click()
attach = driver.find_element_by_css_selector("input[type='file']")
attach.send_keys(Arquivo_5)
time.sleep(3)
send = driver.find_element_by_css_selector("span[data-icon='send']")
send.click()
def enviar_midia6(Arquivo_6):
driver.find_element_by_css_selector("span[data-icon='clip']").click()
attach = driver.find_element_by_css_selector("input[type='file']")
attach.send_keys(Arquivo_6)
time.sleep(3)
send = driver.find_element_by_css_selector("span[data-icon='send']")
send.click()
#Verificando arquivo success para envio das mensagens
try:
verificarSuccess = open('X:\capturaDeTelas\success.txt')
verificarSuccess.close()
#caso nao teja o arquivo succes sera acionado a mensagen de erro
except:
log = 'ERRO: Arquivo success.txt nao existe'
if(verificarSuccess):
#Percorre todos os contatos/Grupos e envia as mensagens e fotos
for contato in contatosHomologacao:
buscar_contato(contato)
enviar_mensagem(mensagem,mensagem3)
time.sleep(2)
enviar_midia(Arquivo1)
time.sleep(1)
enviar_midia2(Arquivo2)
time.sleep(1)
enviar_midia3(Arquivo3)
time.sleep(1)
enviar_midia4(Arquivo4)
time.sleep(1)
enviar_midia5(Arquivo5)
time.sleep(1)
enviar_midia3(Arquivo6)
time.sleep(2)
#comando para mandar mensagens de erro caso os arquivos nao forem enviado
else:
for contato in contatosEx:
buscar_contato(contato)
enviar_mensagem(mensagem4,log)
print('success nao existe')
driver.close()
#fechar google chorome
driver.close()
| StarcoderdataPython |
3202597 | # -*- coding: utf-8 -*-
from typing import List, Tuple, Union
import matplotlib as mpl
from matplotlib.figure import Figure
from matplotlib.axes import Subplot
import matplotlib.pyplot as plt
from numpy import ndarray
from .extend import ExtendDict
from .color import PLOT_COLORS
# DEFAULT PARAMETERS
class PlotProperty(object):
FIGURE_SIZE: Tuple[int, int] = (16, 9)
ROWS_NUMBER: int = 1
COLS_NUMBER: int = 1
GRID_POSITION_LEFT: float = 0.1
GRID_POSITION_RIGHT: float = 0.95
GRID_POSITION_BOTTOM: float = 0.2
GRID_POSITION_TOP: float = 0.95
GRID_SPACE_WIDTH: float = 0.03
GRID_SPACE_HEIGHT: float = 0.02
GRID_RATIO_WIDTH: List[float] = [1.]
GRID_RATIO_HEIGHT: List[float] = [1.]
FONT_SIZE: float = 25.0
LABEL_FONT_SIZE = FONT_SIZE*0.8
LEGENG_FONT_SIZE = FONT_SIZE*0.5
TICKS_FONT_SIZE = FONT_SIZE
COMMON_ALIGN = 'center'
VERTICAL_ALIGN = 'center'
HORIZONTAL_ALIGN = 'center'
LINE_STYLE = 'solid'
LINE_WIDTH = 2.0
MARKER = 'o'
PLOT_FORMAT = ','
MARKER_SIZE = 5.0
DPI = 100 # resolution of the figure in unit of dot per inch
IGFONT = ExtendDict(family='IPAexGothic')
def configure_figure(figsize: Tuple[int, int] = PlotProperty.FIGURE_SIZE,
nrows: int = PlotProperty.ROWS_NUMBER,
ncols: int = PlotProperty.COLS_NUMBER,
left: float = PlotProperty.GRID_POSITION_LEFT,
right: float = PlotProperty.GRID_POSITION_RIGHT,
top: float = PlotProperty.GRID_POSITION_TOP,
bottom: float = PlotProperty.GRID_POSITION_BOTTOM,
wspace: float = PlotProperty.GRID_SPACE_WIDTH,
hspace: float = PlotProperty.GRID_SPACE_HEIGHT,
sharex: bool = True, sharey: bool = True,
width_ratios: List[float] = PlotProperty.GRID_RATIO_WIDTH,
height_ratios: List[float] = PlotProperty.GRID_RATIO_HEIGHT) -> Tuple[Figure, Union[ndarray, Subplot]]:
sharex_ = 'col' if sharex else None
sharey_ = 'row' if sharey else None
if nrows > PlotProperty.ROWS_NUMBER and height_ratios == PlotProperty.GRID_RATIO_HEIGHT:
height_ratios = PlotProperty.GRID_RATIO_HEIGHT * nrows
if ncols > PlotProperty.COLS_NUMBER and width_ratios == PlotProperty.GRID_RATIO_WIDTH:
width_ratios = PlotProperty.GRID_RATIO_WIDTH * ncols
fig, ax = plt.subplots(
nrows=nrows, ncols=ncols,
sharex=sharex_, sharey=sharey_,
figsize=figsize, dpi=PlotProperty.DPI,
gridspec_kw={'height_ratios': height_ratios, 'width_ratios': width_ratios})
fig.subplots_adjust(
left=left, right=right, bottom=bottom, top=top,
wspace=wspace, hspace=hspace)
# grd = fig.add_gridspec(grid_num_v,grid_num_h)
return fig, ax
class SimplePlot(object):
def __init__(self, configure: bool = True, **args) -> None:
self.figsize: Tuple[int, int] = args.get('figsize',
PlotProperty.FIGURE_SIZE)
self.nrows: int = args.get('nrows', PlotProperty.ROWS_NUMBER)
self.ncols: int = args.get('ncols', PlotProperty.COLS_NUMBER)
self.left: float = args.get('left', PlotProperty.GRID_POSITION_LEFT)
self.right: float = args.get('right', PlotProperty.GRID_POSITION_RIGHT)
self.top: float = args.get('top', PlotProperty.GRID_POSITION_TOP)
self.bottom: float = args.get(
'bottom', PlotProperty.GRID_POSITION_BOTTOM)
self.wspace: float = args.get('wspace', PlotProperty.GRID_SPACE_WIDTH)
self.hspace: float = args.get('hspace', PlotProperty.GRID_SPACE_HEIGHT)
self.fsize: float = args.get('fsize', PlotProperty.FONT_SIZE)
self.labfsize: float = args.get(
'labfsize', PlotProperty.LABEL_FONT_SIZE)
self.legfsize: float = args.get(
'legfsize', PlotProperty.LEGENG_FONT_SIZE)
self.tckfsize: float = args.get(
'tckfsize', PlotProperty.TICKS_FONT_SIZE)
self.calign: str = args.get('calign', PlotProperty.COMMON_ALIGN)
self.valign: str = args.get('valign', PlotProperty.VERTICAL_ALIGN)
self.halign: str = args.get('halign', PlotProperty.HORIZONTAL_ALIGN)
self.lstyle: str = args.get('lstyle', PlotProperty.LINE_STYLE)
self.lwidth: float = args.get('lwidth', PlotProperty.LINE_WIDTH)
self.marker: str = args.get('marker', PlotProperty.MARKER)
self.pltfmt: str = args.get('pltfmt', PlotProperty.PLOT_FORMAT)
self.masize: float = args.get('masize', PlotProperty.MARKER_SIZE)
self.igfont: ExtendDict = args.get('igfont', PlotProperty.IGFONT)
self.colors = args.get('colors', PLOT_COLORS)
self.sharex: bool = args.get('sharex', True)
self.sharey: bool = args.get('sharey', True)
self.width_ratios: List = args.get('width_ratios',
PlotProperty.GRID_RATIO_WIDTH)
self.height_ratios: List = args.get('height_ratios',
PlotProperty.GRID_RATIO_HEIGHT)
if configure:
self.configure()
def configure(self) -> None:
self.set_rcparams()
self.fig, self.axes = configure_figure(
figsize=self.figsize,
nrows=self.nrows, ncols=self.ncols,
left=self.left, right=self.right,
top=self.top, bottom=self.bottom,
wspace=self.wspace, hspace=self.hspace,
sharex=self.sharex, sharey=self.sharey,
width_ratios=self.width_ratios,
height_ratios=self.height_ratios)
def set_rcparams(self) -> None:
plt.rcParams['font.family'] = 'Times New Roman'
plt.rcParams['mathtext.fontset'] = 'cm'
plt.rcParams['mathtext.rm'] = 'serif'
plt.rcParams['axes.titleweight'] = 'bold'
# plt.rcParams['axes.labelweight'] = 'bold'
plt.rcParams['axes.linewidth'] = self.lwidth
plt.rcParams['grid.linestyle'] = 'solid'
plt.rcParams['grid.linewidth'] = 1.0
plt.rcParams['grid.alpha'] = 0.2
plt.rcParams['xtick.major.size'] = 8
plt.rcParams['xtick.minor.size'] = 5
plt.rcParams['xtick.major.width'] = self.lwidth
plt.rcParams['xtick.minor.width'] = self.lwidth
plt.rcParams['xtick.major.pad'] = 5
plt.rcParams['ytick.major.size'] = 8
plt.rcParams['xtick.top'] = True
plt.rcParams['ytick.minor.size'] = 5
plt.rcParams['ytick.major.width'] = self.lwidth
plt.rcParams['ytick.minor.width'] = self.lwidth
plt.rcParams['ytick.major.pad'] = 5
plt.rcParams['xtick.direction'] = 'in'
plt.rcParams['ytick.direction'] = 'in'
plt.rcParams['xtick.labelsize'] = self.labfsize
plt.rcParams['ytick.labelsize'] = self.labfsize
plt.rcParams['ytick.right'] = True
def get(self, name):
return self.__dict__.get(name)
def set(self, name, value):
self.__dict__[name] = value
return self.get(name)
| StarcoderdataPython |
61351 | from qsearch import Project, solvers, unitaries, utils, multistart_solvers, parallelizers, compiler, options
import scipy as sp
import os
# The native Rust solvers are optional; fall back to None so the tests
# below can be skipped when the rustopt feature is not built.
try:
    from qsrs import BFGS_Jac_SolverNative, LeastSquares_Jac_SolverNative
except ImportError:
    BFGS_Jac_SolverNative = None
    LeastSquares_Jac_SolverNative = None
import pytest
import tempfile
import os
import sys
# 3-qubit quantum Fourier transform: the target unitary shared by most tests.
qft3 = unitaries.qft(8)
def test_cobyla(project):
    """Compile a 2-qubit QFT with the gradient-free COBYLA solver."""
    project.add_compilation('qft2', unitaries.qft(4))
    project['solver'] = solvers.COBYLA_Solver()
    project.run()
def test_bfgs_jac(project):
    """Compile the 3-qubit QFT with the gradient-based BFGS solver."""
    project.add_compilation('qft3', qft3)
    project['solver'] = solvers.BFGS_Jac_Solver()
    project.run()
def test_least_squares_jac(project):
    """Compile the 3-qubit QFT with the Jacobian least-squares solver."""
    project.add_compilation('qft3', qft3)
    project['solver'] = solvers.LeastSquares_Jac_Solver()
    # The least-squares solver minimises residuals, so it needs the
    # residual and residual-Jacobian error functions.
    project['error_residuals'] = utils.matrix_residuals
    project['error_residuals_jac'] = utils.matrix_residuals_jac
    project.run()
@pytest.mark.skipif(sys.platform == 'win32', reason="This test currently hangs due to the nested parallel executor")
def test_multistart_least_squares(project):
    """Multi-start wrapper (2 starts) around the least-squares inner solver."""
    project.add_compilation('qft3', qft3)
    project['solver'] = multistart_solvers.MultiStart_Solver(2)
    project['inner_solver'] = solvers.LeastSquares_Jac_Solver()
    # Multi-start spawns worker processes for the parallel restarts.
    project['parallelizer'] = parallelizers.ProcessPoolParallelizer
    project['error_residuals'] = utils.matrix_residuals
    project['error_residuals_jac'] = utils.matrix_residuals_jac
    project.run()
@pytest.mark.skipif(sys.platform == 'win32', reason="This test currently hangs due to the nested parallel executor")
def test_multistart_bfgs(project):
    """Multi-start wrapper (2 starts) around the BFGS inner solver."""
    project.add_compilation('qft3', qft3)
    project['solver'] = multistart_solvers.MultiStart_Solver(2)
    project['inner_solver'] = solvers.BFGS_Jac_Solver()
    # Multi-start spawns worker processes for the parallel restarts.
    project['parallelizer'] = parallelizers.ProcessPoolParallelizer
    project.run()
def compile(U, solver):
    """Search-compile the unitary ``U`` with ``solver`` and return the result dict.

    NOTE(review): this helper shadows the builtin ``compile`` (and the
    local ``dir`` shadows builtin ``dir``).  Both work here, but renaming
    would be cleaner; the name is kept because the tests below call it.
    """
    with tempfile.TemporaryDirectory() as dir:
        opts = options.Options()
        opts.target = U
        opts.error_func = utils.matrix_distance_squared
        opts.error_jac = utils.matrix_distance_squared_jac
        opts.solver = solver
        # Keep the log inside the temp dir so the test leaves no files behind.
        opts.log_file = os.path.join(dir, 'test.log')
        comp = compiler.SearchCompiler()
        res = comp.compile(opts)
        return res
@pytest.mark.skipif(BFGS_Jac_SolverNative is None, reason="The rustopt feature has not been enabled")
def test_rust_solver_qft3():
    """Native (Rust) BFGS solver should hit the 3-qubit QFT to high precision."""
    U = unitaries.qft(8)
    res = compile(U, BFGS_Jac_SolverNative())
    circ = res['structure']
    v = res['parameters']
    assert utils.matrix_distance_squared(U, circ.matrix(v)) < 1e-10
@pytest.mark.skipif(LeastSquares_Jac_SolverNative is None, reason="The rustopt feature has not been enabled")
def test_rust_solver_least_squares_qft3():
    """Native (Rust) least-squares solver should hit the 3-qubit QFT precisely.

    Fixed: this function was also named ``test_rust_solver_qft3``, which
    silently replaced (and therefore never ran) the BFGS variant defined
    just above.  Renaming it lets pytest discover and run both tests.
    """
    U = unitaries.qft(8)
    res = compile(U, LeastSquares_Jac_SolverNative())
    circ = res['structure']
    v = res['parameters']
    assert utils.matrix_distance_squared(U, circ.matrix(v)) < 1e-10
| StarcoderdataPython |
3349026 | from typing import Callable, NamedTuple, Tuple
import jax.numpy as jnp
from jax.scipy.stats import norm, multivariate_normal
from jax import jacfwd
from jax import grad
from jax.ops import index_update
from jaxvi.models import Model
# class ADVIState(NamedTuple):
# phi: jnp.DeviceArray
# grad_phi: jnp.DeviceArray
class ADVI(object):
    """Mean-field Automatic Differentiation Variational Inference.

    The variational posterior is a factorised Gaussian over the model's
    unconstrained (real-valued) latent space.  ``phi`` stacks the means
    followed by the log standard deviations, so it has length
    ``2 * latent_dim``.
    """
    def __init__(self, model: Model):
        """
        A native implementation of ADVI.

        Arguments:
            model: the probabilistic model; must expose ``log_joint``
                (log p(x, theta)), ``inv_T`` (map from the unconstrained
                space back to the parameter space) and ``latent_dim``.
        """
        self.model = model
        self.latent_dim = model.latent_dim
        self.inv_T = model.inv_T
        # Jacobian of the constraining transform; used both for the
        # log|det J| correction and for the chain rule in grad().
        self.jac_T = jacfwd(self.inv_T)
        # gradients of the log joint and of the log-det-Jacobian term
        self.grad_joint = grad(model.log_joint)
        self.grad_det_J = grad(self.log_abs_det_jacobian)
        # variational parameters: [mu (latent_dim), log-sigma (latent_dim)]
        self.phi = jnp.zeros(2 * model.latent_dim)
    # def __call__(self, eta):
    #     phi = jnp.zeros(2 * self.latent_dim)
    #     return ADVIState(phi, self.grad(eta, phi))
    def log_abs_det_jacobian(self, zeta: jnp.DeviceArray) -> jnp.DeviceArray:
        """Log absolute determinant of the Jacobian of inv_T at ``zeta``."""
        return jnp.log(jnp.abs(jnp.linalg.det(self.jac_T(zeta))))
    @property
    def loc(self):
        # Current variational mean, mapped back to the parameter space.
        return self.inv_T(self.phi[: self.latent_dim])
    @property
    def scale(self):
        # Current variational standard deviations (phi stores log-sigma).
        return jnp.exp(self.phi[self.latent_dim :])
    def mu(self, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        """First half of ``phi``: the variational means."""
        return phi[: self.latent_dim]
    def sigma(self, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        """Second half of ``phi``: the log standard deviations."""
        return phi[self.latent_dim :]
    def omega(self, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        """Standard deviations, i.e. exp of the log-sigma half of ``phi``."""
        return jnp.exp(self.sigma(phi))
    def inv_S(self, eta: jnp.DeviceArray, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        """
        Transforms eta to zeta (reparameterisation: zeta = eta * omega + mu).
        """
        return (eta * self.omega(phi)) + self.mu(phi)
    def variational_entropy(
        self, zeta: jnp.DeviceArray, phi: jnp.DeviceArray
    ) -> jnp.DeviceArray:
        """Plug-in entropy estimate of the variational Gaussian at ``zeta``.

        NOTE(review): this sums ``-p log p`` at the sampled points rather
        than using the closed-form Gaussian entropy -- confirm intended.
        """
        probs = norm.pdf(zeta, loc=self.mu(phi), scale=self.omega(phi))
        return -(probs * jnp.log(probs)).sum()
    def grad(self, eta: jnp.DeviceArray, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        """ Returns nabla mu and nabla omega, stacked like ``phi``. """
        zeta = self.inv_S(eta, phi)
        theta = self.inv_T(zeta)
        # compute gradients: chain rule through inv_T plus the
        # log|det J| correction gives the gradient w.r.t. mu ...
        grad_joint = self.grad_joint(theta)
        grad_inv_t = self.jac_T(zeta)
        grad_trans = self.grad_det_J(zeta)
        grad_mu = grad_mu = grad_inv_t @ grad_joint + grad_trans if False else grad_inv_t @ grad_joint + grad_trans
        # ... and the reparameterised gradient w.r.t. log-sigma
        # (the +1 is the derivative of the Gaussian entropy term).
        grad_omega = grad_mu * eta * self.omega(phi) + 1
        return jnp.append(grad_mu, grad_omega)
    def elbo(self, eta: jnp.DeviceArray, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        """Single-sample ELBO estimate at the standard-normal draw ``eta``."""
        zeta = self.inv_S(eta, phi)
        theta = self.inv_T(zeta)
        return (
            self.model.log_joint(theta)
            + self.log_abs_det_jacobian(zeta)
            + self.variational_entropy(zeta, phi)
        )
class FullRankADVI(ADVI):
    # Full-rank variant: the variational Gaussian has covariance L @ L.T,
    # with L lower-triangular. `phi` packs [mu (latent_dim entries),
    # tril entries of L (latent_dim*(latent_dim+1)/2 entries)].
    def __init__(self, model):
        """
        A naive implementation of full-rank ADVI.
        Arguments:
        model: object exposing `log_joint` (computes log p(x, theta)),
            `inv_T` (maps unconstrained zeta to constrained theta) and
            `latent_dim` (number of latent parameters).
        """
        super().__init__(model)
        # variational parameters
        # mu initialised to zeros; the tril entries of L initialised to ones.
        self.phi = jnp.append(
            jnp.zeros(self.latent_dim),
            jnp.ones(int(self.latent_dim * (self.latent_dim + 1) / 2)),
        )
    def L(self, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        # Unpack the flat tril entries of phi into a lower-triangular matrix.
        # NOTE(review): jax.ops.index_update was removed in newer JAX releases;
        # the modern equivalent is L.at[jnp.tril_indices(...)].set(...).
        L = jnp.zeros((self.latent_dim, self.latent_dim))
        L = index_update(L, jnp.tril_indices(self.latent_dim), phi[self.latent_dim :])
        return L
    def inv_L(self, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        # NOTE(review): despite the name, this returns the inverse COVARIANCE
        # (L @ L.T)^-1, not L^-1 -- confirm against the intended update rule.
        L = self.L(phi)
        return jnp.linalg.inv(L @ L.T)
    def inv_S(self, eta: jnp.DeviceArray, phi: jnp.DeviceArray) -> jnp.DeviceArray:
        """
        Transforms eta to zeta (reparameterization: zeta = mu + L @ eta).
        """
        return (self.L(phi) @ eta) + phi[: self.latent_dim]
    def variational_entropy(self, zeta, phi):
        # Same pointwise -p*log(p) form as the mean-field version (see note there).
        L = self.L(phi)
        probs = multivariate_normal.pdf(zeta, mean=phi[: self.latent_dim], cov=L @ L.T)
        return -(probs * jnp.log(probs)).sum()
    def grad(self, eta, phi):
        """ Returns nabla mu and nabla omega (stochastic ELBO gradient for a
        single standard-normal draw eta; the L part is returned flattened
        over the lower-triangular indices). """
        zeta = self.inv_S(eta, phi)
        theta = self.inv_T(zeta)
        # compute gradients
        grad_joint = self.grad_joint(theta)
        grad_inv_t = self.jac_T(zeta)
        grad_trans = self.grad_det_J(zeta)
        grad_mu = grad_inv_t @ grad_joint + grad_trans
        # print(grad_μ, η, grad_μ * η, grad_μ * η.T, self.inv_L(ϕ).T)
        # NOTE(review): the ADVI paper's entropy term uses (L^-1)^T here,
        # while inv_L returns (L L^T)^-1 -- verify this gradient.
        grad_L = (grad_mu * eta + self.inv_L(phi).T)[jnp.tril_indices(self.latent_dim)]
        return jnp.append(grad_mu, grad_L)
| StarcoderdataPython |
75300 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 24 13:43:28 2017
@author: nightowl
"""
from __future__ import print_function
import os
# from fuzzywuzzy import fuzz
from shutil import copyfile
from ..io.database.sql_to_python import QuerySQL
from ..io.database.sql_connector import DVH_SQL
from ...paths import PREF_DIR, SCRIPT_DIR
from ...tools.utilities import flatten_list_of_lists
class Physician:
    """Per-physician ROI map: physician ROI -> linked institutional ROI plus
    the list of spelling variations seen in treatment plans."""

    def __init__(self, initials):
        self.initials = initials
        self.physician_rois = {}

    def add_physician_roi(self, institutional_roi, physician_roi):
        """Register a physician ROI linked to an institutional ROI; the
        physician ROI name itself is the first variation."""
        inst_name = clean_name(institutional_roi)
        phys_name = clean_name(physician_roi)
        self.physician_rois[phys_name] = {'institutional_roi': inst_name,
                                          'variations': [phys_name]}

    def add_physician_roi_variation(self, physician_roi, variation):
        """Attach a new spelling variation to an existing physician ROI,
        keeping the variation list sorted and duplicate-free."""
        phys_name = clean_name(physician_roi)
        new_variation = clean_name(variation)
        if phys_name not in self.physician_rois:
            return
        known = self.physician_rois[phys_name]['variations']
        if new_variation not in known:
            known.append(new_variation)
            known.sort()
class DatabaseROIs:
    """In-memory ROI name map with three tiers: institutional ROI,
    physician ROI, and spelling variations.

    Backed by the '.roi' preference files in PREF_DIR:
      - 'institutional.roi' lists one institutional ROI name per line.
      - 'physician_<INITIALS>.roi' holds lines of the form
        'institutional: physician: variation, variation, ...'.
    All names are normalized through clean_name(); physician initials are
    upper-cased.
    """

    def __init__(self):
        self.physicians = {}
        self.institutional_rois = []
        # Copy default ROI files to user folder if they do not exist
        if not os.path.isfile(os.path.join(PREF_DIR, 'institutional.roi')):
            initialize_roi_preference_file('institutional.roi')
            initialize_roi_preference_file('physician_BBM.roi')
        # Import institutional roi names
        abs_file_path = os.path.join(PREF_DIR, 'institutional.roi')
        if os.path.isfile(abs_file_path):
            with open(abs_file_path, 'r') as document:
                for line in document:
                    line = clean_name(str(line))
                    # FIX: skip blank lines -- previously a trailing blank line
                    # inserted an empty-string institutional ROI.
                    if line:
                        self.institutional_rois.append(line)
        physicians = get_physicians_from_roi_files()
        for physician in physicians:
            self.add_physician(physician, add_institutional_rois=(physician == 'DEFAULT'))
        self.import_physician_roi_maps()
        if 'uncategorized' not in self.institutional_rois:
            self.institutional_rois.append('uncategorized')
        self.branched_institutional_rois = {}

    ##############################################
    # Import from file functions
    ##############################################
    def import_physician_roi_maps(self):
        """Load every physician_<INITIALS>.roi file found for known physicians."""
        for physician in list(self.physicians):
            rel_path = 'physician_%s.roi' % physician
            abs_file_path = os.path.join(PREF_DIR, rel_path)
            if os.path.isfile(abs_file_path):
                self.import_physician_roi_map(abs_file_path, physician)

    def import_physician_roi_map(self, abs_file_path, physician):
        """Parse one physician map file; each line links an institutional ROI,
        a physician ROI and any number of variations."""
        with open(abs_file_path, 'r') as document:
            for line in document:
                line = str(line).lower().strip()
                # FIX: skip blank/short lines -- previously a blank line made
                # line[1] raise IndexError.
                if not line:
                    continue
                parts = line.replace(':', ',').split(',')
                if len(parts) < 2:
                    continue
                institutional_roi = parts[0].strip()
                physician_roi = parts[1].strip()
                self.add_institutional_roi(institutional_roi)
                self.add_physician_roi(physician, institutional_roi, physician_roi)
                for raw_variation in parts[2:]:
                    self.add_variation(physician, physician_roi, clean_name(raw_variation))

    ###################################
    # Physician functions
    ###################################
    def add_physician(self, physician, add_institutional_rois=True):
        """Create a physician entry; optionally pre-link every institutional ROI
        to an identically named physician ROI (used for 'DEFAULT')."""
        physician = clean_name(physician).upper()
        if physician not in self.get_physicians():
            self.physicians[physician] = Physician(physician)
            if add_institutional_rois:
                for institutional_roi in self.institutional_rois:
                    self.add_physician_roi(physician, institutional_roi, institutional_roi)

    def delete_physician(self, physician):
        physician = clean_name(physician).upper()
        self.physicians.pop(physician, None)

    def get_physicians(self):
        return list(self.physicians)

    def get_physician(self, physician):
        return self.physicians[physician]

    def is_physician(self, physician):
        physician = clean_name(physician).upper()
        return physician in self.get_physicians()

    def set_physician(self, new_physician, physician):
        """Rename a physician entry (initials are normalized/upper-cased)."""
        new_physician = clean_name(new_physician).upper()
        physician = clean_name(physician).upper()
        self.physicians[new_physician] = self.physicians.pop(physician)

    #################################
    # Institutional ROI functions
    #################################
    def get_institutional_rois(self):
        return self.institutional_rois

    def get_institutional_roi(self, physician, physician_roi):
        """Return the institutional ROI linked to a physician ROI.
        For 'DEFAULT' the physician ROI is its own institutional ROI."""
        physician = clean_name(physician).upper()
        physician_roi = clean_name(physician_roi)
        if physician == 'DEFAULT':
            return physician_roi
        return self.physicians[physician].physician_rois[physician_roi]['institutional_roi']

    def add_institutional_roi(self, roi):
        roi = clean_name(roi)
        if roi not in self.institutional_rois:
            self.institutional_rois.append(roi)
            self.institutional_rois.sort()

    def set_institutional_roi(self, new_institutional_roi, institutional_roi):
        """Rename an institutional ROI and re-point every physician link to it."""
        new_institutional_roi = clean_name(new_institutional_roi)
        institutional_roi = clean_name(institutional_roi)
        index = self.institutional_rois.index(institutional_roi)
        self.institutional_rois.pop(index)
        self.add_institutional_roi(new_institutional_roi)
        for physician in self.get_physicians():
            if physician != 'DEFAULT':
                for physician_roi in self.get_physician_rois(physician):
                    physician_roi_obj = self.physicians[physician].physician_rois[physician_roi]
                    if physician_roi_obj['institutional_roi'] == institutional_roi:
                        physician_roi_obj['institutional_roi'] = new_institutional_roi

    def set_linked_institutional_roi(self, new_institutional_roi, physician, physician_roi):
        self.physicians[physician].physician_rois[physician_roi]['institutional_roi'] = new_institutional_roi

    def delete_institutional_roi(self, roi):
        # "Deleting" re-points every link to 'uncategorized' and drops the name.
        self.set_institutional_roi('uncategorized', roi)

    def is_institutional_roi(self, roi):
        return clean_name(roi) in self.institutional_rois

    def get_unused_institutional_rois(self, physician):
        """Institutional ROIs not yet linked by this physician
        (always includes 'uncategorized')."""
        physician = clean_name(physician).upper()
        used_rois = []
        if self.get_physician_rois(physician)[0] != '':
            for physician_roi in self.get_physician_rois(physician):
                used_rois.append(self.get_institutional_roi(physician, physician_roi))
        unused_rois = [roi for roi in self.institutional_rois if roi not in used_rois]
        if 'uncategorized' not in unused_rois:
            unused_rois.append('uncategorized')
        return unused_rois

    ########################################
    # Physician ROI functions
    ########################################
    def get_physician_rois(self, physician):
        """Sorted physician ROI names, or [''] when the physician is unknown
        or has no ROIs (callers rely on the non-empty sentinel)."""
        physician = clean_name(physician).upper()
        if self.is_physician(physician):
            physician_rois = list(self.physicians[physician].physician_rois)
            if physician_rois:
                physician_rois.sort()
                return physician_rois
        return ['']

    def get_physician_roi(self, physician, roi):
        """Resolve a raw ROI name to its physician ROI via the variation lists."""
        physician = clean_name(physician).upper()
        roi = clean_name(roi)
        for physician_roi in self.get_physician_rois(physician):
            if roi in self.get_variations(physician, physician_roi):
                return physician_roi
        return 'uncategorized'

    def get_physician_roi_from_institutional_roi(self, physician, institutional_roi):
        # NOTE(review): inconsistent return types are preserved for
        # backward compatibility -- a matching name is returned as a str,
        # while the 'uncategorized' fallbacks return a one-element list.
        physician = clean_name(physician).upper()
        institutional_roi = clean_name(institutional_roi)
        if institutional_roi == 'uncategorized':
            return ['uncategorized']
        for physician_roi in self.get_physician_rois(physician):
            if institutional_roi == self.get_institutional_roi(physician, physician_roi):
                return physician_roi
        return ['uncategorized']

    def add_physician_roi(self, physician, institutional_roi, physician_roi):
        physician = clean_name(physician).upper()
        institutional_roi = clean_name(institutional_roi)
        physician_roi = clean_name(physician_roi)
        if physician_roi not in self.get_physician_rois(physician):
            if institutional_roi in self.institutional_rois:
                self.physicians[physician].add_physician_roi(institutional_roi, physician_roi)

    def set_physician_roi(self, new_physician_roi, physician, physician_roi):
        """Rename a physician ROI; the new name becomes one of its variations."""
        new_physician_roi = clean_name(new_physician_roi)
        physician = clean_name(physician).upper()
        physician_roi = clean_name(physician_roi)
        if new_physician_roi != physician_roi:
            self.physicians[physician].physician_rois[new_physician_roi] = \
                self.physicians[physician].physician_rois.pop(physician_roi, None)
            self.add_variation(physician, new_physician_roi, new_physician_roi)

    def delete_physician_roi(self, physician, physician_roi):
        physician = clean_name(physician).upper()
        physician_roi = clean_name(physician_roi)
        if physician_roi in self.get_physician_rois(physician):
            self.physicians[physician].physician_rois.pop(physician_roi, None)

    def is_physician_roi(self, roi, physician):
        return clean_name(roi) in self.get_physician_rois(physician)

    def get_unused_physician_rois(self, physician):
        """Physician ROIs whose institutional link is 'uncategorized'
        (returns [''] when there are none)."""
        physician = clean_name(physician).upper()
        unused_rois = []
        for physician_roi in self.get_physician_rois(physician):
            if self.get_institutional_roi(physician, physician_roi) == 'uncategorized':
                unused_rois.append(physician_roi)
        if not unused_rois:
            unused_rois = ['']
        return unused_rois

    def merge_physician_rois(self, physician, physician_rois, final_physician_roi):
        """Fold several physician ROIs into one, keeping all of their variations."""
        variation_lists = [self.get_variations(physician, physician_roi) for physician_roi in physician_rois]
        variations = flatten_list_of_lists(variation_lists, remove_duplicates=True)
        for variation in variations:
            self.add_variation(physician, final_physician_roi, variation)
        for physician_roi in physician_rois:
            if physician_roi != final_physician_roi:
                self.delete_physician_roi(physician, physician_roi)

    ###################################################
    # Variation-of-Physician-ROI functions
    ###################################################
    def get_variations(self, physician, physician_roi):
        """Variation names for a physician ROI; sentinel lists for the
        'uncategorized' and unknown cases."""
        physician = clean_name(physician).upper()
        physician_roi = clean_name(physician_roi)
        if physician_roi == 'uncategorized':
            return ['uncategorized']
        if self.is_physician_roi(physician_roi, physician):
            variations = self.physicians[physician].physician_rois[physician_roi]['variations']
            if variations:
                return variations
        return ['']

    def get_all_variations_of_physician(self, physician):
        physician = clean_name(physician).upper()
        variations = []
        for physician_roi in self.get_physician_rois(physician):
            for variation in self.get_variations(physician, physician_roi):
                variations.append(variation)
        if variations:
            variations.sort()
        else:
            variations = ['']
        return variations

    def add_variation(self, physician, physician_roi, variation):
        physician = clean_name(physician).upper()
        physician_roi = clean_name(physician_roi)
        variation = clean_name(variation)
        if variation and variation not in self.get_variations(physician, physician_roi):
            self.physicians[physician].add_physician_roi_variation(physician_roi, variation)

    def delete_variation(self, physician, physician_roi, variation):
        physician = clean_name(physician).upper()
        physician_roi = clean_name(physician_roi)
        variation = clean_name(variation)
        if variation in self.get_variations(physician, physician_roi):
            index = self.physicians[physician].physician_rois[physician_roi]['variations'].index(variation)
            self.physicians[physician].physician_rois[physician_roi]['variations'].pop(index)
            self.physicians[physician].physician_rois[physician_roi]['variations'].sort()

    def set_variation(self, new_variation, physician, physician_roi, variation):
        new_variation = clean_name(new_variation)
        physician = clean_name(physician).upper()
        physician_roi = clean_name(physician_roi)
        variation = clean_name(variation)
        if new_variation != variation:
            self.add_variation(physician, physician_roi, new_variation)
            self.delete_variation(physician, physician_roi, variation)

    def is_roi(self, roi):
        """True if the name is a known variation for any physician."""
        roi = clean_name(roi)
        for physician in self.get_physicians():
            for physician_roi in self.get_physician_rois(physician):
                if roi in self.get_variations(physician, physician_roi):
                    return True
        return False

    ########################
    # Export to file
    ########################
    def write_to_file(self):
        """Persist the map back to the '.roi' preference files, removing
        physician files for physicians that no longer exist."""
        # FIX: use context managers (handles leaked on exceptions before),
        # write the joined string once (was iterated character-by-character),
        # and sort a copy instead of mutating self.institutional_rois in place.
        abs_file_path = os.path.join(PREF_DIR, 'institutional.roi')
        with open(abs_file_path, 'w') as document:
            document.write('\n'.join(sorted(self.institutional_rois)))

        physicians = self.get_physicians()
        physicians.remove('DEFAULT')  # 'DEFAULT' is implicit, never written
        for physician in physicians:
            file_name = 'physician_' + physician + '.roi'
            abs_file_path = os.path.join(PREF_DIR, file_name)
            lines = []
            for physician_roi in self.get_physician_rois(physician):
                institutional_roi = self.get_institutional_roi(physician, physician_roi)
                variations = ', '.join(self.get_variations(physician, physician_roi))
                lines.append(': '.join([institutional_roi, physician_roi, variations]) + '\n')
            lines.sort()
            if lines:
                with open(abs_file_path, 'w') as document:
                    document.writelines(lines)

        # Remove stale physician files (physician deleted since last save).
        for physician in get_physicians_from_roi_files():
            if physician not in physicians and physician != 'DEFAULT':
                file_name = 'physician_' + physician + '.roi'
                abs_file_path = os.path.join(PREF_DIR, file_name)
                os.remove(abs_file_path)

    ################
    # Plotting tools
    ################
    def get_physician_roi_visual_coordinates(self, physician, physician_roi):
        """Node/edge coordinates for one physician ROI's map graph
        (institutional at x=0.5, physician at x=1.5, variations at x=2.5)."""
        # All 0.5 subtractions due to a workaround of a Bokeh 0.12.9 bug
        institutional_roi = self.get_institutional_roi(physician, physician_roi)
        # x and y are coordinates for the circles
        # x0, y0 is beginning of line segment, x1, y1 is end of line-segment
        if institutional_roi == 'uncategorized':
            table = {'name': [physician_roi],
                     'x': [2 - 0.5],
                     'y': [0],
                     'x0': [2 - 0.5],
                     'y0': [0],
                     'x1': [2 - 0.5],
                     'y1': [0]}
        else:
            table = {'name': [institutional_roi, physician_roi],
                     'x': [1 - 0.5, 2 - 0.5],
                     'y': [0, 0],
                     'x0': [1 - 0.5, 2 - 0.5],
                     'y0': [0, 0],
                     'x1': [2 - 0.5, 1 - 0.5],
                     'y1': [0, 0]}
        variations = self.get_variations(physician, physician_roi)
        for i, variation in enumerate(variations):
            y = -i
            table['name'].append(variation)
            table['x'].append(3 - 0.5)
            table['y'].append(y)
            table['x0'].append(2 - 0.5)
            table['y0'].append(0)
            table['x1'].append(3 - 0.5)
            table['y1'].append(y)
        table_length = len(table['name'])
        table['color'] = ['#1F77B4'] * table_length
        table['institutional_roi'] = [institutional_roi] * table_length
        table['physician_roi'] = [physician_roi] * table_length
        return table

    def get_all_institutional_roi_visual_coordinates(self, physician, ignored_physician_rois=None):
        """Stack the per-ROI coordinate tables for a physician into one table,
        sorted by institutional ROI ('uncategorized' sinks to the bottom)."""
        # FIX: mutable default argument replaced with the None sentinel.
        if ignored_physician_rois is None:
            ignored_physician_rois = []
        p_rois = [roi for roi in self.get_physician_rois(physician) if roi not in ignored_physician_rois]
        i_rois = [self.get_institutional_roi(physician, p_roi) for p_roi in p_rois]
        for i, i_roi in enumerate(i_rois):
            if i_roi == 'uncategorized':
                i_rois[i] = 'zzzzzzzzzzzzzzzzzzz'  # sort 'uncategorized' last
        sorted_indices = [i[0] for i in sorted(enumerate(i_rois), key=lambda x: x[1])]
        p_rois = [p_rois[i] for i in sorted_indices]
        tables = {p_roi: self.get_physician_roi_visual_coordinates(physician, p_roi) for p_roi in p_rois}
        heights = [3 - min(tables[p_roi]['y']) for p_roi in p_rois]
        max_y_delta = sum(heights) + 2  # include 2 buffer to give space to read labels on plot
        for i, p_roi in enumerate(p_rois):
            y_delta = sum(heights[i:])
            for key in ['y', 'y0', 'y1']:
                for j in range(len(tables[p_roi][key])):
                    tables[p_roi][key][j] += y_delta - max_y_delta
        # NOTE(review): assumes at least one physician ROI survives filtering;
        # an empty p_rois would raise IndexError here (pre-existing behavior).
        table = tables[p_rois[0]]
        for i in range(1, len(p_rois)):
            for key in list(table):
                table[key].extend(tables[p_rois[i]][key])
        return self.update_duplicate_y_entries(table, physician)

    @staticmethod
    def get_roi_visual_y_values(table):
        """Average y per institutional ROI node (x == 0.5) so ROIs shared by
        multiple physician ROIs collapse onto one point."""
        y_values = {}
        for i, x in enumerate(table['x']):
            if x == 1 - 0.5:
                name = table['name'][i]
                y = table['y'][i]
                if name not in list(y_values):
                    y_values[name] = []
                y_values[name].append(y)
        for name in list(y_values):
            y_values[name] = sum(y_values[name]) / len(y_values[name])
        return y_values

    def update_duplicate_y_entries(self, table, physician):
        """Merge duplicate institutional ROI nodes, re-route physician edges to
        the merged position, and record which institutional ROIs branched."""
        y_values = self.get_roi_visual_y_values(table)
        self.branched_institutional_rois[physician] = []
        for i, name in enumerate(table['name']):
            if table['x'][i] == 1 - 0.5 and table['y'][i] != y_values[name]:
                table['y'][i] = y_values[name]
                table['y0'][i] = y_values[name]
                table['color'][i] = 'red'  # highlight branched institutional ROIs
                self.branched_institutional_rois[physician].append(name)
            if table['x'][i] == 2 - 0.5:
                inst_name = self.get_institutional_roi(physician, name)
                if inst_name != 'uncategorized':
                    table['y1'][i] = y_values[inst_name]
        if self.branched_institutional_rois[physician]:
            self.branched_institutional_rois[physician] = list(set(self.branched_institutional_rois[physician]))
        return table
def clean_name(name):
    """Normalize an ROI name: lowercase, trimmed, apostrophes to backticks,
    underscores to spaces."""
    normalized = str(name).lower().strip()
    normalized = normalized.replace("'", '`')
    return normalized.replace('_', ' ')
def get_physicians_from_roi_files():
    """Scan PREF_DIR for 'physician_<INITIALS>.roi' files and return the
    upper-cased initials, always starting with 'DEFAULT'."""
    found = ['DEFAULT']
    for entry in os.listdir(PREF_DIR):
        if entry.startswith("physician_") and entry.endswith(".roi"):
            initials = entry.replace('physician_', '').replace('.roi', '')
            found.append(clean_name(initials).upper())
    return found
def get_physician_from_uid(uid):
    """Return the physician recorded in the Plans table for a study instance UID.

    Returns the first match; raises IndexError if no plan is found.
    """
    cnx = DVH_SQL()
    # NOTE(review): the condition is built by string concatenation -- fine for
    # trusted, DB-sourced UIDs, but not injection-safe if uid can come from
    # user input; a parameterized query would be safer.
    condition = "study_instance_uid = '" + uid + "'"
    results = cnx.query('Plans', 'physician', condition)
    if len(results) > 1:
        print('Warning: multiple plans with this study_instance_uid exist')
    return str(results[0][0])
def update_uncategorized_rois_in_database():
    """Re-run ROI name mapping for every DVH row currently 'uncategorized'
    and write any newly resolved physician/institutional ROIs back to the DB.
    """
    roi_map = DatabaseROIs()
    # Only rows that failed to map previously.
    dvh_data = QuerySQL('DVHs', "physician_roi = 'uncategorized'")
    cnx = DVH_SQL()
    for i in range(len(dvh_data.roi_name)):
        uid = dvh_data.study_instance_uid[i]
        mrn = dvh_data.mrn[i]
        physician = get_physician_from_uid(uid)
        roi_name = dvh_data.roi_name[i]
        new_physician_roi = roi_map.get_physician_roi(physician, roi_name)
        new_institutional_roi = roi_map.get_institutional_roi(physician, roi_name)
        # Only write back rows that now resolve to a real physician ROI.
        if new_physician_roi != 'uncategorized':
            print(mrn, physician, new_institutional_roi, new_physician_roi, roi_name, sep=' ')
            # NOTE(review): condition built by string concatenation; safe only
            # for trusted DB-sourced values (see get_physician_from_uid).
            condition = "study_instance_uid = '" + uid + "'" + "and roi_name = '" + roi_name + "'"
            cnx.update('DVHs', 'physician_roi', new_physician_roi, condition)
            cnx.update('DVHs', 'institutional_roi', new_institutional_roi, condition)
    cnx.close()
def reinitialize_roi_categories_in_database():
    """Re-map the ROI categories of EVERY DVH row from the current ROI map
    and overwrite the stored physician/institutional ROI columns.
    """
    roi_map = DatabaseROIs()
    dvh_data = QuerySQL('DVHs', "mrn != ''")  # effectively all rows
    cnx = DVH_SQL()
    for i in range(len(dvh_data.roi_name)):
        uid = dvh_data.study_instance_uid[i]
        physician = get_physician_from_uid(uid)
        roi_name = dvh_data.roi_name[i]
        new_physician_roi = roi_map.get_physician_roi(physician, roi_name)
        new_institutional_roi = roi_map.get_institutional_roi(physician, roi_name)
        print(i, physician, new_institutional_roi, new_physician_roi, roi_name, sep=' ')
        # NOTE(review): condition built by string concatenation; safe only for
        # trusted DB-sourced values.
        condition = "study_instance_uid = '" + uid + "'" + "and roi_name = '" + roi_name + "'"
        cnx.update('DVHs', 'physician_roi', new_physician_roi, condition)
        cnx.update('DVHs', 'institutional_roi', new_institutional_roi, condition)
    cnx.close()
def print_uncategorized_rois():
    """Print one line per DVH row whose physician ROI is still 'uncategorized'
    (diagnostic helper; no database writes).
    """
    dvh_data = QuerySQL('DVHs', "physician_roi = 'uncategorized'")
    print('physician, institutional_roi, physician_roi, roi_name')
    for i in range(len(dvh_data.roi_name)):
        uid = dvh_data.study_instance_uid[i]
        physician = get_physician_from_uid(uid)
        roi_name = dvh_data.roi_name[i]
        physician_roi = dvh_data.physician_roi[i]
        institutional_roi = dvh_data.institutional_roi[i]
        print(physician, institutional_roi, physician_roi, roi_name, sep=' ')
# def get_combined_fuzz_score(a, b, simple=None, partial=None):
# a = clean_name(a)
# b = clean_name(b)
#
# if simple:
# w_simple = float(simple)
# else:
# w_simple = 1.
#
# if partial:
# w_partial = float(partial)
# else:
# w_partial = 1.
#
# simple = fuzz.ratio(a, b) * w_simple
# partial = fuzz.partial_ratio(a, b) * w_partial
# combined = float(simple) * float(partial) / 10000.
# return combined
def initialize_roi_preference_file(rel_file_name):
    """Copy the bundled default preference file into PREF_DIR if the user
    does not already have it.

    :param rel_file_name: file name such as 'institutional.roi'
    """
    # FIX: use endswith instead of substring containment -- '.roi' in f also
    # matched names like 'x.roi.bak'.
    roi_files_user = [f for f in os.listdir(PREF_DIR) if f.endswith('.roi')]
    if rel_file_name not in roi_files_user:
        src = os.path.join(SCRIPT_DIR, 'preferences', rel_file_name)
        dest = os.path.join(PREF_DIR, rel_file_name)
        copyfile(src, dest)
| StarcoderdataPython |
126417 | from pathlib import Path
from typing import Tuple, Optional
import streamlit as st
import pandas as pd
from src.utils import io
@st.cache
def load_data(file_name: str, src_dir: str) -> pd.DataFrame:
    """Load a CSV dataset via the project io helpers (cached by Streamlit).

    Raw data ('raw' source dir) has the datetime/session filter applied;
    any other source dir is loaded as-is.
    """
    if str(src_dir) == 'raw':
        return io.load_csv_data(file_name, src_dir, io.filter_dt_session)
    else:
        return io.load_csv_data(file_name, src_dir)
def select_file(src_dir: str, default: Optional[str] = None) -> Tuple[str, pd.DataFrame]:
    """
    Selection widget for choosing the file to work on.
    :param src_dir: sub-directory within the 'data'-directory from where the files should be used
    :param default: preset file
    :return: tuple with name of selected file and loaded file as pandas dataframe
             (empty name and empty frame when nothing valid is selected)
    """
    available_files = io.get_available_datasets(src_dir)
    # FIX: the original used the accidental chained comparison
    # `default is default in available_files`; a plain membership test is
    # equivalent and states the intent.
    default_idx = available_files.index(default) if default in available_files else 0
    file_name = st.sidebar.selectbox("Source file: ", options=available_files, index=default_idx)
    if file_name is None:
        st.error("No valid file selected")
        return '', pd.DataFrame()
    else:
        # FIX: st.spinner is a context manager -- calling it bare never showed
        # a spinner; wrap the load so the UI reflects the work.
        with st.spinner("Loading data " + file_name):
            df = load_data(file_name, src_dir)
        st.write(f"Loaded {len(df)} entries")
        return file_name, df
| StarcoderdataPython |
176630 | <gh_stars>1-10
import os
import json
import requests
from dotenv import load_dotenv
load_dotenv()
# To set your enviornment variables in your terminal run the following line:
# export 'BEARER_TOKEN'='<your_bearer_token>'
bearer_token = os.getenv('TWITTER_API_KEY')
def getComments(tweets):
    """Collect the text of all reply tweets for each conversation id in *tweets*.

    Pages through the v2 recent-search endpoint, bounded by a total request
    budget; returns whatever has been gathered when the budget is hit or a
    request fails.
    """
    MAX_SEARCH_TWT_LIMIT = 700  # total request budget across all conversations
    text = []
    next_token = ''
    count = 0
    for index, tweet in enumerate(tweets):
        if count >= MAX_SEARCH_TWT_LIMIT:
            break
        while True:
            if count >= MAX_SEARCH_TWT_LIMIT:
                break
            if next_token != '':
                url = f'https://api.twitter.com/2/tweets/search/recent?query=conversation_id:{tweet}&max_results=100&next_token={next_token}'
            else:
                url = f'https://api.twitter.com/2/tweets/search/recent?query=conversation_id:{tweet}&max_results=100'
            try:
                response = connect_to_endpoint(url)
            except Exception:
                # Best effort: keep what we have (e.g. rate limited / HTTP error).
                # FIX: narrowed from a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit.
                return text
            # FIX: count EVERY request; previously the counter was skipped on
            # the break paths, so the budget under-counted actual API calls.
            count += 1
            print('tweet-{}_{}_{}'.format(index + 1, tweet, next_token))
            if 'data' in response:
                for twt in response['data']:
                    text.append(twt['text'])
            if 'meta' in response and 'next_token' in response['meta']:
                next_token = response['meta']['next_token']
            else:
                next_token = ''  # reset pagination before the next conversation
                break
    return text
def getTweetComments(data):
    """For each user dict in *data*, page through their recent tweets and dump
    the text of all replies to '<username>.txt' (UTF-8, one reply per line).

    Bounded by a total request budget for the user-timeline endpoint.
    """
    MAX_TWT_LOOKUP = 900  # total request budget for the timeline endpoint
    tweetIDs = {}
    window_count = 0
    for user in data:
        id = user["id"]
        tweetIDs[id] = []
        # FIX: reset pagination per user -- previously the last next_token of
        # one user's timeline leaked into the next user's first request.
        next_token = ''
        if window_count >= MAX_TWT_LOOKUP:
            break
        while True:
            if window_count >= MAX_TWT_LOOKUP:
                break
            if next_token != '':
                url = f'https://api.twitter.com/2/users/{id}/tweets?&max_results=100&pagination_token={next_token}'
            else:
                url = f'https://api.twitter.com/2/users/{id}/tweets?&max_results=100'
            response = connect_to_endpoint(url)
            window_count += 1
            if 'data' in response:
                tweetIDs[id].extend([twt['id'] for twt in response['data']])
            if 'meta' in response and 'next_token' in response['meta']:
                next_token = response['meta']['next_token']
            else:
                break
        text = getComments(tweetIDs[id])
        with open(user['username'] + '.txt', 'w', encoding='utf-8') as outfile:
            for line in text:
                outfile.write("%s\n" % line)
def getUserIDs(usernames):
    """Look up Twitter user objects for a comma-separated list of usernames."""
    url = f'https://api.twitter.com/2/users/by?usernames={usernames}'
    response = connect_to_endpoint(url)
    return response['data']
def bearer_oauth(r):
    """
    Method required by bearer token authentication: attach the bearer token
    and a User-Agent header to the outgoing request.
    """
    r.headers.update({
        "Authorization": f"Bearer {bearer_token}",
        "User-Agent": "v2UserLookupPython",
    })
    return r
def connect_to_endpoint(url):
    """GET *url* with bearer auth and return the parsed JSON body,
    raising on any non-200 status."""
    response = requests.get(url, auth=bearer_oauth)
    if response.status_code != 200:
        raise Exception(
            "Request returned an error: {} {}".format(
                response.status_code,
                response.text,
            )
        )
    return response.json()
def main():
    """Prompt for usernames, then scrape reply text for each user's tweets."""
    users = getUserIDs(input('Enter username: '))
    getTweetComments(users)
if __name__ == "__main__":
main() | StarcoderdataPython |
170397 | <reponame>heavenshell/py-robo-misawa
# -*- coding: utf-8 -*-
"""
robo.tests.test_misawa_handler
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests for robo.handlers.misawa.
:copyright: (c) 2016 <NAME>, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import logging
import requests
from mock import patch
from unittest import TestCase
from robo.robot import Robot
from robo.handlers.misawa import Client, Misawa
def dummy_response(m, filename=None):
    """Configure mock *m* so the patched requests.get returns a canned
    200 Response.

    :param m: the mock object standing in for requests.get
    :param filename: fixture path relative to this file, or None for an
        empty body
    """
    response = requests.Response()
    response.status_code = 200
    if filename is None:
        # FIX: Response._content must be bytes on Python 3 -- an empty str
        # broke response.text/response.json().
        response._content = b''
    else:
        root_path = os.path.dirname(os.path.abspath(__file__))
        file_path = os.path.join(root_path, filename)
        # FIX: read the fixture as bytes for the same reason.
        with open(file_path, 'rb') as f:
            response._content = f.read()
    m.return_value = response
class NullAdapter(object):
    """Minimal adapter stub: records every message passed to say() so tests
    can inspect what the robot would have said."""

    def __init__(self, signal):
        self.signal = signal
        self.responses = []

    def say(self, message, **kwargs):
        """Record *message* and echo it back (extra kwargs are ignored)."""
        self.responses.append(message)
        return message
class TestClient(TestCase):
    # Unit tests for robo.handlers.misawa.Client; requests.get is patched so
    # no test hits the network.
    @classmethod
    def setUpClass(cls):
        # One shared client instance for all tests in this class.
        cls.client = Client()
    @patch('robo.handlers.misawa.requests.get')
    def test_generate_url(self, m):
        """ Client().generate('query') should generate search by keyword. """
        dummy_response(m, './fixture.json')
        # Query is Japanese for "drum" (katakana).
        ret = self.client.generate(u'\u30C9\u30E9\u30E0')
        self.assertTrue(ret.startswith('http://'))
    @patch('robo.handlers.misawa.requests.get')
    def test_generate_url_query_is_none(self, m):
        """ Client().generate(None) should generate random image. """
        dummy_response(m, './fixture.json')
        ret = self.client.generate()
        self.assertTrue(ret.startswith('http://'))
class TestMisawaHandler(TestCase):
    # Integration-style test: the Misawa handler registered on a Robot, with
    # a NullAdapter recording what the robot would have said.
    @classmethod
    def setUpClass(cls):
        # Silence robot logging below ERROR for the test run.
        logger = logging.getLogger('robo')
        logger.level = logging.ERROR
        cls.robot = Robot('test', logger)
        # Wire the Misawa handler into the robot's handler signal.
        misawa = Misawa()
        misawa.signal = cls.robot.handler_signal
        method = cls.robot.parse_handler_methods(misawa)
        cls.robot.handlers.extend(method)
        adapter = NullAdapter(cls.robot.handler_signal)
        cls.robot.adapters['null'] = adapter
    @patch('robo.handlers.misawa.requests.get')
    def test_should_misawa(self, m):
        """ Misawa().get() should search misawa url. """
        dummy_response(m, 'fixture.json')
        # Sending a matching message should make the handler respond with a URL.
        self.robot.handler_signal.send('test misawa')
        response = self.robot.adapters['null'].responses[0]
        self.assertTrue(response.startswith('http://'))
        # Reset recorded responses so later tests start clean.
        self.robot.adapters['null'].responses = []
3383883 | <filename>COMET/misc_plugins/TelegramResponderPlugins/QTCPlugins.py
import re
def do_QTC_Status(value, TelegramResponder): #
"""Status - Gives back the QTC Status"""
for val in value.values():
if re.findall(r"Status\b", val):
text = "Current QTC status: \n\n"
text += "Measurement running: {} \n".format(
TelegramResponder.main.default_values_dict["settings"][
"Measurement_running"
]
)
text += "Measurement progress: {} % \n".format(
TelegramResponder.main.default_values_dict["settings"]["progress"]
)
text += "Current Bias voltage: {} \n".format(
TelegramResponder.main.default_values_dict["settings"].get(
"bias_voltage", 0
)
)
text += "Start time: {} \n".format(
TelegramResponder.main.default_values_dict["settings"]["Start_time"]
)
text += "Est. end time: {} \n".format(
TelegramResponder.main.default_values_dict["settings"]["End_time"]
)
text += "Single Strip scan time: {} s\n".format(
TelegramResponder.main.default_values_dict["settings"][
"strip_scan_time"
]
)
text += "Bad Strips: {} \n".format(
TelegramResponder.main.default_values_dict["settings"]["Bad_strips"]
)
text += "Current filename: {} \n".format(
TelegramResponder.main.default_values_dict["settings"][
"Current_filename"
]
)
text += "Current operator: {} \n".format(
TelegramResponder.main.default_values_dict["settings"][
"Current_operator"
]
)
text += "Sensor type: {} \n".format(
TelegramResponder.main.default_values_dict["settings"]["Current_sensor"]
)
text += "Project: {} \n".format(
TelegramResponder.main.default_values_dict["settings"][
"Current_project"
]
)
text += "Table moving: {} \n".format(
TelegramResponder.main.default_values_dict["settings"][
"table_is_moving"
]
)
text += "Current Switching: {} \n".format(
TelegramResponder.main.default_values_dict["settings"][
"current_switching"
]
)
TelegramResponder.answer += text
| StarcoderdataPython |
1600054 | <gh_stars>0
#!/opt/conda/bin/python
import datetime,time, pickle, os, sys, argparse
import numpy as np
def power_of_two(string):
    """argparse type callback: parse *string* as an int that must be a
    positive power of two.

    :raises argparse.ArgumentTypeError: for non-powers of two, zero and
        negative values.
    """
    n = int(string)
    # FIX: the previous float-log check (n != 2**floor(log2(n))) wrongly
    # accepted 0 (0 == 2**-inf == 0.0) and emitted numpy warnings for n <= 0.
    # A positive power of two has exactly one bit set.
    if n <= 0 or n & (n - 1) != 0:
        msg = "%r is not a power of two" % string
        raise argparse.ArgumentTypeError(msg)
    return n
# Parse the requested maximum germ power from the command line.
parser = argparse.ArgumentParser(description='1 qubit GST experiment generation')
parser.add_argument('--maxlength', metavar='L', type=power_of_two, nargs=1, required=True,
                    help='Max germ power in experimental sequences (must be power of 2)')
# NOTE(review): nargs=1 makes args.maxlength a one-element list; the numpy
# expressions below happen to broadcast over it -- confirm this is intended.
args = parser.parse_args()
import pygsti
from pygsti.construction import std1Q_XYI
# Standard single-qubit X/Y/I model pack: target gate set, fiducials, germs.
gs1Q = std1Q_XYI.gs_target
fiducials1Q = std1Q_XYI.fiducials
germs1Q = std1Q_XYI.germs
#effects1Q = std1Q_XYI.effectStrs
#maxLengths1Q = [0,1,2,4,8,16,32,64,128,256,512,1024]
# Germ powers 1, 2, 4, ..., up to the requested --maxlength (a power of two).
maxLengths1Q = list(map(int,2**np.arange(0,int(np.log(args.maxlength)/np.log(2)+1))))
# Build the full LSGST experiment list (prep fiducial + germ power + meas
# fiducial for every combination) and write it to disk.
listOfExperiments = pygsti.construction.make_lsgst_experiment_list( gateLabels = list(gs1Q.gates.keys()),
                                                                    prepStrs = fiducials1Q,
                                                                    effectStrs = fiducials1Q,
                                                                    germList = germs1Q,
                                                                    maxLengthList = maxLengths1Q)
pygsti.io.write_gatestring_list('gst-1q-1024.txt', listOfExperiments)
83509 | from __future__ import unicode_literals
import codecs
def encode_hex(value):
    """Return *value* (a byte string) as a '0x'-prefixed lowercase hex string."""
    hex_bytes = codecs.encode(value, 'hex')
    return '0x' + codecs.decode(hex_bytes, 'utf8')
def decode_hex(value):
    """Decode a hex string (with or without a '0x' prefix) into bytes."""
    # rpartition keeps everything after the last 'x'; for an unprefixed
    # string the whole input lands in the third slot.
    hex_part = value.rpartition('x')[2]
    return codecs.decode(hex_part, 'hex')
| StarcoderdataPython |
3238022 | #!/usr/bin/env python3
'''
gpg/card> admin
Admin commands are allowed
gpg/card> passwd
gpg: OpenPGP card no. D2760001240102010006061158870000 detected
1 - change PIN
2 - unblock PIN
3 - change Admin PIN
4 - set the Reset Code
Q - quit
Your selection? 1
PIN changed.
1 - change PIN
2 - unblock PIN
3 - change Admin PIN
4 - set the Reset Code
Q - quit
Your selection? q
gpg/card> q
'''
import re
import sys
import pexpect
import os
from random import randint
from getpass import getpass
from time import sleep
import datetime
# Factory-default OpenPGP card PINs; the admin PIN is rotated to a random
# 8-digit value further below.
oldpin = '123456'
newpin = '123456'
oldadminpin = '12345678'
adminpin = oldadminpin
newadminpin = str(randint(1,99999999)).zfill(8)
cardserial = None
# All key material/owner metadata comes from the environment written by the
# key-generation step ('. keygen.env'); abort early if any is missing.
try:
    passphrase = os.environ['passphrase']
    keyid = os.environ['KEYID']
    lname = os.environ['lname']
    fname = os.environ['fname']
    email = os.environ['email']
    backupdir = os.environ['BACKUPDIR']
except KeyError:
    print("Could not find requirement environment variables...")
    print("Please source the key-generated environment variables: '. keygen.env'")
    exit(1)
# LOG FILE
# Binary-mode pexpect transcript of every card interaction (contains PINs!).
logfile = open('%s/loadkeys.log'%(backupdir),'wb')
def logtitle(title):
    """Write a visually separated section banner for *title* into the global log file."""
    rule = "##################################################################\n"
    logfile.write(("\n\n\n" + rule).encode())
    logfile.write(("### %s\n"%title).encode())
    logfile.write(rule.encode())
# --- Pre-flight: an attached Infinite Noise TRNG blocks Yubikey programming
# over USB, so refuse to continue while one is plugged in.
logtitle("TRNG check: infnoise")
p = pexpect.pty_spawn.spawn('infnoise -l')
p.logfile = logfile
ret = p.expect('ID:')
ret = p.read()
# A non-empty remainder after the Serial: prefix means a TRNG is present.
if (re.sub(".*Serial:[ ]?[']?",'',str(ret.strip())) != ""):
    print("Please disconnect your Infnoise TRNG, as it prevents Yubikey programming")
    exit(0)
#cat >> /offline/"${lname},${fname}.csv" <<EOF
#"${email}","${lname}","${fname}",${KEYID},${adminpin},${serialnum},${date}
# Record the freshly generated admin PIN in the transcript for recovery.
logtitle("New Admin Pin Code")
logfile.write(("AdminPin:%s"%newadminpin).encode())
# reset the card first
# Wipes the OpenPGP applet back to factory settings (keys and PINs).
logtitle("ykman card reset")
p = pexpect.pty_spawn.spawn('ykman openpgp reset')
p.logfile = logfile
ret = p.expect('restore factory settings')
p.sendline('y')
ret = p.expect(['Success!','Error: No YubiKey detected!'])
if ret == 0:
    print("Yubikey reset successfully")
else:
    print("Couldn't reset openpgp... is this a bare metal host and is the yubikey present?")
# set the touch policies
# Require a physical touch for signature, encryption and authentication
# operations, and make the policy permanent ("fixed" cannot be relaxed
# later without a full reset).
print("Hardening touch policies")
logtitle("ykman touch policies to fixed - sig")
p = pexpect.pty_spawn.spawn('ykman openpgp touch --admin-pin %s -f sig fixed'%(adminpin))
p.logfile = logfile
ret = p.expect(['Touch policy successfully set','Error: No YubiKey detected!'])
if ret == 0:
    print("Successfully Updated a Yubikey touch policy")
else:
    print("Failed to update a Yubikey touch policy")
logtitle("ykman touch policies to fixed - encrypt")
p = pexpect.pty_spawn.spawn('ykman openpgp touch --admin-pin %s -f enc fixed'%(adminpin))
p.logfile = logfile
ret = p.expect(['Touch policy successfully set','Error: No YubiKey detected!'])
if ret == 0:
    print("Successfully Updated a Yubikey touch policy")
else:
    print("Failed to update a Yubikey touch policy")
logtitle("ykman touch policies to fixed - auth")
p = pexpect.pty_spawn.spawn('ykman openpgp touch --admin-pin %s -f aut fixed'%(adminpin))
p.logfile = logfile
ret = p.expect(['Touch policy successfully set','Error: No YubiKey detected!'])
if ret == 0:
    print("Successfully Updated a Yubikey touch policy")
else:
    print("Failed to update a Yubikey touch policy")
'''
ykman openpgp touch --admin-pin 12345678 -f sig on
ykman openpgp touch --admin-pin 12345678 -f enc on
ykman openpgp touch --admin-pin 12345678 -f aut on
'''
# Prompt twice for the user's new PIN; only adopt it when both entries match,
# otherwise the factory default set above is kept.
testpin = getpass('Please Enter your *new* PIN: ')
testpin2 = getpass('Please Reenter your *new* PIN: ')
if testpin == testpin2:
    newpin = testpin
logtitle("gpg card-status")
print("Checking card status...")
# we run this twice... not a typo....
p = pexpect.pty_spawn.spawn('gpg --card-status')
p.logfile = logfile
# BUG FIX: the expect() result was not assigned here, so the `ret` tested
# below was a stale value left over from the touch-policy step.
ret = p.expect(['Serial number \.+: ','card not available'])
if ret == 1:
    print("No Yubikey/OpenPGP card found, exiting")
    exit(0)
elif ret == 0:
    ret = p.read()
    # Strip everything after the first CR, then non-digits, leaving the serial.
    cardserial = re.sub("\\\\r.*",'',str(ret))
    cardserial = re.sub("[^[0-9]]*",'',cardserial)
    print("Found Yubikey/OpenPGP card (#%s)"%(cardserial))
sleep(0.2)
# we run this twice... not a typo....!!!
p = pexpect.pty_spawn.spawn('gpg --card-status')
p.logfile = logfile
# BUG FIX: same missing assignment as above.
ret = p.expect(['Serial','card not available'])
if ret == 1:
    print("No Yubikey/OpenPGP card found, exiting")
    exit(0)
elif ret == 0:
    ret = p.read()
#cardserial = re.sub(".*Serial number \.+: ",'',str(ret.strip()))
#print("Found Yubikey/OpenPGP card (#%s)"%(cardserial))
# set the new pins
# Drives `gpg --card-edit` interactively: admin mode -> passwd menu ->
# change user PIN (option 1) and admin PIN (option 3), then programs the
# cardholder identity (name, language, login).
logtitle("gpg card-edit new pins")
print("Programming new pin codes into Yubikey/OpenPGP card")
p = pexpect.pty_spawn.spawn('gpg --card-edit --pinentry-mode loopback')
p.logfile = logfile
p.expect('gpg/card>')
p.sendline('admin')
p.expect('are allowed')
p.sendline('passwd')
ret = p.expect(['Your selection?','No such device'])
if ret == 1:
    # Card vanished mid-session; quit both menus and bail out.
    print('Device not connected!')
    p.sendline('q')
    p.expect('gpg/card>')
    p.sendline('q')
    sys.exit(1)
# Option 1: change the user PIN (old, new, confirm).
p.sendline('1')
p.expect('Enter passphrase')
p.sendline(oldpin)
p.sendline(newpin)
p.sendline(newpin)
p.expect('PIN changed')
p.expect('Your selection?')
# Option 3: change the admin PIN to the random value generated earlier.
p.sendline('3')
p.expect('Enter passphrase')
p.sendline(oldadminpin)
p.sendline(newadminpin)
p.sendline(newadminpin)
p.expect('PIN changed')
p.sendline('q')
adminpin = newadminpin
p.expect('gpg/card>')
print("Programming user identity (%s, %s: %s)."%(lname,fname,email))
p.sendline('name')
p.expect('Cardholder\'s surname:')
p.sendline(lname)
p.expect('Cardholder\'s given name:')
p.sendline(fname)
# gpg may re-prompt for the admin PIN before accepting the change.
ret = p.expect(['gpg/card','Enter passphrase'])
if ret == 1:
    p.sendline(adminpin)
    p.expect('gpg/card>')
p.sendline('lang')
p.expect('Language preferences:')
p.sendline('en')
ret = p.expect(['gpg/card','Enter passphrase'])
if ret == 1:
    p.sendline(adminpin)
    p.expect('gpg/card>')
p.sendline('login')
p.expect('account name')
p.sendline(email)
p.expect('gpg/card>')
p.sendline('q')
'''
gpg> key 1
sec rsa4096/0x8191ACCD34BE4A72
created: 2019-01-10 expires: never usage: SCEA
trust: ultimate validity: ultimate
ssb* rsa4096/0xB412313296D2E621
created: 2019-01-10 expires: never usage: S
ssb rsa4096/0xD7A205F011EBE5BC
created: 2019-01-10 expires: never usage: E
ssb rsa4096/0xC3FFBB7859ADA9AD
created: 2019-01-10 expires: never usage: A
[ultimate] (1). b, a (Automatic Boot-GPG Generated (v1)) <a@b>
gpg> keytocard
Please select where to store the key:
(1) Signature key
(3) Authentication key
Your selection? 1
'''
# program in the keys
# Moves the three subkeys onto the card: key 1 -> signature slot (1),
# key 2 -> encryption slot (2), key 3 -> authentication slot (3).
# BUG FIX throughout this section: several lines read
# `ret == p.expect([...])` — a comparison whose result was discarded — so the
# following `if ret == 0:` tested a stale value.  They are now assignments,
# matching every other expect in this script.
print("Saving the Private key into Yubikey/Openpgp card")
logtitle("gpg edit-key load keys to yubikey")
p = pexpect.pty_spawn.spawn('gpg --pinentry-mode loopback --edit-key %s'%(keyid))
p.logfile = logfile
p.expect('gpg>')
# --- Subkey 1: signing key -> card slot 1
p.sendline('key 1')
p.expect('gpg>')
p.sendline('keytocard')
ret = p.expect(['Your selection?','No such device'])
if ret == 1:
    print('Device not connected!')
    p.sendline('q')
    sys.exit(1)
p.sendline('1')
ret = p.expect(['Enter passphrase:','Replace existing key?'])
if ret==1:
    p.sendline('y')
    p.expect('Enter passphrase:')
p.sendline(passphrase)
ret = p.expect(['Enter passphrase:','gpg>'])
if ret==0:
    p.sendline(adminpin)
ret = p.expect(['Enter passphrase:','gpg>','SCEA'])
if ret == 0:
    p.sendline(adminpin)
    p.expect('gpg>')
p.sendline('key 1')
print(" key 1... Signing")
p.expect('gpg>')
# --- Subkey 2: encryption key -> card slot 2
p.sendline('key 2')
p.expect('gpg>')
p.sendline('keytocard')
p.expect('Your selection?')
p.sendline('2')
ret = p.expect(['Enter passphrase:','Replace existing key?'])
if ret==1:
    p.sendline('y')
    p.expect('Enter passphrase:')
p.sendline(passphrase)
ret = p.expect(['Enter passphrase:','gpg>','SCEA'])
if ret == 0:
    p.sendline(adminpin)
ret = p.expect(['Enter passphrase:','gpg>','SCEA'])
## if NOT gpg, always send the adminpin?... input looks like '\r \r '
if ret == 0:
    p.sendline(adminpin)
    p.expect('gpg>')
p.sendline('key 2')
print(" key 2... Encryption")
p.expect('gpg>')
# --- Subkey 3: authentication key -> card slot 3
p.sendline('key 3')
p.expect('gpg>')
p.sendline('keytocard')
p.expect('Your selection?')
p.sendline('3')
ret = p.expect(['Enter passphrase:','Replace existing key?'])
if ret==1:
    p.sendline('y')
    p.expect('Enter passphrase:')
p.sendline(passphrase)
ret = p.expect(['Enter passphrase:','gpg>'])
if ret==0:
    p.sendline(adminpin)
ret = p.expect(['Enter passphrase:','gpg>','SCEA'])
if ret == 0:
    p.sendline(adminpin)
    p.expect('gpg>')
p.sendline('key 3')
print(" key 3... Authentication")
p.expect('gpg>')
p.sendline('q')
p.expect('Save changes')
p.sendline('y')
# Append a recovery record (identity, key id, admin PIN, card serial, time).
with open("%s/%s,%s.csv"%(backupdir,lname,fname),'a+') as fw:
    strdate = datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S%Z')
    fw.write('"%s","%s","%s","%s","%s","%s","%s"\n'%(email,lname,fname,keyid,adminpin,cardserial,strdate))
print("Changes saved, done!")
97495 | <gh_stars>0
__author__ = 'hs634'
class Node():
    """A single binary-tree node holding a value and two child links."""
    def __init__(self, v):
        self.val = v
        self.left = None
        self.right = None


class BinaryTree():
    """Binary search tree supporting insert, in-place mirroring and in-order printing."""
    def __init__(self):
        self.root = None

    def insert(self, root, v):
        """Insert *v* under *root* in BST order; return the (possibly new) subtree root."""
        if not root:
            return Node(v)
        if v < root.val:
            root.left = self.insert(root.left, v)
        else:
            root.right = self.insert(root.right, v)
        return root

    def mirror(self, root):
        """Recursively swap every node's children, mirroring the tree in place."""
        if not root:
            return
        self.mirror(root.left)
        self.mirror(root.right)
        root.left, root.right = root.right, root.left

    def print_tree(self, root):
        """Print an in-order traversal on a single line."""
        if not root:
            return
        self.print_tree(root.left)
        # BUG FIX: the original used Python 2 print statements
        # (`print root.val,`), a syntax error under Python 3.
        print(root.val, end=' ')
        self.print_tree(root.right)
class BinaryTreeRunner(object):
    """Builds a sample BST (5 / 3,7 / 2,4,6,8) and demonstrates mirroring it."""
    def __init__(self):
        self.btree = BinaryTree()
        self.btree.root = self.btree.insert(None, 5)
        root_3 = self.btree.insert(self.btree.root, 3)
        root_7 = self.btree.insert(self.btree.root, 7)
        self.btree.insert(root_3, 2)
        self.btree.insert(root_3, 4)
        self.btree.insert(root_7, 6)
        self.btree.insert(root_7, 8)

    def mirror(self):
        """Print the tree, mirror it in place, print it again."""
        # BUG FIX: Python 2 print statements replaced with py3 print() calls.
        print("Before")
        self.btree.print_tree(self.btree.root)
        self.btree.mirror(self.btree.root)
        print("After")
        self.btree.print_tree(self.btree.root)

    @staticmethod
    def main():
        BinaryTreeRunner().mirror()


# Guarded so importing this module no longer runs the demo as a side effect.
if __name__ == "__main__":
    BinaryTreeRunner.main()
# Sample output — tree before and after mirroring (pasted program output,
# commented out so it no longer breaks the module syntax):
#        5                  5
#      3   7      ->      7   3
#     2 4 6 8            8 6 4 2
1743989 | <filename>cairis/gui/PersonaEnvironmentNotebook.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
from cairis.core.armid import *
from DimensionListCtrl import DimensionListCtrl
from BVNarrativeTextCtrl import BVNarrativeTextCtrl
__author__ = '<NAME>'
class SummaryPage(wx.Panel):
    """Summary tab of a persona environment: direct/indirect flag plus role list."""
    def __init__(self, parent, dp):
        wx.Panel.__init__(self, parent)
        self.dbProxy = dp
        mainSizer = wx.BoxSizer(wx.VERTICAL)

        # Checkbox marking the persona as direct (default) or indirect.
        directGroup = wx.StaticBox(self, -1, 'Direct/Indirect Persona')
        directGroupSizer = wx.StaticBoxSizer(directGroup, wx.HORIZONTAL)
        mainSizer.Add(directGroupSizer, 0, wx.EXPAND)
        self.directCtrl = wx.CheckBox(self, PERSONA_CHECKDIRECT_ID)
        self.directCtrl.SetValue(True)
        directGroupSizer.Add(self.directCtrl, 0, wx.EXPAND)

        # Editable list of roles the persona fulfils.
        roleGroup = wx.StaticBox(self)
        roleGroupSizer = wx.StaticBoxSizer(roleGroup, wx.HORIZONTAL)
        mainSizer.Add(roleGroupSizer, 1, wx.EXPAND)
        self.roleList = DimensionListCtrl(self, PERSONA_LISTROLES_ID, wx.DefaultSize, 'Role', 'role', self.dbProxy)
        roleGroupSizer.Add(self.roleList, 1, wx.EXPAND)

        self.SetSizer(mainSizer)
class NarrativePage(wx.Panel):
    """Narrative tab of a persona environment: one free-text narrative control."""
    def __init__(self, parent):
        wx.Panel.__init__(self, parent)
        outerSizer = wx.BoxSizer(wx.VERTICAL)
        narrativeGroup = wx.StaticBox(self, -1)
        narrativeGroupSizer = wx.StaticBoxSizer(narrativeGroup, wx.HORIZONTAL)
        outerSizer.Add(narrativeGroupSizer, 1, wx.EXPAND)
        self.narrativeCtrl = BVNarrativeTextCtrl(self, PERSONA_TEXTNARRATIVE_ID)
        narrativeGroupSizer.Add(self.narrativeCtrl, 1, wx.EXPAND)
        self.SetSizer(outerSizer)
class PersonaEnvironmentNotebook(wx.Notebook):
    """Two-tab notebook (Summary, Narrative) editing one persona environment."""
    def __init__(self, parent, dp):
        wx.Notebook.__init__(self, parent, PERSONA_NOTEBOOKENVIRONMENT_ID)
        summaryPage = SummaryPage(self, dp)
        narrativePage = NarrativePage(self)
        self.AddPage(summaryPage, 'Summary')
        self.AddPage(narrativePage, 'Narrative')
| StarcoderdataPython |
1794353 | <reponame>yesitsme007/potter-controller
#!/usr/bin/env python3
import pathlib
import util
import os
from github.util import GitHubRepositoryHelper
# Layout conventions of the CI pipeline: each task writes its result to a
# file named 'out'; the repo's version is read from VERSION.
OUTPUT_FILE_NAME = 'out'
VERSION_FILE_NAME = 'VERSION'
# Required pipeline environment (util.check_env fails loudly when unset).
repo_owner_and_name = util.check_env('SOURCE_GITHUB_REPO_OWNER_AND_NAME')
repo_dir = util.check_env('MAIN_REPO_DIR')
lint_path = util.check_env('LINT_PATH')
backend_test_path = util.check_env('BACKEND_TEST_PATH')
lint_path = pathlib.Path(lint_path).resolve()
backend_test_path = pathlib.Path(backend_test_path).resolve()
repo_owner, repo_name = repo_owner_and_name.split('/')
repo_path = pathlib.Path(repo_dir).resolve()
lint_path = lint_path / OUTPUT_FILE_NAME
backend_test_path = backend_test_path / OUTPUT_FILE_NAME
version_file_path = repo_path / VERSION_FILE_NAME
version_file_contents = version_file_path.read_text()
# GitHub credentials come from the pipeline config factory.
cfg_factory = util.ctx().cfg_factory()
github_cfg = cfg_factory.github('github_com')
github_repo_helper = GitHubRepositoryHelper(
    owner=repo_owner,
    name=repo_name,
    github_cfg=github_cfg,
)
# The VERSION file content doubles as the release tag.
gh_release = github_repo_helper.repository.release_from_tag(version_file_contents)
gh_release.upload_asset(
    content_type='text/plain',
    name=f'linting-result-{version_file_contents}.txt',
    asset=lint_path.open(mode='rb'),
)
gh_release.upload_asset(
    content_type='text/plain',
    name=f'backend-test-result-{version_file_contents}.txt',
    asset=backend_test_path.open(mode='rb'),
)
# Integration-test output is optional; upload it only when its path is set.
try:
    os.environ['INTEGRATION_TEST_PATH']
except KeyError:
    print("No integration test output path found. Output will not be added to release")
else:
    integration_test_path = util.check_env('INTEGRATION_TEST_PATH')
    integration_test_path = pathlib.Path(integration_test_path).resolve()
    integration_test_path = integration_test_path / OUTPUT_FILE_NAME
    gh_release.upload_asset(
        content_type='text/plain',
        name=f'integration-test-result-{version_file_contents}.txt',
        asset=integration_test_path.open(mode='rb'),
    )
100035 | import os
import webbrowser
import sys
from colorama import Fore, Back, Style
from colorama import init
init()
#os.environ["HTTPS_PROXY"] = "http://username:pass@192.168.1.107:3128"
import requests
from bs4 import BeautifulSoup
import time
headers = {'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'}
#Take query and convert into search parameter
#query=input('Ask your Quora que: ')
arg = sys.argv
query = ''
for i in range(1,len(arg)-2):
query=query+arg[i]
query=query+'+'
print(query)
#retrieve query
url='https://www.quora.com/search?q='+query
source_code = requests.get(url, headers=headers, timeout=15)
plain_text=source_code.text
soup=BeautifulSoup(plain_text,"html.parser")
#get the relevant questions list
que_list=soup.findAll('a',{'class':'question_link'})
hrefs=list(que_list)
#convert into user-friendly string
print(Fore.GREEN+' << Showing some relevant questions asked >>')
for i in range(len(que_list)):
que_list[i]['href']=que_list[i]['href'].replace('-',' ')
que_list[i]['href']=que_list[i]['href'].replace('/','')
print(str(i+1)+'. '+que_list[i]['href'])
print(' <-------------------------------/-------------------------------->')
#get_inp=input('Select a question from the above > ')
get_inp = arg[len(arg)-2]
#retrieve the page with that answer
# Rebuild the question slug from the readable text and fetch its page.
url='https://www.quora.com/'+hrefs[int(get_inp)-1]['href'].replace(' ','-')
try:
    source_code = requests.get(url, timeout=15)
    plain_text=source_code.text
    soup=BeautifulSoup(plain_text,"html.parser")
    # The first answer header; its parent's next sibling holds the answer body.
    ans=soup.findAll('div',{'class':'AnswerHeader ContentHeader'})
    header=ans[0].text
    nans=ans[0].parent
    mans=nans.next_sibling
    #man=mans.findNextSibling()
    text=mans.text
    # Trim the answer at the trailing "... Upvotes" marker.
    pos=text.find('Upvotes')
    uf=text[0:pos+7]
    print(Fore.BLUE+header)
    print(uf)
except Exception as e:
    print(e)
    print('Sorry, this que hasn\'t been answered.')
print(' <----------------------------------/------------------------------->')
#a=input('Head over to the link for more answers?(y/n) ')
a= arg[len(arg)-1]
# BUG FIX: the original used `a is 'y'`, an identity comparison against a
# string literal that only works by CPython interning accident; use equality.
if a == 'y':
    webbrowser.open(url)
    time.sleep(2)
exit()
| StarcoderdataPython |
6531 | <gh_stars>1-10
# Generated by Django 3.0.7 on 2020-07-27 19:23
import build.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the log_autocomplete_record table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='AutoCompleteRecord',
            fields=[
                # Custom unix-timestamp columns from build.models (nullable).
                ('updated_at', build.models.UnixTimestampField(auto_created=True, null=True)),
                ('created_at', build.models.UnixTimestampField(auto_created=True, null=True)),
                ('log_autocomplete_record_id', models.AutoField(primary_key=True, serialize=False)),
                ('type', models.CharField(max_length=50)),
                ('value', models.CharField(max_length=300)),
            ],
            options={
                'db_table': 'log_autocomplete_record',
            },
        ),
    ]
| StarcoderdataPython |
1700340 | <filename>oauthost/tests/conftest.py<gh_stars>1-10
from pytest_djangoapp import configure_djangoapp_plugin
# pytest-djangoapp bootstrap: exposes a pytest plugin preconfigured with the
# minimal Django settings (URLconf, sessions app, auth/session middleware)
# that the oauthost test suite needs.
pytest_plugins = configure_djangoapp_plugin(
    {
        'ROOT_URLCONF': 'oauthost.urls',
    },
    extend_INSTALLED_APPS=[
        'django.contrib.sessions',
    ],
    extend_MIDDLEWARE=[
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
    ],
)
| StarcoderdataPython |
3315320 | <filename>fython/lexem/rparx.py
from fython.unit import *
class RParX(Unit):
    # Lexem unit bound to the "rparx" token — presumably a right
    # parenthesis; confirm against fython's lexem definitions.
    unit = l.rparx
164883 | import json
import SalesforceMetadataModule as smm
import dicttoxml
from xml.dom.minidom import parseString
from fulcrum import Fulcrum
import re
import collections
import time
import datetime
import requests
import base64
import string
import random
from simple_salesforce import Salesforce
from simple_salesforce import SalesforceLogin
from simple_salesforce import SFType
# Prefix prepended to every generated Salesforce custom-field API name.
_sfdcPrefix = 'f_'
# Salesforce credentials — blank here; filled in at deploy time.
_sfdcUsername = ""
_sfdcPassword = ""
_sfdcToken = ""
_sfdcDomain = 'test'
# Set _sfdcSandbox to False to run in production
_sfdcSandbox = True
# Sentinel meaning "no pending Date field awaiting a Time pairing".
_isDateFieldDefault = False
# Fulcrum API access.
_fulcrumXApiToken = ""
_fulcrumBaseURL = 'https://api.fulcrumapp.com/api/v2/'
__author__ = "<NAME>"
__copyright__ = "Copyright 2019, <NAME>, Burning Man Project"
__credits__ = ["<NAME>"]
__version__ = "0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>, <EMAIL>"
__status__ = "Development"
__Changelog01__ = "Initial Release"
class FulcrumRecordToSalesforceRecord:
    """Mirrors Fulcrum records (fields, photos, videos, audio, signatures) into Salesforce."""
    # NOTE(review): these class attributes execute at import time and perform
    # a live Salesforce login plus Fulcrum client construction.
    _sfdcSession_id, _sfdcInstance = SalesforceLogin(username=_sfdcUsername, password=_sfdcPassword, security_token=_sfdcToken, domain=_sfdcDomain)
    sfdc = Salesforce(instance=_sfdcInstance, session_id=_sfdcSession_id)
    fulcrum = Fulcrum(key=_fulcrumXApiToken)
    fulcrumHeaders = {'X-ApiToken': _fulcrumXApiToken}
    def sf_api_call(self, action, parameters = {}, method = 'get', data = {}, multipart=False, boundary=None):
        """
        Helper function to make calls to Salesforce REST API.
        Parameters: action (the URL), URL params, method (get, post or patch), data for POST/PATCH.
        """
        # NOTE(review): mutable default arguments ({}) are shared across
        # calls; harmless only while callers never mutate them.
        headers = {}
        if multipart == False:
            headers = {
                'Content-type': 'application/json',
                'Accept-Encoding': 'gzip',
                'Authorization': 'OAuth ' + self._sfdcSession_id,
            }
        else:
            # Multipart uploads must advertise their boundary in Content-type.
            headers = {
                'Content-type': 'multipart/form-data; boundary='+boundary,
                'Accept-Encoding': 'gzip',
                'Authorization': 'OAuth ' + self._sfdcSession_id,
            }
        if method == 'get':
            r = requests.request(method, 'https://'+self._sfdcInstance+action, headers=headers, params=parameters, timeout=30)
        elif method in ['post', 'patch']:
            # POST/PATCH send *data* as the JSON body (or multipart payload).
            r = requests.request(method, 'https://'+self._sfdcInstance+action, headers=headers, json=data, params=parameters, timeout=10)
        else:
            # other methods not implemented in this example
            raise ValueError('Method should be get or post or patch.')
        #print('Debug: API %s call: %s' % (method, r.url) )
        if r.status_code < 300:
            if method=='patch':
                # Successful PATCH returns 204 No Content — no body to parse.
                return None
            else:
                return r.json()
        else:
            raise Exception('API error when calling %s : %s' % (r.url, r.content))
# Generates a random string
def id_generator(self, size=32, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
#checks to see if a key exists in a dictonary
def checkKey (self, dictionary, key):
try:
if key in dictionary.keys():
return True
else:
return False
except KeyError:
return False
    ## pass JSON Directly
    def composite_salesforce_create (self, objectId, records):
        # POST a composite/tree insert for *objectId*; *records* must be a
        # ready-made composite tree payload.
        response = self.sfdc.restful (method='POST', path='composite/tree/'+objectId, json=records)
        return response
    #must have Salesforce record IDs
    def composite_salesforce_update (self, objectId, extCustomField, extIdValue, records):
        # PATCH a composite sobjects update.  NOTE(review): objectId,
        # extCustomField and extIdValue are accepted but unused — the record
        # ids must already be embedded in *records*.
        response = self.sfdc.restful (method='PATCH', path='composite/sobjects', json=records)
        return response
def composite_salesforce_request (self, objectId, extCustomField, extIdValue, records):
response = self.sfdc.restful (method='POST', path='composite/sobjects/' + objectId, json=records)
return reponse
    # Data should either be a single JSON encapsulating base64 encoded blob up to 34MB
    # Or a multipart message encapsulating a base64 encoded blob up to 2GB
    # https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/dome_sobject_insert_update_blob.htm
    def contentVersion_salesforce_create (self, data):
        # Create a ContentVersion from a single JSON payload (<= ~34MB base64 body).
        return self.sf_api_call('/services/data/v40.0/sobjects/ContentVersion', method="post", data=data)
    def contentVersion_2GB_salesforce_create (self, data, boundary):
        # Create a ContentVersion via a multipart request (payloads up to 2GB).
        return self.sf_api_call('/services/data/v40.0/sobjects/ContentVersion', method="post", data=data, multipart=True, boundary=boundary)
    # Data should be an ID
    def contentVersion_salesforce_get (self, data):
        # Fetch a ContentVersion record by its Salesforce Id.
        return self.sf_api_call('/services/data/v40.0/sobjects/ContentVersion/%s' % data)
    def contentDocumentLink_salesforce_create (self, data):
        # Link an uploaded ContentDocument to a record; *data* carries
        # ContentDocumentId, LinkedEntityId and ShareType.
        return self.sf_api_call('/services/data/v40.0/sobjects/ContentDocumentLink', method = 'post', data=data)
def create_output_json (self, recordJson):
recordJson = json.dumps (recordJson)
recordJson = recordJson[1:-1]
recordJson = recordJson.replace('null', '')
return recordJson
    def process_generate_field (self, fieldId, fieldValue, fieldType='Data'):
        # Map one Fulcrum field value onto its generated Salesforce custom
        # field name.  Latitude/Longitude use the geolocation compound-field
        # '__s' suffix; every other value goes into a plain '__c' field.
        print ' ' + str(fieldType) + ': ' + str(_sfdcPrefix) + str(fieldId) + '__c:' + str(fieldValue)
        if fieldType == 'Latitude' or fieldType == 'Longitude':
            return {_sfdcPrefix + fieldId + '__' + fieldType +'__s' : fieldValue}
        else:
            return {_sfdcPrefix + fieldId + '__c' : fieldValue}
    def upload_2GB_file_to_salesforce_and_attach_to_record (self, recordId, fileTitle, fileDescription, fileName, fileContents):
        # Upload a large file (up to 2GB) as a multipart ContentVersion, then
        # link the resulting ContentDocument to *recordId*.
        boundary = self.id_generator ()
        fileContents = base64.b64encode(fileContents)
        #Multi part request can handle 2GB Max
        # NOTE(review): ContentVersionMetadata is built but never used.
        ContentVersionMetadata = {
            'Title':fileTitle,
            'Description':fileDescription,
            'PathOnClient':fileName,
        }
        # Hand-built multipart/form-data body: a JSON metadata part followed
        # by the base64-encoded VersionData part.
        ContentVersionData = """--"""+boundary+"""
Content-Disposition: form-data; name="entity_content";
Content-Type: application/json
{
"Title" : """+'"'+fileTitle+'"'+""",
"Description" : """+'"'+fileDescription+'"'+""",
"PathOnClient" : """+'"'+fileName+'"'+"""
}
--"""+boundary+"""
Content-Disposition: form-data; name="VersionData"; filename=""" + '"' + fileName + '"' +"""
Content-Type: application/octet-stream
""" + fileContents + """
--"""+boundary+"""--"""
        # 1: Insert the Content Document
        ContentVersion = self.contentVersion_2GB_salesforce_create (data=ContentVersionData, boundary=boundary)
        ContentVersionId = ContentVersion.get('id')
        # 2: Get the ContentDocumentId from the just inserted ContentVersion
        ContentVersion = self.contentVersion_salesforce_get (ContentVersionId)
        ContentDocumentId = ContentVersion.get('ContentDocumentId')
        # 3: Create a ContentDocumentLink between the ContentDocumentId and the Record
        contentDocumentLinkMetadata = {
            'ContentDocumentId': ContentDocumentId,
            'LinkedEntityId': recordId,
            'ShareType': 'V'
        }
        ContentDocumentLink = self.contentDocumentLink_salesforce_create (contentDocumentLinkMetadata)
        return {'ContentVersionId' : ContentVersionId, 'ContentDocumentId' : ContentDocumentId, 'ContentDocumentLink' : ContentDocumentLink}
    def upload_file_to_salesforce_and_attach_to_record (self, recordId, fileTitle, fileDescription, fileName, fileContent, fulcrumId):
        # Upload a small file (<= ~34MB) as a base64 ContentVersion tagged
        # with its Fulcrum id, then link the ContentDocument to *recordId*.
        fileContent = base64.b64encode(fileContent)
        #Single part request can handle ~34MB Max
        ContentVersionData = {
            'Title':fileTitle,
            'Description':fileDescription,
            'PathOnClient':fileName,
            'VersionData':fileContent,
            _sfdcPrefix + 'Fulcrum_Id__c':fulcrumId,
            # _sfdcPrefix + 'Location__c':fulcrumLocation
        }
        # 1: Insert the Content Document
        ContentVersion = self.contentVersion_salesforce_create (data=ContentVersionData)
        ContentVersionId = ContentVersion.get('id')
        # 2: Get the ContentDocumentId from the just inserted ContentVersion
        ContentVersion = self.contentVersion_salesforce_get (ContentVersionId)
        ContentDocumentId = ContentVersion.get('ContentDocumentId')
        # 3: Create a ContentDocumentLink between the ContentDocumentId and the Record
        contentDocumentLinkMetadata = {
            'ContentDocumentId': ContentDocumentId,
            'LinkedEntityId': recordId,
            'ShareType': 'V'
        }
        ContentDocumentLink = self.contentDocumentLink_salesforce_create (contentDocumentLinkMetadata)
        return {'ContentVersionId' : ContentVersionId, 'ContentDocumentId' : ContentDocumentId, 'ContentDocumentLink' : ContentDocumentLink}
    def process_file_fields (self, record, recordId):
        # Walk every form value of *record* and process any file-type fields,
        # collecting the upload descriptors that come back as dicts.
        # NOTE(review): relies on detect_file_field_type_and_process_field,
        # which is not defined in this part of the file — confirm it exists.
        #print record
        newFiles = []
        for fieldId in record['form_values']:
            files = self.detect_file_field_type_and_process_field (fieldId, record, recordId=recordId)
            #print files
            if isinstance (files, dict):
                newFiles.append (files)
        return newFiles
def process_video_field (self, fieldValue, recordId):
print 'Downloading Video File From Fulcrum ... ' + fieldValue['video_id']
baseurl = _fulcrumBaseURL + 'videos/' + fieldValue['video_id']
blob = requests.request ('GET', baseurl + '.mp4', headers=self.fulcrumHeaders)
if blob.status_code == 200:
videoMetadata = self.fulcrum.videos.find(fieldValue['video_id'])
print 'Uploading Video File To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['video_id'] + ' Video', fileDescription=fieldValue['caption'], fileName=fieldValue['video_id'] + '.mp4', fileContent=blob.content, fulcrumId=fieldValue['video_id'])
blob = requests.request ('GET', baseurl + '/track.json', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Video Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['video_id'] + ' JSON Track', fileDescription='JSON Track Of\n' + fieldValue['caption'], fileName=fieldValue['video_id'] + '-track.json', fileContent=blob.content)
blob = requests.request ('GET', baseurl + '/track.geojson', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Video GeoJSON Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['video_id'] + ' GEO JSON Track', fileDescription='GeoJSON Track Of\n' + fieldValue['caption'], fileName=fieldValue['video_id'] + '-track.geojson', fileContent=blob.content)
blob = requests.request ('GET', baseurl + '/track.gpx', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Video GPX Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['video_id'] + ' GPX Track', fileDescription='GPX Track Track Of\n' + fieldValue['caption'], fileName=fieldValue['video_id'] + '-track.gpx', fileContent=blob.content)
blob = requests.request ('GET', baseurl + '/track.kml', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Video KML Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['video_id'] + ' KML Track', fileDescription='KML Track Track Of\n' + fieldValue['caption'], fileName=fieldValue['video_id'] + '-track.kml', fileContent=blob.content)
return
    def process_photo_field (self, fieldValue, recordId):
        # Download a photo JPEG from Fulcrum and attach it to the Salesforce
        # record, tagged with its Fulcrum photo id.
        print 'Downloading Photo File From Fulcrum ... ' + fieldValue['photo_id']
        blob = requests.request ('GET', _fulcrumBaseURL + 'photos/' + fieldValue['photo_id'] + '.jpg', headers=self.fulcrumHeaders)
        if blob.status_code == 200:
            print 'Uploading Photo File To Salesforce... ' + recordId
            self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['photo_id'] + ' Photo', fileDescription=fieldValue['caption'], fileName=fieldValue['photo_id'] + '.jpg', fileContent=blob.content, fulcrumId=fieldValue['photo_id'])
        return
def process_signature_field (self, fieldValue, recordId):
print 'Downloading Signature File From Fulcrum ... ' + fieldValue['signature_id']
blob = requests.request ('GET', _fulcrumBaseURL + 'signature/' + fieldValue['signature_id'] + '.png', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Signature File To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['photo_id'] + ' Signature', fileDescription='Signed At: ' + fieldValue['timestamp'], fileName=fieldValue['signature_id'] + '.png', fileContent=blob.content, fulcrumId=fieldValue['signature_id'])
return
def process_audio_field (self, fieldValue, recordId):
print 'Downloading Audio File From Fulcrum ... ' + fieldValue['audio_id']
blob = requests.request ('GET', _fulcrumBaseURL + 'audio/' + fieldValue['audio_id'] + '.mp4', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Audio File To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['audio_id'] + ' Video', fileDescription=fieldValue['caption'], fileName=fieldValue['audio_id'] + '.mp4', fileContent=blob.content, fulcrumId=fieldValue['audio_id'])
blob = requests.request ('GET', _fulcrumBaseURL + 'audio/' + fieldValue['audio_id'] + '/track.json', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Audio Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['audio_id'] + ' JSON Track', fileDescription='JSON Track Of\n' + fieldValue['caption'], fileName=fieldValue['audio_id'] + '-track.json', fileContent=blob.content)
blob = requests.request ('GET', _fulcrumBaseURL + 'audio/' + fieldValue['audio_id'] + '/track.geojson', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Audio GeoJSON Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['audio_id'] + ' GEO JSON Track', fileDescription='GeoJSON Track Of\n' + fieldValue['caption'], fileName=fieldValue['audio_id'] + '-track.geojson', fileContent=blob.content)
blob = requests.request ('GET', _fulcrumBaseURL + 'audio/' + fieldValue['audio_id'] + '/track.gpx', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Audio GPX Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['audio_id'] + ' GPX Track', fileDescription='GPX Track Track Of\n' + fieldValue['caption'], fileName=fieldValue['audio_id'] + '-track.gpx', fileContent=blob.content)
blob = requests.request ('GET', _fulcrumBaseURL + 'audio/' + fieldValue['audio_id'] + '/track.kml', headers=self.fulcrumHeaders)
if blob.status_code == 200:
print 'Uploading Audio KML Track To Salesforce... ' + recordId
self.upload_file_to_salesforce_and_attach_to_record (recordId=recordId, fileTitle=fieldValue['audio_id'] + ' KML Track', fileDescription='KML Track Track Of\n' + fieldValue['caption'], fileName=fieldValue['audio_id'] + '-track.kml', fileContent=blob.content)
return
def process_date_field (self, fieldId, fieldValue):
#Generate Date Time
return self.process_generate_field (fieldId, fieldValue, 'Date')
def process_datetime_field (self, record, isDateField, fieldId, fieldValue):
#Generate Date Time
# Check to see if the last field processed was a Date Field
if isDateField != _isDateFieldDefault:
dateValue = record['form_values'][isDateField]
dateTimeValue = dateValue + ' ' + fieldValue
return self.process_generate_field (isDateField + '_' + fieldId, dateTimeValue, 'DateTime')
#Not paired with a Date Field
else:
return self.process_generate_field (fieldId, fieldValue, 'Time')
def process_address_and_choice_field (self, fieldId, subFieldKey, subFieldValue):
if subFieldValue == 'sub_thoroughfare':
return self.process_generate_field (fieldId + '_1', subFieldValue, 'Street Number')
elif subFieldKey == 'thoroughfare':
return self.process_generate_field (fieldId + '_2', subFieldValue, 'Street Name')
elif subFieldKey == 'suite':
return self.process_generate_field (fieldId + '_3', subFieldValue, 'Suite')
elif subFieldKey == 'locality':
return self.process_generate_field (fieldId + '_4', subFieldValue, 'City')
elif subFieldKey == 'sub_admin_area':
return self.process_generate_field (fieldId + '_5', subFieldValue, 'County')
elif subFieldKey == 'admin_area':
return self.process_generate_field (fieldId + '_6', subFieldValue, 'State/Province')
elif subFieldKey == 'postal_code':
return self.process_generate_field (fieldId + '_7', subFieldValue, 'Postal Code')
elif subFieldKey == 'country':
return self.process_generate_field (fieldId + '_8', subFieldValue, 'Country')
elif subFieldKey == 'choice_values':
choices = []
multiSelectChoices = subFieldValue[0]
for choice in subFieldValue:
choices.append (choice)
if multiSelectChoices != choice:
multiSelectChoices += ';' + choice
if len(choices) == 1:
self.process_generate_field (fieldId, choices, 'Choices')
else:
return self.process_generate_field (fieldId, multiSelectChoices, 'Multiselect Choices')
elif subFieldKey == 'other_values':
for choice in subFieldValue:
return self.process_generate_field (fieldId, choice, 'Other Choice')
    # Determine the type of field and process it. This handles files.
    def detect_file_field_type_and_process_field (self, fieldId, record, recordId, detail=False):
        """Inspect one form value and dispatch any file-type content
        (photo / video / audio / repeating section / signature) to the
        matching processor.

        When detail is False the value is read from record['form_values'];
        when True, record is itself a flat dict of field values.
        NOTE(review): for list values, the first file-like entry wins -- the
        early returns mean only one entry per field is ever processed;
        confirm that is intended.
        """
        fieldValue = ''
        if detail == False:
            fieldValue = record['form_values'][fieldId]
        elif detail == True:
            fieldValue = record[fieldId]
        isDictField = isinstance (fieldValue, dict)
        isListField = isinstance (fieldValue, list)
        #print fieldValue
        if isListField == True:
            for complexFieldValue in fieldValue:
                #print complexFieldValue
                isComplexDictField = isinstance (complexFieldValue, dict)
                if isComplexDictField == True:
                    # Classify the entry by its identifying key.
                    isRepeatingSections = self.checkKey(complexFieldValue, 'form_values')
                    isPhotoField = self.checkKey(complexFieldValue, 'photo_id')
                    isVideoField = self.checkKey(complexFieldValue, 'video_id')
                    isAudioField = self.checkKey(complexFieldValue, 'audio_id')
                    if isPhotoField == True:
                        print "Photo Field Detected..."
                        return self.process_photo_field (complexFieldValue, recordId)
                    elif isVideoField == True:
                        print "Video Field Detected..."
                        return self.process_video_field (complexFieldValue, recordId)
                    elif isAudioField == True:
                        print "Audio Field Detected..."
                        return self.process_audio_field (complexFieldValue, recordId)
                    elif isRepeatingSections == True:
                        print "Child Record Detected..."
                        return self.process_file_fields (complexFieldValue, recordId)
        elif isDictField == True:
            # Dict-valued fields may be signatures (identified by signature_id).
            isSignatureField = self.checkKey(fieldValue, 'signature_id')
            if isSignatureField == True:
                print "Signature Field Detected..."
                return self.process_signature_field (fieldValue, recordId)
    # Determine the type of field and process it. This handles data.
    def detect_field_type_and_process_field (self, fieldId, record, isDateField=_isDateFieldDefault, detail=False):
        """Inspect one form value and dispatch data-type content to the
        matching processor.

        Dispatch order: list (repeating sections / junction objects),
        dict (address / choice sub-fields), HH:MM time strings, ISO date
        strings, then everything else as a plain generated field.
        """
        fieldValue = ''
        if detail == False:
            fieldValue = record['form_values'][fieldId]
        elif detail == True:
            fieldValue = record[fieldId]
        isListField = isinstance (fieldValue, list)
        isDictField = isinstance (fieldValue, dict)
        if isListField == True:
            # NOTE(review): this branch classifies each entry but never acts on
            # the result and returns None -- junction/repeating handling looks
            # unfinished here; confirm intended behavior.
            for complexFieldValue in fieldValue:
                isRepeatingSections = self.checkKey(complexFieldValue, 'form_values')
                isDictComplexField = isinstance (complexFieldValue, dict)
                isJunctionObject = self.checkKey(complexFieldValue, 'record_id')
        elif isDictField == True:
            # NOTE(review): the return inside the loop means only the first
            # sub-key of the dict is ever processed -- verify against callers.
            for subFieldKey in fieldValue:
                subFieldValue = fieldValue[subFieldKey]
                return self.process_address_and_choice_field (fieldId, subFieldKey, subFieldValue)
        # Date Time field
        elif re.match(r"([0-2][0-9]:[0-5][0-9])", fieldValue):
            return self.process_datetime_field (record, isDateField, fieldId, fieldValue)
        # Date field
        elif re.match(r"([1-2][0-9][0-9][0-9]-[0-1][0-9]-[0-3][0-9])", fieldValue):
            #Mark that this loop was a Date, in prep for a Time Field
            # NOTE(review): isDateField is a local parameter, so this assignment
            # is never visible to the caller -- the Date/Time pairing relies on
            # the caller's own bookkeeping; confirm.
            isDateField = fieldId
            return self.process_date_field (fieldId, fieldValue)
        #easy field
        else:
            return self.process_generate_field (fieldId, fieldValue)
    def generate_junction_records (self, complexFormValues):
        # TODO: junction-object (record link) support is not implemented;
        # this stub intentionally returns None.
        return
def generate_detail_fields (self, complexFormValues):
dict (complexFormValues)
sfdcFields = []
for detailRecord in complexFormValues:
isDateField = _isDateFieldDefault
fieldAppend = self.detect_field_type_and_process_field (detailRecord, complexFormValues, isDateField, True)
#print fieldAppend
if isinstance (fieldAppend, dict):
sfdcFields.append (fieldAppend)
if isDateField != detailRecord:
isDateField = _isDateFieldDefault
sfdcFields = json.dumps (sfdcFields).replace('[','').replace(']','').replace('{','').replace('}','')
return sfdcFields
def generate_fields (self, record):
sfdcFields = []
isDateField = _isDateFieldDefault
#print record
for fieldId in record['form_values']:
fieldAppend = self.detect_field_type_and_process_field (fieldId, record, isDateField)
#print fieldAppend
if isinstance (fieldAppend, dict):
sfdcFields.append (fieldAppend)
# If this Loop was not a Date Field, Reset Back to Default Value
if isDateField != fieldId:
isDateField = _isDateFieldDefault
sfdcFields = json.dumps (sfdcFields).replace('[','').replace(']','').replace('{','').replace('}','')
return sfdcFields
def create_sfdc_fulcrum_record (self, record):
objectId = (_sfdcPrefix + record['form_id'] + '__c').replace('-','_')
sfdcCreateRecords = self.generate_sfdc_fulcrum_record (record)
sfdcCreateRecords = json.loads(sfdcCreateRecords)
return fulcrumToSalesforce.composite_salesforce_create (objectId, sfdcCreateRecords)
def update_sfdc_fulcrum_record (self, record):
objectId = (_sfdcPrefix + record['form_id'] + '__c').replace('-','_')
sfdcObject = SFType (objectId, self.sfdc.session_id, self.sfdc.sf_instance)
recordExists = sfdcObject.get_by_custom_id (_sfdcPrefix + 'fulcrum_id__c', record['id'])
if recordExists:
## Get Child Records
for fieldId in record['form_values']:
fieldValue = record['form_values'][fieldId]
isListField = isinstance (fieldValue, list)
if isListField == True:
complexFieldType = fieldValue[0]
isRepeatingSections = self.checkKey(complexFieldType, 'form_values')
isJunctioonObject = self.checkKey(complexFieldType, 'record_id')
if isRepeatingSections == True:
objectId = _sfdcPrefix + record['form_id'][0:13].replace('-','_') + '_' + fieldId + '_d__c'
objectReferenceId = _sfdcPrefix + record['form_id'][0:13].replace('-','_') + '_' + fieldId + '_d__r'
sfdcInsertRecord = ''
for complexFieldValue in fieldValue:
detailRecordExists = sfdcObject.get_by_custom_id (_sfdcPrefix + 'fulcrum_id__c', complexFieldValue['id'])
if detailRecordExists:
sfdcRecordUpdate = generate_sfdc_fulcrum_detail_record (self, complexFieldValue)
print sfdcRecordUpdate
exit ()
else:
self.create_sfdc_fulcrum_record (record)
def generate_sfdc_fulcrum_record (self, record):
print '---------------------------------------'
print 'Processing Fulcrum Record...'
objectId = (_sfdcPrefix + record['form_id'] + '__c').replace('-','_')
sfdcRecord = self.standard_fields_master_record (record)
sfdcFields = self.generate_fields (record)
objectIdString = '"' + objectId + '"'
recordIdString = '"' + record['id'] + '"'
sfdcRecord = json.dumps (sfdcRecord).replace('[','').replace(']','').replace('{','').replace('}','')
sfdcDetailRecords = self.generate_sfdc_fulcrum_detail_records (record)
if sfdcDetailRecords is None:
sfdcRecord = """{"records": [{"attributes": {"type" : """ + objectIdString + """, "referenceId": """ + recordIdString + """ }, """ + sfdcRecord + ',' + sfdcFields + """ }]}"""
else:
detailRecordJson = sfdcDetailRecords[0]
for detailRecord in sfdcDetailRecords:
if detailRecord != detailRecordJson:
detailRecordJson += "," + detailRecordJson
sfdcRecord = """{"records": [{"attributes": {"type" : """ + objectIdString + """, "referenceId": """ + recordIdString + """ }, """ + sfdcRecord + ',' + sfdcFields + ', ' + detailRecordJson + """ }]}"""
return sfdcRecord
    def generate_sfdc_fulcrum_detail_record (self, complexFieldValue):
        """Build the composite fragment for a single detail record.

        NOTE(review): this method appears to be an unfinished extraction of
        the inner loop of generate_sfdc_fulcrum_detail_records: objectId,
        sfdcRecord (read before assignment) and sfdcInsertRecord are all
        undefined here, so calling it raises NameError/UnboundLocalError,
        and it returns nothing. Needs wiring up before use.
        """
        complexFormValues = complexFieldValue['form_values']
        sfdcFields = self.generate_detail_fields (complexFormValues)
        # NOTE(review): objectId is not defined in this scope.
        objectIdString = '"' + objectId + '"'
        recordIdString = '"' + complexFieldValue['id'] + '"'
        #sfdcRecord = json.dumps (sfdcRecord).replace('[','').replace(']','').replace('{','').replace('}','')
        # NOTE(review): sfdcRecord is read here before ever being assigned.
        sfdcRecord = json.dumps (sfdcRecord).replace('[','').replace(']','').replace('{','').replace('}','')
        sfdcRecord = """, { "attributes": {"type" : """ + objectIdString + """ , "referenceId": """ + recordIdString + """ }, """ + sfdcRecord + ',' + sfdcFields + """ }"""
        # NOTE(review): sfdcInsertRecord is not defined in this scope.
        sfdcInsertRecord += sfdcRecord
def standard_fields_master_record (self, record):
sfdcRecord = []
if record['status'] is not None:
sfdcRecord.append (self.process_generate_field ('status', record['status'], 'Status'))
if record['version'] is not None:
sfdcRecord.append (self.process_generate_field ('version', record['version'], 'Version'))
if record['id'] is not None:
sfdcRecord.append (self.process_generate_field ('fulcrum_id', record['id'], 'Id'))
if record['created_at'] is not None:
sfdcRecord.append (self.process_generate_field ('created_at', record['created_at'], 'Created At'))
if record['updated_at'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_at', record['updated_at'], 'Updated At'))
if record['client_created_at'] is not None:
sfdcRecord.append (self.process_generate_field ('client_created_at', record['client_created_at'], 'Client Created At'))
if record['client_updated_at'] is not None:
sfdcRecord.append (self.process_generate_field ('client_updated_at', record['client_updated_at'], 'Client Updated At'))
if record['created_by'] is not None:
sfdcRecord.append (self.process_generate_field ('created_by', record['created_by'], 'Created By'))
if record['created_by_id'] is not None:
sfdcRecord.append (self.process_generate_field ('created_by_id', record['created_by_id'], 'Created By Id'))
if record['updated_by'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_by', record['updated_by'], 'Updated By'))
if record['updated_by_id'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_by_id', record['updated_by_id'], 'Updated By Id'))
if record['created_location'] is not None:
sfdcRecord.append (self.process_generate_field ('created_location', record['created_location'], 'Created Location'))
if record['updated_location'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_location', record['updated_location'], 'Updated Location'))
if record['created_duration'] is not None:
sfdcRecord.append (self.process_generate_field ('created_duration', record['created_duration'], 'Created Duration'))
if record['updated_duration'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_duration', record['updated_duration'], 'Updated Duration'))
if record['edited_duration'] is not None:
sfdcRecord.append (self.process_generate_field ('edited_duration', record['edited_duration'], 'Edited Duration'))
if record['project_id'] is not None:
sfdcRecord.append (self.process_generate_field ('project_id', record['project_id'], 'Project Id'))
if record['changeset_id'] is not None:
sfdcRecord.append (self.process_generate_field ('changeset_id', record['changeset_id'], 'Change Set ID'))
if record['assigned_to'] is not None:
sfdcRecord.append (self.process_generate_field ('assigned_to', record['assigned_to'], 'Assigned To'))
if record['assigned_to_id'] is not None:
sfdcRecord.append (self.process_generate_field ('assigned_to_id', record['assigned_to_id'], 'Assigned To Id'))
if record['form_id'] is not None:
sfdcRecord.append (self.process_generate_field ('form_id', record['form_id'], 'Form Id'))
if record['latitude'] is not None:
sfdcRecord.append (self.process_generate_field ('location', record['latitude'], 'Latitude'))
if record['longitude'] is not None:
sfdcRecord.append (self.process_generate_field ('location', record['longitude'], 'Longitude'))
if record['speed'] is not None:
sfdcRecord.append (self.process_generate_field ('speed', record['speed'], 'Speed'))
if record['course'] is not None:
sfdcRecord.append (self.process_generate_field ('course', record['course'], 'Course'))
if record['horizontal_accuracy'] is not None:
sfdcRecord.append (self.process_generate_field ('horizontal_accuracy', record['horizontal_accuracy'], 'Horizontal Accuracy'))
if record['vertical_accuracy'] is not None:
sfdcRecord.append (self.process_generate_field ('vertical_accuracy', record['vertical_accuracy'], 'Vertical Accuracy'))
return sfdcRecord
def standard_fields_detail_record (self, complexFieldValue):
sfdcRecord = []
if complexFieldValue['version'] is not None:
sfdcRecord.append (self.process_generate_field ('version', complexFieldValue['version'], 'Version'))
if complexFieldValue['id'] is not None:
sfdcRecord.append (self.process_generate_field ('fulcrum_id', complexFieldValue['id'], 'Id'))
if complexFieldValue['created_at'] is not None:
sfdcRecord.append (self.process_generate_field ('created_at', complexFieldValue['created_at'], 'Created At'))
if complexFieldValue['updated_at'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_at', complexFieldValue['updated_at'], 'Updated At'))
if complexFieldValue['created_by_id'] is not None:
sfdcRecord.append (self.process_generate_field ('created_by_id', complexFieldValue['created_by_id'], 'Created By Id'))
if complexFieldValue['updated_by_id'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_by_id', complexFieldValue['updated_by_id'], 'Updated By Id'))
if complexFieldValue['created_duration'] is not None:
sfdcRecord.append (self.process_generate_field ('created_duration', complexFieldValue['created_duration'], 'Created Duration'))
if complexFieldValue['updated_duration'] is not None:
sfdcRecord.append (self.process_generate_field ('updated_duration', complexFieldValue['updated_duration'], 'Updated Duration'))
if complexFieldValue['edited_duration'] is not None:
sfdcRecord.append (self.process_generate_field ('edited_duration', complexFieldValue['edited_duration'], 'Edited Duration'))
if complexFieldValue['changeset_id'] is not None:
sfdcRecord.append (self.process_generate_field ('changeset_id', complexFieldValue['changeset_id'], 'Change Set ID'))
if complexFieldValue['geometry'] is not None:
sfdcRecord.append (self.process_generate_field ('location', complexFieldValue['geometry']['coordinates'][1], 'Latitude'))
sfdcRecord.append (self.process_generate_field ('location', complexFieldValue['geometry']['coordinates'][0], 'Longitude'))
return sfdcRecord
    # Fulcrum Record and SFDC Parent Record ID (prefix and postfix added)
    def generate_sfdc_fulcrum_detail_records (self, record):
        """Build composite-tree JSON fragments for every repeating section of
        a Fulcrum record.

        Returns a list of strings of the form
        "<reference>":{"records":[...]} -- one per repeating-section field.
        """
        print '.......................................'
        print 'Processing Fulcrum Detail Records...'
        sfdcRecords = []
        for fieldId in record['form_values']:
            fieldValue = record['form_values'][fieldId]
            isListField = isinstance (fieldValue, list)
            if isListField == True:
                # Repeating sections are lists whose entries carry form_values.
                complexFieldType = fieldValue[0]
                isRepeatingSections = self.checkKey(complexFieldType, 'form_values')
                if isRepeatingSections == True:
                    sfdcInsertRecord = ''
                    # Detail object / reference names derive from the first 13
                    # chars of the form id plus the field id.
                    objectId = _sfdcPrefix + record['form_id'][0:13].replace('-','_') + '_' + fieldId + '_d__c'
                    objectReferenceId = _sfdcPrefix + record['form_id'][0:13].replace('-','_') + '_' + fieldId + '_d__r'
                    for complexFieldValue in fieldValue:
                        print '.......................................'
                        print 'Processing Detail Record...'
                        print ' Object: ' + objectId
                        print ' ReferenceName: ' + objectReferenceId
                        sfdcRecord = self.standard_fields_detail_record (complexFieldValue)
                        complexFormValues = complexFieldValue['form_values']
                        sfdcFields = self.generate_detail_fields (complexFormValues)
                        objectIdString = '"' + objectId + '"'
                        recordIdString = '"' + complexFieldValue['id'] + '"'
                        #sfdcRecord = json.dumps (sfdcRecord).replace('[','').replace(']','').replace('{','').replace('}','')
                        # Strip JSON delimiters before splicing into the payload.
                        sfdcRecord = json.dumps (sfdcRecord).replace('[','').replace(']','').replace('{','').replace('}','')
                        # Each fragment is prefixed with ", " -- the leading
                        # comma is stripped once below after the loop.
                        sfdcRecord = """, { "attributes": {"type" : """ + objectIdString + """ , "referenceId": """ + recordIdString + """ }, """ + sfdcRecord + ',' + sfdcFields + """ }"""
                        sfdcInsertRecord += sfdcRecord
                    objectReferenceIdString = '"' + str(objectReferenceId) + '"'
                    # Remove only the first comma (the one prefixed above).
                    sfdcInsertRecord = sfdcInsertRecord.replace(',',"",1)
                    recordJson = objectReferenceIdString + """:{"records":[""" + sfdcInsertRecord +"""]}"""
                    sfdcRecords.append (recordJson)
        return sfdcRecords
| StarcoderdataPython |
21656 | <gh_stars>0
# -*- coding: utf-8 -*-
#
# Usage: Download all stock code info from TWSE
#
# TWSE equities = 上市證券
# TPEx equities = 上櫃證券
#
import csv
from collections import namedtuple
import requests
from lxml import etree
TWSE_EQUITIES_URL = 'http://isin.twse.com.tw/isin/C_public.jsp?strMode=2'
TPEX_EQUITIES_URL = 'http://isin.twse.com.tw/isin/C_public.jsp?strMode=4'
ROW = namedtuple( 'Row', [ 'type', 'code', 'name', 'ISIN', 'start', 'market', 'group', 'CFI' ] )
def make_row_tuple(typ, row):
    """Build a ROW from one raw table row.

    The second cell holds "<code>\u3000<name>" -- u'\u3000' is the Unicode
    ideographic (full-width) space used by TWSE as the separator.
    """
    rawCode, rawName = row[ 1 ].split( '\u3000' )
    code = rawCode.replace( ' ', '' )
    name = rawName.replace( ' ', '' )
    return ROW( typ, code, name, *row[ 2:-1 ] )
def fetch_data(url):
    """Download one TWSE ISIN listing page and parse its table into ROWs."""
    r = requests.get(url)
    print( r.url )
    root = etree.HTML( r.text )
    tableRows = root.xpath('//tr')[1:]
    result = []
    typ = ''
    for tableRow in tableRows:
        cells = [ cell.text for cell in tableRow.iter( ) ]
        if len(cells) == 4:
            # A 4-cell row is a section header naming the security type;
            # it applies to all data rows until the next header.
            typ = cells[2].strip(' ')
        else:
            # A data row: convert it under the current section type.
            result.append( make_row_tuple( typ, cells ) )
    return result
def to_csv( url, path ):
    """Fetch the equity rows from *url* and write them to *path* as CSV."""
    rows = fetch_data( url )
    print( 'Save File Path {}'.format( path ) )
    with open( path, 'w', newline='', encoding='utf_8' ) as csvfile:
        writer = csv.writer( csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL )
        # Header line from the namedtuple field names, then one line per row.
        writer.writerow( rows[0]._fields )
        for row in rows:
            writer.writerow( list( row ) )
def GetFile( path ):
    """Download the TWSE (listed) equities table into *path*."""
    to_csv( TWSE_EQUITIES_URL, path )
if __name__ == '__main__':
    # Script entry point: dump both listed (TWSE) and OTC (TPEx) equities.
    to_csv( TWSE_EQUITIES_URL, 'twse_equities.csv' )
    to_csv( TPEX_EQUITIES_URL, 'tpex_equities.csv' )
| StarcoderdataPython |
1732176 | """
Cartesian View Curve with Logarithmic Y Axis
"""
# (C) Copyright 2017- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
#
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
import metview as mv
# Load the ozone GRIB data into a fieldset; fall back to downloading the
# example file from the Metview gallery when it is not available locally.
filename = "ozone_pl.grib"
g = mv.read(filename) if mv.exist(filename) else mv.gallery.load_dataset(filename)

# Vertical pressure levels, and the ozone profile at the grid point
# nearest to 85S, 0E.
levels = mv.grib_get_double(g, "level")
ozone = mv.nearest_gridpoint(g, [-85, 0])

# Curve visualiser: ozone on the x axis against level on the y axis.
vis_curve = mv.input_visualiser(input_x_values=ozone, input_y_values=levels)

# Curve styling.
gr_curve = mv.mgraph(
    graph_type="curve",
    graph_line_colour="coral",
    graph_line_thickness=4,
)

# Axis titles -- the x title is built from the GRIB metadata of the
# first field (parameter name and units).
first_field = g[0]
x_title = first_field.grib_get_string("name") + " in " + first_field.grib_get_string("units")
x_axis = mv.maxis(axis_title_text=x_title)
y_axis = mv.maxis(axis_title_text="Pressure in hPa")

# Cartesian view with a logarithmic y (pressure) axis.
view = mv.cartesianview(
    x_automatic="off",
    x_min=0,
    x_max=max(ozone),
    y_automatic="on",
    y_axis_type="logarithmic",
    horizontal_axis=x_axis,
    vertical_axis=y_axis,
)

# Write the plot to a PDF file.
mv.setoutput(mv.pdf_output(output_name="cartesian_log_y_axis"))
mv.plot(view, vis_curve, gr_curve)
| StarcoderdataPython |
3339641 | <reponame>AI4PFAS/AI4PFAS
import pandas as pd
import numpy as np
from sklearn.model_selection import KFold, StratifiedKFold
from sklearn.feature_selection import VarianceThreshold
from rdkit import Chem
from rdkit.Chem import Descriptors
from helpers import count_cf_bonds, create_morgan_space
from graphnn import mol2graph
def generate_dataset(splitter, name):
    """Preprocess LDToxDB into 5-fold train/test .npz files under
    ../data/preprocessed/<name>/, tagging PFAS and PFAS-like molecules.

    splitter: a scikit-learn KFold/StratifiedKFold instance.
    name: output subdirectory name ('random' or 'stratified').
    """
    # Load the Mordred-featurized LDToxDB and derive per-molecule flags.
    ldtoxdb = pd.read_csv('../data/ldtoxdb-mordred.csv').dropna(axis=1)
    ldtoxdb['rd_mol'] = ldtoxdb.SMI.apply(Chem.MolFromSmiles)
    ldtoxdb['n_cf_bonds'] = ldtoxdb.rd_mol.apply(count_cf_bonds)
    ldtoxdb['mol_wt'] = ldtoxdb.rd_mol.apply(Chem.Descriptors.MolWt)
    # "PFAS-like" = at least two carbon-fluorine bonds.
    ldtoxdb['is_pfas_like'] = ldtoxdb['n_cf_bonds'] >= 2
    # Canonicalize PFAS8k SMILES so membership can be tested against SMI.
    pfas8k = pd.read_csv('../data/pfas8k-mordred.csv')
    pfas8k['canon_smi'] = pfas8k.SMILES.apply(Chem.MolFromSmiles).apply(Chem.MolToSmiles)
    ldtoxdb['is_pfas'] = ldtoxdb.SMI.isin(pfas8k.canon_smi)
    # Mordred descriptor columns (positional slice of the loaded frame).
    mordred = ldtoxdb.columns[5:-5]
    # comment next 4 lines for speed if processing data only for non-benchmarks
    ecfp4096 = np.array(ldtoxdb.rd_mol.apply(create_morgan_space(nbits=4096, r=2)).tolist())
    ecfp2048 = np.array(ldtoxdb.rd_mol.apply(create_morgan_space(nbits=1, r=1)).tolist()) if False else np.array(ldtoxdb.rd_mol.apply(create_morgan_space(nbits=2048, r=1)).tolist())
    ecfp2048r6 = np.array(ldtoxdb.rd_mol.apply(create_morgan_space(nbits=2048, r=6)).tolist())
    graph = np.array(ldtoxdb.rd_mol.apply(mol2graph.mol2torchdata).tolist())
    # for stratified splitting
    bins = pd.cut(ldtoxdb[['NeglogLD50']].to_numpy().reshape(-1), bins=5, labels=False)
    for foldno, (train_idx, test_idx) in enumerate(splitter.split(ldtoxdb, bins)):
        prefix = '../data/preprocessed/%s/fold%d' % (name, foldno)
        train = ldtoxdb.iloc[train_idx]
        test = ldtoxdb.iloc[test_idx]
        # Benchmark subsets: every combination of PFAS / PFAS-like inclusion.
        pfas_like_pfas_excluded_train = train.loc[(train.is_pfas_like & ~train.is_pfas)]
        pfas_like_pfas_excluded_test = test.loc[(test.is_pfas_like & ~test.is_pfas)]
        pfas_like_train = train.loc[train.is_pfas_like]
        pfas_like_test = test.loc[test.is_pfas_like]
        pfas_like_excluded_train = train.loc[~train.is_pfas_like]
        pfas_like_excluded_test = test.loc[~test.is_pfas_like]
        pfas_excluded_train = train.loc[~train.is_pfas]
        pfas_excluded_test = test.loc[~test.is_pfas]
        pfas_train = train.loc[train.is_pfas]
        pfas_test = test.loc[test.is_pfas]
        pfas_like_train_idx = pfas_like_train.index
        pfas_like_test_idx = pfas_like_test.index
        # SMILES
        np.savez_compressed(prefix + '_smiles_test', smiles=test[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_train', smiles=train[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_test_pfas_like', smiles=pfas_like_test[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_train_pfas_like', smiles=pfas_like_train[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_test_pfas_like_excluded', smiles=pfas_like_excluded_test[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_train_pfas_like_excluded', smiles=pfas_like_excluded_train[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_test_pfas_like_pfas_excluded', smiles=pfas_like_pfas_excluded_test[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_train_pfas_like_pfas_excluded', smiles=pfas_like_pfas_excluded_train[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_test_pfas_excluded', smiles=pfas_excluded_test[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_train_pfas_excluded', smiles=pfas_excluded_train[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_test_pfas', smiles=pfas_test[['SMI']].to_numpy())
        np.savez_compressed(prefix + '_smiles_train_pfas', smiles=pfas_train[['SMI']].to_numpy())
        # Outputs
        np.savez_compressed(prefix + '_y_test', y=test[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_train', y=train[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_test_pfas_like', y=pfas_like_test[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_train_pfas_like', y=pfas_like_train[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_test_pfas_like_excluded', y=pfas_like_excluded_test[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_train_pfas_like_excluded', y=pfas_like_excluded_train[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_test_pfas_like_pfas_excluded', y=pfas_like_pfas_excluded_test[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_train_pfas_like_pfas_excluded', y=pfas_like_pfas_excluded_train[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_test_pfas_excluded', y=pfas_excluded_test[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_train_pfas_excluded', y=pfas_excluded_train[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_test_pfas', y=pfas_test[['NeglogLD50']].to_numpy())
        np.savez_compressed(prefix + '_y_train_pfas', y=pfas_train[['NeglogLD50']].to_numpy())
        # Mordred inputs
        # Fit the variance filter on the training fold only, then apply the
        # same column mask to the test fold and every subset.
        col_selector = VarianceThreshold()
        np.savez_compressed(prefix + '_mordred_x_train', x=col_selector.fit_transform(train[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_test', x=col_selector.transform(test[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_train_pfas_like', x=col_selector.transform(pfas_like_train[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_test_pfas_like', x=col_selector.transform(pfas_like_test[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_train_pfas_like_excluded', x=col_selector.transform(pfas_like_excluded_train[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_test_pfas_like_excluded', x=col_selector.transform(pfas_like_excluded_test[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_train_pfas_like_pfas_excluded', x=col_selector.transform(pfas_like_pfas_excluded_train[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_test_pfas_like_pfas_excluded', x=col_selector.transform(pfas_like_pfas_excluded_test[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_train_pfas_excluded', x=col_selector.transform(pfas_excluded_train[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_test_pfas_excluded', x=col_selector.transform(pfas_excluded_test[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_train_pfas', x=col_selector.transform(pfas_train[mordred]).astype(np.float32))
        np.savez_compressed(prefix + '_mordred_x_test_pfas', x=col_selector.transform(pfas_test[mordred]).astype(np.float32))
        # We need these for inference later on
        indices = col_selector.get_support(indices=True)
        np.savez_compressed(prefix + '_mordred_x_cols', cols=train[mordred].iloc[[0], indices].columns)
        # NOTE(review): this `continue` makes everything below unreachable --
        # presumably a deliberate speed toggle (see the "comment next 4 lines"
        # note above), but the ECFP arrays are still computed and then unused.
        # TODO confirm whether the ECFP/GP exports should be re-enabled.
        continue
        # ECFP-4096 inputs
        np.savez_compressed(prefix + '_ecfp_4096_x_train', x=col_selector.fit_transform(ecfp4096[train_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_4096_x_test', x=col_selector.transform(ecfp4096[test_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_4096_x_train_pfas_like', x=col_selector.transform(ecfp4096[pfas_like_train_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_4096_x_test_pfas_like', x=col_selector.transform(ecfp4096[pfas_like_test_idx]).astype(np.float32))
        # ECFP-2048 inputs
        np.savez_compressed(prefix + '_ecfp_2048_x_train', x=col_selector.fit_transform(ecfp2048[train_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_2048_x_test', x=col_selector.transform(ecfp2048[test_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_2048_x_train_pfas_like', x=col_selector.transform(ecfp2048[pfas_like_train_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_2048_x_test_pfas_like', x=col_selector.transform(ecfp2048[pfas_like_test_idx]).astype(np.float32))
        # ECFP-2048 inputs
        np.savez_compressed(prefix + '_ecfp_2048r6_x_train', x=col_selector.fit_transform(ecfp2048r6[train_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_2048r6_x_test', x=col_selector.transform(ecfp2048r6[test_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_2048r6_x_train_pfas_like', x=col_selector.transform(ecfp2048r6[pfas_like_train_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_ecfp_2048r6_x_test_pfas_like', x=col_selector.transform(ecfp2048r6[pfas_like_test_idx]).astype(np.float32))
        # GP Convienience
        col_selector2 = VarianceThreshold()
        np.savez_compressed(prefix + '_gp_x_train', x=col_selector.fit_transform(train[mordred]).astype(np.float32),
                            x2=col_selector2.fit_transform(ecfp4096[train_idx]).astype(np.float32))
        np.savez_compressed(prefix + '_gp_x_test', x=col_selector.transform(test[mordred]).astype(np.float32),
                            x2=col_selector2.transform(ecfp4096[test_idx]).astype(np.float32))
        # GCN
        # Graph featurized on fly
def main():
    """Build both preprocessed dataset variants with a fixed RNG seed."""
    for splitter_cls, split_name in ((KFold, 'random'), (StratifiedKFold, 'stratified')):
        # Re-seed before each variant so the two runs are independently
        # reproducible.
        np.random.seed(9700)
        generate_dataset(splitter=splitter_cls(n_splits=5, shuffle=True), name=split_name)
if __name__ == '__main__':
    main()
4802787 | <reponame>ProzorroUKR/openprocurement.api
from openprocurement.tender.core.procedure.context import get_request
from openprocurement.tender.core.procedure.state.tender import TenderState
class CFASelectionTenderState(TenderState):
    """Tender state machine for the CFA-selection procedure.

    Differs from the base TenderState in that a single bid is sufficient
    and no qualification/awarded lot events are scheduled.
    """
    # Minimum number of bids for the tender/lot to proceed.
    min_bids_number = 1
    def lots_qualification_events(self, tender):
        """This procedure schedules no qualification events."""
        yield from ()  # no qualification events
    def lots_awarded_events(self, tender):
        """This procedure schedules no awarded events."""
        yield from ()  # no awarded events
    def check_bids_number(self, tender):
        """Enforce the minimum-bid rule, mutating *tender* in place.

        Lots (or the whole tender) with too few bids lose their pending
        auction start date and are switched to 'unsuccessful'; with enough
        bids the next award is scheduled.
        """
        if tender.get("lots"):
            for lot in tender["lots"]:
                bid_number = self.count_lot_bids_number(tender, lot["id"])
                if bid_number < self.min_bids_number:
                    # Drop any scheduled auction start; remove the period
                    # entirely once it is empty.
                    if lot.get("auctionPeriod", {}).get("startDate"):
                        del lot["auctionPeriod"]["startDate"]
                    if not lot["auctionPeriod"]:
                        del lot["auctionPeriod"]
                    if lot["status"] == "active":
                        self.set_object_status(lot, "unsuccessful")
                        # for procedures where lotValues have "status" field (openeu, competitive_dialogue, more ?)
                        for bid in tender.get("bids", ""):
                            for lot_value in bid.get("lotValues", ""):
                                if "status" in lot_value and lot_value["relatedLot"] == lot["id"]:
                                    lot_value["status"] = "unsuccessful"
            # should be moved to tender_status_check ?
            # All lots dead -> the whole tender is unsuccessful; otherwise, if
            # some active lot has exactly one bid, go straight to awarding.
            if not set(i["status"] for i in tender["lots"]).difference({"unsuccessful", "cancelled"}):
                self.get_change_tender_status_handler("unsuccessful")(tender)
            elif max(self.count_lot_bids_number(tender, i["id"])
                     for i in tender["lots"] if i["status"] == "active") == 1:
                self.add_next_award()
        else:
            bid_number = self.count_bids_number(tender)
            if bid_number == 1:
                # A single bid is enough in this procedure (min_bids_number=1).
                self.add_next_award()
            elif bid_number < self.min_bids_number:
                if tender.get("auctionPeriod", {}).get("startDate"):
                    del tender["auctionPeriod"]["startDate"]
                if not tender["auctionPeriod"]:
                    del tender["auctionPeriod"]
                self.get_change_tender_status_handler("unsuccessful")(tender)
| StarcoderdataPython |
1698710 | <reponame>odeke-em/utils
#!/usr/bin/env python3
# Author: <NAME> <<EMAIL>>
# Copy content from src to destination only if it doesn't
# exist in the destination
import os
import sys
import json
import shutil
import hashlib
from threading import Thread
def isDir(p):
    """Return a truthy value when *p* is a non-empty path to an existing directory.

    Preserves the original lambda's short-circuit semantics: a falsy *p*
    (e.g. '' or None) is returned as-is rather than coerced to bool.
    """
    # PEP 8 (E731): named lambdas replaced with defs for better tracebacks.
    return p and os.path.isdir(p)

def isPath(p):
    """Return a truthy value when *p* is a non-empty path to an existing regular file."""
    return p and os.path.isfile(p)
def getHashDigest(fPath):
    """Return the hex MD5 digest of the file at *fPath*.

    Returns None when *fPath* is falsy or not a regular file (matching the
    original implicit-None behavior).
    """
    # Inline the file check (equivalent to the module's isPath helper).
    if not (fPath and os.path.isfile(fPath)):
        return None
    md5 = hashlib.md5()
    with open(fPath, 'rb') as f:
        # Stream in 1 MiB chunks so large files are not read whole into
        # memory (the original did f.read() of the entire file).
        for chunk in iter(lambda: f.read(1 << 20), b''):
            md5.update(chunk)
    return md5.hexdigest()
def mapDigests(dirPath, hmap):
    """Walk *dirPath*, hashing every file and grouping full paths by their
    MD5 digest in the caller-supplied dict *hmap* (digest -> [paths])."""
    for root, dirs, fileNames in os.walk(dirPath):
        for fileName in fileNames:
            fullPath = os.path.join(root, fileName)
            digest = getHashDigest(fullPath)
            hmap.setdefault(digest, []).append(fullPath)
            print(fullPath, digest)
def getNonExistant(primary, secondary):
    """Return one representative path per digest in *primary* missing from *secondary*.

    Both arguments are digest -> [paths] mappings; for every digest that
    secondary lacks, the first recorded path (if any) is returned.
    """
    missing = []
    for digest, paths in primary.items():
        if digest in secondary or not paths:
            continue
        missing.append(paths[0])
    return missing
def main():
    """CLI entry point: hash both trees in parallel, then copy files unique
    to the primary directory into the destination (third argument, or the
    secondary directory when omitted)."""
    args = sys.argv
    if len(args) < 3:
        sys.stderr.write('Usage: <primary_dir> <secondary_dir>\n')
        sys.exit(-1)
    primaryDir, secondaryDir = args[1:3]
    destination = args[3] if len(args) > 3 else secondaryDir
    if not isDir(primaryDir):
        sys.stderr.write('Primary is not a directory\n')
    elif not isDir(secondaryDir):
        sys.stderr.write('Secondary is not a directory\n')
    else:
        primaryMap, secondaryMap = {}, {}
        # Hash the two trees concurrently; hashing is I/O bound.
        workers = [
            Thread(target=mapDigests, args=(primaryDir, primaryMap)),
            Thread(target=mapDigests, args=(secondaryDir, secondaryMap)),
        ]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()
        handleMerging(primaryMap, secondaryMap, destination)
def handleDirCreation(path):
    """Ensure *path* exists as a directory.

    Returns an HTTP-style (status, payload) pair:
      400, None      - falsy path
      409, path      - directory already exists
      200, path      - directory was created
      500, exception - creation failed
    """
    if not path:
        return 400, None
    if os.path.isdir(path):
        return 409, path
    try:
        os.mkdir(path)
        return 200, path
    except Exception as e:
        return 500, e
def handleMerging(pmap, smap, destination):
    """Copy files whose digests exist only in *pmap* into *destination*.

    Returns the number of successful copies, or the error payload from
    handleDirCreation when the destination cannot be prepared.  Failed and
    permission-denied paths are dumped to errdCopy.json / accessDenied.json
    in the current working directory.
    """
    status, destPath = handleDirCreation(destination)
    if status not in (200, 409):
        return destPath  # error payload from handleDirCreation
    errd = []
    accessDenied = []
    passCount = 0
    for i, path in enumerate(getNonExistant(pmap, smap)):
        if not os.access(path, os.R_OK):
            accessDenied.append(path)
            continue
        try:
            shutil.copy(path, destPath)
        except Exception as e:
            errd.append((path, str(e),))
        else:
            sys.stdout.write("Successful Copy: index %d/%d\r" % (passCount, i))
            passCount += 1
    if errd:
        with open('errdCopy.json', 'w') as f:
            f.write(json.dumps(errd))
    if accessDenied:
        with open('accessDenied.json', 'w') as g:
            g.write(json.dumps(accessDenied))
    return passCount
# Script entry point: run the copy when executed directly.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
1712625 | '''
This is separate library file for the CSD.py applications
Its done this way to clean up the code in the main app
v00 - Initial Build
'''
# External Loads
from csdlib.vect import Vector as v2
import os.path
import numpy as np
import functools
from tkinter import filedialog, messagebox
from tkinter import *
import pickle
import matplotlib.pyplot as plt
import matplotlib
matplotlib.use('TKAgg')
# End of external Loads
# Classes
class cointainer():
    """Pickle-able container for a cross-section geometry array and cell sizes.

    Attributes:
        xArray: array describing the cross-section geometry
        dX, dY: element (cell) size in each direction
    (Name spelling kept for backward compatibility with saved files/callers.)
    """

    def __init__(self, xArray, dX, dY):
        self.xArray = xArray
        self.dX = dX
        self.dY = dY
        print('The container is created')

    def save(self, filename):
        """Pickle this container to '<filename>.csd'.

        The object is first pickled to a probe file; only if that succeeds
        is the real file (over)written, so an existing save is never
        destroyed by a failing pickle.  Fixes the original's bare `except:`
        (which swallowed even KeyboardInterrupt) and removes the leftover
        probe file after a successful save.
        """
        filename = str(filename) + '.csd'
        probefile = filename + '_temp'
        try:
            print('trying to save to: {}'.format(filename))
            with open(probefile, 'wb') as output:
                pickle.dump(self, output, pickle.DEFAULT_PROTOCOL)
        except (OSError, pickle.PicklingError) as err:
            print('There is issue with pickle. Save aborted to protect file.')
            print(err)
        else:
            with open(filename, 'wb') as output:
                pickle.dump(self, output, pickle.DEFAULT_PROTOCOL)
            try:
                os.remove(probefile)  # clean up the probe file
            except OSError:
                pass

    def restore(self):
        """Return the stored (xArray, dX, dY) triple."""
        return self.xArray, self.dX, self.dY
# ################# FUNCTIONS & PROCEDURES##############################
def n_shiftPhase(phaseId, dX, dY, XSecArray):
    """Shift every cell of one phase by (dX, dY) cells, in place.

    Cells shifted past the high edge wrap around once to the opposite side.
    Other phases are left untouched (a shifted cell that lands on another
    phase's cell overwrites it).

    Args:
        phaseId: value marking the phase in the geometry array (1, 2 or 3)
        dX: shift in columns
        dY: shift in rows
        XSecArray: geometry array, modified in place
    """
    rows = XSecArray.shape[0]
    cols = XSecArray.shape[1]
    # Isolate this phase, then erase it from the working array.
    phaseOnly = np.copy(XSecArray)
    phaseOnly[phaseOnly != phaseId] = 0
    XSecArray[XSecArray == phaseId] = 0
    for r in range(rows):
        for c in range(cols):
            if phaseOnly[r, c] != phaseId:
                continue
            newR = r + dY
            newC = c + dX
            if newR >= rows:
                newR -= rows
            if newC >= cols:
                newC -= cols
            XSecArray[newR, newC] = phaseOnly[r, c]
def n_cloneGeometry(dX, dY, N, XSecArray):
    """Return a larger array holding the input geometry plus N shifted copies.

    The output is square-padded by N * max(dX, dY) cells; copy number k is
    placed at offset (k*dY, k*dX) and only non-zero cells are copied, so
    overlapping copies do not erase earlier content with zeros.

    Args:
        dX: shift per copy in columns
        dY: shift per copy in rows
        N: number of extra copies
        XSecArray: base cross-section array (unchanged)

    Returns:
        New numpy array with the cloned geometry.
    """
    oR = XSecArray.shape[0]
    oC = XSecArray.shape[1]
    step = max(dX, dY)
    newR = N * step + oR
    newC = N * step + oC
    print('New array shape: {}x{}'.format(newR, newC))
    result = np.zeros((newR, newC))
    result[0:oR, 0:oC] = XSecArray
    for copyIdx in range(1, N + 1):
        offR = copyIdx * dY
        offC = copyIdx * dX
        print('copying to: {} x {}'.format(offR, offC))
        for srcR in range(oR):
            for srcC in range(oC):
                value = XSecArray[srcR, srcC]
                if value != 0:
                    result[offR + srcR, offC + srcC] = value
    return result
def n_getDistance(A, B):
    """Return the straight-line distance between points A and B.

    Args:
        A: (x, y) tuple - first point position on the surface
        B: (x, y) tuple - second point position on the surface

    Returns:
        Euclidean distance in the same units as the coordinates.
    """
    dx = A[0] - B[0]
    dy = A[1] - B[1]
    return np.sqrt(dx * dx + dy * dy)
def loadObj(filename):
    """Recreate an object previously pickled to *filename*.

    Inputs:
        filename: path to a file written by the matching save routine.

    Returns:
        The unpickled object.

    Example:
        P = loadObj('project.save')  # recreate P as a myProject object
    """
    with open(filename, 'rb') as source:
        return pickle.load(source)
# Calculations of mututal inductance between conductors
def n_getMutualInductance(sizeX, sizeY, lenght, distance):
    """Return the mutual inductance M [H] between two parallel conductors.

    A rectangular cross section is assumed; for a circular conductor pass
    sizeX = sizeY = 2r.  Note that the formula depends only on length and
    spacing, so sizeX/sizeY do not enter the result (they are kept for
    interface compatibility; the original computed an unused mean diameter
    from them, removed here).

    Args:
        sizeX: conductor width [mm] (unused by the formula)
        sizeY: conductor height [mm] (unused by the formula)
        lenght: conductor length [mm]
        distance: distance between the analyzed conductors [mm]

    Returns:
        Mutual inductance in [H].
    """
    l = lenght * 1e-3    # [m]
    d = distance * 1e-3  # [m]
    mi0 = 4 * np.pi * 1e-7  # vacuum permeability
    # Formula by:
    # https://pdfs.semanticscholar.org/b0f4/eff92e31d4c5ff42af4a873ebdd826e610f5.pdf
    M = (mi0 * l / (2 * np.pi)) * \
        (np.log((l + np.sqrt(l**2 + d**2)) / d) - np.sqrt(1 + (d / l)**2) + d / l)
    # previous formula
    # return 0.000000001*2*lenght*1e-1*(np.log(2*lenght*1e-1/(distance/10))-(3/4))
    return M
# Calculation of self inductance value function
def n_getSelfInductance(sizeX, sizeY, lenght):
    """Return the self inductance L [H] of a straight rectangular conductor.

    For a circular conductor pass sizeX = sizeY = 2r.

    Args:
        sizeX: conductor width [mm]
        sizeY: conductor height [mm]
        lenght: conductor length [mm]

    Returns:
        Self inductance in [H].
    """
    meanDiameter = (sizeX + sizeY) / 2
    a = meanDiameter * 1e-3  # equivalent diameter [m]
    l = lenght * 1e-3        # [m]
    mi0 = 4 * np.pi * 1e-7   # vacuum permeability
    # This calculation is based on:
    # https://pdfs.semanticscholar.org/b0f4/eff92e31d4c5ff42af4a873ebdd826e610f5.pdf
    return (mi0 * l / (2 * np.pi)) * \
        (np.log(2 * l / a) - np.log(2) / 3 + 13 / 12 - np.pi / 2)
# Calculate the resistance value function
def n_getResistance(sizeX, sizeY, lenght, temp, sigma20C, temCoRe):
    """Return the DC resistance [Ohm] of a rectangular bar at temperature *temp*.

    Args:
        sizeX, sizeY: cross-section dimensions [mm]
        lenght: conductor length [mm]
        temp: conductor temperature [deg C]
        sigma20C: material conductivity at 20 degC [S/m]
        temCoRe: temperature coefficient of resistance [1/K]
    """
    crossSection = sizeX * sizeY  # [mm^2]
    resistance20C = (lenght / (crossSection * sigma20C)) * 1e3
    return resistance20C * (1 + temCoRe * (temp - 20))
# Calculate distance between elements function
def n_getDistancesArray(inputVector):
    """Return an NxN array of distances between all conductor elements.

    Input:
        inputVector: element vector as delivered by n_arrayVectorize;
        columns 2 and 3 hold each element's X and Y position in mm.

    Returns:
        Symmetric numpy array with zeros on the diagonal.
    """
    count = inputVector.shape[0]
    print(count)
    distances = np.zeros((count, count))
    for row in range(count):
        for col in range(count):
            if row == col:
                continue  # self-distance stays 0
            dx = inputVector[row][2] - inputVector[col][2]
            dy = inputVector[row][3] - inputVector[col][3]
            distances[row, col] = np.sqrt(dx**2 + dy**2)
    return distances
def n_perymiter(vec, arr, dXmm, dYmm):
    """Return the exposed-perimeter length for the given conducting elements.

    For each cell in *vec*, each of its four edges contributes when the
    neighbouring cell is empty (0) or lies outside the array (the cell sits
    on the array border).

    Inputs:
        vec - vector of elements [Row, Col, ...] (as delivered by
              n_arrayVectorize) to measure
        arr - array describing the geometry shape
        dXmm - element size in the x direction
        dYmm - element size in the y direction

    Returns:
        Perimeter length in the same units as dXmm and dYmm.
        (Bug fix: the original never returned the accumulated value even
        though its docstring promised one.)
    """
    perymiter = 0
    for box in vec:
        rows, cols = arr.shape
        # neighbour below (row + 1): edge length is dYmm
        R, C = int(box[0] + 1), int(box[1])
        if R < rows:
            if arr[R][C] == 0:
                perymiter += dYmm
        else:
            perymiter += dYmm
        # neighbour above (row - 1)
        R, C = int(box[0] - 1), int(box[1])
        if R >= 0:
            if arr[R][C] == 0:
                perymiter += dYmm
        else:
            perymiter += dYmm
        # neighbour to the right (col + 1): edge length is dXmm
        R, C = int(box[0]), int(box[1] + 1)
        if C < cols:
            if arr[R][C] == 0:
                perymiter += dXmm
        else:
            perymiter += dXmm
        # neighbour to the left (col - 1)
        R, C = int(box[0]), int(box[1] - 1)
        if C >= 0:
            if arr[R][C] == 0:
                perymiter += dXmm
        else:
            perymiter += dXmm
    return perymiter
# Master Array Vecrorization FUNCTION
def n_arrayVectorize(inputArray, phaseNumber, dXmm, dYmm):
    """Collect all cells of one phase into an element vector.

    Input:
        inputArray: geometry array marking conductor cells by phase number
        phaseNumber: phase value to extract
        dXmm: element size in the X direction [mm]
        dYmm: element size in the Y direction [mm]

    Output:
        numpy array of [Row, Col, X, Y] rows, one per matching cell, where
        Row/Col are the cell's indices in inputArray and X/Y are the cell
        centre coordinates in mm ((index + 0.5) * cell size).
    """
    rows = inputArray.shape[0]
    cols = inputArray.shape[1]
    cells = []
    for r in range(rows):
        for c in range(cols):
            if inputArray[r][c] == phaseNumber:
                # cell centre position in mm
                cells.append([r, c, (0.5 + c) * dXmm, (0.5 + r) * dYmm])
    return np.array(cells)
# Functions that calculate the master impedance array for given geometry
def n_getImpedanceArray(distanceArray, freq, dXmm, dYmm, lenght=1000, temperature=20, sigma20C=58e6, temCoRe=3.9e-3):
    """Build the complex impedance matrix for the element geometry.

    Diagonal entries are R + jwL_self of one element; off-diagonal entries
    are jwM between the element pair at the given distance.

    Input:
        distanceArray: array of distances between elements [mm]
        freq: frequency [Hz]
        dXmm, dYmm: element size [mm]
        lenght: analyzed length [mm] / default = 1000 mm
        temperature: conductor temperature [deg C] / default = 20
        sigma20C: conductivity at 20 degC [S] / default = 58 MS (copper)
        temCoRe: temperature resistance coefficient / default is copper

    Returns:
        Complex numpy array of the same shape as distanceArray.
    """
    omega = 2 * np.pi * freq
    # NOTE: the original used dtype=np.complex_, an alias removed in
    # NumPy 2.0; the builtin `complex` is the supported equivalent.
    impedanceArray = np.zeros((distanceArray.shape), dtype=complex)
    for X in range(distanceArray.shape[0]):
        for Y in range(distanceArray.shape[0]):
            if X == Y:
                impedanceArray[Y, X] = n_getResistance(sizeX=dXmm, sizeY=dYmm, lenght=lenght, temp=temperature, sigma20C=sigma20C,
                                                       temCoRe=temCoRe) + 1j*omega*n_getSelfInductance(sizeX=dXmm, sizeY=dYmm, lenght=lenght)
            else:
                impedanceArray[Y, X] = 1j*omega*n_getMutualInductance(
                    sizeX=dXmm, sizeY=dYmm, lenght=lenght, distance=distanceArray[Y, X])
    return impedanceArray
# Function for calculating resistance array
def n_getResistanceArray(elementsVector, dXmm, dYmm, lenght=1000, temperature=20, sigma20C=58e6, temCoRe=3.9e-3):
    """Return the per-element resistance vector for the geometry.

    All elements share the same size, length, temperature and material, so
    the resistance is computed once and broadcast to every element (the
    original recomputed the identical value per element).

    Input:
        elementsVector: element vector as delivered by n_arrayVectorize
        dXmm, dYmm: element size [mm]
        lenght: analyzed length [mm] / default = 1000 mm
        temperature: conductor temperature [deg C] / default = 20
        sigma20C: conductivity at 20 degC [S] / default = 58 MS (copper)
        temCoRe: temperature resistance coefficient / default is copper

    Returns:
        1-D float numpy array, one entry per element.
    """
    elementResistance = n_getResistance(
        sizeX=dXmm, sizeY=dYmm, lenght=lenght, temp=temperature,
        sigma20C=sigma20C, temCoRe=temCoRe)
    return np.full(elementsVector.shape[0], elementResistance)
# Function that increase the resolution of the main geometry array
def n_arraySlicer(inputArray, subDivisions=2):
    """Increase the resolution of the cross-section array.

    Each cell is expanded into a subDivisions x subDivisions block with the
    same value.

    Args:
        inputArray: original geometry matrix
        subDivisions: resolution multiplier / default = 2
    """
    finer = inputArray.repeat(subDivisions, axis=0)
    return finer.repeat(subDivisions, axis=1)
# Functions that calculate module of complex number
def n_getComplexModule(x):
    """Return the modulus of *x* when it is complex, else *x* unchanged.

    input: x - number to inspect; non-complex values pass through directly.
    """
    if not isinstance(x, complex):
        return x
    return np.sqrt(x.real ** 2 + x.imag ** 2)
# Canvas preparation procedure
def n_checkered(canvas, cutsX, cutsY, mode=0):
    '''
    This function clean the board and draw grid
    Inputs:
    canvas - tkinter canvas object
    cutsX - elements in X (left right) direction
    cutsY - elements in Y (top down) direction
    mode - 0: clear the canvas and draw the grid; 1: clear only;
           2: draw the grid only (no clearing)
    '''
    # Reading the size of the canvas element
    canvasHeight = canvas.winfo_height()
    canvasWidth = canvas.winfo_width()
    # grid pitch in pixels for each direction
    line_distanceX = (canvasWidth / cutsX)
    line_distanceY = (canvasHeight / cutsY)
    # Cleaning up the whole canvas space by drawing a white rectangle
    if mode == 0 or mode == 1:
        canvas.create_rectangle(
            0, 0, canvasWidth, canvasHeight, fill="white", outline="gray")
    # vertical lines at an interval of "line_distance" pixel
    # some limits added - we dont draw it if the line amout is to big
    # it would be mess anyway if too much
    # (note: `and` binds tighter than `or`, so this reads as
    #  (<=100 and mode==0) or (<=100 and mode==2))
    if max(cutsX, cutsY) <= 100 and mode == 0 or max(cutsX, cutsY) <= 100 and mode == 2:
        for x in range(0, cutsX):
            canvas.create_line(x*line_distanceX, 0, x *
                               line_distanceX, canvasHeight, fill="gray")
        # horizontal lines at an interval of "line_distance" pixel
        for y in range(0, cutsY):
            canvas.create_line(0, y*line_distanceY, canvasWidth,
                               y*line_distanceY, fill="gray")
    # previous implementation - i think too much
    # for x in range(0,canvasWidth):
    #     canvas.create_line(x*line_distanceX, 0, x*line_distanceX, canvasHeight, fill="gray")
    #     # horizontal lines at an interval of "line_distance" pixel
    # for y in range(0,canvasHeight):
    #     canvas.create_line(0, y*line_distanceY, canvasWidth, y*line_distanceY, fill="gray")
# Procedure that plot the array to canvas
def n_printTheArray(dataArray, canvas):
    '''
    This procedure allows to print the array back to the graphical board
    usefull for redraw or draw loaded data
    Inputs:
    dataArray - the array to display on canvas
    canvas - tkinter canvas object
    '''
    # NOTE(review): relies on a module-level `canvasElements` list (defined
    # elsewhere in this file) that tracks the rectangle ids from the
    # previous draw so they can be deleted before redrawing - confirm it is
    # initialized before the first call.
    global canvasElements
    # Let's check the size
    elementsInY = dataArray.shape[0]
    elementsInX = dataArray.shape[1]
    # Now we calculate the propper dX and dY for this array
    canvasHeight = canvas.winfo_height()
    canvasWidth = canvas.winfo_width()
    # cell size in pixels
    dX = canvasWidth / elementsInX
    dY = canvasHeight / elementsInY
    # protection for backward compatibility
    # & cleaning stuff: remove every rectangle created by the previous draw
    for graphElement in canvasElements:
        try:
            print(graphElement)
            canvas.delete(graphElement)
        except:
            print("Error in removing stuff")
            pass
    canvasElements = []
    # phase numbers 1/2/3 map to red/green/blue
    colorList = ["red", "green", "blue"]
    for Row in range(elementsInY):
        for Col in range(elementsInX):
            theNumber = int(dataArray[Row][Col])
            if theNumber in [1, 2, 3]:
                fillColor = colorList[theNumber-1]
                # remember the rectangle id so the next redraw can delete it
                canvasElements.append(canvas.create_rectangle(
                    (Col)*dX, (Row)*dY, (Col)*dX+dX, (Row)*dY+dY, fill=fillColor, outline=""))
    # elif dataArray[Row][Col] == 2:
    #     canvas.create_rectangle(
    #         (Col)*dX, (Row)*dY, (Col)*dX+dX, (Row)*dY+dY, fill=fillColor, outline="")
    # elif dataArray[Row][Col] == 3:
    #     canvas.create_rectangle(
    #         (Col)*dX, (Row)*dY, (Col)*dX+dX, (Row)*dY+dY, fill=fillColor, outline="")
    # n_checkered(canvas, elementsInX, elementsInY, mode=2)
# Procedure that plot the array to canvas
def n_printTheArrayOld(dataArray, canvas):
    '''
    This procedure allows to print the array back to the graphical board
    usefull for redraw or draw loaded data
    (legacy variant: clears the canvas and redraws the grid instead of
    tracking and deleting previously created rectangles)
    Inputs:
    dataArray - the array to display on canvas
    canvas - tkinter canvas object
    '''
    # Let's check the size
    elementsInY = dataArray.shape[0]
    elementsInX = dataArray.shape[1]
    # Now we calculate the propper dX and dY for this array
    canvasHeight = canvas.winfo_height()
    canvasWidth = canvas.winfo_width()
    # cell size in pixels
    dX = canvasWidth / elementsInX
    dY = canvasHeight / elementsInY
    # Now we cleanUp the field (mode=1: clear only, no grid yet)
    n_checkered(canvas, elementsInX, elementsInY, mode=1)
    # phase 1 -> red, phase 2 -> green, phase 3 -> blue
    for Row in range(elementsInY):
        for Col in range(elementsInX):
            if dataArray[Row][Col] == 1:
                fillColor = "red"
                canvas.create_rectangle(
                    (Col)*dX, (Row)*dY, (Col)*dX+dX, (Row)*dY+dY, fill=fillColor, outline="")
            elif dataArray[Row][Col] == 2:
                fillColor = "green"
                canvas.create_rectangle(
                    (Col)*dX, (Row)*dY, (Col)*dX+dX, (Row)*dY+dY, fill=fillColor, outline="")
            elif dataArray[Row][Col] == 3:
                fillColor = "blue"
                canvas.create_rectangle(
                    (Col)*dX, (Row)*dY, (Col)*dX+dX, (Row)*dY+dY, fill=fillColor, outline="")
    # finally draw the grid on top of the filled cells (mode=2: grid only)
    n_checkered(canvas, elementsInX, elementsInY, mode=2)
# Procedure to set up point in the array and display it on canvas
def n_setUpPoint(event, Set, dataArray, canvas):
    '''
    This procedure track the mouse position from event ad setup or reset propper element
    in the cross section array
    Inputs
    event - the event object from tkinter that create the point (or reset)
    Set - Number of phase to set or 0 to reset
    dataArray - the array that keeps the cross section design data
    canvas - tk inter canvas object
    '''
    # gathering some current data
    elementsInY = dataArray.shape[0]
    elementsInX = dataArray.shape[1]
    canvasHeight = canvas.winfo_height()
    canvasWidth = canvas.winfo_width()
    # cell size in pixels
    dX = canvasWidth / elementsInX
    dY = canvasHeight / elementsInY
    # translate the pixel position of the event into a cell (Col, Row)
    Col = int(event.x/dX)
    Row = int(event.y/dY)
    # only react when the event landed strictly inside the drawing area
    if event.x < canvasWidth and event.y < canvasHeight and event.x > 0 and event.y > 0:
        inCanvas = True
    else:
        inCanvas = False
    if Set != 0 and inCanvas:
        actualPhase = Set
        # phase 3 -> blue, phase 2 -> green, any other non-zero -> phase 1, red
        if actualPhase == 3:
            canvas.create_rectangle(
                Col*dX, Row*dY, Col*dX+dX, Row*dY+dY, fill="blue", outline="gray")
            dataArray[Row][Col] = 3
        elif actualPhase == 2:
            canvas.create_rectangle(
                Col*dX, Row*dY, Col*dX+dX, Row*dY+dY, fill="green", outline="gray")
            dataArray[Row][Col] = 2
        else:
            canvas.create_rectangle(
                Col*dX, Row*dY, Col*dX+dX, Row*dY+dY, fill="red", outline="gray")
            dataArray[Row][Col] = 1
    elif Set == 0 and inCanvas:
        # Set == 0 erases the cell: paint it white and clear the array entry
        canvas.create_rectangle(Col*dX, Row*dY, Col *
                                dX+dX, Row*dY+dY, fill="white", outline="gray")
        dataArray[Row][Col] = 0
# Function that put back together the solution vectr back to represent the crss section shape array
def n_recreateresultsArray(elementsVector, resultsVector, initialGeometryArray):
    """Map a per-element results vector back onto the cross-section grid.

    Inputs:
        elementsVector: element vector created by n_arrayVectorize
            (columns 0 and 1 hold each element's Row and Col)
        resultsVector: values computed for the elements, in the same order
        initialGeometryArray: array carrying the cross-section shape

    Returns:
        Float array shaped like initialGeometryArray with each result
        written at its element's position; untouched cells stay 0.
    """
    mapped = np.zeros((initialGeometryArray.shape), dtype=float)
    for index, value in enumerate(resultsVector):
        row = int(elementsVector[index][0])
        col = int(elementsVector[index][1])
        mapped[row, col] = value
    return mapped
def n_sumVecList(list):
    """Return the vector sum of *list* (an iterable of csdlib v2 vectors).

    The parameter name shadows the builtin `list`; it is kept for
    interface compatibility with existing callers.
    """
    total = v2(0, 0)
    for item in list:
        total = total + item
    return total
def n_getForces(XsecArr, vPhA, vPhB, vPhC, Ia, Ib, Ic, Lenght=1):
    '''
    Experimental: calculate the electrodynamic force vector for each phase
    from the pairwise interaction of every element with every other element
    (parallel-wire force: F = mu0/(2*pi) * I1*I2 * L / d).
    Inputs:
    XsecArr - geometry array; cell values 1/2/3 identify the phase
    vPhA/B/C - elements vectors of the each phase geometry as delivered by n_arrayVectorize
    Ia/b/c - current value in each phase in [A]
    Lenght - analyzed conductor length (default 1); multiplies the force
    Returns:
    (ForceA, ForceB, ForceC, ForceMagVect, totalForceVec) - per-phase total
    force vectors, per-element force magnitudes, per-element force vectors
    '''
    def sumVecList(list):
        sumV = v2(0, 0)
        for v in list:
            sumV = sumV + v
        return sumV
    # mu0 / (2*pi)
    mi0_o2pi = 2e-7
    # Memorizing each phaze elements count
    lPh = (len(vPhA), len(vPhB), len(vPhC))
    # phase current is split evenly across that phase's elements
    Iph = (Ia / len(vPhA), Ib / len(vPhB), Ic / len(vPhC))
    # One vector for all phases
    vPhAll = np.concatenate((vPhA, vPhB, vPhC), axis=0)
    totalForceVec = []
    for this in vPhAll:
        forceVec = v2(0, 0)  # initial reset for this element force
        for other in vPhAll:
            # skip self-interaction (same Row/Col cell)
            if this[0] != other[0] or this[1] != other[1]:
                distV = v2(other[2]-this[2], other[3]-this[3])
                direction = distV.normalize()
                distance = distV.norm() * 1e-3  # to convert into [m]
                # element currents are looked up via the phase id stored in
                # the geometry array at each element's Row/Col
                Ithis = Iph[int(XsecArr[int(this[0])][int(this[1])])-1]
                Iother = Iph[int(XsecArr[int(other[0])][int(other[1])])-1]
                forceVec += Lenght * \
                    (mi0_o2pi * Iother * Ithis / distance) * direction
        totalForceVec.append(forceVec)
    # per-phase totals: slices follow the concatenation order A | B | C
    ForceA = sumVecList(totalForceVec[:lPh[0]])
    ForceB = sumVecList(totalForceVec[lPh[0]: lPh[0] + lPh[1]])
    ForceC = sumVecList(totalForceVec[lPh[0] + lPh[1]:])
    ForceMagVect = [force.norm() for force in totalForceVec]
    return ForceA, ForceB, ForceC, ForceMagVect, totalForceVec
def n_getPhasesCenters(vPhA, vPhB, vPhC):
    """Return the geometric centre (coordinate average) of each phase.

    Inputs:
        vPhA/B/C: elements vectors of each phase geometry as delivered by
        n_arrayVectorize (columns 2 and 3 hold X and Y in mm).

    Returns:
        Three (x, y) tuples, one per phase.
    """
    def centroid(phase):
        xs = [element[2] for element in phase]
        ys = [element[3] for element in phase]
        return (sum(xs) / len(xs), sum(ys) / len(ys))
    return centroid(vPhA), centroid(vPhB), centroid(vPhC)
def n_getCenter(v):
    """Return the geometric centre (coordinate average) of one element vector.

    Input:
        v: elements vector as delivered by n_arrayVectorize
        (columns 2 and 3 hold X and Y in mm).

    Returns:
        (x, y) tuple of the averaged coordinates.
    """
    xs = [element[2] for element in v]
    ys = [element[3] for element in v]
    return (sum(xs) / len(xs), sum(ys) / len(ys))
def n_getConductors(XsecArr, vPhA, vPhB, vPhC):
    '''
    Number the connected conductors in the cross section.
    Each element of vPhA/B/C is [Row, Col, X, Y] as produced by
    n_arrayVectorize.  Every cell inherits the conductor id of an
    already-numbered direct neighbour, or gets a fresh id if none is set.
    Returns:
        conductorsArr - array (same shape as XsecArr) of conductor ids
        conductor - total number of conductors found
        phaseCond - conductors counted per phase, in [A, B, C] order
    NOTE(review): the low-side neighbour probes use index -1 when Row or
    Col is 0, which numpy silently wraps to the opposite edge - confirm
    whether joining cells across the array border is intended.
    '''
    # Setting up new conductors array
    conductorsArr = np.zeros((XsecArr.shape), dtype=int)
    conductor = 0
    phases = [vPhA, vPhB, vPhC]
    phaseCond = []
    for phase in phases:
        phaseConductors = 0
        for element in phase:
            R = int(element[0])
            C = int(element[1])
            if conductorsArr[R, C] == 0:
                # tests in 4 directions
                # (the N/E/S/W labels are nominal: E/W actually probe
                # rows R+1/R-1 and N/S probe columns C-1/C+1)
                N, E, S, W = 0, 0, 0, 0
                try:
                    E = conductorsArr[R+1, C]
                    W = conductorsArr[R-1, C]
                    N = conductorsArr[R, C-1]
                    S = conductorsArr[R, C+1]
                except:
                    # high-side index out of range: leave that probe at 0
                    pass
                if N != 0:
                    conductorsArr[R, C] = N
                elif S != 0:
                    conductorsArr[R, C] = S
                elif E != 0:
                    conductorsArr[R, C] = E
                elif W != 0:
                    conductorsArr[R, C] = W
                else:
                    # no numbered neighbour: start a new conductor
                    conductor += 1
                    phaseConductors += 1
                    conductorsArr[R, C] = conductor
        phaseCond.append(phaseConductors)
    return conductorsArr, conductor, phaseCond
| StarcoderdataPython |
66845 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for dealing with FuzzerStats."""
from builtins import object
import datetime
import functools
import itertools
import json
import os
import random
import re
from base import memoize
from base import utils
from datastore import data_handler
from datastore import data_types
from datastore import fuzz_target_utils
from google_cloud_utils import big_query
from google_cloud_utils import storage
from metrics import fuzzer_logs
from metrics import logs
from system import environment
from system import shell
# Extension of the serialized TestcaseRun stats file written next to a
# testcase (see TestcaseRun.get_stats_filename).
STATS_FILE_EXTENSION = '.stats2'

# URL path template of the performance report viewer page.
PERFORMANCE_REPORT_VIEWER_PATH = '/performance-report/{fuzzer}/{job}/{date}'

# BigQuery table schema for JobRun stats rows (attached as JobRun.SCHEMA).
JOB_RUN_SCHEMA = {
    'fields': [{
        'name': 'testcases_executed',
        'type': 'INTEGER',
        'mode': 'NULLABLE'
    }, {
        'name': 'build_revision',
        'type': 'INTEGER',
        'mode': 'NULLABLE'
    }, {
        'name': 'new_crashes',
        'type': 'INTEGER',
        'mode': 'NULLABLE'
    }, {
        'name': 'job',
        'type': 'STRING',
        'mode': 'NULLABLE'
    }, {
        'name': 'timestamp',
        'type': 'FLOAT',
        'mode': 'NULLABLE'
    }, {
        # Nested repeated record: one entry per distinct crash seen in the run.
        'name':
            'crashes',
        'type':
            'RECORD',
        'mode':
            'REPEATED',
        'fields': [{
            'name': 'crash_type',
            'type': 'STRING',
            'mode': 'NULLABLE'
        }, {
            'name': 'is_new',
            'type': 'BOOLEAN',
            'mode': 'NULLABLE'
        }, {
            'name': 'crash_state',
            'type': 'STRING',
            'mode': 'NULLABLE'
        }, {
            'name': 'security_flag',
            'type': 'BOOLEAN',
            'mode': 'NULLABLE'
        }, {
            'name': 'count',
            'type': 'INTEGER',
            'mode': 'NULLABLE'
        }]
    }, {
        'name': 'known_crashes',
        'type': 'INTEGER',
        'mode': 'NULLABLE'
    }, {
        'name': 'fuzzer',
        'type': 'STRING',
        'mode': 'NULLABLE'
    }, {
        'name': 'kind',
        'type': 'STRING',
        'mode': 'NULLABLE'
    }]
}
class FuzzerStatsException(Exception):
  """Raised for fuzzer-stats specific errors."""
class BaseRun(object):
  """Base class for one fuzzer run's stats (shared by JobRun/TestcaseRun).

  Behaves like a mapping over the underlying stats dict while exposing the
  common fields (fuzzer, job, build_revision, timestamp) as properties.
  """

  # Stat keys become BigQuery column names, so they must look like
  # identifiers.
  VALID_FIELDNAME_PATTERN = re.compile(r'[a-zA-Z][a-zA-Z0-9_]*')

  def __init__(self, fuzzer, job, build_revision, timestamp):
    self._stats_data = {
        'fuzzer': fuzzer,
        'job': job,
        'build_revision': build_revision,
        'timestamp': timestamp,
    }

  def __getitem__(self, key):
    return self._stats_data.__getitem__(key)

  def __setitem__(self, key, value):
    # Bug fix: the original tested `re.compile(self.VALID_FIELDNAME_PATTERN)`,
    # whose result is always truthy, so invalid key names were never
    # rejected.  Validate the key against the compiled pattern instead.
    if not self.VALID_FIELDNAME_PATTERN.fullmatch(key):
      raise ValueError('Invalid key name.')
    return self._stats_data.__setitem__(key, value)

  def __delitem__(self, key):
    return self._stats_data.__delitem__(key)

  def __contains__(self, key):
    return self._stats_data.__contains__(key)

  def to_json(self):
    """Return JSON representation of the stats."""
    return json.dumps(self._stats_data)

  def update(self, other):
    """Update stats with a dict."""
    self._stats_data.update(other)

  @property
  def data(self):
    return self._stats_data

  @property
  def kind(self):
    return self._stats_data['kind']

  @property
  def fuzzer(self):
    return self._stats_data['fuzzer']

  @property
  def job(self):
    return self._stats_data['job']

  @property
  def build_revision(self):
    return self._stats_data['build_revision']

  @property
  def timestamp(self):
    return self._stats_data['timestamp']

  @staticmethod
  def from_json(json_data):
    """Deserialize |json_data| into a JobRun/TestcaseRun, or None if invalid.

    Returns None for unparsable JSON, non-dict payloads, unknown kinds and
    payloads missing required fields.
    """
    try:
      data = json.loads(json_data)
    except (ValueError, TypeError):
      return None

    if not isinstance(data, dict):
      return None

    result = None
    try:
      kind = data['kind']
      if kind == 'TestcaseRun':
        result = TestcaseRun(data['fuzzer'], data['job'],
                             data['build_revision'], data['timestamp'])
      elif kind == 'JobRun':
        result = JobRun(data['fuzzer'], data['job'], data['build_revision'],
                        data['timestamp'], data['testcases_executed'],
                        data['new_crashes'], data['known_crashes'],
                        data.get('crashes'))
    except KeyError:
      return None

    if result:
      # Preserve any extra fields carried by the payload.
      result.update(data)

    return result
class JobRun(BaseRun):
  """Stats for one job run (one fuzzing session of a job)."""

  SCHEMA = JOB_RUN_SCHEMA

  # `crashes` is a new field that will replace `new_crashes` and `old_crashes`.
  def __init__(self, fuzzer, job, build_revision, timestamp,
               number_of_testcases, new_crashes, known_crashes, crashes):
    super(JobRun, self).__init__(fuzzer, job, build_revision, timestamp)
    run_fields = {
        'kind': 'JobRun',
        'testcases_executed': number_of_testcases,
        'new_crashes': new_crashes,
        'known_crashes': known_crashes,
        'crashes': crashes,
    }
    self._stats_data.update(run_fields)
class TestcaseRun(BaseRun):
  """Stats for a single testcase run, persisted next to the testcase file."""

  SCHEMA = None

  def __init__(self, fuzzer, job, build_revision, timestamp):
    super(TestcaseRun, self).__init__(fuzzer, job, build_revision, timestamp)
    self._stats_data['kind'] = 'TestcaseRun'
    # Tag the run with its stats source when the environment provides one.
    source = environment.get_value('STATS_SOURCE')
    if source:
      self._stats_data['source'] = source

  @staticmethod
  def get_stats_filename(testcase_file_path):
    """Get stats filename for the given testcase."""
    return testcase_file_path + STATS_FILE_EXTENSION

  @staticmethod
  def read_from_disk(testcase_file_path, delete=False):
    """Read the TestcaseRun for the given testcase, optionally deleting the
    on-disk stats file afterwards.  Returns None if no stats file exists."""
    stats_path = TestcaseRun.get_stats_filename(testcase_file_path)
    if not os.path.exists(stats_path):
      return None

    with open(stats_path) as handle:
      testcase_run = BaseRun.from_json(handle.read())

    if delete:
      shell.remove_file(stats_path)

    return testcase_run

  @staticmethod
  def write_to_disk(testcase_run, testcase_file_path):
    """Write the given TestcaseRun for |testcase_file_path| to disk."""
    if not testcase_run:
      return

    stats_path = TestcaseRun.get_stats_filename(testcase_file_path)
    with open(stats_path, 'w') as handle:
      handle.write(testcase_run.to_json())
class QueryGroupBy(object):
  """GroupBy enum: integer constants selecting how stats query results are
  grouped (see group_by_to_field_name for the matching field names)."""

  GROUP_BY_NONE = 0
  GROUP_BY_REVISION = 1
  GROUP_BY_DAY = 2
  GROUP_BY_TIME = 3
  GROUP_BY_JOB = 4
  GROUP_BY_FUZZER = 5
def group_by_to_field_name(group_by):
  """Return the result field name for a QueryGroupBy value, or None if the
  value has no corresponding field (e.g. GROUP_BY_NONE or unknown)."""
  mapping = {
      QueryGroupBy.GROUP_BY_REVISION: 'build_revision',
      QueryGroupBy.GROUP_BY_DAY: 'date',
      QueryGroupBy.GROUP_BY_TIME: 'time',
      QueryGroupBy.GROUP_BY_JOB: 'job',
      QueryGroupBy.GROUP_BY_FUZZER: 'fuzzer',
  }
  return mapping.get(group_by)
class BuiltinFieldData(object):
  """One cell's value for a builtin (computed) field.

  Attributes:
      value: the displayed value
      sort_key: optional key used for column sorting
      link: optional hyperlink target for the cell
  """

  def __init__(self, value, sort_key=None, link=None):
    self.value = value
    self.sort_key = sort_key
    self.link = link
class BuiltinFieldSpecifier(object):
  """Names a builtin field (with an optional display alias) and can build it."""

  def __init__(self, name, alias=None):
    self.name = name
    self.alias = alias

  def create(self, ctx=None):
    """Instantiate the BuiltinField this specifier names, or None if unknown."""
    constructor = BUILTIN_FIELD_CONSTRUCTORS.get(self.name)
    return constructor(ctx) if constructor else None

  def field_class(self):
    """Return the BuiltinField class without instantiating it.

    Unwraps functools.partial constructors to their underlying class.
    """
    constructor = BUILTIN_FIELD_CONSTRUCTORS.get(self.name)
    if not constructor:
      return None
    if isinstance(constructor, functools.partial):
      return constructor.func
    return constructor
class BuiltinField(object):
  """Base class for computed (non-BigQuery) fields."""

  def __init__(self, ctx=None):
    self.ctx = ctx

  def get(self, group_by, group_by_value):  # pylint: disable=unused-argument
    """Return BuiltinFieldData for one cell; the base class has none."""
    return None
class BuiltinFieldContext(object):
  """Query context (fuzzer and job filters) shared by builtin fields."""

  def __init__(self, fuzzer=None, jobs=None):
    self.fuzzer = fuzzer
    self.jobs = jobs

  def single_job_or_none(self):
    """Return the single selected job, or None when zero or several."""
    if self.jobs and len(self.jobs) == 1:
      return self.jobs[0]
    return None
class CoverageFieldContext(BuiltinFieldContext):
  """Coverage field context. Acts as a cache: lookups are memoized in a
  256-entry FIFO cache keyed by (fuzzer, date)."""

  def __init__(self, fuzzer=None, jobs=None):
    super(CoverageFieldContext, self).__init__(fuzzer=fuzzer, jobs=jobs)

  @memoize.wrap(memoize.FifoInMemory(256))
  def get_coverage_info(self, fuzzer, date=None):
    """Return coverage info of child fuzzers.

    NOTE(review): delegates to a module-level get_coverage_info helper
    defined elsewhere in this file.
    """
    # Builtin fuzzers: coverage is tracked per project, so resolve the
    # project name from the (single) selected job.
    if fuzzer in data_types.BUILTIN_FUZZERS:
      # Get coverage info for a job (i.e. a project).
      job = self.single_job_or_none()
      project = data_handler.get_project_name(job)
      return get_coverage_info(project, date)

    # Otherwise map the fuzzer name to its project-qualified fuzz target
    # name when one exists.
    fuzz_target = data_handler.get_fuzz_target(fuzzer)
    if fuzz_target:
      fuzzer = fuzz_target.project_qualified_name()

    return get_coverage_info(fuzzer, date)
class BaseCoverageField(object):
  """Shared logic for builtin fields backed by coverage information."""

  CONTEXT_CLASS = CoverageFieldContext

  def __init__(self, ctx):
    self.ctx = ctx

  def get_coverage_info(self, group_by, group_by_value):
    """Return the coverage info record matching the grouping mode, or None."""
    if group_by == QueryGroupBy.GROUP_BY_DAY:
      # Coverage for this context's fuzzer on a specific day.
      return self.ctx.get_coverage_info(self.ctx.fuzzer, group_by_value)

    if group_by == QueryGroupBy.GROUP_BY_FUZZER:
      # Latest coverage data for each fuzzer.
      return self.ctx.get_coverage_info(group_by_value)

    if group_by == QueryGroupBy.GROUP_BY_JOB:
      # Latest coverage data for this context's fuzzer.  Even though we
      # group by job here, coverage does not differ across jobs - it only
      # depends on the fuzzer name and the date.
      return self.ctx.get_coverage_info(self.ctx.fuzzer)

    return None
class CoverageField(BaseCoverageField):
  """Coverage field: edge or function coverage as a percentage."""

  EDGE = 0
  FUNCTION = 1
  VALUE_TYPE = float

  def __init__(self, coverage_type, ctx=None):
    super(CoverageField, self).__init__(ctx)
    self.coverage_type = coverage_type

  def get(self, group_by, group_by_value):
    """Return data."""
    info = self.get_coverage_info(group_by, group_by_value)
    if not info:
      return None

    if self.coverage_type == self.EDGE:
      covered, total = info.edges_covered, info.edges_total
    else:
      covered, total = info.functions_covered, info.functions_total

    if covered is None or total is None:
      return None

    if not total:
      # A zero denominator means the recorded coverage info is bogus.
      logs.log_error(
          'Invalid coverage info: total equals 0 for "%s".' % self.ctx.fuzzer)
      return BuiltinFieldData('No coverage', sort_key=0.0)

    percentage = 100.0 * float(covered) / total
    display_value = '%.2f%% (%d/%d)' % (percentage, covered, total)
    return BuiltinFieldData(display_value, sort_key=percentage)
class CorpusBackupField(BaseCoverageField):
  """Link to the latest corpus backup archive."""

  VALUE_TYPE = str

  def __init__(self, ctx=None):
    super(CorpusBackupField, self).__init__(ctx)

  def get(self, group_by, group_by_value):
    """Return data."""
    info = self.get_coverage_info(group_by, group_by_value)
    if not info or not info.corpus_backup_location:
      return None

    # Google Cloud console does not support linking to a specific file, so we
    # link to the directory instead.
    backup_dir = os.path.dirname(info.corpus_backup_location)
    return BuiltinFieldData('Download', link=backup_dir)
class CorpusSizeField(BaseCoverageField):
  """Corpus size field (main corpus or quarantine)."""

  CORPUS = 0
  QUARANTINE = 1
  VALUE_TYPE = int

  def __init__(self, corpus_type, ctx=None):
    super(CorpusSizeField, self).__init__(ctx)
    self.corpus_type = corpus_type

  def get(self, group_by, group_by_value):
    """Return data."""
    if (self.ctx.fuzzer in data_types.BUILTIN_FUZZERS and
        group_by == QueryGroupBy.GROUP_BY_DAY):
      # Explicitly return None here, as coverage_info below might exist and
      # have default corpus size of 0, which might look confusing on the
      # stats page.
      return None

    info = self.get_coverage_info(group_by, group_by_value)
    if not info:
      return None

    if self.corpus_type == self.CORPUS:
      size_units = info.corpus_size_units
      size_bytes = info.corpus_size_bytes
      location = info.corpus_location
    else:
      size_units = info.quarantine_size_units
      size_bytes = info.quarantine_size_bytes
      location = info.quarantine_location

    # If the values aren't specified, return None to show the default '--'
    # text.
    if size_units is None or size_bytes is None:
      return None

    display_value = '%d (%s)' % (size_units, utils.get_size_string(size_bytes))
    return BuiltinFieldData(display_value, sort_key=size_units, link=location)
class CoverageReportField(BaseCoverageField):
  """Link to the HTML coverage report."""

  VALUE_TYPE = str

  def __init__(self, ctx=None):
    super(CoverageReportField, self).__init__(ctx)

  def get(self, group_by, group_by_value):
    """Return data."""
    info = self.get_coverage_info(group_by, group_by_value)
    if not info or not info.html_report_url:
      return None

    return BuiltinFieldData('Coverage', link=info.html_report_url)
def _logs_bucket_key_fn(func, args, kwargs): # pylint: disable=unused-argument
return 'fuzzer_logs_bucket:' + args[1]
class FuzzerRunLogsContext(BuiltinFieldContext):
  """Fuzzer logs context."""

  # TTL (seconds) for the memcached fuzzer -> logs-bucket lookup: 15 minutes.
  MEMCACHE_TTL = 15 * 60

  def __init__(self, fuzzer=None, jobs=None):
    super(FuzzerRunLogsContext, self).__init__(fuzzer=fuzzer, jobs=jobs)

  @memoize.wrap(memoize.FifoInMemory(256))
  def _get_logs_bucket_from_job(self, job_type):
    """Get logs bucket from job."""
    return data_handler.get_value_from_job_definition_or_environment(
        job_type, 'FUZZ_LOGS_BUCKET')

  # _logs_bucket_key_fn keys the cache on args[1], i.e. fuzzer_name below —
  # keep the parameter order in sync with that key function.
  @memoize.wrap(memoize.Memcache(MEMCACHE_TTL, key_fn=_logs_bucket_key_fn))
  def _get_logs_bucket_from_fuzzer(self, fuzzer_name):
    """Get logs bucket from fuzzer (child fuzzers only)."""
    jobs = [
        mapping.job for mapping in fuzz_target_utils.get_fuzz_target_jobs(
            fuzz_target_name=fuzzer_name)
    ]
    if not jobs:
      return None

    # Check that the logs bucket is same for all of them.
    bucket = self._get_logs_bucket_from_job(jobs[0])
    if all(bucket == self._get_logs_bucket_from_job(job) for job in jobs[1:]):
      return bucket

    # Jobs disagree on the bucket, so there is no single answer.
    return None

  def get_logs_bucket(self, fuzzer_name=None, job_type=None):
    """Return logs bucket for the job."""
    # A job-level bucket takes precedence over the fuzzer-derived one.
    if job_type:
      return self._get_logs_bucket_from_job(job_type)

    if fuzzer_name:
      return self._get_logs_bucket_from_fuzzer(fuzzer_name)

    return None
class FuzzerRunLogsField(BuiltinField):
  """Fuzzer logs field."""

  CONTEXT_CLASS = FuzzerRunLogsContext
  VALUE_TYPE = str

  def _get_logs_bucket_path(self, group_by, group_by_value):
    """Return logs bucket path."""
    fuzzer = self.ctx.fuzzer
    job = self.ctx.single_job_or_none()
    date = None

    if group_by == QueryGroupBy.GROUP_BY_FUZZER:
      fuzzer = group_by_value
    elif group_by == QueryGroupBy.GROUP_BY_DAY:
      date = group_by_value
      if not fuzzer or not job:
        # We can only use the date if both fuzzer and job exist (since they
        # come before the date in the GCS path).
        return None
    elif group_by == QueryGroupBy.GROUP_BY_JOB:
      job = group_by_value
    else:
      return None

    if not fuzzer:
      # Fuzzer always needs to be specified (first component in GCS path).
      return None

    logs_bucket = self.ctx.get_logs_bucket(fuzzer_name=fuzzer, job_type=job)
    if not logs_bucket:
      return None

    # NOTE(review): single slash is deliberate — get_logs_directory()
    # presumably returns a '/bucket/...' path (mirrors the 'gs:/' +
    # get_gcs_stats_path() pattern used in upload_stats); confirm.
    return 'gs:/' + fuzzer_logs.get_logs_directory(logs_bucket, fuzzer, job,
                                                   date)

  def get(self, group_by, group_by_value):
    """Return data."""
    logs_path = self._get_logs_bucket_path(group_by, group_by_value)
    if not logs_path:
      return None

    return BuiltinFieldData('Logs', link=logs_path)
class PerformanceReportField(BuiltinField):
  """Performance report field."""

  CONTEXT_CLASS = FuzzerRunLogsContext
  VALUE_TYPE = str

  def _get_performance_report_path(self, group_by, group_by_value):
    """Return the performance analysis report path, or None."""
    fuzzer = self.ctx.fuzzer
    job = self.ctx.single_job_or_none()
    date = 'latest'

    if group_by == QueryGroupBy.GROUP_BY_FUZZER:
      fuzzer = group_by_value
    elif group_by == QueryGroupBy.GROUP_BY_JOB:
      job = group_by_value
    elif group_by == QueryGroupBy.GROUP_BY_DAY:
      date = group_by_value
    else:
      return None

    # Both a fuzzer and a job are required to address a report.
    if fuzzer and job:
      return PERFORMANCE_REPORT_VIEWER_PATH.format(
          fuzzer=fuzzer, job=job, date=date)

    return None

  def get(self, group_by, group_by_value):
    """Return data."""
    report_path = self._get_performance_report_path(group_by, group_by_value)
    if not report_path:
      return None

    return BuiltinFieldData('Performance', link=report_path)
class QueryField(object):
  """Represents a query field."""

  def __init__(self, table_alias, field_name, aggregate_function,
               select_alias=None):
    self.table_alias = table_alias
    self.name = field_name
    self.aggregate_function = aggregate_function
    # Default the SELECT alias to the raw field name.
    self.select_alias = select_alias or field_name

  def is_custom(self):
    """Return true if this field uses complex query. This field won't appear
    in the SELECT's fields automatically. We will need to define how to get
    the data."""
    agg = self.aggregate_function
    return agg and agg.lower() == 'custom'

  def __str__(self):
    if self.aggregate_function:
      base = '%s(%s.%s)' % (self.aggregate_function, self.table_alias,
                            self.name)
    else:
      base = '%s.%s' % (self.table_alias, self.name)

    if not self.select_alias:
      return base

    return base + ' as ' + self.select_alias
class Query(object):
  """Represents a stats query."""

  def _ensure_valid_name(self, name, regex):
    """Ensure that the given name is valid for fuzzer/jobs."""
    # Names are interpolated into SQL strings below, so reject anything that
    # does not match the allowed pattern.
    if name and not regex.match(name):
      raise FuzzerStatsException('Invalid fuzzer or job name.')

  def __init__(self, fuzzer_name, job_types, query_fields, group_by, date_start,
               date_end, base_table, alias):
    assert group_by is not None

    self._ensure_valid_name(fuzzer_name, data_types.Fuzzer.VALID_NAME_REGEX)

    if job_types:
      for job_type in job_types:
        self._ensure_valid_name(job_type, data_types.Job.VALID_NAME_REGEX)

    self.fuzzer_name = fuzzer_name
    self.job_types = job_types
    self.query_fields = query_fields
    self.group_by = group_by
    self.date_start = date_start
    self.date_end = date_end
    self.base_table = base_table
    self.alias = alias

    # The engine name for fuzz targets, otherwise the fuzzer name itself.
    # Selects the stats dataset and drives the fuzzer filter in
    # _job_and_fuzzer_selector().
    self.fuzzer_or_engine_name = get_fuzzer_or_engine_name(fuzzer_name)

  def _group_by_select(self):
    """Return a group by field."""
    if self.group_by == QueryGroupBy.GROUP_BY_DAY:
      # Truncate the unix timestamp to a UTC day boundary.
      return ('TIMESTAMP_TRUNC(TIMESTAMP_SECONDS(CAST('
              'timestamp AS INT64)), DAY, "UTC") as date')

    if self.group_by == QueryGroupBy.GROUP_BY_TIME:
      return 'TIMESTAMP_SECONDS(CAST(timestamp AS INT64)) as time'

    return group_by_to_field_name(self.group_by)

  def _group_by(self):
    """Return the group by part of the query."""
    return group_by_to_field_name(self.group_by)

  def _select_fields(self):
    """Return fields for the query."""
    group_by_select = self._group_by_select()
    fields = [group_by_select] if group_by_select else []

    for field in self.query_fields:
      # Custom fields are computed by specialized SQL (see JobQuery.SQL)
      # rather than selected directly.
      if field.is_custom():
        continue

      if field.aggregate_function:
        fields.append('%s(%s) as %s' % (field.aggregate_function, field.name,
                                        field.select_alias))
      else:
        fields.append('%s as %s' % (field.name, field.select_alias))

    return ', '.join(fields)

  def _table_name(self):
    """Return the table name for the query."""
    app_id = utils.get_application_id()
    dataset = dataset_name(self.fuzzer_or_engine_name)
    return '`%s`.%s.%s' % (app_id, dataset, self.base_table)

  def _where(self):
    """Return the where part of the query."""
    result = []
    result.extend(self._partition_selector())
    result.extend(self._job_and_fuzzer_selector())
    result = ' AND '.join(result)
    if result:
      return 'WHERE ' + result
    return ''

  def _job_and_fuzzer_selector(self):
    """Return the job filter condition."""
    result = []
    if self.job_types:
      result.append('(%s)' % ' OR '.join(
          ['job = \'%s\'' % job_type for job_type in self.job_types]))

    # Only filter on fuzzer when querying an engine-level dataset for a
    # specific child fuzzer (the two names differ in that case).
    if self.fuzzer_name != self.fuzzer_or_engine_name:
      result.append('fuzzer = \'%s\'' % self.fuzzer_name)

    return result

  def _partition_selector(self):
    """Return the partition filter condition."""
    # Limit the scan to date partitions within [date_start, date_end].
    result = ('(_PARTITIONTIME BETWEEN TIMESTAMP_SECONDS(%d) '
              'AND TIMESTAMP_SECONDS(%d))')
    return [
        result % (int(utils.utc_date_to_timestamp(self.date_start)),
                  int(utils.utc_date_to_timestamp(self.date_end)))
    ]

  def build(self):
    """Return query."""
    query_parts = [
        'SELECT',
        self._select_fields(),
        'FROM',
        self._table_name(),
        self._where(),
    ]

    if self._group_by():
      query_parts += ['GROUP BY', self._group_by()]

    return ' '.join(query_parts)
class TestcaseQuery(Query):
  """The query class for TestcaseRun Query."""

  # Table alias used in stats column specs, e.g. 'sum(t.some_field)'.
  ALIAS = 't'

  def __init__(self, fuzzer_name, job_types, query_fields, group_by, date_start,
               date_end):
    super(TestcaseQuery, self).__init__(
        fuzzer_name=fuzzer_name,
        job_types=job_types,
        query_fields=query_fields,
        group_by=group_by,
        date_start=date_start,
        date_end=date_end,
        base_table='TestcaseRun',
        alias=TestcaseQuery.ALIAS)
class JobQuery(Query):
  """The query class for JobRun Query."""

  # Default column spec; the 'custom(...)' aggregates are not selected
  # directly — they are produced by the SQL pipeline below.
  DEFAULT_FIELDS = """
    sum(j.testcases_executed) as testcases_executed,
    custom(j.total_crashes) as total_crashes,
    custom(j.new_crashes) as new_crashes,
    custom(j.known_crashes) as known_crashes
  """

  # Three-stage pipeline:
  #  1. JobRunWithConcatedCrashes: group rows and concatenate crash arrays.
  #  2. JobRunWithUniqueCrashes: dedupe crashes by (type, state, security),
  #     summing counts and keeping whether any occurrence was new.
  #  3. JobRunWithSummary: fold the deduped crashes into total/new/known
  #     crash counts.
  SQL = """
    WITH
    JobRunWithConcatedCrashes AS (
      SELECT
        {select_fields},
        ARRAY_CONCAT_AGG(crashes) AS crashes
      FROM
        {table_name}
      {where}
      GROUP BY
        {group_by}
    ),
    JobRunWithUniqueCrashes AS (
      SELECT
        * EXCEPT(crashes),
        ARRAY(
          SELECT AS STRUCT
            crash.crash_type,
            crash.crash_state,
            crash.security_flag,
            SUM(count) AS count,
            MAX(crash.is_new) AS is_new
          FROM
            UNNEST(crashes) AS crash
          GROUP BY
            crash.crash_type,
            crash.crash_state,
            crash.security_flag
        ) AS crashes
      FROM
        JobRunWithConcatedCrashes
    ),
    JobRunWithSummary AS (
      SELECT
        * EXCEPT(crashes),
        (
          SELECT AS STRUCT
            IFNULL(SUM(crash.count), 0) AS total,
            COUNTIF(crash.is_new) AS unique_new,
            COUNT(crash) AS unique
          FROM
            UNNEST(crashes) AS crash
        ) AS crash_count
      FROM
        JobRunWithUniqueCrashes
    )
    SELECT
      * EXCEPT(crash_count),
      crash_count.total AS total_crashes,
      crash_count.unique_new AS new_crashes,
      (crash_count.unique - crash_count.unique_new) AS known_crashes
    FROM
      JobRunWithSummary
  """

  # Table alias used in stats column specs, e.g. 'sum(j.testcases_executed)'.
  ALIAS = 'j'

  def __init__(self, fuzzer_name, job_types, query_fields, group_by, date_start,
               date_end):
    super(JobQuery, self).__init__(
        fuzzer_name=fuzzer_name,
        job_types=job_types,
        query_fields=query_fields,
        group_by=group_by,
        date_start=date_start,
        date_end=date_end,
        base_table='JobRun',
        alias=JobQuery.ALIAS)

  def build(self):
    """Return query."""
    sql = JobQuery.SQL.format(
        table_name=self._table_name(),
        select_fields=self._select_fields(),
        group_by=self._group_by(),
        where=self._where())
    return sql
class TableQuery(object):
  """Query for generating results in a table."""

  def __init__(self, fuzzer_name, job_types, stats_columns, group_by,
               date_start, date_end):
    assert group_by

    self.fuzzer_name = fuzzer_name
    self.job_types = job_types
    self.group_by = group_by
    self.date_start = date_start
    self.date_end = date_end
    self.job_run_query = None
    self.testcase_run_query = None

    job_run_fields = []
    testcase_run_fields = []
    fields = parse_stats_column_fields(stats_columns)

    for field in fields:
      # Split up fields by table. Builtin field specifiers are handled
      # elsewhere and skipped here.
      if not isinstance(field, QueryField):
        continue

      if field.table_alias == JobQuery.ALIAS:
        job_run_fields.append(field)
      elif field.table_alias == TestcaseQuery.ALIAS:
        testcase_run_fields.append(field)

    # Create subqueries.
    # For query by time, we can't correlate the time of testcase run with a job
    # run since they are set at different times. So, use only the results from
    # testcase run and don't join them with job run, see build(). Also, the job
    # paramaters like: known crashes, new crashes are aggregate numbers from job
    # that are not applicable to show per testcase run (a point on graph).
    if job_run_fields and self.group_by != QueryGroupBy.GROUP_BY_TIME:
      self.job_run_query = JobQuery(fuzzer_name, job_types, job_run_fields,
                                    group_by, date_start, date_end)

    if testcase_run_fields:
      self.testcase_run_query = TestcaseQuery(fuzzer_name, job_types,
                                              testcase_run_fields, group_by,
                                              date_start, date_end)

    assert self.job_run_query or self.testcase_run_query, (
        'Unable to create query.')

  def _join_subqueries(self):
    """Create an inner join for subqueries."""
    # Join the job-run and testcase-run subqueries on the shared group-by
    # column.
    result = [
        '(%s) as %s' % (self.job_run_query.build(), self.job_run_query.alias),
        'INNER JOIN',
        '(%s) as %s' % (self.testcase_run_query.build(),
                        self.testcase_run_query.alias), 'ON',
        '{job_alias}.{group_by} = {testcase_alias}.{group_by}'.format(
            job_alias=self.job_run_query.alias,
            testcase_alias=self.testcase_run_query.alias,
            group_by=group_by_to_field_name(self.group_by))
    ]
    return ' '.join(result)

  def _single_subquery(self):
    """Create a single subquery."""
    query = self.job_run_query or self.testcase_run_query
    return '(%s) as %s' % (query.build(), query.alias)

  def build(self):
    """Build the table query."""
    valid_run_query = self.job_run_query or self.testcase_run_query

    result = [
        # We need to do the below to avoid the duplicate column name error.
        'SELECT {0}.{1}, * EXCEPT({1}) FROM'.format(
            valid_run_query.alias,
            group_by_to_field_name(valid_run_query.group_by))
    ]

    if self.job_run_query and self.testcase_run_query:
      result.append(self._join_subqueries())
    else:
      result.append(self._single_subquery())

    return ' '.join(result)
def get_coverage_info(fuzzer, date=None):
  """Returns a CoverageInformation entity for a given fuzzer and date. If date
  is not specified, returns the latest entity available."""
  query = data_types.CoverageInformation.query(
      data_types.CoverageInformation.fuzzer == fuzzer)

  if not date:
    # No date given: order descending by date and take the newest.
    query = query.order(-data_types.CoverageInformation.date)
  else:
    # Restrict to the requested date only.
    query = query.filter(data_types.CoverageInformation.date == date)

  return query.get()
def get_gcs_stats_path(kind, fuzzer, timestamp):
  """Return gcs path in the format "/bucket/path/to/containing_dir/" for the
  given fuzzer, job, and timestamp or revision."""
  bucket_name = big_query.get_bucket()
  if not bucket_name:
    return None

  # Bucket layout is /<bucket>/<fuzzer>/<kind>/date/<YYYY-MM-DD-style dir>/.
  day_dir = data_types.coverage_information_date_to_string(
      datetime.datetime.utcfromtimestamp(timestamp))
  return '/%s/%s/%s/date/%s/' % (bucket_name, fuzzer, kind, day_dir)
@environment.local_noop
def upload_stats(stats_list, filename=None):
  """Upload the fuzzer run to the bigquery bucket. Assumes that all the stats
  given are for the same fuzzer/job run."""
  if not stats_list:
    logs.log_error('Failed to upload fuzzer stats: empty stats.')
    return

  assert isinstance(stats_list, list)

  bucket_name = big_query.get_bucket()
  if not bucket_name:
    logs.log_error('Failed to upload fuzzer stats: missing bucket name.')
    return

  kind = stats_list[0].kind
  fuzzer = stats_list[0].fuzzer

  # Group all stats for fuzz targets.
  fuzzer_or_engine_name = get_fuzzer_or_engine_name(fuzzer)

  if not filename:
    # Generate a random filename.
    filename = '%016x' % random.randint(0, (1 << 64) - 1) + '.json'

  # Handle runs that bleed into the next day: emit one file per UTC day.
  timestamp_start_of_day = lambda s: utils.utc_date_to_timestamp(
      datetime.datetime.utcfromtimestamp(s.timestamp).date())
  # Sorting by timestamp also orders rows by start-of-day, which
  # itertools.groupby requires to yield exactly one group per day.
  stats_list.sort(key=lambda s: s.timestamp)

  for timestamp, stats in itertools.groupby(stats_list, timestamp_start_of_day):
    upload_data = '\n'.join(stat.to_json() for stat in stats)

    # get_gcs_stats_path() returns a '/bucket/...' path, hence the single
    # slash in the 'gs:/' prefix.
    day_path = 'gs:/' + get_gcs_stats_path(
        kind, fuzzer_or_engine_name, timestamp=timestamp) + filename

    if not storage.write_data(upload_data, day_path):
      logs.log_error('Failed to upload FuzzerRun.')
def parse_stats_column_fields(column_fields):
  """Parse the stats column fields."""
  # e.g. 'sum(t.field_name) as display_name'.
  aggregate_regex = re.compile(r'^(\w+)\(([a-z])\.([^\)]+)\)(\s*as\s*(\w+))?$')

  # e.g. '_EDGE_COV as blah'.
  builtin_regex = re.compile(r'^(_\w+)(\s*as\s*(\w+))?$')

  fields = []
  for spec in (part.strip() for part in column_fields.split(',')):
    aggregate_match = aggregate_regex.match(spec)
    if aggregate_match:
      select_alias = aggregate_match.group(5)
      if select_alias:
        select_alias = select_alias.strip('"')

      fields.append(
          QueryField(aggregate_match.group(2), aggregate_match.group(3),
                     aggregate_match.group(1), select_alias))
      continue

    builtin_match = builtin_regex.match(spec)
    if builtin_match:
      alias = builtin_match.group(3)
      if alias:
        alias = alias.strip('"')

      fields.append(BuiltinFieldSpecifier(builtin_match.group(1), alias))

  # Specs matching neither pattern are silently skipped.
  return fields
def get_fuzzer_or_engine_name(fuzzer_name):
  """Return fuzzing engine name if it exists, or |fuzzer_name|."""
  fuzz_target = data_handler.get_fuzz_target(fuzzer_name)
  return fuzz_target.engine if fuzz_target else fuzzer_name
def dataset_name(fuzzer_name):
  """Get the stats dataset name for the given |fuzzer_name|.

  BigQuery dataset names cannot contain '-', so dashes become underscores.
  """
  return '%s_stats' % fuzzer_name.replace('-', '_')
# Maps builtin column specifiers (as written in stats column strings, e.g.
# '_EDGE_COV as coverage') to callables that construct the field object.
BUILTIN_FIELD_CONSTRUCTORS = {
    '_EDGE_COV':
        functools.partial(CoverageField, CoverageField.EDGE),
    '_FUNC_COV':
        functools.partial(CoverageField, CoverageField.FUNCTION),
    '_CORPUS_SIZE':
        functools.partial(CorpusSizeField, CorpusSizeField.CORPUS),
    '_CORPUS_BACKUP':
        CorpusBackupField,
    '_QUARANTINE_SIZE':
        functools.partial(CorpusSizeField, CorpusSizeField.QUARANTINE),
    '_COV_REPORT':
        CoverageReportField,
    '_FUZZER_RUN_LOGS':
        FuzzerRunLogsField,
    '_PERFORMANCE_REPORT':
        PerformanceReportField,
}
| StarcoderdataPython |
3323344 | <gh_stars>1-10
# This file is Copyright (c) 2020 <NAME> <<EMAIL>>
# License: BSD
from migen import *
from litex.soc.interconnect.stream import Endpoint
class VideoStream(Module):
    """Packs 8-bit R/G/B pixel inputs into a 32-bit LiteX stream source,
    registered in the 'pixel' clock domain."""

    def __init__(self):
        # VGA output
        self.red = red = Signal(8)
        self.green = green = Signal(8)
        self.blue = blue = Signal(8)
        self.data_valid = data_valid = Signal()

        # Output stream; only the low 24 of the 32 data bits carry pixel data.
        self.source = source = Endpoint([("data", 32)])

        # NOTE(review): Cat(red, green, blue) presumably places red in the
        # least-significant bits (migen Cat order) — confirm against consumer.
        self.sync.pixel += [
            source.valid.eq(data_valid),
            source.data.eq(Cat(red, green, blue)),
        ]
| StarcoderdataPython |
103947 |
from lib import db
from lib import part
import pretty_errors
import xlwings
from argparse import ArgumentParser
# UNC path to the shared prenesting workbook written by dump_to_xl().
XML_PARTS_XLS = r"\\HSSFILESERV1\HSSshared\HSSI Lean\Job Plans\Pre-Nesting Tools\XML PRENESTING\XML-Parts.xls"

# Friendlier tracebacks for this CLI script.
pretty_errors.configure(
    line_number_first = True,
    display_link = True,
)
def main():
    """Parse CLI arguments, fetch BOM parts for a job/shipment from SQL
    Server, and dump the prenest-eligible ones into the Excel workbook.

    Accepts either `job shipment` as two arguments or the combined
    `job-shipment` form in the first argument.
    """
    parser = ArgumentParser()
    parser.add_argument("--csv", action="store_true", help="Return as csv output")
    parser.add_argument("--all", action="store_const", const="all",
                        default='secondary', help="skip main member")
    # parser.add_argument("--secondary", action="store_const", const="secondary", default='all', help="skip main member")
    parser.add_argument("job", nargs="?", default=None)
    parser.add_argument("shipment", nargs="?", default=None)
    args = parser.parse_args()

    # BUG FIX: `job` is optional (nargs="?"), so it may be None; the original
    # unconditional `"-" in args.job` raised TypeError in that case.
    if args.job and "-" in args.job:
        assert args.shipment is None, "Shipment must be specified in either job or shipment arguments"
        args.job, args.shipment = args.job.split("-")

    parts = list()
    with db.DbConnection(server='HSSSQLSERV', use_win_auth=True) as conn:
        conn.cursor.execute(
            "EXEC BOM.SAP.GetBOMData @Job=?, @Ship=?",
            args.job, args.shipment
        )
        for r in conn.cursor.fetchall():
            p = part.Part(r)
            if p.for_prenest(args.all):
                # Collect and echo each part that qualifies for prenesting.
                parts.append(p)
                print(p)

    dump_to_xl(parts, args.job, args.shipment)
def dump_to_xl(data, job, shipment):
    """Write prenest-formatted parts into the shared XML-Parts workbook.

    Opens XML_PARTS_XLS via xlwings, clears prior contents using the
    workbook's 'clear' macro, writes each part's xml_format() row starting at
    DATA!A2, and fills the JOB/SHIP named ranges.
    """
    wb = xlwings.Book(XML_PARTS_XLS)
    wb.macro("clear")()

    to_prenest = list()
    for part in data:
        to_prenest.append(part.xml_format())

    wb.sheets("DATA").range("A2").value = to_prenest
    wb.sheets("DATA").range("JOB").value = job.upper()
    wb.sheets("DATA").range("SHIP").value = shipment
# Script entry point.
if __name__ == "__main__":
    main()
| StarcoderdataPython |
128696 | import unittest
from better_profanity import profanity
class ProfanityTest(unittest.TestCase):
    """Tests for the module-level better_profanity.profanity singleton."""

    def test_contains_profanity(self):
        # Leetspeak/obfuscated profanity should still be detected.
        profane = profanity.contains_profanity('he is a m0th3rf*cker')
        self.assertTrue(profane)

    def test_leaves_paragraphs_untouched(self):
        # Clean multi-line text must pass through censor() unchanged.
        innocent_text = """If you prick us do we not bleed?
If you tickle us do we not laugh?
If you poison us do we not die?
And if you wrong us shall we not revenge?"""
        censored_text = profanity.censor(innocent_text)
        self.assertTrue(innocent_text == censored_text)

    def test_censorship(self):
        bad_text = "Dude, I hate shit. Fuck bullshit."
        censored_text = profanity.censor(bad_text)
        # make sure it finds both instances
        self.assertFalse("shit" in censored_text)
        # make sure it's case sensitive
        self.assertFalse("fuck" in censored_text)
        # make sure some of the original text is still there
        self.assertTrue("Dude" in censored_text)

    def test_censorship_for_2_words(self):
        # Multi-word (two-token) entries must be censored as a unit.
        bad_text = "That wh0re gave m3 a very good H4nd j0b"
        censored_text = profanity.censor(bad_text)
        self.assertFalse("H4nd j0b" in censored_text)
        self.assertTrue("m3" in censored_text)

    def test_custom_wordlist(self):
        # load_censor_words() replaces the default dictionary entirely.
        custom_badwords = ['happy', 'jolly', 'merry']
        profanity.load_censor_words(custom_badwords)
        # make sure it doesn't find real profanity anymore
        self.assertFalse(profanity.contains_profanity("Fuck you!"))
        # make sure it finds profanity in a sentence containing custom_badwords
        self.assertTrue(profanity.contains_profanity("Have a merry day! :)"))
| StarcoderdataPython |
1733973 | # Author: <NAME>
# Created: 10/4/2021
# Last Edited: 10/8/2021
import Gnome, random
class Gnomes(object):
    """
    Gnomes class:
    Represents a specified collection of Gnomes (candidate TSP tours).
    This class is constructed for solving TSP using a genetic algorithm.
    """

    # Constructor
    # NOTE: numCells is the number of unique cells, not the length of the gnome
    def __init__(self, numGnomes=1, numCells=1, durationMatrix=None, distanceMatrix=None):
        """Create numGnomes random tours over numCells unique cells.

        Raises:
            ValueError: if numCells < 1 (ValueError subclasses the bare
            Exception the original raised, so existing handlers still work).
        """
        if numCells < 1:
            raise ValueError("numCells must be >= 1")
        self.numGnomes = numGnomes
        self.numCells = numCells
        self.durationMatrix = durationMatrix
        self.distanceMatrix = distanceMatrix
        self.averageDuration = 0
        self.averageDistance = 0
        self.gnomes = [
            Gnome.Gnome(numCells, durationMatrix, distanceMatrix)
            for _ in range(numGnomes)
        ]
        self.createAverages()

    def swap(self, g1, g2):
        """Swap the tours of two gnomes in place (cell 0 stays fixed).

        NOTE: swaps tour contents only — cached g.distance/g.duration are NOT
        updated by this operation.
        """
        for x in range(1, self.numCells):
            g1[x], g2[x] = g2[x], g1[x]

    # Sorts gnomes with shortest distance at the top of the list (first index)
    def selectionSortByDistance(self):
        # BUG FIX: the original selection sort swapped tour *contents* via
        # swap(), which left each gnome's cached .distance attached to the
        # wrong tour (crossOver() shows distances are cached and must be
        # recalculated explicitly). Reordering the object references keeps
        # tours and cached metrics together.
        self.gnomes.sort(key=lambda g: g.distance)

    # Sorts gnomes with shortest duration at the top of the list (first index)
    def selectionSortByDuration(self):
        # Same fix as selectionSortByDistance: reorder references, do not
        # swap tour contents out from under cached durations.
        self.gnomes.sort(key=lambda g: g.duration)

    # Boolean, helper function for crossOver method
    # Checks if specified number is in specified list
    def inList(self, num, list):
        return num in list

    # Takes two gnome parents and crosses them into a new child (order
    # crossover: a random slice from g2, remaining cells in g1's order).
    def crossOver(self, g1, g2):
        randNum1 = random.randint(1, self.numCells - 2)
        randNum2 = random.randint(randNum1 + 1, self.numCells - 1)
        gChild = Gnome.Gnome(self.numCells, self.durationMatrix, self.distanceMatrix)

        # Cells copied from g2 into gChild[randNum1..randNum2].
        tempList = [g2[x] for x in range(randNum1, randNum2 + 1)]
        print("TempList: ", tempList)

        # Copy the slice from g2 into the child.
        for y in range(randNum1, randNum2 + 1):
            gChild[y] = g2[y]

        # Fill the remaining positions with g1's cells (skipping those
        # already taken from g2), scanning g1 from the end.
        nxtSpt = 1
        for x in range(self.numCells - 1, 0, -1):
            if not self.inList(g1[x], tempList):
                while randNum1 <= nxtSpt <= randNum2:
                    nxtSpt = nxtSpt + 1
                gChild[nxtSpt] = g1[x]
                nxtSpt = nxtSpt + 1

        gChild.calcDis()
        print("gChild: ", gChild.getGnome(), "distance: ", gChild.distance)
        return gChild

    # Creates a new population using the specified number of elites and
    # the children created by crossing-over the better half of all parents.
    def createNewDisPop(self, numElites):
        self.selectionSortByDistance()
        newGnomes = [None] * self.numGnomes
        for x in range(0, numElites):
            newGnomes[x] = self.gnomes[x]
        # BUG FIX: self.numGnomes/2 is a float in Python 3 and made
        # random.randint raise TypeError; use integer division.
        half = self.numGnomes // 2
        for x in range(numElites, self.numGnomes):
            num1 = random.randint(0, half)
            num2 = num1
            # NOTE(review): loops forever if half == 0 (population of 1) —
            # pre-existing behavior, confirm intended minimum population.
            while num2 == num1:
                num2 = random.randint(0, half)
            newGnomes[x] = self.crossOver(self.gnomes[num1], self.gnomes[num2])
        # NOTE(review): mutation runs on the *old* population before the new
        # one is installed (original ordering preserved); elites are shared
        # references, so they get mutated too — confirm intended.
        self.mutateGnomes()
        self.gnomes = newGnomes

    def createNewDurPop(self, numElites):
        """Same as createNewDisPop, selecting parents/elites by duration.

        BUG FIXES vs. original: referenced undefined names (numGnomes,
        numTarget), used nonexistent random.randInt, passed list indices
        instead of Gnome objects to crossOver, and indexed elites off the
        end of the list (self.gnomes[self.numGnomes - 0]).
        """
        self.selectionSortByDuration()
        newGnomes = [None] * self.numGnomes
        for x in range(0, numElites):
            newGnomes[x] = self.gnomes[x]
        half = self.numGnomes // 2
        for x in range(numElites, self.numGnomes):
            num1 = random.randint(0, half)
            num2 = num1
            while num2 == num1:
                num2 = random.randint(0, half)
            newGnomes[x] = self.crossOver(self.gnomes[num1], self.gnomes[num2])
        self.mutateGnomes()
        self.gnomes = newGnomes

    # Calculates the average distance and duration and stores them in
    # class variables.
    def createAverages(self):
        totalDis = 0
        totalDur = 0
        for x in range(0, self.numGnomes):
            print(self.gnomes[x].distance)
            totalDis = totalDis + self.gnomes[x].distance
            totalDur = totalDur + self.gnomes[x].duration
        self.averageDistance = totalDis / self.numGnomes
        self.averageDuration = totalDur / self.numGnomes

    # Overloaded Operator: Returns gnome at specified index
    def __getitem__(self, index):
        # BUG FIX: original allowed index == numGnomes through its bounds
        # check; use >= (IndexError subclasses Exception, so existing
        # handlers still work).
        if index < 0 or index >= self.numGnomes:
            raise IndexError("gnome index out of range")
        return self.gnomes[index]

    # Mutates all gnomes
    def mutateGnomes(self):
        for x in range(0, self.numGnomes):
            self.gnomes[x].mutate()

    # Prints all gnomes
    def printGnomes(self):
        for x in range(0, self.numGnomes):
            self.gnomes[x].printGnome()
3218428 | import os
import unittest
from dart.client.python.dart_client import Dart
from dart.engine.no_op.metadata import NoOpActionTypes
from dart.model.action import ActionData, Action, ActionState
from dart.model.dataset import Column, DatasetData, Dataset, DataFormat, DataType, FileFormat, RowFormat, LoadType
from dart.model.datastore import Datastore, DatastoreData, DatastoreState
from dart.model.subscription import Subscription, SubscriptionData, SubscriptionElementStats, SubscriptionElementState, \
SubscriptionState
from dart.model.trigger import Trigger, TriggerState
from dart.model.trigger import TriggerData
from dart.model.workflow import WorkflowData, WorkflowState, WorkflowInstanceState
from dart.model.workflow import Workflow
class TestConsumeSubscription(unittest.TestCase):
    """Integration test: a batch trigger consumes all subscription elements.

    Requires a dart API server on localhost:5000 and DART_TEST_BUCKET in the
    environment.
    """

    def setUp(self):
        dart = Dart(host='localhost', port=5000)
        """ :type dart: dart.client.python.dart_client.Dart """
        self.dart = dart

        # Dataset describing delimited text files under the test bucket.
        cs = [Column('c1', DataType.VARCHAR, 50), Column('c2', DataType.BIGINT)]
        df = DataFormat(FileFormat.TEXTFILE, RowFormat.DELIMITED)
        dataset_data = DatasetData(name='test-dataset', table_name='test_dataset_table',
                                   load_type=LoadType.INSERT,
                                   location=('s3://' + os.environ['DART_TEST_BUCKET'] + '/impala'),
                                   data_format=df,
                                   columns=cs,
                                   tags=[])
        self.dataset = self.dart.save_dataset(Dataset(data=dataset_data))

        # Subscription over the s3 key range [start, end), filtered to .rpm
        # files.
        start = 's3://' + os.environ['DART_TEST_BUCKET'] + '/impala/impala'
        end = 's3://' + os.environ['DART_TEST_BUCKET'] + '/impala/install'
        regex = '.*\\.rpm'
        ds = Subscription(data=SubscriptionData('test-subscription', self.dataset.id, start, end, regex))
        self.subscription = self.dart.save_subscription(ds)

        # No-op datastore template plus a two-action workflow: one trivially
        # succeeding action and one that consumes the subscription.
        dst_args = {'action_sleep_time_in_seconds': 0}
        dst = Datastore(data=DatastoreData('test-datastore', 'no_op_engine', args=dst_args, state=DatastoreState.TEMPLATE))
        self.datastore = self.dart.save_datastore(dst)
        wf = Workflow(data=WorkflowData('test-workflow', self.datastore.id, state=WorkflowState.ACTIVE))
        self.workflow = self.dart.save_workflow(wf, self.datastore.id)
        a_args = {'subscription_id': self.subscription.id}
        a0 = Action(data=ActionData(NoOpActionTypes.action_that_succeeds.name, NoOpActionTypes.action_that_succeeds.name, state=ActionState.TEMPLATE))
        a1 = Action(data=ActionData(NoOpActionTypes.consume_subscription.name, NoOpActionTypes.consume_subscription.name, a_args, state=ActionState.TEMPLATE))
        self.action0, self.action1 = self.dart.save_actions([a0, a1], workflow_id=self.workflow.id)

    def tearDown(self):
        # Delete in dependency order: actions, instance datastores, workflow
        # instances, workflow, datastore, subscription, dataset.
        for a in self.dart.get_actions(workflow_id=self.workflow.id):
            self.dart.delete_action(a.id)
        for wfi in self.dart.get_workflow_instances(self.workflow.id):
            self.dart.delete_datastore(wfi.data.datastore_id)
        self.dart.delete_workflow_instances(self.workflow.id)
        self.dart.delete_workflow(self.workflow.id)
        self.dart.delete_datastore(self.datastore.id)
        self.dart.delete_subscription(self.subscription.id)
        self.dart.delete_dataset(self.dataset.id)

    def test_consume_subscription(self):
        # Wait for subscription element generation to finish.
        subscription = self.dart.await_subscription_generation(self.subscription.id)
        self.assertEqual(subscription.data.state, SubscriptionState.ACTIVE)

        # Batch trigger fires when unconsumed data reaches the size threshold.
        tr_args = {'subscription_id': self.subscription.id, 'unconsumed_data_size_in_bytes': 49524}
        tr = Trigger(data=TriggerData('test-trigger', 'subscription_batch', [self.workflow.id], tr_args, TriggerState.ACTIVE))
        self.trigger = self.dart.save_trigger(tr)

        wf_instances = self.dart.await_workflow_completion(self.workflow.id, num_instances=3)
        for wfi in wf_instances:
            self.assertEqual(wfi.data.state, WorkflowInstanceState.COMPLETED)

        # All three elements consumed; byte counts are presumably the fixture
        # .rpm file sizes in the test bucket — confirm if fixtures change.
        stats = self.dart.get_subscription_element_stats(self.subscription.id)
        ses = SubscriptionElementStats(SubscriptionElementState.CONSUMED, 3, 152875004 + 834620 + 49524)
        self.assertEqual([s.to_dict() for s in stats], [ses.to_dict()])

        self.dart.delete_trigger(self.trigger.id)
| StarcoderdataPython |
1742767 | from .material_dicts import get_material_dict
# Raised when string_material does not name a known material table.
class InvalidStringMaterialError(KeyError): pass
# Raised when gauge cannot be parsed as a float.
class InvalidGaugeError(ValueError): pass
# Raised when a numeric input falls outside the supported range.
class OutOfRangeError(ValueError): pass
class GuitarString():
def __init__(self, gauge, string_material):
    """Validate inputs and derive the string's unit weight.

    Raises InvalidStringMaterialError / InvalidGaugeError for bad inputs.
    """
    self.is_valid_string_material(string_material)
    self.string_material = string_material
    self.gauge = self.sanitize_gauge(gauge)
    # Unit weight interpolated from the hardcoded material tables.
    self.unit_weight = self.convert_to_unit_weight(self.gauge, self.string_material)
@staticmethod
def convert_to_unit_weight(gauge, string_material):
"""
Needs to be rewritten...
cycles through an array of the appropriate string materials, comparing each entry to the gauge
if the gauge in the reversed sorted list of keys is less than the given gauge then we have found
the closest match our hardcoded dictionary values allow for
@param string_material: one of 5 types used to determine which array to fetch
@param gauge: the desired gauge, used to find the unit weight in the material_dict
@return: the closest matched unit_weight to the gauge of the given material
"""
material_dict = get_material_dict(string_material)
gauge_keys = sorted(material_dict.keys())
max_gauge_index = gauge_keys.index(max(gauge_keys))
if gauge > max(gauge_keys):
low_gauge = gauge_keys[max_gauge_index - 2]
high_gauge = max(gauge_keys)
elif gauge < min(gauge_keys):
low_gauge = gauge_keys[0]
high_gauge = gauge_keys[2]
else:
gauge_index = 0
for matched_gauge in gauge_keys:
if gauge < matched_gauge:
break
gauge_index += 1
low_gauge = gauge_keys[gauge_index - 1]
high_gauge = gauge_keys[gauge_index]
low_unit_weight = material_dict[low_gauge]
high_unit_weight = material_dict[high_gauge]
unit_weight = low_unit_weight + ((high_unit_weight - low_unit_weight) * (gauge - low_gauge)
/ (high_gauge - low_gauge))
return unit_weight
@staticmethod
def is_valid_string_material(string_material):
"""
converts parameter to an int and raises an appropriate error on failure
@param string_material:
@return: @raise InvalidStringMaterialError:
"""
if get_material_dict(string_material) == 'Invalid':
raise InvalidStringMaterialError('string_material does not match predefined string materials')
return True
@staticmethod
def sanitize_gauge(gauge):
"""
converts parameter to an float and raises an appropriate error on failure
also checks that the scale length is above zero or it will throw OutOfRangeError
@param gauge:
@return: @raise OutOfRangeError:
"""
try:
gauge = float(gauge)
except ValueError:
raise InvalidGaugeError('gauge must be a float')
if gauge <= 0:
raise OutOfRangeError('gauge must be a positive number')
return gauge | StarcoderdataPython |
# Generated file, please do not change!!!
import re
import typing
import marshmallow
import marshmallow_enum
from commercetools import helpers
from ... import models
from ..cart import (
CartOrigin,
CartState,
DiscountCodeState,
InventoryMode,
LineItemMode,
LineItemPriceMode,
RoundingMode,
ShippingMethodState,
TaxCalculationMode,
TaxMode,
)
from ..common import ReferenceTypeId
from .common import (
BaseResourceSchema,
LocalizedStringField,
ReferenceSchema,
ResourceIdentifierSchema,
)
from .type import FieldContainerField
# Fields
# Marshmallow Schemas
class CartSchema(BaseResourceSchema):
    """Generated marshmallow schema for the Cart resource (do not edit by hand)."""
    key = marshmallow.fields.String(allow_none=True, missing=None)
    last_modified_by = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.LastModifiedBySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="lastModifiedBy",
    )
    created_by = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.CreatedBySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="createdBy",
    )
    customer_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerId",
    )
    customer_email = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerEmail",
    )
    anonymous_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="anonymousId",
    )
    store = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".store.StoreKeyReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".LineItemSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="lineItems",
    )
    custom_line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CustomLineItemSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="customLineItems",
    )
    total_price = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalPrice",
    )
    taxed_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxedPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxedPrice",
    )
    cart_state = marshmallow_enum.EnumField(
        CartState, by_value=True, allow_none=True, missing=None, data_key="cartState"
    )
    shipping_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingAddress",
    )
    billing_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="billingAddress",
    )
    inventory_mode = marshmallow_enum.EnumField(
        InventoryMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="inventoryMode",
    )
    tax_mode = marshmallow_enum.EnumField(
        TaxMode, by_value=True, allow_none=True, missing=None, data_key="taxMode"
    )
    tax_rounding_mode = marshmallow_enum.EnumField(
        RoundingMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxRoundingMode",
    )
    tax_calculation_mode = marshmallow_enum.EnumField(
        TaxCalculationMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxCalculationMode",
    )
    customer_group = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".customer_group.CustomerGroupReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerGroup",
    )
    country = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    shipping_info = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ShippingInfoSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingInfo",
    )
    discount_codes = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountCodeInfoSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="discountCodes",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    payment_info = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.PaymentInfoSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="paymentInfo",
    )
    locale = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    delete_days_after_last_modification = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="deleteDaysAfterLastModification",
    )
    refused_gifts = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".cart_discount.CartDiscountReferenceSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="refusedGifts",
    )
    origin = marshmallow_enum.EnumField(
        CartOrigin, by_value=True, allow_none=True, missing=None
    )
    shipping_rate_input = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "Classification": helpers.absmod(
                __name__, ".ClassificationShippingRateInputSchema"
            ),
            "Score": helpers.absmod(__name__, ".ScoreShippingRateInputSchema"),
        },
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingRateInput",
    )
    item_shipping_addresses = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="itemShippingAddresses",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.Cart(**data)
class CartDraftSchema(helpers.BaseSchema):
    """Generated marshmallow schema for the CartDraft payload (do not edit by hand)."""
    currency = marshmallow.fields.String(allow_none=True, missing=None)
    customer_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerId",
    )
    customer_email = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerEmail",
    )
    customer_group = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".customer_group.CustomerGroupResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerGroup",
    )
    anonymous_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="anonymousId",
    )
    store = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".store.StoreResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    country = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    inventory_mode = marshmallow_enum.EnumField(
        InventoryMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="inventoryMode",
    )
    tax_mode = marshmallow_enum.EnumField(
        TaxMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxMode",
    )
    tax_rounding_mode = marshmallow_enum.EnumField(
        RoundingMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRoundingMode",
    )
    tax_calculation_mode = marshmallow_enum.EnumField(
        TaxCalculationMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCalculationMode",
    )
    line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".LineItemDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="lineItems",
    )
    custom_line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CustomLineItemDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customLineItems",
    )
    shipping_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingAddress",
    )
    billing_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="billingAddress",
    )
    shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shipping_method.ShippingMethodResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingMethod",
    )
    external_tax_rate_for_shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRateForShippingMethod",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    locale = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    delete_days_after_last_modification = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="deleteDaysAfterLastModification",
    )
    origin = marshmallow_enum.EnumField(
        CartOrigin,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_rate_input = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "Classification": helpers.absmod(
                __name__, ".ClassificationShippingRateInputDraftSchema"
            ),
            "Score": helpers.absmod(__name__, ".ScoreShippingRateInputDraftSchema"),
        },
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingRateInput",
    )
    item_shipping_addresses = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="itemShippingAddresses",
    )
    discount_codes = marshmallow.fields.List(
        marshmallow.fields.String(allow_none=True),
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="discountCodes",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.CartDraft(**data)
class CartPagedQueryResponseSchema(helpers.BaseSchema):
    """Generated marshmallow schema for a paged Cart query response (do not edit by hand)."""
    limit = marshmallow.fields.Integer(allow_none=True, missing=None)
    count = marshmallow.fields.Integer(allow_none=True, missing=None)
    total = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    offset = marshmallow.fields.Integer(allow_none=True, missing=None)
    results = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CartSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.CartPagedQueryResponse(**data)
class CartReferenceSchema(ReferenceSchema):
    """Generated marshmallow schema for a Reference to a Cart (do not edit by hand)."""
    obj = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CartSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # type_id is fixed for this reference type; drop it before model construction.
        del data["type_id"]
        return models.CartReference(**data)
class CartResourceIdentifierSchema(ResourceIdentifierSchema):
    """Generated marshmallow schema for a ResourceIdentifier to a Cart (do not edit by hand)."""
    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # type_id is fixed for this identifier type; drop it before model construction.
        del data["type_id"]
        return models.CartResourceIdentifier(**data)
class CartUpdateSchema(helpers.BaseSchema):
    """Generated marshmallow schema for a Cart update request (do not edit by hand).

    The ``actions`` list is polymorphic: each entry is dispatched to the
    matching action schema by its ``action`` discriminator field.
    """
    version = marshmallow.fields.Integer(allow_none=True, missing=None)
    actions = marshmallow.fields.List(
        helpers.Discriminator(
            allow_none=True,
            discriminator_field=("action", "action"),
            discriminator_schemas={
                "addCustomLineItem": helpers.absmod(
                    __name__, ".CartAddCustomLineItemActionSchema"
                ),
                "addDiscountCode": helpers.absmod(
                    __name__, ".CartAddDiscountCodeActionSchema"
                ),
                "addItemShippingAddress": helpers.absmod(
                    __name__, ".CartAddItemShippingAddressActionSchema"
                ),
                "addLineItem": helpers.absmod(__name__, ".CartAddLineItemActionSchema"),
                "addPayment": helpers.absmod(__name__, ".CartAddPaymentActionSchema"),
                "addShoppingList": helpers.absmod(
                    __name__, ".CartAddShoppingListActionSchema"
                ),
                "applyDeltaToCustomLineItemShippingDetailsTargets": helpers.absmod(
                    __name__,
                    ".CartApplyDeltaToCustomLineItemShippingDetailsTargetsActionSchema",
                ),
                "applyDeltaToLineItemShippingDetailsTargets": helpers.absmod(
                    __name__,
                    ".CartApplyDeltaToLineItemShippingDetailsTargetsActionSchema",
                ),
                "changeCustomLineItemMoney": helpers.absmod(
                    __name__, ".CartChangeCustomLineItemMoneyActionSchema"
                ),
                "changeCustomLineItemQuantity": helpers.absmod(
                    __name__, ".CartChangeCustomLineItemQuantityActionSchema"
                ),
                "changeLineItemQuantity": helpers.absmod(
                    __name__, ".CartChangeLineItemQuantityActionSchema"
                ),
                "changeTaxCalculationMode": helpers.absmod(
                    __name__, ".CartChangeTaxCalculationModeActionSchema"
                ),
                "changeTaxMode": helpers.absmod(
                    __name__, ".CartChangeTaxModeActionSchema"
                ),
                "changeTaxRoundingMode": helpers.absmod(
                    __name__, ".CartChangeTaxRoundingModeActionSchema"
                ),
                "recalculate": helpers.absmod(__name__, ".CartRecalculateActionSchema"),
                "removeCustomLineItem": helpers.absmod(
                    __name__, ".CartRemoveCustomLineItemActionSchema"
                ),
                "removeDiscountCode": helpers.absmod(
                    __name__, ".CartRemoveDiscountCodeActionSchema"
                ),
                "removeItemShippingAddress": helpers.absmod(
                    __name__, ".CartRemoveItemShippingAddressActionSchema"
                ),
                "removeLineItem": helpers.absmod(
                    __name__, ".CartRemoveLineItemActionSchema"
                ),
                "removePayment": helpers.absmod(
                    __name__, ".CartRemovePaymentActionSchema"
                ),
                "setAnonymousId": helpers.absmod(
                    __name__, ".CartSetAnonymousIdActionSchema"
                ),
                "setBillingAddress": helpers.absmod(
                    __name__, ".CartSetBillingAddressActionSchema"
                ),
                "setCartTotalTax": helpers.absmod(
                    __name__, ".CartSetCartTotalTaxActionSchema"
                ),
                "setCountry": helpers.absmod(__name__, ".CartSetCountryActionSchema"),
                "setCustomField": helpers.absmod(
                    __name__, ".CartSetCustomFieldActionSchema"
                ),
                "setCustomLineItemCustomField": helpers.absmod(
                    __name__, ".CartSetCustomLineItemCustomFieldActionSchema"
                ),
                "setCustomLineItemCustomType": helpers.absmod(
                    __name__, ".CartSetCustomLineItemCustomTypeActionSchema"
                ),
                "setCustomLineItemShippingDetails": helpers.absmod(
                    __name__, ".CartSetCustomLineItemShippingDetailsActionSchema"
                ),
                "setCustomLineItemTaxAmount": helpers.absmod(
                    __name__, ".CartSetCustomLineItemTaxAmountActionSchema"
                ),
                "setCustomLineItemTaxRate": helpers.absmod(
                    __name__, ".CartSetCustomLineItemTaxRateActionSchema"
                ),
                "setCustomShippingMethod": helpers.absmod(
                    __name__, ".CartSetCustomShippingMethodActionSchema"
                ),
                "setCustomType": helpers.absmod(
                    __name__, ".CartSetCustomTypeActionSchema"
                ),
                "setCustomerEmail": helpers.absmod(
                    __name__, ".CartSetCustomerEmailActionSchema"
                ),
                "setCustomerGroup": helpers.absmod(
                    __name__, ".CartSetCustomerGroupActionSchema"
                ),
                "setCustomerId": helpers.absmod(
                    __name__, ".CartSetCustomerIdActionSchema"
                ),
                "setDeleteDaysAfterLastModification": helpers.absmod(
                    __name__, ".CartSetDeleteDaysAfterLastModificationActionSchema"
                ),
                "setKey": helpers.absmod(__name__, ".CartSetKeyActionSchema"),
                "setLineItemCustomField": helpers.absmod(
                    __name__, ".CartSetLineItemCustomFieldActionSchema"
                ),
                "setLineItemCustomType": helpers.absmod(
                    __name__, ".CartSetLineItemCustomTypeActionSchema"
                ),
                "setLineItemDistributionChannel": helpers.absmod(
                    __name__, ".CartSetLineItemDistributionChannelActionSchema"
                ),
                "setLineItemPrice": helpers.absmod(
                    __name__, ".CartSetLineItemPriceActionSchema"
                ),
                "setLineItemShippingDetails": helpers.absmod(
                    __name__, ".CartSetLineItemShippingDetailsActionSchema"
                ),
                "setLineItemTaxAmount": helpers.absmod(
                    __name__, ".CartSetLineItemTaxAmountActionSchema"
                ),
                "setLineItemTaxRate": helpers.absmod(
                    __name__, ".CartSetLineItemTaxRateActionSchema"
                ),
                "setLineItemTotalPrice": helpers.absmod(
                    __name__, ".CartSetLineItemTotalPriceActionSchema"
                ),
                "setLocale": helpers.absmod(__name__, ".CartSetLocaleActionSchema"),
                "setShippingAddress": helpers.absmod(
                    __name__, ".CartSetShippingAddressActionSchema"
                ),
                "setShippingMethod": helpers.absmod(
                    __name__, ".CartSetShippingMethodActionSchema"
                ),
                "setShippingMethodTaxAmount": helpers.absmod(
                    __name__, ".CartSetShippingMethodTaxAmountActionSchema"
                ),
                "setShippingMethodTaxRate": helpers.absmod(
                    __name__, ".CartSetShippingMethodTaxRateActionSchema"
                ),
                "setShippingRateInput": helpers.absmod(
                    __name__, ".CartSetShippingRateInputActionSchema"
                ),
                "updateItemShippingAddress": helpers.absmod(
                    __name__, ".CartUpdateItemShippingAddressActionSchema"
                ),
            },
        ),
        allow_none=True,
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.CartUpdate(**data)
class CartUpdateActionSchema(helpers.BaseSchema):
    """Generated marshmallow base schema for Cart update actions (do not edit by hand)."""
    action = marshmallow.fields.String(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # The action discriminator is implied by the model class; drop it.
        del data["action"]
        return models.CartUpdateAction(**data)
class CustomLineItemSchema(helpers.BaseSchema):
    """Generated marshmallow schema for a CustomLineItem (do not edit by hand)."""
    id = marshmallow.fields.String(allow_none=True, missing=None)
    name = LocalizedStringField(allow_none=True, missing=None)
    money = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )
    taxed_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxedItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxedPrice",
    )
    total_price = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalPrice",
    )
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    state = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.ItemStateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxCategoryReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    discounted_price_per_quantity = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceForQuantitySchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountedPricePerQuantity",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.CustomLineItem(**data)
class CustomLineItemDraftSchema(helpers.BaseSchema):
    """Generated marshmallow schema for a CustomLineItemDraft (do not edit by hand)."""
    name = LocalizedStringField(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.CustomLineItemDraft(**data)
class DiscountCodeInfoSchema(helpers.BaseSchema):
    """Generated marshmallow schema for DiscountCodeInfo (do not edit by hand)."""
    discount_code = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".discount_code.DiscountCodeReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountCode",
    )
    state = marshmallow_enum.EnumField(
        DiscountCodeState, by_value=True, allow_none=True, missing=None
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.DiscountCodeInfo(**data)
class DiscountedLineItemPortionSchema(helpers.BaseSchema):
    """Generated marshmallow schema for DiscountedLineItemPortion (do not edit by hand)."""
    discount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".cart_discount.CartDiscountReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    discounted_amount = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="discountedAmount",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.DiscountedLineItemPortion(**data)
class DiscountedLineItemPriceSchema(helpers.BaseSchema):
    """Generated marshmallow schema for DiscountedLineItemPrice (do not edit by hand)."""
    value = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )
    included_discounts = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPortionSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="includedDiscounts",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.DiscountedLineItemPrice(**data)
class DiscountedLineItemPriceForQuantitySchema(helpers.BaseSchema):
    """Generated marshmallow schema for DiscountedLineItemPriceForQuantity (do not edit by hand)."""
    quantity = marshmallow.fields.Float(allow_none=True, missing=None)
    discounted_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountedPrice",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.DiscountedLineItemPriceForQuantity(**data)
class ExternalLineItemTotalPriceSchema(helpers.BaseSchema):
    """Generated marshmallow schema for ExternalLineItemTotalPrice (do not edit by hand)."""
    price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalPrice",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.ExternalLineItemTotalPrice(**data)
class ExternalTaxAmountDraftSchema(helpers.BaseSchema):
    """Generated marshmallow schema for ExternalTaxAmountDraft (do not edit by hand)."""
    total_gross = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalGross",
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="taxRate",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.ExternalTaxAmountDraft(**data)
class ExternalTaxRateDraftSchema(helpers.BaseSchema):
    """Generated marshmallow schema for ExternalTaxRateDraft (do not edit by hand)."""
    name = marshmallow.fields.String(allow_none=True, missing=None)
    amount = marshmallow.fields.Float(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    country = marshmallow.fields.String(allow_none=True, missing=None)
    state = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    sub_rates = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.SubRateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="subRates",
    )
    included_in_price = marshmallow.fields.Boolean(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="includedInPrice",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.ExternalTaxRateDraft(**data)
class ItemShippingDetailsSchema(helpers.BaseSchema):
    """Generated marshmallow schema for ItemShippingDetails (do not edit by hand)."""
    targets = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    valid = marshmallow.fields.Boolean(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.ItemShippingDetails(**data)
class ItemShippingDetailsDraftSchema(helpers.BaseSchema):
    """Generated marshmallow schema for ItemShippingDetailsDraft (do not edit by hand)."""
    targets = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.ItemShippingDetailsDraft(**data)
class ItemShippingTargetSchema(helpers.BaseSchema):
    """Generated marshmallow schema for ItemShippingTarget (do not edit by hand)."""
    address_key = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="addressKey"
    )
    quantity = marshmallow.fields.Float(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.ItemShippingTarget(**data)
class LineItemSchema(helpers.BaseSchema):
id = marshmallow.fields.String(allow_none=True, missing=None)
product_id = marshmallow.fields.String(
allow_none=True, missing=None, data_key="productId"
)
name = LocalizedStringField(allow_none=True, missing=None)
product_slug = LocalizedStringField(
allow_none=True,
metadata={"omit_empty": True},
missing=None,
data_key="productSlug",
)
product_type = helpers.LazyNestedField(
nested=helpers.absmod(__name__, ".product_type.ProductTypeReferenceSchema"),
allow_none=True,
unknown=marshmallow.EXCLUDE,
missing=None,
data_key="productType",
)
variant = helpers.LazyNestedField(
nested=helpers.absmod(__name__, ".product.ProductVariantSchema"),
allow_none=True,
unknown=marshmallow.EXCLUDE,
missing=None,
)
price = helpers.LazyNestedField(
nested=helpers.absmod(__name__, ".common.PriceSchema"),
allow_none=True,
unknown=marshmallow.EXCLUDE,
missing=None,
)
taxed_price = helpers.LazyNestedField(
nested=helpers.absmod(__name__, ".TaxedItemPriceSchema"),
allow_none=True,
unknown=marshmallow.EXCLUDE,
metadata={"omit_empty": True},
missing=None,
data_key="taxedPrice",
)
total_price = helpers.Discriminator(
allow_none=True,
discriminator_field=("type", "type"),
discriminator_schemas={
"centPrecision": helpers.absmod(
__name__, ".common.CentPrecisionMoneySchema"
),
"highPrecision": helpers.absmod(
__name__, ".common.HighPrecisionMoneySchema"
),
},
missing=None,
data_key="totalPrice",
)
quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    # Remaining field declarations of the enclosing schema (its ``class``
    # header is above this chunk).  Pattern used throughout: snake_case
    # attribute names map to camelCase wire names via ``data_key``;
    # ``missing=None`` makes every field optional on load; nested schemas are
    # resolved lazily via ``helpers.absmod`` to avoid import cycles.
    added_at = marshmallow.fields.DateTime(
        allow_none=True, metadata={"omit_empty": True}, missing=None, data_key="addedAt"
    )
    state = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.ItemStateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    discounted_price_per_quantity = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceForQuantitySchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountedPricePerQuantity",
    )
    price_mode = marshmallow_enum.EnumField(
        LineItemPriceMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="priceMode",
    )
    line_item_mode = marshmallow_enum.EnumField(
        LineItemMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="lineItemMode",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    last_modified_at = marshmallow.fields.DateTime(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="lastModifiedAt",
    )
    # Silently drop any input keys that are not declared above.
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.LineItem(**data)
class LineItemDraftSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.LineItemDraft`; unknown input keys are ignored."""

    product_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="productId",
    )
    variant_id = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="variantId",
    )
    sku = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    quantity = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    added_at = marshmallow.fields.DateTime(
        allow_none=True, metadata={"omit_empty": True}, missing=None, data_key="addedAt"
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.LineItemDraft(**data)
class ReplicaCartDraftSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.ReplicaCartDraft`.

    ``reference`` is polymorphic: the ``typeId`` value in the payload picks
    which concrete reference schema deserializes it.
    """

    reference = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("typeId", "type_id"),
        discriminator_schemas={
            "cart-discount": helpers.absmod(
                __name__, ".cart_discount.CartDiscountReferenceSchema"
            ),
            "cart": helpers.absmod(__name__, ".CartReferenceSchema"),
            "category": helpers.absmod(__name__, ".category.CategoryReferenceSchema"),
            "channel": helpers.absmod(__name__, ".channel.ChannelReferenceSchema"),
            "key-value-document": helpers.absmod(
                __name__, ".custom_object.CustomObjectReferenceSchema"
            ),
            "customer-group": helpers.absmod(
                __name__, ".customer_group.CustomerGroupReferenceSchema"
            ),
            "customer": helpers.absmod(__name__, ".customer.CustomerReferenceSchema"),
            "discount-code": helpers.absmod(
                __name__, ".discount_code.DiscountCodeReferenceSchema"
            ),
            "inventory-entry": helpers.absmod(
                __name__, ".inventory.InventoryEntryReferenceSchema"
            ),
            "order-edit": helpers.absmod(
                __name__, ".order_edit.OrderEditReferenceSchema"
            ),
            "order": helpers.absmod(__name__, ".order.OrderReferenceSchema"),
            "payment": helpers.absmod(__name__, ".payment.PaymentReferenceSchema"),
            "product-discount": helpers.absmod(
                __name__, ".product_discount.ProductDiscountReferenceSchema"
            ),
            "product-type": helpers.absmod(
                __name__, ".product_type.ProductTypeReferenceSchema"
            ),
            "product": helpers.absmod(__name__, ".product.ProductReferenceSchema"),
            "review": helpers.absmod(__name__, ".review.ReviewReferenceSchema"),
            "shipping-method": helpers.absmod(
                __name__, ".shipping_method.ShippingMethodReferenceSchema"
            ),
            "shopping-list": helpers.absmod(
                __name__, ".shopping_list.ShoppingListReferenceSchema"
            ),
            "state": helpers.absmod(__name__, ".state.StateReferenceSchema"),
            "store": helpers.absmod(__name__, ".store.StoreReferenceSchema"),
            "tax-category": helpers.absmod(
                __name__, ".tax_category.TaxCategoryReferenceSchema"
            ),
            "type": helpers.absmod(__name__, ".type.TypeReferenceSchema"),
            "zone": helpers.absmod(__name__, ".zone.ZoneReferenceSchema"),
        },
        missing=None,
    )
    key = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.ReplicaCartDraft(**data)
class ShippingInfoSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.ShippingInfo`.

    ``price`` is polymorphic on its ``type`` key (``centPrecision`` /
    ``highPrecision`` money representations).
    """

    shipping_method_name = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="shippingMethodName"
    )
    price = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )
    shipping_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".shipping_method.ShippingRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="shippingRate",
    )
    taxed_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxedItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxedPrice",
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxCategoryReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shipping_method.ShippingMethodReferenceSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingMethod",
    )
    deliveries = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.DeliverySchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    discounted_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="discountedPrice",
    )
    shipping_method_state = marshmallow_enum.EnumField(
        ShippingMethodState,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="shippingMethodState",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.ShippingInfo(**data)
class ShippingRateInputSchema(helpers.BaseSchema):
    """Base schema for shipping-rate-input payloads, loading :class:`models.ShippingRateInput`."""

    type = marshmallow.fields.String(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "type" discriminator; it is not a model constructor argument.
        del data["type"]
        return models.ShippingRateInput(**data)
class ClassificationShippingRateInputSchema(ShippingRateInputSchema):
    """Schema for :class:`models.ClassificationShippingRateInput` (``key`` + localized ``label``)."""

    key = marshmallow.fields.String(allow_none=True, missing=None)
    label = LocalizedStringField(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "type" discriminator; it is not a model constructor argument.
        del data["type"]
        return models.ClassificationShippingRateInput(**data)
class ScoreShippingRateInputSchema(ShippingRateInputSchema):
    """Schema for :class:`models.ScoreShippingRateInput` (numeric ``score``)."""

    score = marshmallow.fields.Float(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "type" discriminator; it is not a model constructor argument.
        del data["type"]
        return models.ScoreShippingRateInput(**data)
class ShippingRateInputDraftSchema(helpers.BaseSchema):
    """Base schema for shipping-rate-input drafts, loading :class:`models.ShippingRateInputDraft`."""

    type = marshmallow.fields.String(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "type" discriminator; it is not a model constructor argument.
        del data["type"]
        return models.ShippingRateInputDraft(**data)
class ClassificationShippingRateInputDraftSchema(ShippingRateInputDraftSchema):
    """Schema for :class:`models.ClassificationShippingRateInputDraft` (``key`` only)."""

    key = marshmallow.fields.String(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "type" discriminator; it is not a model constructor argument.
        del data["type"]
        return models.ClassificationShippingRateInputDraft(**data)
class ScoreShippingRateInputDraftSchema(ShippingRateInputDraftSchema):
    """Schema for :class:`models.ScoreShippingRateInputDraft` (numeric ``score``)."""

    score = marshmallow.fields.Float(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "type" discriminator; it is not a model constructor argument.
        del data["type"]
        return models.ScoreShippingRateInputDraft(**data)
class TaxPortionSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.TaxPortion`; ``amount`` is polymorphic money."""

    name = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    rate = marshmallow.fields.Float(allow_none=True, missing=None)
    amount = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.TaxPortion(**data)
class TaxPortionDraftSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.TaxPortionDraft` (plain Money amount)."""

    name = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    rate = marshmallow.fields.Float(allow_none=True, missing=None)
    amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.TaxPortionDraft(**data)
class TaxedItemPriceSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.TaxedItemPrice`; both totals are polymorphic money."""

    total_net = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalNet",
    )
    total_gross = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalGross",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.TaxedItemPrice(**data)
class TaxedPriceSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.TaxedPrice` (totals + per-rate tax portions)."""

    total_net = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalNet",
    )
    total_gross = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalGross",
    )
    tax_portions = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxPortionSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="taxPortions",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.TaxedPrice(**data)
class TaxedPriceDraftSchema(helpers.BaseSchema):
    """Schema that loads/dumps :class:`models.TaxedPriceDraft` (plain Money totals)."""

    total_net = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalNet",
    )
    total_gross = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalGross",
    )
    tax_portions = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxPortionDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="taxPortions",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Turn the validated dict into the concrete model instance.
        return models.TaxedPriceDraft(**data)
class CartAddCustomLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the ``addCustomLineItem`` cart update action."""

    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    name = LocalizedStringField(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartAddCustomLineItemAction(**data)
class CartAddDiscountCodeActionSchema(CartUpdateActionSchema):
    """Schema for the ``addDiscountCode`` cart update action."""

    code = marshmallow.fields.String(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartAddDiscountCodeAction(**data)
class CartAddItemShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the ``addItemShippingAddress`` cart update action."""

    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartAddItemShippingAddressAction(**data)
class CartAddLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the ``addLineItem`` cart update action."""

    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    product_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="productId",
    )
    variant_id = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="variantId",
    )
    sku = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    quantity = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartAddLineItemAction(**data)
class CartAddPaymentActionSchema(CartUpdateActionSchema):
    """Schema for the ``addPayment`` cart update action."""

    payment = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".payment.PaymentResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartAddPaymentAction(**data)
class CartAddShoppingListActionSchema(CartUpdateActionSchema):
    """Schema for the ``addShoppingList`` cart update action."""

    shopping_list = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shopping_list.ShoppingListResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="shoppingList",
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartAddShoppingListAction(**data)
class CartApplyDeltaToCustomLineItemShippingDetailsTargetsActionSchema(
    CartUpdateActionSchema
):
    """Schema for the ``applyDeltaToCustomLineItemShippingDetailsTargets`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    targets_delta = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="targetsDelta",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartApplyDeltaToCustomLineItemShippingDetailsTargetsAction(**data)
class CartApplyDeltaToLineItemShippingDetailsTargetsActionSchema(
    CartUpdateActionSchema
):
    """Schema for the ``applyDeltaToLineItemShippingDetailsTargets`` cart update action."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    targets_delta = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="targetsDelta",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartApplyDeltaToLineItemShippingDetailsTargetsAction(**data)
class CartChangeCustomLineItemMoneyActionSchema(CartUpdateActionSchema):
    """Schema for the ``changeCustomLineItemMoney`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartChangeCustomLineItemMoneyAction(**data)
class CartChangeCustomLineItemQuantityActionSchema(CartUpdateActionSchema):
    """Schema for the ``changeCustomLineItemQuantity`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartChangeCustomLineItemQuantityAction(**data)
class CartChangeLineItemQuantityActionSchema(CartUpdateActionSchema):
    """Schema for the ``changeLineItemQuantity`` cart update action."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartChangeLineItemQuantityAction(**data)
class CartChangeTaxCalculationModeActionSchema(CartUpdateActionSchema):
    """Schema for the ``changeTaxCalculationMode`` cart update action."""

    tax_calculation_mode = marshmallow_enum.EnumField(
        TaxCalculationMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxCalculationMode",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartChangeTaxCalculationModeAction(**data)
class CartChangeTaxModeActionSchema(CartUpdateActionSchema):
    """Schema for the ``changeTaxMode`` cart update action."""

    tax_mode = marshmallow_enum.EnumField(
        TaxMode, by_value=True, allow_none=True, missing=None, data_key="taxMode"
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartChangeTaxModeAction(**data)
class CartChangeTaxRoundingModeActionSchema(CartUpdateActionSchema):
    """Schema for the ``changeTaxRoundingMode`` cart update action."""

    tax_rounding_mode = marshmallow_enum.EnumField(
        RoundingMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxRoundingMode",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartChangeTaxRoundingModeAction(**data)
class CartRecalculateActionSchema(CartUpdateActionSchema):
    """Schema for the ``recalculate`` cart update action."""

    update_product_data = marshmallow.fields.Boolean(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="updateProductData",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartRecalculateAction(**data)
class CartRemoveCustomLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the ``removeCustomLineItem`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartRemoveCustomLineItemAction(**data)
class CartRemoveDiscountCodeActionSchema(CartUpdateActionSchema):
    """Schema for the ``removeDiscountCode`` cart update action."""

    discount_code = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".discount_code.DiscountCodeReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountCode",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartRemoveDiscountCodeAction(**data)
class CartRemoveItemShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the ``removeItemShippingAddress`` cart update action."""

    address_key = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="addressKey"
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartRemoveItemShippingAddressAction(**data)
class CartRemoveLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the ``removeLineItem`` cart update action."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    quantity = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    shipping_details_to_remove = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetailsToRemove",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartRemoveLineItemAction(**data)
class CartRemovePaymentActionSchema(CartUpdateActionSchema):
    """Schema for the ``removePayment`` cart update action."""

    payment = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".payment.PaymentResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartRemovePaymentAction(**data)
class CartSetAnonymousIdActionSchema(CartUpdateActionSchema):
    """Schema for the ``setAnonymousId`` cart update action."""

    anonymous_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="anonymousId",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetAnonymousIdAction(**data)
class CartSetBillingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the ``setBillingAddress`` cart update action."""

    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetBillingAddressAction(**data)
class CartSetCartTotalTaxActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCartTotalTax`` cart update action."""

    external_total_gross = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="externalTotalGross",
    )
    external_tax_portions = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxPortionDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxPortions",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCartTotalTaxAction(**data)
class CartSetCountryActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCountry`` cart update action."""

    country = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCountryAction(**data)
class CartSetCustomFieldActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomField`` cart update action; ``value`` is passed through untyped."""

    name = marshmallow.fields.String(allow_none=True, missing=None)
    value = marshmallow.fields.Raw(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomFieldAction(**data)
class CartSetCustomLineItemCustomFieldActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomLineItemCustomField`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    name = marshmallow.fields.String(allow_none=True, missing=None)
    value = marshmallow.fields.Raw(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomLineItemCustomFieldAction(**data)
class CartSetCustomLineItemCustomTypeActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomLineItemCustomType`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    type = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.TypeResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    fields = FieldContainerField(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomLineItemCustomTypeAction(**data)
class CartSetCustomLineItemShippingDetailsActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomLineItemShippingDetails`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomLineItemShippingDetailsAction(**data)
class CartSetCustomLineItemTaxAmountActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomLineItemTaxAmount`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    external_tax_amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxAmountDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxAmount",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomLineItemTaxAmountAction(**data)
class CartSetCustomLineItemTaxRateActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomLineItemTaxRate`` cart update action."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomLineItemTaxRateAction(**data)
class CartSetCustomShippingMethodActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomShippingMethod`` cart update action."""

    shipping_method_name = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="shippingMethodName"
    )
    shipping_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".shipping_method.ShippingRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="shippingRate",
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomShippingMethodAction(**data)
class CartSetCustomTypeActionSchema(CartUpdateActionSchema):
    """Schema for the ``setCustomType`` cart update action."""

    type = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.TypeResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    fields = FieldContainerField(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the "action" discriminator; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomTypeAction(**data)
class CartSetCustomerEmailActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetCustomerEmailAction`."""
    email = marshmallow.fields.String(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomerEmailAction(**data)
class CartSetCustomerGroupActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetCustomerGroupAction`."""
    customer_group = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".customer_group.CustomerGroupResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerGroup",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomerGroupAction(**data)
class CartSetCustomerIdActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetCustomerIdAction`."""
    customer_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerId",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetCustomerIdAction(**data)
class CartSetDeleteDaysAfterLastModificationActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetDeleteDaysAfterLastModificationAction`."""
    delete_days_after_last_modification = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="deleteDaysAfterLastModification",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetDeleteDaysAfterLastModificationAction(**data)
class CartSetKeyActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetKeyAction`."""
    key = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetKeyAction(**data)
class CartSetLineItemCustomFieldActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemCustomFieldAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    name = marshmallow.fields.String(allow_none=True, missing=None)
    # Raw: the custom-field value may be any JSON type.
    value = marshmallow.fields.Raw(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemCustomFieldAction(**data)
class CartSetLineItemCustomTypeActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemCustomTypeAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    type = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.TypeResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    fields = FieldContainerField(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemCustomTypeAction(**data)
class CartSetLineItemDistributionChannelActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemDistributionChannelAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemDistributionChannelAction(**data)
class CartSetLineItemPriceActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemPriceAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemPriceAction(**data)
class CartSetLineItemShippingDetailsActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemShippingDetailsAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemShippingDetailsAction(**data)
class CartSetLineItemTaxAmountActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemTaxAmountAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_tax_amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxAmountDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxAmount",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemTaxAmountAction(**data)
class CartSetLineItemTaxRateActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemTaxRateAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemTaxRateAction(**data)
class CartSetLineItemTotalPriceActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLineItemTotalPriceAction`."""
    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLineItemTotalPriceAction(**data)
class CartSetLocaleActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetLocaleAction`."""
    locale = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetLocaleAction(**data)
class CartSetShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetShippingAddressAction`."""
    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetShippingAddressAction(**data)
class CartSetShippingMethodActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetShippingMethodAction`."""
    shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shipping_method.ShippingMethodResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingMethod",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetShippingMethodAction(**data)
class CartSetShippingMethodTaxAmountActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetShippingMethodTaxAmountAction`."""
    external_tax_amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxAmountDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxAmount",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetShippingMethodTaxAmountAction(**data)
class CartSetShippingMethodTaxRateActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetShippingMethodTaxRateAction`."""
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetShippingMethodTaxRateAction(**data)
class CartSetShippingRateInputActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartSetShippingRateInputAction`.

    The ``shippingRateInput`` payload is polymorphic: its ``type`` field
    selects between the Classification and Score draft schemas.
    """
    shipping_rate_input = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "Classification": helpers.absmod(
                __name__, ".ClassificationShippingRateInputDraftSchema"
            ),
            "Score": helpers.absmod(__name__, ".ScoreShippingRateInputDraftSchema"),
        },
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingRateInput",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartSetShippingRateInputAction(**data)
class CartUpdateItemShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema that (de)serializes a `models.CartUpdateItemShippingAddressAction`."""
    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Remove the "action" key; it is not a model constructor argument.
        del data["action"]
        return models.CartUpdateItemShippingAddressAction(**data)
class CustomLineItemImportDraftSchema(helpers.BaseSchema):
    """Schema that loads a `models.CustomLineItemImportDraft` from API data.

    Unlike the update-action schemas above, this draft has no ``action``
    discriminator, so ``post_load`` passes the data through unchanged.
    """
    name = LocalizedStringField(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    # many=True: a list of item states, not a single nested object.
    state = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.ItemStateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.CustomLineItemImportDraft(**data)
| StarcoderdataPython |
3300734 | <reponame>streamlit-badge-bot/automating-technical-analysis<filename>app/exchange_tickers.py
def crypto_to_ticker(Crypto):
    """Return the exchange ticker symbol for a cryptocurrency display name.

    Args:
        Crypto: Full display name of the coin (e.g. ``'Bitcoin'``).

    Returns:
        The ticker string (e.g. ``'BTC'``), or ``None`` when the name is
        not in the lookup table (same as the previous implicit behavior).
    """
    cryptos = {'0Chain': 'ZCN','0x': 'ZRX','12Ships': 'TSHP','ARPA Chain': 'ARPA','Aave': 'LEND','Abyss Token': 'ABYSS','AdEx': 'ADX','Aeon': 'AEON',
        'Aeron': 'ARN','Aeternity': 'AE','Agrello': 'DLT','AidCoin': 'AID','Aion': 'AIO','AirSwap': 'AST','Akropolis': 'AKRO','Algorand': 'ALGO',
        'Ambrosus': 'AMB','Ampleforth': 'AMPL','Ankr': 'ANKR','AppCoins': 'APPC','Aragon': 'ANT','Ardor': 'ARDR','Ark': 'ARK','Atonomi': 'ATMI','Auctus': 'AUC',
        'Augur': 'REP','Autonio': 'NIO','Aventus': 'AVT','BANKEX': 'BKX','BLOCKv': 'VEE','BORA': 'BORA','BTU Protocol': 'BTU','Bancor': 'BNT',
        'Band Protocol': 'BAND','Banyan Network': 'BBN','Basic Attention Token': 'BAT','Beam': 'BEAM','Binance Coin': 'BNB','Binance GBP Stable Coin': 'BGBP',
        'Binance USD': 'BUSD','BitKan': 'KAN','BitShares': 'BTS','BitTorrent': 'BTT','BitTube': 'TUBE','Bitcoin': 'BTC','Bitcoin Cash': 'BCH',
        'Bitcoin Diamond': 'BCD','Bitcoin Gold': 'BTG','Bitcoin Interest': 'BCI','Bitcoin SV': 'BSV','BlackCoin': 'BLK','Blockcloud': 'BLOC',
        'Blockmason Credit Protocol': 'BCPT','Blocknet': 'BLOCK','Blockpass': 'PASS','Blockstack': 'STX','Blox': 'CDT','Bluzelle': 'BLZ','BnkToTheFuture': 'BFT',
        'Bread': 'BRD','Burst': 'BURST','Bytom': 'BTM','Callisto Network': 'CLO','Cardano': 'ADA','Celer Network': 'CELR','Chainlink': 'LINK','Chiliz': 'CHZ',
        'Chromia': 'CHR','Cindicator': 'CND','Civic': 'CVC','Cocos-BCX': 'COCOS','CommerceBlock': 'CBT','Content Neutrality Network': 'CNN','ContentBox': 'BOX',
        'Contentos': 'COS','Cortex': 'CTXC','Cosmo Coin': 'COSM','Cosmos': 'ATOM','Cred': 'LBA','Credits': 'CS','Crowd Machine': 'CMCT','Crown': 'CRW',
        'Crypto.com Coin': 'CRO','CryptoFranc': 'XCHF','Curecoin': 'CURE','CyberMiles': 'CMT','DAOstack': 'GEN','DATA': 'DTA','DECENT': 'DCT','DMarket': 'DMT',
        'Dash': 'DASH','Decentraland': 'MANA','Decred': 'DCR','Dent': 'DENT','Dether': 'DTH','DigiByte': 'DGB','DigitalNote': 'XDN','Digix Gold Token': 'DGX',
        'DigixDAO': 'DGD','Dock': 'DOCK','Dogecoin': 'DOGE','Dragon Token': 'DT','Dragonchain': 'DRGN','Dusk Network': 'DUSK','EOS': 'EOS','Edge': 'DADI',
        'Edgeless': 'EDG','Eidoo': 'EDO','Einsteinium': 'EMC2','Elrond': 'ERD','Endor Protocol': 'EDR','Enigma': 'ENG','Enjin Coin': 'ENJ','Essentia': 'ESS',
        'Ether Kingdoms Token': 'IMP','Ethereum': 'ETH','Ethereum Classic': 'ETC','Etherparty': 'FUEL','Everex': 'EVX','Everipedia': 'IQX',
        'ExclusiveCoin': 'EXCL','Expanse': 'EXP','FLETA': 'FLETA','FLO': 'FLO','FNB Protocol': 'FNB','FOAM': 'FOAM','FTX Token': 'FTT','Factom': 'FCT',
        'Fantom': 'FTM','Feathercoin': 'FTC','Fetch.ai': 'FET','FirstBlood': '1ST','Flexacoin': 'FXC','FunFair': 'FUN','Function X': 'FX','Fusion': 'FSN',
        'GXChain': 'GXS','GameCredits': 'GAME','Gas': 'GAS','Gemini Dollar': 'GSD','Genesis Vision': 'GVT','GeoCoin': 'GEO','Gifto': 'GTO','Gnosis': 'GNO',
        'GoChain': 'GO','Golem': 'GNT','Grin': 'GRIN','Groestlcoin': 'GRS','Gulden': 'NLG','Harmony': 'ONE','Haven Protocol': 'XHV','Hdac': 'HDAC',
        'Hedera Hashgraph': 'HBAR','HedgeTrade': 'HEDG','Holo': 'HOT','Horizen': 'ZEN','Humaniq': 'HMQ','Hxro': 'HXRO','Hydro': 'HYDRO','Hydro Protocol': 'HOT',
        'HyperCash': 'HC','I/O Coin': 'IOC','ICON': 'ICX','IHT Real Estate Protocol': 'IHT','INT Chain': 'INT','ION': 'ION','IOST': 'IOST','IOTA': 'IOTA',
        'Ignis': 'IGNIS','Incent': 'INCNT','Insolar': 'INS','IoTeX': 'IOTX','Jibrel Network': 'JNT','Kava': 'KAVA','Kleros': 'PNK','Komodo': 'KMD',
        'Kyber Network': 'KNC','LBRY Credits': 'LBC','LUNA': 'LUNA','Lambda': 'LAMB','Lisk': 'LSK','Litecoin': 'LTC','Loom Network': 'LOOM','Loopring': 'LRC',
        'Lunyr': 'LUN','Lympo': 'LYM','MCO': 'MCO','Maecenas': 'ART','MaidSafeCoin': 'MAID','Mainframe': 'MFT','Maker': 'MKR','Matic Network': 'MATIC',
        'Matrix AI Network': 'MAN','Medicalchain': 'MTN','Melon': 'MLN','Memetic / PepeCoin': 'MEME','Mercury': 'MER','Metadium': 'META','Metal': 'MTL',
        'Metaverse ETP': 'ETP','Metronome': 'MET','Mithril': 'MITH','MobileGo': 'MGO','Moeda Loyalty Points': 'MDA','MonaCoin': 'MONA','Monero': 'XMR',
        'MonetaryUnit': 'MUE','Monetha': 'MTH','Monolith': 'TKN','More Coin': 'MORE','Morpheus.Network': 'MRPH','Multi-collateral DAI': 'DAI','Myriad': 'XMY',
        'NEM': 'XEM','NEO': 'NEO','NKN': 'NKN','NULS': 'NULS','Nano': 'NANO','NavCoin': 'NAV','Neblio': 'NEBL','Nebulas': 'NAS','Nectar': 'NEC','Nexus': 'NXS',
        'NoLimitCoin': 'NLC2','Nucleus Vision': 'NCASH','Numeraire': 'NMR','Nxt': 'NXT','OAX': 'OAX','ODEM': 'ODE','OKB': 'OKB','OKCash': 'OK','ORS Group': 'ORS',
        'OST': 'OST','Obyte': 'GBYTE','Ocean Protocol': 'OCEAN','OmiseGO': 'OMG','Omni': 'OMN','On.Live': 'ONL','Ontology': 'ONT','Ontology Gas': 'ONG',
        'Orbs': 'ORBS','OriginTrail': 'TRAC','PAL Network': 'PAL','PCHAIN': 'PI','PIVX': 'PIVX','PIXEL': 'PXL','POA': 'POA','ParkinGo': 'GOT','Particl': 'PART',
        'Patientory': 'PTOY','Paxos Standard': 'PAX','Peercoin': 'PPC','Perlin': 'PERL','Pinkcoin': 'PINK','PlayChip': 'PLA','Pledge Coin': 'PLG','Po.et': 'POE',
        'Polymath': 'POLY','Populous': 'PPT','PotCoin': 'POT','Power Ledger': 'POWR','Project Pai': 'PAI','Prometeus': 'PROM','PumaPay': 'PMA','Pundi X': 'NPXS',
        'QASH': 'QASH','QLC Chain': 'QLC','Qtum': 'QTUM','Quant': 'QNT','Quantstamp': 'QSP','Quantum Resistant Ledger': 'QRL','QuarkChain': 'QKC',
        'RIF Token': 'RIF','RSK Smart Bitcoin': 'RBT','Radium': 'RADS','Raiden Network Token': 'RDN','Rate3': 'RTE','Ravencoin': 'RVN','Red Pulse Phoenix': 'PHB',
        'ReddCoin': 'RDD','Refereum': 'RFR','Ren': 'REN','Request': 'REQ','Ripio Credit Network': 'RCN','SEER': 'SEER','SIBCoin': 'SIB','SIRIN LABS Token': 'SRN',
        'SIX': 'SIX','SOLVE': 'SOLVE','SONM': 'SNM','STASIS EURO': 'EURS','STPT': 'STPT','SaluS': 'SLS','Santiment Network Token': 'SAN','Selfkey': 'KEY',
        'Sentinel Protocol': 'UPP','Siacoin': 'SC','SingularDTV': 'SNGLS','SingularityNET': 'AGI','Skycoin': 'SKY','SpaceChain': 'SPC','SpankChain': 'SPANK',
        'Spendcoin': 'SPND','Sphere': 'SPHR','StableUSD': 'USDS','Status': 'SNT','Stealth': 'XST','Steem': 'STEEM','Steem Dollars': 'SBD','Stellar': 'XLM',
        'Storj': 'STORJ','Storm': 'STORM','Stratis': 'STRAT','Streamr DATAcoin': 'DATA','Swarm': 'SWM','Syscoin': 'SYS','TEMCO': 'TEMCO','THETA': 'THETA',
        'TRON': 'TRX','TROY': 'TROY','TTC': 'TTC','Tael': 'WABI','TenX': 'PAY','Tether': 'USDT','Tezos': 'XTZ','Theta Fuel': 'TFUEL','Tierion': 'TNT',
        'Time New Bank': 'TNB','TomoChain': 'TOMO','Tripio': 'TRIO','TrueUSD': 'TSD',"Tutor's Diary": 'TUDA','UNUS SED LEO': 'LEO','USD Coin': 'USDC',
        'USDK': 'USDK','Ubiq': 'UBQ','Ultra': 'UOS','Unikoin Gold': 'UKG','Universa': 'UTNP','Upfiring': 'UFR','Uranus': 'URAC','Utrust': 'UTK',
        'V Systems': 'VSY','VIBE': 'VIBE','VITE': 'VITE','VeChain': 'VET','Verge': 'XVG','VeriBlock': 'VBK','VeriCoin': 'VRC','Vertcoin': 'VTC','Vetri': 'VLD',
        'Viacoin': 'VIA','Viberate': 'VIB','Vodi X': 'VDX','Voyager Token': 'BQX','W Green Pay': 'WGP','WAX': 'WAXP','WINk': 'WIN','WOLLO': 'WLO',
        'Waltonchain': 'WTC','Wanchain': 'WAN','Waves': 'WAVES','WePower': 'WPR','Wrapped Bitcoin': 'WBTC','XEL': 'XEL','XRP': 'XRP','Xriba': 'XRA',
        'YGGDRASH': 'YEED','YOYOW': 'YOYO','ZB Token': 'ZBT','Zcash': 'ZEC','Zcoin': 'XZC','Zilliqa': 'ZIL','adToken': 'ADT','aelf': 'ELF','district0x': 'DNT',
        'iExec RLC': 'RLC'}
    # NOTE(review): 'TrueUSD' maps to 'TSD' here but to 'TUSD' in
    # crypto_markets_to_ticker() — confirm which symbol the exchanges expect.
    # Direct O(1) lookup replaces the previous linear scan over .items();
    # .get() also returns None for unknown names, matching the old behavior.
    return cryptos.get(Crypto)
def stock_to_ticker(Stock):
    """Return the Yahoo! Finance ticker symbol for a stock display name.

    Args:
        Stock: Company display name (e.g. ``'Apple'``).

    Returns:
        The ticker string (e.g. ``'AAPL'``), or ``None`` when the name is
        not in the lookup table (same as the previous implicit behavior).
    """
    stocks = {'Apple': 'AAPL', 'Airbus': 'AIR.BE', 'AMD': 'AMD', 'Boeing': 'BA', 'BMW': 'BMW.BE', 'Facebook': 'FB', 'Google': 'GOOG', 'IBM': 'IBM',
        'Intel': 'INTC', 'Jumia': 'JMIA', 'Microsoft': 'MSFT', 'Nvidia': 'NVDA', 'Samsung': '005930.KS', 'Tesla': 'TSLA', 'Twitter': 'TWTR', 'Uber': 'UBER',
        'Volkswagen': 'VOW.DE'}
    # Direct O(1) lookup replaces the previous linear scan over .items().
    return stocks.get(Stock)
def stock_crypto_markets(Exchange):
    """Return the tuple of markets (quote assets / stock names) for an exchange.

    Args:
        Exchange: One of ``'Bittrex'``, ``'Binance'``, ``'Bitfinex'`` or
            ``'Yahoo! Finance'``.

    Returns:
        A tuple of market names, or ``None`` for an unknown exchange.
        (The previous version raised UnboundLocalError in that case.)
    """
    bittrex_markets = ('Bitcoin','Ethereum','Tether','US Dollar')
    # NOTE(review): 'Stable USD' here vs 'StableUSD' in binance_coins() —
    # confirm which spelling the callers actually pass around.
    binance_markets = ('Binance Coin','Binance USD','Bitcoin','Ethereum','Nigerian Nira','Paxos Standard','Ripple','Russian Ruble','Stable USD',
        'Tether','Tron','TrueUSD','USD Coin')
    bitfinex_markets = ('Bitcoin','British Pound','Ethereum','Japanese Yen','US Dollar')
    # Kept for reference only: Bitfinex markets the app does not support yet.
    bitfinex_markets_not_working = ('EOS','Euro','Ishares China Index ETF','Stable USD','Stellar','Tether','Ultra Salescloud')
    yahoo_stocks = ('AMD', 'Airbus', 'Apple', 'BMW', 'Boeing', 'Facebook', 'Google', 'IBM', 'Intel', 'Jumia', 'Microsoft', 'Nvidia', 'Samsung',
        'Tesla', 'Twitter', 'Uber', 'Volkswagen')
    # Dispatch table instead of if/elif; unknown exchanges yield None
    # instead of crashing with UnboundLocalError.
    exchange_markets = {
        'Bittrex': bittrex_markets,
        'Binance': binance_markets,
        'Bitfinex': bitfinex_markets,
        'Yahoo! Finance': yahoo_stocks,
    }
    return exchange_markets.get(Exchange)
def crypto_markets_to_ticker(Market):
    """Return the ticker symbol for a market (quote asset) display name.

    Args:
        Market: Full display name of the market (e.g. ``'US Dollar'``).

    Returns:
        The ticker string (e.g. ``'USD'``), or ``None`` when the name is
        not in the lookup table (same as the previous implicit behavior).
    """
    markets = {'Bitcoin':'BTC','Ethereum':'ETH','Tether':'USDT','Binance Coin':'BNB','Ripple':'XRP','US Dollar':'USD',
        'TrueUSD':'TUSD','Euro':'EUR','British Pound':'GBP','Russian Ruble':'RUB','Nigerian Nira':'NGN','Stellar':'XLM',
        'Japanese Yen':'JPY','Paxos Standard':'PAX','USD Coin':'USDC','Stable USD':'USDS','Binance USD':'BUSD',
        'Tron':'TRX','Ishares China Index ETF':'XCH','Ultra Salescloud':'UST'}
    # Direct O(1) lookup replaces the previous linear scan over .items().
    return markets.get(Market)
def bittrex_coins(Market):
    """Return the tuple of coins tradable against a Bittrex market.

    Args:
        Market: Market name — ``'Bitcoin'``, ``'Ethereum'``, ``'Tether'``
            or ``'USD'``.
            NOTE(review): the other market keys here are full display names
            while USD is the ticker (elsewhere the display name is
            'US Dollar') — confirm against the caller before changing.

    Returns:
        A tuple of coin display names, or ``None`` for an unknown market.
        (The previous version raised UnboundLocalError in that case.)
    """
    bittrex_btc_options = ('0x','12Ships','Abyss Token','AdEx','Aeon','Akropolis','Ankr','Aragon','Ardor','Ark','Augur','BANKEX','BLOCKv','BORA',
        'BTU Protocol','Bancor','Basic Attention Token','BitShares','BitTorrent','BitTube','Bitcoin Cash','Bitcoin SV',
        'BlackCoin','Blockcloud','Blocknet','BnkToTheFuture','Burst','Bytom','Cardano','Chainlink','Chromia','Cindicator','Civic',
        'Cortex','Cosmo Coin','Cosmos','Cred','Crowd Machine','Crown','Crypto.com Coin','Curecoin','DATA','DECENT','DMarket','Dash',
        'Decentraland','Decred','Dent','DigiByte','DigitalNote','Dogecoin','Dragonchain','Dusk Network','EOS','Edgeless',
        'Einsteinium','Endor Protocol','Enigma','Enjin Coin','Ethereum','Ethereum Classic','ExclusiveCoin','Expanse','FLETA','FLO',
        'FNB Protocol','Factom','Feathercoin','FirstBlood','Flexacoin','Function X','GameCredits','GeoCoin','Gifto','Gnosis',
        'GoChain','Golem','Grin','Groestlcoin','Gulden','Haven Protocol','Hdac','Hedera Hashgraph','HedgeTrade','Horizen',
        'Humaniq','Hxro','Hydro','I/O Coin','IHT Real Estate Protocol','ION','IOST','Ignis','Incent','IoTeX','Jibrel Network',
        'Komodo','LBRY Credits','LUNA','Lambda','Lisk','Litecoin','Loom Network','Loopring','MCO','Maecenas','MaidSafeCoin',
        'Mainframe','Memetic / PepeCoin','Mercury','Metadium','Metal','Metronome','MonaCoin','Monero','MonetaryUnit','More Coin',
        'Morpheus.Network','Multi-collateral DAI','Myriad','NEM','NEO','NKN','NavCoin','Nexus','NoLimitCoin','Numeraire','Nxt',
        'OKCash','OST','Obyte','Ocean Protocol','OmiseGO','Ontology','Ontology Gas','Orbs','OriginTrail','PAL Network','PCHAIN',
        'PIVX','PIXEL','Particl','Patientory','Paxos Standard','Peercoin','Pinkcoin','PlayChip','Pledge Coin','PotCoin','Prometeus',
        'PumaPay','Pundi X','Qtum','Quant','Quantum Resistant Ledger','Radium','Ravencoin','ReddCoin','Refereum',
        'Ripio Credit Network','SIBCoin','SIRIN LABS Token','SIX','SOLVE','STPT','SaluS','Sentinel Protocol','Siacoin',
        'SpaceChain','Spendcoin','Sphere','StableUSD','Status','Stealth','Steem','Steem Dollars','Stellar','Storj','Storm','Stratis',
        'Syscoin','TEMCO','TRON','TTC','TenX','Tezos','TrueUSD',"Tutor's Diary",'Ubiq','Unikoin Gold','Uranus','Utrust','VITE',
        'VeChain','Verge','VeriBlock','VeriCoin','Vertcoin','Viacoin','Viberate','Vodi X','W Green Pay','WAX','Waves','XEL','XRP',
        'Zcash','Zcoin','Zilliqa','adToken','aelf','district0x','iExec RLC')
    bittrex_eth_options = ('0x','AdEx','Aragon','Augur','Basic Attention Token','Bitcoin Cash','Bitcoin SV','Cardano','Civic','Cosmos','DMarket',
        'Dash','Decentraland','DigiByte','EOS','Enigma','Ethereum Classic','Function X','Gnosis','Golem','Hedera Hashgraph',
        'Litecoin','MCO','Monero','Multi-collateral DAI','NEM','NEO','OmiseGO','Pundi X','Qtum','SIRIN LABS Token','SOLVE',
        'Siacoin','Status','Stellar','Storm','Stratis','TRON','TenX','Tezos','TrueUSD','Unikoin Gold','Viberate','Vodi X','WAX',
        'Waves','XRP','Zcash')
    bittrex_usdt_options = ('0x','Basic Attention Token','BitTorrent','Bitcoin','Bitcoin Cash','Bitcoin SV','Cardano','Cosmos','Dash','Decred',
        'DigiByte','Dogecoin','EOS','Enjin Coin','Ethereum','Ethereum Classic','Grin','Hedera Hashgraph','Litecoin','Monero',
        'Multi-collateral DAI','NEO','Ocean Protocol','OmiseGO','Ontology','PumaPay','Pundi X','Ravencoin','Siacoin','Stellar',
        'TRON','Tezos','TrueUSD','VeChain','Verge','Vodi X','XRP','Zcash')
    bittrex_usd_options = ('0x','Basic Attention Token','Bitcoin','Bitcoin Cash','Bitcoin SV','Cardano','Decred','DigiByte','Ethereum',
        'Ethereum Classic','Hedera Hashgraph','Horizen','Komodo','Litecoin','Paxos Standard','Siacoin','TRON','Tether',
        'Tezos','TrueUSD','XRP','Zcash')
    # Dispatch table instead of if/elif; unknown markets yield None
    # instead of crashing with UnboundLocalError.
    market_options = {
        'Bitcoin': bittrex_btc_options,
        'Ethereum': bittrex_eth_options,
        'Tether': bittrex_usdt_options,
        'USD': bittrex_usd_options,
    }
    return market_options.get(Market)
def binance_coins(Market):
    """Return the tuple of coins tradable against a Binance market.

    Fixes in this revision:
      * Missing commas caused implicit string concatenation, producing the
        bogus entries 'BreadCardano', 'WanchainWaves' and
        'Streamr DATAcoinSyscoin' — the coins are now listed separately.
      * A duplicate 'Metal' entry in the ETH options was removed.
      * An unknown market now returns ``None`` instead of raising
        UnboundLocalError.

    Args:
        Market: Market display name (e.g. ``'Bitcoin'``, ``'Tether'``).

    Returns:
        A tuple of coin display names, or ``None`` for an unknown market.
    """
    binance_btc_options = ('0x','ARPA Chain','Aave','AdEx','Aeron','Aeternity','Agrello','Aion','AirSwap','Algorand','Ambrosus','Ankr','AppCoins',
        'Ardor','Ark','Augur','Bancor','Band Protocol','Basic Attention Token','Beam','Binance Coin','BitShares','Bitcoin Cash',
        'Bitcoin Diamond','Bitcoin Gold','Blockmason Credit Protocol','Blockstack','Blox','Bluzelle','Bread','Cardano',
        'Celer Network','Chainlink','Chiliz','Cindicator','Civic','Cocos-BCX','Contentos','Cortex','Cosmos','CyberMiles','Dash',
        'Decentraland','Decred','DigixDAO','Dock','Dogecoin','Dusk Network','EOS','Eidoo','Elrond','Enigma','Enjin Coin',
        'Ethereum','Ethereum Classic','Etherparty','Everex','FTX Token','Fantom','Fetch.ai','FunFair','GXChain','Gas',
        'Genesis Vision','Gifto','GoChain','Golem','Groestlcoin','Harmony','Hedera Hashgraph','Holo','Horizen','HyperCash',
        'ICON','IOST','IOTA','Insolar','IoTeX','Kava','Komodo','Kyber Network','Lisk','Litecoin','Loom Network','Loopring',
        'Lunyr','MCO','Mainframe','Matic Network','Metal','Mithril','Moeda Loyalty Points','Monero','Monetha','NEM','NEO','NKN',
        'NULS','Nano','NavCoin','Neblio','Nebulas','Nexus','Nucleus Vision','OAX','OST','OmiseGO','Ontology','Ontology Gas','PIVX',
        'POA','Perlin','Po.et','Polymath','Populous','Power Ledger','QLC Chain','Qtum','Quantstamp','QuarkChain',
        'Raiden Network Token','Ravencoin','Red Pulse Phoenix','Ren','Request','Ripio Credit Network','SONM','Selfkey','Siacoin',
        'SingularDTV','SingularityNET','Skycoin','Status','Steem','Stellar','Storj','Storm','Stratis','Streamr DATAcoin','Syscoin',
        'THETA','TRON','TROY','Tael','Tezos','Theta Fuel','Tierion','Time New Bank','TomoChain','VIBE','VITE','VeChain','Verge',
        'Viacoin','Viberate','Voyager Token','Waltonchain','Wanchain','Waves','WePower','XRP','YOYOW','Zcash','Zcoin','Zilliqa',
        'aelf','district0x','iExec RLC')
    binance_eth_options = ('0x','Aave','AdEx','Aeron','Aeternity','Agrello','Aion','AirSwap','Ambrosus','AppCoins','Ardor','Ark','Augur','Bancor',
        'Basic Attention Token','Binance Coin','BitShares','Bitcoin Diamond','Bitcoin Gold','Blockmason Credit Protocol','Blox',
        'Bluzelle','Bread','Cardano','Chainlink','Cindicator','Civic','CyberMiles','Dash','Decentraland','Dent','DigixDAO','Dock',
        'EOS','Eidoo','Enigma','Enjin Coin','Ethereum Classic','Everex','FunFair','GXChain','Genesis Vision','Gifto','Golem',
        'Groestlcoin','Holo','Horizen','HyperCash','ICON','IOST','IOTA','Insolar','IoTeX','Komodo','Kyber Network','Lisk',
        'Litecoin','Loom Network','Loopring','MCO','Mainframe','Metal','Moeda Loyalty Points','Monero','Monetha','NEM',
        'NEO','NULS','Nano','Neblio','Nebulas','Nexus','Nucleus Vision','OAX','OST','OmiseGO','Ontology','PIVX','POA','Po.et',
        'Populous','Power Ledger','Pundi X','QLC Chain','Qtum','Quantstamp','QuarkChain','Raiden Network Token','Request',
        'Ripio Credit Network','SONM','Selfkey','Siacoin','SingularityNET','Skycoin','Status','Steem','Stellar','Storj','Storm',
        'Stratis','Streamr DATAcoin','Syscoin','THETA','TRON','Tael','Tierion','Time New Bank','VIBE','VeChain','Verge','Viacoin',
        'Viberate','Voyager Token','Waltonchain','Wanchain','Waves','WePower','XRP','YOYOW','Zcash','Zcoin','Zilliqa','aelf',
        'district0x','iExec RLC')
    binance_usdt_options = ('0x','ARPA Chain','Algorand','Ankr','Band Protocol','Basic Attention Token','Beam','Binance Coin','Binance USD',
        'BitTorrent','Bitcoin','Bitcoin Cash','Blockstack','Cardano','Celer Network','Chainlink','Chiliz','Civic','Cocos-BCX',
        'Contentos','Cortex','Cosmos','Dash','Dent','Dock','Dogecoin','Dusk Network','EOS','Elrond','Enjin Coin','Ethereum',
        'Ethereum Classic','FTX Token','Fantom','Fetch.ai','FunFair','Gifto','Harmony','Hedera Hashgraph','Holo','HyperCash',
        'ICON','IOST','IOTA','IoTeX','Kava','Litecoin','MCO','Mainframe','Matic Network','Metal','Mithril','Monero','NEO','NKN',
        'NULS','Nano','OmiseGO','Ontology','Ontology Gas','Paxos Standard','Perlin','Pundi X','Qtum','Ravencoin','Ren','Selfkey',
        'StableUSD','Stellar','Storm','THETA','TRON','TROY','Tezos','Theta Fuel','TomoChain','TrueUSD','USD Coin','VITE','VeChain',
        'WINk','Wanchain','Waves','XRP','Zcash','Zilliqa','iExec RLC')
    binance_bnb_options = ('0x','ARPA Chain','Aeternity','Agrello','Aion','Algorand','Ambrosus','Ankr','AppCoins','Band Protocol',
        'Basic Attention Token','Beam','BitTorrent','Bitcoin Cash','Blockmason Credit Protocol','Blockstack','Bread','Cardano',
        'Celer Network','Chiliz','Cindicator','Cocos-BCX','Contentos','Cortex','Cosmos','CyberMiles','Dash','Decred','Dogecoin',
        'Dusk Network','EOS','Elrond','Enjin Coin','Ethereum Classic','FTX Token','Fantom','Fetch.ai','Gifto','GoChain','Harmony',
        'Hedera Hashgraph','Holo','Horizen','ICON','IOST','IOTA','Kava','Lisk','Litecoin','Loom Network','MCO','Mainframe',
        'Matic Network','Mithril','Monero','NEM','NEO','NKN','NULS','Nano','Neblio','Nebulas','Nexus','OST','OmiseGO','Ontology',
        'Ontology Gas','PIVX','Perlin','Polymath','Power Ledger','QLC Chain','Qtum','Quantstamp','Raiden Network Token',
        'Ravencoin','Red Pulse Phoenix','Ren','Ripio Credit Network','Siacoin','SingularityNET','Skycoin','Steem','Stellar',
        'Storm','Syscoin','THETA','TRON','TROY','Tael','Tezos','Theta Fuel','TomoChain','VITE','VeChain','Viacoin','WINk',
        'Wanchain','Waves','XRP','YOYOW','Zcash','Zcoin','Zilliqa','iExec RLC')
    binance_pax_options = ('Algorand','Basic Attention Token','Binance Coin','BitTorrent','Bitcoin','Bitcoin Cash','Cardano','Chainlink',
        'Dusk Network','EOS','Ethereum','Litecoin','NEO','Ontology','StableUSD','Stellar','TRON','USD Coin','XRP','Zcash')
    binance_tusd_options = ('Algorand','Basic Attention Token','Binance Coin','BitTorrent','Bitcoin','Bitcoin Cash','Cardano','Chainlink','Cosmos',
        'EOS','Ethereum','Ethereum Classic','Litecoin','NEO','Paxos Standard','Red Pulse Phoenix','StableUSD','Stellar','TRON',
        'USD Coin','Waves','XRP','Zcash')
    binance_usdc_options = ('Algorand','Basic Attention Token','Binance Coin','Binance GBP Stable Coin','BitTorrent','Bitcoin','Bitcoin Cash',
        'Cardano','Chainlink','Cosmos','Dusk Network','EOS','Ethereum','Fantom','Harmony','Litecoin','NEO','Ontology','StableUSD',
        'Stellar','TRON','TomoChain','WINk','Waves','XRP','Zcash')
    binance_usds_options = ('Binance Coin', 'Bitcoin')
    binance_ngn_options = ('Binance Coin', 'Binance USD', 'Bitcoin')
    binance_busd_options = ('Binance Coin','Bitcoin','Bitcoin Cash','Cardano','Chainlink','EOS','Ethereum','Ethereum Classic',
        'Litecoin','Qtum','Stellar','TRON','VeChain','XRP')
    binance_rub_options = ('Binance Coin', 'Binance USD', 'Bitcoin', 'Ethereum', 'XRP')
    binance_trx_options = ('BitTorrent', 'WINk')
    binance_xrp_options = ('TRON', 'Zcoin')
    # Dispatch table instead of if/elif; unknown markets yield None
    # instead of crashing with UnboundLocalError.
    market_options = {
        'Bitcoin': binance_btc_options,
        'Ethereum': binance_eth_options,
        'Tether': binance_usdt_options,
        'Binance Coin': binance_bnb_options,
        'Paxos Standard': binance_pax_options,
        'TrueUSD': binance_tusd_options,
        'USD Coin': binance_usdc_options,
        'StableUSD': binance_usds_options,
        'Nigerian Nira': binance_ngn_options,
        'Binance USD': binance_busd_options,
        'Russian Ruble': binance_rub_options,
        'Tron': binance_trx_options,
        'Ripple': binance_xrp_options,
    }
    return market_options.get(Market)
def bitfinex_coins(Market):
    """Return the tuple of coin names tradable on Bitfinex against *Market*.

    Fixes over the previous revision:
      * ``'Lympo''Maker'`` and ``'aelf''iExec RLC'`` were adjacent string
        literals (implicit concatenation produced one bogus entry each) —
        now separate entries, matching the other market lists.
      * single-entry markets used ``('X')`` which is a plain string, not a
        tuple (iterating it yields characters) — now real 1-tuples.
      * ``bitfinex_eur_options`` was defined but never reachable — a
        'Euro' entry now dispatches to it.
      * an unknown market returns an empty tuple instead of crashing with
        an unrelated NameError on ``coins``.

    :param Market: human-readable quote-market name, e.g. 'Bitcoin'
    :return: tuple of coin display names (possibly empty)
    """
    bitfinex_btc_options = ('0Chain','0x','AidCoin','Aion','Algorand','Ampleforth','Aragon','Auctus','Augur','Aventus','BLOCKv','Bancor',
                            'Basic Attention Token','BitTorrent','Bitcoin Cash','Bitcoin Gold','Bitcoin Interest','Bitcoin SV','BnkToTheFuture',
                            'Callisto Network','Cindicator','CommerceBlock','Cortex','Cosmos','DATA','Dash','Decentraland','Dether','DigiByte',
                            'Dusk Network','EOS','Edge','Eidoo','Essentia','Ethereum','Ethereum Classic','Everipedia','FunFair','Fusion','Golem',
                            'Hydro Protocol','IOST','IOTA','Kyber Network','Litecoin','Loopring','Lympo','Maker','Medicalchain','Metaverse ETP',
                            'Mithril','Monero','Multi-collateral DAI','NEO','Nectar','Nucleus Vision','ODEM','OKB','ORS Group','OmiseGO','Omni','POA',
                            'Polymath','Project Pai','QASH','Qtum','RIF Token','RSK Smart Bitcoin','Raiden Network Token','Request',
                            'Ripio Credit Network','SEER','Santiment Network Token','SingularDTV','SingularityNET','SpankChain','Status','Stellar',
                            'Storj','Streamr DATAcoin','TRON','Tezos','Time New Bank','UNUS SED LEO','USDK','Ultra','Utrust','V Systems','VeChain',
                            'Verge','WAX','WePower','XRP','YOYOW','Zcash','Zilliqa','aelf','iExec RLC')
    bitfinex_eth_options = ('0Chain','0x','Abyss Token','AidCoin','Aion','AirSwap','Aragon','Auctus','Augur','Autonio','Aventus','BLOCKv','Bancor',
                            'Banyan Network','Basic Attention Token','Blockpass','BnkToTheFuture','Cindicator','CommerceBlock',
                            'Content Neutrality Network','ContentBox','Cortex','Cosmos','Credits','CryptoFranc','DAOstack','DATA','Decentraland',
                            'Dether','Digix Gold Token','Dragonchain','EOS','Edge','Eidoo','Enjin Coin','Essentia','Ether Kingdoms Token','FOAM',
                            'FunFair','Fusion','Gnosis','Golem','Hydro Protocol','INT Chain','IOST','IOTA','Kleros','Kyber Network','Loom Network',
                            # FIX: 'Lympo','Maker' were previously fused into 'LympoMaker'
                            'Loopring','Lympo','Maker','Matrix AI Network','Medicalchain','Melon','Metaverse ETP','Mithril','MobileGo','Monolith',
                            'Multi-collateral DAI','NEO','Nectar','Nucleus Vision','ODEM','OKB','ORS Group','OmiseGO','On.Live','POA','ParkinGo',
                            'Polymath','QASH','Qtum','Raiden Network Token','Rate3','Request','Ripio Credit Network','SEER','STASIS EURO',
                            'Santiment Network Token','SingularDTV','SingularityNET','SpankChain','Status','Stellar','Storj','Streamr DATAcoin',
                            'Swarm','TRON','Time New Bank','Tripio','UNUS SED LEO','USDK','Universa','Upfiring','Utrust','VeChain','Verge','Vetri',
                            # FIX: 'aelf','iExec RLC' were previously fused into 'aelfiExec RLC'
                            'WAX','Waltonchain','WePower','Wrapped Bitcoin','Xriba','YGGDRASH','YOYOW','Zilliqa','aelf','iExec RLC')
    bitfinex_usdt_options = ('Algorand','Ampleforth','BitKan','Bitcoin','Bitcoin Cash','Dragon Token','EOS','Ethereum',
                             'FTX Token','Litecoin','OKB','UNUS SED LEO','USDK')
    bitfinex_usd_options = ('0Chain','0x','Abyss Token','AidCoin','Aion','AirSwap','Algorand','Ampleforth','Aragon','Atonomi','Auctus','Augur',
                            'Autonio','Aventus','BLOCKv','Bancor','Banyan Network','Basic Attention Token','BitKan','BitTorrent','Bitcoin',
                            'Bitcoin Cash','Bitcoin Gold','Bitcoin Interest','Bitcoin SV','Blockpass','BnkToTheFuture','Callisto Network','Cindicator',
                            'CommerceBlock','Content Neutrality Network','ContentBox','Cortex','Cosmos','Credits','CryptoFranc','DAOstack','DATA','Dash',
                            'Decentraland','Dether','DigiByte','Digix Gold Token','Dragon Token','Dragonchain','Dusk Network','EOS','Edge','Eidoo',
                            'Enjin Coin','Essentia','Ether Kingdoms Token','Ethereum','Ethereum Classic','Everipedia','FOAM','FTX Token','FunFair',
                            'Fusion','Gemini Dollar','Gnosis','Golem','Hydro Protocol','INT Chain','IOST','IOTA','Kleros','Kyber Network','Litecoin',
                            'Loom Network','Loopring','Lympo','Maker','Matrix AI Network','Medicalchain','Melon','Metaverse ETP','Mithril','MobileGo',
                            'Monero','Monolith','Multi-collateral DAI','NEO','Nectar','Nucleus Vision','ODEM','OKB','ORS Group','OmiseGO','Omni',
                            'On.Live','POA','ParkinGo','Paxos Standard','Polymath','Project Pai','QASH','Qtum','RIF Token','RSK Smart Bitcoin',
                            'Raiden Network Token','Rate3','Request','Ripio Credit Network','SEER','STASIS EURO','Santiment Network Token',
                            'SingularDTV','SingularityNET','SpankChain','Status','Stellar','Storj','Streamr DATAcoin','Swarm','TRON','Tether','Tezos',
                            'Time New Bank','Tripio','TrueUSD','UNUS SED LEO','USD Coin','USDK','Ultra','Universa','Upfiring','Utrust','V Systems',
                            'VeChain','Verge','Vetri','WAX','WOLLO','Waltonchain','WePower','Wrapped Bitcoin','XRP','Xriba','YGGDRASH','YOYOW',
                            'ZB Token','Zcash','Zilliqa','aelf','iExec RLC')
    # FIX: single-element markets are real tuples now (trailing comma), so
    # callers iterating the result see coin names, not single characters.
    bitfinex_xch_options = ('Bitcoin',)
    bitfinex_eur_options = ('Bitcoin','EOS','Ethereum','IOTA','NEO','ParkinGo','Stellar','TRON','Verge')
    bitfinex_jpy_options = ('Bitcoin', 'EOS', 'Ethereum', 'IOTA', 'NEO', 'Stellar', 'TRON', 'Verge')
    bitfinex_gbp_options = ('Bitcoin', 'EOS', 'Ethereum', 'IOTA', 'NEO', 'Stellar', 'TRON', 'Verge')
    bitfinex_eos_options = ('Everipedia', 'UNUS SED LEO', 'USDK')
    bitfinex_xlm_options = ('WOLLO',)
    bitfinex_ust_options = ('ZB Token',)
    market_options = {
        'Bitcoin': bitfinex_btc_options,
        'Ethereum': bitfinex_eth_options,
        'Tether': bitfinex_usdt_options,
        'US Dollar': bitfinex_usd_options,
        'Ishares China Index ETF': bitfinex_xch_options,
        'Euro': bitfinex_eur_options,  # FIX: list existed but was never dispatched
        'Japanese Yen': bitfinex_jpy_options,
        'British Pound': bitfinex_gbp_options,
        'EOS': bitfinex_eos_options,
        'Stellar': bitfinex_xlm_options,
        'Ultra Salescloud': bitfinex_ust_options,
    }
    return market_options.get(Market, ())
def exchange_to_coins_loading(Exchange, Market):
    """Return the coin list for *Market* on *Exchange*.

    :param Exchange: one of 'Bittrex', 'Binance', 'Bitfinex'
    :param Market: market/quote-currency name understood by the exchange loader
    :return: sequence of coin names for that market
    :raises ValueError: for an unsupported exchange (previously this fell
        through and crashed with an unrelated NameError on ``coins``)
    """
    if Exchange == 'Bittrex':
        coins = bittrex_coins(Market)
    elif Exchange == 'Binance':
        coins = binance_coins(Market)
    elif Exchange == 'Bitfinex':
        coins = bitfinex_coins(Market)
    else:
        raise ValueError('unsupported exchange: {}'.format(Exchange))
    return coins
def yahoo_interval(Interval):
    """Map a human-readable interval to Yahoo Finance (period, interval) codes.

    The look-back period per interval reproduces the original mapping
    ('1 Minute' -> '7d', sub-hour -> '1mo', '1 Hour' -> '2y', else 'max').

    :param Interval: one of '1 Minute', '5 Minute', '15 Minute', '30 Minute',
        '1 Hour', '1 Day', '1 Week', '1 Month'
    :return: tuple (period, stock_interval) of Yahoo API strings
    :raises KeyError: for an unknown interval (previously this surfaced as an
        opaque NameError because ``stock_interval`` was never assigned)
    """
    intervals = {'1 Minute': '1m', '5 Minute': '5m', '15 Minute': '15m', '30 Minute': '30m',
                 '1 Hour': '60m', '1 Day': '1d', '1 Week': '1wk', '1 Month': '1mo'}
    if Interval == '1 Minute':
        period = '7d'
    elif Interval in ('5 Minute', '15 Minute', '30 Minute'):
        period = '1mo'
    elif Interval == '1 Hour':
        period = '2y'
    else:
        period = 'max'
    # direct dict lookup replaces the original linear scan
    return period, intervals[Interval]
def crypto_interval(Exchange, Interval):
    """Map a human-readable interval to an exchange's candle-interval code.

    Bittrex uses its own interval names (and the Bittrex table here has no
    week/month entries); every other exchange uses the common '1m'/'1h' codes.

    :param Exchange: exchange name, e.g. 'Bittrex', 'Binance'
    :param Interval: human-readable interval, e.g. '1 Hour'
    :return: the exchange's interval string, or None when the interval is not
        in the table (matches the previous fall-through behaviour)
    """
    intervals = {'1 Minute': '1m', '5 Minute': '5m', '15 Minute': '15m', '30 Minute': '30m',
                 '1 Hour': '1h', '1 Day': '1d', '1 Week': '1w', '1 Month': '1M'}
    bittrex_intervals = {'1 Minute': 'oneMin', '5 Minute': 'fiveMin', '15 Minute': 'fifteenMin', '30 Minute': 'thirtyMin',
                         '1 Hour': 'hour', '1 Day': 'day'}
    # Direct .get() replaces the linear scans; the result variable no longer
    # shadows the function's own name (the original assigned to
    # ``crypto_interval`` inside ``crypto_interval``).
    if Exchange == 'Bittrex':
        return bittrex_intervals.get(Interval)
    return intervals.get(Interval)
| StarcoderdataPython |
1622945 | <filename>backend/app/model/questionnaires/education_mixin.py<gh_stars>0
from sqlalchemy import func
from sqlalchemy.ext.declarative import declared_attr
from app import db
from app.export_service import ExportService
class EducationMixin(object):
    """Shared columns/metadata for the education questionnaires.

    Mixed into concrete questionnaire models (SQLAlchemy declarative).
    Subclasses are expected to provide the class attributes referenced below
    but not defined here: ``attends_school_desc``, ``school_type_desc``,
    ``placement_other_hide_expression``, ``current_grade_hide_expression``
    and ``school_services_desc``.  The ``info`` dicts describe Formly-style
    form fields; ``hide_expression`` strings are evaluated client-side
    against the form ``model``.
    """
    __question_type__ = ExportService.TYPE_UNRESTRICTED
    __estimated_duration_minutes__ = 5
    # Hide the free-text "other services" field unless "Other" is checked.
    school_services_other_hide_expression = '!(model.school_services && model.school_services.includes("servicesOther"))'
    id = db.Column(db.Integer, primary_key=True)
    last_updated = db.Column(db.DateTime(timezone=True), default=func.now())
    time_on_task_ms = db.Column(db.BigInteger, default=0)
    # declared_attr: foreign keys must be created per concrete subclass.
    @declared_attr
    def participant_id(cls):
        return db.Column("participant_id", db.Integer, db.ForeignKey("stardrive_participant.id"))
    @declared_attr
    def user_id(cls):
        return db.Column("user_id", db.Integer, db.ForeignKey("stardrive_user.id"))
    @declared_attr
    def attends_school(cls):
        # Yes/no gate; most other fields below hide when this is falsy.
        # Label text comes from the subclass via attends_school_desc.
        return db.Column(
            db.Boolean,
            info={
                "display_order": 1,
                "type": "radio",
                "template_options": {
                    "label": "Attend a school or program?",
                    "required": False,
                    "options": [
                        {"value": True, "label": "Yes"},
                        {"value": False, "label": "No"},
                    ],
                },
                "expression_properties": {
                    "template_options.label": cls.attends_school_desc,
                },
            },
        )
    school_name = db.Column(
        db.String,
        info={
            "display_order": 2,
            "type": "input",
            "template_options": {
                "label": "Name of the school or program",
                "required": False,
            },
            "hide_expression": '!(model.attends_school)',
        },
    )
    @declared_attr
    def school_type(cls):
        # Public/private/homeschool; label overridden by subclass desc.
        return db.Column(
            db.String,
            info={
                "display_order": 3,
                "type": "radio",
                "template_options": {
                    "label": "Type of School",
                    "required": False,
                    "options":[
                        {"value": "public", "label": "Public"},
                        {"value": "private", "label": "Private"},
                        {"value": "homeschool", "label": "Home School"},
                    ]
                },
                "expression_properties": {
                    "template_options.label": cls.school_type_desc,
                },
                "hide_expression": '!(model.attends_school)',
            },
        )
    @declared_attr
    def placement_other(cls):
        # Free text shown (and required) only when the subclass's
        # placement_other_hide_expression evaluates false.
        return db.Column(
            db.String,
            info={
                "display_order": 4.3,
                "type": "input",
                "template_options": {
                    "label": "Enter school placement",
                    "required": True,
                },
                "hide_expression": cls.placement_other_hide_expression,
                "expression_properties": {
                    # required exactly when visible
                    "template_options.required": '!' + cls.placement_other_hide_expression
                }
            },
        )
    @declared_attr
    def current_grade(cls):
        return db.Column(
            db.String,
            info={
                "display_order": 5,
                "type": "input",
                "template_options": {
                    "label": "School grade level",
                    "required": True,
                },
                "hide_expression": cls.current_grade_hide_expression,
                "expression_properties": {
                    # required exactly when visible
                    "template_options.required": '!' + cls.current_grade_hide_expression
                }
            },
        )
    @declared_attr
    def school_services(cls):
        # Multi-select of in-school supports; "servicesOther" toggles the
        # free-text field below via school_services_other_hide_expression.
        return db.Column(
            db.ARRAY(db.String),
            info={
                "display_order": 6.1,
                "type": "multicheckbox",
                "template_options": {
                    "type": "array",
                    "required": False,
                    "options": [
                        {"value": "504mod", "label": "504 Modification Plan"},
                        {"value": "iep", "label": "Individualized Education Program (IEP)"},
                        {"value": "1:1aide", "label": "1:1 aide or paraprofessional in classroom"},
                        {"value": "partTimeInstruction", "label": "Part-time specialized instruction in a resource room or "
                                                                  "special education classroom"},
                        {"value": "fullTimeInstruction", "label": "Full-time specialized instruction in a resource room or "
                                                                  "special education classroom"},
                        {"value": "specializedSchool", "label": "Specialized school for special learning needs"},
                        {"value": "dayTreatment", "label": "Day treatment or residential center"},
                        {"value": "disabilitySupports", "label": "Disability supports services (at college/vocational "
                                                                 "school)"},
                        {"value": "servicesNone", "label": "None of the above"},
                        {"value": "servicesOther", "label": "Other"},
                    ],
                },
                "expression_properties": {
                    "template_options.label": cls.school_services_desc,
                },
                "hide_expression": '!(model.attends_school)',
            },
        )
    school_services_other = db.Column(
        db.String,
        info={
            "display_order": 6.2,
            "type": "input",
            "template_options": {
                "label": "Describe additional services",
                "required": True,
            },
            "hide_expression": school_services_other_hide_expression,
            "expression_properties": {
                # required exactly when visible
                "template_options.required": '!' + school_services_other_hide_expression
            }
        },
    )
    def get_field_groups(self):
        """Return the form field-group layout for this questionnaire.

        NOTE(review): unlike "school_services_group", the "placement_group"
        entry declares no "fields" list — confirm whether placement fields
        are attached elsewhere or this is an omission.
        """
        return {
            "placement_group": {
                "display_order": 4,
                "wrappers": ["card"],
                "template_options": {"label": "Placement"},
                "hide_expression": '!(model.attends_school)',
            },
            "school_services_group": {
                "fields": ["school_services", "school_services_other"],
                "display_order": 6,
                "wrappers": ["card"],
                "template_options": {"label": "School Services"},
                "hide_expression": '!(model.attends_school)',
            },
        }
| StarcoderdataPython |
3355203 | <filename>code/trial/main.py
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.4'
# jupytext_version: 1.2.4
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# +
import glob
import os
# +
import sys
from collections import Counter
import librosa
import librosa.display
import matplotlib.pyplot as plt
import numpy as np
from keras import regularizers
from keras.callbacks import EarlyStopping, ReduceLROnPlateau
from keras.layers import Dense, Dropout, Flatten, Input
from keras.layers.recurrent import LSTM
from keras.models import Sequential, model_from_json
from keras.optimizers import Adam
from keras.utils import np_utils
from mutagen.easyid3 import EasyID3
# -
def makeArtistList(path="/home/Takumi/data/", names=('Beethoven', 'Haydn', 'Bach')):
    """Build a flat list of integer class labels, one per file on disk.

    For each artist ``names[i]``, counts the files in ``path/<name>/cut/``
    and appends that many ``i`` labels.  The previous revision hard-coded
    the data path and artist list; both are now parameters whose defaults
    preserve the old behaviour, so existing ``makeArtistList()`` calls work
    unchanged.

    NOTE(review): the count comes from os.listdir (all files), while the
    audio loader elsewhere globs only ``*.wav`` — the directories must
    contain only wav files for labels to align with the data.

    :param path: root data directory, must end with '/'
    :param names: ordered artist names; index == class label
    :return: list of int labels, grouped by artist in ``names`` order
    """
    artist_list = []
    for label, name in enumerate(names):
        print('{} process start.'.format(name))
        cut_dir = path + name + '/cut/'
        file_count = len(os.listdir(cut_dir))
        artist_list.extend([label] * file_count)
    # Print the per-class counts as a sanity check.
    counter = Counter(artist_list)
    print(counter)
    return artist_list
artist_list = makeArtistList()  # one int class label per audio file on disk
# +
def readwave(file):
    """Load an audio file with librosa; returns (samples, sample_rate)."""
    wav, samplerate = librosa.load(file)
    return wav, samplerate
def MelFilterBank(y, sr):
    """Mel-scaled spectrogram of signal *y* sampled at *sr* (librosa defaults)."""
    return librosa.feature.melspectrogram(y=y, sr=sr)
# -
# Gather every per-artist wav clip, then stack each clip's mel spectrogram
# into data_x with shape (num_clips, 128, 1292).
path = "/home/Takumi/data/"
name_list = ['Beethoven', 'Haydn', 'Bach']
data_list = []
for name in name_list:
    print('{} process start.'.format(name))
    p = path + name + '/cut/'
    wav_list = sorted(list(glob.glob(p + "*.wav")))
    data_list.extend(wav_list)
print(len(data_list))
# Dummy first slab so np.r_ stacking has something to append to; it is
# removed again after the loop (np.delete below).
# NOTE(review): labels in artist_list were counted with os.listdir while
# this loop globs only *.wav — confirm the counts match.
data_x = np.empty([128, 1292])
for i in range(len(data_list)):
    wav, sr = readwave(data_list[i])
    # print(wav.shape)
    # print(sr)
    t = np.arange(0, len(wav)) / sr
    m = MelFilterBank(wav, sr)
    # print(m)
    # print(m.shape)
    # if not i:
    #     data_x = np.stack([data_x, m], 0)
    # else:
    #     data_x = np.r_[data_x, m]
    # '0, 3, -1' = concatenate on axis 0, promote operands to 3-D.
    # NOTE(review): np.r_ copies the whole accumulator each iteration
    # (quadratic); collecting into a list and np.stack-ing once would be
    # linear.
    data_x = np.r_['0, 3, -1', data_x, m]
    if not i % 100:
        print(data_x.shape)
# Drop the dummy slab inserted before the loop.
data_x = np.delete(data_x, 0, 0)
print(data_x.shape)
def makeLSTMmodel(hidden=128, input_shape=(128, 1292,)):
    """Build and compile a 2-layer LSTM classifier over 3 composer classes.

    :param hidden: int, LSTM units per layer
    :param input_shape: tuple, (timesteps, features) of one spectrogram
    :return: compiled keras Sequential model
    """
    model = Sequential()
    model.add(LSTM(units=hidden, dropout=0.2, input_shape=input_shape, return_sequences=True))
    # NOTE(review): input_shape on a non-first layer is ignored by Keras —
    # confirm it was meant only for the first layer.  L2 regularization is
    # applied to the second LSTM only.
    model.add(LSTM(units=hidden, dropout=0.2, input_shape=input_shape, kernel_regularizer=regularizers.l2(0.001)))
    model.add(Dense(3, activation='softmax'))
    model.compile(loss="categorical_crossentropy", optimizer=Adam(lr=1e-3), metrics=['accuracy'])
    return model
# One-hot encode the composer labels (3 classes) and train with early stopping.
data_y = np.array(artist_list)
# data_y = np.delete(data_y, 0)
data_y = np_utils.to_categorical(data_y, 3)
print(data_y.shape)
model = makeLSTMmodel()
model.summary()
early_stopping = EarlyStopping(monitor="val_loss", patience=10, mode="auto")
# reduce_lr = ReduceLROnPlateau(monitor='val_loss', patience=5, factor=0.5, min_lr=0.0001, verbose=1)
# history = model.fit(data_x, data_y, batch_size=64, epochs=100, validation_split=0.2, callbacks=[early_stopping, reduce_lr])
history = model.fit(data_x, data_y, batch_size=64, epochs=100, validation_split=0.2, callbacks=[early_stopping])
# Persist architecture (JSON) and weights (HDF5) ...
with open('model.json', 'w') as json_file:
    json_file.write(model.to_json())
model.save_weights('main1.h5')
# ... then round-trip them back and continue training the restored model.
model_r = model_from_json(open('model.json', 'r').read())
model_r.load_weights('main1.h5')
model_r.compile(loss="categorical_crossentropy", optimizer=Adam(lr=1e-3), metrics=['accuracy'])
history_r = model_r.fit(data_x, data_y, batch_size=64, epochs=100, validation_split=0.2, callbacks=[early_stopping])
# Free the first model before inspecting memory usage below.
del model
del early_stopping
# del reduce_lr
del history
# Rough memory report of the surviving top-level names.
# NOTE(review): sys.getsizeof is shallow (does not follow references), so
# these numbers understate containers; eval(var_name) is safe here only
# because dir() yields local identifiers.
print("{}{: >25}{}{: >10}{}".format('|','Variable Name','|','Memory','|'))
print(" ------------------------------------ ")
for var_name in dir():
    if not var_name.startswith("_"):
        print("{}{: >25}{}{: >10}{}".format('|',var_name,'|',sys.getsizeof(eval(var_name)),'|'))
# -
# Drop the last loop's leftovers from the spectrogram pass.
del m
del t
del wav
# +
# %matplotlib inline
def plot_history(history):
    """Plot training/validation accuracy (top) and loss (bottom) curves.

    :param history: keras History object with 'accuracy', 'val_accuracy',
        'loss' and 'val_loss' series in history.history
    """
    # (title, ylabel, [(history key, legend label)], legend location)
    panels = [
        ('model accuracy', 'accuracy',
         [('accuracy', 'accuracy'), ('val_accuracy', 'val_acc')], 'lower right'),
        ('model loss', 'loss',
         [('loss', 'loss'), ('val_loss', 'val_loss')], 'upper right'),
    ]
    plt.figure(figsize=(8, 10))
    plt.subplots_adjust(hspace=0.3)
    for row, (title, ylabel, series, legend_loc) in enumerate(panels, start=1):
        plt.subplot(2, 1, row)
        for key, label in series:
            plt.plot(history.history[key], '-', label=label)
        plt.title(title)
        plt.xlabel('epoch')
        plt.ylabel(ylabel)
        plt.legend(loc=legend_loc)
    plt.show()
plot_history(history_r)
# NOTE(review): plot_history() ends with plt.show(), which typically clears
# the current figure — this savefig may write an empty image; confirm.
plt.savefig('graph.png')
# -
# Persist the retrained model (overwrites the earlier checkpoint files).
with open('model.json', 'w') as json_file:
    json_file.write(model_r.to_json())
model_r.save_weights('main1.h5')
| StarcoderdataPython |
24018 | <filename>clone-zadara-volume.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import time
import logging
import logging.config
import logging.handlers
import yaml
from zadarest import ZConsoleClient
from zadarest import ZVpsaClient
logger = None
def setup_logging( log_conf=None ):
    """Configure the logging subsystem.

    :param log_conf: optional mapping in logging.config.dictConfig format;
        when None, falls back to basicConfig at DEBUG level.
    """
    if log_conf is None:
        logging.basicConfig( level=logging.DEBUG )
    else:
        # shallow copy of the mapping (kept from the original behaviour)
        logging.config.dictConfig( dict( log_conf ) )
    # lazy %-args: the logging framework formats only if the record is emitted
    logging.info( 'start logging for %s at %s',
            __name__, time.strftime( "%y%m%d-%H%M", time.localtime() ) )
def read_config( config_file_path ):
    """Read and validate the YAML configuration file.

    Exits the process with status 1 when a required section is missing.

    :param config_file_path: path to the YAML config file
    :return: dict with 'zadara_cloud_console' (incl. 'url'), 'zadara_vpsa'
        (incl. 'volume_export_path') and 'logging' (None when absent)
    """
    with open( config_file_path, 'r' ) as ymlfile:
        # safe_load: never instantiate arbitrary Python objects from the
        # config file (yaml.load without a Loader is unsafe/deprecated)
        config = yaml.safe_load( ymlfile )
    # some validation of config
    if 'zadara_cloud_console' not in config or 'url' not in config['zadara_cloud_console']:
        logger.critical('missing zadara CLOUD CONSOLE URL config')
        exit( 1 )
    if 'zadara_vpsa' not in config or 'volume_export_path' not in config['zadara_vpsa']:
        logger.critical('missing zadara volume EXPORT PATH config')
        exit( 1 )
    if 'logging' not in config:
        config['logging'] = None
    return config
def get_value_from_env_or_user_input( env_var_name, msg="enter your value: " ):
    """Fetch a value from the environment, prompting the user until one is
    supplied when the variable is unset or empty.

    :param env_var_name: name of the environment variable to try first
    :param msg: prompt text for the interactive fallback
    :return: the non-empty value
    """
    value = os.environ.get( env_var_name )
    while not value:
        value = str( raw_input( msg ) )
    return value
def setup_zadara_console_client():
    """Create a ZConsoleClient for the console URL named in the global
    config, authenticating with a token from the environment (or an
    interactive prompt as fallback).

    :return: ZConsoleClient instance
    """
    console_url = cfg['zadara_cloud_console']['url']
    token = get_value_from_env_or_user_input(
            'ZADARA_CONSOLE_ACCESS_TOKEN',
            'enter your zadara CONSOLE access token: ' )
    console_client = ZConsoleClient( console_url, token )
    logger.debug('set zconsole for url(%s)' % console_url )
    logger.debug('zconsole object is (%s)' % console_client )
    return console_client
def setup_zadara_vpsa_client( z_console_client, vpsa_id ):
    """Create a ZVpsaClient for one VPSA, authenticating with a token from
    the environment (or an interactive prompt as fallback).

    :param z_console_client: ZConsoleClient used to reach the cloud console
    :param vpsa_id: int id of the target VPSA
    :return: ZVpsaClient instance
    """
    vpsa_token = get_value_from_env_or_user_input(
            'ZADARA_VPSA_ACCESS_TOKEN',
            'enter your zadara VPSA token: ' )
    vpsa_client = ZVpsaClient( z_console_client, vpsa_token=vpsa_token, vpsa_id=vpsa_id )
    logger.debug('set zvpsa for id (%d)' % vpsa_id )
    logger.debug('zvpsa object is (%s)' % vpsa_client )
    return vpsa_client
def setup_zadara_client():
    """Build the console client, resolve the VPSA owning the configured
    export path, and return a VPSA client for it (exits if not found)."""
    zcon = setup_zadara_console_client()
    vpsa_token = get_value_from_env_or_user_input(
            'ZADARA_VPSA_ACCESS_TOKEN',
            'enter your zadara VPSA token: ' )
    # cache the token so setup_zadara_vpsa_client() below reads it from the
    # environment instead of prompting a second time
    os.environ['ZADARA_VPSA_ACCESS_TOKEN'] = vpsa_token
    vpsa = zcon.vpsa_by_export_path( cfg['zadara_vpsa']['volume_export_path'], vpsa_token )
    if vpsa is None:
        logger.critical(
                'vpsa with export_path(%s) not found; maybe it is hibernated?' %
                cfg['zadara_vpsa']['volume_export_path'] )
        exit( 1 )
    logger.debug('found vpsa with export_path (%s)! it has id (%d)' % (
        cfg['zadara_vpsa']['volume_export_path'], vpsa['id']) )
    zcli = setup_zadara_vpsa_client( zcon, vpsa['id'] )
    return zcli
def print_snapshot_list_from_volume( cli, volume ):
    """Print the snapshots of the volume's consistency group as a numbered
    menu and return them as a dict {1-based index: snapshot dict}.
    Exits with status 1 when there are no snapshots."""
    snapshots = {}
    snap_list = cli.get_snapshots_for_cgroup( volume['cg_name'] )
    if snap_list is None or 0 == len( snap_list ):
        logger.critical(
                'no snapshots available for volume with export_path(%s)' %
                volume['nfs_export_path'] )
        exit( 1 )
    logger.debug('return from snapshot list has (%d) elements' % len( snap_list ) )
    # 1-based menu numbering, matching the '[1..N]' prompt in __main__
    i = 1
    print 'available snapshots for volume with export_path(%s):' % volume['nfs_export_path']
    for s in snap_list:
        print '%d: %s [%s]' % ( i, s['modified_at'], s['display_name'] )
        snapshots[i] = s
        i += 1
    return snapshots
def clone_from_snapshot( cli, volume, snapshot_id ):
    """Clone *volume*'s consistency group from *snapshot_id* and poll until
    the clone appears; exits with status 1 when it never does."""
    # Clone display name encodes only the creation time (the snapshot-based
    # variant is kept below, commented, for reference).
    timestamp = time.strftime( "%y%m%d_%H%M", time.localtime() )
    #clone_volume_display_name = 'clone_snap_%s_on_%s' % ( snapshot_id.replace('-', '_'), timestamp )
    clone_volume_display_name = 'clone_on_%s' % timestamp
    logger.debug( 'cloning volume (%s) with display_name (%s), from snapshot_id (%s)' %
            ( volume['cg_name'], clone_volume_display_name, snapshot_id ) )
    clone = cli.clone_volume(
            cgroup=volume['cg_name'],
            clone_name=clone_volume_display_name,
            snap_id=snapshot_id )
    # Cloning may complete asynchronously: poll by display name up to
    # max_checks times, sleeping timeout_in_sec between attempts.
    timeout_in_sec = 5
    max_checks = 5
    i = 0
    while clone is None and i < max_checks:
        time.sleep( timeout_in_sec )
        clone = cli.get_volume_by_display_name( clone_volume_display_name )
        i += 1
    if i == max_checks and clone is None:
        logger.critical('error cloning volume')
        exit( 1 )
    logger.debug( 'cloned volume object is (%s)' % clone )
    return clone
def shift_export_paths( cli, source_volume, clone_volume ):
    """Swap the clone into the source volume's export path.

    Sequence (order matters): detach all servers from the source, rename the
    source's export to a timestamped inactive name, rename the clone's
    export to the original (de facto) path, then re-attach the same servers
    to the clone.

    :return: tuple (source volume name, clone volume name) as reported by
        the rename calls
    """
    timestamp = time.strftime( "%y%m%d-%H%M", time.localtime() )
    de_facto_export_path = source_volume['nfs_export_path']
    # the old source keeps its data under a timestamped, inactive path
    inactive_export_path = '%s_%s' % ( source_volume['nfs_export_path'], timestamp )
    logger.debug('preparing to shift export paths: (%s)-->(%s)-->X(%s)' %
            ( inactive_export_path, de_facto_export_path, clone_volume['nfs_export_path'] ) )
    src_servers = cli.detach_volume_from_all_servers( source_volume['name'] )
    src_volume_name = cli.update_export_name_for_volume(
            source_volume['name'],
            os.path.basename( inactive_export_path ) )
    logger.debug('detached source_volume from all servers (%s)' % src_servers )
    clone_volume_name = cli.update_export_name_for_volume(
            clone_volume['name'],
            os.path.basename( de_facto_export_path ) )
    clone_servers = cli.attach_volume_to_servers( clone_volume['name'], src_servers )
    logger.debug('attached all servers to clone volume (%s)' % clone_servers )
    logger.debug('src_volume_name(%s) and clone_volume_name(%s)' % ( src_volume_name,
        clone_volume_name ) )
    return ( src_volume_name, clone_volume_name )
def copy_snapshot_policies( cli, source_volume, clone_volume ):
    """Attach every snapshot policy of the source volume's consistency group
    to the clone volume's consistency group.

    :param cli: ZVpsaClient
    :param source_volume: volume dict whose 'cg_name' policies are copied
    :param clone_volume: volume dict receiving the policies
    :return: the list of policy dicts that were copied
    """
    src_policies = cli.get_snapshot_policies_for_cgroup( source_volume['cg_name'] )
    # lazy %-args instead of eager string formatting
    logger.debug( 'policies from src_volume (%s)', src_policies )
    for p in src_policies:
        cli.attach_snapshot_policy_to_cgroup( clone_volume['cg_name'], p['name'] )
    logger.debug( 'policies now attached to clone_volume as well...' )
    return src_policies
if __name__ == '__main__':
    # End-to-end flow: read config, build clients, let the operator pick a
    # snapshot, clone it, swap export paths, and copy snapshot policies.
    cfg = read_config( 'config.yml' )
    setup_logging( cfg['logging'] )
    logger = logging.getLogger( __name__ )
    logger.info('STEP 1. logging configured!')
    logger.info('STEP 2. setting up zadara client...')
    zcli = setup_zadara_client()
    logger.info('STEP 3. finding volume to be clone by export_path (%s)',
            cfg['zadara_vpsa']['volume_export_path'])
    volume_to_clone_info = zcli.get_volume_by_export_path( cfg['zadara_vpsa']['volume_export_path'] )
    logger.info('STEP 4. volume found (%s); printing snapshots available',
            volume_to_clone_info['display_name'] )
    snapshots = print_snapshot_list_from_volume( zcli, volume_to_clone_info )
    # BUG FIX: the previous condition
    #   while not s_index and s_index not in snapshots.keys():
    # stopped prompting as soon as ANY non-zero number was typed, so an
    # out-of-range index escaped the loop and crashed on the lookup below.
    # Keep prompting until the index is actually a key of the snapshot dict.
    s_index = None
    while s_index not in snapshots:
        s_index = int( raw_input('which snapshot to clone? [1..%d]: ' % len( snapshots ) ) )
    logger.info('STEP 5. snapshot picked (%s), cloning...',
            snapshots[ s_index ]['display_name'] )
    clone_info = clone_from_snapshot(
            zcli,
            volume_to_clone_info,
            snapshots[ s_index ]['name'] )
    logger.info('STEP 6. cloned as volume (%s); changing export_paths...',
            clone_info['display_name'] )
    ( src_path, clone_path ) = shift_export_paths(
            zcli,
            volume_to_clone_info,
            clone_info )
    logger.info('STEP 7. attaching snapshot policies...')
    p_list = copy_snapshot_policies(
            zcli,
            volume_to_clone_info,
            clone_info )
    logger.info('STEP 8. remount shared storage in mh nodes and we are done.')
| StarcoderdataPython |
3308624 | <reponame>danmerl/jobbot
from scrapy.item import Item, Field
class JobItem(Item):
    """Scrapy item for one scraped job posting; field semantics follow the
    field names (populated by the spiders in this project)."""
    organization = Field()
    date = Field()
    title = Field()
    jobid = Field()
    description = Field()
| StarcoderdataPython |
1699306 | <reponame>orelogo/fartberry<filename>fartberry/config.py
#!/usr/bin/env python3
import json
from pathlib import Path
from fartberry.logger import logger
# Keys looked up in config.json by _Config below.
POSTGRES_USER = 'postgres_user'
POSTGRES_DATABASE = 'postgres_database'
IS_GEOLOCATION_ENABLED = 'is_geolocation_enabled'
POLLING_FREQUENCY_IN_SEC = 'polling_frequency_in_sec'
class _Config():
    """Application settings loaded once from config.json in the project root."""

    def __init__(self) -> None:
        """Parse config.json and expose its values as attributes.

        Raises KeyError when a required key is missing, which surfaces a
        misconfiguration at import time rather than later at first use.
        """
        # config.json sits one directory above this package.  Path objects
        # are accepted by open() directly (3.6+), so the str() round-trip of
        # the previous revision was unnecessary; an explicit encoding keeps
        # parsing independent of the locale.
        config_path = Path(__file__).parent.parent / 'config.json'
        with open(config_path, 'r', encoding='utf-8') as f:
            config_json = json.load(f)
        self.postgres_user = config_json[POSTGRES_USER]
        self.postgres_database = config_json[POSTGRES_DATABASE]
        self.is_geolocation_enabled = config_json[IS_GEOLOCATION_ENABLED]
        self.polling_frequency_in_sec = config_json[POLLING_FREQUENCY_IN_SEC]
        logger.debug('Config loaded')


# Module-level singleton imported by the rest of the package.
config = _Config()
| StarcoderdataPython |
137143 | import os
import re
import cv2
import numpy as np
import pandas as pd
from Scripts.Experiments import RESULTS
# ------------------------------------------------------------------------------------------------------------------ #
# -------------------------------------------------- Restructure UNBC Data ----------------------------------------- #
def get_user_number(folder_name):
    """Extract the numeric user id preceding a '-' in a folder name.

    Uses re.search instead of findall+[0] under a bare ``except`` (which
    silently swallowed every error, not just a missing match).

    :param folder_name: string, e.g. "047-ll047"
    :return: int user number, or "" when the pattern is absent
    """
    match = re.search(r"\d+(?=-)", folder_name)
    return int(match.group()) if match else ""
def get_frame_number(filename):
    """Extract the frame number directly preceding the '.png' extension.

    The previous pattern used an unescaped '.' (any character) before
    'png'; the dot is now escaped so only a literal '.png' matches.  The
    bare ``except`` is gone: a missing match is handled explicitly.

    :param filename: string, e.g. "session042.png"
    :return: int frame number, or "" when the pattern is absent
    """
    match = re.search(r"\d+(?=\.png)", filename)
    return int(match.group()) if match else ""
def get_session_id(filename):
    """Extract the session id: everything before the trailing 3-digit frame
    number of a '.png' filename.

    The previous pattern used an unescaped '.' before 'png' (now escaped)
    and a bare ``except`` (now an explicit missing-match check).

    :param filename: string, e.g. "gf097t38aff001.png"
    :return: session id string, or "" when the pattern is absent
    """
    match = re.search(r"\S+(?=\d{3}\.png)", filename)
    return match.group() if match else ""
def get_user_number_from_filename(filename):
    """Extract the 3-digit user number that precedes a 't' in a filename.

    Uses re.search with an explicit missing-match check instead of
    findall+[0] under a bare ``except``.

    :param filename: string, e.g. "gf097t38aff001.png"
    :return: int user number, or "" when the pattern is absent
    """
    match = re.search(r"\d{3}(?=t)", filename)
    return int(match.group()) if match else ""
def read_pain_score_from_file(filepath):
    """Read the pain score from the first line of a label file.

    Fixes: the file handle is now closed (``with``), the whole file is no
    longer read just to use its first line, the bare ``except`` is narrowed
    to I/O and parse errors, and an empty file yields "" (consistent with
    the other helpers; it previously fell through to None).

    :param filepath: path to a text file whose first line is a float score
    :return: int score, or "" when the file is missing/empty/non-numeric
    """
    try:
        with open(filepath, 'r') as handle:
            for line in handle:
                return int(float(line.rstrip('\n')))
        return ""
    except (OSError, IOError, ValueError):
        return ""
def get_filename_without_extension(filename):
    """Strip the extension from a filename that ends in a 3-digit frame
    number plus extension (keeps everything through the digits).

    Uses re.search with an explicit missing-match check instead of
    findall+[0] under a bare ``except``.

    :param filename: string, e.g. "gf097t38aff001.png"
    :return: filename stem string, or "" when the pattern is absent
    """
    match = re.search(r"\S+\d{3}", filename)
    return match.group() if match else ""
# ------------------------------------------------------------------------------------------------------------------ #
# -------------------------------------------------- Load Pain Data ------------------------------------------------ #
def load_and_prepare_pain_data(path, person, pain, model_type):
    """
    Utility function loading pain image data into memory, and preparing the labels for training.
    Note, this function expects the image files to have the following naming convention:
    "43_0_0_0_2_original_straight.jpg", to be converted into the following label array:
    [person, session, culture, frame, pain_level, transformation_1, transformation_2]
    :param path: string, root path to all images to be loaded
    :param person: int, index where 'person' appears in the file name converted to an array.
    :param pain: int, index where 'pain_level' appears in the file name converted to an array.
    :param model_type: string, specifying the model_type (CNN, or ResNet)
    :return:
        data: 4D numpy array, images as numpy array in shape (N, 215, 215, 1)
        labels_binary: 2D numpy array, one-hot encoded labels [no pain, pain] (N, 2)
        train_labels_people: 2D numpy array, only including the "person" label [person] (N, 1)
        labels: 2D numpy array, all labels as described above (N, 7)
    """
    # CNN models are fed greyscale (color flag 0), everything else RGB (1)
    color = 0 if model_type == 'CNN' else 1
    data, labels = load_pain_data(path, color=color)
    # FIX: astype(int) — the np.int alias is deprecated and removed in
    # NumPy >= 1.24; np.int was a plain alias of the builtin int, so the
    # resulting dtype is unchanged.
    labels_ord = labels[:, pain].astype(int)
    labels_binary = reduce_pain_label_categories(labels_ord, max_pain=1)
    train_labels_people = labels[:, person].astype(int)
    return data, labels_binary, train_labels_people, labels
def load_pain_data(train_path, test_path=None, label_type=None, color=0):
    """
    Load function, loading pain dataset into numpy arrays with labels, normalized
    in place to [0, 1]. Either just loads train data, or train and test.
    :param color: int, if 0 then greyscale, if 1 then color
    :param train_path: string, root directory
    :param test_path: string, optional, root test directory
    :param label_type: string, if not None, only a specific label will be attached to each image
    :return: (train_data, train_labels) or (train_data, train_labels, test_data, test_labels)
    """
    train_data, train_labels = load_image_data(train_path, color, label_type)
    print("Normalization")
    np.divide(train_data, 255.0, out=train_data, dtype=np.float32)
    if test_path:
        # BUG FIX: the color flag was previously dropped for the test set
        # (label_type was passed in the color position), so test images were
        # decoded differently from train images.
        test_data, test_labels = load_image_data(test_path, color, label_type)
        print("Normalization")
        np.divide(test_data, 255.0, out=test_data, dtype=np.float32)
        return train_data, train_labels, test_data, test_labels
    return train_data, train_labels
def load_image_data(path, color=0, label_type=None):
    """
    Utility function, loading all images in a directory and its sub-directories into a numpy array, labeled.
    Shuffles the path list in place before loading (labels stay aligned with images).
    :param color: int, if 0 then greyscale (a trailing channel axis is added), if 1 then color
    :param path: string, root directory or list of strings (image paths)
    :param label_type: string, if not None, only a specific label will be attached to each image
    :return:
        data, labels: tuple of numpy arrays, holding images and labels
    """
    img_paths = get_image_paths(path) if isinstance(path, str) else path
    np.random.shuffle(img_paths)
    data = []
    # FIX: the loop variable previously shadowed the 'path' parameter
    for idx, img_path in enumerate(img_paths):
        if color == 0:
            img = np.expand_dims(cv2.imread(img_path, color), -1)
        else:
            img = cv2.imread(img_path, color)
        data.append(img)
        # progress message every 1000 images (skipping the spurious one at 0)
        if idx and not idx % 1000:
            print("{} images processed".format(idx))
    data = np.array(data, dtype=np.float32)
    labels = np.array(get_labels(img_paths, label_type=label_type))
    return data, labels
def get_image_paths(root_path, ext='.jpg'):
    """Recursively collect the paths of all files under *root_path* whose
    extension equals *ext*.

    :param root_path: directory where the recursive search starts
    :param ext: file extension to match, including the dot
    :return: list of matching file paths, in os.walk order
    """
    return [
        os.path.join(dirpath, filename)
        for dirpath, _dirnames, filenames in os.walk(root_path)
        for filename in filenames
        if os.path.splitext(filename)[1] == ext
    ]
def get_labels(image_paths, label_type=None, ext='.png'):
    """Turn image paths into a list of label rows.

    A filename stem like "43_0_0_0_2_original_straight" splits on '_' into
    [person, session, culture, frame, pain, augmented, ...]. Files whose
    extension differs from *ext* are skipped.

    :param image_paths: iterable of image paths
    :param label_type: when given, return only that one label (as int) per
        image instead of the full string list
    :param ext: file extension of files to include
    :return: list of label rows (lists of strings), or list of ints when
        label_type is set
    """
    label_index = {
        'person': 0,
        'session': 1,
        'culture': 2,
        'frame': 3,
        'pain': 4,
        'augmented': 5,
    }
    labels = []
    for img_path in image_paths:
        stem, extension = os.path.splitext(os.path.basename(img_path))
        if extension != ext:
            continue
        parts = stem.split("_")
        if label_type is None:
            labels.append(parts)
        else:
            labels.append(int(parts[label_index[label_type]]))
    return labels
def reduce_pain_label_categories(labels, max_pain):
    """Clamp ordinal labels from above so no label exceeds *max_pain*
    (e.g. max_pain=1 collapses the ordinal scale to binary 0/1).

    :param labels: numpy array (or array-like) of ordinal labels
    :param max_pain: int, largest label value to keep
    :return: numpy array of clamped labels
    """
    return np.clip(labels, None, max_pain)
def create_pain_df(path, pain_gap=(), binarize=True):
    """
    Generate a Pandas DataFrame object that contains all img_paths excluding a specified pain_gap in a given folder path
    :param path: string, super parent folder path
    :param pain_gap: tuple of int's, specifying which pain classes to exclude from training
    :param binarize: bool, if the label "pain" is to be binarized
    :return: pd.DataFrame with columns Person, Session, Culture, Frame, Pain,
        Trans_1, Trans_2, img_path, temp_id
    """
    # Get image paths and convert file labels to numpy array
    img_paths = np.array(get_image_paths(path, ext=".png"))
    labels = np.array(get_labels(img_paths))
    # Create dataframe; numeric columns are parsed from the filename strings
    df = pd.DataFrame(labels, columns=['Person', 'Session', 'Culture', 'Frame', 'Pain', 'Trans_1', 'Trans_2'])
    df[['Person', 'Session', 'Culture', 'Frame', 'Pain']] = df[
        ['Person', 'Session', 'Culture', 'Frame', 'Pain']].astype(int)
    df['img_path'] = img_paths
    df[['Trans_1', 'Trans_2', 'img_path']] = df[['Trans_1', 'Trans_2', 'img_path']].astype(str)
    # Transformations sorted descending so e.g. 'original' rows come first per frame
    df = df.sort_values(['Person', 'Session', 'Frame', 'Trans_1', 'Trans_2'],
                        ascending=[True, True, True, False, False]).reset_index(drop=True)
    # Create a unique ID for each entry (string concat of person/session/frame)
    df['temp_id'] = df['Person'].astype(str) + df['Session'].astype(str) + df['Frame'].astype(str)
    # Exclude elements in the pain gap
    df = df[~df['Pain'].isin(pain_gap)]
    # Binarize Pain label (any pain level above 0 becomes 1)
    if binarize:
        df['Pain'] = np.minimum(df['Pain'], 1)
    return df
# ------------------------------------------------ End Load Pain Data ---------------------------------------------- #
# ------------------------------------------------------------------------------------------------------------------ #
# ------------------------------------------------------------------------------------------------------------------ #
# ------------------------------------------------- Split Functions ------------------------------------------------ #
def split_data_into_clients_dict(people, *args):
    """
    Group per-sample data arrays by client.
    :param people: numpy array of client identifiers, one per sample (set_size, 1)
    :param args: one or more numpy arrays, each the same length as `people`
    :return:
        if a single array was given: one dict mapping client -> numpy array of
        that client's samples; otherwise a tuple of such dicts, one per input array
    """
    results = []
    for data in args:
        grouped = {}
        for client, sample in zip(people, data):
            grouped.setdefault(client, []).append(sample)
        # Convert each client's sample list to a numpy array.
        grouped = {client: np.array(samples) for client, samples in grouped.items()}
        if len(args) == 1:
            # Single-input shortcut: return the bare dict, not a tuple.
            return grouped
        results.append(grouped)
    return tuple(results)
def split_data_into_shards(split=None, cumulative=True, array=None):
    """
    Split every array in `array` into shards at the given percentage points.
    The split points are rescaled so their maximum maps to 100% of the data.
    :param split: list of percentage split points, e.g. [0.01, 0.05, 0.1, 0.2]
    :param cumulative: bool, if True each shard also contains all previous shards
    :param array: list of arrays to split
    :return:
        list (one entry per input array) of shard lists
    """
    n_samples = len(array[0])
    scale = max(split)
    boundaries = [int(point / scale * n_samples) for point in split]
    shards = [np.array_split(data, boundaries) for data in array]
    if cumulative:
        # Turn [[0], [1]] style shards into [[0], [0, 1]].
        shards = [cumconc(shard) for shard in shards]
    return shards
def split_data_into_labels(label, all_labels, cumulative, *args):
    """
    Partition `all_labels` and every array in `args` by the values found in one
    label column.
    :param label: int, column index into [person, session, culture, frame, pain,
        trans_1, trans_2]
    :param all_labels: 2D numpy array, one label row per sample
    :param cumulative: bool, if True each group also contains all previous groups
    :param args: arrays to partition alongside the labels
    :return:
        tuple: first the grouped label rows, then one grouped array per *args entry
    """
    keys = np.unique(all_labels[:, label])
    grouped = [np.array([all_labels[all_labels[:, label] == key] for key in keys])]
    for data in args:
        grouped.append(np.array([data[all_labels[:, label] == key] for key in keys]))
    if cumulative:
        grouped = [cumconc(group) for group in grouped]
    return tuple(grouped)
def train_test_split(test_ratio, *args):
    """
    Plain positional train/test split of one or more aligned arrays.
    :param test_ratio: float, fraction assigned to the first (train) part,
        e.g. 0.8 means 80% train and 20% test
    :param args: arrays to split, all assumed to have the same length
    :return:
        list of (train_part, test_part) tuples, one per input array
    """
    cut = int(len(args[0]) * test_ratio)
    return [(data[:cut], data[cut:]) for data in args]
def cumconc(array):
    """
    Build a cumulative view of a split numpy array: [[0], [1]] -> [[0], [0, 1]].
    Each output element is a prefix slice of the concatenated input.
    :param array: sequence of numpy arrays
    :return:
        numpy array of cumulative prefixes
    """
    total = np.concatenate(array)
    # End offset of each chunk inside the concatenated array.
    ends = np.fromiter((len(chunk) for chunk in array), int, len(array)).cumsum()
    return np.array([total[:end] for end in ends])
# ----------------------------------------------- End Split Functions ---------------------------------------------- #
# ------------------------------------------------------------------------------------------------------------------ #
# ------------------------------------------------------------------------------------------------------------------ #
# ----------------------------------------- Jupyter notebook helper functions -------------------------------------- #
def mirror_folder_structure(input_path, output_path):
    """
    Recreate the directory tree below `input_path` under `output_path`
    (directories only — files are not copied).
    :param input_path: string, source root
    :param output_path: string, destination root
    :return:
    """
    for current_dir, _, _ in os.walk(input_path):
        # Path of the current directory relative to the source root.
        relative = current_dir[len(input_path) + 1:]
        target = os.path.join(output_path, relative)
        if not os.path.isdir(target):
            # os.walk is top-down, so parents are always created first.
            os.mkdir(target)
def reset_to_raw(root_path, dest_dir='raw', ext='.jpg'):
    """
    Move every file with the given extension found anywhere under `root_path`
    into a single flat destination folder (created if missing).
    :param root_path: string, root to scan
    :param dest_dir: string, destination folder name (created inside root_path)
    :param ext: string, file extension to collect, including the dot
    :return:
    """
    target = os.path.join(root_path, dest_dir)
    if not os.path.isdir(target):
        os.mkdir(target)
    for current_dir, _, filenames in os.walk(root_path):
        for name in filenames:
            if os.path.splitext(name)[1] != ext:
                continue
            # Files already inside the destination rename onto themselves (no-op).
            os.rename(os.path.join(current_dir, name), os.path.join(target, name))
def delete_empty_folders(root_path):
    """
    Remove directories under `root_path` that contain neither files nor
    subdirectories. Single top-down pass: a directory that becomes empty only
    because its children were removed is not deleted in the same call.
    :param root_path: string, root to scan
    :return:
    """
    for current_dir, subdirs, filenames in os.walk(root_path):
        if subdirs or filenames:
            continue
        os.rmdir(current_dir)
def create_pivot(path, index, columns, values, pain_level=0, pain_gap=()):
    """
    Create a pivot table showing the test subjects and their pain level per session.
    :param path: string, file path where the images lie
    :param index: index of the pivot, can be 'Session' or 'Person'
    :param columns: columns of the pivot, can be 'Session' or 'Person'
    :param values: values of the pivot, can be 'Session' or 'Person', should equal index
    :param pain_level: value from where the binary classifier should consider "pain"
    :param pain_gap: tuple of int's, specifying which pain classes to exclude from training
    :return:
        Pandas DataFrame, pivot table with per-cell pain counts, 'Pain'/'No Pain'
        totals, a 'Total' row and a 'Pain %' column; zero cells are blanked
    """
    img_paths = get_image_paths(path)
    labels = np.array(get_labels(img_paths))
    cols = ['Person', 'Session', 'Culture', 'Frame', 'Pain', 'Trans_1', 'Trans_2']
    df = pd.DataFrame(labels, columns=cols)
    df[['Person', 'Session', 'Culture', 'Frame', 'Pain']] = df[
        ['Person', 'Session', 'Culture', 'Frame', 'Pain']].astype(int)
    df = df[~df['Pain'].isin(pain_gap)]

    # 1/0 presence matrix: which (index, columns) combinations occur at all.
    pivot = ~df[['Person', 'Session']].drop_duplicates().pivot(index=index, columns=columns, values=values).isnull() * 1
    pivot['# of ' + columns + 's'] = pivot.sum(1)
    pivot = pivot.sort_values('# of ' + columns + 's', ascending=False)

    # Per-row pain/no-pain counts plus per-cell pain counts.
    pivot['Pain'] = 0
    pivot['No Pain'] = 0
    for person, df_person in df.groupby(index):
        pivot.at[person, 'No Pain'] = sum(np.array(df_person['Pain'] <= pain_level))
        pivot.at[person, 'Pain'] = sum(np.array(df_person['Pain'] > pain_level))
        for col in pivot.columns:
            # Integer-named columns are the pivoted Session/Person cells.
            if type(col) is int:
                pivot.at[person, col] = sum(np.array(df_person[df_person[columns] == col]['Pain'] > pain_level))

    # BUGFIX: the original contained `if columns is 'Session'` / `if index is
    # 'Session'` blocks — identity comparison against a string literal is
    # unreliable (SyntaxWarning), and their bodies only renamed columns/index
    # entries to themselves (no-ops), so they are removed.

    # Append a 'Total' row. DataFrame.append() was removed in pandas 2.x;
    # pd.concat with the summed Series transposed is the equivalent.
    pivot = pd.concat([pivot, pivot.sum(0).rename("Total").to_frame().T])
    pivot['Pain %'] = round(pivot['Pain'] / (pivot['Pain'] + pivot['No Pain']), 2)
    pivot[pivot == 0] = ''
    return pivot
# --------------------------------------- End Jupyter notebook helper functions ------------------------------------ #
# ------------------------------------------------------------------------------------------------------------------ #
# ------------------------------------------------------------------------------------------------------------------ #
# --------------------------------------------- Data Balancing algorithms ------------------------------------------ #
def sample_df(df, threshold):
    """
    Resample a DataFrame to exactly `threshold` rows: down-sample without
    replacement when there are too many rows, otherwise top up by sampling
    with replacement.
    :param df: DataFrame with columns
        ['Person', 'Session', 'Culture', 'Frame', 'Pain', 'Trans_1', 'Trans_2', 'temp_id']
    :param threshold: int, target row count
    :return:
        Pandas DataFrame with `threshold` rows
    """
    if len(df) <= threshold:
        extra = df.sample(threshold - len(df), replace=True)
        return pd.concat((df, extra))
    return df.sample(threshold, replace=False)
def balance_session(df, threshold):
    """
    Balance a session so that an equal number of positive and negative examples
    are included per client. A client is only kept when it has both positive
    and negative examples.
    :param df: DataFrame with columns
        ['Person', 'Session', 'Culture', 'Frame', 'Pain', 'Trans_1', 'Trans_2', 'temp_id']
    :param threshold: int, rows sampled per class and client
    :return:
        Resampled Pandas DataFrame, or an empty DataFrame with the same columns
        when no client qualifies
    """
    balanced = []
    for _, client_df in df.groupby('Person'):
        positives = client_df[client_df['Pain'] == 1]
        negatives = client_df[client_df['Pain'] == 0]
        if len(positives) == 0 or len(negatives) == 0:
            # Skip clients lacking one of the two classes.
            continue
        balanced.append(pd.concat((sample_df(positives, threshold),
                                   sample_df(negatives, threshold))))
    if not balanced:
        return pd.DataFrame(columns=df.columns)
    return pd.concat(balanced)
def balance_data(df, threshold):
    """
    Moving window over the pain data per client, preferring the most recent
    sessions, collecting rows until both classes exceed `threshold`; the
    collected rows are then class-balanced via balance_session.
    :param df: DataFrame with columns
        ['Person', 'Session', 'Culture', 'Frame', 'Pain', 'Trans_1', 'Trans_2', 'temp_id']
    :param threshold: int, per-class sampling target
    :return:
        Resampled Pandas DataFrame
    """
    selected = []
    for _, client_df in df.groupby('Person'):
        pain_parts, no_pain_parts = [], []
        # Walk sessions newest-first until both classes pass the threshold.
        for _, session_df in reversed(tuple(client_df.groupby('Session'))):
            pain_parts.append(session_df[session_df['Pain'] == 1])
            no_pain_parts.append(session_df[session_df['Pain'] == 0])
            if len(pd.concat(pain_parts)) > threshold and len(pd.concat(no_pain_parts)) > threshold:
                break
        selected.append(pd.concat(pain_parts + no_pain_parts))
    return balance_session(pd.concat(selected), threshold)
def split_and_balance_df(df, ratio, balance_test=False):
    """
    Split a data frame into train and test image paths per person. The train
    part is class-balanced; balancing the test part is optional. With
    ratio == 1 this simply balances the frame without holding anything out.
    :param df: Pandas DataFrame, cols: [Person, Session, Culture, Frame, Pain,
        Trans_1, Trans_2, img_path, temp_id]
    :param ratio: float, fraction of data used for training
    :param balance_test: bool, whether to balance the test data too
    :return:
        Tuple (train DataFrame, test DataFrame), both shuffled
    """
    # Split only the untransformed originals into train/test per person.
    originals = df[(df['Trans_1'] == 'original') & (df['Trans_2'] == 'straight')]
    train = (originals.sample(frac=1)
             .groupby('Person', group_keys=False)
             .apply(lambda person_df: person_df.sample(frac=ratio)))
    test = originals.drop(train.index)

    def _balance(subset_ids):
        # Pull all permutations with matching ids, then down-sample the
        # no-pain class to the pain-class size.
        subset = df[df['temp_id'].isin(subset_ids)]
        pain = subset[subset['Pain'] > 0]
        no_pain = subset[subset['Pain'] == 0].sample(len(pain))
        return pd.concat((pain, no_pain), ignore_index=True)

    train = _balance(train['temp_id'])
    if balance_test:
        test = _balance(test['temp_id'])
    else:
        test = df[df['temp_id'].isin(test['temp_id'])] if False else test

    # Return shuffled frames.
    return train.sample(frac=1), test.sample(frac=1)
# ------------------------------------------- End Data Balancing algorithms ---------------------------------------- #
# ------------------------------------------------------------------------------------------------------------------ #
def move_files(target_folder, seed):
    """
    Move result files for one experiment seed into their final folder and strip
    the seed suffix from their names.
    :param target_folder: string, folder (under RESULTS/Thesis) to move files into
    :param seed: int, experiment seed used to identify and rename the files
    :return:
    """
    # Create folder structure (target folder plus its 'Plotting' subfolder).
    target_f_path = os.path.join(RESULTS, 'Thesis', target_folder)
    for needed in (target_f_path, os.path.join(target_f_path, 'Plotting')):
        if not os.path.isdir(needed):
            os.mkdir(needed)

    # Move every entry in RESULTS whose name contains the seed.
    seed_tag = str(seed)
    for name in os.listdir(RESULTS):
        if seed_tag in name:
            os.rename(os.path.join(RESULTS, name), os.path.join(target_f_path, name))

    # Drop the "_<seed>" suffix from the moved file and folder names.
    suffix = "_" + seed_tag
    for name in os.listdir(target_f_path):
        if suffix in name:
            os.rename(os.path.join(target_f_path, name),
                      os.path.join(target_f_path, name.replace(suffix, '')))
| StarcoderdataPython |
3391852 | # fonte https://www.twilio.com/docs/libraries/python
# pip install twilio
# Minimal script: send a single SMS through the Twilio REST API.
from twilio.rest import Client

# Your Account SID from twilio.com/console
# SECURITY NOTE(review): credentials are hardcoded in source; load them from
# environment variables or a secrets store before committing real values.
account_sid = "<KEY>"
# Your Auth Token from twilio.com/console
auth_token = "<PASSWORD>"

# REST client authenticated with the account credentials above.
client = Client(account_sid, auth_token)

# Create (and send) one SMS; `to`/`from_` are E.164-formatted phone numbers.
message = client.messages.create(
    to="+5548996773435",
    from_="+13237680831",
    body="teste de envio SMS com Python!. By geanclm in 29/08/2021")

# The SID uniquely identifies the created message resource on Twilio's side.
print(message.sid)
# Package version string.
__version__ = '0.0.1'
# Public submodules exported by `from <package> import *`.
__all__ = ['bert_ner', 'utils']
| StarcoderdataPython |
20019 | <reponame>5x5x5x5/Back2Basics
#def spam():
# eggs = 31337
#spam()
#print(eggs)
"""
def spam():
eggs = 98
bacon()
print(eggs)
def bacon():
ham = 101
eggs = 0
spam()
"""
"""
# Global variables can be read from local scope.
def spam():
print(eggs)
eggs = 42
spam()
print(eggs)
"""
"""
# Local and global variables with the same name.
def spam():
eggs = 'spam local'
print(eggs) # prints 'spam local'
def bacon():
eggs = 'bacon local'
print(eggs) # prints 'bacon local'
spam()
print(eggs) # prints 'bacon local'
eggs = 'global'
bacon()
print(eggs) # prints 'global'
"""
"""
# the global statement
def spam():
global eggs
eggs = 'spam'
eggs = 'it don\'t matter'
spam()
print(eggs)
"""
"""
def spam():
global eggs
eggs = 'spam' # this is the global
def bacon():
eggs = 'bacon' # this is a local
def ham():
print(eggs) # this is the global
eggs = 42 # this is global
spam()
print(eggs)
"""
# Python will not fall back to using the global eggs variable
def spam():
    eggs = 'wha??'
    # NOTE(review): with the `eggs = 'wha??'` assignment added above, this
    # print succeeds; delete that line to reproduce the UnboundLocalError
    # described in the closing comment below.
    print(eggs) # ERROR!
    eggs = 'spam local'

eggs = 'global'
spam()
# This error happens because Python sees that there is an assignment statement for eggs in the spam() function and therefore considers eggs to be local. Because print(eggs) is executed before eggs is assigned anything, the local variable eggs doesn't exist.
| StarcoderdataPython |
60671 | <reponame>slainesimscale/simscale-python-sdk
# coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from simscale_sdk.configuration import Configuration
class AdvancedSimmetrixFluidSettings(object):
    """Advanced meshing settings for the Simmetrix fluid mesher.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech); keep manual edits minimal so it stays
    regenerable. The only behavioral-neutral change here is dropping the
    Python-2 `six` compatibility shim in favour of plain `dict` iteration.
    """

    # Maps attribute name -> declared OpenAPI type (consumed by to_dict()).
    openapi_types = {
        'small_feature_tolerance': 'DimensionalLength',
        'gap_elements': 'float',
        'global_gradation_rate': 'float'
    }

    # Maps python attribute name -> JSON field name from the API definition.
    attribute_map = {
        'small_feature_tolerance': 'smallFeatureTolerance',
        'gap_elements': 'gapElements',
        'global_gradation_rate': 'globalGradationRate'
    }

    def __init__(self, small_feature_tolerance=None, gap_elements=None, global_gradation_rate=None, local_vars_configuration=None):  # noqa: E501
        """AdvancedSimmetrixFluidSettings - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._small_feature_tolerance = None
        self._gap_elements = None
        self._global_gradation_rate = None
        self.discriminator = None

        # Route non-None arguments through the property setters so that
        # client-side range validation is applied on construction.
        if small_feature_tolerance is not None:
            self.small_feature_tolerance = small_feature_tolerance
        if gap_elements is not None:
            self.gap_elements = gap_elements
        if global_gradation_rate is not None:
            self.global_gradation_rate = global_gradation_rate

    @property
    def small_feature_tolerance(self):
        """Length below which geometric features are treated as negligible.

        :return: The small_feature_tolerance of this AdvancedSimmetrixFluidSettings.
        :rtype: DimensionalLength
        """
        return self._small_feature_tolerance

    @small_feature_tolerance.setter
    def small_feature_tolerance(self, small_feature_tolerance):
        """Set the small-feature tolerance (no client-side validation applies).

        :param small_feature_tolerance: The small_feature_tolerance of this AdvancedSimmetrixFluidSettings.
        :type: DimensionalLength
        """
        self._small_feature_tolerance = small_feature_tolerance

    @property
    def gap_elements(self):
        """Target number of elements across the thickness of thin gaps.

        See https://www.simscale.com/docs/simulation-setup/meshing/standard/#gap-refinement-factor

        :return: The gap_elements of this AdvancedSimmetrixFluidSettings.
        :rtype: float
        """
        return self._gap_elements

    @gap_elements.setter
    def gap_elements(self, gap_elements):
        """Set the gap-element target; the valid range is 0..4 inclusive.

        :param gap_elements: The gap_elements of this AdvancedSimmetrixFluidSettings.
        :type: float
        :raises ValueError: if client-side validation is enabled and the value
            is outside [0, 4]
        """
        if (self.local_vars_configuration.client_side_validation and
                gap_elements is not None and gap_elements > 4):  # noqa: E501
            raise ValueError("Invalid value for `gap_elements`, must be a value less than or equal to `4`")  # noqa: E501
        if (self.local_vars_configuration.client_side_validation and
                gap_elements is not None and gap_elements < 0):  # noqa: E501
            raise ValueError("Invalid value for `gap_elements`, must be a value greater than or equal to `0`")  # noqa: E501

        self._gap_elements = gap_elements

    @property
    def global_gradation_rate(self):
        """Size ratio between two adjacent cells (mesh grading), range 1.0-3.0.

        1.0 would produce a uniform mesh with the smallest size everywhere,
        which is generally not recommended as it may produce very large meshes.

        :return: The global_gradation_rate of this AdvancedSimmetrixFluidSettings.
        :rtype: float
        """
        return self._global_gradation_rate

    @global_gradation_rate.setter
    def global_gradation_rate(self, global_gradation_rate):
        """Set the gradation rate; the valid range is 1..3 inclusive.

        :param global_gradation_rate: The global_gradation_rate of this AdvancedSimmetrixFluidSettings.
        :type: float
        :raises ValueError: if client-side validation is enabled and the value
            is outside [1, 3]
        """
        if (self.local_vars_configuration.client_side_validation and
                global_gradation_rate is not None and global_gradation_rate > 3):  # noqa: E501
            raise ValueError("Invalid value for `global_gradation_rate`, must be a value less than or equal to `3`")  # noqa: E501
        if (self.local_vars_configuration.client_side_validation and
                global_gradation_rate is not None and global_gradation_rate < 1):  # noqa: E501
            raise ValueError("Invalid value for `global_gradation_rate`, must be a value greater than or equal to `1`")  # noqa: E501

        self._global_gradation_rate = global_gradation_rate

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Python 3: plain dict iteration replaces six.iteritems().
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: val.to_dict() if hasattr(val, "to_dict") else val
                                for key, val in value.items()}
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, AdvancedSimmetrixFluidSettings):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, AdvancedSimmetrixFluidSettings):
            return True

        return self.to_dict() != other.to_dict()
| StarcoderdataPython |
import nltk
import os
import requests
from nltk.twitter import Query, Streamer, Twitter, TweetViewer, TweetWriter, credsfromfile
import twitter as TW
from os import listdir
from os.path import isfile, join
import json
import time

# SECURITY NOTE(review): API credentials are hardcoded in source; load them
# from environment variables or a credentials file before publishing.
app_key= 'Osyy0PSrhMRpnIWxjBLzLJeKR'
app_secret= '<KEY>'
oauth_token= '<KEY> '
oauth_token_secret= '<KEY>'

# python-twitter client, used below for the oEmbed (embeddable HTML) lookups.
api = TW.Api(consumer_key=app_key,
             consumer_secret=app_secret,
             access_token_key=oauth_token,
             access_token_secret=oauth_token_secret)

# NLTK twitter client; credsfromfile() reads its own credentials file.
oauth = credsfromfile()
client = Query(**oauth)
client.register(TweetViewer(limit=10))

# For each article JSON in ./articles: for the 10 stored trends ("0".."9"),
# search matching tweets and store an embeddable tweet HTML snippet back into
# the article entry, then rewrite the file.
files = [f for f in listdir('articles')]
for f in files:
    filepath = join('articles', f)
    with open(filepath, 'r') as json_file:
        json_data = json.load(json_file)
        print(f)
        for i in range (0, 10):
            try:
                x = str(i)
                trend = json_data[x]['trend']
                tweets = client.search_tweets(keywords=trend, limit=10)
                for tweet in tweets:
                    tweet_id = tweet['id']
                    tweet_json = api.GetStatusOembed(status_id=tweet_id, align = 'center')
                    # NOTE(review): each iteration overwrites 'tweet', so only
                    # the last returned tweet's embed is kept.
                    json_data[x]['tweet'] = tweet_json['html']
            except Exception as e:
                # Best-effort: a missing key or API error skips this entry.
                print(e)
    with open(filepath, 'w') as json_file:
        json.dump(json_data, json_file, indent = 4)
| StarcoderdataPython |
100964 | <reponame>opannekoucke/sympkf
from .random import Expectation, omega
from .util import PDESystem, Eq, remove_eval_derivative, upper_triangle
from .tool import clean_latex_name
from .constants import t as time_symbol
import collections
from sympy import Derivative, symbols, Function, sqrt, Integer, Rational, Mul, Matrix
import sympy
#__all__ =['SymbolicPKF','Field','PartialOrderDerivative']
# Add Abs operator for positive function in order to evaluate absolute values of positive function
# Example:
# --------
# >>> func = Function('f')(t,x)
# >>> Abs(f)
# |f(t,x)|
# >>> pos_func = Function('f',positive=True)(t,x)
# >>> Abs(f)
# f(t,x)
def _eval_Abs(self):
    """Let Abs(f) collapse to f for functions declared with positive=True.

    Returning None (implicitly) for non-positive functions keeps sympy's
    default Abs behavior, e.g. Abs(Function('f')(t, x)) stays |f(t, x)|.
    """
    if self.is_positive:
        return self


# Monkey-patch sympy.Function so the hook applies to every applied function.
Function._eval_Abs = _eval_Abs
class SymbolicPKF(object):
""" Parametric dynamics associated to a given system of evolution equations
The workflow is the follow one:
1. Calculation of the linearized dynamics using the Reynolds decomposition of random fields
2. Calculation of the Reynolds averaged dynamics
3. Calculation of the error dynamics
4. Calculation of the variance dynamics
5. Calculation of the normalized error dynamics
6. Calculation of the metric dynamics
"""
    def __init__(self, pde_system, closure=None):
        """Set up the PKF workflow for a system of evolution equations.

        :param pde_system: a PDESystem, or anything PDESystem() accepts
            (wrapped automatically when needed).
        :param closure: optional dict of user-supplied (external) closure
            substitutions applied when assembling the final systems.
        """
        if not isinstance(pde_system, PDESystem):
            pde_system = PDESystem(pde_system)
        self.pde_system = pde_system
        # One Field wrapper per prognostic function; Field (defined elsewhere
        # in this module) is assumed to expose the random/error/epsilon/
        # variance/std symbols used throughout -- TODO confirm.
        self.fields = collections.OrderedDict([(field, Field(field))
                                               for field in self.pde_system.prognostic_functions]
                                              )
        self.time_coordinate = self.pde_system.time_coordinate

        self.tl_system = None

        # Caches for the lazily-computed systems exposed as properties below;
        # each stays None until its property is first accessed.
        self._first_order_reynolds_system = None
        self._second_order_reynolds_system = None
        self._expectation_system = None
        self._tl_expectation_system = None
        self._error_system = None
        self._epsilon_system = None
        self._variance_system = None
        self._cross_covariance_system = None
        self._std_system = None
        self._metric_system = None
        self._system_in_metric = None
        self._system_in_diffusion = None
        self._system_in_aspect = None
        self._closed_system_in_metric = None
        self._closed_system_in_diffusion = None
        self._closed_system_in_aspect = None
        self._internal_closure = None

        # external closure (substitutions provided by the caller)
        self._closure = {} if closure is None else closure

        # Introduce or modify labels for error/normalized error/variance/cross-covariance
        self._label_covariance = "V_"
    def _compute_reynolds_system(self):
        """ Computation of Reynolds system at the second and the first orders

        Each prognostic field X is replaced by E[X] + eps * e_X (Reynolds
        decomposition, with eps a bookkeeping symbol); the right-hand sides
        are then Taylor-expanded in eps, and eps is finally set to 1.
        Results are stored in the two Reynolds-system caches.
        :return:
        """
        # Compute the linear reynolds system
        first_order_reynolds = []
        second_order_reynolds = []
        # Bookkeeping small parameter multiplying the error fields.
        epsilon = symbols('\\varepsilon')
        subs_reynolds = {
            field: Expectation(self.fields[field].random) + epsilon * self.fields[field].error
            for field in self.fields
        }
        for equation in self.pde_system.equations:
            # 1) Reynolds substitution
            equation = equation.subs(subs_reynolds).doit().expand()
            lhs, rhs = equation.args
            # 2) Calculation of the Second order (Taylor expansion up to eps**2)
            taylor = rhs.series(epsilon, 0, 3).removeO()
            equation = Eq(lhs, taylor).expand()
            # 3) Computation of the second-order Taylor's expansion (eps -> 1)
            second_order_reynolds.append(equation.subs(epsilon, Integer(1)))
            # 4) Computation of the first-order Taylor's expansion:
            #    drop the eps**2 terms first, then set eps -> 1.
            equation = equation.subs(epsilon*epsilon, Integer(0))
            first_order_reynolds.append(equation.subs(epsilon, Integer(1)))
        self._first_order_reynolds_system = first_order_reynolds
        self._second_order_reynolds_system = second_order_reynolds
    @property
    def first_order_reynolds_system(self):
        """ Lazily computed first-order (linearized) Reynolds system.

        Obtained from the dynamics by the Reynolds decomposition of the random
        fields, truncated at first order in the error.
        :return: list of equations
        """
        if self._first_order_reynolds_system is None:
            # Fills both first- and second-order caches in one pass.
            self._compute_reynolds_system()
        return self._first_order_reynolds_system
    @property
    def second_order_reynolds_system(self):
        """ Lazily computed second-order Reynolds (Taylor) system.
        :return: list of equations
        """
        if self._second_order_reynolds_system is None:
            # Fills both first- and second-order caches in one pass.
            self._compute_reynolds_system()
        return self._second_order_reynolds_system
    @property
    def expectation_system(self):
        """ Calculate the expectation dynamics of averaged fields.

        Takes the expectation of the second-order Reynolds equations, rewrites
        errors as sqrt(variance) * epsilon and applies the internal closure.
        :return: list of equations (cached after first access)
        """
        if self._expectation_system is None:
            # dict for the substitution: error -> variance**(1/2) * epsilon
            subs_error = {
                mfield.error: mfield.variance ** Rational(1 , 2) * mfield.epsilon for mfield in self.fields.values()
            }
            expectation_system = []
            for equation in self.second_order_reynolds_system:
                # 1) Computation of the expectation
                equation = Expectation(equation)
                # 2) Substitute error by variance/correlation terms
                equation = equation.subs(subs_error).doit().expand()
                # 3) Apply internal closure (internal_closure is defined
                #    elsewhere in this class -- TODO confirm its contract)
                equation = equation.subs(self.internal_closure).doit()
                expectation_system.append(equation)
            self._expectation_system = expectation_system
        return self._expectation_system
    @property
    def tl_expectation_system(self):
        """ Calculate the TL (tangent-linear) expectation system, used later in
        error_system.
        :return: list of equations (cached after first access)
        """
        if self._tl_expectation_system is None:
            tl_expectation_system = []
            for equation in self.first_order_reynolds_system:
                # 1) Computation of the expectation
                equation = Expectation(equation)
                # 2) Apply internal closure
                equation = equation.subs(self.internal_closure).doit()
                tl_expectation_system.append(equation)
            self._tl_expectation_system = tl_expectation_system
        return self._tl_expectation_system
    @property
    def error_system(self):
        """ Error equations: difference between the first-order Reynolds
        equations and the TL expectation equations.

        Note: the subtraction of two equations relies on the custom Eq class
        imported from .util (sympy's Eq does not support arithmetic).
        :return: list of equations (cached after first access)
        """
        if self._error_system is not None:
            return self._error_system
        else:
            # Compute the error system
            error_system = []
            for linear_reynolds_equation, expectation_equation in zip(self.first_order_reynolds_system,
                                                                      self.tl_expectation_system):
                error_equation = linear_reynolds_equation - expectation_equation
                error_system.append(error_equation.expand())
            self._error_system = error_system
        return self._error_system
    @property
    def variance_system(self):
        """ Dynamics of the error variances: d_t V = E[d_t e**2], with the
        error trend substituted from error_system and errors normalized to
        sqrt(V) * epsilon before applying the internal closure.
        :return: list of equations (cached after first access)
        """
        if self._variance_system is not None:
            return self._variance_system
        else:
            # Compute the variance system
            # trend substitutions: d_t e -> rhs of the error equations
            subs_error_trend = {equation.args[0]: equation.args[1] for equation in self.error_system}
            subs_error_to_epsilon = {
                self.fields[field].error: self.fields[field].epsilon * sqrt(self.fields[field].variance)
                for field in self.fields
            }
            # Build the system of equations
            variance_system = []
            for field in self.fields:
                variance = self.fields[field].variance
                error = self.fields[field].error
                # Definition of the variance trend: d_t V = E[d_t e**2]
                definition = Eq(
                    Derivative(variance, time_symbol),
                    Expectation(Derivative(error ** Rational(2), time_symbol), evaluate=False)
                )
                lhs, rhs = definition.args
                rhs = rhs.doit()
                rhs = rhs.subs(subs_error_trend)
                rhs = rhs.expand()
                rhs = rhs.subs(subs_error_to_epsilon).doit()
                rhs = rhs.expand()
                # Substitution with internal_closure and higher-order terms
                # (_apply_internal_closure defined elsewhere in this class)
                rhs = self._apply_internal_closure(rhs)
                equation = Eq(lhs, rhs)
                variance_system.append(equation)
            self._variance_system = variance_system
        return self._variance_system
    @property
    def epsilon_system(self):
        """
        Computation of the trend of epsilon (dynamics of the normalized error). This is used in the computation
        of the dynamics of the metric tensor: the substitution with epsilon is easier than with the error.
        :return: list of equations (cached after first access)
        """
        if self._epsilon_system is not None:
            return self._epsilon_system
        else:
            # Trend substitutions taken from the error and variance systems.
            subs_error_trend = {
                eq.args[0]: eq.args[1] for eq in self.error_system
            }
            subs_variance_trend = {
                eq.args[0]: eq.args[1] for eq in self.variance_system
            }
            subs_error = {
                self.fields[field].error: self.fields[field].epsilon * sqrt(self.fields[field].variance)
                for field in self.fields
            }

            epsilon_system = []
            for field in self.fields:
                epsilon = self.fields[field].epsilon
                error = self.fields[field].error
                sqrt_variance = sympy.sqrt(self.fields[field].variance)

                # d_t epsilon follows from epsilon = e / sqrt(V):
                lhs = Derivative(epsilon, time_symbol)
                rhs = Derivative(error / sqrt_variance, time_symbol).doit()
                # Substitute the error trend, then the variance trend, and
                # finally normalize the remaining errors into epsilons.
                rhs = rhs.subs(subs_error_trend).doit()
                rhs = rhs.subs(subs_variance_trend).doit()
                rhs = rhs.subs(subs_error).doit()
                equation = Eq(lhs, rhs.expand())
                epsilon_system.append(equation)

            self._epsilon_system = epsilon_system
        return self._epsilon_system
    @property
    def std_system(self):
        """ Calculation of the dynamics of the standard deviations.

        Comment:
            This system is not included in the calculation workflow of the
            parametric dynamics.
        :return: list of equations (cached after first access)
        """
        if self._std_system is not None:
            return self._std_system
        else:
            # Compute the std system: substitute V -> std**2 in the variance
            # dynamics, then isolate the time derivative of std.
            subs_variance = {
                self.fields[field].variance: self.fields[field].std ** Integer(2)
                for field in self.fields
            }
            std_system = []
            for equation in self.variance_system:
                # Replace variance by std
                equation = equation.subs(subs_variance).doit()
                # Extract the time-derivative trend from the lhs; the loop
                # deliberately leaves `trend` bound to the matching Derivative.
                trends = equation.args[0].atoms(Derivative)
                for trend in trends:
                    if time_symbol == trend.args[1]:
                        break
                # isolate() is provided by the custom Eq class from .util.
                equation = equation.isolate(trend)
                std_system.append(equation)
            self._std_system = std_system
        return self._std_system
    @property
    def multivariate_couples(self):
        """ Return couples of multivariate fields.

        Yields each unordered pair (f1, f2) of prognostic fields with i < j.
        Note: this is a generator, so it can only be iterated once per access.
        """
        fields = list(self.fields.keys())
        couples = ( (f1, f2) for i,f1 in enumerate(fields) for j,f2 in enumerate(fields) if i<j )
        return couples
    @property
    def cross_covariance_system(self):
        """ Return the dynamics of the cross-covariance in multivariate
        situations; returns None in the univariate case.
        :return: list of equations, or None (cached after first access)
        """
        if self._cross_covariance_system is not None:
            return self._cross_covariance_system
        elif len(list(self.fields.keys()))==1:
            # Univariate situation: there is no cross-covariance to track.
            return None
        else:
            # Compute the cross-covariance system
            # Set substitution dictionaries (error trends, error -> epsilon).
            subs_error_trends = {equation.args[0]: equation.args[1] for equation in self.error_system}
            subs_error_to_epsilon = {
                self.fields[field].error: self.fields[field].epsilon * sqrt(self.fields[field].variance)
                for field in self.fields
            }
            # Set the cross-covariance meta-data and compute the cross-covariance dynamics
            self._cross_covariance_system = []
            for couple in self.multivariate_couples:
                # 1) Extract fields and meta-data
                f1, f2 = couple
                mf1, mf2 = self.fields[f1], self.fields[f2]
                # 2) extract error fields; V12 is the symbol the internal
                #    closure associates with E[e1*e2] -- TODO confirm
                e1, e2 = mf1.error, mf2.error
                V12 = self.internal_closure[Expectation(e1*e2)]
                # 3) Definition and computation of the dynamics:
                #    d_t V12 = E[d_t(e1*e2)] with error trends substituted.
                lhs = Derivative(V12, time_symbol)
                rhs = Expectation(Derivative(e1*e2, time_symbol).doit()).subs(subs_error_trends)
                rhs = rhs.subs(subs_error_to_epsilon).doit()
                #.. todo ??:
                #   should include substitution from 'epsilon' in place of 'error'
                rhs = self._apply_internal_closure(rhs)
                # 4) update of the cross-covariance system
                self._cross_covariance_system.append( Eq(lhs,rhs.expand()) )
        return self._cross_covariance_system
    @property
    def metric_system(self):
        """ Return the dynamics of the metric using the variances (not the stds).

        For each field and each upper-triangular component (i, j) of its metric
        tensor, builds the equation D_t g_ij = E[ D_t( D_i eps * D_j eps ) ].
        The result is cached in ``self._metric_system``.
        """
        if self._metric_system is not None:
            return self._metric_system
        else:
            # Trends of the normalized errors, lhs -> rhs
            subs_epsilon_trends = {
                eq.args[0]: eq.args[1] for eq in self.epsilon_system
            }
            # Compute the pre-system
            metric_system = []
            for meta_field in self.fields.values():
                # Create the system for each component of the metric tensors
                # (upper triangle j >= i only: the metric is symmetric)
                for i,xi in enumerate(meta_field.spatial_coordinates):
                    for j, xj in enumerate(meta_field.spatial_coordinates):
                        if j<i:
                            continue
                        # Set the lhs: D_t g_ij
                        lhs = Derivative(meta_field.metric_func(i,j), time_symbol)
                        # Compute the rhs: E[ D_t(D_i eps D_j eps)]
                        # - Definition of the rhs
                        rhs = Expectation(
                            Derivative(
                                Derivative(meta_field.epsilon,xi)*Derivative(meta_field.epsilon,xj)
                            ,time_symbol).doit()
                        )
                        # Substitutes the trends of error
                        rhs = rhs.subs(subs_epsilon_trends).doit()
                        # Subs usual statistic properties of epsilon
                        rhs = self._apply_internal_closure(rhs)
                        # Forms the equation
                        equation = Eq(lhs, rhs.expand())
                        metric_system.append(equation)
            self._metric_system = metric_system
            return self._metric_system
@property
def in_metric(self):
""" Return de the pkf dynamics is term of the variance/metric tensor """
if self._system_in_metric is None:
# Compute the expectation system
full_system_in_metric = []
systems = [
self.expectation_system,
self.variance_system,
self.cross_covariance_system,
self.metric_system
]
for system in systems:
if system is None: # to handle the multivariate situation: None is for univariate
continue
# 1. Closes the system (by default closure is the empty dictionary {})
if self.closure != {}:
closed_system = []
for equation in system:
equation = equation.subs(self.closure).expand()
closed_system.append(equation)
system = closed_system
# 2. Clean from expectation of random fields
full_system_in_metric += self._clean_system_from_expectation(system)
self._system_in_metric = full_system_in_metric
return self._system_in_metric
    @property
    def in_diffusion(self):
        """ Return the PKF dynamics in terms of the variance/diffusion tensor.
        Description
        -----------
        The derivation relies on the identity $\nu g = I/2$ whose trend is $(\dot\nu)g+\nu(\dot g)=0$
        so that $\dot \nu = -\nu(\dot g)g^{-1}$. Again, with $\nu g =I/2$ leading to $g^{-1} = 2\nu$
        it results that
        $$\dot \nu = -2\nu(\dot g)\nu.$$
        """
        if self._system_in_diffusion is None:
            # NOTE(review): this property uses `self.time_coordinate` while the
            # sibling `in_aspect` uses the module-level `time_symbol` -- these
            # are presumably the same symbol; verify.
            t = self.time_coordinate
            # 1. Set dictionary for substitution
            # 1.1 Create the substitution dictionary for migration : metric -> diffusion
            #     (upper triangle only, the tensors being symmetric; g = (2 nu)^{-1})
            metric_to_diffusion = collections.OrderedDict()
            for mfield in self.fields.values():
                metric = upper_triangle(mfield.metric)
                metric_in_diffusion = mfield.diffusion.inv() * Rational(1 , 2)
                metric_in_diffusion = upper_triangle(metric_in_diffusion)
                for metric_ij, diffusion_ij in zip(metric, metric_in_diffusion):
                    metric_to_diffusion[metric_ij] = diffusion_ij
            # 1.2 Dictionary for the metric trends (closed by the external closure)
            subs_metric_trend = {}
            for equation in self._apply_closure(self.metric_system):
                lhs, rhs = equation.args
                subs_metric_trend[lhs] = rhs
            # 2. Migration of expectation and variance systems
            diffusion_system = []
            for system in [self.expectation_system, self.variance_system]:
                # -1- apply external closure
                system = self._apply_closure(system)
                # -2- switch from metric to diffusion
                for equation in system:
                    equation = equation.subs(metric_to_diffusion)
                    diffusion_system.append(equation)
            # 3. Computation of the system at a symbolic level:
            #    forms the equation $$ \pdt \nu = - 2\nu \pdt g \nu $$
            #    The computation of the system is made as a loop over univariate fields
            for mfield in self.fields.values():
                # Extract tensors
                diffusion = mfield.diffusion
                metric = mfield.metric
                # Computation of the rhs: $- 2\nu \pdt g \nu$
                trend_metric = Derivative(metric, t).doit()
                rhs = -Integer(2)*diffusion*trend_metric*diffusion
                rhs = rhs.doit()
                # Computation of the lhs: $\pdt \nu$
                lhs = Derivative(diffusion, t).doit()
                # Set the system by substituting terms (component by component,
                # upper triangle only)
                for lhs_term, rhs_term in zip(upper_triangle(lhs), upper_triangle(rhs)):
                    # Replace metric trend by its value
                    rhs_term = rhs_term.subs(subs_metric_trend)
                    rhs_term = rhs_term.doit()
                    rhs_term = rhs_term.simplify()
                    rhs_term = rhs_term.expand()
                    # Replace metric terms by their values
                    rhs_term = rhs_term.subs(metric_to_diffusion)
                    rhs_term = rhs_term.doit()
                    rhs_term = rhs_term.simplify()
                    rhs_term = rhs_term.expand()
                    # Set the equation
                    equation = Eq(lhs_term, rhs_term)
                    diffusion_system.append(equation)
            # 4. Clean Expectation of fields
            diffusion_system = self._clean_system_from_expectation(diffusion_system)
            self._system_in_diffusion = diffusion_system
        return self._system_in_diffusion
    @property
    def in_aspect(self):
        """ Return the PKF dynamics in terms of the variance/aspect tensor.
        Description
        -----------
        The derivation relies on the identity $\bs g = I$ whose trend is
        $(\dot\bs)g+\bs(\dot g)=0$
        so that $\dot \bs = -\bs(\dot g)g^{-1}$. Again, with $\bs g =I$
        leading to $g^{-1} = \bs$
        it results that
        $$\dot \bs = -\bs(\dot g)\bs.$$
        """
        if self._system_in_aspect is None:
            # 1. Set dictionary for substitution
            # 1.1 Create the substitution dictionary for migration : metric -> aspect
            #     (upper triangle only; the aspect is the inverse of the metric)
            metric_to_aspect = collections.OrderedDict()
            for mfield in self.fields.values():
                metric = upper_triangle(mfield.metric)
                # add 'aspect' in mfield
                metric_in_aspect = mfield.aspect.inv()
                metric_in_aspect = upper_triangle(metric_in_aspect)
                for metric_ij, aspect_ij in zip(metric, metric_in_aspect):
                    metric_to_aspect[metric_ij] = aspect_ij
            # 1.2 Dictionary for the metric trends (closed by the external closure)
            subs_metric_trend = {}
            for equation in self._apply_closure(self.metric_system):
                lhs, rhs = equation.args
                subs_metric_trend[lhs] = rhs
            # 2. Migration of expectation and variance systems
            aspect_system = []
            systems = [ self.expectation_system,
                        self.variance_system,
                        self.cross_covariance_system
                      ]
            for system in systems:
                if system is None: # to handle the multivariate situation: None is for univariate
                    continue
                # -1- apply external closure
                system = self._apply_closure(system)
                # -2- switch from metric to aspect
                for equation in system:
                    equation = equation.subs(metric_to_aspect)
                    aspect_system.append(equation)
            # 3. Computation of the system at a symbolic level:
            #    forms the equation $$ \pdt \bs = - \bs \pdt g \bs $$
            #    The computation of the system is made as a loop over univariate fields
            for mfield in self.fields.values():
                # Extract tensors
                aspect = mfield.aspect
                metric = mfield.metric
                # Computation of the rhs: $- \bs \pdt g \bs$
                trend_metric = Derivative(metric, time_symbol).doit()
                rhs = - aspect * trend_metric * aspect
                rhs = rhs.doit()
                # Computation of the lhs: $\pdt \bs$
                lhs = Derivative(aspect, time_symbol).doit()
                # Set the system by substituting terms (component by component)
                for lhs_term, rhs_term in zip(upper_triangle(lhs), upper_triangle(rhs)):
                    # Replace metric trend by its value
                    rhs_term = rhs_term.subs(subs_metric_trend)
                    rhs_term = rhs_term.doit()
                    rhs_term = rhs_term.simplify()
                    rhs_term = rhs_term.expand()
                    # Replace metric terms by their values
                    rhs_term = rhs_term.subs(metric_to_aspect)
                    rhs_term = rhs_term.doit()
                    rhs_term = rhs_term.simplify()
                    rhs_term = rhs_term.expand()
                    # Set the equation
                    equation = Eq(lhs_term, rhs_term)
                    aspect_system.append(equation)
            # 4. Clean Expectation of fields
            aspect_system = self._clean_system_from_expectation(aspect_system)
            self._system_in_aspect = aspect_system
        return self._system_in_aspect
@property
def closure(self):
return self._closure
def set_closure(self, closure):
# 1. Update the closure
self._closure.update(closure)
# 2. Reset systems in metric/diffusion/aspect
self._system_in_metric = None
self._system_in_diffusion = None
self._system_in_aspect = None
def _clean_system_from_expectation(self, system):
""" Eliminate expectation of random fields from equation to simplify the representation and to prepare
the translation in computational codes """
clean_expectation = {}
for mfield in self.fields.values():
clean_expectation[Expectation(mfield.random)] = mfield.value
new_system = []
for equation in system:
new_system.append( equation.subs(clean_expectation))
return new_system
    @property
    def internal_closure(self):
        """ Default closure for the statistics of the error fields.

        Merges the per-field (univariate) closures and, in multivariate
        situations, adds the cross-covariance definitions
        E[e1 e2] -> V12 and E[eps1 eps2] -> V12/(sigma1 sigma2).
        Computed once and cached in ``self._internal_closure``.
        """
        if self._internal_closure is None:
            self._internal_closure = {}
            # 1. Set univariate closure
            for meta_field in self.fields.values():
                self._internal_closure.update(meta_field.internal_closure)
            # 2. Set multivariate closure (only in multivariate situations)
            for couple in self.multivariate_couples:
                # 1) Extract fields and meta-data
                f1, f2 = couple
                mf1, mf2 = self.fields[f1], self.fields[f2]
                # 2) extract error fields and their statistics
                e1, e2 = mf1.error, mf2.error
                V1, V2 = mf1.variance, mf2.variance
                std1, std2 = sqrt(V1), sqrt(V2)
                eps1, eps2 = mf1.epsilon, mf2.epsilon
                # 3) Definition of the cross_variance
                # 3.a) Extract the cross covariance label
                V12 = self.get_covariance(f1,f2)
                # 3.b) Update internal closure
                self._internal_closure[Expectation(e1*e2)] = V12
                self._internal_closure[Expectation(eps1*eps2)] = V12/(std1*std2)
        return self._internal_closure
def get_covariance(self, f1, f2):
if all([field in self.fields for field in [f1,f2]]):
# 1. Get associated metafields
mf1 = self.fields[f1]
mf2 = self.fields[f2]
# 2. Selection of the coordinates
# .. todo:
# Modify the selection of the coordinates to account of two-point covariances between surface / volumique fields
# this could be made from the cup product of the coordinates mf1.coordinates and mf2.coordinates
# e.g. f1(t,x) f2(t,x,y) => V12(t,x,y) ??
cf1 = mf1.coordinates
cf2 = mf2.coordinates
assert cf1==cf2, ValueError("f1 and f2 have different coordinate system")
coordinates = cf1
return Function(self._label_covariance+f1.name+f2.name)(*coordinates)
else:
raise ValueError("f1 or f2 are not prognostic fields")
@property
def subs_tree(self):
"""
:return: substitution tree where only the internal closure is used
and which corresponds to the dictionnary of univariate terms E[D^alpha eps D^beta eps]
given as function of terms in E[eps D^gamma eps], for orders larger then 1.
"""
subs_tree = {}
for meta_field in self.fields.values():
# Extract the tree
meta_subs_tree = meta_field.subs_tree()
# Close the tree from the defined internal_closure
closed_subs_tree = {key:value.subs(self.internal_closure).doit()
for key, value in meta_subs_tree.items()}
# Update the tree
subs_tree.update(closed_subs_tree)
return subs_tree
@staticmethod
def check_univariate(expr):
""" Check the univariate terms from an expression """
expectations = expr.atoms(Expectation)
univariates = {}
for term in expectations:
univariate = UnivariateTerm.is_univariate(term)
if univariate is not None:
function = univariate.function
if function in univariates:
univariates[function].add(univariate)
else:
univariates[function] = {univariate}
return univariates
def _apply_closure(self, system):
""" Apply external closure """
if self.closure == {}:
return system
else:
closed_system = []
for equation in system:
equation = equation.subs(self.closure).expand()
closed_system.append(equation)
return closed_system
def _apply_internal_closure(self, rhs):
""" Apply the internal_closure on an expression (generally the rhs of an equation) """
epsilon_to_mfields = {}
for field,mfield in self.fields.items():
epsilon_to_mfields[mfield.epsilon] = mfield
# -1- Get univariate terms
univariates = self.check_univariate(rhs)
# -2- Retain epsilon's that are in self.fields !!
for epsilon in univariates:
if epsilon not in epsilon_to_mfields:
univariates.pop(epsilon)
# -3- Compute max degree for each univariate terms, by functions
max_degrees = {}
for epsilon in univariates:
max_degrees[epsilon] = max([univariate.degree for univariate in univariates[epsilon]])
# -4- Subs univariate terms E[D^alpha eps D^beta eps] by terms in E[eps D^gamma eps]
# ---- Replace only present terms ..
for epsilon in univariates:
max_degree = max_degrees[epsilon]
subs_tree = epsilon_to_mfields[epsilon].subs_tree(max_degree)
closed_subs_tree = {key:value.subs(self.internal_closure).doit() for key, value in subs_tree.items()}
# can extract only required terms..
# -- but has to handle terms like E(eps D^beta eps) which should not be replace and in tre..
rhs = rhs.subs(closed_subs_tree)
# -3- Closure
rhs = rhs.subs(self.internal_closure)
return rhs
@property
def unclosed_terms(self):
unclosed_terms = set()
# -1- Search in variance/metric system
systems = [self.variance_system, self.metric_system]
for system in systems:
for equation in system:
unclosed_terms.update(equation.args[1].atoms(Expectation))
# -2- Eliminates Expectation(field)
for field,mfield in self.fields.items():
unclosed_terms.discard(Expectation(mfield.random))
return unclosed_terms
class UnivariateTerm(object):
    """
    Handle terms in E[ D^alpha \eps D^beta \eps]

    Description
    -----------
    A term is *univariate* when a single function (one normalized error
    field eps) appears in it, i.e. terms of the form
    E( Dx^alpha eps Dx^beta eps ).
    """
    def __init__(self, term, function, alpha, beta, degree):
        """
        :param term: the full expectation expression
        :param function: the single function (eps) involved in the term
        :param alpha: derivative multi-index of the first factor
            (NOTE: the int 0 for terms E[eps D^beta eps], a tuple of
            Derivative arguments otherwise)
        :param beta: derivative multi-index of the second factor
        :param degree: total derivative order |alpha| + |beta|
        """
        self.term = term
        self.function = function
        self.alpha = alpha
        self.beta = beta
        self.degree = degree
    def __repr__(self):
        return f"Univariate term: {self.function},{self.alpha}, {self.beta}, {self.degree}"
    @classmethod
    def is_univariate(cls, term):
        """ Return a UnivariateTerm describing *term* when it is univariate,
        None otherwise (multivariate term, or powers larger than 2).

        Fix: the original compared freshly-built expressions with `is`
        (identity), which only succeeds when sympy's cache happens to return
        the very same object; structural equality (==) is used instead, and
        it also holds whenever `is` does.
        """
        # Get derivatives
        derivatives = term.atoms(Derivative)
        # Get functions
        functions = set()
        for derivative in derivatives:
            functions.update(derivative.atoms(Function))
        if len(functions) == 1:  # Univariate term
            function = functions.pop()
            if len(derivatives) == 1:  # E[eps D^beta eps] or E[(D^alpha eps)**2] or E[(D^alpha eps)**k] k>2
                derivative = derivatives.pop()
                if term == Expectation(function * derivative):  # E[eps D^beta eps]
                    alpha = 0
                    beta = derivative.args[1:]
                    degree = derivative.derivative_count
                    return cls(term, function, alpha, beta, degree)
                elif term == Expectation(derivative*derivative):  # E[(D^alpha eps)**2]
                    alpha = derivative.args[1:]
                    beta = derivative.args[1:]
                    degree = 2*derivative.derivative_count
                    return cls(term, function, alpha, beta, degree)
                else:
                    # E[(D^alpha eps)**k] k>2
                    return None
            elif len(derivatives) == 2:
                if term == Expectation(Mul(*derivatives)):
                    # -1- Compute the total degree
                    degree = 0
                    for derivative in derivatives:
                        degree += derivative.derivative_count
                    # -2- Extract the two derivatives
                    # NOTE(review): `derivatives` is a set, so which factor is
                    # labeled alpha and which beta is not deterministic.
                    alpha, beta = derivatives
                    alpha = alpha.args[1:]
                    beta = beta.args[1:]
                    return cls(term, function, alpha, beta, degree)
                else:
                    return None
            else:
                return None
        else:
            return None
def gamma_def(epsilon, k, i, j):
    """ Expectation term E[ d_k eps * d_i d_j eps ]. """
    first_order = Derivative(epsilon, k)
    second_order = Derivative(epsilon, i, j)
    return Expectation(first_order * second_order)
def gamma_subs(metric, k, i, j):
    """ Value substituted for gamma_def: (d_i g_kj + d_j g_ik - d_k g_ij) / 2. """
    raising = Derivative(metric(k, j), i) + Derivative(metric(i, k), j)
    lowering = Derivative(metric(i, j), k)
    return Rational(1 , 2) * (raising - lowering)
def skewness_def(epsilon, k, i, j):
    """ Third-order expectation term E[ eps * d_k d_i d_j eps ]. """
    third_order = Derivative(epsilon, k, i, j)
    return Expectation(epsilon * third_order)
def skewness_subs(metric, k, i, j):
    """ Value substituted for skewness_def: -(d_i g_kj + d_j g_ik + d_k g_ij) / 2. """
    total = (
        Derivative(metric(k, j), i)
        + Derivative(metric(i, k), j)
        + Derivative(metric(i, j), k)
    )
    return -Rational(1 , 2) * total
class Field(object):
    """ Meta-data wrapper around a prognostic field.

    Given an applied function ``field(t, x, ...)``, builds the associated
    random field, the normalized error ``epsilon`` and error ``e`` fields,
    the variance/std fields, and the metric/diffusion/aspect tensors together
    with their time trends.
    """
    def __init__(self, field):
        # The deterministic field itself and naming helpers
        self.value = field
        self.code = clean_latex_name(field.func)
        self.coordinates = field.args
        # Spatial coordinates: every coordinate except the time symbol
        self.spatial_coordinates = tuple([coord for coord in self.coordinates if coord is not time_symbol])
        self.coords_code = tuple(clean_latex_name(coord) for coord in self.coordinates)
        self.spatial_coords_code = tuple(clean_latex_name(coord) for coord in self.spatial_coordinates)
        # Associated random fields (the sample argument `omega` is appended)
        self.random = Function(str(field.func))(*field.args, omega)
        self.epsilon = Function('{\\varepsilon_{' + self.code + '}}')(*self.coordinates, omega)
        self.error = Function('{e_{' + self.code + '}}')(*self.coordinates, omega)
        # Associated statistics
        # -- Variance field
        self.variance = Function('{V_{' + self.code + '}}', positive=True)(*self.coordinates)
        # -- Standard deviation field
        self.std = Function('{\\sigma_{' + self.code + '}}', positive=True)(*self.coordinates)
        # -- Tensor fields (square matrices over the spatial coordinates)
        shape_tensor = 2 * (len(self.spatial_coordinates),)
        self.metric = Matrix(*shape_tensor, self.metric_func)
        self.diffusion = Matrix(*shape_tensor, self.nu_func)
        self.aspect = Matrix(*shape_tensor, self.aspect_func)
        # -- Time trends of the above quantities
        self.trends = {
            'field':Derivative(self.value,time_symbol),
            'variance':Derivative(self.variance,time_symbol),
            'error': Derivative(self.error, time_symbol),
            'epsilon': Derivative(self.epsilon, time_symbol),
            'metric': Derivative(self.metric, time_symbol),
            'diffusion': Derivative(self.diffusion, time_symbol),
            'aspect': Derivative(self.aspect, time_symbol),
        }
        # Lazy substitution tree for terms E[D^alpha eps D^beta eps]
        self.subs_tree = UnivariateTree(self.epsilon, self.spatial_coordinates)
        self._internal_closure = None
    @property
    def internal_closure(self):
        """ Default closure of the statistics of epsilon and the error.

        Encodes: centered error with E[e^2]=V, centered/normalized epsilon,
        flat correlation at the origin, the metric labeling, and the skewness
        terms expressed from the metric. Computed once and cached.
        """
        if self._internal_closure is None:
            # Computation of the default internal_closure
            closure = {}
            # -0- error is centered, with variance V
            closure.update({Expectation(self.error): Integer(0)})
            closure.update({Expectation(self.error ** Integer(2)): self.variance})
            # -1- epsilon is centered
            closure.update({Expectation(self.epsilon): Integer(0)})
            # -2- epsilon is normalized
            closure.update({Expectation(self.epsilon ** Integer(2)): Integer(1)})
            # -3- correlation is flat: E[eps d_i eps] = 0
            closure.update({Expectation(self.epsilon * Derivative(self.epsilon, coord)): Integer(0)
                            for coord in self.spatial_coordinates})
            # -4- metric is labeled: -E[eps d_i d_j eps] -> g_ij (upper triangle)
            closure.update({
                -self.metric_definition(i, j): -self.metric_func(i, j)
                for i in range(len(self.spatial_coordinates)) for j in range(i, len(self.spatial_coordinates))}
            )
            # -5- skewness is function of the metric
            skewness_closure = {}
            metric_func = lambda xi, xj: self.metric_func(self.spatial_coordinates.index(xi),
                                                          self.spatial_coordinates.index(xj))
            for partial_order in PartialOrderDerivative.all_of_degree(self.spatial_coordinates, 3):
                skewness_closure[skewness_def(self.epsilon, *partial_order.as_sequence)] = skewness_subs(metric_func,
                                                                                                        *partial_order.as_sequence)
            closure.update(skewness_closure)
            self._internal_closure = closure
        return self._internal_closure
    def index_code(self,i,j):
        """ Return the 'xi xj' label of component (i,j), symmetrized (i<=j). """
        if j<i:
            i,j = j,i
        return self.spatial_coords_code[i] + self.spatial_coords_code[j]
    def metric_func(self,i,j):
        """ Applied function g_{field,ij} standing for a metric component. """
        return Function('{g_{' + self.code + ',' + self.index_code(i, j) + '}}', real=True)(*self.coordinates)
    def metric_definition(self,i,j):
        """ Definition of the metric component: g_ij = -E[eps d_i d_j eps]. """
        return -Expectation(self.epsilon * Derivative(self.epsilon, self.spatial_coordinates[i], self.spatial_coordinates[j]))
    def nu_func(self,i,j):
        """ Applied function nu_{field,ij} standing for a diffusion component. """
        return Function('{\\nu_{' + self.code + ',' + self.index_code(i, j) + '}}', real=True)(
            *self.coordinates)
    def aspect_func(self,i,j):
        """ Applied function s_{field,ij} standing for an aspect component. """
        return Function('{s_{' + self.code + ',' + self.index_code(i, j) + '}}', real=True)(
            *self.coordinates)
class PartialOrderDerivative(object):
    """ Handler for partial order derivatives (derivative multi-indices).

    A partial order over coordinates (x0,..,xn) may be encoded as:
      - a tuple of ints, one per coordinate, e.g. (2, 0, 1);
      - (coordinate, order) couples, e.g. [(x0, 2), (x2, 1)] or just (x0, 2);
      - a sequence of repeated coordinates, e.g. [x0, x0, x2].
    All encodings are normalized internally to the tuple form.
    """
    def __init__(self, coordinates, partial_orders):
        """
        :param coordinates: tuple of coordinate symbols
        :param partial_orders: partial orders, in any supported encoding
        """
        self._coordinates = coordinates
        self._as_tuple = self._builder(partial_orders)
        # Total derivative order |alpha|
        self._degree = sum(self._as_tuple)
    @property
    def coordinates(self):
        return self._coordinates
    @property
    def as_tuple(self):
        """ Tuple encoding: one int per coordinate. """
        return self._as_tuple
    @property
    def as_couples(self):
        """ Couple encoding: [(coordinate, order), ...] for all coordinates. """
        return [(coord, order) for coord, order in zip(self._coordinates, self._as_tuple)]
    @property
    def as_sequence(self):
        """ Sequence encoding: each coordinate repeated by its order. """
        # e.g. a list like [x0, x1, x1]
        sequence = []
        for x_i, alpha_i in zip(self._coordinates, self._as_tuple):
            if alpha_i > 0:
                sequence += alpha_i * [x_i]
        return sequence
    def _builder(self, partial_orders):
        """ Detect the encoding of *partial_orders* and normalize it to a tuple. """
        builder_names = ['tuple', 'couples', 'sequence']
        test = [getattr(self, 'is_as_' + name)(partial_orders) for name in builder_names]
        if any(test):
            name = builder_names[test.index(True)]
            return getattr(self, '_set_from_' + name)(partial_orders)
        else:
            raise TypeError(f"{partial_orders} is not a valid derivative order for coordinates {self._coordinates}")
    def is_as_tuple(self, partial_orders):
        """ True when *partial_orders* is a tuple of ints, one per coordinate. """
        if len(partial_orders) != len(self._coordinates):
            return False
        if isinstance(partial_orders, tuple):
            test = [isinstance(alpha_i, int) for alpha_i in partial_orders]
            return False not in test
        return False
    def is_as_couples(self, partial_orders):
        """ True when *partial_orders* is a (coordinate, order) couple or a list of such.

        NOTE(review): when the argument is neither a tuple nor a list the
        method falls through and returns None (falsy) -- presumably intended.
        """
        # Can not be empty (ie can not be [])
        if partial_orders == []:
            return False
        try:
            # can be a simple couple (x_i,alpha_i)
            if isinstance(partial_orders, tuple):
                xi, alpha_i = partial_orders
                return xi in self._coordinates and isinstance(alpha_i, int)
            # can be a list of as_couples [.. (x_i,alpha_i) ..]
            if isinstance(partial_orders, list):
                test = [xi in self._coordinates and isinstance(alpha_i, int) for xi, alpha_i in partial_orders]
                return False not in test
        except:
            return False
    def is_as_sequence(self, partial_orders):
        """ True when *partial_orders* is a (possibly empty) sequence of coordinates. """
        if partial_orders == []:
            return True
        test = [xi in self._coordinates for xi in partial_orders]
        return False not in test
    def _set_from_tuple(self, partial_orders_tuple):
        """ Tuple encoding is already in normalized form. """
        return partial_orders_tuple
    def _set_from_couples(self, couples):
        """ Accumulate (coordinate, order) couples into the tuple encoding. """
        orders = len(self._coordinates) * [0]
        if isinstance(couples, tuple):
            xi, alpha_i = couples
            orders[self._coordinates.index(xi)] += alpha_i
        else:
            for xi, alpha_i in couples:
                orders[self._coordinates.index(xi)] += alpha_i
        return tuple(orders)
    def _set_from_sequence(self, sequence):
        """ Count coordinate repetitions to obtain the tuple encoding. """
        return tuple([sequence.count(xi) for xi in self._coordinates])
    def __eq__(self, rhs):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable; currently they are never used as dict keys/set members.
        return self._as_tuple == rhs._as_tuple and self._coordinates == rhs._coordinates
    def new(self, partial_orders):
        """ Build a new instance over the same coordinates. """
        return PartialOrderDerivative(self._coordinates, partial_orders)
    def copy(self):
        """ Duplicate this partial order. """
        return PartialOrderDerivative(self._coordinates, self._as_tuple)
    @classmethod
    def all_of_degree(cls, coordinates, derivative_order):
        """ Return partial order derivative of all derivative at a given degree
        Description
        -----------
        The algorithm employs dynamical programming based on sets to avoid duplicate outputs.
        Each generation is computed from the previous by moving the partial order of the first coordinate toward the others.
        Example
        -------
        >>> coords = symbols(' '.join(['x'+str(i) for i in range(3)]))
        >>> for index in PartialOrderDerivative.all_of_degree(coords,4):
        >>>     print(index.as_couples)
        [(x0, 4), (x1, 0), (x2, 0)]
        [(x0, 3), (x1, 0), (x2, 1)]
        [(x0, 3), (x1, 1), (x2, 0)]
        ...
        [(x0, 0), (x1, 0), (x2, 4)]

        (orders are ints; the enumeration order within a generation depends
        on set iteration order and may vary between runs)
        """
        import numpy as np
        nb_coordinates = len(coordinates)
        # First generation: the whole order carried by the first coordinate
        start = np.zeros(nb_coordinates, dtype=int)
        start[0] = derivative_order
        start = tuple(start)
        fathers = {start}
        generation = 0
        while True:
            # Exit if all generation has been done.
            if generation > derivative_order:
                break
            # Yield partial order for derivative [ .. (xi,partial_order_i) .. ] for all i's
            for partial_orders in fathers:
                # Convert numpy ints back to plain ints before building
                partial_orders = tuple([int(order) for order in partial_orders])
                yield PartialOrderDerivative(coordinates, partial_orders)
            # Compute new generation
            generation += 1
            sons = set()
            for father in fathers:
                # takes one ball in 0 and distribute it to others
                father = np.asarray(father)
                flux = np.zeros(nb_coordinates, dtype=int)
                flux[0] = -1
                for move in range(1, nb_coordinates):
                    flux[move] = 1
                    son = father + flux
                    sons.add(tuple(son))
                    flux[move] = 0
            fathers = sons
class UnivariateTree(object):
    """ Compute and handle the substitution tree for terms E[D^alpha epsilon D^beta epsilon]
    of degree |alpha|+|beta| <= max_degree
    Description
    -----------
    Dynamical structure that returns the tree at a given degree from stored levels.
    It replaces terms in E[D^alpha epsilon D^beta epsilon] by terms in E[epsilon D^gamma epsilon]
    """
    def __init__(self, epsilon, spatial_coordinates, self_substitute = True):
        """
        :param epsilon: the normalized error field
        :param spatial_coordinates: tuple of spatial coordinate symbols
        :param self_substitute: when True, each new entry is itself rewritten
            with the already-computed tree so entries only involve terms in
            E[eps D^gamma eps]
        """
        self.epsilon = epsilon
        self.spatial_coordinates = spatial_coordinates
        self._self_substitute = self_substitute
        # Highest degree computed so far, with per-degree levels cached
        self._degree = 0
        self._levels = {}
        self._tree = {}
    def __call__(self, degree=None):
        """ Return a tree of degree 'degree' (defaults to the current degree).

        Missing levels are computed lazily, then all levels up to *degree*
        are merged into a single substitution dictionary.
        """
        if degree is None:
            degree = self._degree
        if degree > self._degree:
            # Compute the tree until degree
            for add_degree in range(self._degree, degree):
                self._add_tree_level()
        tree = {}
        for level, level_dico in self._levels.items():
            if level>degree:
                continue
            tree.update(level_dico)
        return tree
    def _add_tree_level(self):
        """ Compute and cache the next degree level of the tree. """
        self._degree += 1
        degree = self._degree
        level = {}
        # Compute all terms E[D^alpha eps D^beta eps] such as |alpha|+|beta| = degree, with 1<=|alpha|<= degree//2
        for alpha_degree in range(1, degree // 2 + 1):
            beta_degree = degree - alpha_degree
            for alpha in PartialOrderDerivative.all_of_degree(self.spatial_coordinates, alpha_degree):
                # sub_alpha is alpha with its first derivative peeled off
                sub_alpha = alpha.new(alpha.as_sequence[1:])
                for beta in PartialOrderDerivative.all_of_degree(self.spatial_coordinates, beta_degree):
                    term = Derivative(self.epsilon, *alpha.as_couples) * Derivative(self.epsilon, *beta.as_couples)
                    term = Expectation(term)
                    # Leibniz rule: isolate the target term in
                    # E[ d_first( D^sub_alpha eps * D^beta eps ) ]
                    lhs = Derivative(Derivative(self.epsilon, *sub_alpha.as_couples) * Derivative(self.epsilon, *beta.as_couples), alpha.as_sequence[0])
                    rhs = lhs.doit()
                    equation = Eq(Expectation(lhs), Expectation(rhs))
                    equation = equation.isolate(term)
                    # Add into tree
                    subs_term = equation.args[1]
                    if self._self_substitute:
                        subs_term = subs_term.subs(self._tree)
                    self._tree[term] = subs_term
                    level[term] = subs_term
        self._levels[degree] = level
    @property
    def degree(self):
        """ Highest degree computed so far. """
        return self._degree
class Closure(object):
    """
    Exploration of closure from the Taylor expansion of heterogeneous correlation functions,
    written in the aspect tensor approximation (aspect tensor of the correlation
    approximately equals the local parameter tensor `s` of the correlation)
    \begin{equation}
    \rho(x,y) = \frac{|s_x|^{1/4}|s_y|^{1/4}}{|\frac{1}{2}(s_x+s_y)|^{1/2}}
    \exp\left(-||x-y||^2_{(s_x+s_y)^{-1}}\right)
    \end{equation}
    """
    def __init__(self, mfield):
        """
        :param mfield: a Field instance, or a bare field which is then
            wrapped into a Field
        """
        if not isinstance(mfield, Field):
            mfield = Field(mfield)
        self.mfield = mfield
        self.spatial_coordinates = mfield.spatial_coordinates
        self._x = self.spatial_coordinates
        # Separation symbols dx_i between the two points of the correlation
        self._dx = symbols(' '.join([f"d{xi}" for xi in self.spatial_coordinates]))
        if not isinstance(self._dx, tuple):
            # symbols() returns a bare symbol in 1D: normalize to a tuple
            self._dx = (self._dx,)
        self.epsilon = mfield.epsilon
        self.metric = mfield.metric
        self.diffusion = mfield.diffusion
        self.aspect = mfield.aspect
        # Lazy caches; _order is the highest Taylor order computed so far
        self._order = 0
        self._taylor = None
        self._in_metric = None
        self._in_diffusion = None
        self._in_aspect = None
        self._closure_by_order = {}
    def correlation_in_diffusion(self, px, py):
        """
        Prototype of correlation function with parameters defined in the so-called
        diffusion tensor.
        """
        from sympy import exp
        cpx = {xi: pxi for xi, pxi in zip(self.spatial_coordinates, px)}
        cpy = {yi: pyi for yi, pyi in zip(self.spatial_coordinates, py)}
        nux = self.diffusion.subs(cpx)
        nuy = self.diffusion.subs(cpy)
        # Normalization |nu_x|^{1/4} |nu_y|^{1/4} / |(nu_x+nu_y)/2|^{1/2}
        normalization = (nux.det() ** Rational(1 , 4) * nuy.det() ** Rational(1 , 4)) / (
                Rational(1 , 2) * (nux + nuy)).det() ** Rational(1 , 2)
        vx = Matrix(px)
        vy = Matrix(py)
        dxy = vx - vy
        # Quadratic form of the separation in the (nu_x+nu_y)^{-1} norm
        dot_prod = dxy.T @ (nux + nuy).inv() @ dxy
        h_correlation = exp(-Rational(1 , 2) * dot_prod[0])
        return normalization * h_correlation
    def correlation(self, px, py):
        """ Heterogeneous Gaussian correlation function in aspect tensor
        \begin{equation}
        \rho(x,y) = \frac{|s_x|^{1/4}|s_y|^{1/4}}{|\frac{1}{2}(s_x+s_y)|^{1/2}}
        \exp\left(-||x-y||^2_{(s_x+s_y)^{-1}}\right)
        \end{equation}
        (see eg Pannekoucke 2021)
        """
        from sympy import exp
        cpx = {xi: pxi for xi, pxi in zip(self.spatial_coordinates, px)}
        cpy = {yi: pyi for yi, pyi in zip(self.spatial_coordinates, py)}
        sx = self.aspect.subs(cpx)
        sy = self.aspect.subs(cpy)
        # Normalization |s_x|^{1/4} |s_y|^{1/4} / |(s_x+s_y)/2|^{1/2}
        normalization = (sx.det() ** Rational(1 , 4) * sy.det() ** Rational(1 , 4)) / (
                Rational(1 , 2) * (sx + sy)).det() ** Rational(1 , 2)
        vx = Matrix(px)
        vy = Matrix(py)
        dxy = vx - vy
        # Quadratic form of the separation in the (s_x+s_y)^{-1} norm
        dot_prod = dxy.T @ (sx + sy).inv() @ dxy
        h_correlation = exp(-dot_prod[0])
        return normalization * h_correlation
    def taylor(self, order):
        """ Compute the Taylor expansion until degree 'order' (cached). """
        if self._order < order:
            px = self._x
            # Second point y = x + dx
            py = tuple([xi + dxi for xi, dxi in zip(self._x, self._dx)])
            taylor = self.correlation(px, py)
            # Expand in each separation symbol and drop the remainder O(.)
            for dxi in self._dx:
                taylor = taylor.series(dxi, 0, order).removeO()
            taylor = remove_eval_derivative(taylor)
            self._taylor = taylor
            self._order = order
        return self._taylor
    def closure_by_order(self, order):
        """
        Compute the closure for terms E[eps * D^alpha eps]
        :param order: maximal derivative order |alpha| considered
        :return: dict mapping each partial order to the closure dictionary
            {E[eps * D^alpha eps]: coefficient} extracted from the Taylor
            expansion at that order.
        """
        order = order + 1
        if self._order < order:
            # 1. Compute terms until order
            factorial = 1
            for partial_order in range(order):
                partial_order = int(partial_order)
                closure = {}
                if partial_order != 0:
                    factorial *= partial_order
                for alpha in PartialOrderDerivative.all_of_degree(self.spatial_coordinates, partial_order):
                    # -1- Extract taylor coefficient of dx^alpha
                    coeff_alpha = self.taylor(order).expand()
                    for xi, dxi in zip(self._x,self._dx):
                        alpha_i = alpha.as_sequence.count(xi)
                        coeff_alpha = coeff_alpha.coeff(dxi, alpha_i)
                    # -2- Associate the coefficient with the term E[eps D^alpha eps]
                    term = Expectation(self.epsilon * Derivative(self.epsilon, *alpha.as_couples))
                    closure[term] = Integer(factorial) * coeff_alpha
                self._closure_by_order[partial_order] = closure
            # 2. Update the order
            self._order = order
        return self._closure_by_order
    def closure(self, order):
        """ Merge all per-order closures up to *order* into a single dict. """
        closure = {}
        for elm in self.closure_by_order(order).values():
            closure.update(elm)
        return closure
    def in_metric_by_order(self, order):
        """ Express the per-order closures in terms of the metric tensor. """
        subs_closure_by_order = self.closure_by_order(order)
        g_dico = {} # denotes the metric tensor
        s_dico = {} # denotes the aspect tensor (here the parameter of the formulation)
        # -1- Suppress partial derivatives in metric terms
        second_order = 2
        system_s_to_g = []
        for alpha in PartialOrderDerivative.all_of_degree(self.spatial_coordinates, second_order):
            # -1- get term
            term = Expectation(self.epsilon * Derivative(self.epsilon, *alpha.as_couples))
            # -2- Eliminate derivatives
            subs_term = subs_closure_by_order[second_order][term]
            subs_term = subs_term.subs({derivative: 0 for derivative in subs_term.atoms(Derivative)})
            subs_closure_by_order[second_order][term] = subs_term
            # -3- Label the metric/aspect components of this term
            if len(self.spatial_coordinates) > 1:
                i, j = alpha.as_tuple[0], alpha.as_tuple[1]
            else:
                i, j = 0, 0
            g_dico[term] = -self.mfield.metric_func(i, j)
            s_dico[term] = self.mfield.aspect_func(i, j)
            system_s_to_g.append(Eq(g_dico[term], subs_term))
        # -2- express the aspect as a function of the metric notation
        s_to_g = sympy.solve(system_s_to_g, *s_dico.values())
        # -3- Replace in all subs_closure
        for order in subs_closure_by_order:
            for term in subs_closure_by_order[order]:
                subs_closure_by_order[order][term] = subs_closure_by_order[order][term].subs(s_to_g).doit()
        return subs_closure_by_order
    def in_diffusion(self, order):
        """ Closure in terms of the diffusion tensor (not implemented). """
        raise NotImplementedError
    def in_aspect(self, order):
        """ Closure in terms of the aspect tensor (not implemented). """
        raise NotImplementedError
class LocGaussianClosure(Closure):
    """
    Closure associated with the local Gaussian correlation, written either in
    the local metric tensor or in the aspect tensor.
    """
    def correlation(self, px, py, metric=True):
        """ Local Gaussian correlation between points *px* and *py*.

        :param px: coordinates of the first point (sequence)
        :param py: coordinates of the second point (sequence)
        :param metric: when True the quadratic form is the local metric g_x;
            otherwise it is (2 s_x)^{-1} with s_x the local aspect tensor
        :return: exp(-(x-y)^T A (x-y)) with A as above (no normalization
            factor, contrary to Closure.correlation)
        """
        from sympy import exp
        # Evaluate the local tensor at the first point only (the original also
        # built an unused substitution dictionary at py -- removed).
        at_x = {xi: pxi for xi, pxi in zip(self.spatial_coordinates, px)}
        dxy = Matrix(px) - Matrix(py)
        if metric:
            quad_form = self.metric.subs(at_x)
        else:
            quad_form = (Integer(2) * self.aspect.subs(at_x)).inv()
        dot_prod = dxy.T @ quad_form @ dxy
        return exp(-dot_prod[0])
| StarcoderdataPython |
4800436 | #-*- coding: utf-8 -*-
""" Wakeword listener package """ | StarcoderdataPython |
150617 | from luts import kana_to_romaji, romaji_to_kana, simple_kana_to_romaji
import random
import os
import time
## Term info ##
# Query the terminal geometry via `stty size` (prints "rows cols").
# NOTE(review): os.popen + stty assumes a POSIX tty -- this fails on Windows
# or when stdout is not a terminal; verify before porting.
term = os.popen('stty size', 'r').read().split()
# Keep a 5-column safety margin off the reported width
term_height, term_width = int(term[0]), int(term[1]) - 5
# term_width = 30  # manual override -- uncomment if the detected width misbehaves
# Blank lines reserved between the ticker and the input line
gap = 3
# Number of kana drawn per round (see pickKana(count) below)
count = 50
max_count = 50
# Lookup table used for the round; keys are drawn and shown in the ticker
kana_set = simple_kana_to_romaji
## Functions ##
def acceptchar(chars):
    """Convert typed romaji to kana, or pass kana input through unchanged.

    Returns the mapped kana when *chars* is a key of ``kana_set``, the input
    itself when it already starts with a non-ASCII (kana) character, and ""
    otherwise (unknown romaji or empty input).

    NOTE(review): ``kana_set`` is bound to ``simple_kana_to_romaji`` but is
    used here as a romaji->kana mapping -- confirm the direction against the
    ``luts`` module.
    """
    # Guard: empty input would crash the first-character probe below.
    if not chars:
        return ""
    # Codepoints above 'z' (122) are treated as kana already -- echo them back.
    if ord(chars[0]) > 122:
        return chars
    # Romaji lookup; unknown sequences map to "" (caller treats as a miss).
    if chars in kana_set:
        return kana_set[chars]
    else:
        return ""
def setTicker(line):
    """Sets the ticker line with data. Truncates if screen width < data len.

    Renders the remaining kana on the line above the prompt, with the current
    (first) kana highlighted in green. Uses ANSI escape sequences and the
    module-level ``term_width`` / ``max_count`` globals.
    """
    # Move to saved position
    print("\u001b[u", end="", flush=True)
    # Move up 1 line
    print("\u001b[1A", end="", flush=True)
    # Truncate the line to the terminal width; the trailing [1:] drops the
    # first kana, which is printed separately (highlighted) below.
    # NOTE(review): the /2 presumably accounts for double-width kana glyphs
    # -- confirm on the target terminal.
    trunc_line = "".join(line)[: int(min(term_width, max_count) / 2)][1:]
    first_char = line[0]
    # Clear line
    print(">" + " " * (term_width - 1), end="\r", flush=True)
    # Display the first char in green
    print("\u001b[37m\u001b[42;1m" + first_char, end="", flush=True)
    # Display the rest of the ticker normally
    print("\u001b[0m" + trunc_line + "\r", end="", flush=True)
    # Move down 1
    print("\u001b[1B", end="", flush=True)
def handleInput():
    """Prompt for one line of input and restore the cursor afterwards.

    Returns the stripped input, or a single space when nothing was typed
    (a space is the caller's "no input" sentinel).
    """
    # Blank out the prompt line before reading.
    print(">" + " " * (term_width - 1), end="\r", flush=True)
    entered = input(">").strip()
    # Never return an empty string.
    entered = entered or " "
    # input() emitted a newline; move the cursor back up one line.
    print("\u001b[1A", end="", flush=True)
    return entered
def pickKana(n, pool=None):
    """Return *n* kana keys drawn with replacement from *pool*.

    Parameters
    ----------
    n : int
        Number of kana to draw; 0 yields an empty list.
    pool : sequence, optional
        Candidates to draw from; defaults to the keys of the module-level
        ``kana_set`` (added as an injectable parameter — backward
        compatible with the old single-argument form).
    """
    if pool is None:
        pool = list(kana_set.keys())
    return [random.choice(pool) for _ in range(n)]
## App ##
# Interactive drill loop: shows a ticker of kana, reads romaji from the user,
# and scores the session.  Special inputs: "ー" skip, "=" quit, "+" reveal
# answer (costs one point).
if __name__ == "__main__":
    successes = 0
    # Pick {count} random hiragana
    challenge_set = pickKana(count)
    # newlines for ticker and input
    print("\n" * (3 + gap), end="")
    # Move up
    print(f"\u001b[{gap + 1}A", end="", flush=True)
    # Print cheat line
    print("AKS TNH MYR W\r", end="", flush=True)
    # Move up 1
    print(f"\u001b[1A", end="", flush=True)
    # Save the cursor position
    print("\u001b[s", end="", flush=True)
    start_time = time.time()
    problems = []
    completed = []
    # Run until we are out of kana
    while len(challenge_set) > 0:
        # Display the ticker
        setTicker(challenge_set)
        # read input
        data = handleInput()
        current_kana = challenge_set[0]
        if data == " ":
            continue
        # Check if we should skip
        if data == "ー":
            challenge_set = challenge_set[1:]
            completed.append((current_kana, False))
            continue
        # Check if we should stop the game
        if data == "=":
            break
        # Check if we should explain
        if data == "+":
            # Move to saved position
            print("\u001b[u", end="", flush=True)
            # Move down 1
            print("\u001b[1B", end="", flush=True)
            # Print the options
            print(" \r", end="", flush=True)
            print(kana_set[current_kana], end="", flush=True)
            # NOTE(review): revealing the answer can drive `successes`
            # negative, which also skews the kps computation below — confirm
            # this is intended.
            successes -= 1
            problems.append(current_kana)
            continue
        # Get input as hiragana
        kana = acceptchar(data)
        # Check if we should move on
        if kana == current_kana:
            successes += 1
            completed.append((current_kana, True))
        else:
            problems.append(current_kana)
            completed.append((current_kana, False))
        challenge_set = challenge_set[1:]
    # Game end
    # Move to saved position
    print("\u001b[u", end="", flush=True)
    # Move up 1 lines
    print("\u001b[1A", end="", flush=True)
    if successes != 0:
        # Elapsed whole seconds divided by correct answers, rounded again.
        kps = round(round(time.time() - start_time) / successes)
    else:
        kps = 0
    # Print kana list with problems highlighted
    print(" " * max_count + "\r", end="", flush=True)
    for kana in completed:
        # Should this be Red BG?
        if not kana[1]:
            print("\u001b[37m\u001b[41;1m" + kana[0], end="", flush=True)
        else:
            print("\u001b[0m" + kana[0], end="", flush=True)
    print("\u001b[0m")
    print(
        f"Completed {successes}/{count} in ~{round(time.time() - start_time)} seconds ({kps} seconds per kana)")
    # print(f"You had issues with:\n{problems}")
| StarcoderdataPython |
1606819 | #-------------------------------------------------------------------------------
# A mock ICF kind 'o problem.
#-------------------------------------------------------------------------------
from math import *
from Spheral import *
from SpheralTestUtilities import *
from SpheralGnuPlotUtilities import *
from SpheralVisitDump import dumpPhysicsState
from findLastRestart import findLastRestart
from GzipFileNodeGenerator import *
# Load the mpi module if we're parallel.
import loadmpi
mpi, rank, procs = loadmpi.loadmpi()
title("2-D ICF test problem")
#-------------------------------------------------------------------------------
# Generic problem parameters
#-------------------------------------------------------------------------------
commandLine(NodeListConstructor = AsphNodeList2d,
rhoAir = 1.4,
rhoDrive = 0.1,
rhoShell = 1.0,
PAir = 1.0,
PDrive = 100.0,
PShell = 1.0,
gammaAir = 1.4,
gammaDrive = 1.4,
gammaShell = 1.6,
mu = 1.0,
Qconstructor = MonaghanGingoldViscosity2d,
#Qconstructor = TensorMonaghanGingoldViscosity2d,
Cl = 1.0,
Cq = 0.75,
Qlimiter = False,
balsaraCorrection = False,
epsilon2 = 1e-2,
hmin = 1e-5,
hmax = 0.5,
hminratio = 0.1,
nPerh = 2.01,
cfl = 0.5,
XSPH = True,
epsilonTensile = 0.0,
nTensile = 8,
goalTime = 0.06,
dtSample = 0.01,
dt = 0.0001,
dtMin = 1.0e-6,
dtMax = 0.1,
dtGrowth = 2.0,
maxSteps = None,
statsStep = 10,
smoothIters = 0,
HEvolution = Hydro2d.HEvolutionType.IdealH,
sumForMassDensity = Hydro2d.MassDensityType.RigorousSumDensity,
compatibleEnergy = True,
restoreCycle = None,
restartStep = 1000,
dataDirBase = "icf-2d",
graphics = True,
)
epsAir = PAir/((gammaAir - 1.0)*rhoAir)
epsDrive = PDrive/((gammaDrive - 1.0)*rhoDrive)
epsShell = PShell/((gammaShell - 1.0)*rhoShell)
dataDir = dataDirBase
restartDir = dataDir + "/restarts"
visitDir = dataDir + "/visit"
restartBaseName = restartDir + "/icf-2d"
#-------------------------------------------------------------------------------
# Check if the necessary output directories exist. If not, create them.
#-------------------------------------------------------------------------------
import os, sys
if mpi.rank == 0:
if not os.path.exists(restartDir):
os.makedirs(restartDir)
if not os.path.exists(visitDir):
os.makedirs(visitDir)
mpi.barrier()
#-------------------------------------------------------------------------------
# If we're restarting, find the set of most recent restart files.
#-------------------------------------------------------------------------------
restoreCycle = findLastRestart(restartBaseName)
#-------------------------------------------------------------------------------
# Material properties.
#-------------------------------------------------------------------------------
eosAir = GammaLawGasCGS2d(gammaAir, mu)
eosDrive = GammaLawGasCGS2d(gammaDrive, mu)
eosShell = GammaLawGasCGS2d(gammaShell, mu)
#-------------------------------------------------------------------------------
# Interpolation kernels.
#-------------------------------------------------------------------------------
WT = TableKernel2d(BSplineKernel2d(), 1000)
WTPi = TableKernel2d(BSplineKernel2d(), 1000)
output("WT")
output("WTPi")
kernelExtent = WT.kernelExtent()
#-------------------------------------------------------------------------------
# Make the NodeLists.
#-------------------------------------------------------------------------------
# NodeLists for the three material regions, each with its own equation of
# state.  Fix: nodesDrive and nodesShell were previously built with eosAir,
# while eosDrive and eosShell were constructed above and left unused — a
# copy-paste bug that gave every region the air gamma-law EOS.
nodesAir = NodeListConstructor("air", eosAir, WT, WTPi)
nodesDrive = NodeListConstructor("drive", eosDrive, WT, WTPi)
nodesShell = NodeListConstructor("shell", eosShell, WT, WTPi)
nodeSet = [nodesAir, nodesDrive, nodesShell]
for nodes in nodeSet:
nodes.XSPH = XSPH
nodes.hmin = hmin
nodes.hmax = hmax
nodes.hminratio = hminratio
nodes.nodesPerSmoothingScale = nPerh
nodes.epsilonTensile = epsilonTensile
nodes.nTensile = nTensile
output("nodes.name()")
output(" nodes.hmin")
output(" nodes.hmax")
output(" nodes.hminratio")
output(" nodes.nodesPerSmoothingScale")
output(" nodes.epsilonTensile")
output(" nodes.nTensile")
output(" nodes.XSPH")
#-------------------------------------------------------------------------------
# Construct the neighbor objects.
#-------------------------------------------------------------------------------
_cache = []
for nodes in nodeSet:
neighbor = TreeNeighbor2d(nodes,
kernelExtent = kernelExtent)
nodes.registerNeighbor(neighbor)
_cache.append(neighbor)
#-------------------------------------------------------------------------------
# Set the node properties.
#-------------------------------------------------------------------------------
outerNodeFlags = IntField2d("outer node flags", nodesDrive)
if restoreCycle is None:
filename = "icf-10-20-8x90.gz"
generatorAir = GzipFileNodeGeneratorRZto2D(filename, "Inner", nPerh,
SPH = (type(nodesAir) == SphNodeList2d))
generatorDrive = GzipFileNodeGeneratorRZto2D(filename, "Driver", nPerh,
SPH = (type(nodesDrive) == SphNodeList2d),
extraFields = ["Driver"])
generatorShell = GzipFileNodeGeneratorRZto2D(filename, "Shell", nPerh,
SPH = (type(nodesShell) == SphNodeList2d))
# Get the outer node flags.
nodesDrive.numInternalNodes = generatorDrive.numLocalNodes()
for i in xrange(generatorDrive.numLocalNodes()):
outerNodeFlags[i] = int(generatorDrive.outerNodes[i] + 0.1)
from ParMETISDistributeNodes import distributeNodes2d
distributeNodes2d((nodesAir, generatorAir),
(nodesDrive, generatorDrive),
(nodesShell, generatorShell))
for nodes in nodeSet:
output("nodes.name()")
output(" mpi.reduce(nodes.numInternalNodes, mpi.MIN)")
output(" mpi.reduce(nodes.numInternalNodes, mpi.MAX)")
output(" mpi.reduce(nodes.numInternalNodes, mpi.SUM)")
# Set node specific thermal energies
nodesAir.specificThermalEnergy(ScalarField2d("tmp", nodesAir, epsAir))
nodesDrive.specificThermalEnergy(ScalarField2d("tmp", nodesDrive, epsAir))
nodesShell.specificThermalEnergy(ScalarField2d("tmp", nodesShell, epsAir))
#-------------------------------------------------------------------------------
# Construct a DataBase.
#-------------------------------------------------------------------------------
db = DataBase2d()
for nodes in nodeSet:
db.appendNodeList(nodes)
output("db")
output("db.numNodeLists")
output("db.numFluidNodeLists")
#-------------------------------------------------------------------------------
# Construct the artificial viscosity.
#-------------------------------------------------------------------------------
q = Qconstructor(Cl, Cq)
q.epsilon2 = epsilon2
q.limiter = Qlimiter
q.balsaraShearCorrection = balsaraCorrection
output("q")
output("q.Cl")
output("q.Cq")
output("q.epsilon2")
output("q.limiter")
output("q.balsaraShearCorrection")
#-------------------------------------------------------------------------------
# Construct the hydro physics object.
#-------------------------------------------------------------------------------
hydro = Hydro2d(WT, WTPi, q, compatibleEnergy)
hydro.cfl = cfl
hydro.HEvolution = HEvolution
hydro.sumForMassDensity = sumForMassDensity
hydro.HsmoothMin = hmin
hydro.HsmoothMax = hmax
hydro.HratioMin = hminratio
output("hydro")
output("hydro.cfl")
output("hydro.HEvolution")
output("hydro.sumForMassDensity")
output("hydro.HsmoothMin")
output("hydro.HsmoothMax")
output("hydro.compatibleEnergyEvolution")
output("hydro.kernel()")
output("hydro.PiKernel()")
output("hydro.HratioMin")
output("hydro.valid()")
#-------------------------------------------------------------------------------
# Create boundary conditions.
#-------------------------------------------------------------------------------
xPlane0 = Plane2d(Vector2d(0.0, 0.0), Vector2d(1.0, 0.0))
yPlane0 = Plane2d(Vector2d(0.0, 0.0), Vector2d(0.0, 1.0))
xbc0 = ReflectingBoundary2d(xPlane0)
ybc0 = ReflectingBoundary2d(yPlane0)
hydro.appendBoundary(xbc0)
hydro.appendBoundary(ybc0)
output("hydro.haveBoundary(xbc0)")
output("hydro.haveBoundary(ybc0)")
#-------------------------------------------------------------------------------
# Construct an integrator, and add the physics packages.
#-------------------------------------------------------------------------------
integrator = SynchronousRK2Integrator2d(db)
integrator.appendPhysicsPackage(hydro)
integrator.lastDt = dt
integrator.dtMin = dtMin
integrator.dtMax = dtMax
integrator.dtGrowth = dtGrowth
output("integrator")
output("integrator.havePhysicsPackage(hydro)")
output("integrator.valid()")
output("integrator.lastDt")
output("integrator.dtMin")
output("integrator.dtMax")
output("integrator.dtGrowth")
#-------------------------------------------------------------------------------
# Make the problem controller.
#-------------------------------------------------------------------------------
control = SpheralController(integrator, WT,
statsStep = statsStep,
restartStep = restartStep,
restartBaseName = restartBaseName)
output("control")
# Smooth the initial conditions.
if restoreCycle is not None:
control.loadRestartFile(restoreCycle)
else:
control.iterateIdealH()
control.smoothState(smoothIters)
control.dropRestartFile()
dumpPhysicsState(integrator,
"icf-2d",
visitDir)
#-------------------------------------------------------------------------------
# Advance to the end time.
#-------------------------------------------------------------------------------
hstats(nodeSet)
while control.time() < goalTime:
nextGoalTime = min(control.time() + dtSample, goalTime)
control.advance(nextGoalTime, maxSteps)
control.dropRestartFile()
dumpPhysicsState(integrator,
"icf-2d",
visitDir)
#-------------------------------------------------------------------------------
# Plot the results.
#-------------------------------------------------------------------------------
if graphics:
# Plot the elongation (h1/h2) for the H tensors.
import Gnuplot
rPlot = plotNodePositions2d(db, colorNodeLists=True, colorDomains=False)
# Plot the final state.
rhoPlot, vrPlot, epsPlot, PPlot, HPlot = plotRadialState(db)
del HPlot
Hinverse = db.fluidHinverse
hr = db.newFluidScalarFieldList()
ht = db.newFluidScalarFieldList()
for Hfield, hrfield, htfield in zip(Hinverse.fields(),
hr.fields(),
ht.fields()):
n = Hfield.numElements()
assert hrfield.numElements() == n
assert htfield.numElements() == n
positions = Hfield.nodeList().positions()
for i in xrange(n):
runit = positions[i].unitVector()
tunit = Vector2d(-(positions[i].y), positions[i].x).unitVector()
hrfield[i] = (Hfield[i]*runit).magnitude()
htfield[i] = (Hfield[i]*tunit).magnitude()
hrPlot = plotFieldList(hr, xFunction="%s.magnitude()", plotStyle="points", winTitle="h_r")
htPlot = plotFieldList(ht, xFunction="%s.magnitude()", plotStyle="points", winTitle="h_t")
| StarcoderdataPython |
1794186 | from mcstats import mcstats
# Register the "Enchanter" statistic: counts items a player has enchanted,
# read from the vanilla minecraft:enchant_item entry under minecraft:custom.
mcstats.registry.append(
    mcstats.MinecraftStat(
        'enchant',
        {
            'title': 'Enchanter',
            'desc': 'Items enchanted',
            'unit': 'int',
        },
        mcstats.StatReader(['minecraft:custom','minecraft:enchant_item'])
    ))
| StarcoderdataPython |
1779868 | import ast
import io
import os
import pathlib
import pickle
import time
from typing import List, Union
import click
import pydantic
import yaml
from respo import core, settings
def save_respo_model(model: core.RespoModel) -> None:
    """Serialize *model* with pickle to the configured binary file.

    Only the binary (pickle) file is written here — no YAML output is
    produced by this function, despite the old docstring's claim.  The
    auto-generated folder is created if missing; both paths come from
    ``settings.config`` and may be overwritten using environment variables.
    """
    pathlib.Path(settings.config.RESPO_AUTO_FOLDER_NAME).mkdir(
        parents=True, exist_ok=True
    )
    with open(settings.config.path_bin_file, "wb") as file:
        pickle.dump(model, file)
def generate_respo_model_file(respo_model: core.RespoModel) -> None:
    """Generates python file with class RespoModel.

    Generated file contains a class definition that inherits from
    RespoModel with additional typing annotations, saved to
    config.RESPO_FILE_NAME_RESPO_MODEL.  The emitted text is read by
    users' type checkers, so its layout must stay stable.
    """
    # NOTE(review): the leading whitespace inside the emitted string
    # literals below appears collapsed (single spaces where nested-class
    # bodies need multiples of 4) — likely mangled in this copy of the
    # source.  Verify the literals against the upstream file before
    # trusting the generated output.

    def class_definition(
        labels_container: Union[core.ROLESContainer, core.PERMSContainer],
        class_name: str,
    ):
        # Build one nested class: a `NAME: str` annotation per upper-case
        # label, or a bare `pass` when the container is empty.
        result_lst = []
        result_lst.append(f" class {class_name}:\n")
        if not len(labels_container):
            result_lst.append(" pass\n")
        else:
            for name in sorted(
                label for label in labels_container.__dict__ if label.isupper()
            ):
                result_lst.append(f" {name}: str\n")
        result_lst.append("\n")
        return "".join(result_lst)

    output_text_lst: List[str] = []
    output_text_lst.append('"""\nAuto generated using respo create command\n')
    output_text_lst.append('Docs: https://rafsaf.github.io/respo/\n"""\n\n')
    roles_definition = class_definition(
        respo_model.ROLES, "_ROLES(respo.ROLESContainer)"
    )
    perms_definition = class_definition(
        respo_model.PERMS, "_PERMS(respo.PERMSContainer)"
    )
    output_text_lst.append("import typing\n\n")
    output_text_lst.append("import respo\n\n\n")
    output_text_lst.append("class RespoModel(respo.RespoModel):\n")
    output_text_lst.append(" if typing.TYPE_CHECKING:\n\n")
    output_text_lst.append(roles_definition)
    output_text_lst.append(perms_definition)
    output_text_lst.append(" PERMS: _PERMS\n")
    output_text_lst.append(" ROLES: _ROLES\n\n")
    output_text_lst.append(" @staticmethod\n")
    output_text_lst.append(' def get_respo_model() -> "RespoModel":\n')
    output_text_lst.append(
        " return respo.RespoModel.get_respo_model() # type: ignore\n"
    )
    with open(settings.config.RESPO_FILE_NAME_RESPO_MODEL, "w") as file:
        file.write("".join(output_text_lst))
def good(text: str) -> str:
    """Render *text* as a bold green ``INFO:`` console message."""
    message = f"INFO: {text}"
    return click.style(message, fg="green", bold=True)
def bad(text: str) -> str:
    """Render *text* as a bold yellow ``ERROR:`` console message."""
    message = f"ERROR: {text}"
    return click.style(message, fg="yellow", bold=True)
@click.group()
def app():
    """Root click command group; subcommands attach via ``@app.command()``."""
    pass
@click.option("--no-python-file", is_flag=True, type=bool, default=False)
@click.argument("file", type=click.File("r"))
@app.command()
def create(
file: io.TextIOWrapper,
no_python_file: bool,
):
"""Parses FILENAME with declared respo resource policies.
Creates pickled model representation by default in .respo_cache folder
and python file with generated model in respo_model.py to improve
typing support for end user.
"""
click.echo(good(f"Validating respo model from {file.name}..."))
start_time = time.time()
try:
data = yaml.safe_load(file.read())
respo_model = core.RespoModel.parse_obj(data)
except yaml.YAMLError as yml_error:
click.echo(f"\n{yml_error}\n")
click.echo(bad("Could not process file, yml syntax is invalid"))
raise click.Abort()
except pydantic.ValidationError as respo_errors:
errors = [
error
for error in respo_errors.errors()
if error["type"] != "assertion_error" # theese are unuseful errors
]
for error in errors:
if error["type"] == "value_error.respomodel":
loc_msg = error["msg"].split("|")
error["loc"] = ast.literal_eval(loc_msg[0])
error["msg"] = loc_msg[1]
no_errors = len(errors)
click.echo(bad("Could not validate respo model"))
click.echo(
bad(
f'Found {no_errors} validation error{"" if no_errors == 1 else "s"} for RespoModel\n\n'
)
+ f"{pydantic.error_wrappers.display_errors(errors)}\n"
)
raise click.Abort()
save_respo_model(respo_model)
if not no_python_file:
generate_respo_model_file(respo_model=respo_model)
click.echo(good(f"Saved binary file to {settings.config.path_bin_file}"))
click.echo(good(f"Saved python file to {settings.config.path_python_file}"))
process_time = round(time.time() - start_time, 4)
bin_file_size = round(os.path.getsize(settings.config.path_bin_file) / 1048576, 4)
click.echo(
good(f"Processed in {process_time}s. Bin file size: {bin_file_size} mb.")
)
click.echo(good("Success!"))
| StarcoderdataPython |
3241818 | <filename>multiple_object_detection.py
# -*- coding: utf-8 -*-
# Version: 0.1a9
from os.path import exists, isfile
import cv2 as cv
import imutils
import numpy as np
from matplotlib import pyplot as plt
def multiple_objects_detection(template, image, scale=1.0,
                               method='cv.TM_CCOEFF_NORMED', threshold=0.7, mode='hide'):
    """Multiple object detection. Function compares template against overlapped
    source image regions.

    Input:
      template -- path to the template image. Template must be not greater
                  than the source image and have the same data type
                  (required | type: str);
      image -- path to the source image. Source image must be 8-bit or 32-bit
               floating-point (required | type: str);
      scale -- scale between source image and template, e.g. 0.5 means the
               source-to-template ratio is 1:2 (type: float | default: 1.0);
      method -- template matching method name of OpenCV; only the normalized
                methods are usable with a threshold
                (type: str | default: 'cv.TM_CCOEFF_NORMED');
      threshold -- matching threshold, e.g. 0.7 keeps matches scoring in
                   [0.7, 1.0] (inverted internally for TM_SQDIFF_NORMED);
      mode -- visualization mode: 'hide' (none), 'matplotlib', or 'opencv'
              (type: str | default: 'hide').

    Output:
      result -- list of [(top_left), (bottom_right)] point pairs of matched
                regions, e.g. [[(1, 1), (2, 2)], [(3, 1), (4, 2)]].

    NOTE(review): on an invalid method name or a missing file this function
    returns the int ``1`` instead of raising or returning a list — this is
    inconsistent with the documented list output and truthy like a non-empty
    result.  Confirm how callers check errors before changing it.
    """
    result = []
    color = (255, 0, 0)  # RGB color model
    thickness = 8  # thickness of line
    # Verify 'method' param (whitelist before eval — eval on an arbitrary
    # string would otherwise be an injection risk).
    if method in ('cv.TM_CCOEFF', 'cv.TM_CCOEFF_NORMED', 'cv.TM_CCORR',
                  'cv.TM_CCORR_NORMED', 'cv.TM_SQDIFF', 'cv.TM_SQDIFF_NORMED'):
        method = eval(method)
    else:
        return 1
    # Load template from file
    if exists(template) and isfile(template):
        tmpl = cv.imread(template)
    else:
        return 1
    # Load source image from file
    if exists(image) and isfile(image):
        img = cv.imread(image)
    else:
        return 1
    # Convert template to grayscale color model
    tmpl = cv.cvtColor(tmpl, cv.COLOR_BGR2GRAY)
    # Convert source image to grayscale color model
    img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
    # Get width and height of template
    th, tw = tmpl.shape
    # Get width and height of source image
    ih, iw = img.shape
    # Check 'scale' param
    if scale == 1:
        resized = tmpl
        rh, rw = th, tw
        r = 1
    else:
        width = int(tw * scale)
        # Resize
        resized = imutils.resize(tmpl, width=width)
        # Get width and height of resized template
        rh, rw = resized.shape
        # r converts resized-template sizes back to original-template units.
        r = tw / rw
    # If template is bigger than source image, then return empty result
    if (rh > ih) or (rw > iw):
        return result
    # Match template
    match = cv.matchTemplate(img, resized, method)
    # Check 'threshold' param: unnormalized methods have no meaningful
    # fixed threshold, so they yield an empty result.
    if method in [cv.TM_CCOEFF, cv.TM_CCORR, cv.TM_SQDIFF]:
        return result
    elif method == cv.TM_SQDIFF_NORMED:
        # For squared-difference, LOWER scores are better matches.
        threshold = 1 - threshold
        loc = np.where(match <= threshold)
    else:
        loc = np.where(match >= threshold)
    # Store results of match to 'result' (loc is (rows, cols); reverse to x, y)
    for el in zip(*loc[::-1]):
        start_x, start_y = el
        end_x, end_y = int(start_x + tw/r), int(start_y + th/r)
        result.append([(start_x, start_y), (end_x, end_y)])
    # Check 'mode' param
    if mode != 'hide':
        visualization = cv.cvtColor(img, cv.COLOR_GRAY2RGB)
        for el in result:
            start, end = el
            cv.rectangle(visualization, start, end, color, thickness)
        # Visualization in Matplotlib
        if mode == 'matplotlib':
            plt.imshow(visualization)
            plt.title('Scale: %s' % scale)
            plt.show()
        # Visualization in OpenCV
        elif mode == 'opencv':
            visualization = cv.cvtColor(visualization, cv.COLOR_RGB2BGR)
            title = 'Scale: %s' % scale
            cv.imshow(title, visualization)
            cv.waitKey(0)
            cv.destroyAllWindows()
    return result
# Example. Multiple detection of template.jpg object in example.jpg
# source image
#result = multiple_objects_detection('template.jpg', 'example.jpg')
# Example. Multiple detection of template.jpg object in example.jpg
# source image. Where scale of source image to template is 1:2
#result = multiple_objects_detection('template.jpg', 'example.jpg', scale=0.5)
# Example. Multiple detection of template.jpg object in example.jpg
# source image. Where scale of source image to template is 2:1
#result = multiple_objects_detection('template.jpg', 'example.jpg', scale=2.0)
# Example. Multiple detection of template.jpg object in example.jpg
# source image with cv.TM_CCORR_NORMED method
#result = multiple_objects_detection('template.jpg', 'example.jpg',
# method='cv.TM_SQDIFF_NORMED')
# Example. Multiple detection of template.jpg object in example.jpg
# source image with best matches from 0.9 to 1.0 (>=0.9 and <=1.0)
#result = multiple_objects_detection('template.jpg', 'example.jpg',
# threshold=0.9)
# Example. Multiple detection of template.jpg object in example.jpg
# source image and visualization with Matplotlib
#result = multiple_objects_detection('template.jpg', 'example.jpg',
# mode='matplotlib')
| StarcoderdataPython |
31873 | from unittest import TestCase
from src.lineout.data import *
class TestDataUtils(TestCase):
    """Unit tests for the ``get_result_list`` result-unwrapping helper."""

    def test_get_result_list(self):
        """Bare lists and paginated envelopes unwrap to the item list;
        anything else raises ValueError."""
        items = [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]
        envelope = {
            'count': 2,
            'previous': None,
            'next': None,
            'results': items,
        }
        # Already a list: returned unchanged.
        self.assertListEqual(get_result_list(items), items)
        # Paginated payload: the 'results' list is extracted.
        self.assertListEqual(get_result_list(envelope), items)
        # Mappings without a list under 'results' are rejected.
        with self.assertRaises(ValueError):
            get_result_list({'foo': 'bar'})
        with self.assertRaises(ValueError):
            get_result_list({'results': {'foo': 'bar'}})
| StarcoderdataPython |
1799814 | from Website.site_base import BaseHandler
import tornado.web
import tornado
import SQL.table_simulation as SQLsim
class RawPacketHandler(BaseHandler):
    """Admin-only page for sending raw packets to running simulations."""

    @tornado.web.authenticated
    def get(self):
        # Guard clauses: must be logged in, and must be an admin-level user.
        user = self.current_user
        if user is None:
            self.redirect('login.html?next=edit')
            return
        if user.permission < 9000:
            self.redirect('/')
            return
        running = (
            self.database.query(SQLsim.Simulation)
            .filter(SQLsim.Simulation.status == 1)
            .all()
        )
        print('Sim: ' + str(running))
        self.render('raw_packet.html', simulation=running)
        # Important: commit so later requests see fresh data instead of this
        # transaction's cached state.
        self.database.commit()

    def post(self):
        print(str(self.request))
        for label, field in (('Message', 'message'),
                             ('Client', 'client'),
                             ('header', 'header')):
            print(label + ': ' + str(self.get_argument(field, '')))
        self.redirect('/raw')
3324194 | <filename>examples/xor_ex.py
#!/usr/bin/env python3
from symcollab.algebra import *
from symcollab.xor import *
a = Constant("a")
b = Constant("b")
c = Constant("c")
x = Variable("x")
y = Variable("y")
z = Variable("z")
print("xor(a,b,x,x,y,a,c) =", end = " ")
print(xor(a, b, x, x, y, a, c)) | StarcoderdataPython |
1701954 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
def swig_import_helper():
import importlib
pkg = __name__.rpartition('.')[0]
mname = '.'.join((pkg, '_hingedRigidBodyStateEffector')).lstrip('.')
try:
return importlib.import_module(mname)
except ImportError:
return importlib.import_module('_hingedRigidBodyStateEffector')
_hingedRigidBodyStateEffector = swig_import_helper()
del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_hingedRigidBodyStateEffector', [dirname(__file__)])
except ImportError:
import _hingedRigidBodyStateEffector
return _hingedRigidBodyStateEffector
try:
_mod = imp.load_module('_hingedRigidBodyStateEffector', fp, pathname, description)
finally:
if fp is not None:
fp.close()
return _mod
_hingedRigidBodyStateEffector = swig_import_helper()
del swig_import_helper
else:
import _hingedRigidBodyStateEffector
del _swig_python_version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except __builtin__.Exception:
class _object:
pass
_newclass = 0
class SwigPyIterator(_object):
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _hingedRigidBodyStateEffector.delete_SwigPyIterator
__del__ = lambda self: None
def value(self):
return _hingedRigidBodyStateEffector.SwigPyIterator_value(self)
def incr(self, n=1):
return _hingedRigidBodyStateEffector.SwigPyIterator_incr(self, n)
def decr(self, n=1):
return _hingedRigidBodyStateEffector.SwigPyIterator_decr(self, n)
def distance(self, x):
return _hingedRigidBodyStateEffector.SwigPyIterator_distance(self, x)
def equal(self, x):
return _hingedRigidBodyStateEffector.SwigPyIterator_equal(self, x)
def copy(self):
return _hingedRigidBodyStateEffector.SwigPyIterator_copy(self)
def next(self):
return _hingedRigidBodyStateEffector.SwigPyIterator_next(self)
def __next__(self):
return _hingedRigidBodyStateEffector.SwigPyIterator___next__(self)
def previous(self):
return _hingedRigidBodyStateEffector.SwigPyIterator_previous(self)
def advance(self, n):
return _hingedRigidBodyStateEffector.SwigPyIterator_advance(self, n)
def __eq__(self, x):
return _hingedRigidBodyStateEffector.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
return _hingedRigidBodyStateEffector.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
return _hingedRigidBodyStateEffector.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
return _hingedRigidBodyStateEffector.SwigPyIterator___isub__(self, n)
def __add__(self, n):
return _hingedRigidBodyStateEffector.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
return _hingedRigidBodyStateEffector.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
SwigPyIterator_swigregister = _hingedRigidBodyStateEffector.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
def new_doubleArray(nelements):
return _hingedRigidBodyStateEffector.new_doubleArray(nelements)
new_doubleArray = _hingedRigidBodyStateEffector.new_doubleArray
def delete_doubleArray(ary):
return _hingedRigidBodyStateEffector.delete_doubleArray(ary)
delete_doubleArray = _hingedRigidBodyStateEffector.delete_doubleArray
def doubleArray_getitem(ary, index):
return _hingedRigidBodyStateEffector.doubleArray_getitem(ary, index)
doubleArray_getitem = _hingedRigidBodyStateEffector.doubleArray_getitem
def doubleArray_setitem(ary, index, value):
return _hingedRigidBodyStateEffector.doubleArray_setitem(ary, index, value)
doubleArray_setitem = _hingedRigidBodyStateEffector.doubleArray_setitem
def new_longArray(nelements):
return _hingedRigidBodyStateEffector.new_longArray(nelements)
new_longArray = _hingedRigidBodyStateEffector.new_longArray
def delete_longArray(ary):
return _hingedRigidBodyStateEffector.delete_longArray(ary)
delete_longArray = _hingedRigidBodyStateEffector.delete_longArray
def longArray_getitem(ary, index):
return _hingedRigidBodyStateEffector.longArray_getitem(ary, index)
longArray_getitem = _hingedRigidBodyStateEffector.longArray_getitem
def longArray_setitem(ary, index, value):
return _hingedRigidBodyStateEffector.longArray_setitem(ary, index, value)
longArray_setitem = _hingedRigidBodyStateEffector.longArray_setitem
def new_intArray(nelements):
return _hingedRigidBodyStateEffector.new_intArray(nelements)
new_intArray = _hingedRigidBodyStateEffector.new_intArray
def delete_intArray(ary):
return _hingedRigidBodyStateEffector.delete_intArray(ary)
delete_intArray = _hingedRigidBodyStateEffector.delete_intArray
def intArray_getitem(ary, index):
return _hingedRigidBodyStateEffector.intArray_getitem(ary, index)
intArray_getitem = _hingedRigidBodyStateEffector.intArray_getitem
def intArray_setitem(ary, index, value):
return _hingedRigidBodyStateEffector.intArray_setitem(ary, index, value)
intArray_setitem = _hingedRigidBodyStateEffector.intArray_setitem
def new_shortArray(nelements):
return _hingedRigidBodyStateEffector.new_shortArray(nelements)
new_shortArray = _hingedRigidBodyStateEffector.new_shortArray
def delete_shortArray(ary):
return _hingedRigidBodyStateEffector.delete_shortArray(ary)
delete_shortArray = _hingedRigidBodyStateEffector.delete_shortArray
def shortArray_getitem(ary, index):
return _hingedRigidBodyStateEffector.shortArray_getitem(ary, index)
shortArray_getitem = _hingedRigidBodyStateEffector.shortArray_getitem
def shortArray_setitem(ary, index, value):
return _hingedRigidBodyStateEffector.shortArray_setitem(ary, index, value)
shortArray_setitem = _hingedRigidBodyStateEffector.shortArray_setitem
def getStructSize(self):
    """Return sizeof() of the wrapped C struct/class for this SWIG proxy.

    Looks up a module-level ``sizeof_<ClassName>`` value whose name is
    derived from the SWIG proxy repr; raises NameError with guidance when
    the GEN_SIZEOF macro was not run for this type.
    """
    typeString = 'sizeof_' + repr(self).split(';')[0].split('.')[-1]
    try:
        return eval(typeString)
    except NameError as e:
        # Fix: use str(e) instead of e.message — BaseException.message was
        # removed in Python 3, so the old code raised AttributeError here
        # instead of the intended, explanatory NameError.
        raise NameError(str(e) + '\nYou tried to get this size macro: ' + typeString +
                        '\n It appears to be undefined. \nYou need to run the SWIG GEN_SIZEOF' +
                        ' SWIG macro against the class/struct in your SWIG file if you want to ' +
                        ' make this call.\n')
def protectSetAttr(self, name, value):
    """Restricted ``__setattr__`` installed on SWIG proxy classes.

    Only attributes that already exist (or the special SWIG pointer slot
    ``'this'``) may be assigned; creating a brand-new attribute raises
    ValueError, which catches typos against wrapped structs.
    """
    # Guard clause: reject unknown attribute names up front.
    if not hasattr(self, name) and name != 'this':
        raise ValueError('You tried to add this variable: ' + name + '\n' +
                         'To this class: ' + str(self))
    object.__setattr__(self, name, value)
def protectAllClasses(moduleType):
    """Install ``protectSetAttr`` and ``getStructSize`` on every class in this module.

    Walks the classes of the current module and attaches the protective
    ``__setattr__`` plus the ``getStructSize`` helper to each one. Classes
    that refuse attribute assignment (e.g. extension types) are skipped.

    Args:
        moduleType: unused; kept so existing callers keep working.
    """
    import inspect
    clsmembers = inspect.getmembers(sys.modules[__name__], inspect.isclass)
    for _name, cls in clsmembers:
        try:
            # Assign directly on the class object instead of exec()-ing a
            # string built from the class name: identical effect, no dynamic
            # code execution.
            cls.__setattr__ = protectSetAttr
            cls.getStructSize = getStructSize
        except (AttributeError, TypeError):
            # Extension/builtin types may not allow this; ignore them, as
            # the original code did.
            pass
def new_boolArray(nelements):
    # Allocate a C ``bool`` array of *nelements*; release it with delete_boolArray.
    return _hingedRigidBodyStateEffector.new_boolArray(nelements)
new_boolArray = _hingedRigidBodyStateEffector.new_boolArray
def delete_boolArray(ary):
    # Free a C ``bool`` array previously created by new_boolArray.
    return _hingedRigidBodyStateEffector.delete_boolArray(ary)
delete_boolArray = _hingedRigidBodyStateEffector.delete_boolArray
def boolArray_getitem(ary, index):
    # Read element *index* of the C ``bool`` array *ary*.
    return _hingedRigidBodyStateEffector.boolArray_getitem(ary, index)
boolArray_getitem = _hingedRigidBodyStateEffector.boolArray_getitem
def boolArray_setitem(ary, index, value):
    # Store *value* at position *index* of the C ``bool`` array *ary*.
    return _hingedRigidBodyStateEffector.boolArray_setitem(ary, index, value)
boolArray_setitem = _hingedRigidBodyStateEffector.boolArray_setitem
class IntVector(_object):
    """SWIG proxy for ``std::vector<int>``.

    Every method delegates to the compiled ``_hingedRigidBodyStateEffector``
    extension; this class only supplies the Python-side sequence protocol.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, IntVector, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, IntVector, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _hingedRigidBodyStateEffector.IntVector_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _hingedRigidBodyStateEffector.IntVector___nonzero__(self)
    def __bool__(self):
        return _hingedRigidBodyStateEffector.IntVector___bool__(self)
    def __len__(self):
        return _hingedRigidBodyStateEffector.IntVector___len__(self)
    def __getslice__(self, i, j):
        return _hingedRigidBodyStateEffector.IntVector___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _hingedRigidBodyStateEffector.IntVector___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _hingedRigidBodyStateEffector.IntVector___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _hingedRigidBodyStateEffector.IntVector___delitem__(self, *args)
    def __getitem__(self, *args):
        return _hingedRigidBodyStateEffector.IntVector___getitem__(self, *args)
    def __setitem__(self, *args):
        return _hingedRigidBodyStateEffector.IntVector___setitem__(self, *args)
    def pop(self):
        return _hingedRigidBodyStateEffector.IntVector_pop(self)
    def append(self, x):
        return _hingedRigidBodyStateEffector.IntVector_append(self, x)
    def empty(self):
        return _hingedRigidBodyStateEffector.IntVector_empty(self)
    def size(self):
        return _hingedRigidBodyStateEffector.IntVector_size(self)
    def swap(self, v):
        return _hingedRigidBodyStateEffector.IntVector_swap(self, v)
    def begin(self):
        return _hingedRigidBodyStateEffector.IntVector_begin(self)
    def end(self):
        return _hingedRigidBodyStateEffector.IntVector_end(self)
    def rbegin(self):
        return _hingedRigidBodyStateEffector.IntVector_rbegin(self)
    def rend(self):
        return _hingedRigidBodyStateEffector.IntVector_rend(self)
    def clear(self):
        return _hingedRigidBodyStateEffector.IntVector_clear(self)
    def get_allocator(self):
        return _hingedRigidBodyStateEffector.IntVector_get_allocator(self)
    def pop_back(self):
        return _hingedRigidBodyStateEffector.IntVector_pop_back(self)
    def erase(self, *args):
        return _hingedRigidBodyStateEffector.IntVector_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_IntVector(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x):
        return _hingedRigidBodyStateEffector.IntVector_push_back(self, x)
    def front(self):
        return _hingedRigidBodyStateEffector.IntVector_front(self)
    def back(self):
        return _hingedRigidBodyStateEffector.IntVector_back(self)
    def assign(self, n, x):
        return _hingedRigidBodyStateEffector.IntVector_assign(self, n, x)
    def resize(self, *args):
        return _hingedRigidBodyStateEffector.IntVector_resize(self, *args)
    def insert(self, *args):
        return _hingedRigidBodyStateEffector.IntVector_insert(self, *args)
    def reserve(self, n):
        return _hingedRigidBodyStateEffector.IntVector_reserve(self, n)
    def capacity(self):
        return _hingedRigidBodyStateEffector.IntVector_capacity(self)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_IntVector
    __del__ = lambda self: None
# Register the proxy with the C extension so vectors created on the C++
# side surface as IntVector instances in Python.
IntVector_swigregister = _hingedRigidBodyStateEffector.IntVector_swigregister
IntVector_swigregister(IntVector)
class DoubleVector(_object):
    """SWIG proxy for ``std::vector<double>``.

    Every method delegates to the compiled ``_hingedRigidBodyStateEffector``
    extension; this class only supplies the Python-side sequence protocol.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, DoubleVector, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, DoubleVector, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _hingedRigidBodyStateEffector.DoubleVector_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _hingedRigidBodyStateEffector.DoubleVector___nonzero__(self)
    def __bool__(self):
        return _hingedRigidBodyStateEffector.DoubleVector___bool__(self)
    def __len__(self):
        return _hingedRigidBodyStateEffector.DoubleVector___len__(self)
    def __getslice__(self, i, j):
        return _hingedRigidBodyStateEffector.DoubleVector___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _hingedRigidBodyStateEffector.DoubleVector___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _hingedRigidBodyStateEffector.DoubleVector___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _hingedRigidBodyStateEffector.DoubleVector___delitem__(self, *args)
    def __getitem__(self, *args):
        return _hingedRigidBodyStateEffector.DoubleVector___getitem__(self, *args)
    def __setitem__(self, *args):
        return _hingedRigidBodyStateEffector.DoubleVector___setitem__(self, *args)
    def pop(self):
        return _hingedRigidBodyStateEffector.DoubleVector_pop(self)
    def append(self, x):
        return _hingedRigidBodyStateEffector.DoubleVector_append(self, x)
    def empty(self):
        return _hingedRigidBodyStateEffector.DoubleVector_empty(self)
    def size(self):
        return _hingedRigidBodyStateEffector.DoubleVector_size(self)
    def swap(self, v):
        return _hingedRigidBodyStateEffector.DoubleVector_swap(self, v)
    def begin(self):
        return _hingedRigidBodyStateEffector.DoubleVector_begin(self)
    def end(self):
        return _hingedRigidBodyStateEffector.DoubleVector_end(self)
    def rbegin(self):
        return _hingedRigidBodyStateEffector.DoubleVector_rbegin(self)
    def rend(self):
        return _hingedRigidBodyStateEffector.DoubleVector_rend(self)
    def clear(self):
        return _hingedRigidBodyStateEffector.DoubleVector_clear(self)
    def get_allocator(self):
        return _hingedRigidBodyStateEffector.DoubleVector_get_allocator(self)
    def pop_back(self):
        return _hingedRigidBodyStateEffector.DoubleVector_pop_back(self)
    def erase(self, *args):
        return _hingedRigidBodyStateEffector.DoubleVector_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_DoubleVector(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x):
        return _hingedRigidBodyStateEffector.DoubleVector_push_back(self, x)
    def front(self):
        return _hingedRigidBodyStateEffector.DoubleVector_front(self)
    def back(self):
        return _hingedRigidBodyStateEffector.DoubleVector_back(self)
    def assign(self, n, x):
        return _hingedRigidBodyStateEffector.DoubleVector_assign(self, n, x)
    def resize(self, *args):
        return _hingedRigidBodyStateEffector.DoubleVector_resize(self, *args)
    def insert(self, *args):
        return _hingedRigidBodyStateEffector.DoubleVector_insert(self, *args)
    def reserve(self, n):
        return _hingedRigidBodyStateEffector.DoubleVector_reserve(self, n)
    def capacity(self):
        return _hingedRigidBodyStateEffector.DoubleVector_capacity(self)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_DoubleVector
    __del__ = lambda self: None
# Register the proxy with the C extension so vectors created on the C++
# side surface as DoubleVector instances in Python.
DoubleVector_swigregister = _hingedRigidBodyStateEffector.DoubleVector_swigregister
DoubleVector_swigregister(DoubleVector)
class StringVector(_object):
    """SWIG proxy for ``std::vector<std::string>``.

    Every method delegates to the compiled ``_hingedRigidBodyStateEffector``
    extension; this class only supplies the Python-side sequence protocol.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StringVector, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StringVector, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _hingedRigidBodyStateEffector.StringVector_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _hingedRigidBodyStateEffector.StringVector___nonzero__(self)
    def __bool__(self):
        return _hingedRigidBodyStateEffector.StringVector___bool__(self)
    def __len__(self):
        return _hingedRigidBodyStateEffector.StringVector___len__(self)
    def __getslice__(self, i, j):
        return _hingedRigidBodyStateEffector.StringVector___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _hingedRigidBodyStateEffector.StringVector___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _hingedRigidBodyStateEffector.StringVector___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _hingedRigidBodyStateEffector.StringVector___delitem__(self, *args)
    def __getitem__(self, *args):
        return _hingedRigidBodyStateEffector.StringVector___getitem__(self, *args)
    def __setitem__(self, *args):
        return _hingedRigidBodyStateEffector.StringVector___setitem__(self, *args)
    def pop(self):
        return _hingedRigidBodyStateEffector.StringVector_pop(self)
    def append(self, x):
        return _hingedRigidBodyStateEffector.StringVector_append(self, x)
    def empty(self):
        return _hingedRigidBodyStateEffector.StringVector_empty(self)
    def size(self):
        return _hingedRigidBodyStateEffector.StringVector_size(self)
    def swap(self, v):
        return _hingedRigidBodyStateEffector.StringVector_swap(self, v)
    def begin(self):
        return _hingedRigidBodyStateEffector.StringVector_begin(self)
    def end(self):
        return _hingedRigidBodyStateEffector.StringVector_end(self)
    def rbegin(self):
        return _hingedRigidBodyStateEffector.StringVector_rbegin(self)
    def rend(self):
        return _hingedRigidBodyStateEffector.StringVector_rend(self)
    def clear(self):
        return _hingedRigidBodyStateEffector.StringVector_clear(self)
    def get_allocator(self):
        return _hingedRigidBodyStateEffector.StringVector_get_allocator(self)
    def pop_back(self):
        return _hingedRigidBodyStateEffector.StringVector_pop_back(self)
    def erase(self, *args):
        return _hingedRigidBodyStateEffector.StringVector_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_StringVector(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x):
        return _hingedRigidBodyStateEffector.StringVector_push_back(self, x)
    def front(self):
        return _hingedRigidBodyStateEffector.StringVector_front(self)
    def back(self):
        return _hingedRigidBodyStateEffector.StringVector_back(self)
    def assign(self, n, x):
        return _hingedRigidBodyStateEffector.StringVector_assign(self, n, x)
    def resize(self, *args):
        return _hingedRigidBodyStateEffector.StringVector_resize(self, *args)
    def insert(self, *args):
        return _hingedRigidBodyStateEffector.StringVector_insert(self, *args)
    def reserve(self, n):
        return _hingedRigidBodyStateEffector.StringVector_reserve(self, n)
    def capacity(self):
        return _hingedRigidBodyStateEffector.StringVector_capacity(self)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_StringVector
    __del__ = lambda self: None
# Register the proxy with the C extension so vectors created on the C++
# side surface as StringVector instances in Python.
StringVector_swigregister = _hingedRigidBodyStateEffector.StringVector_swigregister
StringVector_swigregister(StringVector)
class StringSet(_object):
    """SWIG proxy for ``std::set<std::string>``.

    Every method delegates to the compiled ``_hingedRigidBodyStateEffector``
    extension; this class only supplies the Python-side set protocol.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StringSet, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StringSet, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _hingedRigidBodyStateEffector.StringSet_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _hingedRigidBodyStateEffector.StringSet___nonzero__(self)
    def __bool__(self):
        return _hingedRigidBodyStateEffector.StringSet___bool__(self)
    def __len__(self):
        return _hingedRigidBodyStateEffector.StringSet___len__(self)
    def append(self, x):
        return _hingedRigidBodyStateEffector.StringSet_append(self, x)
    def __contains__(self, x):
        return _hingedRigidBodyStateEffector.StringSet___contains__(self, x)
    def __getitem__(self, i):
        return _hingedRigidBodyStateEffector.StringSet___getitem__(self, i)
    def add(self, x):
        return _hingedRigidBodyStateEffector.StringSet_add(self, x)
    def discard(self, x):
        return _hingedRigidBodyStateEffector.StringSet_discard(self, x)
    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_StringSet(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def empty(self):
        return _hingedRigidBodyStateEffector.StringSet_empty(self)
    def size(self):
        return _hingedRigidBodyStateEffector.StringSet_size(self)
    def clear(self):
        return _hingedRigidBodyStateEffector.StringSet_clear(self)
    def swap(self, v):
        return _hingedRigidBodyStateEffector.StringSet_swap(self, v)
    def count(self, x):
        return _hingedRigidBodyStateEffector.StringSet_count(self, x)
    def begin(self):
        return _hingedRigidBodyStateEffector.StringSet_begin(self)
    def end(self):
        return _hingedRigidBodyStateEffector.StringSet_end(self)
    def rbegin(self):
        return _hingedRigidBodyStateEffector.StringSet_rbegin(self)
    def rend(self):
        return _hingedRigidBodyStateEffector.StringSet_rend(self)
    def erase(self, *args):
        return _hingedRigidBodyStateEffector.StringSet_erase(self, *args)
    def find(self, x):
        return _hingedRigidBodyStateEffector.StringSet_find(self, x)
    def lower_bound(self, x):
        return _hingedRigidBodyStateEffector.StringSet_lower_bound(self, x)
    def upper_bound(self, x):
        return _hingedRigidBodyStateEffector.StringSet_upper_bound(self, x)
    def equal_range(self, x):
        return _hingedRigidBodyStateEffector.StringSet_equal_range(self, x)
    def insert(self, __x):
        return _hingedRigidBodyStateEffector.StringSet_insert(self, __x)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_StringSet
    __del__ = lambda self: None
# Register the proxy with the C extension so sets created on the C++
# side surface as StringSet instances in Python.
StringSet_swigregister = _hingedRigidBodyStateEffector.StringSet_swigregister
StringSet_swigregister(StringSet)
class intSet(_object):
    """SWIG proxy for ``std::set<int>``.

    Every method delegates to the compiled ``_hingedRigidBodyStateEffector``
    extension; this class only supplies the Python-side set protocol.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, intSet, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, intSet, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _hingedRigidBodyStateEffector.intSet_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _hingedRigidBodyStateEffector.intSet___nonzero__(self)
    def __bool__(self):
        return _hingedRigidBodyStateEffector.intSet___bool__(self)
    def __len__(self):
        return _hingedRigidBodyStateEffector.intSet___len__(self)
    def append(self, x):
        return _hingedRigidBodyStateEffector.intSet_append(self, x)
    def __contains__(self, x):
        return _hingedRigidBodyStateEffector.intSet___contains__(self, x)
    def __getitem__(self, i):
        return _hingedRigidBodyStateEffector.intSet___getitem__(self, i)
    def add(self, x):
        return _hingedRigidBodyStateEffector.intSet_add(self, x)
    def discard(self, x):
        return _hingedRigidBodyStateEffector.intSet_discard(self, x)
    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_intSet(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def empty(self):
        return _hingedRigidBodyStateEffector.intSet_empty(self)
    def size(self):
        return _hingedRigidBodyStateEffector.intSet_size(self)
    def clear(self):
        return _hingedRigidBodyStateEffector.intSet_clear(self)
    def swap(self, v):
        return _hingedRigidBodyStateEffector.intSet_swap(self, v)
    def count(self, x):
        return _hingedRigidBodyStateEffector.intSet_count(self, x)
    def begin(self):
        return _hingedRigidBodyStateEffector.intSet_begin(self)
    def end(self):
        return _hingedRigidBodyStateEffector.intSet_end(self)
    def rbegin(self):
        return _hingedRigidBodyStateEffector.intSet_rbegin(self)
    def rend(self):
        return _hingedRigidBodyStateEffector.intSet_rend(self)
    def erase(self, *args):
        return _hingedRigidBodyStateEffector.intSet_erase(self, *args)
    def find(self, x):
        return _hingedRigidBodyStateEffector.intSet_find(self, x)
    def lower_bound(self, x):
        return _hingedRigidBodyStateEffector.intSet_lower_bound(self, x)
    def upper_bound(self, x):
        return _hingedRigidBodyStateEffector.intSet_upper_bound(self, x)
    def equal_range(self, x):
        return _hingedRigidBodyStateEffector.intSet_equal_range(self, x)
    def insert(self, __x):
        return _hingedRigidBodyStateEffector.intSet_insert(self, __x)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_intSet
    __del__ = lambda self: None
# Register the proxy with the C extension so sets created on the C++
# side surface as intSet instances in Python.
intSet_swigregister = _hingedRigidBodyStateEffector.intSet_swigregister
intSet_swigregister(intSet)
class ConstCharVector(_object):
    """SWIG proxy for ``std::vector<const char*>``.

    Every method delegates to the compiled ``_hingedRigidBodyStateEffector``
    extension; this class only supplies the Python-side sequence protocol.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, ConstCharVector, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, ConstCharVector, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _hingedRigidBodyStateEffector.ConstCharVector___nonzero__(self)
    def __bool__(self):
        return _hingedRigidBodyStateEffector.ConstCharVector___bool__(self)
    def __len__(self):
        return _hingedRigidBodyStateEffector.ConstCharVector___len__(self)
    def __getslice__(self, i, j):
        return _hingedRigidBodyStateEffector.ConstCharVector___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _hingedRigidBodyStateEffector.ConstCharVector___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _hingedRigidBodyStateEffector.ConstCharVector___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _hingedRigidBodyStateEffector.ConstCharVector___delitem__(self, *args)
    def __getitem__(self, *args):
        return _hingedRigidBodyStateEffector.ConstCharVector___getitem__(self, *args)
    def __setitem__(self, *args):
        return _hingedRigidBodyStateEffector.ConstCharVector___setitem__(self, *args)
    def pop(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_pop(self)
    def append(self, x):
        return _hingedRigidBodyStateEffector.ConstCharVector_append(self, x)
    def empty(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_empty(self)
    def size(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_size(self)
    def swap(self, v):
        return _hingedRigidBodyStateEffector.ConstCharVector_swap(self, v)
    def begin(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_begin(self)
    def end(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_end(self)
    def rbegin(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_rbegin(self)
    def rend(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_rend(self)
    def clear(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_clear(self)
    def get_allocator(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_get_allocator(self)
    def pop_back(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_pop_back(self)
    def erase(self, *args):
        return _hingedRigidBodyStateEffector.ConstCharVector_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_ConstCharVector(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x):
        return _hingedRigidBodyStateEffector.ConstCharVector_push_back(self, x)
    def front(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_front(self)
    def back(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_back(self)
    def assign(self, n, x):
        return _hingedRigidBodyStateEffector.ConstCharVector_assign(self, n, x)
    def resize(self, *args):
        return _hingedRigidBodyStateEffector.ConstCharVector_resize(self, *args)
    def insert(self, *args):
        return _hingedRigidBodyStateEffector.ConstCharVector_insert(self, *args)
    def reserve(self, n):
        return _hingedRigidBodyStateEffector.ConstCharVector_reserve(self, n)
    def capacity(self):
        return _hingedRigidBodyStateEffector.ConstCharVector_capacity(self)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_ConstCharVector
    __del__ = lambda self: None
# Register the proxy with the C extension so vectors created on the C++
# side surface as ConstCharVector instances in Python.
ConstCharVector_swigregister = _hingedRigidBodyStateEffector.ConstCharVector_swigregister
ConstCharVector_swigregister(ConstCharVector)
class MultiArray(_object):
    """SWIG proxy for a C++ vector-like container (see the SWIG interface
    file for the exact element type — not visible from this wrapper).

    Every method delegates to the compiled ``_hingedRigidBodyStateEffector``
    extension; this class only supplies the Python-side sequence protocol.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, MultiArray, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, MultiArray, name)
    __repr__ = _swig_repr

    def iterator(self):
        return _hingedRigidBodyStateEffector.MultiArray_iterator(self)
    def __iter__(self):
        return self.iterator()
    def __nonzero__(self):
        return _hingedRigidBodyStateEffector.MultiArray___nonzero__(self)
    def __bool__(self):
        return _hingedRigidBodyStateEffector.MultiArray___bool__(self)
    def __len__(self):
        return _hingedRigidBodyStateEffector.MultiArray___len__(self)
    def __getslice__(self, i, j):
        return _hingedRigidBodyStateEffector.MultiArray___getslice__(self, i, j)
    def __setslice__(self, *args):
        return _hingedRigidBodyStateEffector.MultiArray___setslice__(self, *args)
    def __delslice__(self, i, j):
        return _hingedRigidBodyStateEffector.MultiArray___delslice__(self, i, j)
    def __delitem__(self, *args):
        return _hingedRigidBodyStateEffector.MultiArray___delitem__(self, *args)
    def __getitem__(self, *args):
        return _hingedRigidBodyStateEffector.MultiArray___getitem__(self, *args)
    def __setitem__(self, *args):
        return _hingedRigidBodyStateEffector.MultiArray___setitem__(self, *args)
    def pop(self):
        return _hingedRigidBodyStateEffector.MultiArray_pop(self)
    def append(self, x):
        return _hingedRigidBodyStateEffector.MultiArray_append(self, x)
    def empty(self):
        return _hingedRigidBodyStateEffector.MultiArray_empty(self)
    def size(self):
        return _hingedRigidBodyStateEffector.MultiArray_size(self)
    def swap(self, v):
        return _hingedRigidBodyStateEffector.MultiArray_swap(self, v)
    def begin(self):
        return _hingedRigidBodyStateEffector.MultiArray_begin(self)
    def end(self):
        return _hingedRigidBodyStateEffector.MultiArray_end(self)
    def rbegin(self):
        return _hingedRigidBodyStateEffector.MultiArray_rbegin(self)
    def rend(self):
        return _hingedRigidBodyStateEffector.MultiArray_rend(self)
    def clear(self):
        return _hingedRigidBodyStateEffector.MultiArray_clear(self)
    def get_allocator(self):
        return _hingedRigidBodyStateEffector.MultiArray_get_allocator(self)
    def pop_back(self):
        return _hingedRigidBodyStateEffector.MultiArray_pop_back(self)
    def erase(self, *args):
        return _hingedRigidBodyStateEffector.MultiArray_erase(self, *args)
    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_MultiArray(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def push_back(self, x):
        return _hingedRigidBodyStateEffector.MultiArray_push_back(self, x)
    def front(self):
        return _hingedRigidBodyStateEffector.MultiArray_front(self)
    def back(self):
        return _hingedRigidBodyStateEffector.MultiArray_back(self)
    def assign(self, n, x):
        return _hingedRigidBodyStateEffector.MultiArray_assign(self, n, x)
    def resize(self, *args):
        return _hingedRigidBodyStateEffector.MultiArray_resize(self, *args)
    def insert(self, *args):
        return _hingedRigidBodyStateEffector.MultiArray_insert(self, *args)
    def reserve(self, n):
        return _hingedRigidBodyStateEffector.MultiArray_reserve(self, n)
    def capacity(self):
        return _hingedRigidBodyStateEffector.MultiArray_capacity(self)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_MultiArray
    __del__ = lambda self: None
# Register the proxy with the C extension so containers created on the
# C++ side surface as MultiArray instances in Python.
MultiArray_swigregister = _hingedRigidBodyStateEffector.MultiArray_swigregister
MultiArray_swigregister(MultiArray)
class SysModel(_object):
    """SWIG proxy for the C++ ``SysModel`` base class.

    Exposes the simulation lifecycle hooks (SelfInit/CrossInit/
    IntegratedInit/UpdateState/Reset) and the bookkeeping fields
    (ModelTag, CallCounts, RNGSeed, moduleID) of the wrapped object.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SysModel, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SysModel, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_SysModel(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_SysModel
    __del__ = lambda self: None

    def SelfInit(self):
        return _hingedRigidBodyStateEffector.SysModel_SelfInit(self)
    def CrossInit(self):
        return _hingedRigidBodyStateEffector.SysModel_CrossInit(self)
    def IntegratedInit(self):
        return _hingedRigidBodyStateEffector.SysModel_IntegratedInit(self)
    def UpdateState(self, CurrentSimNanos):
        return _hingedRigidBodyStateEffector.SysModel_UpdateState(self, CurrentSimNanos)
    def Reset(self, CurrentSimNanos):
        return _hingedRigidBodyStateEffector.SysModel_Reset(self, CurrentSimNanos)

    # Wrapped C++ data members, exposed as properties on new-style classes.
    __swig_setmethods__["ModelTag"] = _hingedRigidBodyStateEffector.SysModel_ModelTag_set
    __swig_getmethods__["ModelTag"] = _hingedRigidBodyStateEffector.SysModel_ModelTag_get
    if _newclass:
        ModelTag = _swig_property(_hingedRigidBodyStateEffector.SysModel_ModelTag_get, _hingedRigidBodyStateEffector.SysModel_ModelTag_set)
    __swig_setmethods__["CallCounts"] = _hingedRigidBodyStateEffector.SysModel_CallCounts_set
    __swig_getmethods__["CallCounts"] = _hingedRigidBodyStateEffector.SysModel_CallCounts_get
    if _newclass:
        CallCounts = _swig_property(_hingedRigidBodyStateEffector.SysModel_CallCounts_get, _hingedRigidBodyStateEffector.SysModel_CallCounts_set)
    __swig_setmethods__["RNGSeed"] = _hingedRigidBodyStateEffector.SysModel_RNGSeed_set
    __swig_getmethods__["RNGSeed"] = _hingedRigidBodyStateEffector.SysModel_RNGSeed_get
    if _newclass:
        RNGSeed = _swig_property(_hingedRigidBodyStateEffector.SysModel_RNGSeed_get, _hingedRigidBodyStateEffector.SysModel_RNGSeed_set)
    __swig_setmethods__["moduleID"] = _hingedRigidBodyStateEffector.SysModel_moduleID_set
    __swig_getmethods__["moduleID"] = _hingedRigidBodyStateEffector.SysModel_moduleID_get
    if _newclass:
        moduleID = _swig_property(_hingedRigidBodyStateEffector.SysModel_moduleID_get, _hingedRigidBodyStateEffector.SysModel_moduleID_set)
# Register the proxy with the C extension so SysModel objects created on
# the C++ side surface as SysModel instances in Python.
SysModel_swigregister = _hingedRigidBodyStateEffector.SysModel_swigregister
SysModel_swigregister(SysModel)
class StateData(_object):
    """SWIG proxy for the C++ ``StateData`` class.

    Wraps a named state with its derivative and enable flag, plus the
    accessor/propagation methods the dynamics integrator uses.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StateData, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StateData, name)
    __repr__ = _swig_repr

    # Wrapped C++ data members, exposed as properties on new-style classes.
    __swig_setmethods__["state"] = _hingedRigidBodyStateEffector.StateData_state_set
    __swig_getmethods__["state"] = _hingedRigidBodyStateEffector.StateData_state_get
    if _newclass:
        state = _swig_property(_hingedRigidBodyStateEffector.StateData_state_get, _hingedRigidBodyStateEffector.StateData_state_set)
    __swig_setmethods__["stateDeriv"] = _hingedRigidBodyStateEffector.StateData_stateDeriv_set
    __swig_getmethods__["stateDeriv"] = _hingedRigidBodyStateEffector.StateData_stateDeriv_get
    if _newclass:
        stateDeriv = _swig_property(_hingedRigidBodyStateEffector.StateData_stateDeriv_get, _hingedRigidBodyStateEffector.StateData_stateDeriv_set)
    __swig_setmethods__["stateName"] = _hingedRigidBodyStateEffector.StateData_stateName_set
    __swig_getmethods__["stateName"] = _hingedRigidBodyStateEffector.StateData_stateName_get
    if _newclass:
        stateName = _swig_property(_hingedRigidBodyStateEffector.StateData_stateName_get, _hingedRigidBodyStateEffector.StateData_stateName_set)
    __swig_setmethods__["stateEnabled"] = _hingedRigidBodyStateEffector.StateData_stateEnabled_set
    __swig_getmethods__["stateEnabled"] = _hingedRigidBodyStateEffector.StateData_stateEnabled_get
    if _newclass:
        stateEnabled = _swig_property(_hingedRigidBodyStateEffector.StateData_stateEnabled_get, _hingedRigidBodyStateEffector.StateData_stateEnabled_set)

    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_StateData(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_StateData
    __del__ = lambda self: None

    def setState(self, newState):
        return _hingedRigidBodyStateEffector.StateData_setState(self, newState)
    def propagateState(self, dt):
        return _hingedRigidBodyStateEffector.StateData_propagateState(self, dt)
    def setDerivative(self, newDeriv):
        return _hingedRigidBodyStateEffector.StateData_setDerivative(self, newDeriv)
    def getState(self):
        return _hingedRigidBodyStateEffector.StateData_getState(self)
    def getStateDeriv(self):
        return _hingedRigidBodyStateEffector.StateData_getStateDeriv(self)
    def getName(self):
        return _hingedRigidBodyStateEffector.StateData_getName(self)
    def getRowSize(self):
        return _hingedRigidBodyStateEffector.StateData_getRowSize(self)
    def getColumnSize(self):
        return _hingedRigidBodyStateEffector.StateData_getColumnSize(self)
    def isStateActive(self):
        return _hingedRigidBodyStateEffector.StateData_isStateActive(self)
    def disable(self):
        return _hingedRigidBodyStateEffector.StateData_disable(self)
    def enable(self):
        return _hingedRigidBodyStateEffector.StateData_enable(self)
    def scaleState(self, scaleFactor):
        return _hingedRigidBodyStateEffector.StateData_scaleState(self, scaleFactor)
    def __add__(self, operand):
        return _hingedRigidBodyStateEffector.StateData___add__(self, operand)
    def __mul__(self, scaleFactor):
        return _hingedRigidBodyStateEffector.StateData___mul__(self, scaleFactor)
# Register the proxy with the C extension so StateData objects created on
# the C++ side surface as StateData instances in Python.
StateData_swigregister = _hingedRigidBodyStateEffector.StateData_swigregister
StateData_swigregister(StateData)
class BackSubMatrices(_object):
    """SWIG proxy for the C++ ``BackSubMatrices`` struct.

    Plain data holder for the back-substitution contribution matrices
    (matrixA..matrixD) and vectors (vecTrans, vecRot) used by state
    effectors.
    """
    # Attribute access is routed through SWIG's set/get method tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, BackSubMatrices, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, BackSubMatrices, name)
    __repr__ = _swig_repr

    # Wrapped C++ data members, exposed as properties on new-style classes.
    __swig_setmethods__["matrixA"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixA_set
    __swig_getmethods__["matrixA"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixA_get
    if _newclass:
        matrixA = _swig_property(_hingedRigidBodyStateEffector.BackSubMatrices_matrixA_get, _hingedRigidBodyStateEffector.BackSubMatrices_matrixA_set)
    __swig_setmethods__["matrixB"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixB_set
    __swig_getmethods__["matrixB"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixB_get
    if _newclass:
        matrixB = _swig_property(_hingedRigidBodyStateEffector.BackSubMatrices_matrixB_get, _hingedRigidBodyStateEffector.BackSubMatrices_matrixB_set)
    __swig_setmethods__["matrixC"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixC_set
    __swig_getmethods__["matrixC"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixC_get
    if _newclass:
        matrixC = _swig_property(_hingedRigidBodyStateEffector.BackSubMatrices_matrixC_get, _hingedRigidBodyStateEffector.BackSubMatrices_matrixC_set)
    __swig_setmethods__["matrixD"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixD_set
    __swig_getmethods__["matrixD"] = _hingedRigidBodyStateEffector.BackSubMatrices_matrixD_get
    if _newclass:
        matrixD = _swig_property(_hingedRigidBodyStateEffector.BackSubMatrices_matrixD_get, _hingedRigidBodyStateEffector.BackSubMatrices_matrixD_set)
    __swig_setmethods__["vecTrans"] = _hingedRigidBodyStateEffector.BackSubMatrices_vecTrans_set
    __swig_getmethods__["vecTrans"] = _hingedRigidBodyStateEffector.BackSubMatrices_vecTrans_get
    if _newclass:
        vecTrans = _swig_property(_hingedRigidBodyStateEffector.BackSubMatrices_vecTrans_get, _hingedRigidBodyStateEffector.BackSubMatrices_vecTrans_set)
    __swig_setmethods__["vecRot"] = _hingedRigidBodyStateEffector.BackSubMatrices_vecRot_set
    __swig_getmethods__["vecRot"] = _hingedRigidBodyStateEffector.BackSubMatrices_vecRot_get
    if _newclass:
        vecRot = _swig_property(_hingedRigidBodyStateEffector.BackSubMatrices_vecRot_get, _hingedRigidBodyStateEffector.BackSubMatrices_vecRot_set)

    def __init__(self, *args):
        # Standard SWIG constructor boilerplate: keep the C++ object
        # pointer on self.this.
        this = _hingedRigidBodyStateEffector.new_BackSubMatrices(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_BackSubMatrices
    __del__ = lambda self: None
# Register the proxy with the C extension so structs created on the C++
# side surface as BackSubMatrices instances in Python.
BackSubMatrices_swigregister = _hingedRigidBodyStateEffector.BackSubMatrices_swigregister
BackSubMatrices_swigregister(BackSubMatrices)
class EffectorMassProps(_object):
    """SWIG-generated proxy for the C++ ``EffectorMassProps`` structure.

    Exposes the effector mass-property fields (mass, inertia, and
    center-of-mass terms; the ``_B`` suffix presumably denotes the
    spacecraft body frame per Basilisk naming -- confirm in the C++
    header) as Python attributes backed by C-extension get/set functions.
    """
    # Standard SWIG attribute plumbing: per-class get/set tables consulted
    # by _swig_setattr/_swig_getattr when attributes are accessed.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, EffectorMassProps, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, EffectorMassProps, name)
    __repr__ = _swig_repr
    # Each field below follows the same generated pattern: register the C
    # accessors, then (on new-style classes) expose them as a property.
    __swig_setmethods__["mEff"] = _hingedRigidBodyStateEffector.EffectorMassProps_mEff_set
    __swig_getmethods__["mEff"] = _hingedRigidBodyStateEffector.EffectorMassProps_mEff_get
    if _newclass:
        mEff = _swig_property(_hingedRigidBodyStateEffector.EffectorMassProps_mEff_get, _hingedRigidBodyStateEffector.EffectorMassProps_mEff_set)
    __swig_setmethods__["mEffDot"] = _hingedRigidBodyStateEffector.EffectorMassProps_mEffDot_set
    __swig_getmethods__["mEffDot"] = _hingedRigidBodyStateEffector.EffectorMassProps_mEffDot_get
    if _newclass:
        mEffDot = _swig_property(_hingedRigidBodyStateEffector.EffectorMassProps_mEffDot_get, _hingedRigidBodyStateEffector.EffectorMassProps_mEffDot_set)
    __swig_setmethods__["IEffPntB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_IEffPntB_B_set
    __swig_getmethods__["IEffPntB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_IEffPntB_B_get
    if _newclass:
        IEffPntB_B = _swig_property(_hingedRigidBodyStateEffector.EffectorMassProps_IEffPntB_B_get, _hingedRigidBodyStateEffector.EffectorMassProps_IEffPntB_B_set)
    __swig_setmethods__["rEff_CB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_rEff_CB_B_set
    __swig_getmethods__["rEff_CB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_rEff_CB_B_get
    if _newclass:
        rEff_CB_B = _swig_property(_hingedRigidBodyStateEffector.EffectorMassProps_rEff_CB_B_get, _hingedRigidBodyStateEffector.EffectorMassProps_rEff_CB_B_set)
    __swig_setmethods__["rEffPrime_CB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_rEffPrime_CB_B_set
    __swig_getmethods__["rEffPrime_CB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_rEffPrime_CB_B_get
    if _newclass:
        rEffPrime_CB_B = _swig_property(_hingedRigidBodyStateEffector.EffectorMassProps_rEffPrime_CB_B_get, _hingedRigidBodyStateEffector.EffectorMassProps_rEffPrime_CB_B_set)
    __swig_setmethods__["IEffPrimePntB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_IEffPrimePntB_B_set
    __swig_getmethods__["IEffPrimePntB_B"] = _hingedRigidBodyStateEffector.EffectorMassProps_IEffPrimePntB_B_get
    if _newclass:
        IEffPrimePntB_B = _swig_property(_hingedRigidBodyStateEffector.EffectorMassProps_IEffPrimePntB_B_get, _hingedRigidBodyStateEffector.EffectorMassProps_IEffPrimePntB_B_set)
    def __init__(self, *args):
        # Allocate the underlying C++ object and attach it to this proxy
        # (append to an existing handle list if one is already present).
        this = _hingedRigidBodyStateEffector.new_EffectorMassProps(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    # Destructor hook for the wrapped C++ object; __del__ is a no-op so
    # SWIG's own ownership machinery controls the actual deletion.
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_EffectorMassProps
    __del__ = lambda self: None
# Register the proxy class with the C extension module.
EffectorMassProps_swigregister = _hingedRigidBodyStateEffector.EffectorMassProps_swigregister
EffectorMassProps_swigregister(EffectorMassProps)
class StateEffector(_object):
    """SWIG-generated proxy for the abstract C++ ``StateEffector`` base class.

    Cannot be instantiated directly (the generated ``__init__`` raises);
    concrete effectors such as ``HingedRigidBodyStateEffector`` inherit
    from it. All methods delegate to the compiled extension module.
    """
    # Standard SWIG attribute plumbing (see module preamble).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StateEffector, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StateEffector, name)
    def __init__(self, *args, **kwargs):
        # The wrapped C++ class is abstract, so SWIG blocks construction.
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    # Generated attribute accessors for the C++ data members.
    __swig_setmethods__["nameOfSpacecraftAttachedTo"] = _hingedRigidBodyStateEffector.StateEffector_nameOfSpacecraftAttachedTo_set
    __swig_getmethods__["nameOfSpacecraftAttachedTo"] = _hingedRigidBodyStateEffector.StateEffector_nameOfSpacecraftAttachedTo_get
    if _newclass:
        nameOfSpacecraftAttachedTo = _swig_property(_hingedRigidBodyStateEffector.StateEffector_nameOfSpacecraftAttachedTo_get, _hingedRigidBodyStateEffector.StateEffector_nameOfSpacecraftAttachedTo_set)
    __swig_setmethods__["effProps"] = _hingedRigidBodyStateEffector.StateEffector_effProps_set
    __swig_getmethods__["effProps"] = _hingedRigidBodyStateEffector.StateEffector_effProps_get
    if _newclass:
        effProps = _swig_property(_hingedRigidBodyStateEffector.StateEffector_effProps_get, _hingedRigidBodyStateEffector.StateEffector_effProps_set)
    __swig_setmethods__["forceOnBody_B"] = _hingedRigidBodyStateEffector.StateEffector_forceOnBody_B_set
    __swig_getmethods__["forceOnBody_B"] = _hingedRigidBodyStateEffector.StateEffector_forceOnBody_B_get
    if _newclass:
        forceOnBody_B = _swig_property(_hingedRigidBodyStateEffector.StateEffector_forceOnBody_B_get, _hingedRigidBodyStateEffector.StateEffector_forceOnBody_B_set)
    __swig_setmethods__["torqueOnBodyPntB_B"] = _hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntB_B_set
    __swig_getmethods__["torqueOnBodyPntB_B"] = _hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntB_B_get
    if _newclass:
        torqueOnBodyPntB_B = _swig_property(_hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntB_B_get, _hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntB_B_set)
    __swig_setmethods__["torqueOnBodyPntC_B"] = _hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntC_B_set
    __swig_getmethods__["torqueOnBodyPntC_B"] = _hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntC_B_get
    if _newclass:
        torqueOnBodyPntC_B = _swig_property(_hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntC_B_get, _hingedRigidBodyStateEffector.StateEffector_torqueOnBodyPntC_B_set)
    __swig_setmethods__["r_BP_P"] = _hingedRigidBodyStateEffector.StateEffector_r_BP_P_set
    __swig_getmethods__["r_BP_P"] = _hingedRigidBodyStateEffector.StateEffector_r_BP_P_get
    if _newclass:
        r_BP_P = _swig_property(_hingedRigidBodyStateEffector.StateEffector_r_BP_P_get, _hingedRigidBodyStateEffector.StateEffector_r_BP_P_set)
    __swig_setmethods__["dcm_BP"] = _hingedRigidBodyStateEffector.StateEffector_dcm_BP_set
    __swig_getmethods__["dcm_BP"] = _hingedRigidBodyStateEffector.StateEffector_dcm_BP_get
    if _newclass:
        dcm_BP = _swig_property(_hingedRigidBodyStateEffector.StateEffector_dcm_BP_get, _hingedRigidBodyStateEffector.StateEffector_dcm_BP_set)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_StateEffector
    __del__ = lambda self: None
    # The methods below are thin forwarders to the C++ virtual interface.
    def updateEffectorMassProps(self, integTime):
        return _hingedRigidBodyStateEffector.StateEffector_updateEffectorMassProps(self, integTime)
    def updateContributions(self, integTime, backSubContr, sigma_BN, omega_BN_B, g_N):
        return _hingedRigidBodyStateEffector.StateEffector_updateContributions(self, integTime, backSubContr, sigma_BN, omega_BN_B, g_N)
    def updateEnergyMomContributions(self, integTime, rotAngMomPntCContr_B, rotEnergyContr, omega_BN_B):
        return _hingedRigidBodyStateEffector.StateEffector_updateEnergyMomContributions(self, integTime, rotAngMomPntCContr_B, rotEnergyContr, omega_BN_B)
    def modifyStates(self, integTime):
        return _hingedRigidBodyStateEffector.StateEffector_modifyStates(self, integTime)
    def calcForceTorqueOnBody(self, integTime, omega_BN_B):
        return _hingedRigidBodyStateEffector.StateEffector_calcForceTorqueOnBody(self, integTime, omega_BN_B)
    def writeOutputStateMessages(self, integTimeNanos):
        return _hingedRigidBodyStateEffector.StateEffector_writeOutputStateMessages(self, integTimeNanos)
    def registerStates(self, states):
        return _hingedRigidBodyStateEffector.StateEffector_registerStates(self, states)
    def linkInStates(self, states):
        return _hingedRigidBodyStateEffector.StateEffector_linkInStates(self, states)
    def computeDerivatives(self, integTime, rDDot_BN_N, omegaDot_BN_B, sigma_BN):
        return _hingedRigidBodyStateEffector.StateEffector_computeDerivatives(self, integTime, rDDot_BN_N, omegaDot_BN_B, sigma_BN)
    def prependSpacecraftNameToStates(self):
        return _hingedRigidBodyStateEffector.StateEffector_prependSpacecraftNameToStates(self)
    def receiveMotherSpacecraftData(self, rSC_BP_P, dcmSC_BP):
        return _hingedRigidBodyStateEffector.StateEffector_receiveMotherSpacecraftData(self, rSC_BP_P, dcmSC_BP)
# Register the proxy class with the C extension module.
StateEffector_swigregister = _hingedRigidBodyStateEffector.StateEffector_swigregister
StateEffector_swigregister(StateEffector)
class StateVector(_object):
    """SWIG-generated proxy for the C++ ``StateVector`` class.

    Wraps a ``stateMap`` data member and supports ``+`` and ``*`` via the
    wrapped C++ operators (used by the integrator to combine states).
    """
    # Standard SWIG attribute plumbing (see module preamble).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, StateVector, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, StateVector, name)
    __repr__ = _swig_repr
    __swig_setmethods__["stateMap"] = _hingedRigidBodyStateEffector.StateVector_stateMap_set
    __swig_getmethods__["stateMap"] = _hingedRigidBodyStateEffector.StateVector_stateMap_get
    if _newclass:
        stateMap = _swig_property(_hingedRigidBodyStateEffector.StateVector_stateMap_get, _hingedRigidBodyStateEffector.StateVector_stateMap_set)
    # Arithmetic operators forward to the wrapped C++ implementations.
    def __add__(self, operand):
        return _hingedRigidBodyStateEffector.StateVector___add__(self, operand)
    def __mul__(self, scaleFactor):
        return _hingedRigidBodyStateEffector.StateVector___mul__(self, scaleFactor)
    def __init__(self, *args):
        # Allocate the underlying C++ object and attach it to this proxy.
        this = _hingedRigidBodyStateEffector.new_StateVector(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_StateVector
    __del__ = lambda self: None
# Register the proxy class with the C extension module.
StateVector_swigregister = _hingedRigidBodyStateEffector.StateVector_swigregister
StateVector_swigregister(StateVector)
class DynParamManager(_object):
    """SWIG-generated proxy for the C++ ``DynParamManager`` class.

    Provides access to the dynamics state container: registering states,
    looking them up by name, and creating/reading/writing named properties.
    All calls delegate to the compiled extension module.
    """
    # Standard SWIG attribute plumbing (see module preamble).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, DynParamManager, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, DynParamManager, name)
    __repr__ = _swig_repr
    __swig_setmethods__["dynProperties"] = _hingedRigidBodyStateEffector.DynParamManager_dynProperties_set
    __swig_getmethods__["dynProperties"] = _hingedRigidBodyStateEffector.DynParamManager_dynProperties_get
    if _newclass:
        dynProperties = _swig_property(_hingedRigidBodyStateEffector.DynParamManager_dynProperties_get, _hingedRigidBodyStateEffector.DynParamManager_dynProperties_set)
    __swig_setmethods__["stateContainer"] = _hingedRigidBodyStateEffector.DynParamManager_stateContainer_set
    __swig_getmethods__["stateContainer"] = _hingedRigidBodyStateEffector.DynParamManager_stateContainer_get
    if _newclass:
        stateContainer = _swig_property(_hingedRigidBodyStateEffector.DynParamManager_stateContainer_get, _hingedRigidBodyStateEffector.DynParamManager_stateContainer_set)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_DynParamManager
    __del__ = lambda self: None
    # The methods below are thin forwarders to the C++ implementation.
    def registerState(self, nRow, nCol, stateName):
        return _hingedRigidBodyStateEffector.DynParamManager_registerState(self, nRow, nCol, stateName)
    def getStateObject(self, stateName):
        return _hingedRigidBodyStateEffector.DynParamManager_getStateObject(self, stateName)
    def getStateVector(self):
        return _hingedRigidBodyStateEffector.DynParamManager_getStateVector(self)
    def updateStateVector(self, newState):
        return _hingedRigidBodyStateEffector.DynParamManager_updateStateVector(self, newState)
    def propagateStateVector(self, dt):
        return _hingedRigidBodyStateEffector.DynParamManager_propagateStateVector(self, dt)
    def createProperty(self, propName, propValue):
        return _hingedRigidBodyStateEffector.DynParamManager_createProperty(self, propName, propValue)
    def getPropertyReference(self, propName):
        return _hingedRigidBodyStateEffector.DynParamManager_getPropertyReference(self, propName)
    def setPropertyValue(self, propName, propValue):
        return _hingedRigidBodyStateEffector.DynParamManager_setPropertyValue(self, propName, propValue)
    def __init__(self, *args):
        # Allocate the underlying C++ object and attach it to this proxy.
        this = _hingedRigidBodyStateEffector.new_DynParamManager(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
# Register the proxy class with the C extension module.
DynParamManager_swigregister = _hingedRigidBodyStateEffector.DynParamManager_swigregister
DynParamManager_swigregister(DynParamManager)
class HingedRigidBodyStateEffector(StateEffector, SysModel):
    """SWIG-generated proxy for the C++ ``HingedRigidBodyStateEffector``.

    A state effector modeling a panel hinged to the spacecraft body (the
    field names suggest a spring/damper hinge with angle ``theta`` -- see
    the C++ header for the authoritative physics description). Combines
    the ``StateEffector`` dynamics interface with the ``SysModel``
    simulation-module interface.
    """
    # SWIG multiple-inheritance plumbing: merge the accessor tables of
    # both base classes before adding this class's own entries.
    __swig_setmethods__ = {}
    for _s in [StateEffector, SysModel]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, HingedRigidBodyStateEffector, name, value)
    __swig_getmethods__ = {}
    for _s in [StateEffector, SysModel]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, HingedRigidBodyStateEffector, name)
    __repr__ = _swig_repr
    # Generated attribute accessors for the C++ data members.
    __swig_setmethods__["mass"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_mass_set
    __swig_getmethods__["mass"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_mass_get
    if _newclass:
        mass = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_mass_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_mass_set)
    __swig_setmethods__["d"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_d_set
    __swig_getmethods__["d"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_d_get
    if _newclass:
        d = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_d_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_d_set)
    __swig_setmethods__["k"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_k_set
    __swig_getmethods__["k"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_k_get
    if _newclass:
        k = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_k_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_k_set)
    __swig_setmethods__["c"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_set
    __swig_getmethods__["c"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_get
    if _newclass:
        c = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_set)
    __swig_setmethods__["thetaInit"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaInit_set
    __swig_getmethods__["thetaInit"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaInit_get
    if _newclass:
        thetaInit = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaInit_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaInit_set)
    __swig_setmethods__["thetaDotInit"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaDotInit_set
    __swig_getmethods__["thetaDotInit"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaDotInit_get
    if _newclass:
        thetaDotInit = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaDotInit_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_thetaDotInit_set)
    __swig_setmethods__["nameOfThetaState"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaState_set
    __swig_getmethods__["nameOfThetaState"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaState_get
    if _newclass:
        nameOfThetaState = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaState_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaState_set)
    __swig_setmethods__["nameOfThetaDotState"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaDotState_set
    __swig_getmethods__["nameOfThetaDotState"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaDotState_get
    if _newclass:
        nameOfThetaDotState = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaDotState_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_nameOfThetaDotState_set)
    __swig_setmethods__["c_B"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_B_set
    __swig_getmethods__["c_B"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_B_get
    if _newclass:
        c_B = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_B_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_c_B_set)
    __swig_setmethods__["cPrime_B"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_cPrime_B_set
    __swig_getmethods__["cPrime_B"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_cPrime_B_get
    if _newclass:
        cPrime_B = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_cPrime_B_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_cPrime_B_set)
    __swig_setmethods__["IPntS_S"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_IPntS_S_set
    __swig_getmethods__["IPntS_S"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_IPntS_S_get
    if _newclass:
        IPntS_S = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_IPntS_S_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_IPntS_S_set)
    __swig_setmethods__["r_HB_B"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_r_HB_B_set
    __swig_getmethods__["r_HB_B"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_r_HB_B_get
    if _newclass:
        r_HB_B = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_r_HB_B_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_r_HB_B_set)
    __swig_setmethods__["dcm_HB"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_dcm_HB_set
    __swig_getmethods__["dcm_HB"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_dcm_HB_get
    if _newclass:
        dcm_HB = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_dcm_HB_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_dcm_HB_set)
    __swig_setmethods__["HingedRigidBodyOutMsgName"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HingedRigidBodyOutMsgName_set
    __swig_getmethods__["HingedRigidBodyOutMsgName"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HingedRigidBodyOutMsgName_get
    if _newclass:
        HingedRigidBodyOutMsgName = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HingedRigidBodyOutMsgName_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HingedRigidBodyOutMsgName_set)
    __swig_setmethods__["HRBoutputStates"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HRBoutputStates_set
    __swig_getmethods__["HRBoutputStates"] = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HRBoutputStates_get
    if _newclass:
        HRBoutputStates = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HRBoutputStates_get, _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_HRBoutputStates_set)
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_HingedRigidBodyStateEffector
    __del__ = lambda self: None
    # The methods below are thin forwarders to the C++ implementation,
    # overriding the corresponding StateEffector/SysModel virtuals.
    def SelfInit(self):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_SelfInit(self)
    def CrossInit(self):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_CrossInit(self)
    def writeOutputStateMessages(self, CurrentClock):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_writeOutputStateMessages(self, CurrentClock)
    def UpdateState(self, CurrentSimNanos):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_UpdateState(self, CurrentSimNanos)
    def registerStates(self, statesIn):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_registerStates(self, statesIn)
    def linkInStates(self, states):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_linkInStates(self, states)
    def updateContributions(self, integTime, backSubContr, sigma_BN, omega_BN_B, g_N):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_updateContributions(self, integTime, backSubContr, sigma_BN, omega_BN_B, g_N)
    def computeDerivatives(self, integTime, rDDot_BN_N, omegaDot_BN_B, sigma_BN):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_computeDerivatives(self, integTime, rDDot_BN_N, omegaDot_BN_B, sigma_BN)
    def updateEffectorMassProps(self, integTime):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_updateEffectorMassProps(self, integTime)
    def updateEnergyMomContributions(self, integTime, rotAngMomPntCContr_B, rotEnergyContr, omega_BN_B):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_updateEnergyMomContributions(self, integTime, rotAngMomPntCContr_B, rotEnergyContr, omega_BN_B)
    def calcForceTorqueOnBody(self, integTime, omega_BN_B):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_calcForceTorqueOnBody(self, integTime, omega_BN_B)
    def prependSpacecraftNameToStates(self):
        return _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_prependSpacecraftNameToStates(self)
    def __init__(self, *args):
        # Allocate the underlying C++ object and attach it to this proxy.
        this = _hingedRigidBodyStateEffector.new_HingedRigidBodyStateEffector(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
# Register the proxy class with the C extension module.
HingedRigidBodyStateEffector_swigregister = _hingedRigidBodyStateEffector.HingedRigidBodyStateEffector_swigregister
HingedRigidBodyStateEffector_swigregister(HingedRigidBodyStateEffector)
class HingedRigidBodySimMsg(_object):
    """SWIG-generated proxy for the C ``HingedRigidBodySimMsg`` struct.

    Output message carrying the hinge state: ``theta`` and ``thetaDot``
    (angle and its rate, per the field names -- units not shown here).
    """
    # Standard SWIG attribute plumbing (see module preamble).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, HingedRigidBodySimMsg, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, HingedRigidBodySimMsg, name)
    __repr__ = _swig_repr
    __swig_setmethods__["theta"] = _hingedRigidBodyStateEffector.HingedRigidBodySimMsg_theta_set
    __swig_getmethods__["theta"] = _hingedRigidBodyStateEffector.HingedRigidBodySimMsg_theta_get
    if _newclass:
        theta = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodySimMsg_theta_get, _hingedRigidBodyStateEffector.HingedRigidBodySimMsg_theta_set)
    __swig_setmethods__["thetaDot"] = _hingedRigidBodyStateEffector.HingedRigidBodySimMsg_thetaDot_set
    __swig_getmethods__["thetaDot"] = _hingedRigidBodyStateEffector.HingedRigidBodySimMsg_thetaDot_get
    if _newclass:
        thetaDot = _swig_property(_hingedRigidBodyStateEffector.HingedRigidBodySimMsg_thetaDot_get, _hingedRigidBodyStateEffector.HingedRigidBodySimMsg_thetaDot_set)
    def __init__(self, *args):
        # Allocate the underlying C struct and attach it to this proxy.
        this = _hingedRigidBodyStateEffector.new_HingedRigidBodySimMsg(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _hingedRigidBodyStateEffector.delete_HingedRigidBodySimMsg
    __del__ = lambda self: None
# Register the proxy class with the C extension module.
HingedRigidBodySimMsg_swigregister = _hingedRigidBodyStateEffector.HingedRigidBodySimMsg_swigregister
HingedRigidBodySimMsg_swigregister(HingedRigidBodySimMsg)
# Byte size of the wrapped C HingedRigidBodySimMsg struct, as reported
# by the extension module (useful for raw message-buffer handling).
sizeof_HingedRigidBodySimMsg = _hingedRigidBodyStateEffector.sizeof_HingedRigidBodySimMsg
import sys
# protectAllClasses is presumably provided by the module's SWIG support
# preamble (not visible in this chunk); applied to every class here.
protectAllClasses(sys.modules[__name__])
# This file is compatible with both classic and new-style classes.
| StarcoderdataPython |
1732252 | <gh_stars>0
import display_page
import movie
# Build the catalog of animated movies and render it as an HTML page.
# Each record holds, in order, the positional arguments expected by
# movie.Movie: title, storyline, release year, studio, distributor,
# director, award, poster-image URL, and YouTube trailer URL.
_MOVIE_RECORDS = [
    ("Cars",
     "Story about live cars",
     "2006",
     " Pixar Animation Studios",
     "Walt Disney Pictures",
     " <NAME>",
     " Golden Globe Award for Best Animated Feature Film",
     "https://upload.wikimedia.org/wikipedia/en/3/34/Cars_2006.jpg",
     "https://www.youtube.com/watch?v=WGByijP0Leo"),
    ("Ratatouille",
     "Anybody can cook",
     "2007",
     "Pixar Animation Studios",
     "Walt Disney Pictures",
     "<NAME>",
     " Academy Award for Best Animated Feature",
     "https://upload.wikimedia.org/wikipedia/en/5/50/RatatouillePoster.jpg",
     "https://www.youtube.com/watch?v=c3sBBRxDAqk"),
    ("Tangled",
     "Girl with long golden hairs",
     "2010",
     "Walt Disney Animation Studios",
     "Walt Disney Pictures",
     "<NAME> & <NAME>",
     " Best Original Song at the 83rd Academy Awards",
     "https://upload.wikimedia.org/wikipedia/en/a/a8/Tangled_poster.jpg",
     "https://www.youtube.com/watch?v=pyOyBVXDJ9Q"),
    ("Brave",
     "Girl with lot of courage",
     "2012",
     "Pixar Animation Studios",
     "Walt Disney Pictures",
     "<NAME> and <NAME>",
     "Academy Award,the Golden Globe,and the BAFTA Award for Best Animated Feature Film.",
     "https://upload.wikimedia.org/wikipedia/en/9/96/Brave_Poster.jpg",
     "https://www.youtube.com/watch?v=6CKcqIahedc"),
]

# Instantiate the Movie objects, keeping the original module-level names.
cars, ratatouille, tangled, brave = [movie.Movie(*record) for record in _MOVIE_RECORDS]

movies_list = [cars, ratatouille, tangled, brave]
display_page.open_movies_page(movies_list)
| StarcoderdataPython |
3320941 | <reponame>osoco/better-ways-of-thinking-about-software
"""Entitlement Models"""
import logging
import uuid as uuid_tools
from datetime import timedelta
from django.conf import settings
from django.contrib.sites.models import Site
from django.db import IntegrityError, models, transaction
from django.utils.timezone import now
from model_utils import Choices
from model_utils.models import TimeStampedModel
from simple_history.models import HistoricalRecords
from common.djangoapps.course_modes.models import CourseMode
from common.djangoapps.entitlements.utils import is_course_run_entitlement_fulfillable
from common.djangoapps.student.models import CourseEnrollment, CourseEnrollmentException
from common.djangoapps.util.date_utils import strftime_localized
from lms.djangoapps.certificates import api as certificates_api
from lms.djangoapps.certificates.data import CertificateStatuses
from lms.djangoapps.commerce.utils import refund_entitlement
from openedx.core.djangoapps.catalog.utils import get_course_uuid_for_course
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
log = logging.getLogger("common.entitlements.models")
class CourseEntitlementPolicy(models.Model):
    """
    Represents the Entitlement's policy for expiration, refunds, and regaining a used certificate

    .. no_pii:
    """
    DEFAULT_EXPIRATION_PERIOD_DAYS = 730
    DEFAULT_REFUND_PERIOD_DAYS = 60
    DEFAULT_REGAIN_PERIOD_DAYS = 14
    MODES = Choices((None, '---------'), CourseMode.VERIFIED, CourseMode.PROFESSIONAL)

    # DurationFields yield timedelta values, which compose directly with datetimes.
    expiration_period = models.DurationField(
        default=timedelta(days=DEFAULT_EXPIRATION_PERIOD_DAYS),
        help_text="Duration in days from when an entitlement is created until when it is expired.",
        null=False
    )
    refund_period = models.DurationField(
        default=timedelta(days=DEFAULT_REFUND_PERIOD_DAYS),
        help_text="Duration in days from when an entitlement is created until when it is no longer refundable",
        null=False
    )
    regain_period = models.DurationField(
        default=timedelta(days=DEFAULT_REGAIN_PERIOD_DAYS),
        help_text=("Duration in days from when an entitlement is redeemed for a course run until "
                   "it is no longer able to be regained by a user."),
        null=False
    )
    site = models.ForeignKey(Site, null=True, on_delete=models.CASCADE)
    mode = models.CharField(max_length=32, choices=MODES, null=True)

    def get_days_until_expiration(self, entitlement):
        """
        Return the integer number of days until ``entitlement`` expires.

        Once the entitlement has been redeemed for a course run, the regain
        window may shorten the remaining time.
        """
        current_time = now()
        days_until_expiry = ((entitlement.created + self.expiration_period) - current_time).days

        enrollment = entitlement.enrollment_course_run
        if not enrollment:
            return days_until_expiry

        course_overview = CourseOverview.get_from_id(enrollment.course_id)
        # The regain clock starts at the most recent of: course start,
        # enrollment, or entitlement creation -- i.e. the smallest
        # elapsed-days value counts against the regain period.
        smallest_elapsed_days = min(
            (current_time - course_overview.start).days,
            (current_time - enrollment.created).days,
            (current_time - entitlement.created).days,
        )
        days_until_regain_ends = self.regain_period.days - smallest_elapsed_days

        # Whichever deadline arrives first is the effective one.
        return min(days_until_expiry, days_until_regain_ends)

    def is_entitlement_regainable(self, entitlement):
        """
        Return True if the user can still leave the redeemed course run and
        regain this entitlement: within regain_period days of the later of
        course start or redemption, and before the expiration period ends.
        """
        if entitlement.expired_at:
            return False
        if not entitlement.enrollment_course_run:
            return False

        certificate = certificates_api.get_certificate_for_user_id(
            entitlement.user,
            entitlement.enrollment_course_run.course_id
        )
        if certificate and not CertificateStatuses.is_refundable_status(certificate.status):
            return False

        # >= rather than >: zero days remaining means the expiration day has
        # not fully elapsed, so regaining is still possible today.
        return self.get_days_until_expiration(entitlement) >= 0

    def is_entitlement_refundable(self, entitlement):
        """
        Return True if the entitlement can still be refunded.

        An unredeemed entitlement is refundable while the refund period
        lasts; a redeemed one only while it is still regainable.
        """
        not_refundable = (
            entitlement.expired_at
            or entitlement.order_number is None
            # Strictly greater: exactly refund_period elapsed days closes the window.
            or entitlement.get_days_since_created() > self.refund_period.days
        )
        if not_refundable:
            return False
        if entitlement.enrollment_course_run:
            return self.is_entitlement_regainable(entitlement)
        return True

    def is_entitlement_redeemable(self, entitlement):
        """
        Return True if the entitlement is unexpired, unredeemed, and still
        within the expiration period.
        """
        if entitlement.expired_at or entitlement.enrollment_course_run:
            return False
        # Strictly less: exactly expiration_period elapsed days expires it.
        return entitlement.get_days_since_created() < self.expiration_period.days

    def __str__(self):
        return (
            f'Course Entitlement Policy: expiration_period: {self.expiration_period}, '
            f'refund_period: {self.refund_period}, regain_period: {self.regain_period}, '
            f'mode: {self.mode}'
        )
class CourseEntitlement(TimeStampedModel):
"""
Represents a Student's Entitlement to a Course Run for a given Course.
.. no_pii:
"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
uuid = models.UUIDField(default=uuid_tools.uuid4, editable=False, unique=True)
course_uuid = models.UUIDField(help_text='UUID for the Course, not the Course Run')
expired_at = models.DateTimeField(
null=True,
help_text='The date that an entitlement expired, if NULL the entitlement has not expired.',
blank=True
)
mode = models.CharField(max_length=100, help_text='The mode of the Course that will be applied on enroll.')
enrollment_course_run = models.ForeignKey(
'student.CourseEnrollment',
null=True,
help_text='The current Course enrollment for this entitlement. If NULL the Learner has not enrolled.',
blank=True,
on_delete=models.CASCADE,
)
order_number = models.CharField(max_length=128, default=None, null=True)
refund_locked = models.BooleanField(default=False)
_policy = models.ForeignKey(CourseEntitlementPolicy, null=True, blank=True, on_delete=models.CASCADE)
history = HistoricalRecords()
class Meta:
unique_together = ('course_uuid', 'order_number')
@property
def expired_at_datetime(self):
"""
Getter to be used instead of expired_at because of the conditional check and update
"""
self.update_expired_at()
return self.expired_at
@expired_at_datetime.setter
def expired_at_datetime(self, value):
"""
Setter to be used instead for expired_at for consistency
"""
self.expired_at = value
@property
def policy(self):
"""
Getter to be used instead of _policy because of the null object pattern
"""
return self._policy or CourseEntitlementPolicy()
@policy.setter
def policy(self, value):
"""
Setter to be used instead of _policy because of the null object pattern
"""
self._policy = value
def get_days_since_created(self):
"""
Returns an integer of number of days since the entitlement has been created
"""
return (now() - self.created).days
def update_expired_at(self):
"""
Updates the expired_at attribute if it is not set AND it is expired according to the entitlement's policy,
OR if the policy can no longer be regained AND the policy has been redeemed
"""
if not self.expired_at:
if (self.policy.get_days_until_expiration(self) < 0 or
(self.enrollment_course_run and not self.is_entitlement_regainable())):
self.expire_entitlement()
def get_days_until_expiration(self):
"""
Returns an integer of number of days until the entitlement expires based on the entitlement's policy
"""
return self.policy.get_days_until_expiration(self)
def is_entitlement_regainable(self):
"""
Returns a boolean as to whether or not the entitlement can be regained based on the entitlement's policy
"""
return self.policy.is_entitlement_regainable(self)
def is_entitlement_refundable(self):
"""
Returns a boolean as to whether or not the entitlement can be refunded based on the entitlement's policy
"""
return not self.refund_locked and self.policy.is_entitlement_refundable(self)
def is_entitlement_redeemable(self):
"""
Returns a boolean as to whether or not the entitlement can be redeemed based on the entitlement's policy
"""
return self.policy.is_entitlement_redeemable(self)
def to_dict(self):
"""
Convert entitlement to dictionary representation including relevant policy information.
Returns:
The entitlement UUID
The associated course's UUID
The date at which the entitlement expired. None if it is still active.
The localized string representing the date at which the entitlement expires.
"""
expiration_date = None
if self.get_days_until_expiration() < settings.ENTITLEMENT_EXPIRED_ALERT_PERIOD:
expiration_date = strftime_localized(
now() + timedelta(days=self.get_days_until_expiration()),
'SHORT_DATE'
)
expired_at = strftime_localized(self.expired_at_datetime, 'SHORT_DATE') if self.expired_at_datetime else None
return {
'uuid': str(self.uuid),
'course_uuid': str(self.course_uuid),
'expired_at': expired_at,
'expiration_date': expiration_date
}
def set_enrollment(self, enrollment):
"""
Fulfills an entitlement by specifying a session.
"""
self.enrollment_course_run = enrollment
self.save()
def expire_entitlement(self):
"""
Expire the entitlement.
"""
self.expired_at = now()
self.save()
    @classmethod
    def unexpired_entitlements_for_user(cls, user):
        """Return a queryset of *user*'s entitlements that have not expired.

        Only ``expired_at`` is checked here; whether a session is selected is
        not considered.
        """
        return cls.objects.filter(user=user, expired_at=None).select_related('user')
    @classmethod
    def get_entitlement_if_active(cls, user, course_uuid):
        """
        Retrieves the active entitlement for the course_uuid and User.

        An active entitlement is defined as an entitlement that has not yet expired or has a currently enrolled session.
        If there is more than one entitlement, return the most recently created active entitlement.

        Arguments:
            user: User that owns the Course Entitlement
            course_uuid: The Course UUID for a Course that we are retrieving active entitlements for.

        Returns:
            CourseEntitlement: Returns the most recently created entitlement for a given course uuid if an
                active entitlement exists, otherwise returns None
        """
        try:
            # exclude() ANDs its conditions: only rows that are BOTH expired
            # AND have no enrolled session are dropped, i.e. inactive ones.
            return cls.objects.filter(
                user=user,
                course_uuid=course_uuid
            ).exclude(
                expired_at__isnull=False,
                enrollment_course_run=None
            ).latest('created')
        except CourseEntitlement.DoesNotExist:
            # latest() raises DoesNotExist when the filtered queryset is empty.
            return None
@classmethod
def get_active_entitlements_for_user(cls, user):
"""
Returns a list of active (enrolled or not yet expired) entitlements.
Returns any entitlements that are:
1) Not expired and no session selected
2) Not expired and a session is selected
3) Expired and a session is selected
Does not return any entitlements that are:
1) Expired and no session selected
"""
return cls.objects.filter(user=user).exclude(
expired_at__isnull=False,
enrollment_course_run=None
).select_related('user').select_related('enrollment_course_run')
    @classmethod
    def get_fulfillable_entitlements(cls, user):
        """
        Returns all fulfillable entitlements for a User

        Arguments:
            user (User): The user we are looking at the entitlements of.

        Returns
            Queryset: A queryset of course Entitlements ordered descending by creation date that a user can enroll in.
            These must not be expired and not have a course run already assigned to it.

        NOTE(review): the query does not quite match the docstring above —
        exclude() ANDs its conditions, so only entitlements that are BOTH
        expired AND already fulfilled are dropped; expired-but-unfulfilled and
        unexpired-but-fulfilled rows are still returned. Callers re-check
        redeemability (see get_fulfillable_entitlement_for_user_course_run),
        but confirm whether the looser filter is intentional.
        """
        return cls.objects.filter(
            user=user,
        ).exclude(
            expired_at__isnull=False,
            enrollment_course_run__isnull=False
        ).order_by('-created')
    @classmethod
    def get_fulfillable_entitlement_for_user_course_run(cls, user, course_run_key):
        """
        Retrieves a fulfillable entitlement for the user and the given course run.

        Arguments:
            user (User): The user that we are inspecting the entitlements for.
            course_run_key (CourseKey): The course run Key.

        Returns:
            CourseEntitlement: The most recent fulfillable CourseEntitlement, None otherwise.
        """
        # Check if the User has any fulfillable entitlements.
        # Note: Wait to retrieve the Course UUID until we have confirmed the User has fulfillable entitlements.
        # This was done to avoid calling the APIs when the User does not have an entitlement.
        entitlements = cls.get_fulfillable_entitlements(user)
        if entitlements:
            course_uuid = get_course_uuid_for_course(course_run_key)
            if course_uuid:
                # Queryset is ordered by '-created', so first() is the newest match.
                entitlement = entitlements.filter(course_uuid=course_uuid).first()
                # Require: course run actually fulfillable for this entitlement,
                # and the entitlement itself still redeemable per its policy.
                if (entitlement and is_course_run_entitlement_fulfillable(
                        course_run_key=course_run_key, entitlement=entitlement) and
                        entitlement.is_entitlement_redeemable()):
                    return entitlement
        return None
@classmethod
@transaction.atomic
def enroll_user_and_fulfill_entitlement(cls, entitlement, course_run_key):
"""
Enrolls the user in the Course Run and updates the entitlement with the new Enrollment.
Returns:
bool: True if successfully fulfills given entitlement by enrolling the user in the given course run.
"""
try:
enrollment = CourseEnrollment.enroll(
user=entitlement.user,
course_key=course_run_key,
mode=entitlement.mode
)
except CourseEnrollmentException:
log.exception(f'Login for Course Entitlement {entitlement.uuid} failed')
return False
entitlement.set_enrollment(enrollment)
return True
@classmethod
def check_for_existing_entitlement_and_enroll(cls, user, course_run_key):
"""
Looks at the User's existing entitlements to see if the user already has a Course Entitlement for the
course run provided in the course_key. If the user does have an Entitlement with no run set, the User is
enrolled in the mode set in the Entitlement.
Arguments:
user (User): The user that we are inspecting the entitlements for.
course_run_key (CourseKey): The course run Key.
Returns:
bool: True if the user had an eligible course entitlement to which an enrollment in the
given course run was applied.
"""
entitlement = cls.get_fulfillable_entitlement_for_user_course_run(user, course_run_key)
if entitlement:
return cls.enroll_user_and_fulfill_entitlement(entitlement, course_run_key)
return False
    @classmethod
    def unenroll_entitlement(cls, course_enrollment, skip_refund):
        """
        Un-enroll the user from entitlement and refund if needed.

        Arguments:
            course_enrollment (CourseEnrollment): The enrollment being torn down.
            skip_refund (bool): When True, only detach the session; never refund.
        """
        course_uuid = get_course_uuid_for_course(course_enrollment.course_id)
        course_entitlement = cls.get_entitlement_if_active(course_enrollment.user, course_uuid)
        # Only act when this enrollment is the one currently attached to the entitlement.
        if course_entitlement and course_entitlement.enrollment_course_run == course_enrollment:
            course_entitlement.set_enrollment(None)
            if not skip_refund and course_entitlement.is_entitlement_refundable():
                # Expire the entitlement, then initiate the refund;
                # refund() raises IntegrityError on failure.
                course_entitlement.expire_entitlement()
                course_entitlement.refund()
    def refund(self):
        """
        Initiate refund process for the entitlement.

        Raises:
            IntegrityError: when the refund could not be processed. Raising is
                used deliberately to trigger a transaction rollback of the
                preceding changes — presumably the caller runs inside an
                atomic block; confirm.
        """
        refund_successful = refund_entitlement(course_entitlement=self)
        if not refund_successful:
            # This state is achieved in most cases by a failure in the ecommerce service to process the refund.
            log.warning(
                'Entitlement Refund failed for Course Entitlement [%s], alert User',
                self.uuid
            )
            # Force Transaction reset with an Integrity error exception, this will revert all previous transactions
            raise IntegrityError
def save(self, *args, **kwargs):
"""
Null out empty strings in order_number
"""
if not self.order_number:
self.order_number = None
super().save(*args, **kwargs)
class CourseEntitlementSupportDetail(TimeStampedModel):
    """
    Table recording support interactions with an entitlement

    .. no_pii:
    """
    # Reasons deprecated
    LEAVE_SESSION = 'LEAVE'
    CHANGE_SESSION = 'CHANGE'
    LEARNER_REQUEST_NEW = 'LEARNER_NEW'
    COURSE_TEAM_REQUEST_NEW = 'COURSE_TEAM_NEW'
    OTHER = 'OTHER'
    ENTITLEMENT_SUPPORT_REASONS = (
        (LEAVE_SESSION, 'Learner requested leave session for expired entitlement'),
        (CHANGE_SESSION, 'Learner requested session change for expired entitlement'),
        (LEARNER_REQUEST_NEW, 'Learner requested new entitlement'),
        # Typo fix: display label previously read 'learnerg'.
        (COURSE_TEAM_REQUEST_NEW, 'Course team requested entitlement for learner'),
        (OTHER, 'Other'),
    )

    REISSUE = 'REISSUE'
    CREATE = 'CREATE'
    ENTITLEMENT_SUPPORT_ACTIONS = (
        (REISSUE, 'Re-issue entitlement'),
        (CREATE, 'Create new entitlement'),
    )

    entitlement = models.ForeignKey('entitlements.CourseEntitlement', on_delete=models.CASCADE)
    support_user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    # Deprecated: use action instead.
    reason = models.CharField(max_length=15, choices=ENTITLEMENT_SUPPORT_REASONS)
    action = models.CharField(max_length=15, choices=ENTITLEMENT_SUPPORT_ACTIONS)
    comments = models.TextField(null=True)
    # The run the learner left, if any. db_constraint=False + DO_NOTHING: no FK
    # enforcement and no cascade if the referenced CourseOverview row goes away.
    unenrolled_run = models.ForeignKey(
        CourseOverview,
        null=True,
        blank=True,
        db_constraint=False,
        on_delete=models.DO_NOTHING,
    )
    history = HistoricalRecords()

    def __str__(self):
        """Human-readable representation of an entitlement support detail."""
        return 'Course Entitlement Support Detail: entitlement: {}, support_user: {}, reason: {}'.format(
            self.entitlement,
            self.support_user,
            self.reason,
        )

    @classmethod
    def get_support_actions_list(cls):
        """
        Method for retrieving a serializable version of the entitlement support actions

        Returns
            list: Containing the possible support action codes
        """
        return [
            action[0]  # get just the action code, not the human readable description.
            for action
            in cls.ENTITLEMENT_SUPPORT_ACTIONS
        ]
| StarcoderdataPython |
3282058 | """Schema object tests"""
import pytest
from neoalchemy import Node, Property
def test_simple_labeled_node():
    """A bare label yields a one-element label tuple, frozen, with no schema."""
    plain = Node('Node')
    assert plain.labels == ('Node',)
    # Labels cannot be reassigned once the node exists.
    with pytest.raises(AttributeError):
        plain.labels = ('bob',)
    assert not plain.schema
def test_node_one_index():
    """An indexed property emits exactly one INDEX schema entry."""
    node = Node('Person', name=Property(indexed=True))
    assert node.schema == ['INDEX ON :Person(name)']
    prop = node['name']
    assert prop.indexed
    assert not prop.unique
    assert not prop.required
def test_node_one_unique():
    """A unique property implies an index and emits a UNIQUE constraint."""
    node = Node('Person', SSN=Property(unique=True))
    expected = 'CONSTRAINT ON ( person:Person ) ASSERT person.SSN IS UNIQUE'
    assert node.schema == [expected]
    prop = node['SSN']
    assert prop.indexed
    assert prop.unique
    assert not prop.required
def test_node_one_required():
    """A required property emits only an existence constraint, no index."""
    node = Node('Person', name=Property(required=True))
    expected = 'CONSTRAINT ON ( person:Person ) ASSERT exists(person.name)'
    assert node.schema == [expected]
    prop = node['name']
    assert not prop.indexed
    assert not prop.unique
    assert prop.required
def test_node_one_required_and_indexed():
    """required + indexed emits both an INDEX and an existence constraint."""
    node = Node('Person', name=Property(required=True, indexed=True))
    expected = [
        'INDEX ON :Person(name)',
        'CONSTRAINT ON ( person:Person ) ASSERT exists(person.name)',
    ]
    assert node.schema == expected
    prop = node['name']
    assert prop.indexed
    assert not prop.unique
    assert prop.required
def test_node_one_required_and_unique():
    """required + unique emits a UNIQUE constraint plus an existence constraint."""
    node = Node('Person', name=Property(required=True, unique=True))
    expected = [
        'CONSTRAINT ON ( person:Person ) ASSERT person.name IS UNIQUE',
        'CONSTRAINT ON ( person:Person ) ASSERT exists(person.name)',
    ]
    assert node.schema == expected
    prop = node['name']
    assert prop.indexed
    assert prop.unique
    assert prop.required
| StarcoderdataPython |
# src/pretix/base/templatetags/cache_large.py
#
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 <NAME> and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
from django.conf import settings
from django.template import Library, Node, TemplateSyntaxError, Variable
from django.templatetags.cache import CacheNode
register = Library()
class DummyNode(Node):
    """Pass-through template node: renders its contents without any caching."""

    def __init__(self, nodelist, *args):
        # Extra positional args are accepted but ignored — presumably to
        # mirror CacheNode's constructor signature; confirm.
        self.nodelist = nodelist

    def render(self, context):
        return self.nodelist.render(context)
@register.tag('cache_large')
def do_cache(parser, token):
    """
    ``{% cache_large expire_time fragment_name [vary_on ...] %}`` — like
    Django's ``{% cache %}`` but routed to the CACHE_LARGE_VALUES_ALIAS cache,
    and rendered uncached when CACHE_LARGE_VALUES_ALLOWED is off.
    """
    # Consume everything up to the matching end tag, then drop the end token
    # itself from the parser stream.
    nodelist = parser.parse(('endcache_large',))
    parser.delete_first_token()
    tokens = token.split_contents()
    if len(tokens) < 3:
        raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % tokens[0])
    if not settings.CACHE_LARGE_VALUES_ALLOWED:
        # Large-value caching disabled: fall back to a plain render.
        return DummyNode(
            nodelist,
        )
    return CacheNode(
        nodelist, parser.compile_filter(tokens[1]),  # expire time (template expression)
        tokens[2],  # fragment_name can't be a variable.
        [parser.compile_filter(t) for t in tokens[3:]],  # "vary on" arguments
        # CacheNode resolves the cache name as a template Variable, so the
        # alias is passed as a quoted literal via repr().
        Variable(repr(settings.CACHE_LARGE_VALUES_ALIAS)),
    )
| StarcoderdataPython |
197864 | """Network utils for alternative location tool."""
import json
import netifaces
import requests
import socket
class NetworkUtils(object):
    """Network utils for alternative location tool."""

    def GetWanIP(self):
        """Gets the external ip address or the WAN IP address.

        Returns:
            The ip address as a string.
        """
        # A timeout keeps the tool from hanging indefinitely if the service
        # stalls; Response.json() replaces the manual json.loads(r.text).
        r = requests.get('https://api.ipify.org?format=json', timeout=10)
        return r.json()['ip']

    def GetLanInterface(self):
        """Gets the LAN IP address per interface.

        Inspired by: http://stackoverflow.com/questions/270745/
        Another option is to use the sockets module: socket.gethostbyname(socket.gethostname())

        Returns:
            A dictionary mapping interface name to a dict with the interface's
            mac_address and local_address; interfaces missing either are skipped.
        """
        internal_addresses = {}
        for interface in netifaces.interfaces():
            # Fetch the address table once per interface (was queried twice).
            addresses = netifaces.ifaddresses(interface)
            mac_address = None
            local_address = None
            try:
                mac_address = addresses[netifaces.AF_LINK][0]['addr']
            except KeyError:
                pass
            try:
                local_address = addresses[netifaces.AF_INET][0]['addr']
            except KeyError:
                pass
            # Only interfaces exposing both a MAC and an IPv4 address are kept.
            if local_address and mac_address:
                internal_addresses[interface] = {
                    'mac_address': mac_address,
                    'local_address': local_address
                }
        return internal_addresses
1679712 | # coding: utf-8
from __future__ import print_function
import logging
from PySide2.QtCore import Qt, Signal
from PySide2.QtWidgets import QDockWidget
LOGGER = logging.getLogger('ProfileInspector.dockable_widget')
class DockableWindow(QDockWidget):
    """Dock widget that re-docks instead of closing, deleting marked children on close."""

    # NOTE(review): declared, but the emit in closeEvent is commented out, so
    # nothing currently fires this signal.
    close_event = Signal()

    def __init__(self, title):
        QDockWidget.__init__(self, title)
        # objectName of the child widget(s) to delete on close; None disables
        # deletion — presumably assigned by the owning code (TODO confirm).
        self._delete_children = None

    def delete_children(self):
        """Schedule deletion of every direct child whose objectName matches _delete_children."""
        for children in self.children():
            if children.objectName() == self._delete_children:
                children.deleteLater()

    def closeEvent(self, event):
        """Intercept close: clean up marked children and re-dock a floating widget."""
        self.delete_children()
        if self.isFloating():
            self.setFloating(False)
            LOGGER.debug('Re docking widget')
        # NOTE(review): ignore() is unconditional, so the widget never actually
        # closes, floating or not — confirm that is intended.
        event.ignore()
        # self.close_event.emit()

    def changeEvent(self, event):
        """Add min/max window buttons when the widget is undocked (floating)."""
        # Compares the event type by enum name; only reacts while floating.
        if event.type().name == 'ActivationChange' and self.isFloating():
            self.setWindowFlag(Qt.WindowMinMaxButtonsHint)
            self.show()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.